Instruction stringlengths 14 778 | input_code stringlengths 0 4.24k | output_code stringlengths 1 5.44k |
|---|---|---|
Update from Hackage at 2018-01-06T10:18:39Z | homepage: https://github.com/k0ral/euler-tour-tree
changelog-type: ''
hash: 01659609d03b785120ecafdb16d96b22f9f147d90e75049313f6060bd02883bf
test-bench-deps:
hlint: -any
base: ! '>=4.9'
containers: -any
keys: -any
sequence: -any
tasty-quickcheck: -any
tasty-hunit: -any
tasty: -any
QuickCheck: -any
euler-tour-tree: -any
maintainer: koral@mailoo.org
synopsis: Euler tour trees
changelog: ''
basic-deps:
fingertree: -any
base: ! '>=4.9 && <4.11'
parser-combinators: -any
Unique: -any
containers: -any
mtl: -any
transformers: -any
all-versions:
- '0.1.0.0'
author: koral
latest: '0.1.0.0'
description-type: haddock
description: ''
license-name: PublicDomain
| homepage: https://github.com/k0ral/euler-tour-tree
changelog-type: ''
hash: 10a5f9c00e0c86668753aa59b919a8ae3992af49a3a5dd468d8363957b0d40be
test-bench-deps:
hlint: -any
base: ! '>=4.9'
containers: -any
keys: -any
sequence: -any
tasty-quickcheck: -any
tasty-hunit: -any
tasty: -any
QuickCheck: -any
euler-tour-tree: -any
maintainer: koral@mailoo.org
synopsis: Euler tour trees
changelog: ''
basic-deps:
fingertree: -any
base: ! '>=4.9 && <4.11'
parser-combinators: -any
Unique: -any
containers: -any
mtl: -any
transformers: -any
all-versions:
- '0.1.0.0'
- '0.1.0.1'
author: koral
latest: '0.1.0.1'
description-type: haddock
description: ''
license-name: PublicDomain
|
Make sure we're actually installing | version: 0.5.2.{build}
os: Windows Server 2012 R2
environment:
matrix:
# http://www.appveyor.com/docs/installed-software#python
- PYTHON: "C:\\Python34"
- PYTHON: "C:\\Python35"
- PYTHON: "C:\\Python36"
- PYTHON: "C:\\Python34-x64"
DISTUTILS_USE_SDK: "1"
- PYTHON: "C:\\Python35-x64"
- PYTHON: "C:\\Python36-x64"
# TODO: Miniconda
install:
- "%PYTHON%\\Scripts\\pip install -r requirements-tests.txt"
build: off
test_script:
- "%PYTHON%\\Scripts\\py.test"
| version: 0.5.2.{build}
os: Windows Server 2012 R2
environment:
matrix:
# http://www.appveyor.com/docs/installed-software#python
- PYTHON: "C:\\Python34"
- PYTHON: "C:\\Python35"
- PYTHON: "C:\\Python36"
- PYTHON: "C:\\Python34-x64"
DISTUTILS_USE_SDK: "1"
- PYTHON: "C:\\Python35-x64"
- PYTHON: "C:\\Python36-x64"
# TODO: Miniconda
install:
- "%PYTHON%\\Scripts\\pip install -r requirements-tests.txt"
- "python setup.py develop"
build: off
test_script:
- "%PYTHON%\\Scripts\\py.test"
|
Set bwc skip version after backport | ---
teardown:
- do:
cluster.put_settings:
body:
transient:
search.max_keep_alive: null
---
"Max keep alive":
- skip:
version: " - 7.0.0"
reason: search.max_keep_alive was added in 7.0.0
- do:
index:
index: test_scroll
type: test
id: 1
body: { foo: 1 }
- do:
index:
index: test_scroll
type: test
id: 2
body: { foo: 1 }
- do:
indices.refresh: {}
- do:
cluster.put_settings:
body:
transient:
search.default_keep_alive: "1m"
search.max_keep_alive: "1m"
- do:
catch: /.*Keep alive for scroll.*is too large.*/
search:
index: test_scroll
size: 1
scroll: 2m
sort: foo
body:
query:
match_all: {}
- do:
search:
index: test_scroll
size: 1
scroll: 1m
sort: foo
body:
query:
match_all: {}
- set: {_scroll_id: scroll_id}
- match: {hits.total: 2 }
- length: {hits.hits: 1 }
- do:
catch: /.*Keep alive for scroll.*is too large.*/
scroll:
scroll_id: $scroll_id
scroll: 3m
| ---
teardown:
- do:
cluster.put_settings:
body:
transient:
search.max_keep_alive: null
---
"Max keep alive":
- skip:
version: " - 6.0.99"
reason: search.max_keep_alive was added in 6.1.0
- do:
index:
index: test_scroll
type: test
id: 1
body: { foo: 1 }
- do:
index:
index: test_scroll
type: test
id: 2
body: { foo: 1 }
- do:
indices.refresh: {}
- do:
cluster.put_settings:
body:
transient:
search.default_keep_alive: "1m"
search.max_keep_alive: "1m"
- do:
catch: /.*Keep alive for scroll.*is too large.*/
search:
index: test_scroll
size: 1
scroll: 2m
sort: foo
body:
query:
match_all: {}
- do:
search:
index: test_scroll
size: 1
scroll: 1m
sort: foo
body:
query:
match_all: {}
- set: {_scroll_id: scroll_id}
- match: {hits.total: 2 }
- length: {hits.hits: 1 }
- do:
catch: /.*Keep alive for scroll.*is too large.*/
scroll:
scroll_id: $scroll_id
scroll: 3m
|
Fix github workflow for TestPyPI | name: publish PyPI and TestPyPI
on: push
jobs:
build-linux:
runs-on: ubuntu-latest
strategy:
max-parallel: 5
steps:
- uses: actions/checkout@v2
- name: Set up Python 3.8
uses: actions/setup-python@v2
with:
python-version: 3.8
- name: Make sdist
run: |
./get_nanoversion.sh
cat __nanoversion__.txt
pip install numpy
python setup.py sdist
- name: Publish package to TestPyPI
if: github.event_name == 'push' && startsWith(github.ref, 'refs/heads/rc')
uses: pypa/gh-action-pypi-publish@release/v1
with:
user: __token__
password: ${{ secrets.TEST_PYPI_API_TOKEN }}
- name: Publish package to PyPI
if: github.event_name == 'push' && startsWith(github.ref, 'refs/heads/master')
uses: pypa/gh-action-pypi-publish@release/v1
with:
user: __token__
password: ${{ secrets.PYPI_API_TOKEN }}
| name: publish PyPI and TestPyPI
on: push
jobs:
build-linux:
runs-on: ubuntu-latest
strategy:
max-parallel: 5
steps:
- uses: actions/checkout@v2
- name: Set up Python 3.8
uses: actions/setup-python@v2
with:
python-version: 3.8
- name: Make sdist
run: |
./get_nanoversion.sh
cat __nanoversion__.txt
pip install numpy
python setup.py sdist
- name: Publish package to TestPyPI
if: github.event_name == 'push' && startsWith(github.ref, 'refs/heads/rc')
uses: pypa/gh-action-pypi-publish@release/v1
with:
user: __token__
password: ${{ secrets.TEST_PYPI_API_TOKEN }}
repository_url: https://test.pypi.org/legacy/
- name: Publish package to PyPI
if: github.event_name == 'push' && startsWith(github.ref, 'refs/heads/master')
uses: pypa/gh-action-pypi-publish@release/v1
with:
user: __token__
password: ${{ secrets.PYPI_API_TOKEN }}
|
Use `cabal new-build` on AppVeyor | install:
# Using '-y' and 'refreshenv' as a workaround to:
# https://github.com/haskell/cabal/issues/3687
- choco install -y ghc --version 8.0.2
- refreshenv
# See http://help.appveyor.com/discussions/problems/6312-curl-command-not-found#comment_42195491
# NB: Do this after refreshenv, otherwise it will be clobbered!
- set PATH=C:\Program Files\Git\mingw64\bin;%PATH%;C:\msys64\usr\bin
- curl -o cabal.zip --progress-bar https://www.haskell.org/cabal/release/cabal-install-1.24.0.0/cabal-install-1.24.0.0-x86_64-unknown-mingw32.zip
- 7z x -bd cabal.zip
- cabal --version
- cabal update
- curl -o z3.zip -L https://github.com/Z3Prover/z3/releases/download/z3-4.5.0/z3-4.5.0-x64-win.zip
- 7z x -bd z3.zip
- cp z3-4.5.0-x64-win/bin/z3.exe .
build_script:
- cabal sandbox init
- cabal install Cabal
- make
| install:
# Using '-y' and 'refreshenv' as a workaround to:
# https://github.com/haskell/cabal/issues/3687
- choco install -y ghc --version 8.2.1
- refreshenv
# See http://help.appveyor.com/discussions/problems/6312-curl-command-not-found#comment_42195491
# NB: Do this after refreshenv, otherwise it will be clobbered!
- set PATH=C:\Program Files\Git\mingw64\bin;%PATH%;C:\msys64\usr\bin
- curl -o z3.zip -L https://github.com/Z3Prover/z3/releases/download/z3-4.5.0/z3-4.5.0-x64-win.zip
- 7z x -bd z3.zip
- cp z3-4.5.0-x64-win/bin/z3.exe .
build_script:
- cabal update
- cabal new-build -j
|
Set envionment python to 3.6 by default | name: ospcdyn
channels:
- ospc
dependencies:
- setuptools
- mkl
- scipy>=0.18.1
- matplotlib
- taxcalc>=0.19.0
- numba
- numpy>=1.12.1
- pandas>=0.20.1
- pytest
- pytest-pep8
- pytest-xdist
- dask
- bokeh
| name: ospcdyn
channels:
- ospc
dependencies:
- python>=3.6
- setuptools
- mkl
- scipy>=0.18.1
- matplotlib
- taxcalc>=0.19.0
- numba
- numpy>=1.12.1
- pandas>=0.20.1
- pytest
- pytest-pep8
- pytest-xdist
- dask
- bokeh
|
Allow setting of container scale via ENV var | version: '2'
services:
gp-lookup:
scale: 1
start_on_create: true
health_check:
response_timeout: 5000
healthy_threshold: 2
port: 9292
unhealthy_threshold: 3
initializing_timeout: 60000
interval: 5000
strategy: recreate
request_line: GET "/" "HTTP/1.0"
reinitializing_timeout: 60000
| version: '2'
services:
gp-lookup:
scale: $INITIAL_CONTAINER_SCALE
start_on_create: true
health_check:
response_timeout: 5000
healthy_threshold: 2
port: 9292
unhealthy_threshold: 3
initializing_timeout: 60000
interval: 5000
strategy: recreate
request_line: GET "/" "HTTP/1.0"
reinitializing_timeout: 60000
|
Use puppet-openstack-module-unit-jobs to define unit test jobs | - project-template:
name: puppet-tripleo-module-unit-jobs
check:
jobs:
- puppet-openstack-unit-5.5-centos-7
- puppet-openstack-unit-5.5-centos-8
- puppet-openstack-unit-6.0-ubuntu-bionic
- puppet-openstack-unit-latest-ubuntu-bionic:
voting: false
gate:
jobs:
- puppet-openstack-unit-5.5-centos-7
- puppet-openstack-unit-5.5-centos-8
- puppet-openstack-unit-6.0-ubuntu-bionic
- project:
templates:
- puppet-openstack-check-jobs
- puppet-tripleo-module-unit-jobs
- tripleo-undercloud-jobs
- tripleo-multinode-container-minimal
- release-notes-jobs-python3
- tripleo-standalone-scenarios-full
check:
jobs:
- tripleo-ci-centos-7-scenario007-multinode-oooq-container:
files:
- ^manifests/profile/pacemaker/ovn.*$
- ^manifests/profile/(base|pacemaker)/neutron/plugins/ml2/ovn.*$
- ^manifests/profile/(base|pacemaker)/neutron/agents/ovn.*$
- ^manifests/profile/(base|pacemaker)/neutron/ovn.*$
gate:
queue: tripleo
| - project:
templates:
- puppet-openstack-check-jobs
- puppet-openstack-module-unit-jobs
- tripleo-undercloud-jobs
- tripleo-multinode-container-minimal
- release-notes-jobs-python3
- tripleo-standalone-scenarios-full
check:
jobs:
- tripleo-ci-centos-7-scenario007-multinode-oooq-container:
files:
- ^manifests/profile/pacemaker/ovn.*$
- ^manifests/profile/(base|pacemaker)/neutron/plugins/ml2/ovn.*$
- ^manifests/profile/(base|pacemaker)/neutron/agents/ovn.*$
- ^manifests/profile/(base|pacemaker)/neutron/ovn.*$
gate:
queue: tripleo
|
Add org-wide config for Stale app | # Number of days of inactivity before an issue becomes stale
daysUntilStale: 60
# Number of days of inactivity before a stale issue is closed
daysUntilClose: 7
# Issues with these labels will never be considered stale
exemptLabels:
- pinned
- security
# Label to use when marking an issue as stale
staleLabel: stale
# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
This issue has been automatically marked as stale because it has not had
recent activity. It will be closed if no further activity occurs. Thank you
for your contributions.
# Comment to post when closing a stale issue. Set to `false` to disable
closeComment: false
| _extends: .github
|
Fix for making work Ansistrano with Ansible 2.0 | ---
# Update code deployment step
- local_action:
set_fact ansistrano_release_version={{ lookup('pipe', 'date -u +%Y%m%d%H%M%SZ') }}
run_once: true
when: ansistrano_release_version is not defined
- name: ANSISTRANO | Get release path
command: echo "{{ ansistrano_releases_path.stdout }}/{{ ansistrano_release_version }}"
register: ansistrano_release_path
- include: "update-code/{{ ansistrano_deploy_via | default('rsync') }}.yml"
- name: ANSISTRANO | Copy release version into REVISION file
copy: content={{ ansistrano_release_version }} dest={{ ansistrano_release_path.stdout }}/REVISION
| ---
# Update code deployment step
- name: ANSISTRANO | Get release version
set_fact:
ansistrano_release_version: "{{ lookup('pipe', 'date -u +%Y%m%d%H%M%SZ') }}"
run_once: true
when: ansistrano_release_version is not defined
delegate_to: 127.0.0.1
- name: ANSISTRANO | Get release path
command: echo "{{ ansistrano_releases_path.stdout }}/{{ ansistrano_release_version }}"
register: ansistrano_release_path
- include: "update-code/{{ ansistrano_deploy_via | default('rsync') }}.yml"
- name: ANSISTRANO | Copy release version into REVISION file
copy: content={{ ansistrano_release_version }} dest={{ ansistrano_release_path.stdout }}/REVISION
|
Update from Hackage at 2018-05-07T05:59:58Z | homepage: ''
changelog-type: ''
hash: 8c8faad109dbe295d6300c3af19cb0bf816114583c15d3f776c0ec1ed09d32f6
test-bench-deps: {}
maintainer: strake888@gmail.com
synopsis: Utilities
changelog: ''
basic-deps:
base: ! '>=4.9 && <5'
all-versions:
- '0.1.0.0'
- '0.1.1.0'
- '0.1.2.0'
- '0.1.2.1'
- '0.1.3.0'
- '0.1.4.0'
- '0.1.5.0'
- '0.1.6.0'
- '0.1.7.0'
author: M Farkas-Dyck
latest: '0.1.7.0'
description-type: haddock
description: ''
license-name: BSD3
| homepage: ''
changelog-type: ''
hash: 729b8ae9ab723882e8c23e1d7d8927ed880c402fdb459ff072c720efff449dd1
test-bench-deps: {}
maintainer: strake888@gmail.com
synopsis: Utilities
changelog: ''
basic-deps:
base: ! '>=4.9 && <5'
all-versions:
- '0.1.0.0'
- '0.1.1.0'
- '0.1.2.0'
- '0.1.2.1'
- '0.1.3.0'
- '0.1.4.0'
- '0.1.5.0'
- '0.1.6.0'
- '0.1.7.0'
- '0.1.8.0'
author: M Farkas-Dyck
latest: '0.1.8.0'
description-type: haddock
description: ''
license-name: BSD3
|
Update AdminControl to 1.0.4 (5) | Categories:
- System
License: GPL-3.0-or-later
SourceCode: https://github.com/linux-colonel/AdminControl
IssueTracker: https://github.com/linux-colonel/AdminControl/issues
AutoName: AdminControl
Summary: Additional security settings
Description: Allows you to disable the fingerprint reader on the Lock Screen without
deleting all of your fingerprints.
RepoType: git
Repo: https://github.com/linux-colonel/AdminControl
Builds:
- versionName: 1.0.2
versionCode: 3
commit: v1.0.2
subdir: app
gradle:
- yes
- versionName: 1.0.3
versionCode: 4
commit: v1.0.3
subdir: app
gradle:
- yes
AutoUpdateMode: Version v%v
UpdateCheckMode: Tags
CurrentVersion: 1.0.3
CurrentVersionCode: 4
| Categories:
- System
License: GPL-3.0-or-later
SourceCode: https://github.com/linux-colonel/AdminControl
IssueTracker: https://github.com/linux-colonel/AdminControl/issues
AutoName: AdminControl
Summary: Additional security settings
Description: Allows you to disable the fingerprint reader on the Lock Screen without
deleting all of your fingerprints.
RepoType: git
Repo: https://github.com/linux-colonel/AdminControl
Builds:
- versionName: 1.0.2
versionCode: 3
commit: v1.0.2
subdir: app
gradle:
- yes
- versionName: 1.0.3
versionCode: 4
commit: v1.0.3
subdir: app
gradle:
- yes
- versionName: 1.0.4
versionCode: 5
commit: v1.0.4
subdir: app
gradle:
- yes
AutoUpdateMode: Version v%v
UpdateCheckMode: Tags
CurrentVersion: 1.0.4
CurrentVersionCode: 5
|
Add securedrop_code var to upgrade scenario | ---
install_local_packages: false
primary_network_iface: eth0
ssh_users: vagrant
securedrop_app_install_from_repo: true
allow_direct_access: true
ssh_listening_address: 0.0.0.0
monitor_ip: "{{ hostvars['mon-staging']['ansible_'+primary_network_iface].ipv4.address }}"
monitor_hostname: "{{ hostvars['mon-staging'].ansible_hostname }}"
app_ip: "{{ hostvars['app-staging']['ansible_'+primary_network_iface].ipv4.address }}"
app_hostname: "{{ hostvars['app-staging'].ansible_hostname }}"
tor_apt_repo_url: "https://tor-apt.freedom.press"
etc_hosts:
app-staging:
- reg: ^127.0.0.1
line: 127.0.0.1 app-staging localhost
- reg: securedrop-monitor-server-alias$
line: "{{ monitor_ip }} mon-staging securedrop-monitor-server-alias"
mon-staging:
- reg: ^127.0.0.1\s+mon
line: 127.0.0.1 mon-staging localhost
- reg: app-staging$
line: "{{ app_ip }} app-staging"
# Set Xenial as apt repo channel for hosting deb packages
rep_dist: xenial
| ---
install_local_packages: false
primary_network_iface: eth0
ssh_users: vagrant
securedrop_app_install_from_repo: true
allow_direct_access: true
ssh_listening_address: 0.0.0.0
monitor_ip: "{{ hostvars['mon-staging']['ansible_'+primary_network_iface].ipv4.address }}"
monitor_hostname: "{{ hostvars['mon-staging'].ansible_hostname }}"
app_ip: "{{ hostvars['app-staging']['ansible_'+primary_network_iface].ipv4.address }}"
app_hostname: "{{ hostvars['app-staging'].ansible_hostname }}"
tor_apt_repo_url: "https://tor-apt.freedom.press"
securedrop_code: "/var/www/securedrop"
etc_hosts:
app-staging:
- reg: ^127.0.0.1
line: 127.0.0.1 app-staging localhost
- reg: securedrop-monitor-server-alias$
line: "{{ monitor_ip }} mon-staging securedrop-monitor-server-alias"
mon-staging:
- reg: ^127.0.0.1\s+mon
line: 127.0.0.1 mon-staging localhost
- reg: app-staging$
line: "{{ app_ip }} app-staging"
# Set Xenial as apt repo channel for hosting deb packages
rep_dist: xenial
|
Add additional 100gb Graphite disk | ---
vapps:
- name: 'offsite-backup-1'
vdc_name: '0e7t-IS-OVDC-BACKUP'
catalog_name: 'ubuntu'
vapp_template_name: 'preseeded-540059de'
vm:
hardware_config:
memory: '2048'
cpu: '2'
network_connections:
- name: Default
ip_address: '192.168.152.10'
extra_disks:
- name: backup-disk
size: 307200
- name: logs-disk
size: 524288
- name: assetsbackup-assets
size: 524288
- name: graphitebackup-graphite
size: 102400
- name: assetsbackup-assets2
size: 524288
- name: assetsbackup-assets3
size: 524288
bootstrap:
script_path: 'vcloud/box/common/bootstrap.erb'
vars:
pass_hash: '$6$/66cG1LV$F16Yn7DmFHL6Hkti8U0CBS9r7GtDT03KKsrdMVvVM79s2xONq9A4KdGEldozDanhsdgpg4ZzjWCwzOwxdCrWi0'
| ---
vapps:
- name: 'offsite-backup-1'
vdc_name: '0e7t-IS-OVDC-BACKUP'
catalog_name: 'ubuntu'
vapp_template_name: 'preseeded-540059de'
vm:
hardware_config:
memory: '2048'
cpu: '2'
network_connections:
- name: Default
ip_address: '192.168.152.10'
extra_disks:
- name: backup-disk
size: 307200
- name: logs-disk
size: 524288
- name: assetsbackup-assets
size: 524288
- name: graphitebackup-graphite
size: 102400
- name: assetsbackup-assets2
size: 524288
- name: assetsbackup-assets3
size: 524288
- name: graphitebackup-graphite2
size: 102400
bootstrap:
script_path: 'vcloud/box/common/bootstrap.erb'
vars:
pass_hash: '$6$/66cG1LV$F16Yn7DmFHL6Hkti8U0CBS9r7GtDT03KKsrdMVvVM79s2xONq9A4KdGEldozDanhsdgpg4ZzjWCwzOwxdCrWi0'
|
Fix up the GHC warning | # Upgrade the resolver semi-regularly so that in Appveyor the "stack init"
# and the below resolver can share at least the compiler
resolver: nightly-2018-08-21
packages: [foo, bar, baz]
ghc-options:
"$locals": -Werror -Wunused-binds -Wunused-imports -Wno-missing-home-modules -Wno-nonportable-include-path
| # Upgrade the resolver semi-regularly so that in Appveyor the "stack init"
# and the below resolver can share at least the compiler
resolver: nightly-2018-08-21
packages: [foo, bar, baz]
ghc-options:
"$locals": -Werror -Wunused-binds -Wunused-imports -Wno-missing-home-modules -optP-Wno-nonportable-include-path
|
Improve cell wall signal in Endogenous_BASL_02 | ---
wall_channel: 0
marker_channel: 1
surface_percentile: 95
wall_percentile_filter_percentile: 95
wall_percentile_filter_size: 2
wall_zabove: -4
wall_zbelow: 6
wall_threshold_adaptive_block_size: 151
wall_remove_small_objects_in_cell_min_size: 20
wall_remove_small_objects_in_wall_min_size: 20
wall_erode_step: True
marker_zabove: -4
marker_zbelow: 6
marker_min_intensity: 0
| ---
wall_channel: 0
marker_channel: 1
surface_percentile: 95
wall_percentile_filter_percentile: 95
wall_percentile_filter_size: 2
wall_zabove: -2
wall_zbelow: 6
wall_threshold_adaptive_block_size: 151
wall_remove_small_objects_in_cell_min_size: 20
wall_remove_small_objects_in_wall_min_size: 20
wall_erode_step: True
marker_zabove: -4
marker_zbelow: 6
marker_min_intensity: 0
|
Configure VNET to secure the RPC node | version: '2'
services:
nako:
container_name: city-nako
networks:
- city
image: coinvault/nako:core
command: city
ports:
- 9019:9000
depends_on:
- mongo
- client
client:
container_name: city-client
networks:
- city
image: citychain/citychain
command: ["-server=1", "-rpcallowip=0.0.0.0", "-rpcport=5000", "-rpcuser=rpcuser", "-rpcpassword=rpcpassword", "-rpcthreads=300", "-txindex=1"]
ports:
- 5019:5000
- 4333:4333
mongo:
container_name: city-mongo
networks:
- city
image: mongo:3.2
networks:
city:
driver: bridge | version: '2'
services:
nako:
container_name: city-nako
networks:
nakonet:
ipv4_address: 172.16.10.100
image: coinvault/nako
command: city
ports:
- 9019:9000
depends_on:
- mongo
- client
client:
container_name: city-client
networks:
nakonet:
ipv4_address: 172.16.10.101
image: citychain/citychain
command: ["-server=1", "-rpcallowip=172.16.10.100", "-rpcbind=172.16.10.101", "-rpcport=5000", "-rpcuser=rpcuser", "-rpcpassword=rpcpassword", "-rpcthreads=300", "-txindex=1"]
ports:
- 5019:5000
- 4333:4333
mongo:
container_name: city-mongo
networks:
nakonet:
ipv4_address: 172.16.10.102
image: mongo:3.2
networks:
nakonet:
driver: bridge
ipam:
driver: default
config:
- subnet: 172.16.10.0/24 |
Update configuration for docker hub | sudo: required
services:
- docker
language: node_js
node_js:
- "6.0"
cache:
yarn: true
script:
- "npm run lint"
- "npm run test:cover"
after_success:
- bash <(curl -s https://codecov.io/bash)
- docker build --build-arg version=$(node -p -e "require('./package.json').version") -t oshalygin/olegjs:$(node -p -e "require('./package.json').version") .
- if [ "$TRAVIS_BRANCH" == "master" ]; then
docker login -u="$DOCKER_USERNAME" -p="$DOCKER_PASSWORD" http://index.docker.io;
docker push oshalygin/olegjs:$(node -p -e "require('./package.json').version");
fi
| sudo: required
services:
- docker
language: node_js
node_js:
- "6.0"
cache:
yarn: true
script:
- "npm run lint"
- "npm run test:cover"
after_success:
- bash <(curl -s https://codecov.io/bash)
- docker build --build-arg version=$(node -p -e "require('./package.json').version") -t index.docker.io/oshalygin/olegjs:$(node -p -e "require('./package.json').version") .
- if [ "$TRAVIS_BRANCH" == "master" ]; then
docker login -u="$DOCKER_USERNAME" -p="$DOCKER_PASSWORD" http://index.docker.io;
docker push index.docker.io/oshalygin/olegjs:$(node -p -e "require('./package.json').version");
fi
|
Fix Travis build order to allow ql tests to pass | language: go
go:
- 1.1
- 1.2
services:
- redis-server
before_script:
- mysql -e "CREATE DATABASE goat"
- mysql goat < res/mysql/announce_log.sql
- mysql goat < res/mysql/api_keys.sql
- mysql goat < res/mysql/files.sql
- mysql goat < res/mysql/files_users.sql
- mysql goat < res/mysql/scrape_log.sql
- mysql goat < res/mysql/users.sql
- mysql goat < res/mysql/whitelist.sql
- mysql -e "UPDATE mysql.user SET password=PASSWORD('travis') WHERE user='travis'; FLUSH PRIVILEGES"
script:
- go get
- make
- ./bin/goat -test
- make test
- go get github.com/cznic/ql
- make ql
- ./bin/goat -test
| language: go
go:
- 1.1
- 1.2
services:
- redis-server
before_script:
- mysql -e "CREATE DATABASE goat"
- mysql goat < res/mysql/announce_log.sql
- mysql goat < res/mysql/api_keys.sql
- mysql goat < res/mysql/files.sql
- mysql goat < res/mysql/files_users.sql
- mysql goat < res/mysql/scrape_log.sql
- mysql goat < res/mysql/users.sql
- mysql goat < res/mysql/whitelist.sql
- mysql -e "UPDATE mysql.user SET password=PASSWORD('travis') WHERE user='travis'; FLUSH PRIVILEGES"
script:
- go get
- go get github.com/cznic/ql
- make
- ./bin/goat -test
- make test
- make ql
- ./bin/goat -test
|
Increase Go's own test timeout. | sudo: required
language: go
go:
- 1.4
- 1.5
services:
- docker
script:
- docker login -e $CI_EMAIL -u $CI_USER -p $CI_PASS $CI_HOST
- docker pull $CI_HOST/kbweb
- docker run --name kbweb3000 -d -p 3000:3000 $CI_HOST/kbweb
- cd go && ./test/check_or_start_kbweb.sh && go test -v ./...
# Only test races on pushes to master, since it takes a long time.
- if [ $TRAVIS_BRANCH == 'master' ] && [ $TRAVIS_PULL_REQUEST == 'false' ]; then
travis_wait go test -race -v ./...;
fi
notifications:
slack: keybase:WKlhs55Mb4am4obv1rNRg8P0
| sudo: required
language: go
go:
- 1.4
- 1.5
services:
- docker
script:
- docker login -e $CI_EMAIL -u $CI_USER -p $CI_PASS $CI_HOST
- docker pull $CI_HOST/kbweb
- docker run --name kbweb3000 -d -p 3000:3000 $CI_HOST/kbweb
- cd go && ./test/check_or_start_kbweb.sh && go test -v ./...
# Only test races on pushes to master, since it takes a long time.
- if [ $TRAVIS_BRANCH == 'master' ] && [ $TRAVIS_PULL_REQUEST == 'false' ]; then
travis_wait go test -timeout 30m -race -v ./...;
fi
notifications:
slack: keybase:WKlhs55Mb4am4obv1rNRg8P0
|
Make Travis test only the versions that we actually support | language: ruby
rvm:
- 1.9.3
- 2.0.0
- 2.1.0
- jruby
- rbx
gemfile:
- gemfiles/active_record_40.gemfile
- gemfiles/active_record_41.gemfile
- gemfiles/active_record_edge.gemfile
matrix:
allow_failures:
- rvm: rbx
- rvm: jruby
fast_finish: true
| language: ruby
rvm:
- 1.9.3
- 2.0.0
- 2.1.0
- jruby
- rbx
gemfile:
- gemfiles/active_record_31.gemfile
- gemfiles/active_record_32.gemfile
matrix:
allow_failures:
- rvm: rbx
- rvm: jruby
fast_finish: true
|
Remove pytest to get proper coverage. | # Travis CI setup
language: python
python:
- "2.7"
- "3.4"
- "3.5"
- "3.6"
# Setup anaconda
before_install:
# http://conda.pydata.org/docs/travis.html
- sudo apt-get update
- if [[ "$TRAVIS_PYTHON_VERSION" == "2.7" ]]; then
wget https://repo.continuum.io/miniconda/Miniconda2-latest-Linux-x86_64.sh -O miniconda.sh;
sudo apt-get install python-dev build-essential;
else
wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh;
sudo apt-get install python3-dev build-essential;
fi
- bash miniconda.sh -b -p $HOME/miniconda
- export PATH="$HOME/miniconda/bin:$PATH"
- hash -r
- conda config --set always_yes yes --set changeps1 no
- conda config --add channels conda-forge
- conda update -q conda
- conda info -a
install:
- conda create -q -n test-env python=$TRAVIS_PYTHON_VERSION --file requirements/conda.txt
- source activate test-env
- pip install -r requirements/requirements.txt
- pip install -r requirements/test.txt
- python setup.py install
script:
- pytest --cov=. --cov-report term-missing --durations 5 tests/
after_success:
- coveralls -i
| # Travis CI setup
language: python
python:
- "2.7"
- "3.4"
- "3.5"
- "3.6"
# Setup anaconda
before_install:
# http://conda.pydata.org/docs/travis.html
- sudo apt-get update
- if [[ "$TRAVIS_PYTHON_VERSION" == "2.7" ]]; then
wget https://repo.continuum.io/miniconda/Miniconda2-latest-Linux-x86_64.sh -O miniconda.sh;
sudo apt-get install python-dev build-essential;
else
wget https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -O miniconda.sh;
sudo apt-get install python3-dev build-essential;
fi
- bash miniconda.sh -b -p $HOME/miniconda
- export PATH="$HOME/miniconda/bin:$PATH"
- hash -r
- conda config --set always_yes yes --set changeps1 no
- conda config --add channels conda-forge
- conda update -q conda
- conda info -a
install:
- conda create -q -n test-env python=$TRAVIS_PYTHON_VERSION --file requirements/conda.txt
- source activate test-env
- pip install -r requirements/requirements.txt
- pip install -r requirements/test.txt
- python setup.py install
script:
- pytest --cov=. --cov-report term-missing --durations 5
after_success:
- coveralls -i
|
Update git repo url in Travis | sudo: required
language: bash
services:
- docker
install:
- git clone https://github.com/thyrlian/AndroidSDK ~/AndroidSDK
script:
- docker build -t android-sdk ~/AndroidSDK/android-sdk
- docker images
- echo "gradle -v && sdkmanager --list" | docker run -i android-sdk
| sudo: required
language: bash
services:
- docker
install:
- git clone https://github.com/thyrlian/AndroidSDK.git ~/AndroidSDK
script:
- docker build -t android-sdk ~/AndroidSDK/android-sdk
- docker images
- echo "gradle -v && sdkmanager --list" | docker run -i android-sdk
|
Build against Go 1.15.7, drop 1.10.5, 1.11.2 | language: go
go:
- "1.11.2"
- "1.10.5"
script:
- go vet ./...
- go test ./...
| language: go
go:
- "1.15.7"
script:
- go vet ./...
- go test ./...
|
Configure Travis CI to test on latest Node.js | ---
language: "node_js"
node_js: [ "4", "5" ]
sudo: false
script: ['npm run test', 'npm run lint']
| ---
language: "node_js"
node_js: [ "4", "node" ]
sudo: false
script: ['npm run test', 'npm run lint']
|
Configure Travis to run ginkgo | language: go
| language: go
install:
- go get -v github.com/onsi/ginkgo/ginkgo
- go get -v github.com/onsi/gomega
- go get -v -t ./...
- export PATH=$PATH:$HOME/gopath/bin
script: ginkgo -r --randomizeAllSpecs --randomizeSuites --failOnPending --trace --race --compilers=2
|
Correct syntax on the Travis exclusions. | language: python
python:
- 3.4
- 3.3
- 2.7
script: make coverage
env:
global:
- DATABASE_URL='postgres://postgres@localhost/incuna_mail'
matrix:
- DJANGO_VERSION='django>=1.6,<1.7'
- DJANGO_VERSION='django>=1.5,<1.6'
- DJANGO_VERSION='django>=1.4,<1.5'
matrix:
exclude:
- env: DJANGO_VERSION>=1.4,<1.5
python: 3.4
- env: DJANGO_VERSION>=1.4,<1.5
python: 3.3
install:
- psql -c 'CREATE DATABASE incuna_mail' -U postgres;
- pip install $DJANGO_VERSION
- pip install -e .
- pip install -r test_requirements.txt
- pip install coveralls
after_success:
coveralls
| language: python
python:
- 3.4
- 3.3
- 2.7
script: make coverage
env:
global:
- DATABASE_URL='postgres://postgres@localhost/incuna_mail'
matrix:
- DJANGO_VERSION='django>=1.6,<1.7'
- DJANGO_VERSION='django>=1.5,<1.6'
- DJANGO_VERSION='django>=1.4,<1.5'
matrix:
exclude:
- env: DJANGO_VERSION='django>=1.4,<1.5'
python: 3.4
- env: DJANGO_VERSION='django>=1.4,<1.5'
python: 3.3
install:
- psql -c 'CREATE DATABASE incuna_mail' -U postgres;
- pip install $DJANGO_VERSION
- pip install -e .
- pip install -r test_requirements.txt
- pip install coveralls
after_success:
coveralls
|
Remove unnecessary Travis config info, set dist to trusty, don't install specific npm, cache npm. | language: node_js
node_js:
- 8.11.1
env:
- CXX=g++-4.8
sudo: required
addons:
chrome: stable
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- g++-4.8
before_install:
#- npm install -g protractor@3.3.0
install:
- npm install -g npm@6.0.1
- npm install -g grunt-cli
- npm install -g bower
#- npm install selenium-webdriver
- npm install
- bower install
#- ./node_modules/protractor/bin/webdriver-manager update --standalone
before_script:
#- export CHROME_BIN=chromium-browser
#- export DISPLAY=:99.0
#- sh -e /etc/init.d/xvfb start
script:
- npm run build
- npm test
# - grunt e2e-test
notifications:
slack:
secure: iGLGsYyVIyKVpVVCskGh/zc6Pkqe0D7jpUtbywSbnq6l5seE6bvBVqm0F2FSCIN+AIC+qal2mPEWysDVsLACm5tTEeA8NfL8dmCrAKbiFbi+gHl4mnHHCHl7ii/7UkoIIXNc5UXbgMSXRS5l8UcsSDlN8VxC5zWstbJvjeYIvbA=
| language: node_js
node_js:
- 8.11.1
dist: trusty
sudo: required
addons:
chrome: stable
#before_install:
#- npm install -g protractor@3.3.0
cache:
directories:
- $HOME/.npm
install:
#- npm install -g npm@6.0.1
- npm install -g grunt-cli
- npm install -g bower
#- npm install selenium-webdriver
- npm install
- bower install
#- ./node_modules/protractor/bin/webdriver-manager update --standalone
#before_script:
#- export CHROME_BIN=chromium-browser
#- export DISPLAY=:99.0
#- sh -e /etc/init.d/xvfb start
script:
- npm run build
- npm test
# - grunt e2e-test
notifications:
slack:
secure: iGLGsYyVIyKVpVVCskGh/zc6Pkqe0D7jpUtbywSbnq6l5seE6bvBVqm0F2FSCIN+AIC+qal2mPEWysDVsLACm5tTEeA8NfL8dmCrAKbiFbi+gHl4mnHHCHl7ii/7UkoIIXNc5UXbgMSXRS5l8UcsSDlN8VxC5zWstbJvjeYIvbA=
|
Remove godep from Travis; we're using Go 1.6 vendoring now | sudo: false
language: go
go:
- 1.6
- tip
matrix:
allow_failures:
- go: tip
branches:
only:
- master
install:
- go get github.com/tools/godep
script:
- make test testrace
| sudo: false
language: go
go:
- 1.6
- tip
matrix:
allow_failures:
- go: tip
branches:
only:
- master
script:
- make test testrace
|
Use OpenJDK because Oracle doesn't like people to use their Java distribution anymore. | language: java
install: true
before_install:
- "export DISPLAY=:99.0"
- "sh -e /etc/init.d/xvfb start"
- chmod +x gradlew
jdk: oraclejdk11
git:
depth: false
addons:
sonarcloud:
organization: "gurkenlabs-github"
script:
- ./gradlew fullbuild
- jdk_switcher use oraclejdk8
- ./gradlew fullbuild
- if [ $TRAVIS_PULL_REQUEST = 'false' ]; then
sonar-scanner;
fi
cache:
directories:
- '$HOME/.m2/repository'
- '$HOME/.sonar/cache'
- '$HOME/.gradle/caches/'
- '$HOME/.gradle/wrapper/' | language: java
install: true
before_install:
- "export DISPLAY=:99.0"
- "sh -e /etc/init.d/xvfb start"
- chmod +x gradlew
jdk: openjdk11
git:
depth: false
addons:
sonarcloud:
organization: "gurkenlabs-github"
script:
- ./gradlew fullbuild
- jdk_switcher use openjdk8
- ./gradlew fullbuild
- if [ $TRAVIS_PULL_REQUEST = 'false' ]; then
sonar-scanner;
fi
cache:
directories:
- '$HOME/.m2/repository'
- '$HOME/.sonar/cache'
- '$HOME/.gradle/caches/'
- '$HOME/.gradle/wrapper/' |
Simplify Travis config and fix ChefDK 2.0 failures | sudo: required
dist: trusty
addons:
apt:
sources:
- chef-stable-trusty
packages:
- chefdk
# Don't `bundle install` which takes about 1.5 mins
install: echo "skip bundle install"
branches:
only:
- master
services: docker
env:
matrix:
- INSTANCE=default-fedora-latest
before_script:
- sudo iptables -L DOCKER || ( echo "DOCKER iptables chain missing" ; sudo iptables -N DOCKER )
- eval "$(/opt/chefdk/bin/chef shell-init bash)"
- /opt/chefdk/embedded/bin/chef --version
- /opt/chefdk/embedded/bin/cookstyle --version
- /opt/chefdk/embedded/bin/foodcritic --version
script: KITCHEN_LOCAL_YAML=.kitchen.dokken.yml /opt/chefdk/embedded/bin/kitchen verify ${INSTANCE}
matrix:
include:
- script:
- /opt/chefdk/bin/chef exec delivery local all
env: UNIT_AND_LINT=1
| sudo: required
dist: trusty
addons:
apt:
sources:
- chef-current-trusty
packages:
- chefdk
# Don't `bundle install` which takes about 1.5 mins
install: echo "skip bundle install"
branches:
only:
- master
services: docker
env:
matrix:
- INSTANCE=default-fedora-latest
before_script:
- sudo iptables -L DOCKER || ( echo "DOCKER iptables chain missing" ; sudo iptables -N DOCKER )
- eval "$(chef shell-init bash)"
- chef --version
- cookstyle --version
- foodcritic --version
script: KITCHEN_LOCAL_YAML=.kitchen.dokken.yml kitchen verify ${INSTANCE}
matrix:
include:
- script:
- chef exec delivery local all
env: UNIT_AND_LINT=1
|
Revert "Retrying tests max three times" | language: ruby
before_script: "export RAILS_ENV=development"
script: "travis_retry bundle exec rake test"
rvm:
- 2.0.0
- 1.9.3
| language: ruby
before_script: "export RAILS_ENV=development"
script: "bundle exec rake test"
rvm:
- 2.0.0
- 1.9.3
|
Print xgettext version before tests | language: node_js
os:
- linux
node_js:
- "0.10"
sudo: false
install:
- npm install grunt-cli
- npm install
script:
- node_modules/grunt-cli/bin/grunt test
| language: node_js
os:
- linux
node_js:
- "0.10"
sudo: false
before_install:
- xgettext --version
install:
- npm install grunt-cli
- npm install
script:
- node_modules/grunt-cli/bin/grunt test
|
Test against Rubinius in both Ruby 1.8 and 1.9 mode | bundler_args: "--without production"
rvm:
- 1.8.7
- 1.9.2
- rbx
- rbx-2.0
- ree
env: "RAILS_ENV=test"
| bundler_args: "--without production"
rvm:
- 1.8.7
- 1.9.2
- rbx-18mode
- rbx-19mode
- ree
env: "RAILS_ENV=test"
|
Test using node 0.10, 0.12 and iojs | language: node_js
node_js:
- "0.10"
- "0.11"
before_install:
- sudo sh -c "echo 'JVM_OPTS=\"\${JVM_OPTS} -Djava.net.preferIPv4Stack=false\"' >> /usr/local/cassandra/conf/cassandra-env.sh"
- sudo service cassandra start
before_script:
- while [ `nc localhost 9042 < /dev/null; echo $?` != 0 ] ; do
echo 'waiting for cassandra...' ;
sleep 1 ;
done
notifications:
email:
- services@wikimedia.org
script: npm run-script coverage && npm run-script coveralls
| language: node_js
node_js:
- "0.10"
- "0.12"
- "iojs"
before_install:
- sudo sh -c "echo 'JVM_OPTS=\"\${JVM_OPTS} -Djava.net.preferIPv4Stack=false\"' >> /usr/local/cassandra/conf/cassandra-env.sh"
- sudo service cassandra start
before_script:
- while [ `nc localhost 9042 < /dev/null; echo $?` != 0 ] ; do
echo 'waiting for cassandra...' ;
sleep 1 ;
done
notifications:
email:
- services@wikimedia.org
script: npm run-script coverage && npm run-script coveralls
|
Return to parent dir after success. | language: python
env:
# Docker Username
- secure: "U21QfZbe3BasafwItwydmHjv7AnJf2XgSiIBOxJqkWjizrW1vR5S6+yQTsD7mlkk1Z+uyRd3zBEnh3UiPoaPxmtWoMbDVxA+qSYWe11IpkAgK5/j8Re/IIcwHiMug/l705qs1DpkPQm1vR8yJhqWOyMTxQNdnGeyOdrs/De46ls="
# Docker Password
- secure: "V6Nzz/c+3YoC1WtapnIV3KnllzMxCIsGTKDPGKRI6slrGTew5zBX9KbkZZlvBGH4s7Yx4o6vEIwOEDCFixjuAuWqo6wPJt5HKhHws3UQ7yXYifUKJyBcSXwxXBfd/5slgJvevjjUXUP9+D/kABjJ3thUEdZn5ukh0HIPBpl03EM="
python:
- 3.5
# For installing packages via pip, there is a dependency on Postgres.
# However, this appears not to be a problem for Travis. Moreover, the service
# declaration below actually starts Postgres, which isn't necessary for tests.
# services:
# - postgresql
services:
- docker
install:
- cd web
- pip install -r requirements.txt
script:
- python3 -m unittest discover
after_success:
- ./deploy.sh
| language: python
env:
# Docker Username
- secure: "U21QfZbe3BasafwItwydmHjv7AnJf2XgSiIBOxJqkWjizrW1vR5S6+yQTsD7mlkk1Z+uyRd3zBEnh3UiPoaPxmtWoMbDVxA+qSYWe11IpkAgK5/j8Re/IIcwHiMug/l705qs1DpkPQm1vR8yJhqWOyMTxQNdnGeyOdrs/De46ls="
# Docker Password
- secure: "V6Nzz/c+3YoC1WtapnIV3KnllzMxCIsGTKDPGKRI6slrGTew5zBX9KbkZZlvBGH4s7Yx4o6vEIwOEDCFixjuAuWqo6wPJt5HKhHws3UQ7yXYifUKJyBcSXwxXBfd/5slgJvevjjUXUP9+D/kABjJ3thUEdZn5ukh0HIPBpl03EM="
python:
- 3.5
# For installing packages via pip, there is a dependency on Postgres.
# However, this appears not to be a problem for Travis. Moreover, the service
# declaration below actually starts Postgres, which isn't necessary for tests.
# services:
# - postgresql
services:
- docker
install:
- cd web
- pip install -r requirements.txt
script:
- python3 -m unittest discover
after_success:
- cd ..
- ./deploy.sh
|
Include optional features in docs generation | language: rust
matrix:
include:
- rust: nightly
- rust: beta
- rust: stable
script:
- cargo test --features="live_tests"
- cargo test --features="tls"
sudo: false
after_success: |
[ $TRAVIS_BRANCH = master ] &&
[ $TRAVIS_PULL_REQUEST = false ] &&
[ $TRAVIS_RUST_VERSION = stable ] &&
cargo doc &&
echo "<meta http-equiv=refresh content=0;url=`echo $TRAVIS_REPO_SLUG | cut -d '/' -f 2`/index.html>" > target/doc/index.html &&
pip install --user ghp-import &&
/home/travis/.local/bin/ghp-import -n target/doc &&
git push -fq https://${GH_TOKEN}@github.com/${TRAVIS_REPO_SLUG}.git gh-pages
env:
global:
secure: eSYRUJ2wTq1g6AiPp0zvtxVJFn/3FnrCRAJmGCN1TBYpnl11ZvLZfhUA9IC4S48/YVmdeP1pywpIjY3ZGk7gWuaRLpTrwBxgm01RbOglQS1if6Pryc01FcwCSGb1fJKY4qR0v6iQRb23jaFfSELHfThf4rmG4QiKiNviHJRzb0c=
| language: rust
matrix:
include:
- rust: nightly
- rust: beta
- rust: stable
script:
- cargo test --features="live_tests"
- cargo test --features="tls"
sudo: false
after_success: |
[ $TRAVIS_BRANCH = master ] &&
[ $TRAVIS_PULL_REQUEST = false ] &&
[ $TRAVIS_RUST_VERSION = stable ] &&
cargo doc --no-deps --features "tls" &&
echo "<meta http-equiv=refresh content=0;url=solicit/index.html>" > target/doc/index.html &&
pip install --user ghp-import &&
/home/travis/.local/bin/ghp-import -n target/doc &&
git push -fq https://${GH_TOKEN}@github.com/${TRAVIS_REPO_SLUG}.git gh-pages
env:
global:
secure: eSYRUJ2wTq1g6AiPp0zvtxVJFn/3FnrCRAJmGCN1TBYpnl11ZvLZfhUA9IC4S48/YVmdeP1pywpIjY3ZGk7gWuaRLpTrwBxgm01RbOglQS1if6Pryc01FcwCSGb1fJKY4qR0v6iQRb23jaFfSELHfThf4rmG4QiKiNviHJRzb0c=
|
Change Ruby versions on Travis |
language: ruby
rvm:
#- 1.8.7 # no, since it doesn't get fun(a, *b, c) or fun0\n.fun1
#- 1.9.3 # Travis broken September 2017
#- ruby-1.9.3-p551
#- 2.1.1
- 2.2.2
- 2.3.1
- 2.4.1
#- jruby-19mode # Travis broken September 2017
#- jruby-20mode # Travis broken September 2017
- jruby-9.1.13.0
#matrix:
# include:
# - rvm: jruby-9.1.13.0
#before_install: gem install bundler
script: bundle exec rspec
branches:
only:
- master
#except:
# - master
sudo: false
cache: bundler
env:
- ""
- TZ=UTC
- TZ=Asia/Tokyo
- TZ=America/Chicago
|
language: ruby
rvm:
#- 1.8.7 # no, since it doesn't get fun(a, *b, c) or fun0\n.fun1
#- 1.9.3 # Travis broken September 2017
#- ruby-1.9.3-p551
#- 2.1.1
#- 2.2.2 # removed 2018-01-05
- 2.3.1
- 2.4.1
- 2.5.3
- 2.6.0
#- jruby-19mode # Travis broken September 2017
#- jruby-20mode # Travis broken September 2017
- jruby-9.1.13.0
#matrix:
# include:
# - rvm: jruby-9.1.13.0
#before_install: gem install bundler
script: bundle exec rspec
branches:
only:
- master
#except:
# - master
sudo: false
cache: bundler
env:
- ""
- TZ=UTC
- TZ=Asia/Tokyo
- TZ=America/Chicago
|
Remove 0.10 and 0.12 from the Travis CI build. | language: node_js
node_js:
- "7"
- "6"
- "4"
- "0.12"
- "0.10"
before_install: npm install -g grunt-cli
install: npm install
before_script: grunt build
sudo: false
| language: node_js
node_js:
- "7"
- "6"
- "4"
before_install: npm install -g grunt-cli
install: npm install
before_script: grunt build
sudo: false
|
Make a point to ignore errors during the first part of script | language: python
python:
- "2.7"
# - "3.3"
env:
- SUBLIME_TEXT_VERSION="2"
- SUBLIME_TEXT_VERSION="3"
- SUBLIME_TEXT_VERSION="3" SUBLIME_TEXT_RENAME="sublime_text"
- SUBLIME_TEXT_VERSION="3" SUBLIME_TEXT_RENAME="sublime_texttt" SUBLIME_TEXT_PATH="/usr/bin/sublime_texttt"
- SUBLIME_TEXT_VERSION=""
install:
# Install Sublime Text
- ./test/install.sh
# Install `sublime_info`
- python setup.py develop
# Install dev requirements
- pip install -r requirements-dev.txt
script:
# Set up variables for test
- 'test -n "$SUBLIME_TEXT_VERSION" && export EXPECTED_PATH="/usr/bin/subl"'
- 'test -n "$SUBLIME_TEXT_RENAME" && export EXPECTED_PATH="/usr/bin/$SUBLIME_TEXT_RENAME"'
- 'test -z "$SUBLIME_TEXT_VERSION" && export EXPECT_ERROR="1"'
# Run our tests
- ./test.sh
notifications:
email: false
| language: python
python:
- "2.7"
# - "3.3"
env:
- SUBLIME_TEXT_VERSION="2"
- SUBLIME_TEXT_VERSION="3"
- SUBLIME_TEXT_VERSION="3" SUBLIME_TEXT_RENAME="sublime_text"
- SUBLIME_TEXT_VERSION="3" SUBLIME_TEXT_RENAME="sublime_texttt" SUBLIME_TEXT_PATH="/usr/bin/sublime_texttt"
- SUBLIME_TEXT_VERSION=""
install:
# Install Sublime Text
- ./test/install.sh
# Install `sublime_info`
- python setup.py develop
# Install dev requirements
- pip install -r requirements-dev.txt
script:
# Set up variables for test
- 'test -n "$SUBLIME_TEXT_VERSION" && export EXPECTED_PATH="/usr/bin/subl"; exit 0'
- 'test -n "$SUBLIME_TEXT_RENAME" && export EXPECTED_PATH="/usr/bin/$SUBLIME_TEXT_RENAME"; exit 0'
- 'test -z "$SUBLIME_TEXT_VERSION" && export EXPECT_ERROR="1"; exit 0'
# Run our tests
- ./test.sh
notifications:
email: false
|
Move to latest of minor ruby versions | language: ruby
sudo: false
rvm:
- 2.0
- 2.1
- 2.2
- 2.3.1
- 2.4.0
cache: bundler
script:
- bundle exec rspec
after_success:
- bundle exec codeclimate-test-reporter
addons:
postgresql: "9.3"
env:
- RSPEC_VERSION="<2.99"
- RSPEC_VERSION="~>3.0"
| language: ruby
sudo: false
rvm:
- 2.0
- 2.1
- 2.2
- 2.3
- 2.4
cache: bundler
script:
- bundle exec rspec
after_success:
- bundle exec codeclimate-test-reporter
addons:
postgresql: "9.3"
env:
- RSPEC_VERSION="<2.99"
- RSPEC_VERSION="~>3.0"
|
Use the latest rubies on Travis | language: ruby
os:
- osx
before_install:
- gem install bundler
gemfile:
- Gemfile
rvm:
- 2.0.0
- 2.1
- 2.2.3
script:
- bundle exec rspec spec
| language: ruby
os:
- osx
before_install:
- gem install bundler
gemfile:
- Gemfile
rvm:
- 2.0.0
- 2.1.8
- 2.2.4
- 2.3.0
- ruby-head
script:
- bundle exec rspec spec
|
Add build script for Travis CI | language: csharp
solution: "Extension Library.sln"
mono: none
install:
- nuget restore "Extension Library.sln"
- nuget install xunit.runners -Version 1.9.2 -OutputDirectory testrunner
script:
- xbuild /p:Configuration=Release "Extension Library.sln"
- mono ./testrunner/xunit.runners.1.9.2/tools/xunit.console.clr4.exe ./Extension_Library.Tests/bin/Release/MyProject.Tests.dll
branches:
only:
- master | language: csharp
solution: Extension-Library.sln
mono: none
install:
- nuget restore Extension-Library.sln
- nuget install xunit.runners -Version 1.9.2 -OutputDirectory testrunner
script:
- xbuild /p:Configuration=Release Extension-Library.sln
- mono ./testrunner/xunit.runners.1.9.2/tools/xunit.console.clr4.exe ./Extension-Library.Tests/bin/Release/MyProject.Tests.dll
branches:
only:
- master |
Add Node.js v0.12 and io.js in Travis CI setting | language: node_js
node_js:
- "0.10"
- "0.11"
| language: node_js
node_js:
- "0.10"
- "0.12"
- "iojs"
|
Add notifications for build status | language: java
sudo: false
jdk:
- openjdk7
- oraclejdk8
os:
- linux
| language: java
sudo: false
jdk:
- openjdk7
- oraclejdk8
os:
- linux
notifications:
slack:
secure: gokcF0ETKhnPlP9HPV78Qk89bklwZmAfaO+qbAEsiYCbnBaJb6UReqIOH4bi46vyWxkyYbalsVSbAFCRKbR63ko8MmBW3t80FZ9y4AZYtIaV+Zt6WLW67kxfhnaXlGdb8vR2jbnz5KE64BEfpHLB8gwzEwjICKp/+me2Bdo+9URuazvUe6tJS3VDBmUYJijfgz1mvrP/La9kRHZtlf6QbbevMC5Iba7ZmZxK+C9ioDDaU1+H27pqdmBfVxKk9K04urvPzlFNbyhc2dplkvQSs4KyGUMzGLayF+Sf/mY1YFoVE9eugPZI+8xpFUCKE63oWvOS2oyKB30t8O0IHDIK6GhNTKIyQ3BuwdKn9PSxKvl//4ccmQdaIxPIrPTGZBk38DHUxtZDmWE8ht55JgjTi5uW8sKn4I9PDeNa1inJY11zMqhYNkP6ULWvHgeKFL3R9dUyLPy2Jf3gJHo6aRMxZeEe8xVnFbXFVw6IgbJe/uZmGncxhQIqNF3UT9d/RFDXMXlWA2OqdHf/j/Ur3Iur8auB0ZTJ6obkzhl+IeBI402YCKbOG9Yh/y4PCX11hvJO047W9ratYPxjCWuxvHFUvi8Wm9lyUglFcOqyp/UpwI3czvDnJL7rgXWr8A2HW5Dh4MyB+8Udk5EPgRo2gSM1tPeQQXpnxlJjIs8cysN1DTU=
on_success: change
on_failure: always
|
Add mongodb service for Travis CI | language: node_js
node_js:
- "0.10"
- "0.8"
| language: node_js
node_js:
- 0.10
- 0.8
services: mongodb
|
Check for current year in LICENSE | language: scala
scala:
- 2.12.1
jdk:
- oraclejdk8
notifications:
webhooks:
urls:
- https://webhooks.gitter.im/e/d7d25eec49bfe73a4f9b
on_success: always # options: [always|never|change] default: always
on_failure: always # options: [always|never|change] default: always
on_start: always # options: [always|never|change] default: always
script:
- "sbt scalafmtTest || ( sbt scalafmt; git diff --exit-code ) "
- sbt test assembly doc
deploy:
provider: releases
api_key:
secure: EgJ7RuEBzNjcKBuQP4jtwPU1hp8LzoacGufAaimOfG4+3pTfbPFttXjMXjb/8HP4QF1vcFH3BemPyZng20qTTP1LuoNvU3Ae4ZakCFDS0F9KfajibcmJRCqoJ7LSI6olFS/zyMWNVWkEUiBcOcLIafBTz1HB6M0ucxanN6/dUS4=
file: "target/scala-*/konstructs-server-*.jar"
file_glob: true
on:
repo: konstructs/server
tags: true
| language: scala
scala:
- 2.12.1
jdk:
- oraclejdk8
notifications:
webhooks:
urls:
- https://webhooks.gitter.im/e/d7d25eec49bfe73a4f9b
on_success: always # options: [always|never|change] default: always
on_failure: always # options: [always|never|change] default: always
on_start: always # options: [always|never|change] default: always
script:
- "sbt scalafmtTest || ( sbt scalafmt; git diff --exit-code ) "
- "grep $(date +%Y) LICENSE"
- sbt test assembly doc
deploy:
provider: releases
api_key:
secure: EgJ7RuEBzNjcKBuQP4jtwPU1hp8LzoacGufAaimOfG4+3pTfbPFttXjMXjb/8HP4QF1vcFH3BemPyZng20qTTP1LuoNvU3Ae4ZakCFDS0F9KfajibcmJRCqoJ7LSI6olFS/zyMWNVWkEUiBcOcLIafBTz1HB6M0ucxanN6/dUS4=
file: "target/scala-*/konstructs-server-*.jar"
file_glob: true
on:
repo: konstructs/server
tags: true
|
Disable email notification for Travis. | language: node_js
cache:
directories:
- $HOME/.npm
- $HOME/.yarn-cache
- node_modules
node_js:
- "6"
before_install:
- npm i -g yarn --cache-min 999999999
install:
- yarn
| language: node_js
cache:
directories:
- $HOME/.npm
- $HOME/.yarn-cache
- node_modules
node_js:
- "6"
before_install:
- npm i -g yarn --cache-min 999999999
install:
- yarn
notifications:
email: false
|
Disable xdebug for faster builds | language: php
php:
- 5.6
- 7.0
- 7.1
- hhvm
before_script:
- composer self-update
- composer install --prefer-source --no-interaction
script: vendor/bin/phpspec run -v | language: php
php:
- 5.6
- 7.0
- 7.1
- hhvm
before_script:
- phpenv config-rm xdebug.ini
- composer self-update
- composer install --prefer-source --no-interaction
script: vendor/bin/phpspec run -v |
Add config.yml copying for CI | rvm:
- ree
- 1.8.7
- 1.9.2
branches:
only:
- production
- develop
notifications:
disabled: true
| rvm:
- ree
- 1.8.7
- 1.9.2
branches:
only:
- production
- develop
notifications:
disabled: true
script: "cp spec/config.example.yml spec/config.yml; bundle exec rake"
|
Use explicit region specific bucket name | language: node_js
sudo: false
node_js:
- '0.10'
before_script: ./node_modules/.bin/gulp
deploy:
provider: s3
access_key_id: AKIAIT4X4NDGM2WVL6VA
secret_access_key:
secure: Q5Ns7G3ibVV8HrpI1GeZbypETL/MEywN0PuSljmW+0aYCERNgKqgU0Uy8AJrCy/ymfyCzmB+MD62Urq3ZCqWyVeVYJcKpYL518IlnOtv6KI2GXJgQ2eGFJSX5jw7fKvCh9Le0nw9TP7op8EHo22n16pUZkFupfkFajeR1JBmVnY=
bucket: blu.octoblu.com
endpoint: blu.octoblu.com.s3-website-us-west-2.amazonaws.com
local-dir: public
skip_cleanup: true
acl: public_read
on:
branch: master
| language: node_js
sudo: false
node_js:
- '0.10'
before_script: ./node_modules/.bin/gulp
deploy:
provider: s3
access_key_id: AKIAIT4X4NDGM2WVL6VA
secret_access_key:
secure: Q5Ns7G3ibVV8HrpI1GeZbypETL/MEywN0PuSljmW+0aYCERNgKqgU0Uy8AJrCy/ymfyCzmB+MD62Urq3ZCqWyVeVYJcKpYL518IlnOtv6KI2GXJgQ2eGFJSX5jw7fKvCh9Le0nw9TP7op8EHo22n16pUZkFupfkFajeR1JBmVnY=
bucket: blu.octoblu.com.s3-website-us-west-2.amazonaws.com
endpoint: blu.octoblu.com.s3-website-us-west-2.amazonaws.com
local-dir: public
skip_cleanup: true
acl: public_read
on:
branch: master
|
Update from Forestry.io - Updated Forestry configuration | ---
new_page_extension: md
auto_deploy: false
admin_path: ''
webhook_url:
sections:
- type: jekyll-pages
label: Pages
create: all
- type: jekyll-posts
label: Posts
create: all
upload_dir: uploads
public_path: "/uploads"
front_matter_path: ''
use_front_matter_path: false
file_template: ":filename:"
instant_preview: false
build:
preview_env:
- JEKYLL_ENV=staging
preview_output_directory: _site
install_dependencies_command: gem install jekyll
preview_docker_image: forestryio/build:latest
mount_path: "/opt/buildhome/repo"
instant_preview_command: jekyll serve --drafts --unpublished --future --port 8080
--host 0.0.0.0 -d _site
preview_command: jekyll build --drafts --unpublished --future -d _site
| ---
new_page_extension: md
auto_deploy: false
admin_path: ''
webhook_url:
sections:
- type: jekyll-pages
label: Pages
create: all
- type: jekyll-posts
label: Posts
create: all
upload_dir: uploads
public_path: "/uploads"
front_matter_path: ''
use_front_matter_path: false
file_template: ":filename:"
instant_preview: true
build:
preview_env:
- JEKYLL_ENV=staging
preview_output_directory: _site
install_dependencies_command: gem install jekyll
preview_docker_image: forestryio/build:latest
mount_path: "/opt/buildhome/repo"
instant_preview_command: jekyll serve --drafts --unpublished --future --port 8080
--host 0.0.0.0 -d _site
preview_command: jekyll build --drafts --unpublished --future -d _site
|
Update from Forestry.io - Updated Forestry configuration | ---
new_page_extension: md
auto_deploy: true
admin_path: "/admin"
webhook_url:
sections:
- type: jekyll-posts
label: News & Results
create: all
upload_dir: uploads
public_path: "/uploads"
front_matter_path: ''
use_front_matter_path:
file_template: ":filename:"
build:
preview_command: bundle exec jekyll build --drafts --unpublished --future -d _site
publish_command: jekyll build -d _site
preview_env:
- JEKYLL_ENV=staging
publish_env:
- JEKYLL_ENV=production
preview_output_directory: _site
output_directory: _site
| ---
new_page_extension: md
auto_deploy: true
admin_path: admin
webhook_url:
sections:
- type: jekyll-posts
label: News & Results
create: all
upload_dir: uploads
public_path: "/uploads"
front_matter_path: ''
use_front_matter_path:
file_template: ":filename:"
build:
preview_command: bundle exec jekyll build --drafts --unpublished --future -d _site
publish_command: jekyll build -d _site
preview_env:
- JEKYLL_ENV=staging
publish_env:
- JEKYLL_ENV=production
preview_output_directory: _site
output_directory: _site
|
Change version to string in dictionary | # Default st2 immutable vars
---
# List of available `st2` services:
# https://github.com/StackStorm/st2/blob/master/st2common/bin/st2ctl#L17
st2_services:
- st2actionrunner
- st2garbagecollector
- st2notifier
- st2resultstracker
- st2rulesengine
- st2sensorcontainer
- st2api
- st2stream
- st2auth
# List of additional stackstorm services associated with specific st2 version release
st2_services_versioned:
2.8:
- st2workflowengine
2.9:
- st2timersengine
2.10:
- st2scheduler
# Placeholder for st2 installed version, determined during run
st2_version_installed: "{{ _st2_version_installed.stdout }}"
# Where to store the ST2 datastore encryption key (automatically generated during install)
st2_datastore_key_file: /etc/st2/keys/datastore_key.json
# List of config vars (by section) that should have no_log: true to avoid showing up in ansible output
st2_config_no_log:
coordination:
- url
database:
- username
- password
messaging:
- url
mistral:
- keystone_username
- keystone_password
| # Default st2 immutable vars
---
# List of available `st2` services:
# https://github.com/StackStorm/st2/blob/master/st2common/bin/st2ctl#L17
st2_services:
- st2actionrunner
- st2garbagecollector
- st2notifier
- st2resultstracker
- st2rulesengine
- st2sensorcontainer
- st2api
- st2stream
- st2auth
# List of additional stackstorm services associated with specific st2 version release
st2_services_versioned:
"2.8":
- st2workflowengine
"2.9":
- st2timersengine
"2.10":
- st2scheduler
# Placeholder for st2 installed version, determined during run
st2_version_installed: "{{ _st2_version_installed.stdout }}"
# Where to store the ST2 datastore encryption key (automatically generated during install)
st2_datastore_key_file: /etc/st2/keys/datastore_key.json
# List of config vars (by section) that should have no_log: true to avoid showing up in ansible output
st2_config_no_log:
coordination:
- url
database:
- username
- password
messaging:
- url
mistral:
- keystone_username
- keystone_password
|
Update Config with Social Media & Other | # Welcome to Jekyll!
#
# This config file is meant for settings that affect your whole blog, values
# which you are expected to set up once and rarely edit after that. If you find
# yourself editing these this file very often, consider using Jekyll's data files
# feature for the data you need to update frequently.
#
# For technical reasons, this file is *NOT* reloaded automatically when you use
# 'jekyll serve'. If you change this file, please restart the server process.
# Site settings
# These are used to personalize your new site. If you look in the HTML files,
# you will see them accessed via {{ site.title }}, {{ site.email }}, and so on.
# You can create any custom variable you would like, and they will be accessible
# in the templates via {{ site.myvariable }}.
title: eSports Initiative
email: esportsinitiative@gmail.com
description: > # this means to ignore newlines until "baseurl:"
Write an awesome description for your new site here. You can edit this
line in _config.yml. It will appear in your document head meta (for
Google search results) and in your feed.xml site description.
baseurl: "" # the subpath of your site, e.g. /blog
url: "http://example.com" # the base hostname & protocol for your site
#twitter_username: jekyllrb
#github_username: jekyll
# Build settings
markdown: kramdown
#theme: minima
| # Welcome to Jekyll!
#
# This config file is meant for settings that affect your whole blog, values
# which you are expected to set up once and rarely edit after that. If you find
# yourself editing these this file very often, consider using Jekyll's data files
# feature for the data you need to update frequently.
#
# For technical reasons, this file is *NOT* reloaded automatically when you use
# 'jekyll serve'. If you change this file, please restart the server process.
# Site settings
# These are used to personalize your new site. If you look in the HTML files,
# you will see them accessed via {{ site.title }}, {{ site.email }}, and so on.
# You can create any custom variable you would like, and they will be accessible
# in the templates via {{ site.myvariable }}.
title: eSports Initiative
email: esportsinitiative@gmail.com
description: > # this means to ignore newlines until "baseurl:"
eSports Initiative (ESI) is a student organization at The Ohio State University (OSU).
ESI fosters, manages, and runs eSports related events, teams, and tournaments at OSU.
baseurl: "" # the subpath of your site, e.g. /blog
url: "http://esi.gg" # the base hostname & protocol for your site
# Social Media
twitter_username: ESI_OSU
facebook_username: esportsinitiative
instagram_username: esportsinitiative
twitch_username: esportsinitiative
challonge_username: esportsinitiative # users/esportsinitiative
youtube_username: ESportsInitiative # user/ESportsInitiative
github_username: esportsinitiative
# Build settings
markdown: kramdown
#theme: minima
|
Remove trailing URL forward slash | # Site settings
title: Derek's blog
motto: Data science and bioinformatics
author: Derek Croote
email: dcroote@gmail.com
baseurl: "" # the subpath of your site, e.g. /blog/
url: "https://dcroote.github.io/" # the base hostname & protocol for your site
github_username: dcroote
# Build settings
markdown: kramdown
include: [".well-known"]
plugins:
- jekyll-sitemap
| # Site settings
title: Derek's blog
motto: Data science and bioinformatics
author: Derek Croote
email: dcroote@gmail.com
baseurl: "" # the subpath of your site, e.g. /blog/
url: "https://dcroote.github.io" # the base hostname & protocol for your site
github_username: dcroote
# Build settings
markdown: kramdown
include: [".well-known"]
plugins:
- jekyll-sitemap
|
Test with Node 6 & latest | language: node_js
matrix:
include:
- node_js: '0.10'
- node_js: '0.12'
- node_js: iojs
- node_js: 4
- node_js: 5
env: COVERALLS=1
after_success:
- '[[ "$COVERALLS" ]] && gulp coveralls'
| language: node_js
matrix:
include:
- node_js: '0.10'
- node_js: '0.12'
- node_js: iojs
- node_js: 4
- node_js: 5
- node_js: 6
- node_js: node
env: COVERALLS=1
after_success:
- '[[ "$COVERALLS" ]] && gulp coveralls'
|
Correct file pattern in Test Packaged Scans | name: Test Packaged Scans
on:
pull_request:
paths:
- docker/*
jobs:
test:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [3.5, 3.6, 3.7, 3.8]
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install tox
run: |
python -m pip install --upgrade pip
pip install tox
- name: Run tox
run: |
cd docker
tox
| name: Test Packaged Scans
on:
pull_request:
paths:
- docker/**
jobs:
test:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [3.5, 3.6, 3.7, 3.8]
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install tox
run: |
python -m pip install --upgrade pip
pip install tox
- name: Run tox
run: |
cd docker
tox
|
Move unit test checking of latest dependencies to place to fail build | language: php
php:
- 5.5
- 5.6
- 7.0
- hhvm
matrix:
allow_failures:
- php: hhvm
cache:
directories:
- $HOME/.composer/cache
script:
- vendor/bin/phpunit
before_script:
- composer install --no-interaction
after_script:
- wget https://scrutinizer-ci.com/ocular.phar
- php ocular.phar code-coverage:upload --format=php-clover build/logs/clover.xml
- composer update --no-interaction --prefer-source
- vendor/bin/phpunit -v
notifications:
irc: "irc.freenode.org#phpdocumentor"
email:
- mike.vanriel@naenius.com
- ashnazg@php.net
- boen.robot@gmail.com
| language: php
php:
- 5.5
- 5.6
- 7.0
- hhvm
matrix:
allow_failures:
- php: hhvm
cache:
directories:
- $HOME/.composer/cache
script:
- vendor/bin/phpunit
- composer update --no-interaction --prefer-source
- vendor/bin/phpunit -v
before_script:
- composer install --no-interaction
after_script:
- wget https://scrutinizer-ci.com/ocular.phar
- php ocular.phar code-coverage:upload --format=php-clover build/logs/clover.xml
notifications:
irc: "irc.freenode.org#phpdocumentor"
email:
- mike.vanriel@naenius.com
- ashnazg@php.net
- boen.robot@gmail.com
|
Add fonts recommended for pandoc to pdf | ---
packages:
- at
- bmon
- byobu #For remove-old-kernels
- git
- gnupg
- gparted
- htop
- incron
- iotop
- lftp
- lmodern #For pandoc to pdf
- most
- mtr
- ncdu
- openssl
- pandoc
- preload
- pv
- pydf
- ranger
- rsync
- screenfetch
- task-spooler
- tlp
- tlp-rdw
- tmux
- tree
- undistract-me
- xclip
packages_arch:
- openssh
- cronie
packages_debian:
- anacron
- aptitude
- openssh-client
- openssh-server
| ---
packages:
- at
- bmon
- byobu #For remove-old-kernels
- git
- gnupg
- gparted
- htop
- incron
- iotop
- lftp
- most
- mtr
- ncdu
- openssl
- pandoc
- preload
- pv
- pydf
- ranger
- rsync
- screenfetch
- task-spooler
- texlive-fonts-recommended #for pandoc to pdf
- tlp
- tlp-rdw
- tmux
- tree
- undistract-me
- xclip
packages_arch:
- openssh
- cronie
packages_debian:
- anacron
- aptitude
- openssh-client
- openssh-server
|
Add a step to checkout the repository (GitHub Actions). | name: "Build and run tests"
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Install dependencies
run: sudo apt-get update && sudo apt-get install -qy uthash-dev
- name: Build & Test
uses: ashutoshvarma/action-cmake-build@master
with:
build-dir: ${{ runner.workspace }}/build
cc: gcc
build-type: Release
run-test: true
install-build: true
parallel: 2
| name: "Build and run tests"
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
jobs:
build:
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v2
- name: Install dependencies
run: sudo apt-get update && sudo apt-get install -qy uthash-dev
- name: Build & Test
uses: ashutoshvarma/action-cmake-build@master
with:
build-dir: ${{ runner.workspace }}/build
cc: gcc
build-type: Release
run-test: true
install-build: true
parallel: 2
|
Build production files for Precog | machine:
node:
version: 4.4.7
environment:
NODE_ENV: test
dependencies:
# Gulp must be installed in the global environment.
pre:
- npm install -g gulp-cli
post:
- npm install -g aws-cli
test:
post:
# For Precog.
- cp -R build data index.html $CIRCLE_ARTIFACTS
# For hosting on mapzen.com. Only deploy if tests pass. Compiled files are
# rebuilt for a production environment.
deployment:
production:
branch: master
commands:
- NODE_ENV=production gulp build
- cp -R build data index.html $CIRCLE_ARTIFACTS
- aws s3 sync $CIRCLE_ARTIFACTS $AWS_DESTINATION --delete
- ./deploy/invalidate_cloudfront.sh
| machine:
node:
version: 4.4.7
environment:
NODE_ENV: test
dependencies:
# Gulp must be installed in the global environment.
pre:
- npm install -g gulp-cli
post:
- npm install -g aws-cli
test:
post:
# For Precog.
- NODE_ENV=production gulp build
- cp -R build data index.html $CIRCLE_ARTIFACTS
# For hosting on mapzen.com. Only deploy if tests pass. Compiled files are
# rebuilt for a production environment.
deployment:
production:
branch: master
commands:
- aws s3 sync $CIRCLE_ARTIFACTS $AWS_DESTINATION --delete
- ./deploy/invalidate_cloudfront.sh
|
Add css build into CI | machine:
timezone:
Europe/Paris
node:
version: 8
services:
- docker
dependencies:
pre:
- npm uninstall -g npm
- curl -o- -L https://yarnpkg.com/install.sh | bash
override:
- yarn
test:
override:
- yarn run coverage
deployment:
production:
branch: master
owner: mdcarter
commands:
- yarn run deploy;
- yarn run coverage:publish; | machine:
timezone:
Europe/Paris
node:
version: 8
services:
- docker
dependencies:
pre:
- npm uninstall -g npm
- curl -o- -L https://yarnpkg.com/install.sh | bash
override:
- yarn
test:
override:
- yarn run build-css
- yarn run coverage
deployment:
production:
branch: master
owner: mdcarter
commands:
- yarn run deploy
- yarn run coverage:publish |
Fix accidentially changed contract name | - kind: dependency
metadata:
namespace: main
name: alice_stage
user: Alice
contract: twitter_statsa
labels:
# Deploy new version of visualization code (pass image tag for container)
tsvisimage: demo-v62
# San Francisco, Boston, New York
locations: -71.4415,41.9860,-70.4747,42.9041,-122.75,36.8,-121.75,37.8,-74,40,-73,41
| - kind: dependency
metadata:
namespace: main
name: alice_stage
user: Alice
contract: twitter_stats
labels:
# Deploy new version of visualization code (pass image tag for container)
tsvisimage: demo-v62
# San Francisco, Boston, New York
locations: -71.4415,41.9860,-70.4747,42.9041,-122.75,36.8,-121.75,37.8,-74,40,-73,41
|
Update from Forestry.io - Updated Forestry configuration | ---
new_page_extension: md
auto_deploy: false
admin_path:
webhook_url:
sections:
- type: directory
path: _events
label: Events
create: all
match: "**/*"
- type: directory
path: _programs
label: Programs
create: all
match: "**/*"
- type: directory
path: _team
label: Team
create: all
match: "**/*"
upload_dir: assets/img
public_path: assets/img
front_matter_path: ''
use_front_matter_path: false
file_template: ":filename:"
build:
preview_env:
- JEKYLL_ENV=staging
preview_output_directory: _site
install_dependencies_command: bundle install --path vendor/bundle
preview_docker_image: forestryio/ruby:2.6
mount_path: "/srv"
working_dir: "/srv"
instant_preview_command: bundle exec jekyll serve --drafts --unpublished --future
--port 8080 --host 0.0.0.0 -d _site
| ---
new_page_extension: md
auto_deploy: false
admin_path: ''
webhook_url:
sections:
- type: directory
path: _events
label: Events
create: all
match: "**/*"
- type: directory
path: _programs
label: Programs
create: all
match: "**/*"
- type: directory
path: _team
label: Team
create: all
match: "**/*"
upload_dir: assets/img
public_path: assets/img
front_matter_path: ''
use_front_matter_path: false
file_template: ":filename:"
build:
preview_env:
- JEKYLL_ENV=staging
preview_output_directory: _site
install_dependencies_command: bundle install --path vendor/bundle
preview_docker_image: forestryio/ruby:2.6
mount_path: "/srv"
working_dir: "/srv"
instant_preview_command: bundle exec jekyll serve --drafts --unpublished --future
--port 8080 --host 0.0.0.0 -d _site
|
Use black if not using 2.7 | name: Run tests
on: push
jobs:
test:
name: Run tests
runs-on: ubuntu-18.04
strategy:
matrix:
python-version: ["2.7", "3.6", "3.7", "3.8", "3.9"]
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- run: pip install pytest mock -r requirements.txt
- run: make schema
- run: PYTHONPATH=$GITHUB_WORKSPACE py.test -m "not hw_raspberrypi and not mqtt"
| name: Run tests
on: push
jobs:
test:
name: Run tests
runs-on: ubuntu-18.04
strategy:
matrix:
python-version: ["2.7", "3.6", "3.7", "3.8", "3.9"]
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- run: pip install pytest mock -r requirements.txt
- run: make schema
    - run: pip install black
      if: matrix.python-version != '2.7'
    - run: make black
      if: matrix.python-version != '2.7'
- run: PYTHONPATH=$GITHUB_WORKSPACE py.test -m "not hw_raspberrypi and not mqtt"
|
Revert "MySQL needs basedir to point to installed location" | ---
mysql::configdir: "%{::boxen::config::configdir}/mysql"
mysql::globalconfigprefix: "%{::boxen::config::homebrewdir}/opt/mysql"
mysql::datadir: "%{::boxen::config::datadir}/mysql"
mysql::bindir: "%{::boxen::config::homebrewdir}/bin"
mysql::executable: "%{::boxen::config::homebrewdir}/bin/mysqld_safe"
mysql::client: "%{::boxen::config::homebrewdir}/bin/mysql"
mysql::logdir: "%{::boxen::config::logdir}/mysql"
mysql::servicename: "dev.mysql"
mysql::user: "%{::boxen_user}"
mysql::host: "127.0.0.1"
mysql::port: "13306"
mysql::socket: "%{::boxen::config::datadir}/socket"
mysql::package: boxen/brews/mysql
mysql::version: 5.6.23
| ---
mysql::configdir: "%{::boxen::config::configdir}/mysql"
mysql::globalconfigprefix: "%{::boxen::config::homebrewdir}"
mysql::datadir: "%{::boxen::config::datadir}/mysql"
mysql::bindir: "%{::boxen::config::homebrewdir}/bin"
mysql::executable: "%{::boxen::config::homebrewdir}/bin/mysqld_safe"
mysql::client: "%{::boxen::config::homebrewdir}/bin/mysql"
mysql::logdir: "%{::boxen::config::logdir}/mysql"
mysql::servicename: "dev.mysql"
mysql::user: "%{::boxen_user}"
mysql::host: "127.0.0.1"
mysql::port: "13306"
mysql::socket: "%{::boxen::config::datadir}/socket"
mysql::package: boxen/brews/mysql
mysql::version: 5.6.23
|
Update apt after adding nginx repo | ---
- name: prereqs for adding an apt repo
apt: pkg=python-software-properties state=installed
# Play2 needs nginx > 1.2 ( http://www.playframework.com/documentation/2.1.x/HTTPServer ), which is not in most
# Ubuntu repo's by default. Hence, we add the nginx repo manually
- name: add stable nginx repo
apt_repository: repo=ppa:nginx/stable
- name: install nginx
apt: pkg=nginx state=present
- name: configure nginx
template: src=nginx.conf.j2 dest=/etc/nginx/nginx.conf
notify:
- restart nginx
- name: ensure nginx running
service: name=nginx state=running enabled=yes
| ---
- name: prereqs for adding an apt repo
apt: pkg=python-software-properties state=installed
# Play2 needs nginx > 1.2 ( http://www.playframework.com/documentation/2.1.x/HTTPServer ), which is not in most
# Ubuntu repo's by default. Hence, we add the nginx repo manually
- name: add stable nginx repo
apt_repository: repo=ppa:nginx/stable
- name: update apt with latest nginx
apt: update_cache=yes
- name: install nginx
apt: pkg=nginx state=present
- name: configure nginx
template: src=nginx.conf.j2 dest=/etc/nginx/nginx.conf
notify:
- restart nginx
- name: ensure nginx running
service: name=nginx state=running enabled=yes
|
Add more numpy version and remove macos from core test. | name: NeoCoreTest
on:
pull_request:
branches: [master]
types: [synchronize, opened, reopened, ready_for_review]
# run checks on any change of master, including merge of PRs
push:
branches: [master]
jobs:
multi-os-python-numpy:
runs-on: ${{ matrix.os }}
strategy:
fail-fast: true
matrix:
os: ["ubuntu-latest", "macos-latest", "windows-latest"]
python-version: ['3.7', '3.8', '3.9']
numpy-version: ['1.16.6', '1.19.5', '1.20.3', '1.21.5', '1.22.3']
exclude:
- python-version: '3.7'
numpy-version: '1.22.3'
steps:
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Checkout repository
uses: actions/checkout@v2
- name: Install numpy ${{ matrix.numpy-version }}
run: |
python -m pip install --upgrade pip
pip install numpy==${{ matrix.numpy-version }}
pip install pytest pytest-cov
pip install .
- name: List pip packages
run: |
pip -V
pip list
- name: Run tests
run: |
pytest --cov=neo neo/test/coretest
| name: NeoCoreTest
on:
pull_request:
branches: [master]
types: [synchronize, opened, reopened, ready_for_review]
# run checks on any change of master, including merge of PRs
push:
branches: [master]
jobs:
multi-os-python-numpy:
runs-on: ${{ matrix.os }}
strategy:
fail-fast: true
matrix:
os: ["ubuntu-latest", "windows-latest"]
# "macos-latest",
python-version: ['3.7', '3.8', '3.9']
numpy-version: ['1.16.6', '1.17.5', '1.18.5', '1.19.5', '1.20.3', '1.21.5', '1.22.3']
exclude:
- python-version: '3.7'
numpy-version: '1.22.3'
steps:
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Checkout repository
uses: actions/checkout@v2
- name: Install numpy ${{ matrix.numpy-version }}
run: |
python -m pip install --upgrade pip
pip install numpy==${{ matrix.numpy-version }}
pip install pytest pytest-cov
pip install .
- name: List pip packages
run: |
pip -V
pip list
- name: Run tests
run: |
pytest --cov=neo neo/test/coretest
|
Disable Arrow serialization for Spark JDBC driver | databases:
presto:
jdbc_user: root
delta:
jdbc_driver_class: com.databricks.client.jdbc.Driver
schema: default
prepare_statement:
- USE ${databases.delta.schema}
table_manager_type: jdbc
jdbc_url: ${DATABRICKS_JDBC_URL}
jdbc_user: ${DATABRICKS_LOGIN}
jdbc_password: ${DATABRICKS_TOKEN}
s3:
server_type: aws
| databases:
presto:
jdbc_user: root
delta:
jdbc_driver_class: com.databricks.client.jdbc.Driver
schema: default
prepare_statement:
- USE ${databases.delta.schema}
table_manager_type: jdbc
jdbc_url: ${DATABRICKS_JDBC_URL};EnableArrow=0
jdbc_user: ${DATABRICKS_LOGIN}
jdbc_password: ${DATABRICKS_TOKEN}
s3:
server_type: aws
|
Update from Hackage at 2019-05-17T18:00:37Z | homepage: https://github.com/jdreaver/eventful#readme
changelog-type: ''
hash: 8766a5ebc16044c254d1789ee63d01c8fdf9d1f878e720bf6069526d76d4e4d7
test-bench-deps: {}
maintainer: David Reaver
synopsis: Common library for SQL event stores
changelog: ''
basic-deps:
bytestring: -any
base: ! '>=4.9 && <5'
eventful-core: -any
persistent: -any
text: -any
uuid: -any
persistent-template: -any
mtl: -any
aeson: -any
all-versions:
- 0.1.0
- 0.1.1
- 0.1.2
- 0.1.3
- 0.2.0
author: ''
latest: 0.2.0
description-type: haddock
description: Common library for SQL event stores
license-name: MIT
| homepage: https://github.com/jdreaver/eventful#readme
changelog-type: ''
hash: 21e1b6dbc648b1c93720e7e557be5460177cbf4aa7e3caa7d49f80412ec058f0
test-bench-deps: {}
maintainer: David Reaver
synopsis: Common library for SQL event stores
changelog: ''
basic-deps:
bytestring: -any
base: ! '>=4.9 && <5'
eventful-core: ! '>=0.2 && <0.3'
persistent: -any
text: -any
uuid: -any
persistent-template: <2.7
mtl: -any
aeson: -any
all-versions:
- 0.1.0
- 0.1.1
- 0.1.2
- 0.1.3
- 0.2.0
author: ''
latest: 0.2.0
description-type: haddock
description: Common library for SQL event stores
license-name: MIT
|
Update from Hackage at 2021-08-23T23:19:26Z | homepage: ''
changelog-type: markdown
hash: 2f8b888144282d164af1bd872023610e093439116ea2a2e76b3f0a432e12f691
test-bench-deps: {}
maintainer: srid@srid.ca
synopsis: Extract "contextual links" from Pandoc
changelog: |
# Revision history for pandoc-link-context
## 1.0.0.0
* First version. Released on an unsuspecting world.
basic-deps:
base: '>=4.12 && <4.15'
text: -any
pandoc-types: -any
containers: -any
relude: '>=0.7.0.0'
all-versions:
- 1.0.0.0
author: Sridhar Ratnakumar
latest: 1.0.0.0
description-type: haddock
description: A library to pull out all links with their surrounding context in your
Pandoc documents. Useful for software dealing with wiki-links and Zettelkasten.
license-name: BSD-3-Clause
| homepage: ''
changelog-type: markdown
hash: 1bddfdb0522ee587d54a3b7345a00f0b51beea617717492f9316c399d4fa6158
test-bench-deps: {}
maintainer: srid@srid.ca
synopsis: Extract "contextual links" from Pandoc
changelog: "# Revision history for pandoc-link-context\n\n## 1.2.0.0\n\n- Include
Link attributes (excluded id/class) in results \n- Allow mulitple links using the
same Url\n\n## 1.0.0.0\n\n* First version. Released on an unsuspecting world.\n"
basic-deps:
base: '>=4.12 && <4.15'
text: -any
pandoc-types: -any
containers: -any
relude: '>=0.7.0.0'
all-versions:
- 1.0.0.0
- 1.2.0.0
author: Sridhar Ratnakumar
latest: 1.2.0.0
description-type: haddock
description: A library to pull out all links with their surrounding context in your
Pandoc documents. Useful for software dealing with wiki-links and Zettelkasten.
license-name: BSD-3-Clause
|
Change repo to base on bootstrap 4 | # Welcome to Jekyll!
#
# This config file is meant for settings that affect your whole blog, values
# which you are expected to set up once and rarely need to edit after that.
# For technical reasons, this file is *NOT* reloaded automatically when you use
# 'jekyll serve'. If you change this file, please restart the server process.
# Site settings
title: Clear UI
email: minhbkpro@gmail.com
description: > # this means to ignore newlines until "baseurl:"
Free theme base on bootstrap 3
baseurl: "" # the subpath of your site, e.g. /blog
url: "http://yourdomain.com" # the base hostname & protocol for your site
twitter_username: jekyllrb
github_username: jekyll
# Build settings
markdown: kramdown
| # Welcome to Jekyll!
#
# This config file is meant for settings that affect your whole blog, values
# which you are expected to set up once and rarely need to edit after that.
# For technical reasons, this file is *NOT* reloaded automatically when you use
# 'jekyll serve'. If you change this file, please restart the server process.
# Site settings
title: Clear UI
email: minhbkpro@gmail.com
description: > # this means to ignore newlines until "baseurl:"
Free theme base on bootstrap 4
baseurl: "" # the subpath of your site, e.g. /blog
url: "http://yourdomain.com" # the base hostname & protocol for your site
twitter_username: jekyllrb
github_username: jekyll
# Build settings
markdown: kramdown
|
Brush up CI, add Windows build | # https://help.github.com/en/categories/automating-your-workflow-with-github-actions
on:
pull_request:
push:
name: "Continuous Integration"
jobs:
tests:
name: "Tests"
runs-on: "ubuntu-latest"
strategy:
matrix:
php-version:
- "7.1"
- "7.2"
- "7.3"
- "7.4"
- "8.0"
steps:
- name: "Checkout"
uses: "actions/checkout@v2"
- name: "Install PHP with extensions"
uses: "shivammathur/setup-php@v2"
with:
coverage: "none"
extensions: "intl"
php-version: "${{ matrix.php-version }}"
- name: "Install dependencies with composer"
run: "composer update --no-interaction --no-progress"
- name: "Run tests with phpunit/phpunit"
run: "vendor/bin/phpunit"
| # https://help.github.com/en/categories/automating-your-workflow-with-github-actions
on:
pull_request:
push:
name: Continuous Integration
jobs:
tests:
name: CI on ${{ matrix.operating-system }} with PHP ${{ matrix.php-version }}
strategy:
matrix:
operating-system: ['ubuntu-latest']
php-version: ['7.1', '7.2', '7.3', '7.4', '8.0']
include:
- { operating-system: 'windows-latest', php-version: '7.1' }
runs-on: ${{ matrix.operating-system }}
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Install PHP with extensions
uses: shivammathur/setup-php@v2
with:
extensions: intl
php-version: ${{ matrix.php-version }}
- name: Get composer cache directory
id: composer-cache
run: echo "::set-output name=dir::$(composer config cache-files-dir)"
- name: Cache dependencies
uses: actions/cache@v2
with:
path: ${{ steps.composer-cache.outputs.dir }}
key: composer-${{ runner.os }}-${{ matrix.php-version }}-${{ hashFiles('composer.*') }}
restore-keys: |
composer-${{ runner.os }}-${{ matrix.php-version }}-
composer-${{ runner.os }}-
composer-
- name: Install dependencies with Composer
run: |
composer update --no-interaction --no-progress
- name: Run tests with PHPUnit
run: |
vendor/bin/phpunit
|
Remove extra line at top |
name: Spring Break
category: Break Notice
schedule:
- title: We're on break! Hours may be wrong
notes: Okay, hours are probably wrong.
hours: []
breakSchedule:
fall: []
thanksgiving: []
winter: []
interim: []
spring: []
easter: []
summer: []
| name: Spring Break
category: Break Notice
schedule:
- title: We're on break! Hours may be wrong
notes: Okay, hours are probably wrong.
hours: []
breakSchedule:
fall: []
thanksgiving: []
winter: []
interim: []
spring: []
easter: []
summer: []
|
Remove global npm installation in Wercker | box: nodesource/trusty:LTS
build:
steps:
- script:
name: npm versions before
code: |
npm --versions
- script:
name: Install PhantomJS
code: |
sudo apt-get update
sudo apt-get install -y phantomjs
- script:
name: Where was PhantomJS installed and its version
code: |
which phantomjs
phantomjs --version
- script:
name: Install global stuff which seems to be needed
code: |
sudo npm i -g npm eslint mocha-phantomjs
- script:
name: npm versions before
code: |
npm --versions
- npm-install
- npm-test
| box: nodesource/trusty:LTS
build:
steps:
- script:
name: npm versions
code: |
npm --versions
- script:
name: Install PhantomJS
code: |
sudo apt-get update
sudo apt-get install -y phantomjs
- script:
name: Where is PhantomJS installed and its version
code: |
which phantomjs
phantomjs --version
- npm-install
- npm-test
|
Update platforms in Test Kitchen | driver:
name: vagrant
provisioner:
name: chef_zero
platforms:
- name: centos-5.11
- name: centos-6.7
- name: centos-7.2
- name: debian-7.9
run_list: apt::default
- name: debian-8.2
run_list: apt::default
- name: fedora-22
run_list: yum::dnf_yum_compat
- name: fedora-23
run_list: yum::dnf_yum_compat
- name: ubuntu-12.04
run_list: apt::default
- name: ubuntu-14.04
run_list: apt::default
suites:
- name: default
run_list:
- recipe[iptables_test::default]
attributes:
iptables:
iptables_sysconfig:
IPTABLES_STATUS_VERBOSE: 'yes'
ip6tables_sysconfig:
IPTABLES_STATUS_VERBOSE: 'yes'
- name: disabled
run_list:
- recipe[iptables::disabled]
- name: no_template
run_list:
- recipe[iptables_test::no_template]
attributes:
iptables:
iptables_sysconfig:
IPTABLES_STATUS_VERBOSE: 'yes'
ip6tables_sysconfig:
IPTABLES_STATUS_VERBOSE: 'yes'
| driver:
name: vagrant
provisioner:
name: chef_zero
platforms:
- name: centos-5.11
- name: centos-6.7
- name: centos-7.2
- name: debian-7.10
run_list: apt::default
- name: debian-8.4
run_list: apt::default
- name: fedora-22
run_list: yum::dnf_yum_compat
- name: fedora-23
run_list: yum::dnf_yum_compat
- name: ubuntu-12.04
run_list: apt::default
- name: ubuntu-14.04
run_list: apt::default
suites:
- name: default
run_list:
- recipe[iptables_test::default]
attributes:
iptables:
iptables_sysconfig:
IPTABLES_STATUS_VERBOSE: 'yes'
ip6tables_sysconfig:
IPTABLES_STATUS_VERBOSE: 'yes'
- name: disabled
run_list:
- recipe[iptables::disabled]
- name: no_template
run_list:
- recipe[iptables_test::no_template]
attributes:
iptables:
iptables_sysconfig:
IPTABLES_STATUS_VERBOSE: 'yes'
ip6tables_sysconfig:
IPTABLES_STATUS_VERBOSE: 'yes'
|
Switch to recommended boxes + customize a bit | ---
driver_plugin: vagrant
platforms:
- name: ubuntu-12.04
driver_config:
box: canonical-ubuntu-12.04
box_url: http://cloud-images.ubuntu.com/vagrant/precise/current/precise-server-cloudimg-amd64-vagrant-disk1.box
require_chef_omnibus: true
run_list:
- recipe[apt]
- name: centos-6.3
driver_config:
box: opscode-centos-6.3
box_url: http://opscode-vm.s3.amazonaws.com/vagrant/opscode_centos-6.3_chef-11.2.0.box
run_list:
- recipe[yum]
suites:
- name: default
run_list:
- recipe[java::openjdk]
- recipe[sbt-extras]
attributes:
java:
jdk_version: 7
sbt-extras:
sbtopts:
mem: 128
| ---
driver_plugin: vagrant
platforms:
- name: ubuntu-12.04
driver_config:
box: opscode-ubuntu-12.04
box_url: http://cloud-images.ubuntu.com/vagrant/precise/current/precise-server-cloudimg-amd64-vagrant-disk1.box
require_chef_omnibus: true
customize:
memory: 512
run_list:
- recipe[apt]
- name: centos-6.4
driver_config:
box: opscode-centos-6.4
box_url: https://opscode-vm.s3.amazonaws.com/vagrant/opscode_centos-6.4_provisionerless.box
require_chef_omnibus: true
customize:
memory: 512
run_list:
- recipe[yum]
suites:
- name: default
run_list:
- recipe[java::openjdk]
- recipe[sbt-extras]
attributes:
java:
jdk_version: 7
sbt-extras:
sbtopts:
mem: 256
owner: vagrant
group: vagrant
setup_dir: /usr/local/sbt
preinstall_matrix:
vagrant: ['0.12.3', '0.11.3']
|
Enable new cops by default | AllCops:
Exclude:
- 'bin/*'
Documentation:
Enabled: false
Metrics/BlockLength:
Exclude:
- 'spec/**/*.rb'
| AllCops:
Exclude:
- 'bin/*'
NewCops: enable
Documentation:
Enabled: false
Metrics/BlockLength:
Exclude:
- 'spec/**/*.rb'
|
Revert to non-broken version of mdtraj | package:
name: fahmunge-dev
version: 0.0.0
source:
path: ../..
build:
preserve_egg_dir: True
number: 0
entry_points:
- munge-fah-data = fahmunge.cli:main
requirements:
build:
- setuptools
- python
- numpy
- pandas
- mdtraj-dev
run:
- python
- numpy
- pandas
- mdtraj-dev
test:
requires:
- nose
- nose-timer
- fahmunge-dev
imports:
- fahmunge
commands:
- nosetests fahmunge --nocapture --verbosity=2 --with-doctest --with-timer
- munge-fah-data --help
# Test on some data
- git clone https://github.com/choderalab/fahmunge-testdata.git
- munge-fah-data --projects fahmunge-testdata/projects.csv --outpath munged
about:
home: https://github.com/choderalab/fahmunge
license: GNU Lesser General Public License v2 or later (LGPLv2+)
| package:
name: fahmunge-dev
version: 0.0.0
source:
path: ../..
build:
preserve_egg_dir: True
number: 0
entry_points:
- munge-fah-data = fahmunge.cli:main
requirements:
build:
- setuptools
- python
- numpy
- pandas
- mdtraj 1.5.1 # TODO remove me when segment_id bug is fixed (this is installed through openmoltools)
run:
- python
- numpy
- pandas
- mdtraj 1.5.1 # TODO remove me when segment_id bug is fixed (this is installed through openmoltools)
test:
requires:
- nose
- nose-timer
- fahmunge-dev
imports:
- fahmunge
commands:
- nosetests fahmunge --nocapture --verbosity=2 --with-doctest --with-timer
- munge-fah-data --help
# Test on some data
- git clone https://github.com/choderalab/fahmunge-testdata.git
- munge-fah-data --projects fahmunge-testdata/projects.csv --outpath munged
about:
home: https://github.com/choderalab/fahmunge
license: GNU Lesser General Public License v2 or later (LGPLv2+)
|
Update from Hackage at 2017-07-04T14:04:18Z | homepage: ''
changelog-type: markdown
hash: 85e9949bc1de917b5d1e377ae65331d662c89411f1d96acbd15f86963faa05bc
test-bench-deps: {}
maintainer: zaomir@outlook.com
synopsis: ''
changelog: ! '# Revision history for singnal
## 0.1.0.0 -- YYYY-mm-dd
* First version. Released on an unsuspecting world.
'
basic-deps:
base: ! '>=4.9 && <4.10'
all-versions:
- '0.1.0.0'
author: Zaoqi
latest: '0.1.0.0'
description-type: haddock
description: Singnal
license-name: AGPL-3
| homepage: https://github.com/zaoqi/Signal.hs
changelog-type: markdown
hash: d5fed58448b5df04ed9ff6673a8ebebe8c624d7634cbcc1ecc08952d5080001d
test-bench-deps: {}
maintainer: zaomir@outlook.com
synopsis: Singnal
changelog: ! '# Revision history for singnal
## 0.1.1.0 -- 2017-07-04
* Add runStreamSignal, isStreamSignal, noSampleOn
## 0.1.0.0 -- 2017-07-03
* First version. Released on an unsuspecting world.
'
basic-deps:
base: ! '>=4.9 && <4.10'
all-versions:
- '0.1.0.0'
- '0.1.1.0'
author: Zaoqi
latest: '0.1.1.0'
description-type: haddock
description: Singnal
license-name: AGPL-3
|
Allow snap app to access $HOME directory. | name: storjshare
version: 3.1.0
summary: farm data on the Storj network.
description: |
Earn money by sharing your hard drive space.
Daemon + CLI for farming data on the Storj network.
grade: stable
confinement: strict
apps:
storjshare:
command: storjshare
plugs: [network, network-bind]
parts:
storjshare-daemon:
source: .
plugin: nodejs
node-engine: '6.10.0'
build-packages: [git, python, build-essential]
stage-packages: [nano]
| name: storjshare
version: 3.1.0
summary: farm data on the Storj network.
description: |
Earn money by sharing your hard drive space.
Daemon + CLI for farming data on the Storj network.
grade: stable
confinement: strict
apps:
storjshare:
command: storjshare
plugs: [network, network-bind, home]
parts:
storjshare-daemon:
source: .
plugin: nodejs
node-engine: '6.10.0'
build-packages: [git, python, build-essential]
stage-packages: [nano]
|
Remove outdate comments from AppVeyor config | version: "{build}"
build: off
cache:
- c:\Users\appveyor\.node-gyp
- '%AppData%\npm-cache'
environment:
SKIP_SASS_BINARY_DOWNLOAD_FOR_CI: true
matrix:
# node.js
- nodejs_version: 0.10
- nodejs_version: 0.12
# io.js
- nodejs_version: 1
- nodejs_version: 2
- nodejs_version: 3
# node
- nodejs_version: 4
install:
- ps: Install-Product node $env:nodejs_version
- node --version
- npm --version
- git submodule update --init --recursive
- npm install --msvs_version=2013
test_script: npm test
on_success:
# Save artifact with full qualified names of binding.node
# (which we use in node-sass-binaries repo)
- ps: Get-ChildItem .\vendor\**\*.node | % `
{
Push-AppveyorArtifact $_.FullName -FileName
(($_.FullName.Split('\\') | Select-Object -Last 2) -join '_')
}
| version: "{build}"
build: off
cache:
- c:\Users\appveyor\.node-gyp
- '%AppData%\npm-cache'
environment:
SKIP_SASS_BINARY_DOWNLOAD_FOR_CI: true
matrix:
- nodejs_version: 0.10
- nodejs_version: 0.12
- nodejs_version: 1
- nodejs_version: 2
- nodejs_version: 3
- nodejs_version: 4
install:
- ps: Install-Product node $env:nodejs_version
- node --version
- npm --version
- git submodule update --init --recursive
- npm install --msvs_version=2013
test_script: npm test
on_success:
# Save artifact with full qualified names of binding.node
# (which we use in node-sass-binaries repo)
- ps: Get-ChildItem .\vendor\**\*.node | % `
{
Push-AppveyorArtifact $_.FullName -FileName
(($_.FullName.Split('\\') | Select-Object -Last 2) -join '_')
}
|
Test on Node 5 instead of 4 | # http://www.appveyor.com/docs/appveyor-yml
clone_depth: 10
# Fix line endings in Windows. (runs before repo cloning)
init:
- git config --global core.autocrlf input
# Test against these versions of Node.js.
environment:
matrix:
- nodejs_version: "0.12"
- nodejs_version: "4"
# Install scripts. (runs after repo cloning)
install:
# Get the Node version with matching major & minor numbers
- ps: Install-Product node $env:nodejs_version
# Log HTTP requests.
- npm config set loglevel http
# Typical npm stuff.
- npm install
# Post-install test scripts.
test_script:
# Output useful info for debugging.
- node --version
- npm --version
- npm test
# Don't actually build.
build: off
# Finish immediately if one of the jobs fails.
matrix:
fast_finish: true
# Set up cache, clear it on package.json changes.
cache:
# npm cache.
- C:\Users\appveyor\AppData\Roaming\npm-cache -> package.json
# Local npm packages.
- node_modules -> package.json
# Set build version format here instead of in the admin panel.
version: "{build}"
| # http://www.appveyor.com/docs/appveyor-yml
clone_depth: 10
# Fix line endings in Windows. (runs before repo cloning)
init:
- git config --global core.autocrlf input
# Test against these versions of Node.js.
environment:
matrix:
- nodejs_version: "0.12"
- nodejs_version: "5"
# Install scripts. (runs after repo cloning)
install:
# Get the Node version with matching major & minor numbers
- ps: Install-Product node $env:nodejs_version
# Log HTTP requests.
- npm config set loglevel http
# Typical npm stuff.
- npm install
# Post-install test scripts.
test_script:
# Output useful info for debugging.
- node --version
- npm --version
- npm test
# Don't actually build.
build: off
# Finish immediately if one of the jobs fails.
matrix:
fast_finish: true
# Set up cache, clear it on package.json changes.
cache:
# npm cache.
- C:\Users\appveyor\AppData\Roaming\npm-cache -> package.json
# Local npm packages.
- node_modules -> package.json
# Set build version format here instead of in the admin panel.
version: "{build}"
|
Set CI variable on Linux to match feedstocks | jobs:
- job: linux_64
pool:
vmImage: ubuntu-16.04
strategy:
maxParallel: 8
matrix:
linux:
CONFIG: azure-linux-64-comp7
CF_MAX_PY_VER: 37
AZURE: True
timeoutInMinutes: 360
steps:
- script: |
sudo pip install --upgrade pip
sudo pip install setuptools shyaml
displayName: Install dependencies
- script: .circleci/run_docker_build.sh
displayName: Run docker build
| jobs:
- job: linux_64
pool:
vmImage: ubuntu-16.04
strategy:
maxParallel: 8
matrix:
linux:
CONFIG: azure-linux-64-comp7
CF_MAX_PY_VER: 37
AZURE: True
timeoutInMinutes: 360
steps:
- script: |
sudo pip install --upgrade pip
sudo pip install setuptools shyaml
displayName: Install dependencies
- script: |
export CI=azure
.circleci/run_docker_build.sh
displayName: Run docker build
|
Add redhat-lsb-core to default packages (for lsb_release command) | ---
# Copyright Red Hat, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
base_packages:
- deltarpm
- "@Development tools"
- git
- python-setuptools
- wget
debug_packages:
- net-tools
- lsof
- tcpdump
| ---
# Copyright Red Hat, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
base_packages:
- deltarpm
- "@Development tools"
- git
- python-setuptools
- wget
- redhat-lsb-core
debug_packages:
- net-tools
- lsof
- tcpdump
|
Update GitHub Pages URL for the Closure Library site. RELNOTES: n/a | title: Closure Library
email: sdh@google.com
description: Documentation for Google Closure Library.
baseurl: "/closure-library" # the subpath of your site, e.g. /blog
url: "https://google.github.com/closure-library"
github_username: google
destination: closure-library
# Defaults to plug into all Jekyll frontmatters
defaults:
- scope:
path: ""
values:
root_url: "/closure-library"
github_url: "https://github.com/google/closure-library"
product_name: "Closure Library"
ga: "UA-64923423-1"
- scope:
path: "develop"
values:
layout: article
| title: Closure Library
email: sdh@google.com
description: Documentation for Google Closure Library.
baseurl: "/closure-library" # the subpath of your site, e.g. /blog
url: "https://google.github.io/closure-library"
github_username: google
destination: closure-library
# Defaults to plug into all Jekyll frontmatters
defaults:
- scope:
path: ""
values:
root_url: "/closure-library"
github_url: "https://github.com/google/closure-library"
product_name: "Closure Library"
ga: "UA-64923423-1"
- scope:
path: "develop"
values:
layout: article
|
Add mountpoints to fstab, ensure fuse-utils is present. | ---
- name: Install build deps for s3fs
apt: pkg={{ item }} state=installed
with_items:
- build-essential
- libfuse-dev
- libcurl4-openssl-dev
- libxml2-dev
- mime-support
- automake
- libtool
- name: Download latest s3fs tarball
get_url: url=https://github.com/s3fs-fuse/s3fs-fuse/archive/v{{ s3fs_version }}.tar.gz
dest={{ default_build_dir }}/s3fs.tar.gz
- name: Extract s3fs
unarchive: src={{ default_build_dir }}/s3fs.tar.gz
dest={{ default_build_dir }} copy=no
creates={{ default_build_dir }}/s3fs-fuse-{{ s3fs_version }}
- name: Install s3fs
shell: ./autogen.sh && ./configure --prefix=/usr && make && make install
args:
chdir: "{{ default_build_dir }}/s3fs-fuse-{{ s3fs_version }}"
creates: /usr/bin/s3fs
- name: Create mountpoint
file: path={{ item.path }} state=directory
with_items: s3_mountpoints
- name: Add s3fs auth file
template: src=s3fs.j2 dest=/etc/passwd-s3fs mode=600
| ---
- name: Install build deps for s3fs
apt: pkg={{ item }} state=installed
with_items:
- build-essential
- libfuse-dev
- fuse-utils
- libcurl4-openssl-dev
- libxml2-dev
- mime-support
- automake
- libtool
- name: Download latest s3fs tarball
get_url: url=https://github.com/s3fs-fuse/s3fs-fuse/archive/v{{ s3fs_version }}.tar.gz
dest={{ default_build_dir }}/s3fs.tar.gz
- name: Extract s3fs
unarchive: src={{ default_build_dir }}/s3fs.tar.gz
dest={{ default_build_dir }} copy=no
creates={{ default_build_dir }}/s3fs-fuse-{{ s3fs_version }}
- name: Install s3fs
shell: ./autogen.sh && ./configure --prefix=/usr && make && make install
args:
chdir: "{{ default_build_dir }}/s3fs-fuse-{{ s3fs_version }}"
creates: /usr/bin/s3fs
- name: Add s3fs auth file
template: src=s3fs.j2 dest=/etc/passwd-s3fs mode=600
- name: Create mountpoint
file: path={{ item.path }} state=directory
with_items: s3_mountpoints
- name: Add any mountpoints to fstab
mount: name={{ item.path }} state=mounted fstype=fuse
src='s3fs#{{ item.bucket }}' opts=allow_other,uid=radio,gid=radio
with_items: s3_mountpoints
|
Change hourly API update to process all semesters | ---
- apt: name={{ item }}
with_items:
- build-essential
- php5-curl
- php5-mysql
- python-mysqldb # Required for mysql ansible modules
- { include: install_dependencies.yml, sudo: no }
- cron: name='Update API data' special_time=hourly job='cd {{ project_dir }}/api && grunt ay2014to2015sem1 rsync --target=dist' user={{ user }}
- mysql_db: name={{ yourls_db_name }}
- mysql_user:
name={{ yourls_db_user }}
password={{ yourls_db_pass }}
priv={{ yourls_db_name }}.*:ALL
- template:
src=etc/nginx/sites-available/{{ item }}.j2
dest=/etc/nginx/sites-available/{{ item }}
with_items: domains
- file:
src=/etc/nginx/sites-available/{{ item }}
dest=/etc/nginx/sites-enabled/{{ item }}
state=link
with_items: domains
notify: reload nginx
| ---
- apt: name={{ item }}
with_items:
- build-essential
- php5-curl
- php5-mysql
- python-mysqldb # Required for mysql ansible modules
- { include: install_dependencies.yml, sudo: no }
- cron: name='Update API data' special_time=hourly job='cd {{ project_dir }}/api && grunt all rsync --target=dist' user={{ user }}
- mysql_db: name={{ yourls_db_name }}
- mysql_user:
name={{ yourls_db_user }}
password={{ yourls_db_pass }}
priv={{ yourls_db_name }}.*:ALL
- template:
src=etc/nginx/sites-available/{{ item }}.j2
dest=/etc/nginx/sites-available/{{ item }}
with_items: domains
- file:
src=/etc/nginx/sites-available/{{ item }}
dest=/etc/nginx/sites-enabled/{{ item }}
state=link
with_items: domains
notify: reload nginx
|
FIX : sqlectron - command problem solved, script created to get sqlectron version, grep is working now | ---
- name: check if Sqlectron is installed?
command: "dnf list installed | grep -i sqlectron | grep {{ sqlectron.version }}"
register: current_version
changed_when: false
ignore_errors: true
args:
warn: no
- debug:
msg: "current_version : {{ current_version.stdout }}"
- block:
- name: install the Sqlectron packages
package:
name: {{ item }}
state: present
with_items:
- "{{ sqlectron.url_rpm}}"
become: yes
when: current_version.stdout is defined and sqlectron.version not in current_version.stdout
when: current_version.stdout | version_compare(sqlectron.version, '>')
| ---
- name: check if Sqlectron is installed?
shell: "{{playbook_dir}}/roles/sqlectron/scripts/grepSqlectron.sh -i={{ sqlectron.version }}"
register: sqlectron_current_version
changed_when: false
failed_when: false
become: yes
- debug:
msg: "sqlectron_current_version : {{ sqlectron_current_version.stdout }}"
- block:
- name: install the Sqlectron packages
package:
name: "{{ item }}"
state: present
with_items:
- "{{ sqlectron.url_rpm}}"
become: yes
when: sqlectron_current_version.stdout == "" or sqlectron_current_version.stdout | version_compare(sqlectron.version, '>')
|
Revert "Disable coveralls while it is down" | init:
- git config --global core.autocrlf input
environment:
COVERALLS_REPO_TOKEN:
secure: 1dnzq+xYoQT5eCrmH7IE/npTL0kpQ3IyeMYSWK1NvVuAXX6NNbhvO5eP1SCr7q/X
build_script:
- cmd: build.cmd
- cmd: build.cmd Coverage
test: off
version: 0.3.3.{build}
artifacts:
- path: bin
name: bin
| init:
- git config --global core.autocrlf input
environment:
COVERALLS_REPO_TOKEN:
secure: 1dnzq+xYoQT5eCrmH7IE/npTL0kpQ3IyeMYSWK1NvVuAXX6NNbhvO5eP1SCr7q/X
build_script:
- cmd: build.cmd
- cmd: build.cmd Coverage
- cmd: packages\coveralls.net\csmacnz.Coveralls.exe --opencover -i coverage.xml --repoToken %COVERALLS_REPO_TOKEN% --useRelativePaths --commitId %APPVEYOR_REPO_COMMIT% --commitBranch "%APPVEYOR_REPO_BRANCH%" --commitAuthor "%APPVEYOR_REPO_COMMIT_AUTHOR%" --commitEmail %APPVEYOR_REPO_COMMIT_AUTHOR_EMAIL% --commitMessage "%APPVEYOR_REPO_COMMIT_MESSAGE%" --jobId %APPVEYOR_JOB_ID%
test: off
version: 0.3.3.{build}
artifacts:
- path: bin
name: bin
|
Update Auto Auto-Rotate to 0.3 (3) | Categories:
- System
License: GPL-3.0-only
SourceCode: https://gitlab.com/juanitobananas/auto-auto-rotate
IssueTracker: https://gitlab.com/juanitobananas/auto-auto-rotate/issues
AutoName: Auto Auto-Rotate
RepoType: git
Repo: https://gitlab.com/juanitobananas/auto-auto-rotate
Builds:
- versionName: '0.2'
versionCode: 2
commit: v0.2
subdir: app
submodules: true
gradle:
- yes
AutoUpdateMode: Version v%v
UpdateCheckMode: Tags
CurrentVersion: '0.2'
CurrentVersionCode: 2
| Categories:
- System
License: GPL-3.0-only
SourceCode: https://gitlab.com/juanitobananas/auto-auto-rotate
IssueTracker: https://gitlab.com/juanitobananas/auto-auto-rotate/issues
AutoName: Auto Auto-Rotate
RepoType: git
Repo: https://gitlab.com/juanitobananas/auto-auto-rotate
Builds:
- versionName: '0.2'
versionCode: 2
commit: v0.2
subdir: app
submodules: true
gradle:
- yes
- versionName: '0.3'
versionCode: 3
commit: v0.3
subdir: app
submodules: true
gradle:
- yes
AutoUpdateMode: Version v%v
UpdateCheckMode: Tags
CurrentVersion: '0.3'
CurrentVersionCode: 3
|
Remove pancurses from windows build | os: Visual Studio 2015
cache:
- c:\cargo\registry
- c:\cargo\git
init:
- mkdir c:\cargo
- mkdir c:\rustup
- SET PATH=c:\cargo\bin;%PATH%
environment:
CARGO_HOME: "c:\\cargo"
RUSTUP_HOME: "c:\\rustup"
matrix:
- TARGET: i686-pc-windows-msvc
CHANNEL: stable
- TARGET: x86_64-pc-windows-msvc
CHANNEL: stable
install:
- curl -sSf -o rustup-init.exe https://win.rustup.rs/
- rustup-init.exe -y --default-host %TARGET% --no-modify-path
- if defined MSYS2_BITS set PATH=%PATH%;C:\msys64\mingw%MSYS2_BITS%\bin
- rustc -V
- cargo -V
# 'cargo test' takes care of building for us, so disable Appveyor's build stage. This prevents
# the "directory does not contain a project or solution file" error.
build: false
# Equivalent to Travis' `script` phase
# TODO modify this phase as you see fit
test_script:
- cd cursive && cargo test --verbose --all --no-default-features --features markdown,pancurses-backend,crossterm-backend
| os: Visual Studio 2015
cache:
- c:\cargo\registry
- c:\cargo\git
init:
- mkdir c:\cargo
- mkdir c:\rustup
- SET PATH=c:\cargo\bin;%PATH%
environment:
CARGO_HOME: "c:\\cargo"
RUSTUP_HOME: "c:\\rustup"
matrix:
- TARGET: i686-pc-windows-msvc
CHANNEL: stable
- TARGET: x86_64-pc-windows-msvc
CHANNEL: stable
install:
- curl -sSf -o rustup-init.exe https://win.rustup.rs/
- rustup-init.exe -y --default-host %TARGET% --no-modify-path
- if defined MSYS2_BITS set PATH=%PATH%;C:\msys64\mingw%MSYS2_BITS%\bin
- rustc -V
- cargo -V
# 'cargo test' takes care of building for us, so disable Appveyor's build stage. This prevents
# the "directory does not contain a project or solution file" error.
build: false
# Equivalent to Travis' `script` phase
# TODO modify this phase as you see fit
test_script:
- cd cursive && cargo test --verbose --all --no-default-features --features markdown,crossterm-backend
|
Add TLS directory if TLS is enabled | ---
# File: tasks/tls.yml - TLS tasks for Vault
- name: Vault SSL Certificate and Key
copy:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
owner: "{{ vault_user }}"
group: "{{ vault_group }}"
mode: "{{ item.mode }}"
with_items:
- src: "{{ vault_tls_cert_file }}"
dest: "{{ vault_tls_cert_file_dest }}"
mode: "0644"
- src: "{{ vault_tls_key_file }}"
dest: "{{ vault_tls_key_file_dest }}"
mode: "0600"
| ---
# File: tasks/tls.yml - TLS tasks for Vault
- name: Create tls directory
file:
dest: "{{ item }}"
state: directory
owner: "{{ vault_user }}"
group: "{{ vault_group}}"
with_items:
- "{{ vault_tls_config_path }}"
- name: Vault SSL Certificate and Key
copy:
src: "{{ item.src }}"
dest: "{{ item.dest }}"
owner: "{{ vault_user }}"
group: "{{ vault_group }}"
mode: "{{ item.mode }}"
with_items:
- src: "{{ vault_tls_cert_file }}"
dest: "{{ vault_tls_cert_file_dest }}"
mode: "0644"
- src: "{{ vault_tls_key_file }}"
dest: "{{ vault_tls_key_file_dest }}"
mode: "0600"
|
Update Travis with explicit emails | language: node_js
node_js:
- '0.10'
before_install:
- currentfolder=${PWD##*/}
- if [ "$currentfolder" != 'core' ]; then cd .. && eval "mv $currentfolder core" && cd core; fi
- npm install -g grunt-cli
| language: node_js
node_js:
- '0.10'
before_install:
- currentfolder=${PWD##*/}
- if [ "$currentfolder" != 'core' ]; then cd .. && eval "mv $currentfolder core" && cd core; fi
- npm install -g grunt-cli
notifications:
email:
- dave@famo.us
- fetterman@famo.us
- myles@famo.us
|
Switch to Bionic distribution for CI | language: python
python:
- "2.7"
- "3.6"
- "3.7"
- "pypy"
install:
- pip install .
- pip install -r tests/requirements.txt
# - ./tests/setup.sh
script: nosetests -v --with-coverage --cover-package=project_generator
after_success: coveralls
sudo: false
| dist: bionic
language: python
python:
- "2.7"
- "3.6"
- "3.7"
- "pypy"
install:
- pip install .
- pip install -r tests/requirements.txt
# - ./tests/setup.sh
script: nosetests -v --with-coverage --cover-package=project_generator
after_success: coveralls
sudo: false
|
Add Laravel 6 Support Test | language: php
php:
- 7.1
- 7.2
- 7.3
env:
matrix:
- COMPOSER_FLAGS="--prefer-lowest"
- COMPOSER_FLAGS=""
before_script:
- travis_retry composer self-update
- travis_retry composer update ${COMPOSER_FLAGS} --no-interaction --prefer-source
script:
- phpunit --coverage-text --coverage-clover=coverage.clover
after_script:
- php vendor/bin/ocular code-coverage:upload --format=php-clover coverage.clover | language: php
php:
- 7.2
- 7.3
env:
matrix:
- COMPOSER_FLAGS="--prefer-lowest"
- COMPOSER_FLAGS=""
before_script:
- travis_retry composer self-update
- travis_retry composer update ${COMPOSER_FLAGS} --no-interaction --prefer-source
script:
- phpunit --coverage-text --coverage-clover=coverage.clover
after_script:
- php vendor/bin/ocular code-coverage:upload --format=php-clover coverage.clover |
Use coffee binary from node_modules | language: node_js
install: npm install -g
script:
- coffee -o . -c .
- jasmine-node --captureExceptions --runWithRequireJs --coffee --verbose spec
| language: node_js
install: npm install -g
script:
- node_modules/.bin/coffee -o . -c .
- jasmine-node --captureExceptions --runWithRequireJs --coffee --verbose spec
|
Install keras and tensorflow on test machine. | language: python
sudo: false
env:
global:
- CONDA_DEPS="pip flake8 pytest numpy scipy matplotlib pandas" PIP_DEPS="coveralls pytest-cov" MINICONDA_URL="https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh"
python:
- '2.7'
- '3.5'
- '3.6'
before_install:
- export MINICONDA=$HOME/miniconda
- export PATH="$MINICONDA/bin:$PATH"
- hash -r
- echo $MINICONDA_URL
- wget $MINICONDA_URL -O miniconda.sh;
- bash miniconda.sh -b -f -p $MINICONDA;
- conda config --set always_yes yes
- conda update conda
- conda info -a
- conda config --add channels conda-forge
- conda install python=$TRAVIS_PYTHON_VERSION $CONDA_DEPS
- travis_retry pip install $PIP_DEPS
install:
- python setup.py install
script:
- flake8 --ignore N802,N806 `find . -name \*.py | grep -v setup.py | grep -v /doc/`
- mkdir for_test
- cd for_test
- py.test --pyargs keratin --cov-report term-missing --cov=keratin
after_success:
- coveralls
| language: python
sudo: false
env:
global:
- CONDA_DEPS="pip flake8 pytest numpy scipy" PIP_DEPS="coveralls pytest-cov tensorflow keras" MINICONDA_URL="https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh"
python:
- '2.7'
- '3.5'
- '3.6'
before_install:
- export MINICONDA=$HOME/miniconda
- export PATH="$MINICONDA/bin:$PATH"
- hash -r
- echo $MINICONDA_URL
- wget $MINICONDA_URL -O miniconda.sh;
- bash miniconda.sh -b -f -p $MINICONDA;
- conda config --set always_yes yes
- conda update conda
- conda info -a
- conda config --add channels conda-forge
- conda install python=$TRAVIS_PYTHON_VERSION $CONDA_DEPS
- travis_retry pip install $PIP_DEPS
install:
- python setup.py install
script:
- flake8 --ignore N802,N806 `find . -name \*.py | grep -v setup.py | grep -v /doc/`
- mkdir for_test
- cd for_test
- py.test --pyargs keratin --cov-report term-missing --cov=keratin
after_success:
- coveralls
|
Switch to Travis container-based infrastructure | language: node_js
node_js:
- "0.10"
before_install:
- npm install -g npm
before_script:
- npm install -g grunt-cli
| language: node_js
node_js:
- "0.10"
before_install:
- npm install -g npm
before_script:
- npm install -g grunt-cli
sudo: false
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.