Instruction stringlengths 14 778 | input_code stringlengths 0 4.24k | output_code stringlengths 1 5.44k |
|---|---|---|
Add a few more dependencies. | name: eigenmodes-nanodisc-with-particle
dependencies:
- python
- ipython
- jupyter
- matplotlib>=1.5
- numpy
- pandas
- statsmodels
| name: eigenmodes-nanodisc-with-particle
dependencies:
- python
- future
- ipython
- jupyter
- matplotlib>=1.5
- numexpr
- numpy
- pandas
- statsmodels
- pip:
- brewer2mpl
|
Switch to noarch and entrypoints. | {% set name = "eliot-tree" %}
{% set version = "18.1.1" %}
package:
name: {{ name|lower }}
version: {{ version }}
source:
url: https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/{{ name }}-{{ version }}.tar.gz
sha256: d492154cbc4093faf4492dbf503e87f51e27f89c7fc8697201157e0fb8d7d027
build:
number: 0
skip: true # [py<35 or win]
script: "{{ PYTHON }} -m pip install . -vv"
requirements:
host:
- python >= 3.5
- pip
run:
- python >=3.5
- six
- jmespath
- iso8601
- tree-format
- termcolor
- toolz
- eliot
test:
commands:
- eliot-tree --help
about:
home: https://github.com/jonathanj/eliottree
license: MIT
license_family: MIT
license_file: LICENSE
summary: 'Render Eliot logs as an ASCII tree'
description: 'Render Eliot logs as an ASCII tree'
doc_url: https://github.com/jonathanj/eliottree
dev_url: https://github.com/jonathanj/eliottree
extra:
recipe-maintainers:
- itamarst
| {% set name = "eliot-tree" %}
{% set version = "18.1.1" %}
package:
name: {{ name|lower }}
version: {{ version }}
source:
url: https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/{{ name }}-{{ version }}.tar.gz
sha256: d492154cbc4093faf4492dbf503e87f51e27f89c7fc8697201157e0fb8d7d027
build:
number: 0
noarch: python
script: "{{ PYTHON }} -m pip install . -vv"
entry_points:
- eliot-tree = eliottree._cli:main
requirements:
host:
- python >= 3.5
- pip
run:
- python >=3.5
- six
- jmespath
- iso8601
- tree-format
- termcolor
- toolz
- eliot
test:
commands:
- eliot-tree --help
about:
home: https://github.com/jonathanj/eliottree
license: MIT
license_family: MIT
license_file: LICENSE
summary: 'Render Eliot logs as an ASCII tree'
description: 'Render Eliot logs as an ASCII tree'
doc_url: https://github.com/jonathanj/eliottree
dev_url: https://github.com/jonathanj/eliottree
extra:
recipe-maintainers:
- itamarst
|
Add node 16 to GitHub action | name: Build
on:
push:
branches:
- master
pull_request:
branches:
- master
jobs:
build:
runs-on: ubuntu-latest
strategy:
matrix:
node-version: ['10.x', '12.x', '14.x']
steps:
- name: Checkout
uses: actions/checkout@v1
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v1
with:
node-version: ${{ matrix.node-version }}
- name: Cache modules
uses: actions/cache@v1
with:
path: ~/.npm
key: ${{ runner.OS }}-node-${{ hashFiles('package-lock.json') }}
restore-keys: |
${{ runner.OS }}-node-
${{ runner.OS }}-
- name: Install
run: npm ci
- name: Lint
run: npm run lint
- name: Test
run: npm test
| name: Build
on:
push:
branches:
- master
pull_request:
branches:
- master
jobs:
build:
runs-on: ubuntu-latest
strategy:
matrix:
node-version: ['10.x', '12.x', '14.x', '16.x']
steps:
- name: Checkout
uses: actions/checkout@v1
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v1
with:
node-version: ${{ matrix.node-version }}
- name: Cache modules
uses: actions/cache@v1
with:
path: ~/.npm
key: ${{ runner.OS }}-node-${{ hashFiles('package-lock.json') }}
restore-keys: |
${{ runner.OS }}-node-
${{ runner.OS }}-
- name: Install
run: npm ci
- name: Lint
run: npm run lint
- name: Test
run: npm test
|
Remove import check for lack of dgl | {% set name = "dgllife" %}
{% set version = "0.2.8" %}
package:
name: {{ name|lower }}
version: {{ version }}
source:
url: https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/dgllife-{{ version }}.tar.gz
sha256: 197cfdaee81e486cf42894be6b28d80246296646168d932534868a9c95c0b855
build:
number: 0
noarch: python
script: {{ PYTHON }} -m pip install . -vv
requirements:
host:
- pip
- python >=3.6
run:
- hyperopt
- joblib
- networkx >=2.1
- numpy >=1.14.0
- pandas
- python >=3.6
- requests >=2.22.0
- scikit-learn >=0.22.2
- scipy >=1.1.0
- tqdm
test:
imports:
- dgllife
- dgllife.data
commands:
- pip check
requires:
- pip
about:
home: https://github.com/awslabs/dgl-lifesci
summary: DGL-based package for Life Science
license: Apache-2.0
license_file: LICENSE
extra:
recipe-maintainers:
- hadim
| {% set name = "dgllife" %}
{% set version = "0.2.8" %}
package:
name: {{ name|lower }}
version: {{ version }}
source:
url: https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/dgllife-{{ version }}.tar.gz
sha256: 197cfdaee81e486cf42894be6b28d80246296646168d932534868a9c95c0b855
build:
number: 0
noarch: python
script: {{ PYTHON }} -m pip install . -vv
requirements:
host:
- pip
- python >=3.6
run:
- hyperopt
- joblib
- networkx >=2.1
- numpy >=1.14.0
- pandas
- python >=3.6
- requests >=2.22.0
- scikit-learn >=0.22.2
- scipy >=1.1.0
- tqdm
test:
commands:
- pip check
requires:
- pip
about:
home: https://github.com/awslabs/dgl-lifesci
summary: DGL-based package for Life Science
license: Apache-2.0
license_file: LICENSE
extra:
recipe-maintainers:
- hadim
|
Revert "Update gradle to 3.4.1" | ---
- hosts: all
vars:
cf_cli_version: "6.25.0"
maven_version: "3.3.9"
maven_checksum: "516923b3955b6035ba6b0a5b031fbd8b"
sbt_version: "0.13.13"
intellij_version: "2016.3.4"
sts_version: "3.8.3"
eclipse_version: "4.6.2"
docker_compose_version: "1.11.2"
atom_version: "1.14.4"
gradle_version: "3.4.1"
gather_facts: yes
become: yes
roles:
- disable_auto_update
- developer_packages
- atom
- docker_service
- docker_compose
- cf_cli
- maven
- sbt
- gradle
- intellij
- sts
- unity_desktop
| ---
- hosts: all
vars:
cf_cli_version: "6.25.0"
maven_version: "3.3.9"
maven_checksum: "516923b3955b6035ba6b0a5b031fbd8b"
sbt_version: "0.13.13"
intellij_version: "2016.3.4"
sts_version: "3.8.3"
eclipse_version: "4.6.2"
docker_compose_version: "1.11.2"
atom_version: "1.14.4"
gradle_version: "3.4"
gather_facts: yes
become: yes
roles:
- disable_auto_update
- developer_packages
- atom
- docker_service
- docker_compose
- cf_cli
- maven
- sbt
- gradle
- intellij
- sts
- unity_desktop
|
Update from Hackage at 2016-03-03T23:24:57+0000 | homepage: https://github.com/relrod/gitignore
changelog-type: ''
hash: 1a67666770b85818149577ae503a78e362b0c582d12675c5a5f047f5c37f1249
test-bench-deps: {}
maintainer: ricky@elrod.me
synopsis: Apply GitHub .gitignore templates to already existing repositories.
changelog: ''
basic-deps:
bytestring: ! '>=0.10 && <1'
base: ! '>=4 && <5'
base64-bytestring: ! '>=1 && <2'
text: ! '>=0.11 && <1.3'
network: ! '>=2.4 && <3'
http-conduit: ! '>=2 && <3'
aeson: ! '>=0.8 && <0.9'
safe: ! '>=0.3 && <0.4'
all-versions:
- '1.0.0'
- '1.0.1'
author: Ricky Elrod
latest: '1.0.1'
description-type: haddock
description: Apply GitHub .gitignore templates to already existing repositories.
license-name: BSD3
| homepage: https://github.com/relrod/gitignore
changelog-type: ''
hash: b104cdf531064b7e76f888dfd49e658d9605b23bc7eb0df138faa935f78048a2
test-bench-deps: {}
maintainer: ricky@elrod.me
synopsis: Apply GitHub .gitignore templates to already existing repositories.
changelog: ''
basic-deps:
bytestring: ! '>=0.10 && <1'
base: ! '>=4 && <5'
base64-bytestring: ! '>=1 && <2'
text: ! '>=0.11 && <1.3'
network: ! '>=2.4 && <3'
http-conduit: ! '>=2 && <3'
aeson: ! '>=0.8 && <1'
safe: ! '>=0.3 && <0.4'
all-versions:
- '1.0.0'
- '1.0.1'
- '1.1'
author: Ricky Elrod
latest: '1.1'
description-type: haddock
description: Apply GitHub .gitignore templates to already existing repositories.
license-name: BSD3
|
Correct serial number to latest deployment | node_meta_data:
description: Deep Profiler Dock 1A
location: Slope Base
node_id_name: SN0204-PD01A
oms_sample_rate: 10
reference_designator: RS01SBPD-PD01A
serial_number: SN0204
read_only_mode: True
node_streams:
deep_profiler_dock_eng_data:
- name: 00-ENG
| node_meta_data:
description: Deep Profiler Dock 1A
location: Slope Base
node_id_name: SN0202-PD01A
oms_sample_rate: 10
reference_designator: RS01SBPD-PD01A
serial_number: SN0202
read_only_mode: True
node_streams:
deep_profiler_dock_eng_data:
- name: 00-ENG
|
Add Kimurai framework for scraping | name: Web Content Scrapers
description:
projects:
- anemone
- arachnid2
- boilerpipe-ruby
- cobweb
- data_miner
- docparser
- fletcher
- horsefield
- link_thumbnailer
- metainspector
- pismo
- sinew
- url_scraper
- wiki-api
- wombat
| name: Web Content Scrapers
description:
projects:
- anemone
- arachnid2
- boilerpipe-ruby
- cobweb
- data_miner
- docparser
- fletcher
- horsefield
- kimurai
- link_thumbnailer
- metainspector
- pismo
- sinew
- url_scraper
- wiki-api
- wombat
|
Remove attempts field from peer grading template | ---
metadata:
display_name: Peer Grading Interface
attempts: 1
use_for_single_location: False
link_to_location: None
is_graded: False
max_grade: 1
weight: ""
data: |
<peergrading>
</peergrading>
children: []
| ---
metadata:
display_name: Peer Grading Interface
use_for_single_location: False
link_to_location: None
is_graded: False
max_grade: 1
weight: ""
data: |
<peergrading>
</peergrading>
children: []
|
Make sure apache2 is started | ---
#
# Copyright (c) 2014 Davide Guerri <davide.guerri@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
- meta: flush_handlers
- include: packages.yml
- include: configuration.yml
- meta: flush_handlers
| ---
#
# Copyright (c) 2014 Davide Guerri <davide.guerri@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
- meta: flush_handlers
- include: packages.yml
- include: configuration.yml
- meta: flush_handlers
- name: Make sure Apache is started
service: name=apache2 state=started
|
Test with multiple Ruby versions | name: build
on: [push, pull_request]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: ruby/setup-ruby@v1
with:
ruby-version: 3.1
bundler-cache: true
- run: bundle exec rake test
| name: build
on: [push, pull_request]
jobs:
build:
strategy:
fail-fast: false
matrix:
ruby: [2.6, 2.7, "3.0", 3.1]
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: ruby/setup-ruby@v1
with:
ruby-version: ${{ matrix.ruby }}
bundler-cache: true
- run: bundle exec rake test
|
Add new init script to lint | ---
name: Shell
on:
push:
paths:
- .github/workflows/shell.yml
- arch/bootstrap
- arch/go
- arch/pacstrap
- debian/go
- '**.bash'
jobs:
shellcheck:
name: shellcheck
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Run shellcheck
run: |
shellcheck --version
shellcheck arch/bootstrap
shellcheck arch/go
shellcheck arch/pacstrap
shellcheck debian/go
find . -name '*.bash' | xargs shellcheck
shfmt:
name: shfmt
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Download shfmt
run: |
wget -O shfmt https://github.com/mvdan/sh/releases/download/v3.2.0/shfmt_v3.2.0_linux_amd64
chmod +x shfmt
- name: Run shfmt
run: |
./shfmt --version
./shfmt -s -i 2 -ci -sr -d arch/bootstrap
./shfmt -s -i 2 -ci -sr -d arch/go
./shfmt -s -i 2 -ci -sr -d arch/pacstrap
./shfmt -s -i 2 -ci -sr -d debian/go
find . -name '*.bash' | xargs ./shfmt -s -i 2 -ci -sr -d
| ---
name: Shell
on:
push:
paths:
- .github/workflows/shell.yml
- arch/bootstrap
- arch/go
- arch/init
- arch/pacstrap
- debian/go
- '**.bash'
jobs:
shellcheck:
name: shellcheck
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Run shellcheck
run: |
shellcheck --version
shellcheck arch/bootstrap
shellcheck arch/go
shellcheck arch/init
shellcheck arch/pacstrap
shellcheck debian/go
find . -name '*.bash' | xargs shellcheck
shfmt:
name: shfmt
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Download shfmt
run: |
wget -O shfmt https://github.com/mvdan/sh/releases/download/v3.2.0/shfmt_v3.2.0_linux_amd64
chmod +x shfmt
- name: Run shfmt
run: |
./shfmt --version
./shfmt -s -i 2 -ci -sr -d arch/bootstrap
./shfmt -s -i 2 -ci -sr -d arch/go
./shfmt -s -i 2 -ci -sr -d arch/init
./shfmt -s -i 2 -ci -sr -d arch/pacstrap
./shfmt -s -i 2 -ci -sr -d debian/go
find . -name '*.bash' | xargs ./shfmt -s -i 2 -ci -sr -d
|
Use the new exclusion format in Rubocop | AllCops:
Include:
- Berksfile
- Gemfile
- Rakefile
- Thorfile
- Guardfile
Exclude:
- vendor/**
ClassLength:
Enabled: false
Documentation:
Enabled: false
Encoding:
Enabled: false
HashSyntax:
Enabled: false
LineLength:
Enabled: false
MethodLength:
Enabled: false
SignalException:
Enabled: false
TrailingComma:
Enabled: false
WordArray:
Enabled: false
| AllCops:
Include:
- Berksfile
- Gemfile
- Rakefile
- Thorfile
- Guardfile
Exclude:
- vendor/**/*
ClassLength:
Enabled: false
Documentation:
Enabled: false
Encoding:
Enabled: false
HashSyntax:
Enabled: false
LineLength:
Enabled: false
MethodLength:
Enabled: false
SignalException:
Enabled: false
TrailingComma:
Enabled: false
WordArray:
Enabled: false
|
Test on CentOS 6.9 in TK | driver:
name: vagrant
provisioner:
name: chef_zero
deprecations_as_errors: true
verifier:
name: inspec
platforms:
- name: centos-6.8
- name: centos-7.3
- name: debian-7.11
- name: debian-8.7
- name: fedora-25
- name: opensuse-leap-42.2
- name: ubuntu-14.04
- name: ubuntu-16.04
suites:
- name: default
run_list:
- recipe[test::default]
- name: instance
run_list:
- recipe[test::instance]
- name: runit_instance
run_list:
- recipe[test::runit]
| driver:
name: vagrant
provisioner:
name: chef_zero
deprecations_as_errors: true
verifier:
name: inspec
platforms:
- name: centos-6.9
- name: centos-7.3
- name: debian-7.11
- name: debian-8.7
- name: fedora-25
- name: opensuse-leap-42.2
- name: ubuntu-14.04
- name: ubuntu-16.04
suites:
- name: default
run_list:
- recipe[test::default]
- name: instance
run_list:
- recipe[test::instance]
- name: runit_instance
run_list:
- recipe[test::runit]
|
Set latest-milestone back to 1.20 | title: Efficient watch resumption after kube-apiserver reboot
kep-number: 1904
authors:
- "@wojtek-t"
owning-sig: sig-api-machinery
participating-sigs:
- sig-scalability
status: implementable
creation-date: 2020-07-23
reviewers:
- "@jpbetz"
approvers:
- "@deads2k"
- "@lavalamp"
prr-approvers:
see-also:
- "/keps/sig-api-machinery/20191210-consistent-reads-from-cache.md"
replaces:
# The target maturity stage in the current dev cycle for this KEP.
stage: alpha
# The most recent milestone for which work toward delivery of this KEP has been
# done. This can be the current (upcoming) milestone, if it is being actively
# worked on.
latest-milestone: "v1.21"
# The milestone at which this feature was, or is targeted to be, at each stage.
milestone:
alpha: "v1.20"
beta: "v1.21"
stable: "v1.23"
# The following PRR answers are required at alpha release
# List the feature gate name and the components for which it must be enabled
feature-gates:
- name: EfficientWatchResumption
components:
- kube-apiserver
disable-supported: true
# The following PRR answers are required at beta release
metrics:
- etcd_bookmark_counts
| title: Efficient watch resumption after kube-apiserver reboot
kep-number: 1904
authors:
- "@wojtek-t"
owning-sig: sig-api-machinery
participating-sigs:
- sig-scalability
status: implementable
creation-date: 2020-07-23
reviewers:
- "@jpbetz"
approvers:
- "@deads2k"
- "@lavalamp"
prr-approvers:
see-also:
- "/keps/sig-api-machinery/20191210-consistent-reads-from-cache.md"
replaces:
# The target maturity stage in the current dev cycle for this KEP.
stage: alpha
# The most recent milestone for which work toward delivery of this KEP has been
# done. This can be the current (upcoming) milestone, if it is being actively
# worked on.
latest-milestone: "v1.20"
# The milestone at which this feature was, or is targeted to be, at each stage.
milestone:
alpha: "v1.20"
beta: "v1.21"
stable: "v1.23"
# The following PRR answers are required at alpha release
# List the feature gate name and the components for which it must be enabled
feature-gates:
- name: EfficientWatchResumption
components:
- kube-apiserver
disable-supported: true
# The following PRR answers are required at beta release
metrics:
- etcd_bookmark_counts
|
Create a pull request to production on staging merge. | on: push
jobs:
create_pull_request_to_production:
runs-on: ubuntu-latest
steps:
- uses: actions/github-script@0.8.0
with:
script: |
let response = await github.pulls.get({
owner: 'easystorage',
repo: 'aws-cli',
pull_number: 37
})
const staging_pull_request = response.data
response = await github.pulls.get({
owner: 'easystorage',
repo: 'aws-cli',
pull_number: 38
})
const production_pull_request = response.data
github.pulls.create({
owner: 'easystorage',
repo: 'aws-cli',
title: staging_pull_request.title.replace(/deploy to staging/i, 'Deploy to Production'),
head: 'ga-test11',
base: 'master',
body: staging_pull_request.body
}).then(({ data, headers, status }) => {
console.log('created!')
}).catch((data) => {
console.log(data)
if (data.errors[0].message.startsWith('A pull request already exists')) {
console.log('exists')
github.issues.removeLabel({
owner: 'easystorage',
repo: 'aws-cli',
issue_number: 38,
name: 'In Progress'
});
}
})
| on:
pull_request:
branches:
- staging
types:
- closed
jobs:
create_pull_request_to_production:
runs-on: ubuntu-latest
if: github.event.pull_request.merged
steps:
- uses: actions/github-script@0.8.0
with:
script: |
const staging_pull_request = context.payload.pull_request
github.pulls.create({
owner: context.repo.owner,
repo: context.repo.repo,
title: staging_pull_request.title.replace(/deploy to staging/i, 'Deploy to Production'),
head: staging_pull_request.head.ref,
base: 'master',
body: staging_pull_request.body
}).then(({ data, headers, status }) => {
let production_pull_request = data
github.issues.addAssignees({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: production_pull_request.number,
assignees: staging_pull_request.assignees.map((user) => { return user.login })
})
}).catch((data) => {
if (data.errors[0].message.startsWith('A pull request already exists')) {
github.issues.removeLabel({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: production_pull_request.number,
name: 'In Progress'
});
}
})
|
Build linux distribution on github. | # This workflow will do a clean install of node dependencies, build the source code and run tests across different versions of node
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions
name: Node.js CI
on:
push:
branches: [master]
pull_request:
branches: [master]
jobs:
build:
runs-on: ubuntu-latest
strategy:
matrix:
node-version: [12.x]
steps:
- uses: actions/checkout@v2
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v1
with:
node-version: ${{ matrix.node-version }}
- run: sudo apt-get update
- run: sudo apt-get install --fix-missing libpixman-1-dev libcairo-dev libsdl-pango-dev libjpeg-dev libgif-dev
- run: npm ci
- run: npm run version-stamp
- run: npm rebuild canvas --update-binary
- run: npm run build-spa
- run: npm run test
- run: npm run self-check
- run: npm run gen
| # This workflow will do a clean install of node dependencies, build the source code and run tests across different versions of node
# For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions
name: Node.js CI
on:
push:
branches: [master]
pull_request:
branches: [master]
jobs:
build:
runs-on: ubuntu-latest
strategy:
matrix:
node-version: [12.x]
steps:
- uses: actions/checkout@v2
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v1
with:
node-version: ${{ matrix.node-version }}
- run: sudo apt-get update
- run: sudo apt-get install --fix-missing libpixman-1-dev libcairo-dev libsdl-pango-dev libjpeg-dev libgif-dev
- run: npm ci
- run: npm run version-stamp
- run: npm rebuild canvas --update-binary
- run: npm run build-spa
- run: npm run test
- run: npm run self-check
- run: npm run gen
- run: npm run dist-linux
|
Replace Composer setting we don't want with GitHub setting we do want | version: 2
dependencies:
- type: php
path: /
settings:
composer_options: "--ignore-platform-reqs"
| version: 2
dependencies:
- type: php
settings:
github_labels:
- "scope:distribution"
|
Update from Hackage at 2018-01-28T15:45:50Z | homepage: ''
changelog-type: ''
hash: 37ff15bcf859dae1b86e3fa942abd3e11750fcc60f568fa67653c08363023c67
test-bench-deps:
MonadRandom: -any
base: ! '>=4.7 && <5'
haskell-ml: -any
maintainer: capn.freako@gmail.com
synopsis: Machine learning in Haskell
changelog: ''
basic-deps:
MonadRandom: -any
base: ! '>=4.7 && <5'
text: -any
haskell-ml: -any
random-shuffle: -any
singletons: -any
binary: -any
attoparsec: -any
hmatrix: -any
vector: -any
all-versions:
- '0.4.0'
author: David Banas
latest: '0.4.0'
description-type: markdown
description: ! '# Haskell_ML
Various examples of machine learning, in Haskell.
To get started, or learn more, visit the [wiki page]( https://github.com/capn-freako/Haskell_ML/wiki).
'
license-name: BSD3
| homepage: ''
changelog-type: ''
hash: 13f4d701baccc6294ad923793716568498669d6116911b9f6c4e667f9a97017f
test-bench-deps:
MonadRandom: -any
base: ! '>=4.7 && <5'
haskell-ml: -any
maintainer: capn.freako@gmail.com
synopsis: Machine learning in Haskell
changelog: ''
basic-deps:
MonadRandom: -any
base: ! '>=4.7 && <5'
text: -any
haskell-ml: -any
random-shuffle: -any
singletons: -any
binary: -any
attoparsec: -any
hmatrix: -any
vector: -any
all-versions:
- '0.4.0'
- '0.4.1'
author: David Banas
latest: '0.4.1'
description-type: markdown
description: ! '# Haskell_ML
Various examples of machine learning, in Haskell.
To get started, or learn more, visit the [wiki page]( https://github.com/capn-freako/Haskell_ML/wiki).
'
license-name: BSD3
|
Disable this workflow that does nothing useful. | name: Java CI
on: [push]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@master
- name: Set up JDK 1.8
uses: actions/setup-java@v1
with:
java-version: 1.8
- name: Build with Gradle
run: ./gradlew -Pgpr.user=dmdirc -Pgpr.key=${{ secrets.GITHUB_TOKEN }} publish
| name: Java CI
on:
push:
branches-ignore:
- '**'
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@master
- name: Set up JDK 1.8
uses: actions/setup-java@v1
with:
java-version: 1.8
- name: Build with Gradle
run: ./gradlew -Pgpr.user=dmdirc -Pgpr.key=${{ secrets.GITHUB_TOKEN }} publish
|
Split percy into separate job | name: Jekyll site CI
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Build the site in the jekyll/builder container
run: |
docker run \
-v ${{ github.workspace }}:/srv/jekyll -v ${{ github.workspace }}/_site:/srv/jekyll/_site \
jekyll/builder:latest /bin/bash -c "chmod 777 /srv/jekyll && jekyll build --future"
- run: ls -ltrh _site/
- name: Percy Test
uses: percy/snapshot-action@v0.1.0
with:
build-directory: "_site/"
env:
PERCY_TOKEN: ${{ secrets.PERCY_TOKEN }}
| name: Jekyll site CI
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Build the site in the jekyll/builder container
run: |
docker run \
-v ${{ github.workspace }}:/srv/jekyll -v ${{ github.workspace }}/_site:/srv/jekyll/_site \
jekyll/builder:latest /bin/bash -c "chmod 777 /srv/jekyll && jekyll build --future"
- run: ls -ltrh _site/
- run: sudo apt-get update && sudo apt-get install -y tree
- run: tree _site/
percy_snapshots:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Build the site in the jekyll/builder container
run: |
docker run \
-v ${{ github.workspace }}:/srv/jekyll -v ${{ github.workspace }}/_site:/srv/jekyll/_site \
jekyll/builder:latest /bin/bash -c "chmod 777 /srv/jekyll && jekyll build --future"
- name: Percy Test
uses: percy/snapshot-action@v0.1.0
with:
build-directory: "_site/"
env:
PERCY_TOKEN: ${{ secrets.PERCY_TOKEN }}
|
Remove cython and pip check test | {% set name = "cmeutils" %}
{% set version = "0.0.1" %}
package:
name: {{ name|lower }}
version: {{ version }}
source:
url: https://github.com/cmelab/cmeutils/archive/refs/tags/v{{ version }}.tar.gz
sha256: bc8ba46d485e35d45f9d0e719a139afaaacf534ae44e55c1ca1f7ac314671067
build:
noarch: python
script: {{ PYTHON }} -m pip install . -vv
number: 0
requirements:
host:
- python >=3.6
- pip
run:
- python >=3.6
- cython
- freud
- gsd
- numpy
- matplotlib-base
- pymbar
- rowan
test:
imports:
- cmeutils
requires:
- pip
commands:
- pip check
about:
home: https://github.com/cmelab/cmeutils
summary: 'Useful functions by and for the CME lab.'
description: |
Helpful functions used by the CME Lab.
license: GPL-3.0-or-later
license_family: GPL
license_file: LICENSE
doc_url: https://github.com/cmelab/cmeutils
dev_url: https://github.com/cmelab/cmeutils
extra:
recipe-maintainers:
- jennyfothergill
- chrisjonesBSU
| {% set name = "cmeutils" %}
{% set version = "0.0.1" %}
package:
name: {{ name|lower }}
version: {{ version }}
source:
url: https://github.com/cmelab/cmeutils/archive/refs/tags/v{{ version }}.tar.gz
sha256: bc8ba46d485e35d45f9d0e719a139afaaacf534ae44e55c1ca1f7ac314671067
build:
noarch: python
script: {{ PYTHON }} -m pip install . -vv
number: 0
requirements:
host:
- python >=3.6
- pip
run:
- python >=3.6
- freud
- gsd
- numpy
- matplotlib-base
- pymbar
- rowan
test:
imports:
- cmeutils
requires:
- pip
about:
home: https://github.com/cmelab/cmeutils
summary: 'Useful functions by and for the CME lab.'
description: |
Helpful functions used by the CME Lab.
license: GPL-3.0-or-later
license_family: GPL
license_file: LICENSE
doc_url: https://github.com/cmelab/cmeutils
dev_url: https://github.com/cmelab/cmeutils
extra:
recipe-maintainers:
- jennyfothergill
- chrisjonesBSU
|
Add force to ansible docker_image | ---
- name: Pull image
docker_image:
name: banjocat/jackmuratore
- name: Start container
docker_container:
name: jackmuratore
image: banjocat/jackmuratore
recreate: yes
exposed_ports:
- 8000
| ---
- name: Pull image
docker_image:
name: banjocat/jackmuratore
force: yes
- name: Start container
docker_container:
name: jackmuratore
image: banjocat/jackmuratore
recreate: yes
exposed_ports:
- 8000
|
Copy the file myself then! | image: Windows Server 2012 R2
#install:
# - C:\MinGW\bin\mingw-get install gcc
build_script:
- bash -c "mkdir -p /c/projects/crashjvm/target/classes/nativelibs/amd64/"
- call "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" x64
- cl.exe src\main\java\uk\me\mjt\CrashJvm.c /I %JAVA_HOME%\include /I %JAVA_HOME%\include\win32 -FeCrashJvm.dll -MD -LD
- dir
#/link /DLL /out:target\classes\nativelibs\amd64\CrashJvm.dll
- mvn clean install --batch-mode
| image: Windows Server 2012 R2
#install:
# - C:\MinGW\bin\mingw-get install gcc
build_script:
- bash -c "mkdir -p /c/projects/crashjvm/target/classes/nativelibs/amd64/"
- call "C:\Program Files (x86)\Microsoft Visual Studio 14.0\VC\vcvarsall.bat" x64
- cl.exe src\main\java\uk\me\mjt\CrashJvm.c /I %JAVA_HOME%\include /I %JAVA_HOME%\include\win32 -FeCrashJvm.dll -MD -LD
- copy CrashJvm.dll target\classes\nativelibs\amd64\
- dir target\classes\nativelibs\amd64\
#/link /DLL /out:target\classes\nativelibs\amd64\CrashJvm.dll
- mvn clean install --batch-mode
|
Use Install-Product instead of Update-NodeJsInstallation | # appveyor file
# http://www.appveyor.com/docs/appveyor-yml
init:
- git config --global core.autocrlf input
# what combinations to test
environment:
matrix:
#node.js
- nodejs_version: 0.10
- nodejs_version: 0.11
- nodejs_version: 0.12
#io.js
- nodejs_version: 2.5.0
install:
- ps: Update-NodeJsInstallation (Get-NodeJsLatestBuild $env:nodejs_version)
- npm -g install npm@2
- set PATH=%APPDATA%\npm;%PATH%
- npm install -g gulp
- npm install
build: off
test_script:
- node --version
- npm --version
- ps: gulp test
- cmd: gulp test
| # appveyor file
# http://www.appveyor.com/docs/appveyor-yml
init:
- git config --global core.autocrlf input
# what combinations to test
environment:
matrix:
#node.js
- nodejs_version: 0.10
- nodejs_version: 0.11
- nodejs_version: 0.12
#io.js
- nodejs_version: 2.5.0
install:
- ps: Install-Product node $env:nodejs_version x64
- npm -g install npm@2
- set PATH=%APPDATA%\npm;%PATH%
- npm install -g gulp
- npm install
build: off
test_script:
- node --version
- npm --version
- ps: gulp test
- cmd: gulp test
|
CHANGE to the latest version of the Stack & APM | # Version to install
elastic_version: 7.1.1
elastic_download: https://artifacts.elastic.co
apm_java_version: 1.6.1
# Elastic Cloud credentials
elasticsearch_host: "{{ lookup('env','ELASTICSEARCH_HOST') }}"
elasticsearch_user: "{{ lookup('env','ELASTICSEARCH_USER') }}"
elasticsearch_password: "{{ lookup('env','ELASTICSEARCH_PASSWORD') }}"
kibana_host: "{{ lookup('env','KIBANA_HOST') }}"
kibana_id: "{{ lookup('env','KIBANA_ID') }}"
# Setup of the infrastructure
env: production
domain: xeraa.wtf
backend_server: https://backend.{{ domain }}
frontend_server: https://frontend.{{ domain }}
apm_server: "{{ lookup('env','APM_HOST') }}"
apm_secret: "{{ lookup('env','APM_TOKEN') }}"
# MySQL config
mysql_server: backend.{{ domain }}
mysql_user: "{{ lookup('env','ELASTICSEARCH_USER') }}"
mysql_password: "{{ lookup('env','ELASTICSEARCH_PASSWORD') }}"
mysql_database: person
# Credentials for Kibana dashboard-only mode
attendee_user: dashboard
attendee_password: secret
| # Version to install
elastic_version: 7.2.0
elastic_download: https://artifacts.elastic.co
apm_java_version: 1.7.0
# Elastic Cloud credentials
elasticsearch_host: "{{ lookup('env','ELASTICSEARCH_HOST') }}"
elasticsearch_user: "{{ lookup('env','ELASTICSEARCH_USER') }}"
elasticsearch_password: "{{ lookup('env','ELASTICSEARCH_PASSWORD') }}"
kibana_host: "{{ lookup('env','KIBANA_HOST') }}"
kibana_id: "{{ lookup('env','KIBANA_ID') }}"
# Setup of the infrastructure
env: production
domain: xeraa.wtf
backend_server: https://backend.{{ domain }}
frontend_server: https://frontend.{{ domain }}
apm_server: "{{ lookup('env','APM_HOST') }}"
apm_secret: "{{ lookup('env','APM_TOKEN') }}"
# MySQL config
mysql_server: backend.{{ domain }}
mysql_user: "{{ lookup('env','ELASTICSEARCH_USER') }}"
mysql_password: "{{ lookup('env','ELASTICSEARCH_PASSWORD') }}"
mysql_database: person
# Credentials for Kibana dashboard-only mode
attendee_user: dashboard
attendee_password: secret
|
Fix a type while excluding GeSHi source | imports:
- php
filter:
excluded_paths:
- 'geschi/*'
tools:
php_sim: true
php_cpd: false
| imports:
- php
filter:
excluded_paths:
- 'geshi/*'
- 'geshi.php'
tools:
php_sim: true
php_cpd: false
|
Add Go 1.10 to the supported Travis versions | language: go
go:
- 1.8
- 1.9
- tip
script:
- go test -v -coverprofile=coverage.txt -covermode=atomic
after_success:
- bash <(curl -s https://codecov.io/bash)
addons:
code_climate:
repo_token: 42136a24528a7d52e98adbe0d5126c8dadda18158a8559eec3ee4f73a2053436
| language: go
go:
- 1.8
- 1.9
- 1.10
- tip
script:
- go test -v -coverprofile=coverage.txt -covermode=atomic
after_success:
- bash <(curl -s https://codecov.io/bash)
addons:
code_climate:
repo_token: 42136a24528a7d52e98adbe0d5126c8dadda18158a8559eec3ee4f73a2053436
|
Use `make check` on Travis CI | #- Travis CI configuration file
#- .travis.yml ~~
# ~~ (c) SRW, 15 Jul 2013
# ~~ last updated 08 Feb 2015
language: node_js
node_js:
- '0.12'
- '0.10'
- '0.8'
- '0.6'
sudo: false
#- vim:set syntax=yaml:
| #- Travis CI configuration file
#- .travis.yml ~~
# ~~ (c) SRW, 15 Jul 2013
# ~~ last updated 13 Feb 2015
language: node_js
node_js:
- '0.12'
- '0.10'
- '0.8'
- '0.6'
script:
- make check
sudo: false
#- vim:set syntax=yaml:
|
Use the latest version of g++ | sudo: false
language: node_js
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- doxygen
- g++-4.8
before_install:
- if [[ "$TRAVIS_OS_NAME" == "linux" ]]; then export CXX=g++-4.8; fi
os:
- osx
- linux
node_js:
- stable
- 7
- 6
- 5
- 4
install:
- npm -g i npm@latest
- npm i --build-from-source
scripts:
- npm test
- npm run deploy
| sudo: false
language: node_js
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- doxygen
- g++
os:
- osx
- linux
node_js:
- stable
- 7
- 6
- 5
- 4
install:
- npm -g i npm@latest
- npm i --build-from-source
scripts:
- npm test
- npm run deploy
|
Add Ruby 2.4 to Travis-CI | language: ruby
rvm:
- 2.1.10
- 2.2.5
- 2.3.1
services:
- redis-server
script:
- bundle exec rake spec && bundle exec codeclimate-test-reporter
| language: ruby
rvm:
- 2.1.10
- 2.2.5
- 2.3.1
- 2.4.0
services:
- redis-server
script:
- bundle exec rake spec && bundle exec codeclimate-test-reporter
|
Allow usage of container-based infrastructure | language: rust
script:
- cargo test --features="live_tests"
- cargo test --features="tls"
after_success: |
[ $TRAVIS_BRANCH = master ] &&
[ $TRAVIS_PULL_REQUEST = false ] &&
cargo doc &&
echo "<meta http-equiv=refresh content=0;url=`echo $TRAVIS_REPO_SLUG | cut -d '/' -f 2`/index.html>" > target/doc/index.html &&
sudo pip install ghp-import &&
ghp-import -n target/doc &&
git push -fq https://${GH_TOKEN}@github.com/${TRAVIS_REPO_SLUG}.git gh-pages
env:
global:
secure: eSYRUJ2wTq1g6AiPp0zvtxVJFn/3FnrCRAJmGCN1TBYpnl11ZvLZfhUA9IC4S48/YVmdeP1pywpIjY3ZGk7gWuaRLpTrwBxgm01RbOglQS1if6Pryc01FcwCSGb1fJKY4qR0v6iQRb23jaFfSELHfThf4rmG4QiKiNviHJRzb0c=
| language: rust
script:
- cargo test --features="live_tests"
- cargo test --features="tls"
sudo: false
after_success: |
[ $TRAVIS_BRANCH = master ] &&
[ $TRAVIS_PULL_REQUEST = false ] &&
cargo doc &&
echo "<meta http-equiv=refresh content=0;url=`echo $TRAVIS_REPO_SLUG | cut -d '/' -f 2`/index.html>" > target/doc/index.html &&
pip install --user ghp-import &&
/home/travis/.local/bin/ghp-import -n target/doc &&
git push -fq https://${GH_TOKEN}@github.com/${TRAVIS_REPO_SLUG}.git gh-pages
env:
global:
secure: eSYRUJ2wTq1g6AiPp0zvtxVJFn/3FnrCRAJmGCN1TBYpnl11ZvLZfhUA9IC4S48/YVmdeP1pywpIjY3ZGk7gWuaRLpTrwBxgm01RbOglQS1if6Pryc01FcwCSGb1fJKY4qR0v6iQRb23jaFfSELHfThf4rmG4QiKiNviHJRzb0c=
|
Drop node.js 8 support, add 14, lint with 14 (semver-major) | language: node_js
node_js:
- 8
- 10
- 12
- node
matrix:
include:
- name: Lint
node_js: 12
script: npm run lint
script: npm run test:ci
after_success: '<coverage/lcov.info ./node_modules/coveralls/bin/coveralls.js'
| language: node_js
node_js:
- 10
- 12
- 14
- node
matrix:
include:
- name: Lint
node_js: 14
script: npm run lint
script: npm run test:ci
after_success: '<coverage/lcov.info ./node_modules/coveralls/bin/coveralls.js'
|
Remove deprecated `sudo: false` Travis-CI configuration | sudo: false
language: ruby
rvm:
- 2.5.1
- ruby-head
gemfile: "gems.rb"
before_install: gem install bundler --no-document # Fixes ruby 2.5.1 tests
before_script:
- "ruby lib/initial_setup.rb ../.travis_setup"
- "psql -c 'create database sleepdata_test;' -U postgres"
- "rails db:migrate"
matrix:
allow_failures:
- rvm: ruby-head
| language: ruby
rvm:
- 2.5.1
- ruby-head
gemfile: "gems.rb"
before_install: gem install bundler --no-document # Fixes ruby 2.5.1 tests
before_script:
- "ruby lib/initial_setup.rb ../.travis_setup"
- "psql -c 'create database sleepdata_test;' -U postgres"
- "rails db:migrate"
matrix:
allow_failures:
- rvm: ruby-head
|
Test with Node.js 0.10 on Travis CI. | sudo: false
language: node_js
node_js:
- '4'
- '6'
branches:
only:
- master
- travis-ci
before_install:
- npm install
- npm install istanbul coveralls
| sudo: false
language: node_js
node_js:
- '0.10'
- '4'
- '6'
branches:
only:
- master
- travis-ci
before_install:
- npm install
- npm install istanbul coveralls
|
Fix flake8 max line length parameter | language: python
python:
- "2.7"
install:
- pip install flake8 docutils
- sudo apt-get install solr-jetty openjdk-6-jdk
before_script:
- sudo sed -i 's/NO_START=1/NO_START=0/g' /etc/default/jetty
- sudo sed -i 's/#JETTY_HOST=$(uname -n)/JETTY_HOST=127.0.0.1/g' /etc/default/jetty
- sudo sed -i 's/#JETTY_PORT=8080/JETTY_PORT=8983/g' /etc/default/jetty
- sudo wget -O /etc/solr/conf/schema.xml https://raw.githubusercontent.com/ckan/ckan/master/ckan/config/solr/schema.xml
- sudo service jetty start
script:
- flake8 odgovlt.py
- rst2html.py --strict README.rst /dev/null
- flake8 tests
- make test
| language: python
python:
- "2.7"
install:
- pip install flake8 docutils
- sudo apt-get install solr-jetty openjdk-6-jdk
before_script:
- sudo sed -i 's/NO_START=1/NO_START=0/g' /etc/default/jetty
- sudo sed -i 's/#JETTY_HOST=$(uname -n)/JETTY_HOST=127.0.0.1/g' /etc/default/jetty
- sudo sed -i 's/#JETTY_PORT=8080/JETTY_PORT=8983/g' /etc/default/jetty
- sudo wget -O /etc/solr/conf/schema.xml https://raw.githubusercontent.com/ckan/ckan/master/ckan/config/solr/schema.xml
- sudo service jetty start
script:
- rst2html.py --strict README.rst /dev/null
- flake8 --max-line-length=120 odgovlt.py tests
- make test
|
Change Travis CI configuration file indentation. | language: python
python:
- "3.5"
- "3.4"
- "3.3"
- "2.7"
- "2.6"
install:
- "pip install flake8"
- "pip install ."
script:
- "python -m test.test_facebook"
- "flake8 --ignore=E402,F401 examples"
- "flake8 facebook"
- "flake8 test"
env:
global:
- FACEBOOK_APP_ID=198798870326423
- FACEBOOK_SECRET=2db4d76fe8a336cf292470c20a5a5684
| language: python
python:
- "3.5"
- "3.4"
- "3.3"
- "2.7"
- "2.6"
install:
- "pip install flake8"
- "pip install ."
script:
- "python -m test.test_facebook"
- "flake8 --ignore=E402,F401 examples"
- "flake8 facebook"
- "flake8 test"
env:
global:
- FACEBOOK_APP_ID=198798870326423
- FACEBOOK_SECRET=2db4d76fe8a336cf292470c20a5a5684
|
Update NPM to support new dependency glob | language: node_js
node_js:
- "0.8"
- "0.10"
- "0.11"
before_install:
- npm install -g grunt-cli
| language: node_js
node_js:
- "0.8"
- "0.10"
- "0.11"
before_install:
- npm update npm -g # For "^" dependency compatiblibty in NodeJS 0.8
- npm install -g grunt-cli
|
Install module for develop to set the correct paths | language: python
python:
- "2.6"
- "2.7"
- "3.2"
- "3.3"
install: "pip install -r requirements.txt --use-mirrors"
script: mamba specs
| language: python
python:
- "2.6"
- "2.7"
- "3.2"
- "3.3"
install: "pip install -r requirements.txt --use-mirrors"
before_script: python setup.py develop
script: mamba specs
|
Test on 8 rather than 7 | language: node_js
notifications:
email: false
before_install: npm install npm@latest -g
node_js:
- "6"
- "4"
- "7"
script:
- npm run report-coverage
| language: node_js
notifications:
email: false
before_install: npm install npm@latest -g
node_js:
- "8"
- "6"
- "4"
script:
- npm run report-coverage
|
Test on trusty in Travis | # Use Travis's cointainer based infrastructure
sudo: false
addons:
apt:
sources:
- chef-stable-precise
packages:
- chefdk
# Don't `bundle install`
install: echo "skip bundle install"
branches:
only:
- master
# Ensure we make ChefDK's Ruby the default
before_script:
- eval "$(/opt/chefdk/bin/chef shell-init bash)"
# We have to install chef-sugar for ChefSpec
- /opt/chefdk/embedded/bin/chef gem install chef-sugar
script:
- /opt/chefdk/embedded/bin/chef --version
- /opt/chefdk/embedded/bin/cookstyle --version
- /opt/chefdk/embedded/bin/cookstyle
- /opt/chefdk/embedded/bin/foodcritic --version
- /opt/chefdk/embedded/bin/foodcritic . --exclude spec
- /opt/chefdk/embedded/bin/rspec spec
| # Use Travis's cointainer based infrastructure
sudo: false
dist: trusty
addons:
apt:
sources:
- chef-stable-precise
packages:
- chefdk
# Don't `bundle install`
install: echo "skip bundle install"
branches:
only:
- master
# Ensure we make ChefDK's Ruby the default
before_script:
- eval "$(/opt/chefdk/bin/chef shell-init bash)"
# We have to install chef-sugar for ChefSpec
- /opt/chefdk/embedded/bin/chef gem install chef-sugar
script:
- /opt/chefdk/embedded/bin/chef --version
- /opt/chefdk/embedded/bin/cookstyle --version
- /opt/chefdk/embedded/bin/cookstyle
- /opt/chefdk/embedded/bin/foodcritic --version
- /opt/chefdk/embedded/bin/foodcritic . --exclude spec
- /opt/chefdk/embedded/bin/rspec spec
|
Set default dist to trusty for Travis CI. | language: go
go:
- tip
sudo: false
addons:
apt:
packages:
- python3
- python3-psycopg2
- python3-numpy
install:
- go get github.com/olt/libpq
- go get github.com/DATA-DOG/go-sqlmock
- go get github.com/axw/gocov/gocov
- go get github.com/mattn/goveralls
- go get golang.org/x/tools/cmd/cover
- pip3 install coverage
- pip3 install matplotlib
before_script:
- go vet $GOPATH/src/github.com/seadsystem/Backend/DB/landingzone/...
script:
- cd $GOPATH/src/github.com/seadsystem/Backend/DB/landingzone; go install
- cd $GOPATH/src/github.com/seadsystem/Backend/DB/api; ./test_url_parser.py
- cd $GOPATH/src/github.com/seadsystem/Backend/DB/api; ./test_db.py
after_success:
- cd $GOPATH/src/github.com/seadsystem/Backend; go test github.com/seadsystem/Backend/DB/landingzone/...
- cd $GOPATH/src/github.com/seadsystem/Backend/DB/landingzone; ./test-coverage.sh
| language: go
go:
- tip
sudo: false
dist: trusty
addons:
apt:
packages:
- python3
- python3-coverage
- python3-psycopg2
- python3-numpy
- python3-matplotlib
install:
- go get github.com/olt/libpq
- go get github.com/DATA-DOG/go-sqlmock
- go get github.com/axw/gocov/gocov
- go get github.com/mattn/goveralls
- go get golang.org/x/tools/cmd/cover
before_script:
- go vet $GOPATH/src/github.com/seadsystem/Backend/DB/landingzone/...
script:
- cd $GOPATH/src/github.com/seadsystem/Backend/DB/landingzone; go install
- cd $GOPATH/src/github.com/seadsystem/Backend/DB/api; ./test_url_parser.py
- cd $GOPATH/src/github.com/seadsystem/Backend/DB/api; ./test_db.py
after_success:
- cd $GOPATH/src/github.com/seadsystem/Backend; go test github.com/seadsystem/Backend/DB/landingzone/...
- cd $GOPATH/src/github.com/seadsystem/Backend/DB/landingzone; ./test-coverage.sh
|
Add Python 3.8 to Travis | language: python
cache: pip
matrix:
include:
- python: "3.4"
- python: "3.5"
- python: "3.6"
- python: "3.7"
dist: xenial # required for Python 3.7 (travis-ci/travis-ci#9069)
sudo: required # required for Python 3.7 (travis-ci/travis-ci#9069)
# command to install dependencies
install: "pip install -r requirements.txt"
before_script:
- pip install flake8
# stop the build if there are Python syntax errors or undefined names
- flake8 . --count --select=E901,E999,F821,F822,F823 --show-source --statistics
# exit-zero treats all errors as warnings.
- flake8 . --count --exit-zero --max-complexity=10 --statistics
# command to run tests
script: nosetests
| language: python
cache: pip
matrix:
include:
- python: "3.4"
- python: "3.5"
- python: "3.6"
- python: "3.7"
- python: "3.8"
dist: xenial # required for Python 3.7 (travis-ci/travis-ci#9069)
sudo: required # required for Python 3.7 (travis-ci/travis-ci#9069)
# command to install dependencies
install: "pip install -r requirements.txt"
before_script:
- pip install flake8
# stop the build if there are Python syntax errors or undefined names
- flake8 . --count --select=E901,E999,F821,F822,F823 --show-source --statistics
# exit-zero treats all errors as warnings.
- flake8 . --count --exit-zero --max-complexity=10 --statistics
# command to run tests
script: nosetests
|
Upgrade npm for nodejs 0.8 | before_install:
- sudo apt-get install libicu-dev
# Workaround for a permissions issue with Travis virtual machine images
# that breaks Python's multiprocessing:
# https://github.com/travis-ci/travis-cookbooks/issues/155
- sudo rm -rf /dev/shm
- sudo ln -s /run/shm /dev/shm
before_script:
- npm install -g grunt-cli
language: node_js
node_js:
- "0.11"
- "0.10"
- 0.8
| before_install:
- sudo apt-get install libicu-dev
# Workaround for a permissions issue with Travis virtual machine images
# that breaks Python's multiprocessing:
# https://github.com/travis-ci/travis-cookbooks/issues/155
- sudo rm -rf /dev/shm
- sudo ln -s /run/shm /dev/shm
- npm i -g npm
language: node_js
node_js:
- "0.11"
- "0.10"
- 0.8
|
Add more lua versions to test | language: cpp
sudo: false
compiler:
- gcc
- clang
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- gcc-4.8
- g++-4.8
env:
- LUA=lua5.1
- LUA=luajit # latest stable version (2.0.4)
- LUA=luajit2.0 # current head of 2.0 branch
- LUA=luajit2.1 # current head of 2.1 branch
install:
- if [ "$CXX" = "g++" ]; then export CXX="g++-4.8" CC="gcc-4.8"; fi
- bash .travis/setup_lua.sh
before_script:
- export LUA_DIR=$TRAVIS_BUILD_DIR/install/lua
- cd tests
- cmake . && make
script:
- ctest -V
| language: cpp
sudo: false
compiler:
- gcc
- clang
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- gcc-4.8
- g++-4.8
env:
- LUA=lua5.1
- LUA=lua5.2
- LUA=lua5.3
- LUA=luajit # latest stable version (2.0.4)
- LUA=luajit2.0 # current head of 2.0 branch
- LUA=luajit2.1 # current head of 2.1 branch
install:
- if [ "$CXX" = "g++" ]; then export CXX="g++-4.8" CC="gcc-4.8"; fi
- bash .travis/setup_lua.sh
before_script:
- export LUA_DIR=$TRAVIS_BUILD_DIR/install/lua
- cd tests
- cmake . && make
script:
- ctest -V
|
ADD some more Auditbeat rules around identities and access violations | auditbeat.modules:
- module: audit
metricsets: ["kernel"]
kernel.audit_rules: |
-w /etc/passwd -p wa -k identity
- module: audit
metricsets: [file]
file.paths:
- /opt/
file.max_file_size: 100 MiB
file.hash_types: [sha1]
name: "{{ inventory_hostname }}"
tags: ["{{ env }}", "lightsail"]
processors:
- add_cloud_metadata: ~
output.elasticsearch:
hosts: ["{{ elasticsearch_host }}"]
username: "{{ elasticsearch_user }}"
password: "{{ elasticsearch_password }}"
setup:
kibana:
host: "{{ kibana_host }}"
username: "{{ elasticsearch_user }}"
password: "{{ elasticsearch_password }}"
dashboards.enabled: true
| auditbeat.modules:
- module: audit
metricsets: ["kernel"]
kernel.audit_rules: |
## Identity changes
-w /etc/group -p wa -k identity
-w /etc/passwd -p wa -k identity
-w /etc/gshadow -p wa -k identity
## Unauthorized access attempts
-a always,exit -F arch=b64 -S open,creat,truncate,ftruncate,openat,open_by_handle_at -F exit=-EACCES -k access
-a always,exit -F arch=b64 -S open,creat,truncate,ftruncate,openat,open_by_handle_at -F exit=-EPERM -k access
- module: audit
metricsets: [file]
file.paths:
- /opt/
file.max_file_size: 100 MiB
file.hash_types: [sha1]
name: "{{ inventory_hostname }}"
tags: ["{{ env }}", "lightsail"]
processors:
- add_cloud_metadata: ~
output.elasticsearch:
hosts: ["{{ elasticsearch_host }}"]
username: "{{ elasticsearch_user }}"
password: "{{ elasticsearch_password }}"
setup:
kibana:
host: "{{ kibana_host }}"
username: "{{ elasticsearch_user }}"
password: "{{ elasticsearch_password }}"
dashboards.enabled: true
|
Use PHP_CodeSniffer, not PHP Mess Detector, for CodeClimate | engines:
duplication:
enabled: true
config:
languages:
- php
fixme:
enabled: true
phpmd:
enabled: true
ratings:
paths:
- "**.php"
exclude_paths:
- tests/*
| engines:
duplication:
enabled: true
config:
languages:
- php
fixme:
enabled: true
phpcodesniffer:
enabled: true
ratings:
paths:
- "**.php"
exclude_paths:
- tests/*
|
Revert "Temporarily nerf nbgitpuller to debug" | jupyterhub:
auth:
type: google
admin:
users:
# infrastructure
- rylo
- yuvipanda
- felder
# List of other admin users
# Professor
- mahoneymw
# GSIs
- krinsman
- theisen
# UGSI
- wqixuan
singleuser:
memory:
guarantee: 512M
limit: 1G
image:
name: gcr.io/ucb-datahub-2018/stat89a-user-image
storage:
type: hostPath
defaultUrl: "/lab"
# lifecycleHooks:
# postStart:
# exec:
# command: ["gitpuller", "https://gitlab.com/stat-89a/spring-2020/spring_2020.git", "master", "STAT\ 89A\ 2020"]
| jupyterhub:
auth:
type: google
admin:
users:
# infrastructure
- rylo
- yuvipanda
- felder
# List of other admin users
# Professor
- mahoneymw
# GSIs
- krinsman
- theisen
# UGSI
- wqixuan
singleuser:
memory:
guarantee: 512M
limit: 1G
image:
name: gcr.io/ucb-datahub-2018/stat89a-user-image
storage:
type: hostPath
defaultUrl: "/lab"
lifecycleHooks:
postStart:
exec:
command: ["gitpuller", "https://gitlab.com/stat-89a/spring-2020/spring_2020.git", "master", "STAT\ 89A\ 2020"]
|
Update from Hackage at 2018-11-16T15:13:51Z | homepage: https://github.com/apeyroux/google-isbn#readme
changelog-type: ''
hash: e27fe8bc1688122cf39ae13875c278869c975e24b72e818883bef43151425b3c
test-bench-deps: {}
maintainer: alex@px.io
synopsis: ''
changelog: ''
basic-deps:
bytestring: -any
base: ! '>=4.2 && <5'
text: -any
conduit: -any
conduit-extra: -any
http-conduit: -any
aeson: -any
all-versions:
- '1.0.2'
author: Alexandre Peyroux
latest: '1.0.2'
description-type: haddock
description: Basic utility to search an ISBN using the Google Books webservice
license-name: BSD3
| homepage: https://github.com/apeyroux/google-isbn#readme
changelog-type: ''
hash: eab010ab5691853600eb8320235573241cd94d68fa985e211e77f9d6d1e7cf4a
test-bench-deps: {}
maintainer: alex@px.io
synopsis: ''
changelog: ''
basic-deps:
bytestring: -any
base: ! '>=4.2 && <5'
text: -any
conduit: -any
conduit-extra: -any
http-conduit: -any
aeson: -any
all-versions:
- '1.0.2'
- '1.0.3'
author: Alexandre Peyroux
latest: '1.0.3'
description-type: haddock
description: Basic utility to search an ISBN using the Google Books webservice
license-name: BSD3
|
Fix github actions following branch renaming | name: CI
on:
push:
branches:
- master
pull_request: ~
jobs:
build:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [3.6, 3.7, 3.8, 3.9]
steps:
- uses: actions/checkout@v2
- name: Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
sudo apt-get install -y texlive-latex-base gettext texlive-pictures texlive-latex-extra texlive-xetex libblas-dev liblapack-dev libatlas-base-dev gfortran
pip3 install --editable ".[dev]"
- name: Lint
run: |
pre-commit run --all-files
- name: Test
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
python3 manage.py test
coverage run --source=survey --omit=survey/migrations/* ./manage.py test
coverage html
coveralls debug --service=github
| name: CI
on:
push:
branches:
- main
pull_request: ~
jobs:
build:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [3.6, 3.7, 3.8, 3.9]
steps:
- uses: actions/checkout@v2
- name: Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
sudo apt-get install -yqq texlive-latex-base gettext texlive-pictures texlive-latex-extra
sudo apt-get install -yqq texlive-xetex libblas-dev liblapack-dev libatlas-base-dev gfortran
pip3 install -e ".[dev]"
- name: Lint
run: |
pre-commit run --all-files
- name: Test
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
python3 manage.py test
coverage run --source=survey --omit=survey/migrations/* ./manage.py test
coverage html
coveralls debug --service=github
|
Mark zsh installation as updateable | ---
- name: Install zsh
homebrew: name=zsh
- name: Clone Prezto
git: >
repo=git@github.com:jcf/prezto.git
dest=~/.prezto
force=no
tags: update
- name: Link runcoms
file: src={{ item }} dest=~/.{{ item | basename }} state=link
with_items:
- ~/.prezto/runcoms/zlogin
- ~/.prezto/runcoms/zlogout
- ~/.prezto/runcoms/zpreztorc
- ~/.prezto/runcoms/zprofile
- ~/.prezto/runcoms/zshenv
- ~/.prezto/runcoms/zshrc
| ---
- name: Install zsh
homebrew: name=zsh
tags: update
- name: Clone Prezto
git: >
repo=git@github.com:jcf/prezto.git
dest=~/.prezto
force=no
tags: update
- name: Link runcoms
file: src={{ item }} dest=~/.{{ item | basename }} state=link
with_items:
- ~/.prezto/runcoms/zlogin
- ~/.prezto/runcoms/zlogout
- ~/.prezto/runcoms/zpreztorc
- ~/.prezto/runcoms/zprofile
- ~/.prezto/runcoms/zshenv
- ~/.prezto/runcoms/zshrc
|
Disable Brakeman to fix Code Climate | engines:
# Fails locally
brakeman:
enabled: true
bundler-audit:
enabled: true
# Fails locally
csslint:
enabled: false
coffeelint:
enabled: false
duplication:
enabled: true
# Fails with: comparison of Fixnum with String failed
# config:
# languages:
# ruby:
# mass_threshold: 20
eslint:
enabled: true
fixme:
enabled: true
rubocop:
enabled: true
scss-lint:
enabled: true
fixme:
enabled: true
ratings:
paths:
- "**.rb"
- "**.coffee"
- "**.js"
- "**.jsx"
- "**.css"
- Gemfile.lock
exclude_paths:
- bin/**/*
- config/**/*
- db/**/*
- features/**/*
- script/**/*
- spec/**/*
- test/**/*
- vendor/**/*
| engines:
# Fails
brakeman:
enabled: false
bundler-audit:
enabled: true
# Fails locally
csslint:
enabled: false
coffeelint:
enabled: false
duplication:
enabled: true
# Fails with: comparison of Fixnum with String failed
# config:
# languages:
# ruby:
# mass_threshold: 20
eslint:
enabled: true
fixme:
enabled: true
rubocop:
enabled: true
scss-lint:
enabled: true
fixme:
enabled: true
ratings:
paths:
- "**.rb"
- "**.coffee"
- "**.js"
- "**.jsx"
- "**.css"
- Gemfile.lock
exclude_paths:
- bin/**/*
- config/**/*
- db/**/*
- features/**/*
- script/**/*
- spec/**/*
- test/**/*
- vendor/**/*
|
Update from Hackage at 2020-04-24T21:21:49Z | homepage: ''
changelog-type: ''
hash: 20e2d8f3f86d6107402f10dbc03fd9c8f50eaebfa33ed22bd92d43102cdb1ef0
test-bench-deps: {}
maintainer: mniip@mniip.com
synopsis: A hack to use GHC.Prim primitives in GHCi
changelog: ''
basic-deps:
base: ==4.*
ghc-prim: -any
all-versions:
- 0.1.0.0
- 0.1.1.0
- 0.2.0.0
author: mniip
latest: 0.2.0.0
description-type: haddock
description: |-
As of now, the GHCi interactive runtime is incapable of working on unboxed
tuples. In particular, it is unable to fully apply any function returning an
unboxed tuple, create a function that takes a non-nullary unboxed tuple as
argument, or pass a non-nullary tuple to some other function. The usual
solution is to enable object code generation with @-fobject-code@. This
package serves as a workaround for the cases where @-fobject-code@ is
undesiable.
license-name: MIT
| homepage: ''
changelog-type: ''
hash: fa9eb400503f565f957bdb44304601fd38b84e51865f4e5e02cfa125a175c515
test-bench-deps: {}
maintainer: mniip@mniip.com
synopsis: A hack to use GHC.Prim primitives in GHCi
changelog: ''
basic-deps:
base: ==4.*
ghc-prim: -any
all-versions:
- 0.1.0.0
- 0.1.1.0
- 0.2.0.0
- 0.2.0.1
author: mniip
latest: 0.2.0.1
description-type: haddock
description: |-
As of now, the GHCi interactive runtime is incapable of working on unboxed
tuples. In particular, it is unable to fully apply any function returning an
unboxed tuple, create a function that takes a non-nullary unboxed tuple as
argument, or pass a non-nullary tuple to some other function. The usual
solution is to enable object code generation with @-fobject-code@. This
package serves as a workaround for the cases where @-fobject-code@ is
undesiable.
license-name: MIT
|
Update from Hackage at 2018-12-10T14:30:19Z | homepage: https://github.com/iokasimov/observable
changelog-type: markdown
hash: abecec93522712f2328c246210db61420d4b29af8ee58eff7663e327abbcbcdf
test-bench-deps: {}
maintainer: Murat Kasimov <iokasimov.m@gmail.com>
synopsis: Continuation patterns
changelog: ! '# 0.1.1
* Rename Mock to Capture
* Add `(*=>)` combinator
* Add heartbeat usage example
* Add `bypass` combinator
# 0.1.2
* Add `uprise` combinator
* Remove `bypass` and `(*=>)`
# 0.1.3
* Add `watch` combinator
* Rename `uprise` to `chase`
* Add four infix combinators
# 0.1.4
* Remove `Usage.Heartbeat` module
'
basic-deps:
base: ==4.*
transformers: -any
all-versions:
- '0.1.0'
- '0.1.1'
- '0.1.2'
- '0.1.3'
- '0.1.4'
author: Murat Kasimov
latest: '0.1.4'
description-type: haddock
description: Make your actions to be observable and handle events from them.
license-name: BSD3
| homepage: https://github.com/iokasimov/observable
changelog-type: markdown
hash: 3bf814c4489bccef1b703ae49e3bb16dcdb2c8ef3030bdf5a3a7ff940b09d642
test-bench-deps: {}
maintainer: Murat Kasimov <iokasimov.m@gmail.com>
synopsis: Continuation patterns
changelog: ! '# 0.1.1
* Rename Mock to Capture
* Add `(*=>)` combinator
* Add heartbeat usage example
* Add `bypass` combinator
# 0.1.2
* Add `uprise` combinator
* Remove `bypass` and `(*=>)`
# 0.1.3
* Add `watch` combinator
* Rename `uprise` to `chase`
* Add four infix combinators
# 0.1.4
* Remove `Usage.Heartbeat` module
# 0.1.5
* Rename `chase` to `follow`
'
basic-deps:
base: ==4.*
transformers: -any
all-versions:
- '0.1.0'
- '0.1.1'
- '0.1.2'
- '0.1.3'
- '0.1.4'
- '0.1.5'
author: Murat Kasimov
latest: '0.1.5'
description-type: haddock
description: Make your actions to be observable and handle events from them.
license-name: BSD3
|
Copy files if test fails. | queue:
name: Hosted VS2017
demands:
- msbuild
- visualstudio
- vstest
steps:
- powershell: dotnet restore
displayName: 'Restore'
- task: VSBuild@1
displayName: 'Build'
inputs:
configuration: Release
- task: VSTest@2
displayName: 'Test'
- task: CopyFiles@2
displayName: 'Stage image assert errors.'
inputs:
sourceFolder: '$(Agent.TempDirectory)'
contents: '*.png'
targetFolder: '$(Build.ArtifactStagingDirectory)'
- task: PublishBuildArtifacts@1
displayName: 'Publish image assert errors.'
inputs:
pathtoPublish: '$(Build.ArtifactStagingDirectory)'
artifactName: 'Images'
| queue:
name: Hosted VS2017
demands:
- msbuild
- visualstudio
- vstest
steps:
- powershell: dotnet restore
displayName: 'Restore'
- task: VSBuild@1
displayName: 'Build'
inputs:
configuration: Release
- task: VSTest@2
displayName: 'Test'
- task: CopyFiles@2
displayName: 'Stage image assert errors.'
inputs:
sourceFolder: '$(Agent.TempDirectory)'
contents: '*.png'
targetFolder: '$(Build.ArtifactStagingDirectory)'
condition: failed()
- task: PublishBuildArtifacts@1
displayName: 'Publish image assert errors.'
inputs:
pathtoPublish: '$(Build.ArtifactStagingDirectory)'
artifactName: 'Images'
|
Revert windows ci back to msvc 2015. | image: Visual Studio 2017
clone_depth: 10
platform:
- x64
environment:
APPVEYOR_SAVE_CACHE_ON_ERROR: true
init:
- if /i %APPVEYOR_REPO_TAG% == true set VERSION=%APPVEYOR_REPO_TAG_NAME%
cache:
- qtcreator-latest\compiled
install:
- call "util\win\prepare.bat"
build_script:
- call "util\win\build.bat"
artifacts:
- path: 'QtcGtest-*.zip'
name: archive
deploy:
- provider: GitHub
description: ''
auth_token:
secure: NnyUV44yNAx8ea1L46dVhE4kQxUGd5M1O+yVk+9ncsNHWtw/9JoCnDqNybnxTccP
artifact: archive
force_update: true
on:
appveyor_repo_tag: true
| image: Visual Studio 2015
clone_depth: 10
platform:
- x86
environment:
APPVEYOR_SAVE_CACHE_ON_ERROR: true
init:
- if /i %APPVEYOR_REPO_TAG% == true set VERSION=%APPVEYOR_REPO_TAG_NAME%
cache:
- qtcreator-latest\compiled
install:
- call "util\win\prepare.bat"
build_script:
- call "util\win\build.bat"
artifacts:
- path: 'QtcGtest-*.zip'
name: archive
deploy:
- provider: GitHub
description: ''
auth_token:
secure: NnyUV44yNAx8ea1L46dVhE4kQxUGd5M1O+yVk+9ncsNHWtw/9JoCnDqNybnxTccP
artifact: archive
force_update: true
on:
appveyor_repo_tag: true
|
Add support for openSUSE Leap distributions | ---
galaxy_info:
author: Sébastien Han
description: Installs Ceph Object Storage Daemon
license: Apache
min_ansible_version: 2.3
platforms:
- name: Ubuntu
versions:
- xenial
- name: EL
versions:
- 7
categories:
- system
dependencies: []
| ---
galaxy_info:
author: Sébastien Han
description: Installs Ceph Object Storage Daemon
license: Apache
min_ansible_version: 2.3
platforms:
- name: Ubuntu
versions:
- xenial
- name: EL
versions:
- 7
- name: opensuse
versions:
- 42.3
categories:
- system
dependencies: []
|
Update on PR event for phpstan | name: "Static Analysis Check"
on:
pull_request:
push:
branches:
- 'develop'
- '4.1'
paths:
- 'app/**'
- 'system/**'
jobs:
build:
name: Run Check
runs-on: ubuntu-latest
steps:
- name: Setup PHP Action
uses: shivammathur/setup-php@v2
with:
extensions: intl
php-version: "7.4"
- name: Checkout
uses: actions/checkout@v2
- name: "Install dependencies"
run: "composer install"
- name: "Static analysis Check"
run: "vendor/bin/phpstan analyze --level=1 app system"
| name: "Static Analysis Check"
on:
pull_request:
branches:
- 'develop'
- '4.1'
paths:
- 'app/**'
- 'system/**'
push:
branches:
- 'develop'
- '4.1'
paths:
- 'app/**'
- 'system/**'
jobs:
build:
name: Run Check
runs-on: ubuntu-latest
steps:
- name: Setup PHP Action
uses: shivammathur/setup-php@v2
with:
extensions: intl
php-version: "7.4"
- name: Checkout
uses: actions/checkout@v2
- name: "Install dependencies"
run: "composer install"
- name: "Static analysis Check"
run: "vendor/bin/phpstan analyze --level=1 app system"
|
Add setup-ocaml lock file issue link into comment | name: locked
on:
push:
pull_request:
jobs:
regression:
strategy:
fail-fast: false
matrix:
os:
- ubuntu-latest
- macos-latest
ocaml-compiler:
- 4.12.0 # matches opam lock file
# don't add any other because they won't be used
runs-on: ${{ matrix.os }}
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Set up OCaml ${{ matrix.ocaml-compiler }}
env:
OPAMLOCKED: locked # otherwise setup-ocaml pins non-locked dependencies
uses: ocaml/setup-ocaml@v2
with:
ocaml-compiler: ${{ matrix.ocaml-compiler }}
- name: Install dependencies
run: opam install . --deps-only --locked
- name: Build
run: ./make.sh nat
- name: Test regression
run: ./make.sh headers testci
- name: Test octApron regression # skipped by default but CI has apron, so explicitly test group (which ignores skipping -- it's now a feature!)
run: ruby scripts/update_suite.rb group octapron -s
| name: locked
on:
push:
pull_request:
jobs:
regression:
strategy:
fail-fast: false
matrix:
os:
- ubuntu-latest
- macos-latest
ocaml-compiler:
- 4.12.0 # matches opam lock file
# don't add any other because they won't be used
runs-on: ${{ matrix.os }}
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Set up OCaml ${{ matrix.ocaml-compiler }}
env:
# otherwise setup-ocaml pins non-locked dependencies
# https://github.com/ocaml/setup-ocaml/issues/166
OPAMLOCKED: locked
uses: ocaml/setup-ocaml@v2
with:
ocaml-compiler: ${{ matrix.ocaml-compiler }}
- name: Install dependencies
run: opam install . --deps-only --locked
- name: Build
run: ./make.sh nat
- name: Test regression
run: ./make.sh headers testci
- name: Test octApron regression # skipped by default but CI has apron, so explicitly test group (which ignores skipping -- it's now a feature!)
run: ruby scripts/update_suite.rb group octapron -s
|
Add services for testing based on test repo name | - hosts: all
tasks:
- name: Run gate-check-commit.sh script
become: yes
become_user: root
shell: >-
scripts/gate-check-commit.sh {{ scenario }} {{ action }} {{ install_method }}
args:
chdir: "src/opendev.org/openstack/openstack-ansible"
executable: /bin/bash
environment:
# ZUUL_SRC_PATH is used by tests/get-ansible-role-requirements to know
# where the CI provided git sources were cloned.
ZUUL_SRC_PATH: "{{ ansible_user_dir }}/src"
ANSIBLE_PACKAGE: "{{ ansible_package | default('') }}"
| - hosts: all
vars:
scenario_map:
aodh: telemetry
ceilometer: telemetry
gnocchi: telemetry
horizon: lxc
tasks:
- name: Dynamically determine additional scenario elements
when:
- zuul.project.short_name is match("^openstack-ansible-os_(.*)$")
- (dynamic_scenario | default(True)) | bool
block:
- name: Set service_name
set_fact:
service_name: "{{ zuul.project.short_name | regex_replace('^openstack-ansible-os_(.*)$', '\\1') }}"
- name: Set scenario_suffix
set_fact:
scenario_suffix: "{{ scenario_map[service_name] | default(service_name) }}"
# Horizon's apache uses the same ip:port combinations as
# the repo server's nginx service. As such, we force the
# use of containers to ensure that they do not conflict.
- name: Add services based on test repo name
set_fact:
scenario: "{{ (service_name == 'horizon') | ternary(scenario | regex_replace('_metal', ''), scenario) }}_{{ scenario_suffix }}"
- name: Run gate-check-commit.sh script
become: yes
become_user: root
shell: >-
scripts/gate-check-commit.sh {{ scenario }} {{ action }} {{ install_method }}
args:
chdir: "src/opendev.org/openstack/openstack-ansible"
executable: /bin/bash
environment:
# ZUUL_SRC_PATH is used by tests/get-ansible-role-requirements to know
# where the CI provided git sources were cloned.
ZUUL_SRC_PATH: "{{ ansible_user_dir }}/src"
ANSIBLE_PACKAGE: "{{ ansible_package | default('') }}"
|
Move perf tools to a separate task | - name: apt-get update
apt:
update_cache: yes
cache_valid_time: 300
ignore_errors: True
- name: install basic packages
apt: name={{ item }} state=present
with_items:
- aptitude
- curl
- hdparm
- heirloom-mailx
- iftop
- iotop
- lsof
- ncdu
- netcat
- openssl
- screen
- smartmontools
- sysstat
- telnet
- tmux
- unzip
- vim
- virt-what
- wget
- name: install additional packages
apt: name={{ item }} state=present
with_items:
- aria2
- atop
- bc
- byobu
- git
- htop
- iperf
- linux-tools
- lshw
- mosh
- netdiag
- nethogs
- ngrep
- pv
- tcpdump
- tcpflow
- tree
- unzip
ignore_errors: True
when: not common_utils_ascetic_install
- name: check if Russian locale exists
command: bash -c "locale -a | grep ru_RU"
changed_when: result.rc > 0
failed_when: false
register: result
- name: generate Russian locale
command: bash -c "locale-gen ru_RU.UTF-8"
when: result.rc > 0
| - name: apt-get update
apt:
update_cache: yes
cache_valid_time: 300
ignore_errors: True
- name: install basic packages
apt: name={{ item }} state=present
with_items:
- aptitude
- curl
- hdparm
- heirloom-mailx
- iftop
- iotop
- lsof
- ncdu
- netcat
- openssl
- screen
- smartmontools
- sysstat
- telnet
- tmux
- unzip
- vim
- virt-what
- wget
- name: install additional packages
apt: name={{ item }} state=present
with_items:
- aria2
- atop
- bc
- byobu
- git
- htop
- iperf
- lshw
- mosh
- netdiag
- nethogs
- ngrep
- pv
- tcpdump
- tcpflow
- tree
- unzip
ignore_errors: True
when: not common_utils_ascetic_install
- name: install perf-related packages
apt: name={{ item }} state=present
with_items:
- linux-tools
ignore_errors: True
when: not common_utils_ascetic_install
- name: check if Russian locale exists
command: bash -c "locale -a | grep ru_RU"
changed_when: result.rc > 0
failed_when: false
register: result
- name: generate Russian locale
command: bash -c "locale-gen ru_RU.UTF-8"
when: result.rc > 0
|
Update mpi4py recipe (bump build) | {% set build = 1 %}
{% set version = '2.0.0' %}
{% set md5 = '4f7d8126d7367c239fd67615680990e3' %}
{% set mpi = (environ.get('MPI') or 'mpich') if not win else 'msmpi' %}
{% set mpi_version = {'mpich':'>=3.1', 'openmpi':'>=1.8'}.get(mpi,'') %}
package:
name: mpi4py
version: {{version}}
about:
home: https://bitbucket.org/mpi4py/
summary: 'Python bindings for MPI'
license: 'BSD (https://bitbucket.org/mpi4py/mpi4py/src/master/LICENSE.rst)'
source:
fn: mpi4py-{{version}}.tar.gz
url: https://bitbucket.org/mpi4py/mpi4py/downloads/mpi4py-{{version}}.tar.gz
{%if md5%}md5: {{md5}}{%endif%}
requirements:
build:
- python
- {{mpi}} {{mpi_version}} # [not win]
run:
- python
- {{mpi}} {{mpi_version}} # [not win]
build:
number: {{build}}
string: py{{py}}_{{mpi}}_{{build}}
script: python setup.py install
test:
imports:
- mpi4py
- mpi4py.rc
- mpi4py.dl # [not win]
- mpi4py.MPI
| {% set build = 2 %}
{% set version = '2.0.0' %}
{% set md5 = '4f7d8126d7367c239fd67615680990e3' %}
{% set mpi = (environ.get('MPI') or 'mpich') if not win else 'msmpi' %}
{% set mpi_version = {'mpich':'>=3.1', 'openmpi':'>=1.8'}.get(mpi,'') %}
package:
name: mpi4py
version: {{version}}
about:
home: https://bitbucket.org/mpi4py/
summary: 'Python bindings for MPI'
license: 'BSD (https://bitbucket.org/mpi4py/mpi4py/src/master/LICENSE.rst)'
source:
fn: mpi4py-{{version}}.tar.gz
url: https://bitbucket.org/mpi4py/mpi4py/downloads/mpi4py-{{version}}.tar.gz
{%if md5%}md5: {{md5}}{%endif%}
requirements:
build:
- python
- {{mpi}} {{mpi_version}} # [not win]
run:
- python
- {{mpi}} {{mpi_version}} # [not win]
build:
number: {{build}}
string: py{{py}}_{{mpi}}_{{build}}
script: python setup.py install
test:
imports:
- mpi4py
- mpi4py.rc
- mpi4py.dl # [not win]
- mpi4py.MPI
|
Exclude test folder from Code Climate | engines:
duplication:
enabled: true
exclude_paths:
- "webpack.config.js"
- "test/webpack.config.js"
- "test/webpack.config.hot.js"
- "sample/webpack.config.js"
config:
languages:
- javascript
eslint:
enabled: true
fixme:
enabled: true
ratings:
paths:
- "**.js"
- "**.jsx"
exclude_paths:
- "**/__tests__"
| engines:
duplication:
enabled: true
exclude_paths:
- "webpack.config.js"
- "test/webpack.config.js"
- "test/webpack.config.hot.js"
- "sample/webpack.config.js"
config:
languages:
- javascript
eslint:
enabled: true
fixme:
enabled: true
ratings:
paths:
- "**.js"
- "**.jsx"
exclude_paths:
- "test"
- "**/__tests__"
|
Make CodeClimate generate a grade | engines:
duplication:
enabled: true
config:
languages:
- python
fixme:
enabled: true
radon:
enabled: true
pep8:
enabled: true
checks:
E501:
enabled: false
ratings:
paths:
- "**.py"
exclude_paths:
- "protofuzz/tests/*"
| engines:
duplication:
enabled: true
config:
languages:
- python
fixme:
enabled: true
radon:
enabled: true
pep8:
enabled: true
checks:
E501:
enabled: false
ratings:
paths:
- "protofuzz/**"
- "**.py"
exclude_paths:
- "**/tests/**/*"
- "**/fuzzdb/**/*"
- "**.proto"
|
Extend payload provider with additional payloads | xss_payloads:
script: <script id=%1$s>%2$s</script>
break_json_single_quote: "';%2$s//';%2$s//"
break_json_double_quote: '";%2$s//";%2$s//'
close_script_tag: >
--></SCRIPT>">'><SCRIPT id=%1$s>%2$s</SCRIPT>
image_onerror: <img src=x onerror=%2$s id=%1$s>
image_src: <IMG SRC=JaVaScRiPt:%2$s id=%1$s>
title: </TITLE><SCRIPT id="%1$s">%2$s</script>
break_attribute_single_quote: >
' x=%1$s onclick="%2$s" \
break_attribute_double_quote: >
" x=%1$s onclick="%2$s" \
| xss_payloads:
script: <script id=%1$s>%2$s</script>
break_json_single_quote: "';%2$s//';%2$s//"
break_json_double_quote: '";%2$s//";%2$s//'
close_script_tag: '--></SCRIPT>">''><SCRIPT id=%1$s>%2$s</SCRIPT>'
image_onerror: <img src=x onerror=%2$s id=%1$s>
image_src: <IMG SRC=JaVaScRiPt:%2$s id=%1$s>
title: </TITLE><SCRIPT id=%1$s>%2$s</script>
break_attribute_single_quote: ''' x=%1$s onclick="%2$s" \'
break_attribute_double_quote: '" x=%1$s onclick="%2$s" \'
|
Include code duplication detection in the checks. | filter:
paths: [hostingcheck/*]
excluded_paths: [build/*, tests/*]
checks:
php:
code_rating: true
remove_extra_empty_lines: true
remove_php_closing_tag: true
remove_trailing_whitespace: true
fix_use_statements:
remove_unused: true
preserve_multiple: false
preserve_blanklines: true
order_alphabetically: true
fix_php_opening_tag: true
fix_linefeed: true
fix_line_ending: true
fix_identation_4spaces: true
fix_doc_comments: true
tools:
external_code_coverage:
timeout: 600
runs: 4
php_code_coverage: true
php_mess_detector: true
php_code_sniffer:
config:
standard: PSR2
filter:
paths: ['hostingcheck']
php_pdepend: true
php_loc:
enabled: true
php_cpd:
enabled: true
sensiolabs_security_checker: true
| filter:
paths: [hostingcheck/*]
excluded_paths: [build/*, tests/*]
checks:
php:
code_rating: true
duplication: true
remove_extra_empty_lines: true
remove_php_closing_tag: true
remove_trailing_whitespace: true
fix_use_statements:
remove_unused: true
preserve_multiple: false
preserve_blanklines: true
order_alphabetically: true
fix_php_opening_tag: true
fix_linefeed: true
fix_line_ending: true
fix_identation_4spaces: true
fix_doc_comments: true
tools:
external_code_coverage:
timeout: 600
runs: 4
php_code_coverage: true
php_mess_detector: true
php_code_sniffer:
config:
standard: PSR2
filter:
paths: ['hostingcheck']
php_pdepend: true
php_loc:
enabled: true
php_cpd:
enabled: true
sensiolabs_security_checker: true
|
Increase coverage target to 90%. | comment: false
coverage:
status:
project:
default:
target: 85
threshold: 10%
patch:
default:
threshold: 25%
| comment: false
coverage:
status:
project:
default:
target: 90
threshold: 10%
patch:
default:
threshold: 25%
|
Use Monolog rotating file logging | imports:
- { resource: config.yml }
monolog:
handlers:
main:
type: error_log
level: INFO
channels: [!event]
| imports:
- { resource: config.yml }
monolog:
handlers:
main:
type: rotating_file
path: "%kernel.logs_dir%/api.log"
level: INFO
channels: [!event]
max_files: 10
|
Scale down the balances by 1e9 to avoid overflows | base: &base
name: token_holder
subgraph: habdelra/cardpay-xdai
tables:
token_holder:
column_mappings:
balance:
balance_uint64:
default: 0
max_value: 18446744073709551615
type: uint64
validity_column: balance_uint64_valid
partition_sizes:
- 524288
- 131072
- 16384
- 1024
version: 0.0.1
staging:
<<: *base
subgraph: habdelra/cardpay-sokol
production:
<<: *base
subgraph: habdelra/cardpay-xdai | base: &base
name: token_holder
subgraph: habdelra/cardpay-xdai
tables:
token_holder:
column_mappings:
balance:
balance_downscale_e9_uint64:
default: 0
downscale: 1000000000
max_value: 18446744073709551615
type: uint64
validity_column: balance_uint64_valid
partition_sizes:
- 524288
- 131072
- 16384
- 1024
version: 1.0.0
staging:
<<: *base
subgraph: habdelra/cardpay-sokol
production:
<<: *base
subgraph: habdelra/cardpay-xdai
|
Update from Hackage at 2017-05-16T03:34:39Z | homepage: https://github.com/athanclark/tmapmvar#readme
changelog-type: ''
hash: 61a8d3d7432798476ce318ac8d8d863bba39bd44dd513c1ab86f9e56fa3cb3cc
test-bench-deps: {}
maintainer: athan.clark@gmail.com
synopsis: A single-entity stateful Map in STM, similar to tmapchan
changelog: ''
basic-deps:
stm: -any
base: ! '>=4.8 && <5'
unordered-containers: -any
containers: -any
hashable: -any
all-versions:
- '0.0.0'
- '0.0.1'
author: Athan Clark
latest: '0.0.1'
description-type: markdown
description: ! '# tmapmvar
'
license-name: BSD3
| homepage: https://github.com/athanclark/tmapmvar#readme
changelog-type: ''
hash: 526a7d03503d98e9650366925df1b9c53760cda5876dee099791781bc840757e
test-bench-deps: {}
maintainer: athan.clark@gmail.com
synopsis: A single-entity stateful Map in STM, similar to tmapchan
changelog: ''
basic-deps:
stm: -any
base: ! '>=4.8 && <5'
unordered-containers: -any
containers: -any
hashable: -any
all-versions:
- '0.0.0'
- '0.0.1'
- '0.0.2'
author: Athan Clark
latest: '0.0.2'
description-type: markdown
description: ! '# tmapmvar
'
license-name: BSD3
|
Update from Hackage at 2017-01-01T17:57:35Z | homepage: ''
changelog-type: ''
hash: e0034ca1c2cc242331f09127daa691d312966cd2e64c62f8a04faf8c98df845b
test-bench-deps: {}
maintainer: necrobious@gmail.com
synopsis: Aeson types for UUID instances.
changelog: ''
basic-deps:
base: ! '>=4 && <5'
text: -any
uuid: ! '>=1'
aeson: -any
all-versions:
- '0.1.0.0'
author: Kirk Peterson
latest: '0.1.0.0'
description-type: markdown
description: ! 'uuid-aeson
==========
Aeson types for UUID instances.
example
=======
Data.UUID.Aeson Data.UUID.V4 Data.Aeson Control.Monad> liftM (fromJSON .toJSON)
nextRandom :: IO (Result UUID)
Success 99644d7c-298c-44a9-b9bb-d0ca9940cc4e
'
license-name: BSD3
| homepage: ''
changelog-type: ''
hash: 5fe65c563ef474292cf59cda8e36416dd75a60a05fc1fb8be43a0bd2eba1d814
test-bench-deps: {}
maintainer: necrobious@gmail.com
synopsis: Aeson types for UUID instances.
changelog: ''
basic-deps:
base: ! '>=4 && <5'
text: <1.3
uuid: ! '>=1 && <1.4'
aeson: <1.1
all-versions:
- '0.1.0.0'
author: Kirk Peterson
latest: '0.1.0.0'
description-type: markdown
description: ! 'uuid-aeson
==========
Aeson types for UUID instances.
example
=======
Data.UUID.Aeson Data.UUID.V4 Data.Aeson Control.Monad> liftM (fromJSON .toJSON)
nextRandom :: IO (Result UUID)
Success 99644d7c-298c-44a9-b9bb-d0ca9940cc4e
'
license-name: BSD3
|
Correct file rights of JMX authentication files | #
# Configure Tomcat
#
- name: generate a custom "setenv.sh" from template, provides the ability to configure Tomcat at start-up from outside the container
template:
src: setenv.sh.j2
dest: "{{ tomcat_catalina_base }}/bin/setenv.sh"
mode: ug+x
- name: "generate a custom 'jmxremote.password' from template in {{ java_home }}/jre/lib/management/ folder, to use pasword protected JMX ports"
template:
src: jmxremote.password.j2
dest: "{{ java_home }}/jre/lib/management/jmxremote.password"
mode: 0600
- name: "generate a custom 'jmxremote.access' from template in {{ java_home }}/jre/lib/management/ folder, to use password protected JMX ports"
template:
src: jmxremote.access.j2
dest: "{{ java_home }}/jre/lib/management/jmxremote.access"
mode: 0600
| #
# Configure Tomcat
#
- name: generate a custom "setenv.sh" from template, provides the ability to configure Tomcat at start-up from outside the container
template:
src: setenv.sh.j2
dest: "{{ tomcat_catalina_base }}/bin/setenv.sh"
mode: ug+x
- name: "generate a custom 'jmxremote.password' from template in {{ java_home }}/jre/lib/management/ folder, to use password protected JMX ports"
template:
src: jmxremote.password.j2
dest: "{{ java_home }}/jre/lib/management/jmxremote.password"
owner: "{{ tomcat_root_user }}"
group: "{{ tomcat_system_group }}"
mode: 0640
- name: "generate a custom 'jmxremote.access' from template in {{ java_home }}/jre/lib/management/ folder, to use password protected JMX ports"
template:
src: jmxremote.access.j2
dest: "{{ java_home }}/jre/lib/management/jmxremote.access"
owner: "{{ tomcat_root_user }}"
group: "{{ tomcat_system_group }}"
mode: 0640
|
Add sdk constraint and update test deps | name: gilded_rose
version: 0.0.1
description: A simple console application.
dev_dependencies:
test: '>=0.12.11 <0.13.0'
| name: gilded_rose
version: 0.0.1
description: A simple console application.
environment:
sdk: '>=2.10.0 <3.0.0'
dev_dependencies:
test: ^1.16.8
|
Install vscode from microsoft repository | ---
- name: check Visual Studio Code installed
shell: code -v
register: vscode_check
ignore_errors: true
- name: install Visual Studio Code
apt:
deb: https://vscode-update.azurewebsites.net/latest/linux-deb-x64/stable
become: true
when: vscode_check.rc
- name: install Visual Studio Code extensions
command: "{{ home | quote }}/.config/Code/User/restore"
| ---
- name: add Visual Studio Code repository
apt_repository:
repo: deb [arch=amd64] http://packages.microsoft.com/repos/vscode stable main
become: true
- name: install Visual Studio Code
apt:
name: code
become: true
- name: install Visual Studio Code extensions
command: "{{ home | quote }}/.yadm/vscode/restore"
|
Adjust tf base -> kiva_pod_base | frame_id: /base
initial_orientation: [0.003, -0.032, -0.010, 0.999]
initial_x: 1.471
initial_y: -0.022
initial_z: 0.495
object_a: 0.2
object_b: 0.2
object_g: 0.0
object_r: 0.0
object_type: cube
object_x: 0.865
object_y: 0.865
object_z: 1.0
publish_tf: true
tf_frame: kiva_pod_base
| frame_id: /base
initial_orientation: [0.00325133162551, -0.00693140178919, -0.00992589257658, 0.999921441078]
initial_x: 1.47352457047
initial_y: -0.0344418957829
initial_z: 0.423083603382
object_a: 0.2
object_b: 0.2
object_g: 0.0
object_r: 0.0
object_type: cube
object_x: 0.865
object_y: 0.865
object_z: 1.0
publish_tf: true
tf_frame: kiva_pod_base
|
Remove useless install of the node-gyp npm global package. | # Args:
- name: install nodesource key
apt_key: url=https://deb.nodesource.com/gpgkey/nodesource.gpg.key state=present
- name: install nodesource repository
apt_repository: repo='deb https://deb.nodesource.com/node_0.12 trusty main' state=present
- name: install node
apt: name={{ item }} state=present
with_items:
- nodejs
- jq
- build-essential
- name: fix node binary name
command: update-alternatives --install /usr/bin/node node /usr/bin/nodejs 10
args:
creates: /usr/bin/node
- name: accept self signed certificates
lineinfile: dest=/root/.bashrc line="export NODE_TLS_REJECT_UNAUTHORIZED=0"
when:
- is_development_environment
- name: check installed global npm modules
shell: cat /usr/lib/node_modules/{{ item.name }}/package.json | jq -r '.version'
with_items:
- { name: forever, version: 0.14.2 }
- { name: nodemon, version: 1.3.7 }
- { name: json, version: 9.0.3 }
- { name: node-gyp, version: 3.2.1 }
register: result
failed_when: no
changed_when: result.stdout != "{{ item.version }}"
- name: install global npm modules
npm: name={{ item.item.name }} version={{ item.item.version }} global=yes state=present
with_items: result.results
when: item.changed
| # Args:
- name: install nodesource key
apt_key: url=https://deb.nodesource.com/gpgkey/nodesource.gpg.key state=present
- name: install nodesource repository
apt_repository: repo='deb https://deb.nodesource.com/node_0.12 trusty main' state=present
- name: install node
apt: name={{ item }} state=present
with_items:
- nodejs
- jq
- build-essential
- name: fix node binary name
command: update-alternatives --install /usr/bin/node node /usr/bin/nodejs 10
args:
creates: /usr/bin/node
- name: accept self signed certificates
lineinfile: dest=/root/.bashrc line="export NODE_TLS_REJECT_UNAUTHORIZED=0"
when:
- is_development_environment
- name: check installed global npm modules
shell: cat /usr/lib/node_modules/{{ item.name }}/package.json | jq -r '.version'
with_items:
- { name: forever, version: 0.14.2 }
- { name: nodemon, version: 1.3.7 }
- { name: json, version: 9.0.3 }
register: result
failed_when: no
changed_when: result.stdout != "{{ item.version }}"
- name: install global npm modules
npm: name={{ item.item.name }} version={{ item.item.version }} global=yes state=present
with_items: result.results
when: item.changed
|
Add cache for free days | Leave:
# -
# employee: 1
# covered_by: 2
# starts_at: '2013-11-08'
# ends_at: '2013-11-08'
# -
# employee: 6
# covered_by: 3
# starts_at: '2013-11-11'
# ends_at: '2013-11-25'
# -
# employee: 4
# covered_by: 6
# starts_at: '2013-11-21'
# ends_at: '2013-12-03'
-
employee: 5
covered_by: ~
starts_at: '2013-12-23'
ends_at: '2014-01-07'
| Leave:
-
employee: 1
covered_by: 2
starts_at: '2013-11-08'
ends_at: '2013-11-08'
-
employee: 6
covered_by: 3
starts_at: '2013-11-11'
ends_at: '2013-11-25'
-
employee: 4
covered_by: 6
starts_at: '2013-11-21'
ends_at: '2013-12-03'
-
employee: 5
covered_by: ~
starts_at: '2013-12-23'
ends_at: '2014-01-07'
|
Add mopup for lockfiles that are older than 6hrs | ---
- dnf: name=duply state=latest
become: true
tags:
- packages
- file: path=~/.duply/{{ item.name }}
state=directory mode=0700
with_items: "{{ backup_sets }}"
- template: src="conf" dest=~/.duply/{{ item.name }}/conf
with_items: "{{ backup_sets }}"
- template: src="exclude" dest=~/.duply/{{ item.name }}/exclude
with_items: "{{ backup_sets }}"
- copy: src="{{ item[0] }}" dest=~/.duply/{{ item[1].name }}/{{ item[0] }}
with_nested:
-
- pre
- post
- "{{ backup_sets }}"
- cron: name={{ item.name }} minute={{ item.on_minute }}
job="/usr/bin/duply {{ item.name }} pre+bkp_post_cleanup --force"
with_items: "{{ backup_sets }}"
| ---
- dnf: name=duply state=latest
become: true
tags:
- packages
- file: path=~/.duply/{{ item.name }}
state=directory mode=0700
with_items: "{{ backup_sets }}"
- template: src="conf" dest=~/.duply/{{ item.name }}/conf
with_items: "{{ backup_sets }}"
- template: src="exclude" dest=~/.duply/{{ item.name }}/exclude
with_items: "{{ backup_sets }}"
- copy: src="{{ item[0] }}" dest=~/.duply/{{ item[1].name }}/{{ item[0] }}
with_nested:
-
- pre
- post
- "{{ backup_sets }}"
- cron: name={{ item.name }} minute={{ item.on_minute }}
job="/usr/bin/duply {{ item.name }} pre+bkp_post_cleanup --force"
with_items: "{{ backup_sets }}"
- cron: name=lock_cleanup minute=0
job="find ~/.cache/duplicity/ -type f -name lockfile.lock -mmin +240 -delete"
|
Update from Hackage at 2017-09-03T05:02:26Z | homepage: ''
changelog-type: ''
hash: b64c8b9e9804d12413c62c51ca9c000e319d34533619f2f2cb12fdfd0728cdec
test-bench-deps: {}
maintainer: Athan Clark <athan.clark@gmail.com>
synopsis: Painfully simple URL deployment.
changelog: ''
basic-deps:
exceptions: -any
base: ! '>=4.8 && <5'
monad-control: -any
path-extra: ! '>=0.0.3'
mtl: -any
mmorph: -any
transformers-base: -any
monad-logger: -any
transformers: -any
resourcet: -any
all-versions:
- '5.0.0.1'
author: Athan Clark <athan.clark@gmail.com>
latest: '5.0.0.1'
description-type: haddock
description: ''
license-name: MIT
| homepage: ''
changelog-type: ''
hash: 4759f0f2597f50f6254e61ef7897ca0f9bb8bab791422130ef9a4bd51fa6a223
test-bench-deps: {}
maintainer: Athan Clark <athan.clark@gmail.com>
synopsis: Painfully simple URL deployment.
changelog: ''
basic-deps:
exceptions: -any
split: -any
base: ! '>=4.8 && <5'
text: -any
strict: -any
monad-control: -any
path-extra: ! '>=0.0.5'
mtl: -any
mmorph: -any
transformers-base: -any
monad-logger: -any
transformers: -any
attoparsec-uri: -any
resourcet: -any
vector: -any
all-versions:
- '5.0.0.1'
- '6.0.0'
author: Athan Clark <athan.clark@gmail.com>
latest: '6.0.0'
description-type: haddock
description: ''
license-name: MIT
|
Add tenet to lingo bundle file | description: Best Practices for CockroachDB from their contributor docs.
version: 0.0.0
tenets:
- avoid-bool-params
tags:
- golang
- go
- cockroach
- cockroachdb
| description: Best Practices for CockroachDB from their contributor docs.
version: 0.0.0
tenets:
- avoid-bool-params
- line-length-limit
tags:
- golang
- go
- cockroach
- cockroachdb
|
Make all steps run on pwsh | name: Deploy
on:
release:
types: [created]
jobs:
windows:
runs-on: windows-latest
steps:
- uses: actions/checkout@v1
- name: Install needed software
run: ./.github/scripts/Install.ps1
env:
FZF_VERSION: 0.20.0
shell: powershell
- name: Build PSFzf-Binary
run: ./.github/scripts/Build.ps1
shell: powershell
- name: Publish
run: ./.github/scripts/Deploy.ps1
shell: pwsh
env:
POWERSHELLGALLERY_APIKEY: ${{ secrets.POWERSHELLGALLERY_APIKEY }}
GITHUB_PRERELEASE: ${{ github.event.release.prerelease }}
| name: Deploy
on:
release:
types: [created]
jobs:
windows:
runs-on: windows-latest
steps:
- uses: actions/checkout@v1
- name: Install needed software
run: ./.github/scripts/Install.ps1
env:
FZF_VERSION: 0.20.0
shell: pwsh
- name: Build PSFzf-Binary
run: ./.github/scripts/Build.ps1
shell: pwsh
- name: Publish
run: ./.github/scripts/Deploy.ps1
shell: pwsh
env:
POWERSHELLGALLERY_APIKEY: ${{ secrets.POWERSHELLGALLERY_APIKEY }}
GITHUB_PRERELEASE: ${{ github.event.release.prerelease }}
|
Fix coveralls integration by explicitly adding the correct service name | name: Test
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
jobs:
build:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [3.6, 3.7, 3.8]
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install coveralls
pip install -r requirements.txt
- name: Test
env:
PYTHONWARNINGS: all
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
python src/manage.py check
python src/manage.py test feedback
coverage run --omit="*/migrations*" --source="." src/manage.py test feedback
coveralls
| name: Test
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
jobs:
build:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [3.6, 3.7, 3.8]
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install coveralls
pip install -r requirements.txt
- name: Test
env:
PYTHONWARNINGS: all
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
COVERALLS_SERVICE_NAME: github
run: |
python src/manage.py check
python src/manage.py test feedback
coverage run --omit="*/migrations*" --source="." src/manage.py test feedback
coveralls
|
Drop value_date from BookingTemplate fixtures. | # Read about fixtures at http://ar.rubyonrails.org/classes/Fixtures.html
one:
title: MyString
amount: 9.99
credit_account_id: 1
debit_account_id: 1
value_date: 2010-08-11
comments: MyText
empty:
code:
title:
amount:
credit_account_id:
debit_account_id:
value_date:
comments:
full:
code: full
title: MyString
amount: 9.99
credit_account: ubs
debit_account: postfinance
value_date: 2010-08-11
comments: MyText
partial:
code: partial
title: First booking
credit_account: ubs
debit_account: postfinance
comments: Let's start
| # Read about fixtures at http://ar.rubyonrails.org/classes/Fixtures.html
one:
title: MyString
amount: 9.99
credit_account_id: 1
debit_account_id: 1
comments: MyText
empty:
code:
title:
amount:
credit_account_id:
debit_account_id:
comments:
full:
code: full
title: MyString
amount: 9.99
credit_account: ubs
debit_account: postfinance
comments: MyText
partial:
code: partial
title: First booking
credit_account: ubs
debit_account: postfinance
comments: Let's start
|
Update GitLab CE version to 8.16.4 | root_pw: "" # auto-generate password
grub_pw: null
rc_scripts_after:
- zabbix-agent.service
firewall_allowed_tcp_ports:
- port: 22
- port: 80
- port: 443
- port: 10051
selinux_permissive_domains:
- zabbix_agent_t
gitlab_ce_version: "8.13.11"
gitlab_ce_checksum: "4c0cc69860280e1602cc6eac8f169a5b3e9bd5fd"
| root_pw: "" # auto-generate password
grub_pw: null
rc_scripts_after:
- zabbix-agent.service
firewall_allowed_tcp_ports:
- port: 22
- port: 80
- port: 443
- port: 10051
selinux_permissive_domains:
- zabbix_agent_t
gitlab_ce_version: "8.16.4"
gitlab_ce_checksum: "eddbb617ddd8d25364e7761042463e6581054c17" # sha1
|
Add a way to manually trigger a deployment | name: Nightly Cron
on:
schedule:
- cron: 0 15 * * *
jobs:
deploy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/github-script@0.9.0
name: Create Deployment
with:
github-token: ${{ secrets.flexgetbot_pat }}
script: |
await github.repos.createDeployment({
...context.repo,
ref: context.ref.slice(11),
});
| name: Trigger Deploy
on:
schedule:
- cron: 0 15 * * *
workflow_dispatch: {}
jobs:
deploy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/github-script@0.9.0
name: Create Deployment
with:
github-token: ${{ secrets.flexgetbot_pat }}
script: |
await github.repos.createDeployment({
...context.repo,
ref: context.ref.slice(11),
});
|
Use same type as the other quickstarts & use ClusterIP | apiVersion: v1
kind: Service
metadata:
name: wildfly-swarm-rest
spec:
ports:
- protocol: TCP
port: 8080
targetPort: 8080
type: LoadBalancer
| apiVersion: v1
kind: Service
metadata:
name: wildfly-swarm-rest
spec:
ports:
- protocol: TCP
port: 8080
targetPort: 8080
type: ClusterIP
|
Send User IDs with Sentry Errors | parameters:
env(SENTRY_DSN): "https://c70286fb157048be9ebc6e918e8c2b79@o200403.ingest.sentry.io/1323198"
sentry:
dsn: '%env(SENTRY_DSN)%'
options:
before_send: 'App\Service\SentryBeforeSend'
when@test:
sentry:
dsn: ~
tracing: false
| parameters:
env(SENTRY_DSN): "https://c70286fb157048be9ebc6e918e8c2b79@o200403.ingest.sentry.io/1323198"
sentry:
dsn: '%env(SENTRY_DSN)%'
options:
before_send: 'App\Service\SentryBeforeSend'
send_default_pii: true
when@test:
sentry:
dsn: ~
tracing: false
|
Complete awscli role for ansible | - pip: name=awscli state=present
| tasks:
- pip: name=awscli state=present
sudo: yes
- shell: region=$(curl -s http://169.254.169.254/latest/dynamic/instance-identity/document | awk -F\" '/region/ {print $4}'); aws configure set region $region
|
Allow Xdebug to work with .twig templates | ### Lagoon Drupal 8 development environment services file.
#
# This file should contain service definitions that are needed on development environments
#
# It contains some defaults that the Lagoon team suggests, please edit them as required.
#
parameters:
twig.config:
debug: true # displays twig debug messages, developers like them :)
auto_reload: true # reloads the twig files on every request, so no drush cache clear is required
cache: false # No twig internal cache, important: check the example.settings.local.php to fully disable the twig cache
services:
cache.backend.null: # Defines a Cache Backend Factory which is just empty, it is not used by default
class: Drupal\Core\Cache\NullBackendFactory
| ### Lagoon Drupal 8 development environment services file.
#
# This file should contain service definitions that are needed on development environments
#
# It contains some defaults that the Lagoon team suggests, please edit them as required.
#
parameters:
twig.config:
debug: true # displays twig debug messages, developers like them :)
auto_reload: true # reloads the twig files on every request, so no drush cache clear is required
cache: true # Twig cache allows Xdebug to work with .twig files
services:
cache.backend.null: # Defines a Cache Backend Factory which is just empty, it is not used by default
class: Drupal\Core\Cache\NullBackendFactory
|
Handle all plugins via Vundle via .vimrc | - name: Install vim plugins
include: plugins.yml target={{ outer_item }}
with_items:
- /home/wicksy/.vim
loop_control:
loop_var: outer_item
- name: Install Vundle Plugins
command: vim +PluginInstall +qall
become_user: wicksy
- name: Chown vim directories and files (wicksy)
file:
path: /home/wicksy/.vim
state: directory
owner: wicksy
group: wicksy
mode: 0755
recurse: yes
| - name: Vundle Vim Repo
git:
repo: https://github.com/VundleVim/Vundle.vim.git
dest: "/home/wicksy/.vim/bundle/Vundle.vim"
version: master
clone: yes
force: yes
update: yes
become_user: wicksy
- name: Install Vundle Plugins
command: vim +PluginInstall +qall
become_user: wicksy
- name: Chown vim directories and files (wicksy)
file:
path: /home/wicksy/.vim
state: directory
owner: wicksy
group: wicksy
mode: 0755
recurse: yes
|
Update from Forestry.io - Updated Forestry configuration | ---
new_page_extension: md
auto_deploy: false
admin_path: "/static/admin"
webhook_url:
sections:
- type: directory
path: projects
label: Projects
create: documents
match: "**/*.md"
templates:
- project
- type: directory
path: journal
label: Journal
create: documents
match: "**/*.md"
templates:
- journal
- type: document
path: data/theme.json
label: Theme config
upload_dir: uploads
public_path: "/uploads"
front_matter_path: ''
use_front_matter_path: false
file_template: ":filename:"
instant_preview: true
build:
preview_command: gridsome build
publish_command: gridsome build
preview_output_directory: dist
output_directory: dist
instant_preview_command: gridsome develop
install_dependencies_command: npm install
preview_docker_image: public
mount_path: public
| ---
new_page_extension: md
auto_deploy: false
admin_path: "/static/admin"
webhook_url:
sections:
- type: directory
path: projects
label: Projects
create: documents
match: "**/*.md"
templates:
- project
- type: directory
path: journal
label: Journal
create: documents
match: "**/*.md"
templates:
- journal
- type: document
path: data/theme.json
label: Theme config
upload_dir: uploads
public_path: "/uploads"
front_matter_path: ''
use_front_matter_path: false
file_template: ":filename:"
instant_preview: true
build:
preview_command: gridsome build
publish_command: gridsome build
preview_output_directory: dist
output_directory: dist
instant_preview_command: gridsome develop
install_dependencies_command: npm install
preview_docker_image: node:10
mount_path: "/srv"
working_dir: "/srv"
|
Update install script to link theme | - clean: ["~"]
- link:
~/.vim: vim/
~/.vimrc: vimrc
~/.bashrc: bashrc
~/.bash_profile: bash_profile
~/.zshrc: zshrc
~/.oh-my-zsh: zsh
- shell: []
| - clean: ["~"]
- link:
~/.vim: vim/
~/.vimrc: vimrc
~/.bashrc: bashrc
~/.bash_profile: bash_profile
~/.zshrc: zshrc
~/.oh-my-zsh: zsh
./zsh/themes/digitalraven.zsh-theme: digitalraven.zsh-theme
- shell: []
|
Update pip and install virtualenvwrapper by default | ---
# choose python version and setup a service virtualenv
- include: 22-python2.yml
when: not use_python3
- include: 23-python3.yml
when: use_python3
- name: configure virtualenv home directory
become_user: 'root'
lineinfile: >
dest=/etc/environment
regexp=^WORKON_HOME={{ venvs_path }}
line=WORKON_HOME={{ venvs_path }}
state=present
- name: update virtualenv home directory owner
become_user: 'root'
file: >
path={{ venvs_path }}
owner={{ service_user }}
group={{ service_group }}
recurse=yes
- name: enable virtualenv on login
lineinfile: >
dest="~{{ service_user }}/.bashrc"
line="source {{ virtualenv_path }}/bin/activate"
state=present
- name: go to {{ source_path }} after login
lineinfile: >
dest="~{{ service_user }}/.bashrc"
line="cd {{ source_path }}"
state=present
| ---
# choose python version and setup a service virtualenv
- include: 22-python2.yml
when: not use_python3
- include: 23-python3.yml
when: use_python3
- name: update virtualenvwrapper and pip
pip: >
virtualenv="{{ virtualenv_path }}"
executable="{{ virtualenv_path }}/bin/pip"
name={{ item }}
state=latest
with_items:
- "pip==8.1.0"
- "virtualenvwrapper"
- name: configure virtualenv home directory
become_user: 'root'
lineinfile: >
dest=/etc/environment
regexp=^WORKON_HOME={{ venvs_path }}
line=WORKON_HOME={{ venvs_path }}
state=present
- name: update virtualenv home directory owner
become_user: 'root'
file: >
path={{ venvs_path }}
owner={{ service_user }}
group={{ service_group }}
recurse=yes
- name: enable virtualenv on login
lineinfile: >
dest="~{{ service_user }}/.bashrc"
line="source {{ virtualenv_path }}/bin/activate"
state=present
- name: go to {{ source_path }} after login
lineinfile: >
dest="~{{ service_user }}/.bashrc"
line="cd {{ source_path }}"
state=present
|
Create service for locale listener | services:
# municipales.example:
# class: Listabierta\Bundle\MunicipalesBundle\Example
# arguments: [@service_id, "plain_value", %parameter%]
| services:
municipales.locale_listener:
class: Listabierta\Bundle\MunicipalesBundle\EventListener\LocaleListener
arguments: ["%kernel.default_locale%"]
tags:
- { name: kernel.event_subscriber } |
Clean up github actions configuration | name: CI
on: [push]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Run a one-line script
run: echo Hello, world!
- name: Install dependencies
run: |
sudo apt-get -qq update
sudo apt-get install -y build-essential python python-dev python-pip
sudo pip install platformio
- name: Build firmware
run: |
cd firmware
pio run -e v0 -e v1 -e v2 -t size
| name: CI
on: [push]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Install dependencies
run: |
sudo apt-get -qq update
sudo apt-get install -y build-essential python python-dev python-pip
sudo pip install platformio
- name: Build firmware
run: |
cd firmware
pio run -e v0 -e v1 -e v2 -t size
|
Add '--prefix' to npm commands | name: Deploy BioThings Studio Webapp to S3
on:
push:
branches:
- master
jobs:
build_and_deploy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@master
- uses: actions/setup-node@v1
with:
node-version: '12'
- run: npm install
- run: npm run build
- uses: jakejarvis/s3-sync-action@master
with:
args: --acl public-read --follow-symlinks --delete
env:
AWS_S3_BUCKET: ${{ secrets.STUDIO_S3_BUCKET }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_REGION: ${{secrets.AWS_REGION}}
SOURCE_DIR: './biothings/hub/webapp/dist'
DEST_DIR: 'master'
| name: Deploy BioThings Studio Webapp to S3
on:
push:
branches:
- master
jobs:
build_and_deploy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@master
- uses: actions/setup-node@v1
with:
node-version: '12'
- run: npm --prefix biothings/hub/webapp/ install
- run: npm --prefix biothings/hub/webapp/ run build
- uses: jakejarvis/s3-sync-action@master
with:
args: --acl public-read --follow-symlinks --delete
env:
AWS_S3_BUCKET: ${{ secrets.STUDIO_S3_BUCKET }}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_REGION: ${{secrets.AWS_REGION}}
SOURCE_DIR: './biothings/hub/webapp/dist'
DEST_DIR: 'master'
|
Change GH-action trigger to work on PRs from forks | ---
name: CrateDB SQL
on: [push]
jobs:
test:
name: Test CrateDB SQL on ${{ matrix.os }}
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest, windows-latest]
steps:
- uses: actions/checkout@v2
- name: Setup Gradle Java
uses: actions/setup-java@v1
with:
java-version: 11
- name: Run tests on ${{ matrix.os }}
uses: eskatos/gradle-command-action@v1
with:
arguments: :server:test -Dtests.crate.run-windows-incompatible=${{ matrix.os == 'ubuntu-latest' }}
| ---
name: CrateDB SQL
on:
pull_request: ~
push:
branches:
- master
jobs:
test:
name: Test CrateDB SQL on ${{ matrix.os }}
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-latest, windows-latest]
steps:
- uses: actions/checkout@v2
- name: Setup Gradle Java
uses: actions/setup-java@v1
with:
java-version: 11
- name: Run tests on ${{ matrix.os }}
uses: eskatos/gradle-command-action@v1
with:
arguments: :server:test -Dtests.crate.run-windows-incompatible=${{ matrix.os == 'ubuntu-latest' }}
|
Update from Hackage at 2020-01-09T15:08:02Z | homepage: https://github.com/haskell-primitive/primitive-unaligned
changelog-type: ''
hash: a99337e10cb89ee16e9087d854cd90388aeffec3f8e6d7bbf348c137b87c63c0
test-bench-deps:
base: -any
primitive-unaligned: -any
primitive: -any
maintainer: Andrew Martin <andrew.thaddeus@gmail.com>
synopsis: Unaligned access to primitive arrays
changelog: ''
basic-deps:
base: ! '>=4.12.0.0 && <5'
primitive: ! '>=0.6.4 && <0.8'
all-versions:
- 0.1.1.0
author: Andrew Martin
latest: 0.1.1.0
description-type: haddock
description: Unaligned access to primitive arrays. The offsets are given in bytes
rather than elements.
license-name: BSD-3-Clause
| homepage: https://github.com/haskell-primitive/primitive-unaligned
changelog-type: ''
hash: e38ba9c583f459a6045df523b042dd7794e79100f57ce0f9233368a10ce43173
test-bench-deps:
base: -any
primitive-unaligned: -any
primitive: -any
maintainer: Andrew Martin <andrew.thaddeus@gmail.com>
synopsis: Unaligned access to primitive arrays
changelog: ''
basic-deps:
base: ! '>=4.12.0.0 && <5'
primitive: ! '>=0.6.4 && <0.8'
all-versions:
- 0.1.1.0
- 0.1.1.1
author: Andrew Martin
latest: 0.1.1.1
description-type: haddock
description: Unaligned access to primitive arrays. The offsets are given in bytes
rather than elements.
license-name: BSD-3-Clause
|
Test extensions on Travis against Ruby 2.2, 2.3 and 2.4 | sudo: required
dist: trusty
language: ruby
env:
- DB=postgres
- DB=mysql
gemfile:
- gemfiles/spree_3_1.gemfile
- gemfiles/spree_3_2.gemfile
- gemfiles/spree_3_3.gemfile
- gemfiles/spree_3_4.gemfile
- gemfiles/spree_master.gemfile
script:
- bundle exec rake test_app
- bundle exec rake spec
rvm:
- 2.3.1
- 2.2.7
before_install:
- mysql -u root -e "GRANT ALL ON *.* TO 'travis'@'%';"
| sudo: required
dist: trusty
language: ruby
env:
- DB=postgres
- DB=mysql
gemfile:
- gemfiles/spree_3_1.gemfile
- gemfiles/spree_3_2.gemfile
- gemfiles/spree_3_3.gemfile
- gemfiles/spree_3_4.gemfile
- gemfiles/spree_master.gemfile
script:
- bundle exec rake test_app
- bundle exec rake spec
rvm:
- 2.4.2
- 2.3.1
- 2.2.7
before_install:
- mysql -u root -e "GRANT ALL ON *.* TO 'travis'@'%';"
|
Switch to a fixed version instead of latest version for CI VMs | name: Check Matrix
on: [push, pull_request]
jobs:
build:
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, macOS-latest, windows-latest]
transport: [native, nio]
exclude:
# excludes native on Windows (there's none)
- os: windows-latest
transport: native
steps:
- uses: actions/checkout@v1
- name: Set up JDK 1.8
uses: actions/setup-java@v1.2.0
with:
java-version: 1.8
- name: Build with Gradle
run: ./gradlew clean check -PforceTransport=${{ matrix.transport }} | name: Check Matrix
on: [push, pull_request]
jobs:
build:
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [ubuntu-18.04, macos-10.15, windows-2019]
transport: [native, nio]
exclude:
# excludes native on Windows (there's none)
- os: windows-latest
transport: native
steps:
- uses: actions/checkout@v1
- name: Set up JDK 1.8
uses: actions/setup-java@v1.2.0
with:
java-version: 1.8
- name: Build with Gradle
run: ./gradlew clean check -PforceTransport=${{ matrix.transport }} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.