Instruction stringlengths 14 778 | input_code stringlengths 0 4.24k | output_code stringlengths 1 5.44k |
|---|---|---|
Update git author name and email | image: maven:3-jdk-8
environment:
- GIT_AUTHOR_NAME
- GIT_AUTHOR_EMAIL
volumes:
- ./.m2:/root/.m2
targets:
build:
environment:
- OOTPAI_DATA=/var/ootp-ai-data
run: mvn -B clean verify -Dgpg.skip=true -DaddScalacArgs=-feature
clean: mvn -B clean
install: mvn -B install -Dgpg.skip=true -DaddScalacArgs=-feature
update-versions: mvn -B versions:update-properties -DgenerateBackupPoms=false
site: mvn -B -e clean site
run:
before:
- install
environment:
- OOTPAI_SITE
- OOTPAI_CLEAR_CACHE
- OOTPAI_PLAYOFFS
- OOTPAI_DATA=/var/ootp-ai-data
volumes:
- $GIT_SSH_KEY:/ssh/id_rsa:ro
- ./ootp-ai-data:/var/ootp-ai-data
run: /bin/bash construi/run/run.sh
| image: maven:3-jdk-8
environment:
- GIT_AUTHOR_NAME=ootp-ai
- GIT_AUTHOR_EMAIL=levi.stephen@gmail.com
volumes:
- ./.m2:/root/.m2
targets:
build:
environment:
- OOTPAI_DATA=/var/ootp-ai-data
run: mvn -B clean verify -Dgpg.skip=true -DaddScalacArgs=-feature
clean: mvn -B clean
install: mvn -B install -Dgpg.skip=true -DaddScalacArgs=-feature
update-versions: mvn -B versions:update-properties -DgenerateBackupPoms=false
site: mvn -B -e clean site
run:
before:
- install
environment:
- OOTPAI_SITE
- OOTPAI_CLEAR_CACHE
- OOTPAI_PLAYOFFS
- OOTPAI_DATA=/var/ootp-ai-data
volumes:
- $GIT_SSH_KEY:/ssh/id_rsa:ro
- ./ootp-ai-data:/var/ootp-ai-data
run: /bin/bash construi/run/run.sh
|
Set facts in a manner that works properly with Puppet 4. | ---
driver:
name: vagrant
provisioner:
name: puppet_apply
modules_path: modules
manifests_path: manifests
<% if ENV['PUPPET3'] != nil %>
require_puppet_collections: false
<% else %>
require_puppet_collections: true
<% end %>
custom_facts:
ts_deploy_key: <%= ENV['TS_DEPLOY_KEY'] != nil ? ENV['TS_DEPLOY_KEY'] : 'ts_deploy_key' %>
ts_config_args: <%= ENV['TS_CONFIG_ARGS'] %>
<% if ENV['TS_PACKAGE_VERSION'] %>
ts_package_version: <%= ENV['TS_PACKAGE_VERSION'] %>
<% end %>
platforms:
- name: debian-7.8
- name: ubuntu-12.04
- name: ubuntu-14.04
- name: centos-6.7
suites:
- name: default
provisioner:
manifest: site.pp
| ---
driver:
name: vagrant
provisioner:
name: puppet_apply
modules_path: modules
manifests_path: manifests
<% if ENV['PUPPET3'] != nil %>
require_puppet_collections: false
<% else %>
require_puppet_collections: true
<% end %>
custom_facts:
ts_deploy_key: <%= ENV['TS_DEPLOY_KEY'] != nil ? ENV['TS_DEPLOY_KEY'] : 'ts_deploy_key' %>
<% if ENV['TS_CONFIG_ARGS'] != nil %>
ts_config_args: <%= ENV['TS_CONFIG_ARGS'] %>
<% end %>
<% if ENV['TS_PACKAGE_VERSION'] != nil %>
ts_package_version: <%= ENV['TS_PACKAGE_VERSION'] %>
<% end %>
platforms:
- name: debian-7.8
- name: ubuntu-12.04
- name: ubuntu-14.04
- name: centos-6.7
suites:
- name: default
provisioner:
manifest: site.pp
|
Remove -dev.infinity in SDK upper constraint | name: dart_style
# Note: See tool/grind.dart for how to bump the version.
version: 1.0.8-dev
author: Dart Team <misc@dartlang.org>
description: Opinionated, automatic Dart source code formatter.
homepage: https://github.com/dart-lang/dart_style
environment:
sdk: ">=1.8.0 <2.0.0-dev.infinity"
dependencies:
analyzer: '^0.30.0'
args: '>=0.12.1 <0.14.0'
path: '>=1.0.0 <2.0.0'
source_span: '>=1.4.0 <2.0.0'
dev_dependencies:
async: '>=1.0.0 <=2.0.0'
browser: '>=0.10.0 <0.11.0'
grinder: '^0.8.0'
js: ^0.6.0
node_preamble: ^1.0.0
pub_semver: '^1.2.3'
scheduled_test: '>=0.12.0 <0.13.0'
test: '>=0.12.0 <0.13.0'
yaml: '^2.0.0'
executables:
dartfmt: format
dartformat: format # Allow the old name for compatibility.
# Tell the bots not to bother building the tests since they don't run in a
# browser anyway.
transformers:
- $dart2js:
$exclude: 'test/**'
| name: dart_style
# Note: See tool/grind.dart for how to bump the version.
version: 1.0.8-dev
author: Dart Team <misc@dartlang.org>
description: Opinionated, automatic Dart source code formatter.
homepage: https://github.com/dart-lang/dart_style
environment:
sdk: ">=1.8.0 <2.0.0-dev"
dependencies:
analyzer: '^0.30.0'
args: '>=0.12.1 <0.14.0'
path: '>=1.0.0 <2.0.0'
source_span: '>=1.4.0 <2.0.0'
dev_dependencies:
async: '>=1.0.0 <=2.0.0'
browser: '>=0.10.0 <0.11.0'
grinder: '^0.8.0'
js: ^0.6.0
node_preamble: ^1.0.0
pub_semver: '^1.2.3'
scheduled_test: '>=0.12.0 <0.13.0'
test: '>=0.12.0 <0.13.0'
yaml: '^2.0.0'
executables:
dartfmt: format
dartformat: format # Allow the old name for compatibility.
# Tell the bots not to bother building the tests since they don't run in a
# browser anyway.
transformers:
- $dart2js:
$exclude: 'test/**'
|
Add include assets into artifacts | box: tcnksm/gox
dev:
steps:
- internal/watch:
code: |
go build ./...
./source
reload: true
build:
steps:
- setup-go-workspace
- script:
name: show environments
code: |
git version
go version
- script:
name: go get
code: |
go get -t ./...
- script:
name: go test
code: |
go test -v ./...
deploy:
steps:
- setup-go-workspace
- script:
name: install tools
code: |
apt-get update
apt-get install -y zip
curl -L http://stedolan.github.io/jq/download/linux64/jq -o /usr/local/bin/jq
chmod +x /usr/local/bin/jq
- script:
name: go get
code: |
go get ./...
- wercker/gox:
os: darwin linux windows
arch: 386 amd64
output: '{{.Dir}}_{{.OS}}_{{.Arch}}/{{.Dir}}'
dest: $WERCKER_OUTPUT_DIR/pkg
| box: tcnksm/gox
dev:
steps:
- internal/watch:
code: |
go build ./...
./source
reload: true
build:
steps:
- setup-go-workspace
- script:
name: show environments
code: |
git version
go version
- script:
name: go get
code: |
go get -t ./...
- script:
name: go test
code: |
go test -v ./...
deploy:
steps:
- setup-go-workspace
- script:
name: install tools
code: |
apt-get update
apt-get install -y zip
curl -L http://stedolan.github.io/jq/download/linux64/jq -o /usr/local/bin/jq
chmod +x /usr/local/bin/jq
- script:
name: go get
code: |
go get ./...
- wercker/gox:
os: darwin linux windows
arch: 386 amd64
output: '{{.Dir}}_{{.OS}}_{{.Arch}}/{{.Dir}}'
dest: $WERCKER_OUTPUT_DIR/pkg
- script:
name: include assets into artifacts
code: |
for dir in $WERCKER_OUTPUT_DIR/pkg/*; do
if [ -d "$dir" ]; then
cp -R zsh/ "$dir"
cp ghq.txt "$dir/README.txt"
fi
done
|
Extend version with git commit only if it ends with "-ea" | jdk:
- openjdk11
install:
- unset JAVA_TOOL_OPTIONS
- java src/bach/Version.java $(java src/bach/Version.java)-${GIT_COMMIT}
- java src/bach/Bootstrap.java
- source .bach/out/maven-install.sh
| jdk:
- openjdk11
install:
- unset JAVA_TOOL_OPTIONS
- VERSION=$(java src/bach/Version.java)
- if [[ $VERSION =~ ^.*-ea$ ]]; then java src/bach/Version.java ${VERSION}-${GIT_COMMIT}; fi
- java src/bach/Version.java
- java src/bach/Bootstrap.java
- source .bach/out/maven-install.sh
|
Update to use local deployment steps. | box: bamnet/goapp
# Build definition
build:
# The steps that will be executed on build
steps:
# Sets the go workspace and places you package
# at the right place in the workspace tree
- setup-go-workspace
# Gets the dependencies
- script:
name: goapp get
code: |
cd $WERCKER_SOURCE_DIR
echo $PATH
ls /usr/local
ls /usr/local/go_appengine
which goapp
go version
goapp get -t ./...
# Build the project
- script:
name: goapp build
code: |
goapp build ./...
# Test the project
- script:
name: goapp test
code: |
goapp test ./...
deploy:
steps:
- etre0/appengine-deploy-path-update:
email: $APP_ENGINE_USER
password: $APP_ENGINE_PASS
srcpath: . | box: bamnet/goapp
# Build definition
build:
# The steps that will be executed on build
steps:
# Sets the go workspace and places you package
# at the right place in the workspace tree
- setup-go-workspace
# Gets the dependencies
- script:
name: goapp get
code: |
cd $WERCKER_SOURCE_DIR
goapp get -t ./...
# Build the project
- script:
name: goapp build
code: |
goapp build ./...
# Test the project
- script:
name: goapp test
code: |
goapp test ./...
deploy:
steps:
- script:
name: appcfy.py update
code: |
appcfg.py update $WERCKER_SOURCE_DIR --email=$APP_ENGINE_USER -passin < $APP_ENGINE_PASS |
Modify msvs_version from 2013 to 2015 | environment:
matrix:
- nodejs_version: '10'
- nodejs_version: '10'
SPELLCHECKER_PREFER_HUNSPELL: true
- nodejs_version: '8'
- nodejs_version: '6'
install:
- ps: Install-Product node $env:nodejs_version
- npm --msvs_version=2013 install
test_script:
- node --version
- npm --version
- npm test
build: off
| environment:
matrix:
- nodejs_version: '10'
- nodejs_version: '10'
SPELLCHECKER_PREFER_HUNSPELL: true
- nodejs_version: '8'
- nodejs_version: '6'
install:
- ps: Install-Product node $env:nodejs_version
- npm --msvs_version=2015 install
test_script:
- node --version
- npm --version
- npm test
build: off
|
Configure Appveyor to only build master | build: false
environment:
matrix:
- PYTHON: 'C:/Python27'
- PYTHON: 'C:/Python33'
- PYTHON: 'C:/Python34'
init:
- 'ECHO %PYTHON%'
- ps: 'ls C:/Python*'
install:
- ps: (new-object net.webclient).DownloadFile('https://raw.github.com/pypa/pip/master/contrib/get-pip.py', 'C:/get-pip.py')
- '%PYTHON%/python.exe C:/get-pip.py'
- '%PYTHON%/Scripts/pip.exe install -r tests/requirements.txt'
test_script:
- '%PYTHON%/python.exe -c "import nose; nose.main();"'
| build: false
environment:
matrix:
- PYTHON: 'C:/Python27'
- PYTHON: 'C:/Python33'
- PYTHON: 'C:/Python34'
init:
- 'ECHO %PYTHON%'
- ps: 'ls C:/Python*'
install:
- ps: (new-object net.webclient).DownloadFile('https://raw.github.com/pypa/pip/master/contrib/get-pip.py', 'C:/get-pip.py')
- '%PYTHON%/python.exe C:/get-pip.py'
- '%PYTHON%/Scripts/pip.exe install -r tests/requirements.txt'
test_script:
- '%PYTHON%/python.exe -c "import nose; nose.main();"'
branches:
only:
- master
skip_tags: true
notifications:
- provider: Email
to:
- chris+appveyor@chrisdown.name
on_build_success: false
on_build_failure: true
|
Make sure loop kernel module is loaded | ---
- name: setup loopback device in staging environment
become: true
block:
- name: set docker vars
set_fact:
docker_device: /dev/loop0
docker_partition: /dev/mapper/loop0p1
- name: Create loop device
command: "mknod -m640 {{ docker_device }} b 7 0"
args:
creates: "{{ docker_device }}"
- name: Create empty loop file
command: dd if=/dev/zero of=/var/docker_device.img bs=100M count=1
args:
creates: /var/docker_device.img
- name: Check existing association
command: losetup
register: losetup_list
- name: Associate file to device
command: losetup -P /dev/loop0 /var/docker_device.img
when: "'/dev/loop0' not in losetup_list.stdout"
| ---
- name: setup loopback device in staging environment
become: true
block:
- name: set docker vars
set_fact:
docker_device: /dev/loop0
docker_partition: /dev/mapper/loop0p1
- name: Modeprobe loop device
command: "modprobe loop"
- name: Check loop is loaded
shell: "lsmod | grep loop"
register: lsmod_out
- name: Fail if loop is not loaded
fail:
msg: "Loop kernel module is not loaded"
when: "'loop' not in lsmod_out.stdout"
- name: Create loop device
command: "mknod -m640 {{ docker_device }} b 7 0"
args:
creates: "{{ docker_device }}"
- name: Create empty loop file
command: dd if=/dev/zero of=/var/docker_device.img bs=100M count=1
args:
creates: /var/docker_device.img
- name: Check existing association
command: losetup
register: losetup_list
- name: Associate file to device
command: losetup -P /dev/loop0 /var/docker_device.img
when: "'/dev/loop0' not in losetup_list.stdout"
|
Allow and and or for control flow | AllCops:
Exclude:
- 'test/**/*'
- 'examples/*'
- 'tmp/**/*'
- Rakefile
- gir_ffi.gemspec
Lint/AssignmentInCondition:
AllowSafeAssignment: true
Style/Blocks:
Enabled: false
Style/DotPosition:
EnforcedStyle: trailing
Style/LineLength:
Max: 92
Style/MethodDefParentheses:
EnforcedStyle: require_no_parentheses
Style/SignalException:
EnforcedStyle: only_raise
Style/StringLiterals:
Enabled: false
Style/RedundantReturn:
AllowMultipleReturnValues: true
Style/GuardClause:
MinBodyLength: 2
Style/IfUnlessModifier:
MaxLineLength: 60
Style/TrivialAccessors:
ExactNameMatch: true
Style/SingleLineBlockParams:
Enabled: false
Style/Next:
Enabled: false
Style/AccessorMethodName:
Enabled: false
# TODO: See about has_ and have_
Style/PredicateName:
NamePrefixBlacklist:
- is_
# RuboCop has no setting to accept how vim aligns parameters.
Style/AlignParameters:
Enabled: false
# FIXME: GirFFI uses a ffi-something naming scheme.
Style/FileName:
Enabled: false
# TODO: Decide on the desired style (nested or compact)
Style/ClassAndModuleChildren:
Enabled: false
inherit_from: .rubocop_todo.yml
| AllCops:
Exclude:
- 'test/**/*'
- 'examples/*'
- 'tmp/**/*'
- Rakefile
- gir_ffi.gemspec
Lint/AssignmentInCondition:
AllowSafeAssignment: true
Style/Blocks:
Enabled: false
Style/DotPosition:
EnforcedStyle: trailing
Style/LineLength:
Max: 92
Style/MethodDefParentheses:
EnforcedStyle: require_no_parentheses
Style/SignalException:
EnforcedStyle: only_raise
Style/StringLiterals:
Enabled: false
Style/RedundantReturn:
AllowMultipleReturnValues: true
Style/GuardClause:
MinBodyLength: 2
Style/IfUnlessModifier:
MaxLineLength: 60
Style/TrivialAccessors:
ExactNameMatch: true
Style/SingleLineBlockParams:
Enabled: false
Style/Next:
Enabled: false
Style/AccessorMethodName:
Enabled: false
# Support use of and and or for control flow.
Style/AndOr:
Enabled: false
# TODO: See about has_ and have_
Style/PredicateName:
NamePrefixBlacklist:
- is_
# RuboCop has no setting to accept how vim aligns parameters.
Style/AlignParameters:
Enabled: false
# FIXME: GirFFI uses a ffi-something naming scheme.
Style/FileName:
Enabled: false
# TODO: Decide on the desired style (nested or compact)
Style/ClassAndModuleChildren:
Enabled: false
inherit_from: .rubocop_todo.yml
|
Update dependencies of Github Actions | name: PHP Tests
on: [push, pull_request]
jobs:
build:
strategy:
matrix:
operating-system: [ubuntu-20.04]
php-versions: ['7.3', '7.4', '8.0', '8.1']
fail-fast: false
runs-on: ${{ matrix.operating-system }}
steps:
- uses: actions/checkout@v2
- name: Setup PHP
uses: shivammathur/setup-php@v2
with:
php-version: ${{ matrix.php-versions }}
- name: Validate composer.json and composer.lock
run: composer validate
- name: Install dependencies
run: composer install --prefer-dist --no-progress --no-suggest
- name: Run test suite
run: composer run-script test
| name: PHP Tests
on: [push, pull_request]
jobs:
build:
strategy:
matrix:
operating-system: [ubuntu-22.04]
php-versions: ['7.3', '7.4', '8.0', '8.1']
fail-fast: false
runs-on: ${{ matrix.operating-system }}
steps:
- uses: actions/checkout@v3
- name: Setup PHP
uses: shivammathur/setup-php@v2
with:
php-version: ${{ matrix.php-versions }}
- name: Validate composer.json and composer.lock
run: composer validate
- name: Install dependencies
run: composer install --prefer-dist --no-progress --no-suggest
- name: Run test suite
run: composer run-script test
|
Upgrade pre-commit hooks, don't use git:// protocol | repos:
- repo: git://github.com/pre-commit/pre-commit-hooks
rev: v2.2.1
hooks:
- id: check-added-large-files
- id: check-docstring-first
- id: check-merge-conflict
- id: check-yaml
- id: debug-statements
- id: end-of-file-fixer
exclude: CHANGELOG.md
- id: flake8
- id: name-tests-test
- id: requirements-txt-fixer
- id: trailing-whitespace
- repo: git://github.com/Yelp/detect-secrets
rev: v0.12.2
hooks:
- id: detect-secrets
args: ['--baseline', '.secrets.baseline']
exclude: tests/.*
| repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.3.0
hooks:
- id: check-added-large-files
- id: check-docstring-first
- id: check-merge-conflict
- id: check-yaml
- id: debug-statements
- id: end-of-file-fixer
exclude: CHANGELOG.md
- id: flake8
- id: name-tests-test
- id: requirements-txt-fixer
- id: trailing-whitespace
- repo: https://github.com/Yelp/detect-secrets
rev: v1.2.0
hooks:
- id: detect-secrets
args: ['--baseline', '.secrets.baseline']
exclude: tests/.*
|
Remove designated branches for pre-commit auto-merge workflow. | name: pre-commit-ci-auto-merge
on:
workflow_run:
branches: [dev, main]
types: [completed]
workflows: ['tox-pytest']
jobs:
bot-auto-merge:
name: Auto-merge passing pre-commit-ci PRs
runs-on: ubuntu-latest
steps:
- name: Auto-merge passing pre-commit-ci PRs
if: ${{ github.event.workflow_run.conclusion == 'success' }}
uses: ridedott/merge-me-action@v2
with:
GITHUB_LOGIN: pre-commit-ci
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
| name: pre-commit-ci-auto-merge
on:
workflow_run:
types: [completed]
workflows: ['tox-pytest']
jobs:
bot-auto-merge:
name: Auto-merge passing pre-commit-ci PRs
runs-on: ubuntu-latest
steps:
- name: Auto-merge passing pre-commit-ci PRs
if: ${{ github.event.workflow_run.conclusion == 'success' }}
uses: ridedott/merge-me-action@v2
with:
GITHUB_LOGIN: pre-commit-ci
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
Add OSX build for Perl Sub::Exporter::Progressive | package:
name: perl-sub-exporter-progressive
version: "0.001011"
source:
fn: Sub-Exporter-Progressive-0.001011.tar.gz
url: http://cpan.metacpan.org/authors/id/F/FR/FREW/Sub-Exporter-Progressive-0.001011.tar.gz
md5: bb50b3ba1538902b197c04818a84230a
build:
number: 0
skip: True # [osx]
requirements:
build:
- perl-threaded
run:
- perl-threaded
test:
# Perl 'use' tests
imports:
- Sub::Exporter::Progressive
about:
home: http://search.cpan.org/dist/Sub-Exporter-Progressive/
license: perl_5
summary: 'Only use Sub::Exporter if you need it'
| package:
name: perl-sub-exporter-progressive
version: "0.001011"
source:
fn: Sub-Exporter-Progressive-0.001011.tar.gz
url: http://cpan.metacpan.org/authors/id/F/FR/FREW/Sub-Exporter-Progressive-0.001011.tar.gz
md5: bb50b3ba1538902b197c04818a84230a
build:
number: 1
requirements:
build:
- perl-threaded
run:
- perl-threaded
test:
# Perl 'use' tests
imports:
- Sub::Exporter::Progressive
about:
home: http://search.cpan.org/dist/Sub-Exporter-Progressive/
license: perl_5
summary: 'Only use Sub::Exporter if you need it'
|
Add new commands to help text | name: dynmap
main: org.dynmap.DynmapPlugin
version: "0.20"
authors: [FrozenCow, mikeprimm, zeeZ]
softdepend: [Permissions]
commands:
dynmap:
description: Controls Dynmap.
usage: |
/<command> hide - hides the player from the map.
/<command> hide TheDude - hides the player 'TheDude' from the map.
/<command> show - shows the player from the map.
/<command> show TheDude - shows the player 'TheDude' from the map.
/<command> render - Renders the tile at your location.
/<command> fullrender - (Attempts to) render entire world from your location.
/<command> fullrender world - (Attempts to) render entire world 'world'.
/<command> stats - Show render statistics.
/<command> stats world - Show render statistics for maps on world 'world'.
/<command> resetstats - Reset render statistics.
/<command> resetstats world - Reset render statistics for maps on world 'world'.
| name: dynmap
main: org.dynmap.DynmapPlugin
version: "0.20"
authors: [FrozenCow, mikeprimm, zeeZ]
softdepend: [Permissions]
commands:
dynmap:
description: Controls Dynmap.
usage: |
/<command> hide - hides the player from the map.
/<command> hide TheDude - hides the player 'TheDude' from the map.
/<command> show - shows the player from the map.
/<command> show TheDude - shows the player 'TheDude' from the map.
/<command> render - Renders the tile at your location.
/<command> fullrender - (Attempts to) render entire world from your location.
/<command> fullrender world - (Attempts to) render entire world 'world'.
/<command> radiusrender ## - (Attempts to) render at least ## block radius from your location.
/<command> cancelrender - Cancels any active renders on current world
/<command> cancelrender world - Cancels any active renders of world 'world'
/<command> stats - Show render statistics.
/<command> stats world - Show render statistics for maps on world 'world'.
/<command> resetstats - Reset render statistics.
/<command> resetstats world - Reset render statistics for maps on world 'world'.
|
Disable docker test for Fedora, due to broken packaging | - hosts: testhost
gather_facts: True
roles:
# In destructive because it creates and removes a user
- { role: test_sudo, tags: test_sudo}
#- { role: test_su, tags: test_su} # wait till su support is added to local connection, needs tty
- { role: test_become, tags: test_become}
- { role: test_service, tags: test_service }
# Current pip unconditionally uses md5. We can re-enable if pip switches
# to a different hash or allows us to not check md5
- { role: test_pip, tags: test_pip, when: ansible_fips != True }
- { role: test_gem, tags: test_gem }
- { role: test_yum, tags: test_yum }
- { role: test_apt, tags: test_apt }
- { role: test_apt_repository, tags: test_apt_repository }
- { role: test_postgresql, tags: test_postgresql}
- { role: test_mysql_db, tags: test_mysql_db}
- { role: test_mysql_user, tags: test_mysql_user}
- { role: test_mysql_variables, tags: test_mysql_variables}
- { role: test_docker, tags: test_docker}
- { role: test_zypper, tags: test_zypper}
| - hosts: testhost
gather_facts: True
roles:
# In destructive because it creates and removes a user
- { role: test_sudo, tags: test_sudo}
#- { role: test_su, tags: test_su} # wait till su support is added to local connection, needs tty
- { role: test_become, tags: test_become}
- { role: test_service, tags: test_service }
# Current pip unconditionally uses md5. We can re-enable if pip switches
# to a different hash or allows us to not check md5
- { role: test_pip, tags: test_pip, when: ansible_fips != True }
- { role: test_gem, tags: test_gem }
- { role: test_yum, tags: test_yum }
- { role: test_apt, tags: test_apt }
- { role: test_apt_repository, tags: test_apt_repository }
- { role: test_postgresql, tags: test_postgresql}
- { role: test_mysql_db, tags: test_mysql_db}
- { role: test_mysql_user, tags: test_mysql_user}
- { role: test_mysql_variables, tags: test_mysql_variables}
- { role: test_docker, tags: test_docker, when: ansible_distribution != "Fedora" }
- { role: test_zypper, tags: test_zypper}
|
Use poetry to run tox | name: CI
on:
# Triggers the workflow on push or pull request events but only for the master branch
push:
branches: [ master ]
pull_request:
branches: [ master ]
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
jobs:
test_pyenv:
runs-on: ubuntu-latest
name: install pyenv
steps:
- name: setup pyenv
uses: gabrielfalcao/pyenv-action@v7
with:
default: 3.9.0
versions: 3.6.12, 3.7.9, 3.8.6, 3.9.0
command: |
pip install -U pip
pip install -U poetry
- name: Run tox
run: tox
| name: CI
on:
# Triggers the workflow on push or pull request events but only for the master branch
push:
branches: [ master ]
pull_request:
branches: [ master ]
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
jobs:
test_pyenv:
runs-on: ubuntu-latest
name: install pyenv
steps:
- name: setup pyenv
uses: gabrielfalcao/pyenv-action@v7
with:
default: 3.9.0
versions: 3.6.12, 3.7.9, 3.8.6, 3.9.0
command: |
pip install -U pip
pip install -U poetry
- name: Run tox
run: poetry run tox
|
Remove Node 8 from CI, add Node 14 | name: Tests
on: [push]
jobs:
build:
runs-on: ubuntu-latest
strategy:
matrix:
node-version: ['8', '10', '12']
steps:
- uses: actions/checkout@v1
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v1
with:
node-version: ${{ matrix.node-version }}
- name: yarn install, lint, and test
run: |
yarn install
yarn lint
yarn test --coverage
env:
CI: true
- run: cat ${GITHUB_EVENT_PATH}
- name: Upload coverage report
run: |
bash <(curl -s https://codecov.io/bash) \
-t "${{ secrets.CODECOV_TOKEN }}" \
-F node_${{ matrix.node-version }} \
-B "${GITHUB_REF//refs\/heads\//}" \
-T "${GITHUB_REF//refs\/tags\//}" \
-C "$GITHUB_SHA"
| name: Tests
on: [push]
jobs:
build:
runs-on: ubuntu-latest
strategy:
matrix:
node-version: ['10', '12', '14']
steps:
- uses: actions/checkout@v1
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v1
with:
node-version: ${{ matrix.node-version }}
- name: yarn install, lint, and test
run: |
yarn install
yarn lint
yarn test --coverage
env:
CI: true
- run: cat ${GITHUB_EVENT_PATH}
- name: Upload coverage report
run: |
bash <(curl -s https://codecov.io/bash) \
-t "${{ secrets.CODECOV_TOKEN }}" \
-F node_${{ matrix.node-version }} \
-B "${GITHUB_REF//refs\/heads\//}" \
-T "${GITHUB_REF//refs\/tags\//}" \
-C "$GITHUB_SHA"
|
Update PyPy versions in CI | name: Test
on:
pull_request:
push:
schedule:
- cron: '0 6 * * *'
jobs:
test:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
python-version:
- '3.6'
- '3.7'
- '3.8'
- '3.9'
- '3.10'
- 'pypy3'
toxenv: [py]
include:
- python-version: '3.6'
toxenv: lint
- python-version: '3.6'
toxenv: typing
steps:
- name: Check out repository
uses: actions/checkout@v2
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
python -m pip install --upgrade pip wheel
python -m pip install --upgrade --upgrade-strategy=eager coverage tox
- name: Run tests
run: tox -e ${{ matrix.toxenv }}
- name: Generate XML coverage report
if: matrix.toxenv == 'py'
run: coverage xml
- name: Upload coverage to Codecov
if: matrix.toxenv == 'py'
uses: codecov/codecov-action@v2
with:
fail_ci_if_error: false
# vim:set et sts=2:
| name: Test
on:
pull_request:
push:
schedule:
- cron: '0 6 * * *'
jobs:
test:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
python-version:
- '3.6'
- '3.7'
- '3.8'
- '3.9'
- '3.10'
- 'pypy-3.6'
- 'pypy-3.7'
- 'pypy-3.8'
toxenv: [py]
include:
- python-version: '3.6'
toxenv: lint
- python-version: '3.6'
toxenv: typing
steps:
- name: Check out repository
uses: actions/checkout@v2
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
python -m pip install --upgrade pip wheel
python -m pip install --upgrade --upgrade-strategy=eager coverage tox
- name: Run tests
run: tox -e ${{ matrix.toxenv }}
- name: Generate XML coverage report
if: matrix.toxenv == 'py'
run: coverage xml
- name: Upload coverage to Codecov
if: matrix.toxenv == 'py'
uses: codecov/codecov-action@v2
with:
fail_ci_if_error: false
# vim:set et sts=2:
|
Use the rbd mirror client keyring | ---
- name: enable mirroring on the pool
command: "{{ container_exec_cmd | default('') }} rbd --cluster {{ cluster }} mirror pool enable {{ ceph_rbd_mirror_pool }} {{ ceph_rbd_mirror_mode }}"
changed_when: false
- name: add a mirroring peer
command: "{{ container_exec_cmd | default('') }} rbd --cluster {{ cluster }} mirror pool peer add {{ ceph_rbd_mirror_pool }} {{ ceph_rbd_mirror_remote_user }}@{{ ceph_rbd_mirror_remote_cluster }}"
changed_when: false
failed_when: false
| ---
- name: enable mirroring on the pool
command: "{{ container_exec_cmd | default('') }} rbd --cluster {{ cluster }} --keyring /etc/ceph/{{ cluster }}.client.rbd-mirror.{{ ansible_hostname }}.keyring --name client.rbd-mirror.{{ ansible_hostname }} mirror pool enable {{ ceph_rbd_mirror_pool }} {{ ceph_rbd_mirror_mode }}"
changed_when: false
- name: add a mirroring peer
command: "{{ container_exec_cmd | default('') }} rbd --cluster {{ cluster }} --keyring /etc/ceph/{{ cluster }}.client.rbd-mirror.{{ ansible_hostname }}.keyring --name client.rbd-mirror.{{ ansible_hostname }} mirror pool peer add {{ ceph_rbd_mirror_pool }} {{ ceph_rbd_mirror_remote_user }}@{{ ceph_rbd_mirror_remote_cluster }}"
changed_when: false
failed_when: false
|
Update from Forestry.io - Updated Forestry configuration | ---
label: Gallery
hide_body: true
fields:
- name: photo
type: field_group_list
fields:
- name: file
type: file
config:
maxSize: 250
label: File
- name: description
type: text
config:
required: false
label: Description
config:
min:
max:
labelField:
label: Photo
pages:
- data/photos/soltek_eq3401/sp94_guide_montage.yml
| ---
label: Gallery
hide_body: true
fields:
- name: photo
type: field_group_list
fields:
- name: file
type: file
config:
maxSize: 250
label: File
- name: description
type: text
config:
required: false
label: Description
config:
min:
max:
labelField: description
label: Photo
pages:
- data/photos/soltek_eq3401/sp94_guide_montage.yml
|
Make Docker builder home a named volume | version: "3"
services:
hades:
build:
context: ./docker
args:
BUILDER_UID: ${UID:-1000}
BUILDER_GID: ${GID:-1000}
image: hades
container_name: hades
network_mode: bridge
tmpfs:
- /tmp
- /run
cap_add:
- SYS_ADMIN
- NET_ADMIN
security_opt:
- seccomp:unconfined
stdin_open: false
tty: false
stop_signal: SIGRTMIN+3
volumes:
- /sys/fs/cgroup:/sys/fs/cgroup:ro
- ../hades-build:/build
- ./:/build/hades
| version: "3"
services:
hades:
build:
context: ./docker
args:
BUILDER_UID: ${UID:-1000}
BUILDER_GID: ${GID:-1000}
image: hades
container_name: hades
network_mode: bridge
tmpfs:
- /tmp
- /run
cap_add:
- SYS_ADMIN
- NET_ADMIN
security_opt:
- seccomp:unconfined
stdin_open: false
tty: false
stop_signal: SIGRTMIN+3
volumes:
- /sys/fs/cgroup:/sys/fs/cgroup:ro
- "home:/build"
- ./:/build/hades
volumes:
home:
|
Update expired nuget API key | # ------------------------------------------------------------------------------
# <auto-generated>
#
# This code was generated.
#
# - To turn off auto-generation set:
#
# [AppVeyor (AutoGenerate = false)]
#
# - To trigger manual generation invoke:
#
# nuke --generate-configuration AppVeyor --host AppVeyor
#
# </auto-generated>
# ------------------------------------------------------------------------------
image:
- Visual Studio 2019
#-----
environment:
# Update Java for sonar scanner, cf.:
# - https://docs.sonarqube.org/latest/setup/upgrade-notes/
# - https://www.appveyor.com/docs/windows-images-software/#java
JAVA_HOME: "C:\\Program Files\\Java\\jdk11"
#--- https://ci.appveyor.com/tools/encrypt
SONAR_LOGIN:
secure: BNJM0k+NLqZbdMn08BK3wsjO8Hy7kbpRjsQr8VAhl98bmbLs75+BW457TVIJi5d3
NUGET_API_KEY:
secure: 1gZfYXDnZKFT1JddFudQ6YWtaY3b5dd9TRyGgMWAvDcaemYinhdGIzRa21VKSGx5
#-----
build_script:
- cmd: .\build.cmd CiBuild
- sh: ./build.cmd CiBuild
artifacts:
- path: .artifacts/*.nupkg
| # ------------------------------------------------------------------------------
# <auto-generated>
#
# This code was generated.
#
# - To turn off auto-generation set:
#
# [AppVeyor (AutoGenerate = false)]
#
# - To trigger manual generation invoke:
#
# nuke --generate-configuration AppVeyor --host AppVeyor
#
# </auto-generated>
# ------------------------------------------------------------------------------
image:
- Visual Studio 2019
#-----
environment:
# Update Java for sonar scanner, cf.:
# - https://docs.sonarqube.org/latest/setup/upgrade-notes/
# - https://www.appveyor.com/docs/windows-images-software/#java
JAVA_HOME: "C:\\Program Files\\Java\\jdk11"
#--- https://ci.appveyor.com/tools/encrypt
SONAR_LOGIN:
secure: BNJM0k+NLqZbdMn08BK3wsjO8Hy7kbpRjsQr8VAhl98bmbLs75+BW457TVIJi5d3
NUGET_API_KEY:
secure: +g3nI05zuZ/hNuLtGc057QXU7wIfjMdiU41RJUZ34ZDUQ+MAlMxxEdR4mVU5RoLh
#-----
build_script:
- cmd: .\build.cmd CiBuild
- sh: ./build.cmd CiBuild
artifacts:
- path: .artifacts/*.nupkg
|
Fix running notebooks on AppVeyor. | clone_depth: 5
environment:
CONDA_PATH: "C:\\Miniconda3"
matrix:
- PYTHON_VERSION: "3.4"
#- PYTHON_VERSION: "3.5"
platform:
- x86
- x64
install:
# Use the pre-installed Miniconda for the desired arch
- ps: if($env:PYTHON_VERSION -eq '3.5')
{ $env:CONDA_PATH="$($env:CONDA_PATH)5" }
- ps: if($env:TARGET_ARCH -eq 'x64')
{ $env:CONDA_PATH="$($env:CONDA_PATH)-x64" }
- ps: $env:path="$($env:CONDA_PATH);$($env:CONDA_PATH)\Scripts;$($env:CONDA_PATH)\Library\bin;C:\cygwin\bin;$($env:PATH)"
- cmd: conda config --set always_yes yes --set changeps1 no
- cmd: conda update -q conda
# Useful for debugging any issues with conda
- cmd: conda info -a
- cmd: sed -i -e s/python=3/python=%PYTHON_VERSION%/ environment.yml
- cmd: conda env create -q -f environment.yml
- cmd: activate unidata-workshop
- cmd: python --version
- cmd: conda list
# Skip .NET project specific build phase.
build: off
test_script:
- cmd: find . -name "*.ipynb" -print0 | xargs -0 -n1 jupyter nbconvert --execute --ExecutePreprocessor.timeout=120 --inplace
| clone_depth: 5
environment:
CONDA_PATH: "C:\\Miniconda3"
matrix:
- PYTHON_VERSION: "3.4"
#- PYTHON_VERSION: "3.5"
platform:
- x86
- x64
install:
# Use the pre-installed Miniconda for the desired arch
- ps: if($env:PYTHON_VERSION -eq '3.5')
{ $env:CONDA_PATH="$($env:CONDA_PATH)5" }
- ps: if($env:TARGET_ARCH -eq 'x64')
{ $env:CONDA_PATH="$($env:CONDA_PATH)-x64" }
- ps: $env:path="$($env:CONDA_PATH);$($env:CONDA_PATH)\Scripts;$($env:CONDA_PATH)\Library\bin;C:\cygwin\bin;$($env:PATH)"
- cmd: conda config --set always_yes yes --set changeps1 no
- cmd: conda update -q conda
# Useful for debugging any issues with conda
- cmd: conda info -a
- cmd: sed -i -e s/python=3/python=%PYTHON_VERSION%/ environment.yml
- cmd: conda env create -q -f environment.yml
- cmd: activate unidata-workshop
- cmd: python --version
- cmd: conda list
# Skip .NET project specific build phase.
build: off
test_script:
- cmd: cd notebooks
- cmd: find . -name "*.ipynb" -print0 | xargs -0 -n1 jupyter nbconvert --execute --ExecutePreprocessor.timeout=120 --inplace
|
Update KFZ-Kennzeichen to 1.0.1 (10001) | Categories:
- Science & Education
- Reading
License: GPL-3.0-only
AuthorName: Lukas Sökefeld
AuthorWebSite: https://gitlab.com/LukeSoftware/kfz-kennzeichen
WebSite: https://gitlab.com/LukeSoftware/kfz-kennzeichen
SourceCode: https://gitlab.com/LukeSoftware/kfz-kennzeichen
IssueTracker: https://gitlab.com/LukeSoftware/kfz-kennzeichen/issues
AutoName: KFZ-Kennzeichen
RepoType: git
Repo: https://gitlab.com/LukeSoftware/kfz-kennzeichen
Builds:
- versionName: 1.0.0
versionCode: 10000
commit: v1.0.0
subdir: platforms/android/app
gradle:
- yes
MaintainerNotes: This is a Cordova app, built from source.
AutoUpdateMode: Version v%v
UpdateCheckMode: Tags v.*[0-9]$
CurrentVersion: 1.0.0
CurrentVersionCode: 10000
| Categories:
- Science & Education
- Reading
License: GPL-3.0-only
AuthorName: Lukas Sökefeld
AuthorWebSite: https://gitlab.com/LukeSoftware/kfz-kennzeichen
WebSite: https://gitlab.com/LukeSoftware/kfz-kennzeichen
SourceCode: https://gitlab.com/LukeSoftware/kfz-kennzeichen
IssueTracker: https://gitlab.com/LukeSoftware/kfz-kennzeichen/issues
AutoName: KFZ-Kennzeichen
RepoType: git
Repo: https://gitlab.com/LukeSoftware/kfz-kennzeichen
Builds:
- versionName: 1.0.0
versionCode: 10000
commit: v1.0.0
subdir: platforms/android/app
gradle:
- yes
- versionName: 1.0.1
versionCode: 10001
commit: v1.0.1
subdir: platforms/android/app
gradle:
- yes
MaintainerNotes: This is a Cordova app, built from source.
AutoUpdateMode: Version v%v
UpdateCheckMode: Tags v.*[0-9]$
CurrentVersion: 1.0.1
CurrentVersionCode: 10001
|
Disable Docker cookbook acceptance tests until the cookbook or Chef is fixed. | suites:
- name: docker-default
attributes:
docker:
version: 1.10.0
run_list:
- recipe[apt]
- recipe[apt-docker]
- recipe[docker_test::installation_package]
- recipe[docker_test::service_upstart]
- recipe[docker_test::auto]
includes: [ubuntu-14.04]
| suites:
- name: docker-default
attributes:
docker:
version: 1.10.0
run_list:
- recipe[apt]
- recipe[apt-docker]
includes: [ubuntu-14.04]
|
Update from Hackage at 2019-03-05T18:48:24Z | homepage: ''
changelog-type: ''
hash: 6d4850b9a6497fa75fb26f7ee7a077b2d7037388677ab700ec7b0806d9ff2a09
test-bench-deps: {}
maintainer: strake888@gmail.com
synopsis: Utilities
changelog: ''
basic-deps:
base: ! '>=4.9 && <5'
all-versions:
- 0.1.0.0
- 0.1.1.0
- 0.1.2.0
- 0.1.2.1
- 0.1.3.0
- 0.1.4.0
- 0.1.5.0
- 0.1.6.0
- 0.1.7.0
- 0.1.8.0
- 0.1.9.0
- 0.1.10.0
- 0.1.10.1
- 0.1.11.0
author: M Farkas-Dyck
latest: 0.1.11.0
description-type: haddock
description: ''
license-name: BSD-3-Clause
| homepage: ''
changelog-type: ''
hash: 40b96869005cd4fe26406315451d0c2f949a5a2ffa8f5a85692e55968133a81f
test-bench-deps: {}
maintainer: strake888@gmail.com
synopsis: Utilities
changelog: ''
basic-deps:
base: ! '>=4.9 && <5'
all-versions:
- 0.1.0.0
- 0.1.1.0
- 0.1.2.0
- 0.1.2.1
- 0.1.3.0
- 0.1.4.0
- 0.1.5.0
- 0.1.6.0
- 0.1.7.0
- 0.1.8.0
- 0.1.9.0
- 0.1.10.0
- 0.1.10.1
- 0.1.11.0
- 0.1.12.0
author: M Farkas-Dyck
latest: 0.1.12.0
description-type: haddock
description: ''
license-name: BSD-3-Clause
|
Add some translations for bank account. | # German locale
de:
activerecord:
models:
account: Konto
bank_account: Bankkonto
bank: Bank
account_type: Kontentyp
booking: Buchung
attributes:
account:
code: Nr.
title: Titel
saldo: Saldo
account_type: Kontentyp
bank_account:
number: Kontokorrent Nr.
bank:
clearing: Clearing-Nr.
swift: SWIFT-Nr.
account_type:
name: Name
title: Titel
booking:
code: Belegnr.
value_date: Valutadatum
title: Titel
amount: Betrag
comments: Bemerkungen
credit_account: Soll
debit_account: Haben
reference: Referenz
reference_type: Referenz Typ
reference_id: Referenz ID
text: Text
date: Datum
balance: Saldo
edit: Editieren
destroy: Löschen
valuta: Valuta
value: Betrag
# Title customizations
account:
index:
title: Konti
bank_account:
index:
title: Bankkonti
bank:
index:
title: Banken
account_type:
index:
title: Kontentypen
booking:
index:
title: Buchungsjournal
| # German locale
de:
activerecord:
models:
account: Konto
bank_account: Bankkonto
bank: Bank
account_type: Kontentyp
booking: Buchung
attributes:
account:
code: Nr.
title: Titel
saldo: Saldo
account_type: Kontentyp
bank_account:
number: Kontokorrent-Nr.
iban: IBAN
pc_id: VESR-Teilnehmer-Nr.
esr_id: VESR-Identifikations-Nr.
bank:
clearing: Clearing-Nr.
swift: SWIFT-Nr.
account_type:
name: Name
title: Titel
booking:
code: Belegnr.
value_date: Valutadatum
title: Titel
amount: Betrag
comments: Bemerkungen
credit_account: Soll
debit_account: Haben
reference: Referenz
reference_type: Referenz Typ
reference_id: Referenz ID
text: Text
date: Datum
balance: Saldo
edit: Editieren
destroy: Löschen
valuta: Valuta
value: Betrag
# Title customizations
account:
index:
title: Konti
bank_account:
index:
title: Bankkonti
bank:
index:
title: Banken
account_type:
index:
title: Kontentypen
booking:
index:
title: Buchungsjournal
|
Use powershell to execute test script | name: Build Pipeline
on:
push:
branches: [ master, setup-github-actions ]
jobs:
build:
name: "Build"
runs-on: ubuntu-20.04
environment: "Build Environment"
steps:
- name: Checkout branch
uses: actions/checkout@v3
- name: Setup dotnet
uses: actions/setup-dotnet@v2
with:
dotnet-version: |
6.0.x
3.1.x
- name: Build and pack
run: dotnet pack -c Release --version-suffix "${{ github.run_number }}" -o "./artifacts"
test:
name: "Unit tests"
runs-on: ubuntu-20.04
# Do not run this job until the build job completes
needs: build
environment: "Unit test environment"
steps:
- name: Checkout branch
uses: actions/checkout@master
- name: Execute unit test script
run: scripts/test.ps1
publish:
name: "Publish and release"
runs-on: ubuntu-20.04
needs: test
environment: "Nuget deployment environment"
steps:
- name: Publish package to Nuget
run: echo "would publish to nuget here. need to set up conditions so that only release tags are published." #dotnet nuget push */bin/Release/*.nupkg -k ${{ secrets.NUGET_TOKEN }} -s "https://api.nuget.org/v3/index.json"
- name: Create Github release
run: echo "would create github release here"
| name: Build Pipeline
on:
push:
branches: [ master, setup-github-actions ]
jobs:
build:
name: "Build"
runs-on: ubuntu-20.04
environment: "Build Environment"
steps:
- name: Checkout branch
uses: actions/checkout@v3
- name: Setup dotnet
uses: actions/setup-dotnet@v2
with:
dotnet-version: |
6.0.x
3.1.x
- name: Build and pack
run: dotnet pack -c Release --version-suffix "${{ github.run_number }}" -o "./artifacts"
test:
name: "Unit tests"
runs-on: ubuntu-20.04
# Do not run this job until the build job completes
needs: build
environment: "Unit test environment"
steps:
- name: Checkout branch
uses: actions/checkout@master
- name: Execute unit test script
run: scripts/test.ps1
shell: pwsh
publish:
name: "Publish and release"
runs-on: ubuntu-20.04
needs: test
environment: "Nuget deployment environment"
steps:
- name: Publish package to Nuget
run: echo "would publish to nuget here. need to set up conditions so that only release tags are published." #dotnet nuget push */bin/Release/*.nupkg -k ${{ secrets.NUGET_TOKEN }} -s "https://api.nuget.org/v3/index.json"
- name: Create Github release
run: echo "would create github release here"
|
Revert "임시로 healthcheck 주기 증량" | name: Run health check
on:
schedule:
- cron: '*/5 * * * *'
jobs:
install:
name: Install
runs-on: ubuntu-latest
steps:
- name: Only master
uses: actions/bin/filter@25b7b846d5027eac3315b50a8055ea675e2abd89
with:
args: branch master
- uses: actions/checkout@master
- name: Install
uses: actions/npm@59b64a598378f31e49cb76f27d6f3312b582f680
with:
args: install
- name: Run health check
uses: ./actions/health-check
- name: Push health check
uses: ./actions/push-health-check
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
| name: Run health check
on:
schedule:
- cron: 0 0 * * *
jobs:
install:
name: Install
runs-on: ubuntu-latest
steps:
- name: Only master
uses: actions/bin/filter@25b7b846d5027eac3315b50a8055ea675e2abd89
with:
args: branch master
- uses: actions/checkout@master
- name: Install
uses: actions/npm@59b64a598378f31e49cb76f27d6f3312b582f680
with:
args: install
- name: Run health check
uses: ./actions/health-check
- name: Push health check
uses: ./actions/push-health-check
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
Add qcm configuration for twig | qcm_core:
driver: doctrine/orm
user_class: Qcm\Bundle\PublicBundle\Entity\User
classes:
security:
model: Qcm\Bundle\PublicBundle\Entity\User
user:
model: Qcm\Bundle\PublicBundle\Entity\User
category:
model: Qcm\Bundle\PublicBundle\Entity\Category
question:
model: Qcm\Bundle\PublicBundle\Entity\Question
answer:
model: Qcm\Bundle\PublicBundle\Entity\Answer
| qcm_core:
driver: doctrine/orm
user_class: Qcm\Bundle\PublicBundle\Entity\User
classes:
security:
model: Qcm\Bundle\PublicBundle\Entity\User
user:
model: Qcm\Bundle\PublicBundle\Entity\User
category:
model: Qcm\Bundle\PublicBundle\Entity\Category
question:
model: Qcm\Bundle\PublicBundle\Entity\Question
answer:
model: Qcm\Bundle\PublicBundle\Entity\Answer
twig:
globals:
qcm_configuration: "@qcm.configuration"
|
Update from Hackage at 2018-07-15T17:58:12Z | homepage: ''
changelog-type: ''
hash: 8cf3c9fc1a650a0c15e24fcb8d79aeb7e50a82699a25e013bedd2e54a64c609f
test-bench-deps: {}
maintainer: Francesco Ariis <fa-ml@ariis.it>
synopsis: Binding to libSDL_gfx
changelog: ''
basic-deps:
base: ! '>=3 && <5'
SDL: -any
all-versions:
- '0.4.0'
- '0.5.2'
- '0.5.3'
- '0.6.0'
- '0.6.0.1'
- '0.6.0.2'
author: Lemmih (lemmih@gmail.com)
latest: '0.6.0.2'
description-type: text
description: ! "This package contains Haskell bindings to libSDL_gfx >= 2.0.\n\nGlobal
installation:\n runhaskell Setup.lhs configure\n runhaskell Setup.lhs build\n
\ runhaskell Setup.lhs install # as root\n\nLocal installation:\n runhaskell Setup.lhs
configure --prefix=[HOME]/usr --user\n runhaskell Setup.lhs build\n runhaskell
Setup.lhs install --user # not as root\n"
license-name: BSD3
| homepage: ''
changelog-type: ''
hash: 40c0c55b71691f351655ff3fb47123a7c9339444553bf9d717b9861a2e26ffff
test-bench-deps: {}
maintainer: Francesco Ariis <fa-ml@ariis.it>
synopsis: Binding to libSDL_gfx
changelog: ''
basic-deps:
base: ! '>=3 && <5'
SDL: -any
all-versions:
- '0.4.0'
- '0.5.2'
- '0.5.3'
- '0.6.0'
- '0.6.0.1'
- '0.6.0.2'
- '0.6.1.0'
author: Lemmih (lemmih@gmail.com)
latest: '0.6.1.0'
description-type: text
description: ! "This package contains Haskell bindings to libSDL_gfx >= 2.0.\n\nGlobal
installation:\n runhaskell Setup.lhs configure\n runhaskell Setup.lhs build\n
\ runhaskell Setup.lhs install # as root\n\nLocal installation:\n runhaskell Setup.lhs
configure --prefix=[HOME]/usr --user\n runhaskell Setup.lhs build\n runhaskell
Setup.lhs install --user # not as root\n"
license-name: BSD3
|
Install from apt needs root privilege. | ---
# tasks file for ansible
- name: Install requirements for Ansible from apt
apt: name=python-dev update_cache=yes
- name: Install Ansible
pip: name=ansible
| ---
# tasks file for ansible
- name: Install requirements for Ansible from apt
apt: name=python-dev update_cache=yes
become: yes
- name: Install Ansible
pip: name=ansible
|
Update from Hackage at 2017-06-01T09:48:30Z | homepage: http://github.com/quchen/prettyprinter
changelog-type: ''
hash: e62f6303842be0c8b2e966765d965f45fe9778c96619f6f1a9e3b27b62d5cc37
test-bench-deps: {}
maintainer: David Luposchainsky <dluposchainsky at google>
synopsis: Prettyprinter compatibility module for previous users of the wl-pprint package.
changelog: ''
basic-deps:
base: ! '>=4.7 && <5'
text: ==1.2.*
prettyprinter: <1.1
all-versions:
- '1'
- '1.0.0.1'
author: Daan Leijen, Noam Lewis, David Luposchainsky
latest: '1.0.0.1'
description-type: markdown
description: ! 'wl-pprint compatibility package
===============================
This package defines a compatibility layer between the old `wl-pprint` package,
and the new `prettyprinter` package.
This allows easily transitioning dependent packages from the old to the new
package, by simply replacing `wl-pprint` with `prettyprinter-compat-wl-pprint`
in the `.cabal` file.
Note that this package is **only for transitional purposes**, and therefore
deprecated and wholly undocumented. For new development, use the current version
of `prettyprinter`.
'
license-name: BSD2
| homepage: http://github.com/quchen/prettyprinter
changelog-type: ''
hash: e62f6303842be0c8b2e966765d965f45fe9778c96619f6f1a9e3b27b62d5cc37
test-bench-deps: {}
maintainer: David Luposchainsky <dluposchainsky at google>
synopsis: Prettyprinter compatibility module for previous users of the wl-pprint package.
changelog: ''
basic-deps:
base: ! '>=4.7 && <5'
text: ==1.2.*
prettyprinter: <1.1
all-versions:
- '1.0.0.1'
author: Daan Leijen, Noam Lewis, David Luposchainsky
latest: '1.0.0.1'
description-type: markdown
description: ! 'wl-pprint compatibility package
===============================
This package defines a compatibility layer between the old `wl-pprint` package,
and the new `prettyprinter` package.
This allows easily transitioning dependent packages from the old to the new
package, by simply replacing `wl-pprint` with `prettyprinter-compat-wl-pprint`
in the `.cabal` file.
Note that this package is **only for transitional purposes**, and therefore
deprecated and wholly undocumented. For new development, use the current version
of `prettyprinter`.
'
license-name: BSD2
|
Disable link check for vcpython link since it always erroneously fails | title: DataStax Python Driver for Apache Cassandra
summary: DataStax Python Driver for Apache Cassandra Documentation
output: docs/_build/
sections:
- title: N/A
prefix: /
type: sphinx
directory: docs
versions:
- name: 3.7
ref: 3.7-doc
- name: 3.6
ref: 3.6-doc
- name: 3.5
ref: 3.5-doc
-redirects:
- - \A\/(.*)/\Z: /\1.html
| title: DataStax Python Driver for Apache Cassandra
summary: DataStax Python Driver for Apache Cassandra Documentation
output: docs/_build/
checks:
external_links:
exclude:
- 'http://aka.ms/vcpython27'
sections:
- title: N/A
prefix: /
type: sphinx
directory: docs
versions:
- name: 3.7
ref: 3.7-doc
- name: 3.6
ref: 3.6-doc
- name: 3.5
ref: 3.5-doc
-redirects:
- - \A\/(.*)/\Z: /\1.html
|
Revert "add AppVeyor project version patching" | image: Visual Studio 2017
branches:
only:
- master
- develop
skip_commits:
files:
- '**/*.md'
build_script:
- ps: .\build.ps1
dotnet_csproj:
patch: true
file: '**\*.csproj'
version: '{version}'
package_version: '{version}'
test: off
artifacts:
- path: .\Build\Bin\net35
name: UnityFx.Async-net35
- path: .\Build\Bin\net46
name: UnityFx.Async-net46
deploy:
- provider: GitHub
auth_token:
secure: aTd8m7PLWUlE3iacD7bx9oWPW/CK0+BTE7/nhgB2EXk8iZ3FJ/TJX5kKLIYP76nN
force_update: false
on:
branch: master
| image: Visual Studio 2017
branches:
only:
- master
- develop
skip_commits:
files:
- '**/*.md'
build_script:
- ps: .\build.ps1
test: off
artifacts:
- path: .\Build\Bin\net35
name: UnityFx.Async-net35
- path: .\Build\Bin\net46
name: UnityFx.Async-net46
deploy:
- provider: GitHub
auth_token:
secure: aTd8m7PLWUlE3iacD7bx9oWPW/CK0+BTE7/nhgB2EXk8iZ3FJ/TJX5kKLIYP76nN
force_update: false
on:
branch: master
|
Fix code coverage not working properly for Python 2.7 64-bit and 3. | image:
- Visual Studio 2017
environment:
matrix:
- PYTHON: "C:\\Python27"
- PYTHON: "C:\\Python35"
- PYTHON: "C:\\Python36"
- PYTHON: "C:\\Python27-x64"
- PYTHON: "C:\\Python35-x64"
- PYTHON: "C:\\Python36-x64"
install:
- "%PYTHON%\\python.exe -m pip install --upgrade setuptools pip"
- "%PYTHON%\\python.exe -m pip install --upgrade virtualenv"
- "%PYTHON%\\python.exe -m pip --version"
- "%PYTHON%\\python.exe -m virtualenv --version"
- "git.exe --version"
- "%PYTHON%\\python.exe -m pip install --upgrade flake8 pylint codecov"
- "%PYTHON%\\python.exe -m pip install --upgrade pytest-flake8 pytest-pylint pytest-cov"
- "%PYTHON%\\python.exe -m pip install -r requirements.txt"
build: off
test_script:
- "%PYTHON%\\python.exe -m pytest"
on_success:
- "%PYTHON%\\python.exe -m codecov -X gcov"
skip_branch_with_pr: true
| image:
- Visual Studio 2017
environment:
matrix:
- PYTHON: "C:\\Python27"
- PYTHON: "C:\\Python35"
- PYTHON: "C:\\Python36"
- PYTHON: "C:\\Python27-x64"
- PYTHON: "C:\\Python35-x64"
- PYTHON: "C:\\Python36-x64"
install:
- "%PYTHON%\\python.exe -m pip install --upgrade setuptools pip"
- "%PYTHON%\\python.exe -m pip install --upgrade virtualenv"
- "%PYTHON%\\python.exe -m pip --version"
- "%PYTHON%\\python.exe -m virtualenv --version"
- "git.exe --version"
- "%PYTHON%\\python.exe -m pip install --upgrade flake8 pylint codecov"
- "%PYTHON%\\python.exe -m pip install --upgrade pytest-flake8 pytest-pylint pytest-cov"
- "%PYTHON%\\python.exe -m pip install -r requirements.txt"
- "choco install codecov"
build: off
test_script:
- "%PYTHON%\\python.exe -m pytest --cov-report xml"
on_success:
- "codecov -f coverage.xml"
skip_branch_with_pr: true
|
Update TubeLab to 1.1.1 (11) | Categories:
- Internet
License: GPL-3.0-only
AuthorName: Thomas
AuthorWebSite: https://fedilab.app/
SourceCode: https://framagit.org/tom79/fedilab-tube
IssueTracker: https://framagit.org/tom79/fedilab-tube/issues
Translation: https://crowdin.com/project/tubelab
AutoName: TubeLab
RepoType: git
Repo: https://framagit.org/tom79/fedilab-tube
Builds:
- versionName: 1.0.0
versionCode: 1
commit: 1.0.0
subdir: app
gradle:
- yes
- versionName: 1.0.3
versionCode: 5
commit: 1.0.3
subdir: app
gradle:
- yes
- versionName: 1.0.4
versionCode: 6
commit: 1.0.4
subdir: app
gradle:
- yes
- versionName: 1.0.5
versionCode: 7
commit: 1.0.5
subdir: app
gradle:
- yes
- versionName: 1.1.0
versionCode: 10
commit: 1.1.0
subdir: app
gradle:
- fdroid_acad
AutoUpdateMode: Version %v
UpdateCheckMode: Tags
CurrentVersion: 1.1.0
CurrentVersionCode: 10
| Categories:
- Internet
License: GPL-3.0-only
AuthorName: Thomas
AuthorWebSite: https://fedilab.app/
SourceCode: https://framagit.org/tom79/fedilab-tube
IssueTracker: https://framagit.org/tom79/fedilab-tube/issues
Translation: https://crowdin.com/project/tubelab
AutoName: TubeLab
RepoType: git
Repo: https://framagit.org/tom79/fedilab-tube
Builds:
- versionName: 1.0.0
versionCode: 1
commit: 1.0.0
subdir: app
gradle:
- yes
- versionName: 1.0.3
versionCode: 5
commit: 1.0.3
subdir: app
gradle:
- yes
- versionName: 1.0.4
versionCode: 6
commit: 1.0.4
subdir: app
gradle:
- yes
- versionName: 1.0.5
versionCode: 7
commit: 1.0.5
subdir: app
gradle:
- yes
- versionName: 1.1.0
versionCode: 10
commit: 1.1.0
subdir: app
gradle:
- fdroid_acad
- versionName: 1.1.1
versionCode: 11
commit: 1.1.1
subdir: app
gradle:
- fdroid_acad
AutoUpdateMode: Version %v
UpdateCheckMode: Tags
CurrentVersion: 1.1.1
CurrentVersionCode: 11
|
Add ENV variables to bluemix configuration | ---
applications:
- name: kiss-bluemix-demo
host: kiss-bluemix-demo
memory: 128M
instances: 1
path: .
buildpack: https://github.com/cloudfoundry/php-buildpack.git
| ---
applications:
- name: kiss-bluemix-demo
host: kiss-bluemix-demo
memory: 128M
instances: 1
path: .
buildpack: https://github.com/cloudfoundry/php-buildpack.git
env:
DISPLAY_ERRORS: false
USE_DI_CACHE: false
USE_DOTENV_FILE: false
USE_LATTE_CACHE: false |
Increase timeout waiting marathon to be up in test. | ---
- hosts: localhost
connection: local
sudo: yes
tasks:
- name: Wait for Marathon to be up
wait_for: host="{{ansible_default_ipv4.address}}" port=8080 state=started delay=3 timeout=5
    - name: Test for Marathon info endpoint
get_url:
url="http://{{ansible_default_ipv4.address}}:8080/v2/info"
dest=/tmp/marathon-info
force=yes
register: status
failed_when: "'OK' not in status.msg"
| ---
- hosts: localhost
connection: local
sudo: yes
tasks:
- name: Wait for Marathon to be up
wait_for: host="{{ansible_default_ipv4.address}}" port=8080 state=started delay=3 timeout=15
- name: Test for Marathon info endpoint
get_url:
url="http://{{ansible_default_ipv4.address}}:8080/v2/info"
dest=/tmp/marathon-info
force=yes
register: status
failed_when: "'OK' not in status.msg"
|
Remove the workaround related to 1464182 | ---
- name: fix masquerade
hosts: undercloud
become: yes
any_errors_fatal: yes
gather_facts: no
tasks:
- name: "w/a for bz#1460116"
service:
name: iptables
state: restarted
- name: compute related workarounds
hosts: compute
become: yes
any_errors_fatal: yes
gather_facts: no
tasks:
- name: "w/a for 1477720"
shell: |
setenforce 0
tags: skip_ansible_lint
- name: w/a for https://bugs.launchpad.net/tripleo/+bug/1708279 - retrieve subnet
shell: |
docker network inspect bridge|jq ".[].IPAM.Config[].Subnet"
register: docker_subnet
tags: skip_ansible_lint
- name: w/a for https://bugs.launchpad.net/tripleo/+bug/1708279 - add iptables rule
shell: |
iptables -t nat -I POSTROUTING -s "{{ docker_subnet.stdout }}" -j MASQUERADE
tags: skip_ansible_lint
- name: "w/a for bz 1464182"
shell: |
docker exec -u root -i nova_compute bash -c "echo $(hexdump -n 16 -v -e '/1 "%02x"' /dev/urandom) |tee /etc/machine-id"
| ---
- name: fix masquerade
hosts: undercloud
become: yes
any_errors_fatal: yes
gather_facts: no
tasks:
- name: "w/a for bz#1460116"
service:
name: iptables
state: restarted
- name: compute related workarounds
hosts: compute
become: yes
any_errors_fatal: yes
gather_facts: no
tasks:
- name: "w/a for 1477720"
shell: |
setenforce 0
tags: skip_ansible_lint
- name: w/a for https://bugs.launchpad.net/tripleo/+bug/1708279 - retrieve subnet
shell: |
docker network inspect bridge|jq ".[].IPAM.Config[].Subnet"
register: docker_subnet
tags: skip_ansible_lint
- name: w/a for https://bugs.launchpad.net/tripleo/+bug/1708279 - add iptables rule
shell: |
iptables -t nat -I POSTROUTING -s "{{ docker_subnet.stdout }}" -j MASQUERADE
tags: skip_ansible_lint
|
Update CI tests to span regions where EFS is supported | global:
qsname: quickstart-magento
owner: sshvans@amazon.com
regions:
- ap-northeast-1
- ap-northeast-2
- ap-south-1
- ap-southeast-1
- ap-southeast-2
- ca-central-1
- eu-central-1
- eu-west-1
- sa-east-1
- us-east-1
- us-east-2
- us-west-1
- us-west-2
reporting: true
tests:
quickstart-magentot1:
parameter_input: quickstart-magento.json
template_file: magento-master-newVPC.template
| global:
qsname: quickstart-magento
owner: sshvans@amazon.com
regions:
- ap-northeast-1
- ap-northeast-2
- ap-south-1
- ap-southeast-1
- ap-southeast-2
- ca-central-1
- eu-central-1
- eu-west-1
- sa-east-1
- us-east-1
- us-east-2
- us-west-1
- us-west-2
reporting: true
tests:
quickstart-magentot1:
parameter_input: quickstart-magento.json
template_file: magento-master-newVPC.template
regions:
- us-east-1
- us-east-2
- us-west-2
- eu-west-1
|
Add explicit Python version support (including Python 3) | ---
ref: freeipa
name: freeipa
description: FreeIPA Integration Pack
keywords:
- freeipa
- ipa
- idm
- redhat
- red
- hat
- authentication
version: 0.1.1
author: Encore Technologies
email: code@encore.tech
| ---
ref: freeipa
name: freeipa
description: FreeIPA Integration Pack
keywords:
- freeipa
- ipa
- idm
- redhat
- red
- hat
- authentication
version: 0.1.1
author: Encore Technologies
email: code@encore.tech
python_versions:
- "2"
- "3"
|
Add channels and update dependencies | name: pysteps
dependencies:
- python=3.6
- jsonschema
- matplotlib
- netCDF4
- numpy
- opencv
- pillow
- pyproj
- scipy
| name: pysteps
channels:
- conda-forge
- defaults
dependencies:
- python=3.6
- attrdict
- jsmin
- jsonschema
- matplotlib
- netCDF4
- numpy
- opencv
- pillow
- pyproj
- scipy
- scikit-image
|
Update from Forestry.io - Updated Forestry configuration | ---
new_page_extension: md
auto_deploy: false
admin_path: ''
webhook_url:
sections:
- type: jekyll-pages
label: Pages
create: all
- type: jekyll-posts
label: Posts
create: all
upload_dir: assets
public_path: "/assets"
front_matter_path: ''
use_front_matter_path: false
file_template: ":filename:"
build:
preview_command: bundle exec jekyll build --drafts --unpublished --future -d _site
publish_command: bundle exec jekyll build -d _site
preview_env:
- JEKYLL_ENV=staging
publish_env:
- JEKYLL_ENV=production
preview_output_directory: _site
output_directory: _site
instant_preview_command: bundle exec jekyll serve --drafts --unpublished --future
--port 8080 --host 0.0.0.0 -d _site
| ---
new_page_extension: md
auto_deploy: false
admin_path: ''
webhook_url:
sections:
- type: jekyll-pages
label: Pages
create: all
- type: jekyll-posts
label: Posts
create: all
upload_dir: assets
public_path: "/assets"
front_matter_path: ''
use_front_matter_path: false
file_template: ":filename:"
instant_preview: true
build:
preview_command: bundle exec jekyll build --drafts --unpublished --future -d _site
publish_command: bundle exec jekyll build -d _site
preview_env:
- JEKYLL_ENV=staging
publish_env:
- JEKYLL_ENV=production
preview_output_directory: _site
output_directory: _site
instant_preview_command: bundle exec jekyll serve --drafts --unpublished --future
--port 8080 --host 0.0.0.0 -d _site
|
Update from Forestry.io - Updated Forestry configuration | ---
new_page_extension: md
auto_deploy: false
admin_path:
webhook_url:
sections:
- type: document
path: _data/nav.yml
label: 文档
- type: heading
label: 博客
- type: jekyll-pages
label: Pages
create: all
- type: jekyll-posts
label: Posts
create: all
upload_dir: uploads
public_path: "/uploads"
front_matter_path: ''
use_front_matter_path: false
file_template: ":filename:"
build:
preview_env:
- JEKYLL_ENV=staging
preview_output_directory: _site
install_dependencies_command: bundle install --path vendor/bundle
preview_docker_image: forestryio/ruby:2.6
mount_path: "/srv"
working_dir: "/srv"
instant_preview_command: bundle exec jekyll serve --drafts --unpublished --future
-d _site
| ---
new_page_extension: md
auto_deploy: false
admin_path:
webhook_url:
sections:
- type: document
path: _data/nav.yml
label: 文档
- type: heading
label: 博客
- type: jekyll-pages
label: Pages
create: all
- type: jekyll-posts
label: Posts
create: all
upload_dir: uploads
public_path: "/uploads"
front_matter_path: ''
use_front_matter_path: false
file_template: ":filename:"
build:
preview_env:
- JEKYLL_ENV=staging
preview_output_directory: _site
install_dependencies_command: bundle install --path vendor/bundle
preview_docker_image: forestryio/ruby:2.6
mount_path: "/srv"
working_dir: "/srv"
instant_preview_command: bundle exec jekyll serve --drafts --unpublished --future
--port 8080 --host 0.0.0.0 -d _site
|
Use RCM to install dotfiles | ---
- name: Add RCM APT Repository
become: yes
apt_repository:
repo: ppa:martin-frost/thoughtbot-rcm
- name: Install RCM
become: yes
package:
name: rcm
| ---
- name: Add RCM APT Repository
become: yes
apt_repository:
repo: ppa:martin-frost/thoughtbot-rcm
- name: Install RCM
become: yes
package:
name: rcm
- name: Fetch dotfiles
git:
dest: ~/.dotfiles
repo: git@github.com:e4r7hbug/dotfiles.git
update: no
- name: Run RCM
command: rcup -v -t python
|
Update from Hackage at 2018-12-06T03:53:17Z | homepage: https://github.com/haskell-works/hw-aeson#readme
changelog-type: ''
hash: efae967ca715eb61b0150edc3d384ce5b19a1b8867371577e58db3d086057c98
test-bench-deps:
hw-aeson: -any
base: -any
hspec: -any
hedgehog: -any
aeson: -any
maintainer: newhoggy@gmail.com
synopsis: Convenience functions for Aeson
changelog: ''
basic-deps:
base: ! '>=4.7 && <5'
text: -any
aeson: -any
all-versions:
- '0.1.0.0'
author: John Ky
latest: '0.1.0.0'
description-type: markdown
description: ! '# hw-aeson
[](https://circleci.com/gh/haskell-works/hw-aeson)'
license-name: BSD3
| homepage: https://github.com/haskell-works/hw-aeson#readme
changelog-type: ''
hash: 23a73f70c7e450a6a14f26aac5b4f62feaf7b3e7ce7eab6a17590f29e6c81c4d
test-bench-deps:
hw-aeson: -any
base: -any
hspec: -any
hedgehog: -any
aeson: -any
maintainer: newhoggy@gmail.com
synopsis: Convenience functions for Aeson
changelog: ''
basic-deps:
base: ! '>=4.7 && <5'
text: -any
aeson: -any
all-versions:
- '0.1.0.0'
- '0.1.0.1'
author: John Ky
latest: '0.1.0.1'
description-type: markdown
description: ! '# hw-aeson
[](https://circleci.com/gh/haskell-works/hw-aeson)'
license-name: BSD3
|
Use java 11 in github actions | name: Gradle Check
on: [push]
jobs:
build:
strategy:
matrix:
os: [macOS-latest, windows-latest, ubuntu-latest]
runs-on: ${{matrix.os}}
steps:
- name: Checkout the repo
uses: actions/checkout@v2
- name: Cache gradle
uses: actions/cache@v1
with:
path: ~/.gradle/caches
key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle') }}
restore-keys: |
${{ runner.os }}-gradle-
- name: Cache gradle wrapper
uses: actions/cache@v1
with:
path: ~/.gradle/wrapper
key: ${{ runner.os }}-gradlewrapper-${{ hashFiles('gradle/wrapper/gradle-wrapper.properties') }}
restore-keys: |
${{ runner.os }}-gradlewrapper-
- name: Cache maven
uses: actions/cache@v1
with:
path: ~/.m2/repository/
key: ${{ runner.os }}-maven-${{ hashFiles('**/*.gradle') }}
restore-keys: |
${{ runner.os }}-maven-
- name: Gradle Check
run: ./gradlew check | name: Gradle Check
on: [push]
jobs:
build:
strategy:
matrix:
os: [macOS-latest, windows-latest, ubuntu-latest]
runs-on: ${{matrix.os}}
steps:
- name: Checkout the repo
uses: actions/checkout@v2
- name: Set up JDK 11
uses: actions/setup-java@v1
with:
java-version: 11
- name: Cache gradle
uses: actions/cache@v1
with:
path: ~/.gradle/caches
key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle') }}
restore-keys: |
${{ runner.os }}-gradle-
- name: Cache gradle wrapper
uses: actions/cache@v1
with:
path: ~/.gradle/wrapper
key: ${{ runner.os }}-gradlewrapper-${{ hashFiles('gradle/wrapper/gradle-wrapper.properties') }}
restore-keys: |
${{ runner.os }}-gradlewrapper-
- name: Cache maven
uses: actions/cache@v1
with:
path: ~/.m2/repository/
key: ${{ runner.os }}-maven-${{ hashFiles('**/*.gradle') }}
restore-keys: |
${{ runner.os }}-maven-
- name: Gradle Check
run: ./gradlew check |
Update with RBAC rules for maya-apiserver | # Define the RBAC rules ( ClusterRoles /
# Define the Service Account
# Launch the maya-apiserver ( deployment )
# Launch the maya-storagemanager ( deameon set)
| # Define the Service Account
# Define the RBAC rules for the Service Account
# Launch the maya-apiserver ( deployment )
# Launch the maya-storagemanager ( deameon set )
# Create Maya Service Account
apiVersion: v1
kind: ServiceAccount
metadata:
name: openebs-maya-operator
namespace: default
---
# Define Role that allows operations on K8s pods/deployments
# in "default" namespace
# TODO : change to new namespace, for isolated data network
# TODO : the rules should be updated with required group/resources/verb
kind: ClusterRole
apiVersion: rbac.authorization.k8s.io/v1beta1
metadata:
namespace: default
name: openebs-maya-operator
rules:
- apiGroups: ["*"]
resources: ["services","pods","deployments"]
verbs: ["*"]
---
# Bind the Service Account with the Role Privileges.
# TODO: Check if default account also needs to be there
kind: ClusterRoleBinding
apiVersion: rbac.authorization.k8s.io/v1beta1
metadata:
name: openebs-maya-operator
namespace: default
subjects:
- kind: ServiceAccount
name: openebs-maya-operator
namespace: default
- kind: User
name: system:serviceaccount:default:default
apiGroup: rbac.authorization.k8s.io
roleRef:
kind: ClusterRole
name: openebs-maya-operator
apiGroup: rbac.authorization.k8s.io
---
apiVersion: apps/v1beta1
kind: Deployment
metadata:
name: maya-apiserver
namespace: default
spec:
replicas: 1
template:
metadata:
labels:
name: maya-apiserver
spec:
serviceAccountName: openebs-maya-operator
containers:
- name: maya-apiserver
image: openebs/m-apiserver:ci
ports:
- containerPort: 5656
|
Allow analyzer ^1.0.0 in mockito. | name: mockito
version: 5.0.0
description: A mock framework inspired by Mockito.
homepage: https://github.com/dart-lang/mockito
environment:
sdk: '>=2.12.0-0 <3.0.0'
dependencies:
analyzer: '>=0.39.15 <0.42.0'
build: ^1.3.0
code_builder: ^3.4.0
collection: ^1.15.0-nullsafety.5
dart_style: ^1.3.6
matcher: ^0.12.10-nullsafety.3
meta: ^1.3.0-nullsafety
path: ^1.8.0-nullsafety.3
source_gen: ^0.9.6
test_api: ^0.2.19-nullsafety
dev_dependencies:
build_runner: ^1.0.0
build_test: ^1.1.0
build_web_compilers: '>=1.0.0 <3.0.0'
http: ^0.13.0-nullsafety.0
package_config: '>=1.9.3 <3.0.0'
pedantic: ^1.10.0-nullsafety
test: ^1.16.0-nullsafety.12
| name: mockito
version: 5.0.0
description: A mock framework inspired by Mockito.
homepage: https://github.com/dart-lang/mockito
environment:
sdk: '>=2.12.0-0 <3.0.0'
dependencies:
analyzer: '>=0.39.15 <2.0.0'
build: ^1.3.0
code_builder: ^3.4.0
collection: ^1.15.0-nullsafety.5
dart_style: ^1.3.6
matcher: ^0.12.10-nullsafety.3
meta: ^1.3.0-nullsafety
path: ^1.8.0-nullsafety.3
source_gen: ^0.9.6
test_api: ^0.2.19-nullsafety
dev_dependencies:
build_runner: ^1.0.0
build_test: ^1.1.0
build_web_compilers: '>=1.0.0 <3.0.0'
http: ^0.13.0-nullsafety.0
package_config: '>=1.9.3 <3.0.0'
pedantic: ^1.10.0-nullsafety
test: ^1.16.0-nullsafety.12
# Force analyzer 1.0.0 during development and CI, so that test expectations work perfectly.
# This is necessary as analyze 1.0.0 is not in the version constraints of some other dependencies.
# glob also gets into the mix.
dependency_overrides:
analyzer: ^1.0.0
glob: ^1.1.0
|
Set up Heroku on CCI9 | version: 2
jobs:
build:
working_directory: ~/circulate
docker:
- image: python:3.6.0
environment:
FLASK_CONFIG: testing
TEST_DATABASE_URL: postgresql://ubuntu@localhost/circle_test?sslmode=disable
- image: postgres:9.6.2
environment:
POSTGRES_USER: ubuntu
POSTGRES_DB: circle_test
POSTGRES_PASSWORD: ""
- image: selenium/standalone-chrome:3.1.0
steps:
- checkout
- restore_cache:
key: projectname-{{ .Branch }}-{{ checksum "requirements/dev.txt" }}
- run:
name: Install Dependencies
command: pip install -r requirements/dev.txt
- run:
# this can be removed
name: Locate site Packages
command: python -c "import site; print(site.getsitepackages())"
- save_cache:
key: projectname-{{ .Branch }}-{{ checksum "requirements/dev.txt" }}
paths:
- "~/.cache/pip"
- "/usr/local/lib/python3.6/site-packages"
- run:
name: Run Tests
command: python manage.py test
- store_artifacts:
path: test-reports/
destination: tr1
- store_test_results:
path: test-reports/
- run:
name: setup Heroku
command: cd .circleci && ./setup-heroku.sh
- run:
name: Check
command: cat ~/.netrc
| version: 2
jobs:
build:
working_directory: ~/circulate
docker:
- image: python:3.6.0
environment:
FLASK_CONFIG: testing
TEST_DATABASE_URL: postgresql://ubuntu@localhost/circle_test?sslmode=disable
- image: postgres:9.6.2
environment:
POSTGRES_USER: ubuntu
POSTGRES_DB: circle_test
POSTGRES_PASSWORD: ""
- image: selenium/standalone-chrome:3.1.0
steps:
- checkout
- restore_cache:
key: projectname-{{ .Branch }}-{{ checksum "requirements/dev.txt" }}
- run:
name: Install Dependencies
command: pip install -r requirements/dev.txt
- run:
# this can be removed
name: Locate site Packages
command: python -c "import site; print(site.getsitepackages())"
- save_cache:
key: projectname-{{ .Branch }}-{{ checksum "requirements/dev.txt" }}
paths:
- "~/.cache/pip"
- "/usr/local/lib/python3.6/site-packages"
- run:
name: Run Tests
command: python manage.py test
- store_artifacts:
path: test-reports/
destination: tr1
- store_test_results:
path: test-reports/
- run:
name: setup Heroku
command: cd .circleci && chmod + x setup-heroku.sh && ./setup-heroku.sh
- run:
name: Check
command: cat ~/.netrc
|
Use pip on Circle CI for now | version: 2
jobs:
build:
docker:
- image: continuumio/miniconda3
steps:
- checkout
- run: conda install --quiet gcc_linux-64 gxx_linux-64
- run: conda config --add channels conda-forge
- run: conda config --set show_channel_urls true
- run: conda config --set always_yes yes
- run: conda update --quiet conda
- run: conda update --quiet python
- run: conda install --quiet conda-build
- run: conda build conda-recipe
| version: 2
jobs:
build:
docker:
- image: continuumio/miniconda3
steps:
- checkout
- run: conda install --quiet gcc_linux-64 gxx_linux-64
- run: conda config --add channels conda-forge
- run: conda config --set show_channel_urls true
- run: conda config --set always_yes yes
- run: conda update --quiet conda
- run: conda update --quiet python
- run: conda install --quiet pip pytest numpy scipy matplotlib sympy quantities pyparsing
- run: pip install .
- run: pytest -ra
|
Add links for new instance group jobs | ---
- type: replace
path: /addons/name=bosh-dns-aliases/jobs/name=bosh-dns-aliases/properties/aliases/domain=reverse-log-proxy.service.cf.internal/targets/instance_group=log-api/deployment
value: ((deploy_env))
- type: replace
path: /instance_groups/name=asapi/jobs/name=route_registrar/consumes/nats/deployment
value: ((deploy_env))
- type: replace
path: /instance_groups/name=asapi/jobs/name=loggregator_agent/consumes/doppler/deployment
value: ((deploy_env))
| ---
- type: replace
path: /addons/name=bosh-dns-aliases/jobs/name=bosh-dns-aliases/properties/aliases/domain=reverse-log-proxy.service.cf.internal/targets/instance_group=log-api/deployment
value: ((deploy_env))
- type: replace
path: /addons/name=bosh-dns-aliases/jobs/name=bosh-dns-aliases/properties/aliases/domain=nats.service.cf.internal/targets/instance_group=nats/deployment
value: ((deploy_env))
- type: replace
path: /addons/name=bosh-dns-aliases/jobs/name=bosh-dns-aliases/properties/aliases/domain=_.nats.service.cf.internal/targets/instance_group=nats/deployment
value: ((deploy_env))
- type: replace
path: /instance_groups/name=asapi/jobs/name=route_registrar/consumes/nats/deployment
value: ((deploy_env))
- type: replace
path: /instance_groups/name=asapi/jobs/name=loggregator_agent/consumes/doppler/deployment
value: ((deploy_env))
- type: replace
path: /instance_groups/name=asactors/jobs/name=route_registrar/consumes/nats/deployment
value: ((deploy_env))
- type: replace
path: /instance_groups/name=asmetrics/jobs/name=route_registrar/consumes/nats/deployment
value: ((deploy_env))
- type: replace
path: /instance_groups/name=asnozzle/jobs/name=route_registrar/consumes/nats/deployment
value: ((deploy_env))
|
Update from Hackage at 2018-01-11T05:27:00Z | homepage: ''
changelog-type: ''
hash: 17f10f7ee4ee9ee9dfc9b6b774eb0ad117373c8454d84260d4167eb4d9c4fd40
test-bench-deps:
base: -any
hspec: ! '>=1.3'
silently: ! '>=1.2.4'
maintainer: Simon Hengel <sol@typeful.net>
synopsis: A lightweight implementation of a subset of Hspec's API
changelog: ''
basic-deps:
base: ! '>=4 && <=5'
all-versions:
- '0.1.0'
- '0.2.0'
- '0.2.1'
author: Simon Hengel <sol@typeful.net>
latest: '0.2.1'
description-type: haddock
description: ! 'A lightweight implementation of a subset of Hspec''s API with
minimal dependencies.'
license-name: MIT
| homepage: https://github.com/hspec/nanospec#readme
changelog-type: ''
hash: a3311128515b687817baa485d1f50c6d6444c76620a8ac91919de142de69bf49
test-bench-deps:
base: ==4.*
hspec: ! '>=1.3'
silently: ! '>=1.2.4'
maintainer: Simon Hengel <sol@typeful.net>
synopsis: A lightweight implementation of a subset of Hspec's API
changelog: ''
basic-deps:
base: ==4.*
all-versions:
- '0.1.0'
- '0.2.0'
- '0.2.1'
- '0.2.2'
author: Simon Hengel <sol@typeful.net>
latest: '0.2.2'
description-type: haddock
description: A lightweight implementation of a subset of Hspec's API with minimal
dependencies.
license-name: MIT
|
Remove namespace and service account name | ---
apiVersion: argoproj.io/v1alpha1
kind: Workflow
metadata:
name: jbhannah.net
namespace: argo
spec:
entrypoint: dag
serviceAccountName: argo
volumeClaimTemplates:
- metadata:
name: workdir
spec:
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 1Gi
templates:
- name: install
inputs:
artifacts:
- name: source
path: /workdir/src
git:
repo: https://github.com/jbhannah/jbhannah.net.git
revision: master
container:
image: node:erbium-alpine
command:
- npm
args:
- ci
volumeMounts:
- name: workdir
mountPath: /workdir
- name: test
container:
image: node:erbium-alpine
command:
- npm
args:
- test
volumeMounts:
- name: workdir
mountPath: /workdir
- name: dag
dag:
tasks:
- name: install
template: install
- name: test
dependencies:
- install
template: test
| ---
apiVersion: argoproj.io/v1alpha1
kind: Workflow
metadata:
name: jbhannah.net
spec:
entrypoint: dag
volumeClaimTemplates:
- metadata:
name: workdir
spec:
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 1Gi
templates:
- name: install
inputs:
artifacts:
- name: source
path: /workdir/src
git:
repo: https://github.com/jbhannah/jbhannah.net.git
revision: master
container:
image: node:erbium-alpine
command:
- npm
args:
- ci
volumeMounts:
- name: workdir
mountPath: /workdir
- name: test
container:
image: node:erbium-alpine
command:
- npm
args:
- test
volumeMounts:
- name: workdir
mountPath: /workdir
- name: dag
dag:
tasks:
- name: install
template: install
- name: test
dependencies:
- install
template: test
|
Update from Hackage at 2022-02-09T12:26:36Z | homepage: https://github.com/tonyday567/box-csv#readme
changelog-type: ''
hash: a16d30aba262516da48fd6662c1c59620d8751648e28011659631b93d2fa1dd7
test-bench-deps: {}
maintainer: tonyday567@gmail.com
synopsis: CSV parsing in a box.
changelog: ''
basic-deps:
base: '>=4.7 && <5'
time: ^>=1.9
box: ^>=0.7
text: ^>=1.2
lens: ^>=5.0
attoparsec: '>=0.13 && <0.16'
generic-lens: ^>=2.0
scientific: ^>=0.3
all-versions:
- 0.0.1
- 0.0.2
- 0.0.3
- 0.1.0
author: Tony Day
latest: 0.1.0
description-type: haddock
description: CSV parsing using attoparsec and the box library.
license-name: BSD-3-Clause
| homepage: https://github.com/tonyday567/box-csv#readme
changelog-type: ''
hash: 1a41ea6a58cbb7a1743de9808e60e71c3ed26a1b9ae3119257a7ccca200db90f
test-bench-deps: {}
maintainer: tonyday567@gmail.com
synopsis: CSV parsing in a box.
changelog: ''
basic-deps:
base: '>=4.7 && <5'
time: ^>=1.9
box: ^>=0.8
text: ^>=1.2
attoparsec: '>=0.13 && <0.16'
all-versions:
- 0.0.1
- 0.0.2
- 0.0.3
- 0.1.0
- 0.2.0
author: Tony Day
latest: 0.2.0
description-type: haddock
description: CSV parsing using attoparsec and the box library.
license-name: BSD-3-Clause
|
Remove deprecated (and unneeded) setting from test configuration | # Unless you are developing Gitorious and really know what you are doing,
# leave the following settings as is.
repository_base_path: "/tmp/git/repositories"
archive_cache_dir: "/tmp/git/tarball-cache"
archive_work_dir: "/tmp/git/tarball-work"
host: gitorious.test
scheme: http
client_host: gitorious.test
client_port: 3000
frontend_server: apache
cookie_secret: UpDoquajrecJewyeodMilmiasFicsOke
messaging_adapter: test
disable_record_throttling: false
| # Unless you are developing Gitorious and really know what you are doing,
# leave the following settings as is.
repository_base_path: "/tmp/git/repositories"
archive_cache_dir: "/tmp/git/tarball-cache"
archive_work_dir: "/tmp/git/tarball-work"
host: gitorious.test
scheme: http
client_host: gitorious.test
client_port: 3000
frontend_server: apache
cookie_secret: UpDoquajrecJewyeodMilmiasFicsOke
messaging_adapter: test
|
Add condition for services management | ---
# Redis services management tasks
- name: 'SERVICE | Manage Redis Server service'
service:
name: "{{ redis_server_service_name }}"
state: "{{ redis_server_service_state }}"
enabled: "{{ redis_server_service_enabled }}"
- name: 'SERVICE | Manage Redis Sentinel service'
service:
name: "{{ redis_sentinel_service_name }}"
state: "{{ redis_sentinel_service_state }}"
enabled: "{{ redis_sentinel_service_enabled }}"
| ---
# Redis services management tasks
- name: 'SERVICE | Manage Redis Server service'
service:
name: "{{ redis_server_service_name }}"
state: "{{ redis_server_service_state }}"
enabled: "{{ redis_server_service_enabled }}"
when: "redis_manage_redis_server | bool"
- name: 'SERVICE | Manage Redis Sentinel service'
service:
name: "{{ redis_sentinel_service_name }}"
state: "{{ redis_sentinel_service_state }}"
enabled: "{{ redis_sentinel_service_enabled }}"
when: "redis_manage_redis_sentinel | bool"
|
Use git specific ssh key | ---
- hosts: [dev]
become: false
roles:
- role: elixir
- role: python
- role: docker
before_tasks:
- name: Install base packages
become: true
apt:
state: present
update_cache: true
name:
- zlib1g-dev
- make
- clang
- unzip
- libssl-dev
- editorconfig
- rustc
- entr
- vim
- zsh
- git
- llvm
- autoconf
- fd-find
- ripgrep
- name: Symlink fd
file:
state: link
src: "/usr/bin/fdfind"
dest: "/usr/bin/fd"
- name: Install snaps
command: "snap install --classic {{ item }}"
with_items:
- universal-ctags
become: yes
- name: Copy ssh key
copy:
src: ~/.ssh/id_ed25519
dest: ~/.ssh/id_ed25519
mode: 0600
- name: bashrc
copy:
src: bashrc
dest: ~/.bashrc
| ---
- hosts: [dev]
become: false
roles:
- role: elixir
- role: python
- role: docker
before_tasks:
- name: Install base packages
become: true
apt:
state: present
update_cache: true
name:
- zlib1g-dev
- make
- clang
- unzip
- libssl-dev
- editorconfig
- rustc
- entr
- vim
- zsh
- git
- llvm
- autoconf
- fd-find
- ripgrep
- name: Symlink fd
file:
state: link
src: "/usr/bin/fdfind"
dest: "/usr/bin/fd"
- name: Install snaps
command: "snap install --classic {{ item }}"
with_items:
- universal-ctags
become: yes
- name: Copy ssh key
copy:
src: ~/.ssh/id_git
dest: ~/.ssh/id_ed25519
mode: 0600
- name: bashrc
copy:
src: bashrc
dest: ~/.bashrc
|
Fix ci-support Giltab CI typo | Flake8:
script:
- curl -L -O -k https://gitlab.tiker.net/inducer/ci-support/raw/master/prepare-and-run-flake8.sh
- . ./prepare-and-run-flake8.sh "$CI_PROJECT_NAME" examples
tags:
- python3
except:
- tags
Documentation:
script: |
EXTRA_INSTALL="numpy"
curl -L -O -k https://gitlab.tiker.net/inducer/ci-support/raw/master/ci-support.sh
. ./ci-support.sh
build_project_in_venv
build_docs --no-check
tags:
- python3
| Flake8:
script:
- curl -L -O -k https://gitlab.tiker.net/inducer/ci-support/raw/master/prepare-and-run-flake8.sh
- . ./prepare-and-run-flake8.sh "$CI_PROJECT_NAME" examples
tags:
- python3
except:
- tags
Documentation:
script: |
EXTRA_INSTALL="numpy"
curl -L -O -k https://gitlab.tiker.net/inducer/ci-support/raw/master/ci-support.sh
. ./ci-support.sh
build_py_project_in_venv
build_docs --no-check
tags:
- python3
|
Update LfDI BW to 1.1 (2) | AntiFeatures:
- NonFreeNet
Categories:
- Internet
- Science & Education
License: EUPL-1.2
AuthorEmail: poststelle@lfdi.bwl.de
AuthorWebSite: https://www.baden-wuerttemberg.datenschutz.de/
SourceCode: https://gitlab.lfdi-bw.de/LfDI/lfdi-app-android-public
AutoName: LfDI BW
RepoType: git
Repo: https://gitlab.lfdi-bw.de/LfDI/lfdi-app-android-public.git
Builds:
- versionName: '1.0'
versionCode: 1
commit: 9063dbd8d333a538fcf073b3a7843b473f629ee6
subdir: app
sudo:
- apt-get update || apt-get update
- apt-get install -y openjdk-11-jdk-headless
- update-alternatives --auto java
gradle:
- yes
AutoUpdateMode: Version v%v
UpdateCheckMode: Tags
CurrentVersion: '1.0'
CurrentVersionCode: 1
| AntiFeatures:
- NonFreeNet
Categories:
- Internet
- Science & Education
License: EUPL-1.2
AuthorEmail: poststelle@lfdi.bwl.de
AuthorWebSite: https://www.baden-wuerttemberg.datenschutz.de/
SourceCode: https://gitlab.lfdi-bw.de/LfDI/lfdi-app-android-public
AutoName: LfDI BW
RepoType: git
Repo: https://gitlab.lfdi-bw.de/LfDI/lfdi-app-android-public.git
Builds:
- versionName: '1.0'
versionCode: 1
commit: 9063dbd8d333a538fcf073b3a7843b473f629ee6
subdir: app
sudo:
- apt-get update || apt-get update
- apt-get install -y openjdk-11-jdk-headless
- update-alternatives --auto java
gradle:
- yes
- versionName: '1.1'
versionCode: 2
commit: 0f0a0cd7ddc578b375b760c2e9959b266960241d
subdir: app
sudo:
- apt-get update || apt-get update
- apt-get install -y openjdk-11-jdk-headless
- update-alternatives --auto java
gradle:
- yes
AutoUpdateMode: Version v%v
UpdateCheckMode: Tags
CurrentVersion: '1.1'
CurrentVersionCode: 2
|
Update to the latest release of GWT | repositories:
remote:
- http://repo1.maven.org/maven2
artifacts:
javax_javaee: javax:javaee-api:jar:7.0
javax_annotation: com.google.code.findbugs:jsr305:jar:2.0.1
gwt_user: com.google.gwt:gwt-user:jar:2.6.0
gwt_dev: com.google.gwt:gwt-dev:jar:2.6.0
javax_validation: javax.validation:validation-api:jar:1.0.0.GA
javax_validation_sources: javax.validation:validation-api:jar:sources:1.0.0.GA
gwt_webpoller: org.realityforge.gwt.webpoller:gwt-webpoller:jar:0.7
| repositories:
remote:
- http://repo1.maven.org/maven2
artifacts:
javax_javaee: javax:javaee-api:jar:7.0
javax_annotation: com.google.code.findbugs:jsr305:jar:2.0.1
gwt_user: com.google.gwt:gwt-user:jar:2.6.1
gwt_dev: com.google.gwt:gwt-dev:jar:2.6.1
javax_validation: javax.validation:validation-api:jar:1.0.0.GA
javax_validation_sources: javax.validation:validation-api:jar:sources:1.0.0.GA
gwt_webpoller: org.realityforge.gwt.webpoller:gwt-webpoller:jar:0.7
|
Update Shippable to use the latest maxdown-fedora images | integrations:
notifications:
- integrationName: email
type: email
on_success: never
on_failure: always
env:
- DISTRO=latest CLASS=report
- DISTRO=latest CLASS=cv
- DISTRO=fedora24 CLASS=report
- DISTRO=fedora24 CLASS=cv
- DISTRO=fedora25 CLASS=report
- DISTRO=fedora25 CLASS=cv
build:
pre_ci_boot:
image_name: idelsink/mexdown
image_tag: $DISTRO
pull: true
ci:
- sudo ./install_latex.sh YES
- make -C $CLASS pdf
- make -C $CLASS copy OUT=~/mexdown_out EXT=pdf
- ls -l ~/mexdown_out
| integrations:
notifications:
- integrationName: email
type: email
on_success: never
on_failure: always
env:
- TAG=latest CLASS=report
- TAG=latest CLASS=cv
- TAG=1 CLASS=report
- TAG=1 CLASS=cv
build:
pre_ci_boot:
image_name: idelsink/mexdown-fedora
image_tag: $TAG
pull: true
ci:
- sudo ./install_latex.sh YES
- make -C $CLASS pdf
- make -C $CLASS copy OUT=~/mexdown_out EXT=pdf
- ls -l ~/mexdown_out
|
Install pytest and mypy at once | name: Build and Test
on:
push:
branches:
- master
pull_request:
branches:
- master
schedule:
- cron: 0 0 * * 0
jobs:
build:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [3.6, 3.7, 3.8]
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
pip install --upgrade pip
pip install poetry
poetry install
- name: Build
run: poetry build
- name: Run tests
run: |
python -m pytest --check-supported-comics --cov=webcomix
python -m mypy . --ignore-missing-imports
- name: Coveralls
uses: coverallsapp/github-action@master
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
| name: Build and Test
on:
push:
branches:
- master
pull_request:
branches:
- master
schedule:
- cron: 0 0 * * 0
jobs:
build:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [3.6, 3.7, 3.8]
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
pip install --upgrade pip
pip install poetry pytest mypy
poetry install
- name: Build
run: poetry build
- name: Run tests
run: |
pytest --check-supported-comics --cov=webcomix
mypy . --ignore-missing-imports
- name: Coveralls
uses: coverallsapp/github-action@master
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
|
Disable progress bars during installation | machine:
environment:
CXX: g++-4.8
BABEL_ENV: cover
CACHE: $HOME/cache
CMAKE_SHORT_VERSION: 3.4
CMAKE_VERSION: 3.4.3
PATH: $CACHE/cmake-$CMAKE_VERSION/bin:$PATH
JASMINE_TIMEOUT: 15000
node:
version: 5.12
post:
- pyenv global 2.7.11
- if [ "$CIRCLE_NODE_INDEX" -eq 1 ] ; then pyenv global 3.4.4 ; fi
dependencies:
cache_directories:
- "~/cache"
override:
- sudo apt-get update; sudo apt-get install libgif-dev
- ./scripts/install_cmake.sh
- npm update; npm prune
- pyenv exec pip install -U pip
- pyenv exec pip install -r requirements-dev.txt -e .[plugins,sftp] -e clients/python
- pyenv rehash
- pyenv exec girder-install web --all-plugins --dev
test:
pre:
- mkdir _build
override:
- PYENV_VERSION=`pyenv version-name` ctest -VV -S cmake/circle_continuous.cmake || true; test ! -f _build/test_failed:
parallel: true
| machine:
environment:
CXX: g++-4.8
BABEL_ENV: cover
CACHE: $HOME/cache
CMAKE_SHORT_VERSION: 3.4
CMAKE_VERSION: 3.4.3
PATH: $CACHE/cmake-$CMAKE_VERSION/bin:$PATH
JASMINE_TIMEOUT: 15000
DEBIAN_FRONTEND: noninteractive
node:
version: 5.12
post:
- pyenv global 2.7.11
- if [ "$CIRCLE_NODE_INDEX" -eq 1 ] ; then pyenv global 3.4.4 ; fi
dependencies:
cache_directories:
- "~/cache"
override:
- sudo apt-get -q update; sudo apt-get install -q -y libgif-dev
- ./scripts/install_cmake.sh
- npm config set progress false
- npm update; npm prune
- pyenv exec pip install -U pip | cat
- pyenv exec pip install -r requirements-dev.txt -e .[plugins,sftp] -e clients/python | cat
- pyenv rehash
- pyenv exec girder-install web --all-plugins --dev
test:
pre:
- mkdir _build
override:
- PYENV_VERSION=`pyenv version-name` ctest -VV -S cmake/circle_continuous.cmake || true; test ! -f _build/test_failed:
parallel: true
|
Add Qt6 windows CI support | # SPDX-FileCopyrightText: None
# SPDX-License-Identifier: CC0-1.0
include:
- https://invent.kde.org/sysadmin/ci-utilities/raw/master/gitlab-templates/linux.yml
- https://invent.kde.org/sysadmin/ci-utilities/raw/master/gitlab-templates/freebsd.yml
- https://invent.kde.org/sysadmin/ci-utilities/raw/master/gitlab-templates/linux-qt6.yml
- https://invent.kde.org/sysadmin/ci-utilities/raw/master/gitlab-templates/windows.yml
- https://invent.kde.org/sysadmin/ci-utilities/raw/master/gitlab-templates/freebsd-qt6.yml
| # SPDX-FileCopyrightText: None
# SPDX-License-Identifier: CC0-1.0
include:
- https://invent.kde.org/sysadmin/ci-utilities/raw/master/gitlab-templates/linux.yml
- https://invent.kde.org/sysadmin/ci-utilities/raw/master/gitlab-templates/freebsd.yml
- https://invent.kde.org/sysadmin/ci-utilities/raw/master/gitlab-templates/linux-qt6.yml
- https://invent.kde.org/sysadmin/ci-utilities/raw/master/gitlab-templates/windows.yml
- https://invent.kde.org/sysadmin/ci-utilities/raw/master/gitlab-templates/freebsd-qt6.yml
- https://invent.kde.org/sysadmin/ci-utilities/raw/master/gitlab-templates/windows-qt6.yml
|
Remove some remaining dark sky sensors | ################################################
## Packages / Recorder
################################################
homeassistant:
customize:
################################################
## Node Anchors
################################################
package.node_anchors:
customize: &customize
package: 'recorder'
################################################
## Recorder
## https://home-assistant.io/components/recorder/
################################################
recorder:
purge_interval: 1
purge_keep_days: 3
exclude:
domains:
- alert
- automation
- group
- history_graph
- media_player
- script
- sun
- updater
- weather
- weblink
- zone
- zwave
entities:
- sensor.date
- sensor.time
- sensor.ha_uptime
- sensor.dark_sky_apparent_temperature
- sensor.dark_sky_humidity
- sensor.dark_sky_ozone
- sensor.dark_sky_precip_intensity
- sensor.dark_sky_precip_probability
- sensor.dark_sky_pressure
- sensor.dark_sky_temperature
- sensor.dark_sky_uv_index
- sensor.dark_sky_wind_bearing
- sensor.dark_sky_wind_speed
- sensor.daily_power
- sensor.daily_power_offpeak
- sensor.daily_power_peak
- sensor.daily_gas
- sensor.monthly_gas
- sensor.monthly_power_offpeak
- sensor.monthly_power_peak
################################################
## History
## https://home-assistant.io/components/history/
################################################
history:
exclude:
domains:
- input_boolean
- input_select
# entities:
# # packages / rfxtrx
# - light.standing_ikea_vaster # restore state after reboot?
# - switch.frontdoor_bell # for counter
################################################
## Logbook
## https://home-assistant.io/components/logbook/
################################################
logbook:
| ################################################
## Packages / Recorder
################################################
homeassistant:
customize:
################################################
## Node Anchors
################################################
package.node_anchors:
customize: &customize
package: 'recorder'
################################################
## Recorder
## https://home-assistant.io/components/recorder/
################################################
recorder:
purge_interval: 1
purge_keep_days: 3
exclude:
domains:
- alert
- automation
- group
- history_graph
- media_player
- script
- sun
- updater
- weather
- weblink
- zone
- zwave
entities:
- sensor.date
- sensor.time
- sensor.ha_uptime
- sensor.daily_power
- sensor.daily_power_offpeak
- sensor.daily_power_peak
- sensor.daily_gas
- sensor.monthly_gas
- sensor.monthly_power_offpeak
- sensor.monthly_power_peak
################################################
## History
## https://home-assistant.io/components/history/
################################################
history:
exclude:
domains:
- input_boolean
- input_select
# entities:
# # packages / rfxtrx
# - light.standing_ikea_vaster # restore state after reboot?
# - switch.frontdoor_bell # for counter
################################################
## Logbook
## https://home-assistant.io/components/logbook/
################################################
logbook:
|
Update from Hackage at 2020-06-14T01:09:07Z | homepage: ''
changelog-type: markdown
hash: 5af8a7d43a97fa9f148ef614f7fe5e8b01c9c487110732015e77834ee07efdc7
test-bench-deps: {}
maintainer: lukec@themk.net
synopsis: Derive monoid instances for product types.
changelog: |
# Revision history for generic-monoid
## 0.1.0.0 -- 2018-12-12
* Initial release.
basic-deps:
base: '>=4.12 && <4.15'
all-versions:
- 0.1.0.0
author: Luke Clifton
latest: 0.1.0.0
description-type: markdown
description: |
# Generic Monoid (and Semigroup)
This library provides a method of deriving `Semigroup` and `Monoid` instances
for your large product types. It does this using GHC generics, and can provides
a mechanism for using the `DerivingVia` extension to reduce boilerplate.
It only works if each field of your product type is itself a `Semigroup`/`Monoid`.
```haskell
{-# LANGUAGE DerivingStrategies #-}
{-# LANGUAGE DerivingVia #-}
{-# LANGUAGE DeriveGeneric #-}
import GHC.Generics
import Data.Monoid.Generic
data BigProduct = BigProduct
{ theList :: [Int]
, theSum :: Sum Double
, theString :: String
} deriving (Generic, Eq)
deriving Semigroup via GenericSemigroup BigProduct
deriving Monoid via GenericMonoid BigProduct
useIt :: Bool
useIt = (mempty <> mempty) == BigProduct [] 0 ""
```
license-name: BSD-3-Clause
| homepage: ''
changelog-type: markdown
hash: d2ddcb359d05156e5654a309759dbf1de887e591d9afdabfb343b2e66403890c
test-bench-deps: {}
maintainer: lukec@themk.net
synopsis: Derive monoid instances for product types.
changelog: |
# Revision history for generic-monoid
## 0.1.0.1 -- 2020-06-14
* Support GHC 8.4
## 0.1.0.0 -- 2018-12-12
* Initial release.
basic-deps:
base: '>=4.11 && <4.15'
all-versions:
- 0.1.0.0
- 0.1.0.1
author: Luke Clifton
latest: 0.1.0.1
description-type: haddock
description: Using GHC's generics, allow for deriving `Monoid` and `Semigroup` instances
for your product types.
license-name: BSD-3-Clause
|
Update from Hackage at 2018-10-05T15:13:37Z | homepage: https://github.com/iokasimov/monopati
changelog-type: ''
hash: e3e9c68353dd48d909ae301566028421900038abbeb6e153fa8144790e1b9a18
test-bench-deps: {}
maintainer: Murat Kasimov <iokasimov.m@gmail.com>
synopsis: Well-typed paths
changelog: ''
basic-deps:
free: -any
split: -any
base: ==4.*
all-versions:
- '0.1.0'
- '0.1.1'
author: Murat Kasimov
latest: '0.1.1'
description-type: haddock
description: Despite the fact that there are a plenty of various well-typed "path"
libraries in Haskell, I decided to write new one that I would like to use.
license-name: BSD3
| homepage: https://github.com/iokasimov/monopati
changelog-type: ''
hash: 833758fbda0c78c4928368bf58c09ad8b2548cc6f03d5a374cc502dc137ca824
test-bench-deps: {}
maintainer: Murat Kasimov <iokasimov.m@gmail.com>
synopsis: Well-typed paths
changelog: ''
basic-deps:
free: -any
split: -any
base: ==4.*
directory: -any
all-versions:
- '0.1.0'
- '0.1.1'
- '0.1.2'
author: Murat Kasimov
latest: '0.1.2'
description-type: haddock
description: Despite the fact that there are a plenty of various well-typed "path"
libraries in Haskell, I decided to write new one that I would like to use.
license-name: BSD3
|
Drop dependencies on build_runner, etc – not used! | name: async
version: 2.2.0
description: Utility functions and classes related to the 'dart:async' library.
author: Dart Team <misc@dartlang.org>
homepage: https://www.github.com/dart-lang/async
environment:
sdk: '>=2.0.0 <3.0.0'
dependencies:
collection: ^1.5.0
dev_dependencies:
fake_async: ^1.0.0
stack_trace: ^1.0.0
test: ^1.0.0
# For building and testing with DDC
build_runner: ^1.0.0
build_web_compilers: ^1.0.0
build_test: ^0.10.1
| name: async
version: 2.2.0
description: Utility functions and classes related to the 'dart:async' library.
author: Dart Team <misc@dartlang.org>
homepage: https://www.github.com/dart-lang/async
environment:
sdk: '>=2.0.0 <3.0.0'
dependencies:
collection: ^1.5.0
dev_dependencies:
fake_async: ^1.0.0
stack_trace: ^1.0.0
test: ^1.0.0
|
Add silent option to curl | name: Test and build
on:
push
jobs:
test:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2.3.4
- name: Start rocket.chat server
uses: isbang/compose-action@v0.1.1
with:
compose-file: docker-compose-test-server.yml
- name: Set up Python 3.8
uses: actions/setup-python@v2
with:
python-version: 3.8
- name: Install dependencies
run: pip install codecov pytest-cov black
- name: Lint with black
run: black --check .
- name: Wait for rocket.chat server to be online
run: until curl http://localhost:3000/api/v1/info; do sleep 5; echo "waiting for Rocket.Chat server to start"; done
- name: Run tests
run: pytest tests rocketchat_API -x --cov=./
- name: Upload code coverage
run: codecov
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} | name: Test and build
on:
push
jobs:
test:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2.3.4
- name: Start rocket.chat server
uses: isbang/compose-action@v0.1.1
with:
compose-file: docker-compose-test-server.yml
- name: Set up Python 3.8
uses: actions/setup-python@v2
with:
python-version: 3.8
- name: Install dependencies
run: pip install codecov pytest-cov black
- name: Lint with black
run: black --check .
- name: Wait for rocket.chat server to be online
run: until curl --silent http://localhost:3000/api/v1/info; do sleep 5; echo "waiting for Rocket.Chat server to start"; done
- name: Run tests
run: pytest tests rocketchat_API -x --cov=./
- name: Upload code coverage
run: codecov
env:
CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} |
Make the 'from_date' and 'to_date' parameters of getQuestionSubmissions a date-time string. | /sessions/{session_uid}/question_submissions/{question_manifest_uid}:
parameters:
- name: question_manifest_uid
description: question manifest uid
type: string
in: path
required: true
- name: session_uid
description: the session uid
type: string
in: path
required: true
get:
operationId: getQuestionSubmissions
description: |
Get all submissions for a question manifest
parameters:
- name: max_records
description: optional max records for return
in: query
required: false
type: number
- name: from_date
description: optional from date filter in ISO8601 format
in: query
required: false
type: string
- name: to_date
description: optional to date filter in ISO8601 format
in: query
required: false
type: string
responses:
'200':
description: Success
schema:
$ref: '#/definitions/QuestionSubmissionQueryResponse'
default:
description: Failed
schema:
$ref: '#/definitions/Error'
post:
operationId: createQuestionSubmission
description: |
Create submissions for a given question manifest
parameters:
- name: body
description: request body
in: body
required: true
schema:
$ref: '#/definitions/QuestionSubmissionCreateRequest'
responses:
'201':
description: Created
schema:
$ref: '#/definitions/ManifestSubmissions'
default:
description: Creation failed
schema:
$ref: '#/definitions/Error' | /sessions/{session_uid}/question_submissions/{question_manifest_uid}:
parameters:
- name: question_manifest_uid
description: question manifest uid
type: string
in: path
required: true
- name: session_uid
description: the session uid
type: string
in: path
required: true
get:
operationId: getQuestionSubmissions
description: |
Get all submissions for a question manifest
parameters:
- name: max_records
description: optional max records for return
in: query
required: false
type: number
- name: from_date
description: optional from date filter
in: query
required: false
type: string
format: date-time
- name: to_date
description: optional to date filter
in: query
required: false
type: string
format: date-time
responses:
'200':
description: Success
schema:
$ref: '#/definitions/QuestionSubmissionQueryResponse'
default:
description: Failed
schema:
$ref: '#/definitions/Error'
post:
operationId: createQuestionSubmission
description: |
Create submissions for a given question manifest
parameters:
- name: body
description: request body
in: body
required: true
schema:
$ref: '#/definitions/QuestionSubmissionCreateRequest'
responses:
'201':
description: Created
schema:
$ref: '#/definitions/ManifestSubmissions'
default:
description: Creation failed
schema:
$ref: '#/definitions/Error' |
Update from Hackage at 2020-09-14T22:08:32Z | homepage: ''
changelog-type: markdown
hash: a947b50a2b8c0eab7fd850e96c9628c479640aa77cf42ed66f10fc18533ead98
test-bench-deps: {}
maintainer: dan.firth@homotopic.tech
synopsis: Conversions from ixset-typed to other containers.
changelog: |
# Changelog for ixset-typed-conversions
## (v0.1.1.0)
* Add cofree conversions.
## (v0.1.0.0)
* Add conversion functions from `IxSet` to `HashMap` and `Zipper []`.
basic-deps:
exceptions: -any
free: -any
base: '>=4.7 && <5'
unordered-containers: -any
ixset-typed: -any
hashable: -any
zipper-extra: -any
all-versions:
- 0.1.0.0
- 0.1.0.1
- 0.1.1.0
- 0.1.1.1
author: Daniel Firth
latest: 0.1.1.1
description-type: markdown
description: |
# ixset-typed-conversions
Conversions from ixset-typed to other containers.
license-name: MIT
| homepage: ''
changelog-type: markdown
hash: c29e50b877c4191e47f8b3046f9ad16f18a27b9c7cc647259878ac9852f9c93b
test-bench-deps: {}
maintainer: dan.firth@homotopic.tech
synopsis: Conversions from ixset-typed to other containers.
changelog: |
# Changelog for ixset-typed-conversions
## (v0.1.2.0)
* Add `toHashMapBy'` and `toHashMapByM'`.
## (v0.1.1.0)
* Add cofree conversions.
## (v0.1.0.0)
* Add conversion functions from `IxSet` to `HashMap` and `Zipper []`.
basic-deps:
exceptions: -any
free: -any
base: '>=4.7 && <5'
unordered-containers: -any
ixset-typed: -any
hashable: -any
zipper-extra: -any
all-versions:
- 0.1.0.0
- 0.1.0.1
- 0.1.1.0
- 0.1.1.1
- 0.1.2.0
author: Daniel Firth
latest: 0.1.2.0
description-type: markdown
description: |
# ixset-typed-conversions
Conversions from ixset-typed to other containers.
license-name: MIT
|
Fix new sha256 from zip instead of tar.gz | {% set name = "LabJackPython" %}
{% set org = "labjack" %}
{% set upstreamversion = "4-24-2014" %}
{% set version = "20140424" %}
{% set sha256 = "f886ade2a29c21c233d339b0f6d00be94bcf8e5a57a5b6bac7e40bad758a5898" %}
package:
name: {{ name|lower }}
version: {{ version }}
source:
fn: {{ name|lower }}-{{version}}-{{ sha256 }}.zip
url: https://github.com/{{ org }}/{{ name }}/archive/{{ upstreamversion }}.zip
sha256: {{ sha256 }}
build:
number: 0
skip: True # [py34]
script: python setup.py install --single-version-externally-managed --record record.txt
requirements:
build:
- python
run:
- python
test:
imports:
- u3
- u6
- ue9
- u12
about:
home: http://labjack.com/support/labjackpython
summary: "Python module for communicating with the LabJack U3/U6/UE9/U12."
license: MIT X-11
license_family: MIT
extra:
recipe-maintainers:
- kastman
| {% set name = "LabJackPython" %}
{% set org = "labjack" %}
{% set upstreamversion = "4-24-2014" %}
{% set version = "20140424" %}
{% set sha256 = "9cf7a6fca9f1308b60a4442dd1410af216fb1d38e49aa5c1ca1e670958c4bcf5" %}
package:
name: {{ name|lower }}
version: {{ version }}
source:
fn: {{ name|lower }}-{{version}}-{{ sha256 }}.zip
url: https://github.com/{{ org }}/{{ name }}/archive/{{ upstreamversion }}.zip
sha256: {{ sha256 }}
build:
number: 0
skip: True # [py34]
script: python setup.py install --single-version-externally-managed --record record.txt
requirements:
build:
- python
run:
- python
test:
imports:
- u3
- u6
- ue9
- u12
about:
home: http://labjack.com/support/labjackpython
summary: "Python module for communicating with the LabJack U3/U6/UE9/U12."
license: MIT X-11
license_family: MIT
extra:
recipe-maintainers:
- kastman
|
Make sure devstack-gate is running multinode correctly | default_enabled_projects:
- "openstack-infra/devstack-gate"
- "openstack-dev/devstack"
- "openstack/glance"
- "openstack/keystone"
- "openstack/neutron"
- "openstack/nova"
- "openstack/requirements"
- "openstack/tempest"
- "openstack/tempest-lib"
devstack_gate_env:
DEVSTACK_LOCAL_CONFIG: "{{ devstack_local_conf }}"
DEVSTACK_GATE_PROJECTS_OVERRIDE: "{{ default_enabled_projects | join(' ') }} {{ projects | join(' ') }}"
OVERRIDE_ZUUL_BRANCH: "{{ override_zuul_branch }}"
PYTHONUNBUFFERED: true
BUILD_TIMEOUT: 10800000
DEVSTACK_GATE_TEMPEST: 1
DEVSTACK_GATE_NEUTRON: 1
DEVSTACK_GATE_SETTINGS: "{{ workspace }}/custom_devstack_gate_hook"
DEVSTACK_GATE_FEATURE_MATRIX: "{{ workspace }}/features.yaml"
| default_enabled_projects:
- "openstack-infra/devstack-gate"
- "openstack-dev/devstack"
- "openstack/glance"
- "openstack/keystone"
- "openstack/neutron"
- "openstack/nova"
- "openstack/requirements"
- "openstack/tempest"
- "openstack/tempest-lib"
devstack_gate_env:
DEVSTACK_LOCAL_CONFIG: "{{ devstack_local_conf }}"
DEVSTACK_GATE_PROJECTS_OVERRIDE: "{{ default_enabled_projects | join(' ') }} {{ projects | join(' ') }}"
OVERRIDE_ZUUL_BRANCH: "{{ override_zuul_branch }}"
PYTHONUNBUFFERED: true
BUILD_TIMEOUT: 10800000
DEVSTACK_GATE_TEMPEST: 1
DEVSTACK_GATE_NEUTRON: 1
DEVSTACK_GATE_SETTINGS: "{{ workspace }}/custom_devstack_gate_hook"
DEVSTACK_GATE_FEATURE_MATRIX: "{{ workspace }}/features.yaml"
DEVSTACK_GATE_TOPOLOGY: "{% if (groups['subnodes']|length) > 0 %} multinode {% else %} aio {% endif %}"
|
Install kraken with pbr features | name: Lint and test
on: [push]
jobs:
lint_and_test:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [3.6, 3.7, 3.8]
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies and kraken
run: |
python -m pip install --upgrade pip
pip install wheel flake8 pytest nose hocr-spec
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
pip install .
- name: Lint with flake8
run: |
# stop the build if there are Python syntax errors or undefined names
flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
- name: Run tests, except training tests
run: |
pytest -k 'not test_train'
| name: Lint and test
on: [push]
jobs:
lint_and_test:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [3.6, 3.7, 3.8]
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies and kraken
run: |
python -m pip install --upgrade pip
pip install wheel pbr flake8 pytest nose hocr-spec
# if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
python setup.py install
- name: Lint with flake8
run: |
# stop the build if there are Python syntax errors or undefined names
flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
- name: Run tests, except training tests
run: |
pytest -k 'not test_train'
|
Update from Hackage at 2018-02-23T20:03:28Z | homepage: https://github.com/haskell-foundation/foundation
changelog-type: ''
hash: 4a74e888df45f518a0f2b93e440d982e8453fe4c0efc45dcdd7fa982fd7acb59
test-bench-deps: {}
maintainer: vincent@snarc.org
synopsis: Foundation scrap box of array & string
changelog: ''
basic-deps:
base: ! '>=4.7 && <5'
ghc-prim: -any
all-versions:
- '0.0.0'
- '0.0.1'
- '0.0.2'
- '0.0.3'
- '0.0.4'
- '0.0.5'
- '0.0.6'
author: ''
latest: '0.0.6'
description-type: haddock
description: Foundation most basic primitives without any dependencies
license-name: BSD3
| homepage: https://github.com/haskell-foundation/foundation
changelog-type: ''
hash: 690c6ce82aba0c7f3645941faa60d3470102cf097108e97922eef7d6a6d63af4
test-bench-deps: {}
maintainer: vincent@snarc.org
synopsis: Foundation scrap box of array & string
changelog: ''
basic-deps:
base: ! '>=4.7 && <5'
ghc-prim: -any
all-versions:
- '0.0.0'
- '0.0.1'
- '0.0.2'
- '0.0.3'
- '0.0.4'
- '0.0.5'
- '0.0.6'
- '0.0.7'
author: ''
latest: '0.0.7'
description-type: haddock
description: Foundation most basic primitives without any dependencies
license-name: BSD3
|
Update from Hackage at 2019-09-05T15:07:04Z | homepage: https://github.com/riugabachi/describe
changelog-type: markdown
hash: 7f5a96f5fbd6ad724601ceb4556a3483113b79cd8e2776e977b11bc2ab14c261
test-bench-deps:
cereal: ! '>=0.5.8 && <0.6'
bytestring: ! '>=0.10.8 && <0.11'
base: ^>=4.12.0.0
describe: -any
QuickCheck: -any
maintainer: n/a
synopsis: Combinators for describing binary data structures
changelog: |
# Revision history for describe
## 0.1.0.0 -- 2019-06-03
* Initial release.
basic-deps:
cereal: ! '>=0.5.8 && <0.6'
bytestring: ! '>=0.10.8 && <0.11'
base: ^>=4.12.0.0
all-versions:
- 0.1.0.0
author: Riuga
latest: 0.1.0.0
description-type: haddock
description: Combinators for describing binary data structures, which eliminate the
boilerplate of having to write isomorphic Get and Put instances. Please see the
Github page for examples.
license-name: BSD-3-Clause
| homepage: https://github.com/riugabachi/describe
changelog-type: markdown
hash: 5ab8be078d533709289a168efd64efab9d8d90068836de4ac0fe4da197c046e8
test-bench-deps:
cereal: ! '>=0.5.8 && <0.6'
bytestring: ! '>=0.10.8 && <0.11'
base: ^>=4.12.0.0
describe: -any
QuickCheck: -any
maintainer: n/a
synopsis: Combinators for describing binary data structures
changelog: |
# Revision history for describe
## 0.1.1.0 -- 2019-09-05
* Added Monad instance for Descriptor
## 0.1.0.0 -- 2019-06-03
* Initial release.
basic-deps:
cereal: ! '>=0.5.8 && <0.6'
bytestring: ! '>=0.10.8 && <0.11'
base: ^>=4.12.0.0
all-versions:
- 0.1.0.0
- 0.1.1.0
author: Riuga
latest: 0.1.1.0
description-type: haddock
description: Combinators for describing binary data structures, which eliminate the
boilerplate of having to write isomorphic Get and Put instances. Please see the
Github page for examples.
license-name: BSD-3-Clause
|
Update from Hackage at 2017-09-16T19:53:39Z | homepage: https://github.com/anton-k/data-fix
changelog-type: ''
hash: 9418f6c158198b16aaabac41737eebb5ccecc40b35272b59995498c6f344aa14
test-bench-deps: {}
maintainer: <anton.kholomiov@gmail.com>
synopsis: Fixpoint data types
changelog: ''
basic-deps:
base: ! '>=4.7 && <5'
all-versions:
- '0.0.1'
- '0.0.2'
- '0.0.3'
- '0.0.4'
- '0.0.6'
- '0.0.7'
author: Anton Kholomiov
latest: '0.0.7'
description-type: haddock
description: ! 'Fixpoint types and recursion schemes. If you define your AST as
fixpoint type, you get fold and unfold operations for free.
Thanks for contribution to: Matej Kollar, Herbert Valerio Riedel'
license-name: BSD3
| homepage: https://github.com/anton-k/data-fix
changelog-type: ''
hash: d8edf00817e386e0ef6bbb5d2ab75aac4e52e77e95c4ce3c08c0c9c028af3963
test-bench-deps: {}
maintainer: <anton.kholomiov@gmail.com>
synopsis: Fixpoint data types
changelog: ''
basic-deps:
base: ! '>=4.7 && <5'
all-versions:
- '0.0.1'
- '0.0.2'
- '0.0.3'
- '0.0.4'
- '0.0.6'
- '0.0.7'
- '0.2.0'
author: Anton Kholomiov
latest: '0.2.0'
description-type: haddock
description: ! 'Fixpoint types and recursion schemes. If you define your AST as
fixpoint type, you get fold and unfold operations for free.
Thanks for contribution to: Matej Kollar, Herbert Valerio Riedel'
license-name: BSD3
|
Fix integration tests for S3 | integration-tester:
build: .
command: s3
links:
- georocket_s3
- s3
georocket_s3:
image: georocket/georocket
volumes:
- ./conf/georocket_s3:/usr/local/georocket-server/conf
links:
- s3
s3:
image: scality/s3server:mem-latest
# Allow s3 server to be accessed through the "s3" domain name.
# If we don't add the domain name to the config file the server will
# fail with an authentication error.
command: bash -c "node -e \"var conf = require('./config.json'), fs = require('fs'); conf.regions.specifiedregion = ['s3']; fs.writeFileSync('config.json', JSON.stringify(conf, undefined, 2));\" && npm start"
| integration-tester:
build: .
command: s3
links:
- georocket_s3
- s3
georocket_s3:
image: georocket/georocket
volumes:
- ./conf/georocket_s3:/usr/local/georocket-server/conf
links:
- s3
s3:
image: scality/s3server:mem-latest
# Allow s3 server to be accessed through the "s3" domain name.
# If we don't add the domain name to the config file the server will
# fail with an authentication error.
command: bash -c "node -e \"var conf = require('./config.json'), fs = require('fs'); conf.restEndpoints.s3 = 'file'; fs.writeFileSync('config.json', JSON.stringify(conf, undefined, 2));\" && npm start"
|
Fix pg_hba path to use current postgresql 13 version | backups::exclude:
- srv/softwareheritage/postgres
# that's a mirror of the load on the host...
icinga2::host::vars:
load: high
prometheus::sql::config_snippets:
- swh-indexer
pgbouncer::auth_hba_file: /etc/postgresql/11/replica/pg_hba.conf
pgbouncer::listen_addr: 192.168.100.103
pgbouncer::databases:
# swh
- source_db: "%{hiera('swh::deploy::storage::db::dbname')}"
host: somerset.internal.softwareheritage.org
auth_user: "%{hiera('swh::deploy::db::pgbouncer::user::login')}"
port: "%{hiera('swh::deploy::db::main::port')}"
- source_db: "%{hiera('swh::deploy::indexer::storage::db::dbname')}"
host: belvedere.internal.softwareheritage.org
auth_user: "%{hiera('swh::deploy::db::pgbouncer::user::login')}"
port: "%{hiera('swh::deploy::db::indexer::port')}"
| backups::exclude:
- srv/softwareheritage/postgres
# that's a mirror of the load on the host...
icinga2::host::vars:
load: high
prometheus::sql::config_snippets:
- swh-indexer
pgbouncer::auth_hba_file: /etc/postgresql/13/replica/pg_hba.conf
pgbouncer::listen_addr: 192.168.100.103
pgbouncer::databases:
# swh
- source_db: "%{hiera('swh::deploy::storage::db::dbname')}"
host: somerset.internal.softwareheritage.org
auth_user: "%{hiera('swh::deploy::db::pgbouncer::user::login')}"
port: "%{hiera('swh::deploy::db::main::port')}"
- source_db: "%{hiera('swh::deploy::indexer::storage::db::dbname')}"
host: belvedere.internal.softwareheritage.org
auth_user: "%{hiera('swh::deploy::db::pgbouncer::user::login')}"
port: "%{hiera('swh::deploy::db::indexer::port')}"
|
Add config for watchlistmanager service and refactor class paths to parameters | services:
protalk.block.service.watchlist:
class: Protalk\UserBundle\Block\Service\WatchlistBlockService
arguments: [protalk.block.service.watchlist, @templating, @security.context]
tags:
- { name: sonata.block }
protalk.block.service.welcome:
class: Protalk\UserBundle\Block\Service\WelcomeBlockService
arguments: [protalk.block.service.welcome, @templating]
tags:
- { name: sonata.block }
sonata.user.profile.form.type:
class: Protalk\UserBundle\Form\Type\ProfileType
arguments: [ %fos_user.model.user.class% ]
tags:
- { name: form.type, alias: sonata_user_profile } | parameters:
block.service.watchlist.class: 'Protalk\UserBundle\Block\Service\WatchlistBlockService'
block.service.welcome.class: 'Protalk\UserBundle\Block\Service\WelcomeBlockService'
user.profile.form.type.class: 'Protalk\UserBundle\Form\Type\ProfileType'
watchlist_manager.class: 'Protalk\UserBundle\Service\WatchlistManager'
services:
protalk.block.service.watchlist:
class: '%block.service.watchlist.class%'
arguments: [protalk.block.service.watchlist, @templating, @security.context]
tags:
- { name: sonata.block }
protalk.block.service.welcome:
class: '%block.service.welcome.class%'
arguments: [protalk.block.service.welcome, @templating]
tags:
- { name: sonata.block }
sonata.user.profile.form.type:
class: '%user.profile.form.type.class%'
arguments: [ %fos_user.model.user.class% ]
tags:
- { name: form.type, alias: sonata_user_profile }
protalk.watchlist_manager:
class: '%watchlist_manager.class%'
arguments: [@doctrine.orm.entity_manager, @security.context, @session] |
Build number set to 0 | {% set name = "p-winds" %}
{% set version = environ.get('GIT_DESCRIBE_TAG', 'untagged')|string|replace('-','_') %}
{% set build_number = environ.get('GIT_DESCRIBE_NUMBER', '0') %}
package:
name: {{ name|lower }}
version: {{ version }}
source:
url: https://github.com/ladsantos/p-winds/archive/refs/tags/v0.5.2-beta.tar.gz
build:
noarch: python
number: {{ build_number }}
string: {{ [build_number, ('h' + PKG_HASH), environ.get('GIT_DESCRIBE_HASH', '')]|join('_') }}
script: "{{ PYTHON }} -m pip install . -vv"
requirements:
host:
- python
- pip
- setuptools
run:
- python
- numpy
- scipy
- astropy
- pillow
test:
imports:
- p_winds
commands:
- pytest
requires:
- pytest
source_files:
- tests/
about:
home: https://github.com/ladsantos/p-winds
license: MIT
license_family: MIT
license_file: LICENSE
summary: 'Python implementation of Parker wind models for exoplanet atmospheres'
doc_url: https://p-winds.readthedocs.io/
dev_url: https://ladsantos.org
extra:
recipe-maintainers:
- ladsantos
| {% set name = "p-winds" %}
{% set version = "0.5.2b" %}
package:
name: {{ name|lower }}
version: {{ version }}
source:
url: https://github.com/ladsantos/p-winds/archive/refs/tags/v0.5.2-beta.tar.gz
build:
noarch: python
number: 0
script: "{{ PYTHON }} -m pip install . -vv"
requirements:
host:
- python
- pip
- setuptools
run:
- python
- numpy
- scipy
- astropy
- pillow
test:
imports:
- p_winds
commands:
- pytest
requires:
- pytest
source_files:
- tests/
about:
home: https://github.com/ladsantos/p-winds
license: MIT
license_family: MIT
license_file: LICENSE
summary: 'Python implementation of Parker wind models for exoplanet atmospheres'
doc_url: https://p-winds.readthedocs.io/
dev_url: https://ladsantos.org
extra:
recipe-maintainers:
- ladsantos
|
Update from Hackage at 2022-02-24T06:27:46Z | homepage: https://github.com/tonyday567/formatn#readme
changelog-type: ''
hash: 9d1dee5441900faf2601de4d96ee77b14e0d1a2b4a680f52448bbe194f7bec6f
test-bench-deps: {}
maintainer: tonyday567@gmail.com
synopsis: Formatting of doubles.
changelog: ''
basic-deps:
base: '>=4.7 && <5'
text: ^>=1.2
containers: ^>=0.6
QuickCheck: -any
all-versions:
- 0.0.1
- 0.1.0
- 0.2.0
author: Tony Day
latest: 0.2.0
description-type: haddock
description: |-
This package provides support for number formatting styles, especially styles involving significant figure calculations.
== Usage
>>> import Data.FormatN
>>> comma (Just 3) 1234
1,230
license-name: BSD-3-Clause
| homepage: https://github.com/tonyday567/formatn#readme
changelog-type: ''
hash: cd383a25e5c00eb1f95c590ff98bfc2d7d22ad14e3107efc8dd2d64a2805fdbd
test-bench-deps: {}
maintainer: tonyday567@gmail.com
synopsis: Formatting of doubles.
changelog: ''
basic-deps:
base: '>=4.7 && <5'
text: ^>=1.2
containers: ^>=0.6
QuickCheck: -any
all-versions:
- 0.0.1
- 0.1.0
- 0.2.0
- 0.2.1
author: Tony Day
latest: 0.2.1
description-type: haddock
description: |-
This package provides support for number formatting styles, especially styles involving significant figure calculations.
== Usage
>>> import Data.FormatN
>>> comma (Just 3) 1234
1,230
license-name: BSD-3-Clause
|
Update from Hackage at 2021-02-23T19:39:05Z | homepage: ''
changelog-type: ''
hash: 1ab39be6c6cdbf35f6c98f64ddb39bf078e77814c0e086d73a14ff2e703a8942
test-bench-deps: {}
maintainer: ''
synopsis: A Haskell bundle of the Minisat SAT solver
changelog: ''
basic-deps:
base: ! '>=3 && <5'
async: -any
all-versions:
- '0.1'
- 0.1.1
- 0.1.2
author: ''
latest: 0.1.2
description-type: haddock
description: ''
license-name: BSD-3-Clause
| homepage: ''
changelog-type: ''
hash: 3654d6c19268c2ad1e5e15314e399ce153214ffd3da2404e1efacfa5297dc6c7
test-bench-deps: {}
maintainer: ''
synopsis: A Haskell bundle of the Minisat SAT solver
changelog: ''
basic-deps:
base: '>=3 && <5'
async: -any
all-versions:
- '0.1'
- 0.1.1
- 0.1.2
- 0.1.3
author: ''
latest: 0.1.3
description-type: haddock
description: ''
license-name: BSD-3-Clause
|
Update prometheus to 2.0.0 GA | ---
# image prefix defaults
l_openshift_prometheus_image_prefix: "{{ openshift_prometheus_image_prefix | default('openshift/') }}"
l_openshift_prometheus_proxy_image_prefix: "{{ openshift_prometheus_proxy_image_prefix | default(l_openshift_prometheus_image_prefix) }}"
l_openshift_prometheus_alertmanager_image_prefix: "{{ openshift_prometheus_altermanager_image_prefix | default(l_openshift_prometheus_image_prefix) }}"
l_openshift_prometheus_alertbuffer_image_prefix: "{{ openshift_prometheus_alertbuffer_image_prefix | default(l_openshift_prometheus_image_prefix) }}"
# image version defaults
l_openshift_prometheus_image_version: "{{ openshift_prometheus_image_version | default('v2.0.0-dev.3') }}"
l_openshift_prometheus_proxy_image_version: "{{ openshift_prometheus_proxy_image_version | default('v1.0.0') }}"
l_openshift_prometheus_alertmanager_image_version: "{{ openshift_prometheus_alertmanager_image_version | default('v0.9.1') }}"
l_openshift_prometheus_alertbuffer_image_version: "{{ openshift_prometheus_alertbuffer_image_version | default('v0.0.2') }}"
| ---
# image prefix defaults
l_openshift_prometheus_image_prefix: "{{ openshift_prometheus_image_prefix | default('openshift/') }}"
l_openshift_prometheus_proxy_image_prefix: "{{ openshift_prometheus_proxy_image_prefix | default(l_openshift_prometheus_image_prefix) }}"
l_openshift_prometheus_alertmanager_image_prefix: "{{ openshift_prometheus_altermanager_image_prefix | default(l_openshift_prometheus_image_prefix) }}"
l_openshift_prometheus_alertbuffer_image_prefix: "{{ openshift_prometheus_alertbuffer_image_prefix | default(l_openshift_prometheus_image_prefix) }}"
# image version defaults
l_openshift_prometheus_image_version: "{{ openshift_prometheus_image_version | default('v2.0.0') }}"
l_openshift_prometheus_proxy_image_version: "{{ openshift_prometheus_proxy_image_version | default('v1.0.0') }}"
l_openshift_prometheus_alertmanager_image_version: "{{ openshift_prometheus_alertmanager_image_version | default('v0.9.1') }}"
l_openshift_prometheus_alertbuffer_image_version: "{{ openshift_prometheus_alertbuffer_image_version | default('v0.0.2') }}"
|
Use Microsoft Visual Studio 17 2022 | name: Unit Tests Windows
on: [ push ]
jobs:
build_mingw:
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
- name: cmake
run: cmake -DCMAKE_BUILD_TYPE=Debug -DENABLE_UNIT_TESTS=TRUE -G "MinGW Makefiles" -S ./source
- name: build
run: cmake --build ./
- name: unit-tests
run: ./unittests
build_msvc:
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
- name: cmake
shell: cmd
run: |
call "C:\Program Files (x86)\Microsoft Visual Studio\2019\Enterprise\VC\Auxiliary\Build\vcvars64.bat"
cmake -G "Visual Studio 16 2019" -A x64 -DENABLE_UNIT_TESTS=TRUE -DCMAKE_C_COMPILER=cl -DCMAKE_CXX_COMPILER=cl -S ./source
- name: build
run: cmake --build ./ --config Debug
- name: unit-tests
run: ./Debug/unittests
| name: Unit Tests Windows
on: [ push ]
jobs:
build_mingw:
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
- name: cmake
run: cmake -DCMAKE_BUILD_TYPE=Debug -DENABLE_UNIT_TESTS=TRUE -G "MinGW Makefiles" -S ./source
- name: build
run: cmake --build ./
- name: unit-tests
run: ./unittests
build_msvc:
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
- name: cmake
shell: cmd
run: |
call "C:\Program Files\Microsoft Visual Studio\2022\Enterprise\VC\Auxiliary\Build\vcvars64.bat"
cmake -G "Visual Studio 17 2022" -A x64 -DENABLE_UNIT_TESTS=TRUE -DCMAKE_C_COMPILER=cl -DCMAKE_CXX_COMPILER=cl -S ./source
- name: build
run: cmake --build ./ --config Debug
- name: unit-tests
run: ./Debug/unittests
|
Include make as a dependency for packaging | stages:
- Package
- Build
tarball:
stage: Package
image: alpine:latest
before_script:
- apk add --no-cache git
script:
- git submodule sync
- git submodule update --init --recursive
- git submodule foreach git pull origin master
- make -f tools/star/Makefile release VERSION=$CI_COMMIT_REF_NAME
artifacts:
paths:
- release
moar:
stage: Build
image: alpine:latest
before_script:
- apk add --no-cache bash build-base git perl
- cd "$(mktemp -d)"
- tar xf "$CI_PROJECT_DIR/release/rakudo-star-$CI_COMMIT_REF_NAME.tar.gz"
script:
- perl Configure.pl --prefix=/usr/local --backend=moar --gen-moar
artifacts:
paths:
- "$CI_COMMIT_REF_NAME/release"
- /usr/local
| stages:
- Package
- Build
tarball:
stage: Package
image: alpine:latest
before_script:
- apk add --no-cache git make
script:
- git submodule sync
- git submodule update --init --recursive
- git submodule foreach git pull origin master
- make -f tools/star/Makefile release VERSION=$CI_COMMIT_REF_NAME
artifacts:
paths:
- release
moar:
stage: Build
image: alpine:latest
before_script:
- apk add --no-cache bash build-base git perl
- cd "$(mktemp -d)"
- tar xf "$CI_PROJECT_DIR/release/rakudo-star-$CI_COMMIT_REF_NAME.tar.gz"
script:
- perl Configure.pl --prefix=/usr/local --backend=moar --gen-moar
artifacts:
paths:
- "$CI_COMMIT_REF_NAME/release"
- /usr/local
|
Fix Code Climate config | engines:
rubocop:
enabled: true
csslint:
enabled: true
duplication:
enabled: true
config:
languages:
- ruby
fixme:
enabled: true
ratings:
- "**.css"
- "**.rb"
exclude_paths:
- spec/
| engines:
rubocop:
enabled: true
csslint:
enabled: true
duplication:
enabled: true
config:
languages:
- ruby
fixme:
enabled: true
ratings:
paths:
- "**.css"
- "**.rb"
exclude_paths:
- spec/
|
Set NPM token as correct name | name: Deployment
on:
push:
branches:
- master
jobs:
deploy:
runs-on: ubuntu-latest
steps:
- run: export CHROME_BIN=chromium-browser
- run: export DISPLAY=:99.0
- run: Xvfb :99 -screen 0 1024x768x24 > /dev/null 2>&1 &
- run: sleep 5
- uses: actions/checkout@v1
- uses: actions/setup-node@v1
with:
node-version: '8.x'
- run: mkdir dist
- run: mkdir dist/js
- run: npm install
- run: npm run setup
- run: npm run build
- run: npm run html:production
# - name: GitHub Pages Deployment
# uses: crazy-max/ghaction-github-pages@v0.1.0
# if: success()
# with:
# target_branch: gh-pages
# build_dir: dist
# env:
# GITHUB_PAT: ${{ secrets.GITHUB_PAT }}
- name: NPM Release
if: success()
uses: primer/publish@master
env:
NPM_AUTH_TOKEN: ${{ secrets.NPMJS_TOKEN }}
| name: Deployment
on:
push:
branches:
- master
jobs:
deploy:
runs-on: ubuntu-latest
steps:
- run: export CHROME_BIN=chromium-browser
- run: export DISPLAY=:99.0
- run: Xvfb :99 -screen 0 1024x768x24 > /dev/null 2>&1 &
- run: sleep 5
- uses: actions/checkout@v1
- uses: actions/setup-node@v1
with:
node-version: '8.x'
- run: mkdir dist
- run: mkdir dist/js
- run: npm install
- run: npm run setup
- run: npm run build
- run: npm run html:production
# - name: GitHub Pages Deployment
# uses: crazy-max/ghaction-github-pages@v0.1.0
# if: success()
# with:
# target_branch: gh-pages
# build_dir: dist
# env:
# GITHUB_PAT: ${{ secrets.GITHUB_PAT }}
- name: NPM Release
if: success()
uses: primer/publish@master
env:
NPM_AUTH_TOKEN: ${{ secrets.NPM_AUTH_TOKEN }}
|
Add more fixes to manual patches | # This contains a bunch of small fixes that allow the challenges to be compiled
# Format:
# [filename]:
# '[code to be replaced]': '[replacement]'
# ...
# Patches that should be applied to all files
all:
'random(': 'cgc_random('
'fdwait(': 'cgc_fdwait('
'timeval': 'cgc_timeval'
'fd_set': 'cgc_fd_set'
cpp.cc:
'void *operator new( unsigned int alloc_size )': 'void *operator new( size_t alloc_size )'
'void *operator new[]( unsigned int alloc_size )': 'void *operator new[]( size_t alloc_size )' | # This contains a bunch of small fixes that allow the challenges to be compiled
# Format:
# [filename]:
# '[code to be replaced]': '[replacement]'
# ...
# Patches that should be applied to all files
all:
'random(': 'cgc_random('
'fdwait(': 'cgc_fdwait('
'timeval': 'cgc_timeval'
'fd_set': 'cgc_fd_set'
# Wrong param types
libcpp.cc: &new_sz
# KPRCA 54
'operator new(unsigned int sz)': 'operator new(size_t sz)'
'operator new[](unsigned int sz)': 'operator new[](size_t sz)'
cpp.cc:
# CROMU 21, 23, 26, 42, 44, 70
'void *operator new( unsigned int alloc_size )': 'void *operator new( size_t alloc_size )'
'void *operator new[]( unsigned int alloc_size )': 'void *operator new[]( size_t alloc_size )'
# KPRCA 52, 53
'void *operator new(unsigned int size)': 'void *operator new(size_t size)'
'void *operator new[](unsigned int size)': 'void *operator new[](size_t size)'
# TNETS 2
<<: *new_sz
# A few files had a section() attribute, but were missing "DATA,", which is needed on OSX
# KPRCA 25
# TODO: Windows?
aafirst.c: &rodatafix
'section(".rodata.init")': 'section("DATA,.rodata.init")'
oo.h:
<<: *rodatafix
zzlast.c:
<<: *rodatafix
|
Add COVERALLS_SERVICE_NAME to GitHub Actions. | name: Main
on: [push, pull_request]
jobs:
test:
name: Test
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [3.6, 3.7, 3.8, 3.9]
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
pip install --upgrade pip
pip install tox-gh-actions coveralls
- name: Run tests
run: |
tox
- name: Send coverage report
run: |
coveralls
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
COVERALLS_FLAG_NAME: ${{ matrix.python-version }}
COVERALLS_PARALLEL: true
coveralls:
name: Finish Coveralls
needs: test
runs-on: ubuntu-latest
container: python:3-slim
steps:
- name: Finished
run: |
pip install --upgrade coveralls
coveralls --finish
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
| name: Main
on: [push, pull_request]
jobs:
test:
name: Test
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [3.6, 3.7, 3.8, 3.9]
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
pip install --upgrade pip
pip install tox-gh-actions coveralls
- name: Run tests
run: |
tox
- name: Send coverage report
run: |
coveralls
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
COVERALLS_FLAG_NAME: ${{ matrix.python-version }}
COVERALLS_PARALLEL: true
COVERALLS_SERVICE_NAME: github
coveralls:
name: Finish Coveralls
needs: test
runs-on: ubuntu-latest
container: python:3-slim
steps:
- name: Finished
run: |
pip install --upgrade coveralls
coveralls --finish
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
Add scheduled events for lambdas | service: spiders
package:
exclude:
- .cache/**
- .idea/**
- .pytest_cache/**
- .scrapy/**
- .vscode/**
- .coverage
- tests/**
- .pep8
- .python-version
- .pyup.yml
- .travis.yml
- LICENSE
- README.rst
- requirements.txt
- test_requirements.txt
- setup.cfg
provider:
name: aws
runtime: python3.7
timeout: 30
functions:
tvinna:
handler: lambda.run
environment:
SCRAPY_SPIDER: tvinna
mbl:
handler: lambda.run
environment:
SCRAPY_SPIDER: mbl
alfred:
handler: lambda.run
environment:
SCRAPY_SPIDER: alfred
plugins:
- serverless-python-requirements
custom:
pythonRequirements:
dockerizePip: true
useDownloadCache: true
slim: true
| service: spiders
package:
exclude:
- .cache/**
- .idea/**
- .pytest_cache/**
- .scrapy/**
- .vscode/**
- .coverage
- tests/**
- .pep8
- .python-version
- .pyup.yml
- .travis.yml
- LICENSE
- README.rst
- requirements.txt
- test_requirements.txt
- setup.cfg
provider:
name: aws
runtime: python3.7
timeout: 30
functions:
tvinna:
handler: lambda.run
environment:
SCRAPY_SPIDER: tvinna
events:
- schedule: rate(30 minutes)
mbl:
handler: lambda.run
environment:
SCRAPY_SPIDER: mbl
events:
- schedule: rate(30 minutes)
alfred:
handler: lambda.run
environment:
SCRAPY_SPIDER: alfred
events:
- schedule: rate(30 minutes)
plugins:
- serverless-python-requirements
custom:
pythonRequirements:
dockerizePip: true
useDownloadCache: true
slim: true
|
Add localhost dns lookup for the docker daemon. | ---
- name: install docker
sudo: yes
yum:
name: docker
state: latest
- name: enable docker
sudo: yes
service:
name: docker
state: started
enabled: yes
- name: add {{ ansible_ssh_user }} to docker group
sudo: yes
user:
name: "{{ ansible_ssh_user }}"
groups: docker
append: yes
# required to install pip
- name: install epel-release
sudo: yes
yum:
name: epel-release
state: present
# required to install docker-py with pip
- name: install python-pip
sudo: yes
yum:
name: python-pip
state: present
# required for ansible docker module
- name: install docker-py
sudo: yes
pip:
name: docker-py
# docker-py 0.4.0 works with ansible 1.8.2
version: 0.4.0
state: present
| ---
- name: install docker
sudo: yes
yum:
name: docker
state: latest
- name: docker consul dns
sudo: yes
lineinfile:
dest: /etc/sysconfig/docker
state: present
regexp: ^OPTIONS
line: OPTIONS=--selinux-enabled -H fd:// --dns {{ ansible_eth0.ipv4.address }} --dns-search service.consul
- name: enable docker
sudo: yes
service:
name: docker
state: started
enabled: yes
- name: add {{ ansible_ssh_user }} to docker group
sudo: yes
user:
name: "{{ ansible_ssh_user }}"
groups: docker
append: yes
# required to install pip
- name: install epel-release
sudo: yes
yum:
name: epel-release
state: present
# required to install docker-py with pip
- name: install python-pip
sudo: yes
yum:
name: python-pip
state: present
# required for ansible docker module
- name: install docker-py
sudo: yes
pip:
name: docker-py
# docker-py 0.4.0 works with ansible 1.8.2
version: 0.4.0
state: present
|
Fix Travis CI build broken on missing scheme | language: objective-c
osx_image: xcode7
xcode_project: ZipZap.xcodeproj
xcode_scheme: ZipZap (OS X Static Library) | language: objective-c
osx_image: xcode7
xcode_project: ZipZap.xcodeproj
xcode_scheme: ZipZap (macOS Static Library)
|
Remove 0.11 from CI build. | language: node_js
node_js:
- "0.11"
- "0.10"
- 0.8
before_install: "npm install -g bob --loglevel error"
script: "bob build"
| language: node_js
node_js:
- "0.10"
- 0.8
before_install: "npm install -g bob --loglevel error"
script: "bob build"
|
Test on Python 3.8 beta | language: python
cache: pip
matrix:
include:
- python: 2.7
- python: 3.5
- python: 3.6
- python: 3.7
install:
- pip install -r requirements.txt
script: python test_tablib.py
| language: python
cache: pip
matrix:
include:
- python: 2.7
- python: 3.5
- python: 3.6
- python: 3.7
- python: 3.8-dev
install:
- pip install -r requirements.txt
script: python test_tablib.py
|
Allow failures on CLISP for now. | language: common-lisp
addons:
postgresql: "9.3"
env:
matrix:
- LISP=sbcl
- LISP=ccl
- LISP=clisp
install:
# Install cl-travis
- curl https://raw.githubusercontent.com/luismbo/cl-travis/master/install.sh | bash
before_script:
- mysql -uroot -e 'GRANT ALL ON *.* TO nobody@"localhost" IDENTIFIED BY "nobody"'
- mysql -uroot -e 'CREATE DATABASE `integral_test`'
- psql -c 'create database "integral_test";' -U postgres
- psql -c "CREATE USER nobody WITH PASSWORD 'nobody';" -U postgres
- git clone https://github.com/fukamachi/cl-dbi ~/lisp/cl-dbi
script:
- cl -l prove -e '(or (prove:run :integral-test) (uiop:quit -1))'
| language: common-lisp
addons:
postgresql: "9.3"
env:
matrix:
- LISP=sbcl
- LISP=ccl
- LISP=clisp
matrix:
allow_failures:
- env: LISP=clisp
install:
# Install cl-travis
- curl https://raw.githubusercontent.com/luismbo/cl-travis/master/install.sh | bash
before_script:
- mysql -uroot -e 'GRANT ALL ON *.* TO nobody@"localhost" IDENTIFIED BY "nobody"'
- mysql -uroot -e 'CREATE DATABASE `integral_test`'
- psql -c 'create database "integral_test";' -U postgres
- psql -c "CREATE USER nobody WITH PASSWORD 'nobody';" -U postgres
- git clone https://github.com/fukamachi/cl-dbi ~/lisp/cl-dbi
script:
- cl -l prove -e '(or (prove:run :integral-test) (uiop:quit -1))'
|
Update Travis to test on Node 0.10, 0.12, 4.2, and 5.x | language: node_js
node_js:
- "4.2"
- "4.1"
- "4.0"
- "0.12"
- "0.10"
- "iojs"
| language: node_js
node_js:
- "0.10"
- "0.12"
- "4.2"
- "5"
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.