Instruction stringlengths 14 778 | input_code stringlengths 0 4.24k | output_code stringlengths 1 5.44k |
|---|---|---|
Set up CI with Azure Pipelines | # Xcode
# Build, test, and archive an Xcode workspace on macOS.
# Add steps that install certificates, test, sign, and distribute an app, save build artifacts, and more:
# https://docs.microsoft.com/azure/devops/pipelines/languages/xcode
pool:
vmImage: 'macOS 10.13'
steps:
- task: Xcode@5
inputs:
actions: 'build'
scheme: ''
sdk: 'iphoneos'
configuration: 'Release'
xcWorkspacePath: '**/*.xcodeproj/project.xcworkspace'
xcodeVersion: 'default' # Options: 8, 9, default, specifyPath
| |
Set up CI with Azure Pipelines | # Node.js with gulp
# Build a Node.js project using the gulp task runner.
# Add steps that analyze code, save build artifacts, deploy, and more:
# https://docs.microsoft.com/azure/devops/pipelines/languages/javascript
trigger:
- master
pool:
vmImage: 'ubuntu-latest'
steps:
- task: NodeTool@0
inputs:
versionSpec: '10.x'
displayName: 'Install Node.js'
- script: |
npm install
gulp default --gulpfile gulpfile.js
displayName: 'npm install and run gulp'
| |
Update from Hackage at 2019-06-21T01:00:17Z | homepage: https://github.com/shirren/json-api-lib
changelog-type: ''
hash: bbbb518114e715bb2e09bc4c32cddac50e81115e50cd89170f550787193211ce
test-bench-deps:
json-api-lib: -any
bytestring: ! '>=0.10.8 && <0.11'
lens-aeson: ! '>=1.0.2 && <1.1'
base: ! '>=4.11 && <5'
aeson-pretty: ! '>0.8 && <0.9'
unordered-containers: ! '>=0.2.10 && <0.3'
hspec: ! '>=2.7.1 && <2.8'
text: ! '>=1.2.3.1 && <1.3'
data-default: ! '>=0.7.1 && <0.8'
containers: ! '>=0.6.1 && <0.7'
lens: ! '>=4.17.1 && <4.18'
uri-encode: ! '>=1.5.0 && <1.6'
aeson: ! '>1.4 && <1.5'
maintainer: Shirren Premaratne
synopsis: Utilities for generating JSON-API payloads
changelog: ''
basic-deps:
lens-aeson: ! '>=1.0.2 && <1.1'
base: ! '>=4.11 && <5'
unordered-containers: ! '>=0.2.10 && <0.3'
text: ! '>=1.2.3.1 && <1.3'
data-default: ! '>=0.7.1 && <0.8'
containers: ! '>=0.6.1 && <0.7'
lens: ! '>=4.17.1 && <4.18'
uri-encode: ! '>=1.5.0 && <1.6'
aeson: ! '>1.4 && <1.5'
all-versions:
- 0.1.0.0
author: Todd Mohney, Shirren Premaratne
latest: 0.1.0.0
description-type: haddock
description: |-
Provides utilities for deriving JSON payloads conformant to the json-api
specification
license-name: MIT
| |
Update from Hackage at 2019-05-05T14:50:46Z | homepage: https://github.com/isovector/polysemy-zoo#readme
changelog-type: markdown
hash: aabb2f31832daad68965a6d2e038583223ed8aa81fe2d8af3f486c35325c415c
test-bench-deps:
polysemy-plugin: -any
base: ! '>=4.7 && <5'
hspec: -any
containers: -any
polysemy-zoo: -any
polysemy: -any
maintainer: sandy@sandymaguire.me
synopsis: Experimental, user-contributed effects and interpreters for polysemy
changelog: |
# Changelog for polysemy-zoo
## Unreleased changes
basic-deps:
polysemy-plugin: -any
base: ! '>=4.7 && <5'
containers: -any
polysemy: -any
all-versions:
- 0.1.0.0
author: Sandy Maguire
latest: 0.1.0.0
description-type: markdown
description: |+
# polysemy-zoo
[](https://travis-ci.org/isovector/polysemy-zoo)
[](https://hackage.haskell.org/package/polysemy-zoo)
## Dedication
> Once I was chased by the king of all scorpions.
>
> Rachel Hunter
## Overview
The `polysemy-zoo` is an experimental repository for user-contributed additions
to the `polysemy` ecosystem. You're encouraged to open Pull Requests here for
any effects/interpretations that you write and think might be useful for others.
Particularly successful contributions here will be migrated into either
`polysemy` proper, or `polysemy-contrib` (the less experimental version of the
zoo.)
license-name: BSD-3-Clause
| |
Add action to publish package to pypi on release | # This workflow will upload a Python Package using Twine when a release is created
# For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries
name: Upload Python Package
on:
release:
types: [created]
jobs:
deploy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: '3.x'
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install setuptools wheel twine
- name: Build and publish
env:
TWINE_USERNAME: __token__
TWINE_PASSWORD: ${{ secrets.WSGI_KERBEROS_PYPI_TOKEN }}
run: |
python setup.py sdist bdist_wheel
twine upload dist/*
| |
Add a Coverity Scan Github action. | name: Coverity Scan
on:
push:
branches: [ master ]
jobs:
coverity:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: dependencies
run: sudo apt-get -y install g++ libcfitsio-dev autoconf-archive
- name: bootstrap
run: ./bootstrap
- name: configure
run: ./configure
- uses: vapier/coverity-scan-action@v1
with:
command: make
email: ${{ secrets.COVERITY_SCAN_EMAIL }}
token: ${{ secrets.COVERITY_SCAN_TOKEN }}
| |
Remove non existing cache directory. | machine:
services:
- docker
node:
version:
6.1.0
python:
version:
2.7.3
dependencies:
pre:
- pip install -r requirements_dev.txt
- pip install -r requirements.txt
- pip install pymongo==3.2.1
- if [ "$CIRCLE_BRANCH" = "master" ]; then make deps; fi
cache_directories:
- node_modules/
- rd_ui/node_modules/
- rd_ui/app/bower_components/
test:
override:
- nosetests --with-xunit --xunit-file=$CIRCLE_TEST_REPORTS/junit.xml --with-coverage --cover-package=redash tests/
deployment:
github_and_docker:
branch: master
commands:
- make pack
- make upload
- echo "rd_ui/app" >> .dockerignore
- docker pull redash/redash:latest
- docker build -t redash/redash:$(./manage.py version | sed -e "s/\+/./") .
- docker login -e $DOCKER_EMAIL -u $DOCKER_USER -p $DOCKER_PASS
- docker push redash/redash:$(./manage.py version | sed -e "s/\+/./")
notify:
webhooks:
- url: https://webhooks.gitter.im/e/895d09c3165a0913ac2f
general:
branches:
ignore:
- gh-pages
| machine:
services:
- docker
node:
version:
6.1.0
python:
version:
2.7.3
dependencies:
pre:
- pip install -r requirements_dev.txt
- pip install -r requirements.txt
- pip install pymongo==3.2.1
- if [ "$CIRCLE_BRANCH" = "master" ]; then make deps; fi
cache_directories:
- node_modules/
- rd_ui/app/bower_components/
test:
override:
- nosetests --with-xunit --xunit-file=$CIRCLE_TEST_REPORTS/junit.xml --with-coverage --cover-package=redash tests/
deployment:
github_and_docker:
branch: master
commands:
- make pack
- make upload
- echo "rd_ui/app" >> .dockerignore
- docker pull redash/redash:latest
- docker build -t redash/redash:$(./manage.py version | sed -e "s/\+/./") .
- docker login -e $DOCKER_EMAIL -u $DOCKER_USER -p $DOCKER_PASS
- docker push redash/redash:$(./manage.py version | sed -e "s/\+/./")
notify:
webhooks:
- url: https://webhooks.gitter.im/e/895d09c3165a0913ac2f
general:
branches:
ignore:
- gh-pages
|
Add slather to gem install | osx_image: xcode6.4
language: objective-c
before_install: gem install cocoapods xcpretty obcd -N
podfile: GTScrollNavigationBarExample/Podfile
env:
- LC_CTYPE=en_US.UTF-8 LANG=en_US.UTF-8
script:
- set -o pipefail && xcodebuild -workspace GTScrollNavigationBarExample/GTScrollNavigationBarExample.xcworkspace
-scheme GTScrollNavigationBarExample -sdk iphonesimulator -destination "platform=iOS Simulator,name=iPhone 6"
GCC_INSTRUMENT_PROGRAM_FLOW_ARCS=YES GCC_GENERATE_TEST_COVERAGE_FILES=YES clean test | xcpretty -c
- pod lib lint --quick
- obcd --path GTScrollNavigationBar find HeaderStyle
after_success: slather | osx_image: xcode6.4
language: objective-c
before_install: gem install cocoapods xcpretty obcd slather -N
podfile: GTScrollNavigationBarExample/Podfile
env:
- LC_CTYPE=en_US.UTF-8 LANG=en_US.UTF-8
script:
- set -o pipefail && xcodebuild -workspace GTScrollNavigationBarExample/GTScrollNavigationBarExample.xcworkspace
-scheme GTScrollNavigationBarExample -sdk iphonesimulator -destination "platform=iOS Simulator,name=iPhone 6"
GCC_INSTRUMENT_PROGRAM_FLOW_ARCS=YES GCC_GENERATE_TEST_COVERAGE_FILES=YES clean test | xcpretty -c
- pod lib lint --quick
- obcd --path GTScrollNavigationBar find HeaderStyle
after_success: slather |
Add an initial configuration for Travis | language: go
go:
- 1.6
- 1.7
- tip
sudo: required
before_install:
- sudo apt-get -qq update
- sudo apt-get -qq install btrfs-tools libdevmapper-dev
script:
- make
| |
Use latest GAE SDK for Travis builds | language: go
install:
- curl -sSo gae_sdk.zip https://storage.googleapis.com/appengine-sdks/featured/go_appengine_sdk_linux_amd64-1.9.12.zip
- unzip -q gae_sdk.zip
script:
- ./go_appengine/goapp test -v ./endpoints
| language: go
install:
- curl -sSo gae_sdk.zip https://storage.googleapis.com/appengine-sdks/featured/go_appengine_sdk_linux_amd64-1.9.15.zip
- unzip -q gae_sdk.zip
script:
- ./go_appengine/goapp test -v ./endpoints
|
Allow github to perform integration testing | ---
language: python
python: "2.7"
before_install:
- sudo apt-get update --assume-yes -qq
- sudo apt-get install --assume-yes -qq python-apt python-pycurl
install:
- sudo pip install ansible
script:
- ansible --version
- ansible-playbook --inventory-file tests/hosts --syntax-check tests/playbook.yml
- ansible-playbook --inventory-file tests/hosts --connection=local -vvvv tests/playbook.yml
| |
Add old version of workflow | name: Vienna
on: pull_request
jobs:
test:
name: Test
runs-on: macOS-latest
steps:
- name: Install SwiftLint
run: brew install swiftlint
- name: Set up Git repository
uses: actions/checkout@v1
- name: Build Xcode project
run: xcodebuild -project Vienna.xcodeproj -scheme Vienna -configuration Development build-for-testing | xcpretty && exit ${PIPESTATUS[0]}
- name: Test Xcode project
run: xcodebuild -project Vienna.xcodeproj -scheme Vienna -configuration Development test | xcpretty && exit ${PIPESTATUS[0]}
| |
Add code climate YAML file | ---
engines:
duplication:
enabled: true
config:
languages:
- python
fixme:
enabled: true
pep8:
enabled: true
radon:
enabled: true
ratings:
paths:
- "**.py"
exclude_paths:
- "docs/*"
- "examples/*"
| |
Add initial github actions workflow | ---
name: build
'on':
push:
branches:
- master
pull_request:
branches: ["*"]
env:
GOPROXY: https://proxy.golang.org
GOPATH: ${{ github.workspace }}/go
jobs:
build:
strategy:
matrix:
os: [ubuntu-18.04, macos-10.15]
go: [1.13, 1.14]
fail-fast: true
runs-on: ${{ matrix.os }}
steps:
- name: Set up go
uses: actions/setup-go@v1
with:
go-version: ${{ matrix.go }}
id: go
- uses: actions/checkout@v2
with:
path: ${{ env.GOPATH }}/src/k8s.io/kops
- name: make nodeup examples test
working-directory: ${{ env.GOPATH }}/src/k8s.io/kops
run: |
make nodeup examples test
verify:
strategy:
matrix:
os: [ubuntu-18.04]
go: [1.13]
fail-fast: true
runs-on: ${{ matrix.os }}
steps:
- name: Set up go
uses: actions/setup-go@v1
with:
go-version: ${{ matrix.go }}
id: go
- uses: actions/checkout@v2
with:
path: ${{ env.GOPATH }}/src/k8s.io/kops
- name: make travis-ci
working-directory: ${{ env.GOPATH }}/src/k8s.io/kops
run: |
make travis-ci
| |
Build with Actions, ship coverage to Codecov. | name: Node CI
on: [push]
jobs:
build:
runs-on: ubuntu-latest
strategy:
matrix:
node-version: [12.x]
steps:
- uses: actions/checkout@v1
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v1
with:
node-version: ${{ matrix.node-version }}
- name: npm install
run: npm install --no-package-lock
- name: npm ls
run: npm ls
- name: npm install nyc, prove, codecov
run: npm install -g nyc prove codecov
- name: npm test
run: nyc npm test
env:
CI: true
- name: generate codecov report
run: nyc report --reporter=text-lcov > coverage.lcov
- name: ship coverage to codecov
run: codecov --branch=${GITHUB_REF##*/}
env:
CODECOV_TOKEN: ${{secrets.CODECOV_TOKEN}}
| |
Update data hourly using GitHub actions | on:
schedule:
- cron: '37 * * * *'
push:
branches:
- master
name: update
jobs:
crawl:
runs-on: ubuntu-18.04
steps:
- name: checkout
uses: actions/checkout@master
- name: setup python
uses: actions/setup-python@master
with:
python-version: 2.x
- name: update
run: python __init__.py
| |
Enable testing on Travis CI | language: python
python:
- "2.7"
- "3.3"
install:
- python setup.py --quiet install
script:
- superzippy --output=foo superzippy superzippy.packaging:run
- ./foo --output=bar superzippy superzippy.packaging:run
- ./bar --help
| |
Allow failure of MRI 2.0 | language: ruby
rvm:
- 1.8.7
- 1.9.2
- 1.9.3
- ruby-head
| language: ruby
rvm:
- 1.8.7
- 1.9.2
- 1.9.3
- ruby-head
matrix:
allow_failures:
- rvm: ruby-head
|
Update from Hackage at 2018-03-12T06:25:59Z | homepage: https://github.com/debug-ito/greskell/
changelog-type: markdown
hash: 70e56c08ce910ba50de2254f9aa7e9bb1a7534f2b5d8cc024aa5e2a0e95f1b9a
test-bench-deps:
base: -any
hspec: ! '>=2.2.3'
text: -any
doctest: ! '>=0.11 && <0.15'
doctest-discover: ! '>=0.1.0.7 && <0.2'
QuickCheck: ! '>=2.8.2 && <2.12'
greskell-core: -any
aeson: -any
maintainer: Toshio Ito <debug.ito@gmail.com>
synopsis: Haskell binding for Gremlin graph query language - core data types and tools
changelog: ! '# Revision history for greskell-core
## 0.1.0.0 -- 2018-03-12
* First version. Released on an unsuspecting world.
'
basic-deps:
base: ! '>=4.9.0.0 && <4.11'
unordered-containers: ! '>=0.2.7.1 && <0.3'
text: ! '>=1.2.2.1 && <1.3'
scientific: ! '>=0.3.4.9 && <0.4'
aeson: ! '>=0.11.2.1 && <1.4'
all-versions:
- '0.1.0.0'
author: Toshio Ito <debug.ito@gmail.com>
latest: '0.1.0.0'
description-type: markdown
description: ! '# greskell-core
Haskell binding for Gremlin query language.
See the package description, or [project README](https://github.com/debug-ito/greskell/blob/master/README.md).
## Author
Toshio Ito <debug.ito@gmail.com>
'
license-name: BSD3
| |
Add Travis script using gfortran 6 | language: generic
# Use the trusty environment, since the netcdf packages in 12.04 are broken
dist: trusty
sudo: required
addons:
apt:
sources:
- ubuntu-toolchain-r-test
packages:
- gfortran-6
before_install:
# Install requirements for M.C. Kernel
# - pip install --user cpp-coveralls
# - sudo apt-get update -qq
# - sudo apt-get install -qq gfortran libnetcdff5 libnetcdf-dev libfftw3-dev liblapack-dev libblas-dev openmpi-bin libopenmpi-dev
install:
- gfortran -v
- gfortran hello_world.f90 -o hello_world.x
script:
- ./hello_world.x
after_success:
cache:
directories:
- $HOME/miniconda
| |
Send build notifications to seb (can't subscribe Travis from: address to google groups) | language: ruby
branches:
only:
- develop
- master
rvm:
- 1.8.7
before_install:
- sudo gem install rubygems-update --version=1.6.2
- sudo gem update --system 1.6.2
- sudo gem install rake
- git submodule update --init --recursive
- psql -c "create database foi_test template template0 encoding 'SQL_ASCII';" -U postgres
- cp config/database.yml-test config/database.yml
- sudo apt-get update
- export DEBIAN_FRONTEND=noninteractive
- sudo apt-get -y install exim4-daemon-light
- sudo apt-get -y install `cut -d " " -f 1 config/packages | egrep -v "(^#|wkhtml|bundler)"`
- ./script/rails-post-deploy
before_script:
notifications:
irc: "irc.freenode.org#alaveteli"
email:
recipients:
- alaveteli-dev@googlegroups.com
| language: ruby
branches:
only:
- develop
- master
rvm:
- 1.8.7
before_install:
- sudo gem install rubygems-update --version=1.6.2
- sudo gem update --system 1.6.2
- sudo gem install rake
- git submodule update --init --recursive
- psql -c "create database foi_test template template0 encoding 'SQL_ASCII';" -U postgres
- cp config/database.yml-test config/database.yml
- sudo apt-get update
- export DEBIAN_FRONTEND=noninteractive
- sudo apt-get -y install exim4-daemon-light
- sudo apt-get -y install `cut -d " " -f 1 config/packages | egrep -v "(^#|wkhtml|bundler)"`
- ./script/rails-post-deploy
before_script:
notifications:
irc: "irc.freenode.org#alaveteli"
email:
recipients:
- seb.bacon@gmail.com
|
Add TravisCI for automated test running | language: python
python:
- "2.7"
virtualenv:
# let the tests pick up the system-installed pygame
system_site_packages: true
before_install:
- sudo apt-get -qq update
- sudo apt-get install python-pygame
install: "pip install -r requirements_dev.txt"
script: python setup.py test
| |
Update from Hackage at 2016-12-23T19:41:14Z | homepage: ''
changelog-type: ''
hash: 8f499412a72838ee300377e9571cfd76ad96d7e0f78bdbd4b8c36dc9a9487f8f
test-bench-deps: {}
maintainer: jeanphilippe.bernardy@gmail.com
synopsis: A generator of nix files
changelog: ''
basic-deps:
base: <=666
text: -any
filepath: ! '>=1.3'
process: ! '>=1.1'
containers: -any
mtl: -any
optparse-applicative: -any
aeson: -any
yaml: -any
directory: ! '>=1.1'
all-versions:
- '1.0'
author: Jean-Philippe Bernardy
latest: '1.0'
description-type: haddock
description: Soon to appear.
license-name: GPL
| |
Disable pandas master tests for python 2.6 and 3.2 | language: python
python:
- 2.6
- 2.7
- 3.2
- 3.3
- 3.4
env:
- PANDAS_VERSION=v0.13.1
- PANDAS_VERSION=v0.14.1
- PANDAS_VERSION=master
before_install:
- sudo add-apt-repository -y ppa:ubuntugis/ppa
- sudo apt-get update
- sudo apt-get install libgdal1h gdal-bin libgdal-dev libspatialindex-dev libspatialindex1
# - sudo -u postgres psql -c "drop database if exists test_geopandas"
# - sudo -u postgres psql -c "create database test_geopandas"
# - sudo -u postgres psql -c "create extension postgis" -d test_geopandas
install:
- pip install -r requirements.txt --use-mirrors
- pip install -r requirements.test.txt --use-mirrors
- if [[ $TRAVIS_PYTHON_VERSION == '2.6' ]]; then pip install -r .requirements-2.6.txt --use-mirrors; fi
- git clone git://github.com/pydata/pandas.git
- cd pandas
- git checkout $PANDAS_VERSION
- python setup.py install
- cd ..
script:
- py.test tests --cov geopandas -v --cov-report term-missing
after_success:
- coveralls
| language: python
python:
- 2.6
- 2.7
- 3.2
- 3.3
- 3.4
env:
- PANDAS_VERSION=v0.13.1
- PANDAS_VERSION=v0.14.1
- PANDAS_VERSION=master
matrix:
exclude:
- python: 2.6
env: PANDAS_VERSION=master
- python: 3.2
env: PANDAS_VERSION=master
before_install:
- sudo add-apt-repository -y ppa:ubuntugis/ppa
- sudo apt-get update
- sudo apt-get install libgdal1h gdal-bin libgdal-dev libspatialindex-dev libspatialindex1
# - sudo -u postgres psql -c "drop database if exists test_geopandas"
# - sudo -u postgres psql -c "create database test_geopandas"
# - sudo -u postgres psql -c "create extension postgis" -d test_geopandas
install:
- pip install -r requirements.txt --use-mirrors
- pip install -r requirements.test.txt --use-mirrors
- if [[ $TRAVIS_PYTHON_VERSION == '2.6' ]]; then pip install -r .requirements-2.6.txt --use-mirrors; fi
- git clone git://github.com/pydata/pandas.git
- cd pandas
- git checkout $PANDAS_VERSION
- python setup.py install
- cd ..
script:
- py.test tests --cov geopandas -v --cov-report term-missing
after_success:
- coveralls
|
Add TravisCI config <3 <3 <3 | language: ruby
rvm:
- 2.1
- 2.0.0
- 1.9.3
- 1.9.2
- ruby-head
- jruby-19mode
matrix:
allow_failures:
- rvm: ruby-head
- rvm: jruby-19mode
| |
Add reek configuration back to the repo | ---
UncommunicativeParameterName:
accept: []
exclude: []
enabled: true
reject:
- !ruby/regexp /^.$/
- !ruby/regexp /[0-9]$/
- !ruby/regexp /[A-Z]/
LargeClass:
max_methods: 1
exclude: []
enabled: true
max_instance_variables: 1
UncommunicativeMethodName:
accept: []
exclude: []
enabled: true
reject:
- !ruby/regexp /^[a-z]$/
- !ruby/regexp /[0-9]$/
- !ruby/regexp /[A-Z]/
LongParameterList:
max_params: 1
exclude: []
enabled: true
overrides: {}
FeatureEnvy:
exclude: []
enabled: true
ClassVariable:
exclude: []
enabled: true
BooleanParameter:
exclude: []
enabled: true
IrresponsibleModule:
exclude: []
enabled: true
UncommunicativeModuleName:
accept: []
exclude: []
enabled: true
reject:
- !ruby/regexp /^.$/
- !ruby/regexp /[0-9]$/
NestedIterators:
ignore_iterators: []
exclude: []
enabled: true
max_allowed_nesting: 1
LongMethod:
max_statements: 5
exclude: []
enabled: true
Duplication:
allow_calls: []
exclude: []
enabled: true
max_calls: 1
UtilityFunction:
max_helper_calls: 0
exclude: []
enabled: true
Attribute:
exclude: []
enabled: false
UncommunicativeVariableName:
accept: []
exclude: []
enabled: true
reject:
- !ruby/regexp /^.$/
- !ruby/regexp /[0-9]$/
- !ruby/regexp /[A-Z]/
SimulatedPolymorphism:
exclude: []
enabled: true
max_ifs: 1
DataClump:
exclude: []
enabled: true
max_copies: 0
min_clump_size: 0
ControlCouple:
exclude: []
enabled: true
LongYieldList:
max_params: 0
exclude: []
enabled: true
| |
Remove dependabot complaining about javascript code | # For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"
on:
push:
branches: [ "master" ]
pull_request:
# The branches below must be a subset of the branches above
branches: [ "master" ]
schedule:
- cron: '31 11 * * 1'
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
permissions:
actions: read
contents: read
security-events: write
strategy:
fail-fast: false
matrix:
# language: [ 'javascript', 'python' ]
language: [ 'python' ]
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
steps:
- name: Checkout repository
uses: actions/checkout@v3
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
# queries: security-extended,security-and-quality
# Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v2
# ℹ️ Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
# If the Autobuild fails above, remove it and uncomment the following three lines.
# modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
# - run: |
# echo "Run, Build Application using script"
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2
| |
Update from Forestry.io - Updated Forestry configuration | ---
label: The Data Visualisation Catalogue
hide_body:
is_partial:
fields:
- name: layout
label: Layout
type: text
hidden: false
default: ''
- name: title
label: Title
type: text
hidden: false
default: ''
- name: date
label: Date
type: datetime
hidden: false
default: ''
- name: modified
label: Modified
type: datetime
hidden: false
default: ''
| |
Add the is-property module as a dependency of ths snap | name: dillinger
version: master
summary: The last Markdown editor, ever
description: |
Dillinger is a cloud-enabled, mobile-ready, offline-storage, AngularJS powered
HTML5 Markdown editor.
confinement: strict
apps:
server:
command: node $SNAP/lib/node_modules/Dillinger/app.js
plugs: [network-bind]
daemon: simple
parts:
dillinger:
source: .
plugin: nodejs
node-engine: '4.4.7'
build-packages: [bzip2, git]
| name: dillinger
version: master
summary: The last Markdown editor, ever
description: |
Dillinger is a cloud-enabled, mobile-ready, offline-storage, AngularJS powered
HTML5 Markdown editor.
confinement: strict
apps:
server:
command: node $SNAP/lib/node_modules/Dillinger/app.js
plugs: [network-bind]
daemon: simple
parts:
dillinger:
source: .
plugin: nodejs
node-engine: '4.4.7'
build-packages: [bzip2, git]
node-packages: [is-property]
|
Add github action for publishing releases to pypi | # This workflow will upload a Python Package using Twine when a release is created
# For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries
# This workflow uses actions that are not certified by GitHub.
# They are provided by a third-party and are governed by
# separate terms of service, privacy policy, and support
# documentation.
name: Upload Python Package
on:
release:
types: [published]
permissions:
contents: read
jobs:
deploy:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Set up Python
uses: actions/setup-python@v3
with:
python-version: '3.x'
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install build
- name: Build package
run: python -m build
- name: Publish package
uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29
with:
user: __token__
password: ${{ secrets.PYPI_API_TOKEN }}
| |
Fix arguments to match replication controller | apiVersion: extensions/v1beta1
kind: DaemonSet
metadata:
name: nginx-ingress-lb
spec:
template:
metadata:
labels:
name: nginx-ingress-lb
spec:
terminationGracePeriodSeconds: 60
containers:
- image: gcr.io/google_containers/nginx-ingress-controller:0.5
name: nginx-ingress-lb
imagePullPolicy: Always
livenessProbe:
httpGet:
path: /healthz
port: 10249
scheme: HTTP
initialDelaySeconds: 30
timeoutSeconds: 5
# use downward API
env:
- name: POD_IP
valueFrom:
fieldRef:
fieldPath: status.podIP
- name: POD_NAME
valueFrom:
fieldRef:
fieldPath: metadata.name
- name: POD_NAMESPACE
valueFrom:
fieldRef:
fieldPath: metadata.namespace
ports:
- containerPort: 80
hostPort: 80
- containerPort: 443
hostPort: 4444
args:
- /nginx-ingress-controller-lb
- --default-backend-service=default/default-http-backend
| apiVersion: extensions/v1beta1
kind: DaemonSet
metadata:
name: nginx-ingress-lb
spec:
template:
metadata:
labels:
name: nginx-ingress-lb
spec:
terminationGracePeriodSeconds: 60
containers:
- image: gcr.io/google_containers/nginx-ingress-controller:0.5
name: nginx-ingress-lb
imagePullPolicy: Always
livenessProbe:
httpGet:
path: /healthz
port: 10249
scheme: HTTP
initialDelaySeconds: 30
timeoutSeconds: 5
# use downward API
env:
- name: POD_IP
valueFrom:
fieldRef:
fieldPath: status.podIP
- name: POD_NAME
valueFrom:
fieldRef:
fieldPath: metadata.name
- name: POD_NAMESPACE
valueFrom:
fieldRef:
fieldPath: metadata.namespace
ports:
- containerPort: 80
hostPort: 80
- containerPort: 443
hostPort: 4444
args:
- /nginx-ingress-controller
- --default-backend-service=default/default-http-backend
|
Create first conda-forge 2.2.2 fine recipe | package:
name: fine
version: 2.2.2
source:
url: https://github.com/FZJ-IEK3-VSA/FINE/archive/refs/tags/v2.2.2.tar.gz
sha256: 1e8bff5de0770e1b4e6aba4f9502c107781bb4b9e1faff5d99db32f836cb0fc0
build:
number: 0
noarch: python
script: {{ PYTHON }} -m pip install --no-deps .
requirements:
host:
- python >=3.6
- pip
- setuptools-git
run:
- python >=3.6
- libiconv
- gdal
- geopandas
- jupyter
- notebook
- libpysal
- pip
- descartes
- glpk
- openpyxl
- matplotlib
- xlrd
- pyomo
- numpy
- pandas
- gpyopt
- networkx
- scipy
- pwlf
- pydoe
- sphinx_rtd_theme
- scikit-learn
- xarray
- statsmodels
- seaborn
- rasterio
- netcdf4
- pytest
- pytest-cov
- pylint
- nbval
- black
- tsam
- wquantiles
- glpk
test:
imports:
- FINE
- pandas
- numpy
source_files:
- .
requires:
- pytest
commands:
- pytest test/
about:
home: https://github.com/FZJ-IEK3-VSA/FINE.git
summary: The FINE python package provides a framework for modeling, optimizing and assessing energy systems. With the provided framework, systems with multiple regions, commodities and time steps can be modeled. Target of the optimization is the minimization of the total annual cost while considering technical and enviromental constraints. Besides using the full temporal resolution, an interconnected typical period storage formulation can be applied, that reduces the complexity and computational time of the model.
license: MIT
license_file: LICENSE.txt
extra:
recipe-maintainers:
- julian-belina
| |
Add closed issue message github action | name: Closed Issue Message
on:
issues:
types: [closed]
jobs:
auto_comment:
runs-on: ubuntu-latest
steps:
- uses: aws-actions/closed-issue-message@v1
with:
# These inputs are both required
repo-token: "${{ secrets.GITHUB_TOKEN }}"
message: |
### ⚠️COMMENT VISIBILITY WARNING⚠️
Comments on closed issues are hard for our team to see.
If you need more assistance, please either tag a team member or open a new issue that references this one.
If you wish to keep having a conversation with other community members under this issue feel free to do so.
| |
Add GitHub action to validate markdown documentation. | # Copyright 2020 The TensorFlow Hub Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
name: Documentation markdown validator
on:
pull_request:
jobs:
test-changed-md-files:
runs-on: ubuntu-16.04
name: Test changed markdown documentation files.
steps:
- uses: actions/checkout@v1
- uses: actions/setup-python@v1
with:
python-version: '3.6'
- name: Install packages
run: |
pip install absl-py tensorflow tensorflow_hub
pip freeze all
- name: Debug information
run: |
echo $(git status)
echo $(git branch)
echo $(git remote)
echo $(git diff origin/master --name-only)
echo $(ls)
- name: Run validator on changed files
run: |
git diff origin/master --name-only --relative=assets | xargs python3.6 ./tools/validator.py
| |
Update from Forestry.io - Updated Forestry configuration | ---
new_page_extension: md
auto_deploy: false
admin_path:
webhook_url:
collections:
upload_dir: uploads
public_path: "/uploads"
front_matter_path: ''
use_front_matter_path: false
file_template: ":filename:"
| |
Add programmatic site configuration to Ansible | ---
# Tasks to automate the stuffing of default Drupal VM variables for synced
# folders, databases, and webhosts. I pulled the arcane "union" syntax from
# https://github.com/ansible/ansible/pull/8019#issuecomment-152079657
- name: Programmatically add synced folders for all regular sites, plus one for
the UCSF multisite, sorted alphabetically.
with_items: "{{ (sites + ['ucsf'])|sort }}"
set_fact:
vagrant_synced_folders: "{{ vagrant_synced_folders|union([{
'local_path': sites_path + '/' + item,
'destination': '/var/www/' + item,
'type': 'nfs',
'create': true,
}]) }}"
- name: Programmatically add database and webhost config for all regular sites
and UCSF multisites, sorted alphabetically.
with_items: "{{ (sites + ucsf_multisites)|sort }}"
set_fact:
mysql_databases: "{{ mysql_databases|union([{
'name': item + '_drupalvm',
'encoding': 'utf8mb4',
'collation': 'utf8mb4_general_ci',
}]) }}"
nginx_hosts: "{{ nginx_hosts|union([{
'server_name': item + '.dvm',
'root': '/var/www/ucsf' if item in ucsf_multisites else '/var/www/' + item,
'is_php': true,
}]) }}"
| |
Add project board workflow for good first issues | name: Add good first issues to project board
on:
# Trigger when an issue gets labeled or deleted
issues:
types: [opened, reopened, closed, labeled, assigned]
jobs:
manage_project_issues:
runs-on: ubuntu-latest
if: contains(github.event.issue.labels.*.name, 'good first issue')
steps:
- name: Create Project Card
if: |
github.event.action == 'labeled' ||
github.event.action == 'reopened' ||
github.event.action == 'opened'
uses: peter-evans/create-or-update-project-card@v1.1.4
with:
token: ${{ secrets.PROJECT_BOARD_AUTOMATION_PAT }}
project-location: vapor
project-name: Beginner Issues
column-name: To do
issue-number: ${{ github.event.number }}
- name: Update Project Card
if: |
github.event.action == 'assigned'
uses: peter-evans/create-or-update-project-card@v1.1.4
with:
token: ${{ secrets.PROJECT_BOARD_AUTOMATION_PAT }}
project-location: vapor
project-name: Beginner Issues
column-name: In progress
issue-number: ${{ github.event.number }}
- name: Close Project Card
if: |
github.event.action == 'closed'
uses: peter-evans/create-or-update-project-card@v1.1.4
with:
token: ${{ secrets.PROJECT_BOARD_AUTOMATION_PAT }}
project-location: vapor
project-name: Beginner Issues
column-name: Done
issue-number: ${{ github.event.number }}
| |
Add stale issue cleanup action | name: "Close stale issues"
# Controls when the action will run.
on:
schedule:
- cron: "0 0 * * *"
jobs:
cleanup:
runs-on: ubuntu-latest
name: Stale issue job
steps:
- uses: aws-actions/stale-issue-cleanup@v3
with:
# Setting messages to an empty string will cause the automation to skip
# that category
ancient-issue-message: We have noticed this issue has not recieved attention in 3 years. We will close this issue for now. If you think this is in error, please feel free to comment and reopen the issue.
stale-issue-message: This issue has not recieved a response in 1 week. If you want to keep this issue open, please just leave a comment below and auto-close will be canceled.
# These labels are required
stale-issue-label: closing-soon
exempt-issue-label: no-autoclose
stale-pr-label: no-pr-activity
exempt-pr-label: awaiting-approval
response-requested-label: response-requested
# Don't set closed-for-staleness label to skip closing very old issues
# regardless of label
closed-for-staleness-label: closed-for-staleness
# Issue timing
days-before-stale: 7
days-before-close: 4
days-before-ancient: 1095
# If you don't want to mark a issue as being ancient based on a
# threshold of "upvotes", you can set this here. An "upvote" is
# the total number of +1, heart, hooray, and rocket reactions
# on an issue.
minimum-upvotes-to-exempt: 10
repo-token: ${{ secrets.GITHUB_TOKEN }}
loglevel: DEBUG
# Set dry-run to true to not perform label or close actions.
# dry-run: true | |
Add ops-file to enable TLS on File Server | ---
- type: replace
path: /instance_groups/name=api/jobs/name=file_server/properties/https_server_enabled?
value: true
- type: replace
path: /instance_groups/name=api/jobs/name=file_server/properties/tls?
value:
cert: ((file_server_cert.certificate))
key: ((file_server_cert.private_key))
- type: replace
path: /variables/-
value:
name: file_server_cert
options:
ca: service_cf_internal_ca
common_name: file-server.service.cf.internal
type: certificate
extended_key_usage:
- server_auth
| |
Add a workflow for handling branch-specific containers, if needed | name: Upload docker containers modified for release branches
on:
workflow_dispatch:
push:
paths:
- '.github/workflows/docker.yml'
- 'tf_sig_build_dockerfiles/**'
- '!tf_sig_build_dockerfiles/README.md'
branches:
# Branch should match the TF branch name
- "r[1-9].[0-9]+"
jobs:
docker:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [python3.7, python3.8, python3.9, python3.10]
steps:
-
name: Checkout
uses: actions/checkout@v2
-
name: Set up Docker Buildx
uses: docker/setup-buildx-action@v1
-
name: Login to DockerHub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
-
name: Login to GCR
uses: docker/login-action@v1
with:
registry: gcr.io
username: _json_key
password: ${{ secrets.GCP_CREDS }}
-
name: Generate variables for cache busting and tag naming
run: |
echo "::set-output name=DATE::$(date +'%Y-%m-%d')"
# Converts r2.9 to just 2.9
echo "::set-output name=REF::$(echo $GITHUB_REF_NAME | sed 's/r//g')"
id: vars
-
name: Build and push
id: docker_build
uses: docker/build-push-action@v2
with:
push: true
context: ./tf_sig_build_dockerfiles
target: devel
build-args: |
PYTHON_VERSION=${{ matrix.python-version }}
CACHEBUSTER=${{ steps.vars.outputs.DATE }}
tags: |
tensorflow/build:${{ steps.vars.outputs.REF }}-${{ matrix.python-version }}
gcr.io/tensorflow-sigs/build:${{ steps.vars.outputs.REF }}-${{ matrix.python-version }}
cache-from: type=registry,ref=tensorflow/build:${{ steps.vars.outputs.REF }}-${{ matrix.python-version }}
cache-to: type=inline
-
name: Image digest
run: echo ${{ steps.docker_build.outputs.digest }}
| |
Update from Hackage at 2018-10-17T01:30:58Z | homepage: https://github.com/jonascarpay/apecs-physics#readme
changelog-type: ''
hash: 2c7170d0d5f2477795af1eabd7506c75959b0533f0ddba1dba26f125d00ac4f2
test-bench-deps: {}
maintainer: jonascarpay@gmail.com
synopsis: Gloss rendering for apecs-physics
changelog: ''
basic-deps:
gloss: -any
apecs: ! '>=0.6'
base: ! '>=4.7 && <5'
apecs-physics: -any
all-versions:
- '0.1.0.0'
author: Jonas Carpay
latest: '0.1.0.0'
description-type: markdown
description: ! '# apecs-physics-gloss
Tools for simple gloss-based rendering, split out to avoid unnecessary dependencies
if you roll our own rendering
'
license-name: BSD3
| |
Update from Hackage at 2018-03-17T07:20:09Z | homepage: https://github.com/parsonsmatt/garlic-bread#readme
changelog-type: markdown
hash: 673437c96e8927265e7306955f90b2e0be248f11db575c5cf45fe4dcb9d0c693
test-bench-deps:
base: ! '>=4.9 && <5'
hspec: -any
garlic-bread: -any
mtl: -any
transformers: -any
maintainer: parsonsmatt@gmail.com
synopsis: A monad transformer for keeping track of where you've come from.
changelog: ! '# Changelog for garlic-bread
## `0.1.0.0`
- Initial release!
'
basic-deps:
base: ! '>=4.9 && <5'
mtl: -any
transformers: -any
all-versions:
- '0.1.0.0'
author: Matt Parsons
latest: '0.1.0.0'
description-type: markdown
description: ! '# garlic-bread
[](https://travis-ci.org/parsonsmatt/garlic-bread)
A monad transformer for remembering where you''ve been.
## Example: XML parsing
The initial inspiration for this library came in the form of parsing XML.
Buggy, underspecified, *weird* XML.
I''d write a parser, and it would work OK on the test data, but then we''d release
it into production, and suddenly it found parse errors.
These documents were huge, repetitive, deeply nested, and unweildy.
I quickly realized that I needed a way to remember where I''ve been.
Remembering the tales of *Theseus and the Minotaur* and was *Hansel and Gretel*,
I started writing some combinators to remember the path through the XML document.
When a parse failed, I bubbled the breadcrumbs up.
Suddenly, reading the error messages became easy: it told me exactly how to get
to the data that failed the test!
'
license-name: BSD3
| |
Update from Hackage at 2021-02-09T21:49:18Z | homepage: ''
changelog-type: ''
hash: a0cd615fc3bfc0f8715afde6316cecff34b65b1b7873e64a0eb8f875c271d134
test-bench-deps:
invert: -any
base: '>=4.12 && <4.15'
criterion: ^>=1.5
maintainer: ''
synopsis: Automatically generate a function's inverse
changelog: ''
basic-deps:
base: ^>=4.12 || ^>=4.13 || ^>=4.14
unordered-containers: ^>=0.2
generic-deriving: ^>=1.14
containers: ^>=0.6
hashable: ^>=1.3
vector: ^>=0.12
all-versions:
- '1.0'
author: ''
latest: '1.0'
description-type: haddock
description: |-
This library deals with computing a function's inverse.
This is, of course, not possible in general, so the
applicability of this library comes with some caveats:
* The function's domain must be enumerable, and
preferably rather small. We provide a few suggestions
and utilities for how to enumerate the domain.
* The function's codomain must belong to the @Eq@ class.
An @Ord@ or @Hashable@ instance is also nice, to
accommodate a data structure for efficient lookups.
* The functions for inverting injections, surjections,
and bijections require some care to use correctly,
because the library does not verify these properties.
The main purpose of this library is to provide documentation
and convenience. It does not contain a great quantity of code,
so a user hesitant to incur a dependency on the package might
well choose only to read and borrow its techniques.
license-name: Apache-2.0
| |
Add Github Actions regression testing | name: Regression Tests
on: [push, pull_request]
jobs:
build:
name: Python ${{ matrix.python-version }} (${{ matrix.group }}/10)
runs-on: ubuntu-20.04
strategy:
matrix:
python-version: [3.9]
group: [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
steps:
- uses: actions/checkout@v1
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install Icarus Verilog
run: |
sudo apt install -y --no-install-recommends iverilog
- name: Install Python dependencies
run: |
python -m pip install --upgrade pip
pip install tox tox-gh-actions
- name: Test with tox
run: tox -- --splits 10 --group ${{ matrix.group }}
| |
Add Github actions testing of style/unit | name: delivery
on: [push, pull_request]
jobs:
delivery:
runs-on: ubuntu-latest
steps:
- name: Check out code
uses: actions/checkout@master
- name: Run Chef Delivery
uses: actionshub/chef-delivery@master
env:
CHEF_LICENSE: accept-no-persist | |
Update from Hackage at 2015-06-25T12:19:43+0000 | homepage: https://github.com/emilaxelsson/ho-rewriting
changelog-type: ''
hash: edf30383db4bbfcd3c0344503bb52842f8292a2fd5f07b62aba1c4f647099a0f
test-bench-deps:
patch-combinators: -any
base: -any
compdata: -any
ho-rewriting: -any
maintainer: emax@chalmers.se
synopsis: Generic rewrite rules with safe treatment of variables and binders
changelog: ''
basic-deps:
patch-combinators: -any
base: ! '>=4.7 && <5'
compdata: ! '>=0.9'
containers: -any
mtl: -any
all-versions:
- '0.1'
author: Emil Axelsson
latest: '0.1'
description-type: markdown
description: ! "This package gives a generic implementation of higher-order rewriting.
The main idea is to use techniques from embedded domain-specific languages to offer
an interface which is both safe and syntactically appealing.\n\nSome examples are
found in the [examples directory](examples). For more information, see \"Lightweight
Higher-Order Rewriting in Haskell\":\n\n * [Paper](http://www.cse.chalmers.se/~emax/documents/axelsson2015lightweight_DRAFT.pdf)\n
\ * [Slides](http://www.cse.chalmers.se/~emax/documents/axelsson2015lightweight_slides.pdf)\n"
license-name: BSD3
| |
Update from Hackage at 2018-11-20T17:44:01Z | homepage: https://github.com/nh2/lambdabot-zulip
changelog-type: ''
hash: 48610a54ff5b78b488a5b13b60c665d1032db677f9d9efcc515a7f503171540a
test-bench-deps:
base: ! '>=4 && <5'
hspec: ! '>=1.3.0.1'
text: -any
HUnit: ! '>=1.2'
lambdabot-zulip: -any
maintainer: Niklas Hambüchen <mail@nh2.me>
synopsis: Lambdabot for Zulip Chat
changelog: ''
basic-deps:
base: ==4.*
text: -any
hint: -any
mueval: -any
containers: -any
hzulip: -any
optparse-applicative: -any
say: -any
lambdabot-zulip: -any
yaml: -any
all-versions:
- '0.1.0'
author: Niklas Hambüchen <mail@nh2.me>
latest: '0.1.0'
description-type: markdown
description: ! '# lambdabot-zulip
A [`lambdabot`](https://wiki.haskell.org/Lambdabot)-like bot for [Zulip](https://zulipchat.com/).
Can evaluate Haskell expressions and show their types.
### Screenshot

## Usage
Run the `lambdabot-zulip-server` executable to start the bot.
It reads a `settings.yaml` in the working directory (or passed via command line).
See [`example-settings/settings.yaml`](example-settings/settings.yaml) for an example.
You have to provide Zulip API credentials, and streams (channels) the bot should
be active on.
'
license-name: MIT
| |
Configure procurement.cabinetoffice.gov.uk for global archive | ---
site: cabinetoffice_procurement
whitehall_slug: cabinet-office
title: Cabinet Office
redirection_date: 1st February 2014
homepage: https://www.gov.uk/government/organisations/cabinet-office
tna_timestamp: 20130503183627
host: procurement.cabinetoffice.gov.uk
furl: www.gov.uk/cabinetoffice
global: =410
| |
Add example with deployment that has a data volume. | apiVersion: v1
kind: PersistentVolume
metadata:
name: hello-volume
labels:
type: local
spec:
storageClassName: manual
capacity:
storage: 100Mi
accessModes:
- ReadWriteOnce
hostPath:
path: "/mnt/data"
---
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
name: hello-claim
spec:
storageClassName: manual
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 50Mi
---
apiVersion: v1
kind: Service
metadata:
name: hello
spec:
type: ClusterIP
selector:
service: hello
ports:
- name: proxied
port: 80
targetPort: http
---
apiVersion: apps/v1
kind: Deployment
metadata:
name: hello
labels:
service: hello
spec:
replicas: 1
selector:
matchLabels:
service: hello
template:
metadata:
labels:
service: hello
spec:
volumes:
- name: hello-storage
persistentVolumeClaim:
claimName: hello-claim
containers:
- name: hello-container
image: nginx
ports:
- containerPort: 80
name: http
volumeMounts:
- mountPath: "/usr/share/nginx/html"
name: hello-storage
| |
Add database settings to ci tests | language: python
sudo: false
cache:
- pip
python:
- "2.7"
- "3.5"
env:
- DJANGO_VERSION=1.10
- DJANGO_VERSION=1.11rc1
install:
- pip install -r requirements.txt
- pip uninstall -y Django
- pip install -q Django==$DJANGO_VERSION
before_script:
- psql -c 'create database access_mo_django;' -U postgres
- psql -c "CREATE EXTENSION postgis;" -U postgres -d access_mo_django
script:
- flake8 am
- python am/manage.py migrate
- coverage run am/manage.py test house_scraper
- coverage run am/manage.py test senate_scraper
after_success:
- coveralls
addons:
postgresql: "9.4"
| language: python
sudo: false
cache:
- pip
python:
- "2.7"
- "3.5"
env:
- DJANGO_VERSION=1.10 DATABASE_SETTINGS='settings_local.py'
- DJANGO_VERSION=1.11rc1 DATABASE_SETTINGS='settings_local.py'
install:
- pip install -r requirements.txt
- pip uninstall -y Django
- pip install -q Django==$DJANGO_VERSION
before_script:
- psql -c 'create database access_mo_django;' -U postgres
- psql -c "CREATE EXTENSION postgis;" -U postgres -d access_mo_django
script:
- flake8 am
- python am/manage.py migrate
- coverage run am/manage.py test house_scraper
- coverage run am/manage.py test senate_scraper
after_success:
- coveralls
addons:
postgresql: "9.4"
|
Update from Forestry.io - Updated Forestry configuration | ---
label: Variados
hide_body: false
fields:
- name: layout
label: Layout
type: text
hidden: false
default: ''
- name: title
label: Title
type: text
hidden: false
default: ''
- name: date
label: Date
type: datetime
hidden: false
default: ''
- name: img
label: Img
type: file
hidden: false
default: ''
- name: comments
label: Comments
type: boolean
hidden: false
default: ''
- name: tags
label: Tags
type: tag_list
hidden: false
default: ''
| |
Add Edge Gateway firewall and NAT rule config file | gateway: 0E7T-IS-GATEWAY-002
firewall_service:
enabled: true
policy: drop
firewall_rules:
- description: "Outbound"
protocols: any
destination_ip: "external"
source_ip: "internal"
- description: "Public ICMP access to backup network"
protocols: icmp
destination_ip: "31.210.241.201"
source_ip: Any
- description: "SSH access from Aviation House .90"
protocols: tcp
destination_ip: "31.210.241.201"
source_ip: "80.194.77.90"
destination_port_range: "22"
- description: "SSH access from Aviation House .100"
protocols: tcp
destination_ip: "31.210.241.201"
source_ip: "80.194.77.100"
destination_port_range: "22"
- description: "SSH access from interim production"
protocols: tcp
destination_ip: "31.210.241.201"
source_ip: "217.171.99.70"
destination_port_range: "22"
- description: "SSH access from P1 production"
protocols: tcp
destination_ip: "31.210.241.201"
source_ip: "37.26.90.227"
destination_port_range: "22"
nat_service:
enabled: true
nat_rules:
- rule_type: 'SNAT'
network_id: '5d4ab16b-df39-4f81-9a68-c7cf2bec6bb4'
original_ip: "192.168.152.0/24"
translated_ip: "31.210.241.201"
- rule_type: 'DNAT'
network_id: '5d4ab16b-df39-4f81-9a68-c7cf2bec6bb4'
original_ip: "31.210.241.201"
original_port: "22"
translated_ip: "192.168.152.10"
translated_port: "22"
| |
Add ghcr.io images build and push | name: Docker images
on:
push:
branches: [ master ]
tags: [ 'v*' ]
pull_request:
branches: [ master ]
jobs:
docker:
name: Build image
runs-on: ubuntu-latest
steps:
- name: Check out code
uses: actions/checkout@v2
- name: Docker meta
id: meta
uses: docker/metadata-action@v3
with:
images: ghcr.io/${{ github.repository }}
# create latest tag for branch events
flavor: |
latest=${{ github.event_name == 'push' && github.ref_type == 'branch' }}
tags: |
type=ref,event=branch
type=ref,event=pr
type=semver,pattern={{version}}
type=semver,pattern={{major}}.{{minor}}
type=semver,pattern={{major}}.{{minor}}.{{patch}}
- name: Login to DockerHub
if: github.event_name != 'pull_request'
uses: docker/login-action@v1
with:
registry: ghcr.io
username: ${{ github.actor }}
password: ${{ secrets.GITHUB_TOKEN }}
- name: Build and push
id: docker_build
uses: docker/build-push-action@v2
with:
# push for non-pr events
push: ${{ github.event_name != 'pull_request' }}
context: .
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
| |
Update from Hackage at 2016-11-29T14:58:52Z | homepage: http://github.com/aelve/fmt
changelog-type: markdown
hash: ee40f5f74bc70265be6c523cbb8bc786ef33958f9c3ee3038ddc7e24ac650582
test-bench-deps: {}
maintainer: yom@artyom.me
synopsis: Nice formatting library
changelog: ! '# 0.1.0.0
First release.
'
basic-deps:
base: ! '>=4.6 && <5'
text: -any
text-format: ! '>=0.3'
all-versions:
- '0.0.0.1'
author: Artyom
latest: '0.0.0.1'
description-type: haddock
description: Nice formatting library
license-name: MIT
| |
Revert "Even more component types." | - group: Atomic
components:
- name: Button
path: button
- name: Menu
path: menu
- name: Radio Button
path: radio-button
- name: Check Box
path: check-box
- name: String Input
path: string-input
- name: Slider
path: slider
- name: Command Loop
path: command-loop
- group: Structured
components:
- name: Date & Time
path: date-and-time
- name: List
path: list
- name: Map
path: map
- name: Media
path: media
- name: Search
path: search
- name: Table
path: table
- name: Tree
path: tree
- name: Web
path: web
- group: Behavior
components:
- name: Drag & Drop
path: drag-and-drop
- name: Resizing
path: resizing
- name: Rotating
path: rotating
- name: Selecting
path: selecting
- name: Scrolling
path: scrolling
- name: Zooming
path: zooming
- group: Composite
components:
- name: Modal View
path: modal-view
- name: View Switcher
path: view-switcher
- name: Split View
path: split-view
- group: Environment
components:
- name: Information Transfer
path: information-transfer
- name: Login/Unlock
path: login-unlock
- name: Macro View
path: macro-view
- name: Persistent Storage
path: storage
- name: Task Launcher
path: task-launcher
- name: Task Switcher
path: task-switcher
- name: Window
path: window
| - group: Atomic
components:
- name: Button
path: button
- name: Menu
path: menu
- name: Radio Button
path: radio-button
- name: Check Box
path: check-box
- name: String Input
path: string-input
- name: Slider
path: slider
- name: Command Loop
path: command-loop
- group: Structured
components:
- name: Date & Time
path: date-and-time
- name: List
path: list
- name: Media
path: media
- name: Search
path: search
- name: Table
path: table
- name: Tree
path: tree
- group: Behavior
components:
- name: Drag & Drop
path: drag-and-drop
- name: Resizing
path: resizing
- name: Rotating
path: rotating
- name: Selecting
path: selecting
- name: Scrolling
path: scrolling
- name: Zooming
path: zooming
- group: Composite
components:
- name: Modal View
path: modal-view
- name: View Switcher
path: view-switcher
- name: Split View
path: split-view
- group: Environment
components:
- name: Information Transfer
path: information-transfer
- name: Login/Unlock
path: login-unlock
- name: Persistent Storage
path: storage
- name: Task Launcher
path: task-launcher
- name: Task Switcher
path: task-switcher
- name: Window
path: window
|
Add C dev environment play | ---
- hosts: all
become: yes
become_user: root
become_method: sudo
gather_facts: no
tasks:
- name: Make sure C dev environment is installed
include_role:
name: c-devenv
...
| |
Remove setuptools as running dependency | {% set name = "falcon" %}
{% set version = "052016" %}
package:
name: {{ name }}
version: {{ version }}
about:
home: 'https://github.com/PacificBiosciences/FALCON'
license: "Standard PacBio Open Source License"
summary: "A set of tools for fast aligning long reads for consensus and assembly"
source:
fn: funzip_052016.tar.gz
url: https://github.com/PacificBiosciences/FALCON/archive/funzip_052016.tar.gz
md5: be4a225262ee4bd9729b6b7e81a83615
build:
number: 0
skip: True # [py3k]
requirements:
build:
- python
- setuptools
- pypeflow
- networkx
run:
- python
- setuptools
- pypeflow
- rdflib
- networkx
test:
commands:
- fc_run.py 2>&1 | grep fc_run | {% set name = "falcon" %}
{% set version = "052016" %}
package:
name: {{ name }}
version: {{ version }}
about:
home: 'https://github.com/PacificBiosciences/FALCON'
license: "Standard PacBio Open Source License"
summary: "A set of tools for fast aligning long reads for consensus and assembly"
source:
fn: funzip_052016.tar.gz
url: https://github.com/PacificBiosciences/FALCON/archive/funzip_052016.tar.gz
md5: be4a225262ee4bd9729b6b7e81a83615
build:
number: 0
skip: True # [py3k]
requirements:
build:
- python
- setuptools
- pypeflow
- networkx
run:
- python
- pypeflow
- rdflib
- networkx
test:
commands:
- fc_run.py 2>&1 | grep fc_run |
Add basic GitHub actions workflow | name: Build and test QDLDL
on: [push, pull_request]
env:
# The CMake build type
BUILD_TYPE: Release
jobs:
build:
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
runs-on: ${{ matrix.os }}
steps:
- name: Check out repository
uses: actions/checkout@v2
with:
submodules: 'recursive'
- name: Setup Environment
run: cmake -E make_directory ${{ runner.workspace }}/build
- name: Configure
shell: bash
working-directory: ${{ runner.workspace }}/build
run: cmake --warn-uninitialized -DCMAKE_BUILD_TYPE=$BUILD_TYPE -DUNITTESTS=ON -DCOVERAGE=ON $GITHUB_WORKSPACE
- name: Build
shell: bash
working-directory: ${{ runner.workspace }}/build
run: cmake --build . --config $BUILD_TYPE
- name: Run tests
shell: bash
working-directory: ${{ runner.workspace }}/build
run: ctest -C $BUILD_TYPE
| |
Update from Hackage at 2019-05-04T03:38:29Z | homepage: https://github.com/thomaseding/math-metric
changelog-type: ''
hash: 77ba8b9a420c98c2dd4a9ba24126fc2df5c1e8f55307b6b0324d21fe49c1d41f
test-bench-deps: {}
maintainer: Thomas Eding
synopsis: Typeclass for metric spaces
changelog: ''
basic-deps:
base: ^>=4.12.0.0
all-versions:
- 0.1.0.1
author: Thomas Eding
latest: 0.1.0.1
description-type: markdown
description: math-metric
license-name: BSD-3-Clause
| |
Change location of coverage data | sudo: required
language: python
python:
- "3.6"
services:
- docker
before_install:
- docker build -t singularitynet_agent agent
- docker volume create results
script:
- docker run --volume results:/data singularitynet_agent /bin/sh -c "./agent.sh test"
after_success:
- docker run --volume results:/data -e TRAVIS=$TRAVIS -e TRAVIS_JOB_ID=$TRAVIS_JOB_ID -e TRAVIS_BRANCH=$TRAVIS_BRANCH singularitynet_agent /bin/bash -c "coveralls"
| sudo: required
language: python
python:
- "3.6"
services:
- docker
before_install:
- docker build -t singularitynet_agent agent
- docker volume create results
- pip install coveralls
script:
- docker run --volume ${PWD}/test-results:/data singularitynet_agent /bin/sh -c "./agent.sh test"
after_success:
- coveralls
|
Add redis-server to build env | language: python
python:
- "2.6"
env:
- CFLAGS=-O0
branches:
only:
- master
install:
- "uname -a"
- "lsb_release -a"
- "sudo apt-get install moreutils libblas-dev liblapack-dev"
- "bash -ex .travis/upgrade-couchdb.sh | ts"
- "git clone https://github.com/dimagi/commcarehq-venv.git"
- "cp -r commcarehq-venv/hq_env/* ~/virtualenv/"
- "source ~/virtualenv/bin/activate"
- "time (bash -e .travis/quietly-run-install.sh | ts)"
- "time (pip install --exists-action w -r requirements/requirements.txt --use-mirrors)"
- "time (bash -e .travis/misc-setup.sh | ts)"
- "cp .travis/localsettings.py localsettings.py"
- "pip install coverage unittest2 mock --use-mirrors"
script: "coverage run manage.py test --noinput --failfast"
after_success:
- coverage report
- coveralls
services:
- postgresql
- couchdb
- rabbitmq
- elasticsearch
- memcache
| language: python
python:
- "2.6"
env:
- CFLAGS=-O0
branches:
only:
- master
install:
- "uname -a"
- "lsb_release -a"
- "sudo apt-get install moreutils libblas-dev liblapack-dev"
- "bash -ex .travis/upgrade-couchdb.sh | ts"
- "git clone https://github.com/dimagi/commcarehq-venv.git"
- "cp -r commcarehq-venv/hq_env/* ~/virtualenv/"
- "source ~/virtualenv/bin/activate"
- "time (bash -e .travis/quietly-run-install.sh | ts)"
- "time (pip install --exists-action w -r requirements/requirements.txt --use-mirrors)"
- "time (bash -e .travis/misc-setup.sh | ts)"
- "cp .travis/localsettings.py localsettings.py"
- "pip install coverage unittest2 mock --use-mirrors"
script: "coverage run manage.py test --noinput --failfast"
after_success:
- coverage report
- coveralls
services:
- postgresql
- couchdb
- rabbitmq
- elasticsearch
- memcache
- redis-server
|
Update from Hackage at 2017-09-17T05:52:02Z | homepage: https://github.com/michael-swan/pcf-font-embed
changelog-type: markdown
hash: 21d4ecb723ac29f761145d25f71dda66797b83b29feac88228b68580f1afb4cf
test-bench-deps: {}
maintainer: mswan@fastmail.com
synopsis: Template Haskell for embedding text rendered using PCF fonts.
changelog: ! '# 0.1.0.0
Complete basic PCF rendered text embedding with embedPCFText.
'
basic-deps:
bytestring: -any
base: ! '>=4.7 && <5'
pcf-font: -any
template-haskell: -any
vector: -any
all-versions:
- '0.1.0.0'
author: Michael Swan
latest: '0.1.0.0'
description-type: markdown
description: ! '# pcf-font-embed
'
license-name: BSD3
| |
Fix PHP versions in Travis config | language: php
sudo: false
matrix:
include:
- php: 5.3
- php: 5.4
- php: 5.5
- php: 5.6
- php: 5.6
env: COMPOSER_FLAGS='--prefer-lowest --prefer-stable'
- php: 7.0
- php: hhvm
- php: nightly
allow_failures:
- php: 7.0
- php: nightly
fast_finish: true
before_install:
- travis_retry composer self-update && composer --version
install:
- travis_retry composer update $COMPOSER_FLAGS --prefer-source -n
script: vendor/bin/phpunit --verbose --coverage-clover=coverage.clover
after_script:
- php ocular.phar code-coverage:upload --format=php-clover coverage.clover
after_script:
- sh -c 'if [ "$TRAVIS_PHP_VERSION" != "hhvm" ]; then wget https://scrutinizer-ci.com/ocular.phar && php ocular.phar code-coverage:upload --format=php-clover coverage.clover; fi;'
notifications:
webhooks:
urls: ['https://webhooks.gitter.im/e/e1d99368833bf806ceeb']
on_success: change
on_failure: always
on_start: never
| language: php
sudo: false
matrix:
include:
- php: 5.5
- php: 5.6
- php: 5.6
env: COMPOSER_FLAGS='--prefer-lowest --prefer-stable'
- php: 7.0
- php: hhvm
- php: nightly
allow_failures:
- php: 7.0
- php: nightly
fast_finish: true
before_install:
- travis_retry composer self-update && composer --version
install:
- travis_retry composer update $COMPOSER_FLAGS --prefer-source -n
script: vendor/bin/phpunit --verbose --coverage-clover=coverage.clover
after_script:
- php ocular.phar code-coverage:upload --format=php-clover coverage.clover
after_script:
- sh -c 'if [ "$TRAVIS_PHP_VERSION" != "hhvm" ]; then wget https://scrutinizer-ci.com/ocular.phar && php ocular.phar code-coverage:upload --format=php-clover coverage.clover; fi;'
notifications:
webhooks:
urls: ['https://webhooks.gitter.im/e/e1d99368833bf806ceeb']
on_success: change
on_failure: always
on_start: never
|
Add Symfony 2.5 + Remove 2.0 branch | language: php
php: 5.3.3
env:
- SYMFONY_VERSION="2.1.*"
- SYMFONY_VERSION="2.2.*"
- SYMFONY_VERSION="2.3.*"
- SYMFONY_VERSION="2.4.*"
before_script:
- composer require --no-update symfony/framework-bundle:${SYMFONY_VERSION}
- composer require --no-update symfony/form:${SYMFONY_VERSION}
- composer install --dev --prefer-source
script: bin/phpunit --coverage-clover clover.xml
after_script: bin/coveralls
branches:
only:
- master
- 2.0
notifications:
email: geloen.eric@gmail.com
| language: php
php: 5.3.3
env:
- SYMFONY_VERSION="2.1.*"
- SYMFONY_VERSION="2.2.*"
- SYMFONY_VERSION="2.3.*"
- SYMFONY_VERSION="2.4.*"
- SYMFONY_VERSION="2.5.*"
before_script:
- composer require --no-update symfony/framework-bundle:${SYMFONY_VERSION}
- composer require --no-update symfony/form:${SYMFONY_VERSION}
- composer install --dev --prefer-source
script: bin/phpunit --coverage-clover clover.xml
after_script: bin/coveralls
branches:
only: master
notifications:
email: geloen.eric@gmail.com
|
Configure Travis for external code coverage | language: php
php:
- 5.3
- 5.4
- 5.5
before_script:
- composer install --dev
script: phpunit --coverage-text
| language: php
php:
- 5.3
- 5.4
- 5.5
before_script:
- composer install --dev
script: phpunit --coverage-text --coverage-clover=coverage.clover
after_script:
- wget https://scrutinizer-ci.com/ocular.phar
- php ocular.phar code-coverage:upload --format=php-clover coverage.clover |
Include acceptance test environment yaml | resources:
etcd:
number: 1
flavor: 100
image: 169d484a-dde2-44c8-8f15-daaa1ba69e94
networks:
- d7c83218-276b-4e4c-8ed1-1165b44c3a44
apache:
number: 1
flavor: 100
image: 169d484a-dde2-44c8-8f15-daaa1ba69e94
networks:
- d7c83218-276b-4e4c-8ed1-1165b44c3a44
| |
Update from Hackage at 2021-12-18T10:39:24Z | homepage: https://andriusstank.github.io/downhill/
changelog-type: markdown
hash: 40763fc666b656bd58c21e4ad39a46ca396e18651d90a6d1a02f6d78683aee8e
test-bench-deps:
base: -any
vector-space: -any
downhill: -any
tasty-hunit: -any
tasty: -any
maintainer: floppycat@gmail.com
synopsis: Reverse mode automatic differentiation
changelog: |
# Revision history for downhill
## 0.1.0.0 -- 2021-12-12
* First version
basic-deps:
reflection: '>=2.1.6 && <2.2'
base: '>=4.12.0.0 && <4.17'
unordered-containers: '>=0.2.14 && <0.3'
vector-space: '>=0.16 && <0.17'
th-abstraction: '>=0.4.3 && <0.5'
containers: '>=0.6.5 && <0.7'
transformers: '>=0.5.6 && <0.6'
template-haskell: '>=2.16.0 && <2.19'
all-versions:
- 0.1.0.0
author: Andrius Stankevičius
latest: 0.1.0.0
description-type: haddock
description: |-
Simple and well typed implementation of reverse mode automatic differentiation.
See home page <https://andriusstank.github.io/downhill/> for more detailed
description.
license-name: MIT
| |
Set up ndawg.org as a global redirect | ---
site: phe_ndawg
whitehall_slug: public-health-england
homepage: https://www.gov.uk/government/organisations/public-health-england
homepage_furl: www.gov.uk/phe
tna_timestamp: 20170605150000 # Stub timestamp - site not in TNA
host: ndawg.org
aliases:
- www.ndawg.org
global: =301 https://www.gov.uk/government/organisations/public-health-england
| |
Add reno note on volumes.backups_enabled option | ---
upgrade:
- New config section ``volumes`` with new config option
``[volumes]backups_enabled`` (defaults to ``True``).
Operators that do not have Cinder backup service deployed in their cloud
are encouraged to set this option to ``False``.
fixes:
- Allow to configure Heat service to forbid creation of stacks containing
Volume resources with ``deletion_policy`` set to ``Snapshot`` when there
is no Cinder backup service available.
| |
Update from Hackage at 2017-03-27T12:39:28Z | homepage: xy30.com
changelog-type: ''
hash: 8a8c4d4393ef94a9b5358a3030e9ec5bda707ba7958d30e08fb0c10bce4c2303
test-bench-deps: {}
maintainer: hawk.alan@gmail.com
synopsis: Display the number of bytes of each line
changelog: ''
basic-deps:
base: ! '>4 && <10000'
all-versions:
- '0.1.0.0'
author: Alan Hawkins
latest: '0.1.0.0'
description-type: haddock
description: ''
license-name: GPL-3
| |
Update from Hackage at 2016-11-21T15:04:19Z | homepage: https://github.com/hltcoe
changelog-type: ''
hash: 92bf3fced5ca9e29e745a6c986e3cf94bbdbb8f339cbc2dfb112aa33a8189898
test-bench-deps: {}
maintainer: tom@cs.jhu.edu
synopsis: Library for the Concrete data format.
changelog: ''
basic-deps:
bytestring: -any
base: ! '>=4.7 && <5'
unordered-containers: -any
text: -any
thrift: ! '>=0.9.3'
hashable: -any
QuickCheck: -any
vector: -any
all-versions:
- '0.1.0.0'
author: Thomas Lippincott
latest: '0.1.0.0'
description-type: haddock
description: Library for the Concrete data format.
license-name: GPL
| |
Add Travis CI configuration file | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
language: java
sudo: false
cache:
directories:
- "$HOME/.m2"
jobs:
include:
- stage: test
jdk: openjdk11
script: mvn verify -Prun-its
branches:
only:
- master
| |
Add Travis CI config file | language: python
python:
- 2.7
- pypy
env:
- VCLOUD_API_ROOT=http://test-api-root
install:
- pip install . --use-mirrors
script: nosetests
| |
Update from Hackage at 2017-08-15T16:09:03Z | homepage: https://github.com/moesenle/servant-websockets#readme
changelog-type: markdown
hash: 7fce7e16a25e47dd65a8c5a32fdf7d439b490be075d4ba038a2a406d2a4ec3c9
test-bench-deps: {}
maintainer: moesenle@gmail.com
synopsis: Small library providing WebSocket endpoints for servant.
changelog: ! "# Version 1.0.0\n\n * Initial release of `servant-websockets`.\n"
basic-deps:
warp: -any
exceptions: -any
wai-websockets: -any
bytestring: -any
wai: -any
servant-websockets: -any
base: ! '>=4.7 && <5'
text: -any
async: -any
websockets: -any
conduit: -any
servant-server: -any
resourcet: -any
aeson: -any
all-versions:
- '1.0.0'
author: Lorenz Moesenlechner
latest: '1.0.0'
description-type: markdown
description: ! '# Introduction
This small library provides two servant endpoints for implementing
websockets and is based on `websockets` and `wai-websockets`.
This library provides two `servant` endpoints: `WebSocket` and
`WebSocketConduit`. The former is a low-level interface for directly
interacting with a `Connection` (see the
[websockets](https://hackage.haskell.org/package/websockets) library
for more information). The latter provides a
[conduit](https://hackage.haskell.org/package/conduit) based endpoint
for JSON serializable input and output.
See the module documentation for examples.
'
license-name: BSD3
| |
Add handlers for child node role. | ---
- name: Run pulp-manage-db
command: pulp-manage-db
become_user: apache
become_method: sudo
- name: Restart server components
service: name={{ item }} state=restarted
with_items:
- pulp_workers
- pulp_celerybeat
- pulp_resource_manager
- goferd
| |
Add action to send a notification of non-PR CirrusCI failures to Zulip | name: Cirrus CI failures
on:
check_run:
type: ['completed']
jobs:
send-failure-notification-to-zulip:
name: Send Cirrus CI failure to Zulip
if: github.event.check_run.app.name == 'Cirrus CI' && github.event.check_run.conclusion == 'failure' && toJson(github.event.check_run.pull_requests) == '[]'
runs-on: ubuntu-latest
steps:
- name: Log
run: echo "$EVENT"
env:
EVENT: ${{ toJson(github.event.check_run.pull_requests) }}
- name: Send
uses: zulip/github-actions-zulip@35d7ad8e98444f894dcfe1d4e17332581d28ebeb
with:
api-key: ${{ secrets.ZULIP_SCHEDULED_JOB_FAILURE_API_KEY }}
email: ${{ secrets.ZULIP_SCHEDULED_JOB_FAILURE_EMAIL }}
organization-url: 'https://ponylang.zulipchat.com/'
to: notifications
type: stream
topic: ${{ github.repository }} scheduled job failure
content: ${{ github.event.check_run.details_url }} failed.
| |
Update from Hackage at 2017-10-09T17:42:33Z | homepage: https://github.com/andrewthad/concurrent-st#readme
changelog-type: ''
hash: 11b0df84e519961f467a9ebd561783db38ad614f3efab6f0685e936306b529bf
test-bench-deps: {}
maintainer: andrew.thaddeus@gmail.com
synopsis: Concurrent Haskell in ST
changelog: ''
basic-deps:
base: ! '>=4.7 && <5'
ghc-prim: -any
all-versions:
- '0.1'
author: Andrew Martin
latest: '0.1'
description-type: markdown
description: ! '# concurrent-st
'
license-name: BSD3
| |
Update from Hackage at 2017-07-08T15:40:51Z | homepage: https://bitbucket.org/fmapE/h2c
changelog-type: ''
hash: 32fcb3552fc4ddd3e1336d59dbb0f695e86c12fa8fa28e1c956cc498dafe0fde
test-bench-deps: {}
maintainer: edwardamsden@gmail.com
synopsis: Bindings to Linux I2C with support for repeated-start transactions.
changelog: ''
basic-deps:
bytestring: -any
base: ! '>=4.7 && <5'
mtl: -any
resourcet: -any
all-versions:
- '1.0.0'
author: Edward Amsden
latest: '1.0.0'
description-type: markdown
description: ! "# h2c: Haskell bindings to Linux I2C API\n\nH2C is a binding to the
Linux i2c-tools/i2c-dev API.\nIt uses the I2C_RDWR ioctl for repeated-start communications
between master and slave.\n\n## Notes\n - You'll probably have to run as root. Getting
regular users direct access to i2c busses on Linux is tricky.\n - The Linux i2c-stub
driver that you might think would be useful for testing doesn't support the I2C_RDWR
ioctl.\n This is why, if you try to use it, you'll get the \"invalid argument\"
error.\n\n## Documentation\n` $ stack haddock --open`, find module `System.IO.I2C`\n\nBE
CAREFUL WITH I2C. It can be used to poke at things like your graphics card, fans,
&c.\n\nSee my library [bno055-haskell](https://bitbucket.org/fmapE/bno055-haskell)
for examples of h2c in use.\n\n"
license-name: MIT
| |
Enable OpenSM Infiniband subnet manager | ---
###############################################################################
# OpenSM Infiniband subnet manager configuration.
# Whether OpenSM is enabled.
#opensm_enabled:
opensm_enabled: True
###############################################################################
# Dummy variable to allow Ansible to accept this file.
workaround_ansible_issue_8743: yes
| |
Add release note about the swift Large Object changes | ---
fixes:
- Delete swift objects uploaded in service of uploading images
at the time that the corresponding image is deleted. On some clouds,
image uploads are accomplished by uploading the image to swift and
then running a task-import. As shade does this action on behalf of the
user, it is not reasonable to assume that the user would then be aware
of or manage the swift objects shade created, which led to an ongoing
leak of swift objects.
- Upload swift Large Objects as Static Large Objects by default. Shade
automatically uploads objects as Large Objects when they are over a
segment_size threshold. It had been doing this as Dynamic Large Objects,
which sound great, but which have the downside of not deleting their
sub-segments when the primary object is deleted. Since nothing in the
shade interface exposes that the object was segmented, the user would not
know they would also need to find and delete the segments. Instead, we
now upload as Static Large Objects which behave as expected and delete
segments when the object is deleted.
| |
Update from Hackage at 2017-06-08T08:32:09Z | homepage: https://github.com/mjhopkins/alerta-client
changelog-type: markdown
hash: c6a8db93d249c803e6c5ee707093142b2c05aca02f43b2d07114592044ba6f60
test-bench-deps: {}
maintainer: markjohnhopkins@gmail.com
synopsis: Bindings to the alerta REST API
changelog: ! "# Change log\n\n## 0.1.0.0\n Mark Hopkins <markjohnhopkins@gmail.com>
Jun 2017\n\n - Basics of the alerta API, including\n - alerts\n - alert queries,
history, top 10 and flapping\n - environments\n - services\n - blackouts\n
\ - heartbeats\n - API keys\n - users\n - customers\n\n Not yet covered:
OAuth integration.\n"
basic-deps:
http-client: -any
base: ! '>=4.7 && <5'
time: -any
aeson-pretty: -any
servant-client: -any
text: -any
http-api-data: -any
data-default: -any
servant-server: -any
servant: -any
containers: -any
aeson: -any
all-versions:
- '0.1.0.0'
author: Mark Hopkins
latest: '0.1.0.0'
description-type: markdown
description: ! '# alerta-client
Haskell bindings for the [Alerta](http://alerta.io) REST API.'
license-name: BSD3
| |
Update from Hackage at 2018-11-25T18:36:47Z | homepage: https://github.com/woehr/open-adt
changelog-type: ''
hash: 8e3a9cc486542ed5caa10c7c38b27a1a6e2b1ddf65735bbabee6db59f6e962ee
test-bench-deps: {}
maintainer: Jordan Woehr
synopsis: Open algebraic data type examples.
changelog: ''
basic-deps:
row-types: ! '>=0.2.3 && <1'
base: ! '>=4.9 && <5'
deriving-compat: ! '>=0.3 && <1'
open-adt: ! '>=1 && <2'
constraints: ! '>=0.8 && <1'
recursion-schemes: ! '>=5 && <6'
template-haskell: ! '>=2.11 && <3'
open-adt-tutorial: -any
all-versions:
- '1.0'
author: ''
latest: '1.0'
description-type: haddock
description: ! 'Example usage of open-adt with haddock documentation. Read the
"Data.OpenADT.Tutorial" module from top to bottom.'
license-name: BSD3
| |
Update from Hackage at 2017-05-30T13:03:33Z | homepage: https://github.com/andrewthad/teleshell#readme
changelog-type: ''
hash: 000b3c0983fe7ceed42e0f81576ac99afc83d8a7eb20ef9ba81d0c7f0abb8eb7
test-bench-deps:
bytestring: -any
split: -any
base: -any
pipes: -any
tasty-quickcheck: -any
tasty-hunit: -any
tasty: -any
QuickCheck: -any
teleshell: -any
vector: -any
maintainer: andrew.thaddeus@gmail.com
synopsis: Telnet client and other things
changelog: ''
basic-deps:
bytestring: -any
base: ! '>=4.7 && <5'
unordered-containers: -any
text: -any
network: -any
pipes: -any
hashable: -any
attoparsec: -any
transformers: -any
primitive: -any
vector: -any
all-versions:
- '0.1.0.0'
author: Andrew Martin
latest: '0.1.0.0'
description-type: markdown
description: ! '# teleshell
'
license-name: BSD3
| |
Add action to register new issues for triaging | on:
issues:
types: opened
jobs:
assign_issue_to_project:
runs-on: ubuntu-latest
name: Add new issues to the triage project
steps:
- name: Create new project card with issue
id: list
uses: qmacro/action-add-issue-to-project-column@v1
with:
token: ${{ secrets.GITHUB_TOKEN }}
project: 'Triage'
column: 'Needs triage'
| |
Switch to GitHub Actions for CI. | name: CI
on: [push, pull_request]
jobs:
build:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
ruby: [ '2.5', '2.6', '2.7' ]
steps:
- name: Check out code
uses: actions/checkout@v2
- name: Set up ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: ${{ matrix.ruby }}
bundler-cache: true
- name: Test
run: "bundle exec rspec"
| |
Update from Hackage at 2016-04-03T17:24:30+0000 | homepage: http://github.com/slpopejoy/
changelog-type: ''
hash: 0920c1d843ed9bc41339c90f3b1cd6c7922f7b3868cf505e5c282cefa6c5524f
test-bench-deps: {}
maintainer: spopejoy@panix.com
synopsis: Braid representations in Haskell
changelog: ''
basic-deps:
diagrams-lib: ==1.3.*
base: ! '>=4.8 && <4.9'
diagrams-rasterific: ==1.3.*
containers: -any
lens: ==4.13.*
diagrams: ==1.3.*
transformers-compat: ==0.4.*
all-versions:
- '0.0.2'
author: Stuart Popejoy
latest: '0.0.2'
description-type: haddock
description: Braids represented as Haskell types with support for generation and transformations.
license-name: BSD2
| |
Add go tip github action |
on:
workflow_dispatch:
pull_request:
types: [opened, reopened,ready_for_review,synchronize]
push:
branches:
- main
- master
name: Test Go Tip
jobs:
test:
runs-on: ubuntu-latest
steps:
- name: Install Go tip
run: |
curl -sL https://storage.googleapis.com/go-build-snap/go/linux-amd64/$(git ls-remote https://github.com/golang/go.git HEAD | awk '{print $1;}').tar.gz -o gotip.tar.gz
ls -lah gotip.tar.gz
mkdir -p ~/sdk/gotip
tar -C ~/sdk/gotip -xzf gotip.tar.gz
~/sdk/gotip/bin/go version
echo "PATH=$HOME/go/bin:$HOME/sdk/gotip/bin/:$PATH" >> $GITHUB_ENV
- name: Checkout
uses: actions/checkout@v2
with:
submodules: recursive
- id: Cache
uses: actions/cache@v2
with:
path: |
~/go/pkg/mod # Module download cache
~/.cache/go-build # Build cache (Linux)
key: ubuntu-go-${{ hashFiles('**/go.sum') }}
restore-keys: |
ubuntu-go-
- name: Dependencies
run: go mod download
if: steps.cache.outputs.cache-hit != 'true'
- name: Test
run: go test ./...
- name: Test 32 bit
env:
GOARCH: 386
run: go test ./...
- name: Test with race detector
run: go test -race ./...
| |
Update from Forestry.io - Updated Forestry configuration | ---
new_page_extension: md
auto_deploy: false
admin_path:
webhook_url:
sections:
- type: jekyll-posts
label: Posts
create: all
- type: jekyll-pages
label: Pages
create: all
upload_dir: uploads
public_path: "/uploads"
front_matter_path: ''
use_front_matter_path: false
file_template: ":filename:"
build:
preview_env:
- JEKYLL_ENV=staging
preview_output_directory: _site
install_dependencies_command: bundle install --path vendor/bundle
preview_docker_image: forestryio/ruby:2.6
mount_path: "/srv"
working_dir: "/srv"
instant_preview_command: bundle exec jekyll serve --drafts --unpublished --future
-d _site
| |
Use Travis CI to run flake8 tests on pull requests | language: python
cache: pip
python:
- "2.7"
- "3.6"
matrix:
allow_failures:
- python: "3.6"
install:
#- pip install -r requirements.txt
- pip install flake8 # pytest # add other testing frameworks later
before_script:
# stop the build if there are Python syntax errors or undefined names
- flake8 . --count --select=E901,E999,F821,F822,F823 --show-source --statistics
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
- flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
script:
- true # pytest --capture=sys # add other tests here
notifications:
on_success: change
on_failure: change # `always` will be the setting once code changes slow down
| |
Update from Hackage at 2022-01-16T16:06:43Z | homepage: https://github.com/haskell-gi/haskell-gi
changelog-type: markdown
hash: 74318728f4b7fcb91208e103a8b70ec13925f900c05496fc0c9dc24b14c47040
test-bench-deps: {}
maintainer: Iñaki García Etxebarria
synopsis: Adwaita bindings
changelog: |+
1.0.1
=====
+ Initial release
basic-deps:
haskell-gi-base: '>=0.26 && <0.27'
bytestring: '>=0.10 && <1'
haskell-gi-overloading: <1.1
gi-gio: ==2.0.*
base: '>=4.9 && <5'
text: '>=1.0 && <2'
gi-gtk: ==4.0.*
gi-gdk: ==4.0.*
gi-gobject: ==2.0.*
containers: '>=0.5 && <1'
haskell-gi: '>=0.26 && <0.27'
transformers: '>=0.4 && <1'
all-versions:
- 1.0.1
author: Iñaki García Etxebarria
latest: 1.0.1
description-type: markdown
description: |
# Documentation
Autogenerated documentation for this package can be found at
[https://hackage.haskell.org/package/gi-adwaita-1.0.1/docs/GI-Adw.html](https://hackage.haskell.org/package/gi-adwaita-1.0.1/docs/GI-Adw.html)
For general documentation on using [haskell-gi](https://github.com/haskell-gi/haskell-gi) based bindings, see [the project page](https://github.com/haskell-gi/haskell-gi) or [the Wiki](https://github.com/haskell-gi/haskell-gi/wiki).
license-name: LGPL-2.1-only
| |
Build with Actions, ship coverage to Codecov. | name: Node CI
on: [push]
jobs:
build:
runs-on: ubuntu-latest
strategy:
matrix:
node-version: [12.x]
steps:
- uses: actions/checkout@v1
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v1
with:
node-version: ${{ matrix.node-version }}
- name: npm install
run: npm install --no-package-lock
- name: npm ls
run: npm ls
- name: npm install nyc, prove, codecov
run: npm install -g nyc prove codecov
- name: npm test
run: nyc npm test
env:
CI: true
- name: generate codecov report
run: nyc report --reporter=text-lcov > coverage.lcov
- name: ship coverage to codecov
run: codecov --branch=${GITHUB_REF##*/}
env:
CODECOV_TOKEN: ${{secrets.CODECOV_TOKEN}}
| |
Add a Travis CI config | # Enables support for a docker container-based build:
# https://docs.travis-ci.com/user/workers/container-based-infrastructure/#Routing-your-build-to-container-based-infrastructure
sudo: false
language: python
python:
- "2.7"
script: ./ci.sh
| |
Update from Hackage at 2022-09-21T20:07:46Z | homepage: https://github.com/monadicsystems/lucid-htmx/tree/main/lucid2#readme
changelog-type: markdown
hash: cde494a8e4d199dfcc59331f41c72e1a60c3d2106e1bee56e61d97b09425f850
test-bench-deps:
base: '>=4.7 && <5'
lucid2: <=0.0.20220526
hspec: -any
text: -any
HUnit: -any
lucid2-htmx: <=0.1.0.8
maintainer: tech@monadic.systems
synopsis: Use htmx in your lucid templates
changelog: "# Changelog for lucid-htmx\n\n## 0.1.0.0\n\nFirst release of `lucid-htmx`!\n\n##
0.1.0.6\n\nRelax upper bound of `lucid` dependency up to, and including, 2.11\n\n##
0.1.0.7\n\nChanged upper bound of lucid2 to 0.0.20220526\nAdded tests \nConverted
lucid1 version of lucid-htmx to lucid2 version\n\n## 0.1.0.8\n\nChange name of package
from lucid-htmx to lucid2-htmx to avoid name collision on hackage\n"
basic-deps:
base: '>=4.7 && <5'
lucid2: <=0.0.20220526
text: -any
all-versions:
- 0.1.0.8
author: Monadic Systems LLC
latest: 0.1.0.8
description-type: markdown
description: |+
Another version of lucid-htmx that works with lucid2
license-name: BSD-3-Clause
| |
Update from Hackage at 2017-02-12T04:14:57Z | homepage: https://github.com/minad/tasty-stats#readme
changelog-type: ''
hash: 33e2b388a2825ead59fc718f5fe8d873664ced5a0afaafb08d04512c10090a9a
test-bench-deps: {}
maintainer: Daniel Mendler <mail@daniel-mendler.de>
synopsis: Collecting statistics of the Tasty testsuite
changelog: ''
basic-deps:
stm: ! '>=2.4 && <2.6'
base: ! '>=4.8 && <5'
time: ! '>=1.5 && <1.7'
process: ! '>=1.3 && <1.5'
tagged: ! '>=0.7 && <0.9'
containers: ! '>=0.4 && <0.6'
tasty: ! '>=0.10 && <0.12'
all-versions:
- '0.1.0.0'
author: Daniel Mendler <mail@daniel-mendler.de>
latest: '0.1.0.0'
description-type: markdown
description: ! '# tasty-stats: Collect statistics of your Tasty test suite
[](https://hackage.haskell.org/package/tasty-stats)
[](http://travis-ci.org/minad/tasty-stats)
This package provides auto discovery for the tasty test framework.
Use the `Test.Tasty.Stats.consoleReporter` ingredient:
``` haskell
main = defaultMainWithIngredients (Test.Tasty.Stats.consoleReporter : defaultIngredients)
testTree
```
With tasty-auto:
``` haskell
-- test/test.hs
{-# OPTIONS_GHC -F -pgmF tasty-auto -optF Test.Tasty.Stats.consoleReporter #-}
```
'
license-name: MIT
| |
Add algorithms as a package. | {% set name = "algorithms" %}
{% set version = "0.1.4" %}
package:
name: {{ name|lower }}
version: {{ version }}
source:
url: https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/algorithms-{{ version }}.tar.gz
sha256: ccd05e13d4ad169b0061028b16b16e7f67e884d237a8bbff13b354ee5d31f34c
build:
number: 0
noarch: python
script: {{ PYTHON }} -m pip install . -vv
requirements:
host:
- pip
- python >=3.6
run:
- python >=3.6
test:
imports:
- algorithms
- algorithms.arrays
commands:
- pip check
requires:
- pip
about:
home: https://github.com/keon/algorithms
summary: Pythonic Data Structures and Algorithms
license: MIT
license_file: LICENSE
description: |
Minimal and clean example implementations of data structures and algorithms
in Python 3.
extra:
recipe-maintainers:
- thewchan
| |
Add `docker-compose-mac-volume.yml` overlay for faster mac volume syncing. | # This is an docker-compose overlay that uses performance-tuned volumes for OSX.
# see https://docs.docker.com/docker-for-mac/osxfs-caching/ for more infomation
# A convient way to overlay this file is to add a `.env` file with the contents:
# COMPOSE_FILE=docker-compose.yml:docker/dev/docker-compose-mac-volumes.yml
# You can also do it manually when you run docker-compose each time with
# docker-compose -f docker-compose.yml -f docker/dev/docker-compose-mac-volumes.yml
# if you are making changes to docker-compose.yml or this file it is useful to
# run `docker-compose config` which shows how the two files get merged together
version: '2'
services:
app:
volumes:
- .:/rigse:delegated
solr:
volumes:
- .:/rigse:delegated
| |
Create official build with github actions | name: Official Build CI
on:
push:
branches:
- master
env:
IMAGE_NAME: donkeybot
REGISTRY: suchtilt.azurecr.io
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Login to docker
env:
REGISTRY_PW: ${{ secrets.CONTAINER_REGISTRY_PW }}
run: echo -e "$REGISTRY_PW" | docker login -u suchtilt --password-stdin "$REGISTRY"
- name: Docker actions
run: |
docker pull "$REGISTRY/$IMAGE_NAME" || true
docker build --pull --cache-from "$REGISTRY/$IMAGE_NAME" --tag "$REGISTRY/$IMAGE_NAME" .
docker images
docker push "$REGISTRY/$IMAGE_NAME"
| |
Update from Forestry.io - Updated Forestry configuration | ---
new_page_extension: md
auto_deploy: false
admin_path:
webhook_url:
collections:
upload_dir: uploads
public_path: "/uploads"
front_matter_path: ''
use_front_matter_path: false
file_template: ":filename:"
| |
Update from Forestry.io - Updated Forestry configuration | ---
upload_path: "/uploads/:year:/:month:/:day:"
frontmatter_file_url_template: "/uploads/:year:/:month:/:day:"
body_file_url_template: "/uploads/:year:/:month:/:day:"
new_page_extension: md
auto_deploy: false
admin_path:
webhook_url:
collections:
| |
Set up CI with Azure Pipelines | # Starter pipeline
# Start with a minimal pipeline that you can customize to build and deploy your code.
# Add steps that build, run tests, deploy, and more:
# https://aka.ms/yaml
trigger:
- master
pool:
vmImage: 'macOS-10.13'
steps:
- script: |
brew install bitrise
bitrise setup
bitrise run primary
displayName: 'bitrise run primary'
- script: |
echo Is this parallel
echo Second line
displayName: 'Run a multi-line script'
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.