Instruction stringlengths 14 778 | input_code stringlengths 0 4.24k | output_code stringlengths 1 5.44k |
|---|---|---|
Install the full texlive distribution for tests instead of using single packages. | sudo: false
addons:
apt:
packages:
- texlive-latex-recommended
- texlive-latex-extra
- texlive-fonts-recommended
- texlive-science
- texlive-bibtex-extra
- texlive-latex-recommended
- latexmk
script:
- latexmk masterthesis
- latexmk presentation
| sudo: false
addons:
apt:
packages:
- texlive-full
- latexmk
script:
- latexmk masterthesis
- latexmk presentation
|
Configure Travis to install geos and proj libs before test | language: ruby
rvm:
- 2.2.2
branches:
only:
- master
before_install:
- mysql -e "create database IF NOT EXISTS transam_spatial_testing;" -uroot
addons:
code_climate:
repo_token: ce7c157104b0cf1f2babf66d9cc10bbe598607781e8eeb1ba1593fec1d1fc5c1
before_script:
- cp spec/dummy/config/database.travis.yml spec/dummy/config/database.yml
script:
- bundle exec rake app:transam_spatial:prepare_rspec
- bundle exec rspec
| language: ruby
rvm:
- 2.2.2
branches:
only:
- master
before_install:
- sudo apt-get update
- gem update bundler
- sudo apt-get install libgeos-dev libproj-dev
- mysql -e "create database IF NOT EXISTS transam_spatial_testing;" -uroot
addons:
code_climate:
repo_token: ce7c157104b0cf1f2babf66d9cc10bbe598607781e8eeb1ba1593fec1d1fc5c1
before_script:
- cp spec/dummy/config/database.travis.yml spec/dummy/config/database.yml
script:
- bundle exec rake app:transam_spatial:prepare_rspec
- bundle exec rspec
|
Upgrade Travis CI to Ubuntu Trusty to fix Chrome usage in testing. | language: node_js
sudo: false
node_js: stable
addons:
sauce_connect: true
firefox: latest
apt:
sources:
- google-chrome
packages:
- google-chrome-stable
before_install:
- export CHROME_BIN=chromium-browser
- export DISPLAY=:99.0
- sh -e /etc/init.d/xvfb start
before_script:
- npm install
script:
- if [ "${TRAVIS_PULL_REQUEST}" = "false" ]; then gulp ci; else gulp test-pr; fi
| language: node_js
sudo: required
dist: trusty
node_js: stable
addons:
sauce_connect: true
firefox: latest
apt:
sources:
- google-chrome
packages:
- google-chrome-stable
before_install:
- export CHROME_BIN=chromium-browser
- export DISPLAY=:99.0
- sh -e /etc/init.d/xvfb start
before_script:
- npm install
script:
- if [ "${TRAVIS_PULL_REQUEST}" = "false" ]; then gulp ci; else gulp test-pr; fi
|
Disable rbx on CI, simplecov bug? | ---
language: ruby
rvm:
- 1.8.7
- 1.9.3
- jruby-19mode
- rbx-19mode
| ---
language: ruby
rvm:
- 1.8.7
- 1.9.3
- jruby-19mode
|
Add page for our publications | # Welcome to Jekyll!
#
# This config file is meant for settings that affect your whole blog, values
# which you are expected to set up once and rarely need to edit after that.
# For technical reasons, this file is *NOT* reloaded automatically when you use
# 'jekyll serve'. If you change this file, please restart the server process.
# Site settings
title: Open Data in Experimental Mechanics
email: contact@openexpmechanics.science
description: > # this means to ignore newlines until "baseurl:"
Open Data in Experimental Mechanics
baseurl: "" # the subpath of your site, e.g. /blog
url: "https://opendataexpmechanics.github.io/" # the base hostname & protocol for your site
twitter_username: OpenDataExpMech
github_username: OpenDataExpMechanics
# Build settings
markdown: kramdown
kramdown:
input: GFM
sass:
sass_dir: _sass
style: compressed
plugins:
- jekyll-seo-tag
# Third-party services
# just leave someone empty to disable it
# google_analytics:
disqus_shortname:
# used this for post_excerpt at index_page
excerpt_separator: <!--description-->
| # Welcome to Jekyll!
#
# This config file is meant for settings that affect your whole blog, values
# which you are expected to set up once and rarely need to edit after that.
# For technical reasons, this file is *NOT* reloaded automatically when you use
# 'jekyll serve'. If you change this file, please restart the server process.
# Site settings
title: Open Data in Experimental Mechanics
email: contact@openexpmechanics.science
description: > # this means to ignore newlines until "baseurl:"
Open Data in Experimental Mechanics
baseurl: "" # the subpath of your site, e.g. /blog
url: "https://opendataexpmechanics.github.io/" # the base hostname & protocol for your site
twitter_username: OpenDataExpMech
github_username: OpenDataExpMechanics
# Build settings
markdown: kramdown
kramdown:
input: GFM
sass:
sass_dir: _sass
style: compressed
plugins_dir:
- jekyll-seo-tag
# Third-party services
# just leave someone empty to disable it
# google_analytics:
disqus_shortname:
# used this for post_excerpt at index_page
excerpt_separator: <!--description-->
|
Update from Hackage at 2016-12-20T13:23:44Z | homepage: ''
changelog-type: ''
hash: 98a5575eff129b96cee0946d4ae2630bce8f4ef45b9168840ee7d27ce6027f9e
test-bench-deps: {}
maintainer: luka.horvat9@gmail.com
synopsis: Prelude replacement based on protolude
changelog: ''
basic-deps:
exceptions: ==0.8.*
witherable: ==0.1.*
MonadRandom: -any
base: ! '>=4.7 && <5'
text: ==1.2.*
monad-control: -any
protolude: ==0.1.*
string-conv: ==0.1.*
lens: ==4.14.*
mtl: ==2.2.*
transformers: ==0.5.*
aeson: ! '>=0.11 && <1.1'
all-versions:
- '0.1.0.0'
- '0.1.0.1'
- '0.1.0.2'
- '0.1.0.3'
- '0.1.0.4'
- '0.1.0.5'
- '0.1.0.6'
- '0.1.0.7'
author: Luka Horvat
latest: '0.1.0.7'
description-type: haddock
description: Prelude replacement based on protolude
license-name: BSD3
| homepage: ''
changelog-type: ''
hash: 5b2c133d41700380d3b927a114a4b1c32978e283ff9f075e8a467701ed80a90f
test-bench-deps: {}
maintainer: luka.horvat9@gmail.com
synopsis: Prelude replacement based on protolude
changelog: ''
basic-deps:
exceptions: ==0.8.*
witherable: ==0.1.*
MonadRandom: -any
base: ! '>=4.7 && <5'
text: ==1.2.*
monad-control: -any
protolude: ==0.1.*
string-conv: ==0.1.*
lens: ==4.14.*
mtl: ==2.2.*
transformers: ==0.5.*
aeson: ! '>=0.11 && <1.1'
all-versions:
- '0.1.0.0'
- '0.1.0.1'
- '0.1.0.2'
- '0.1.0.3'
- '0.1.0.4'
- '0.1.0.5'
- '0.1.0.6'
- '0.1.0.7'
- '0.1.0.8'
author: Luka Horvat
latest: '0.1.0.8'
description-type: haddock
description: Prelude replacement based on protolude
license-name: MIT
|
Comment out ROS2 for now | name: CI
on: [push, pull_request]
jobs:
industrial_ci:
strategy:
matrix:
env:
- {ROS_DISTRO: kinetic, ROS_REPO: testing}
- {ROS_DISTRO: kinetic, ROS_REPO: main}
- {ROS_DISTRO: melodic, ROS_REPO: testing}
- {ROS_DISTRO: melodic, ROS_REPO: main}
- {ROS_DISTRO: noetic, ROS_REPO: testing}
- {ROS_DISTRO: noetic, ROS_REPO: main}
- {ROS_DISTRO: dashing, ROS_REPO: testing}
- {ROS_DISTRO: dashing, ROS_REPO: main}
- {ROS_DISTRO: eloquent, ROS_REPO: testing}
- {ROS_DISTRO: eloquent, ROS_REPO: main}
- {ROS_DISTRO: foxy, ROS_REPO: testing}
- {ROS_DISTRO: foxy, ROS_REPO: main}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
- uses: 'ros-industrial/industrial_ci@master'
env: ${{matrix.env}}
| name: CI
on: [push, pull_request]
jobs:
industrial_ci:
strategy:
matrix:
env:
- {ROS_DISTRO: kinetic, ROS_REPO: testing}
- {ROS_DISTRO: kinetic, ROS_REPO: main}
- {ROS_DISTRO: melodic, ROS_REPO: testing}
- {ROS_DISTRO: melodic, ROS_REPO: main}
- {ROS_DISTRO: noetic, ROS_REPO: testing}
- {ROS_DISTRO: noetic, ROS_REPO: main}
# - {ROS_DISTRO: dashing, ROS_REPO: testing}
# - {ROS_DISTRO: dashing, ROS_REPO: main}
# - {ROS_DISTRO: eloquent, ROS_REPO: testing}
# - {ROS_DISTRO: eloquent, ROS_REPO: main}
# - {ROS_DISTRO: foxy, ROS_REPO: testing}
# - {ROS_DISTRO: foxy, ROS_REPO: main}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v1
- uses: 'ros-industrial/industrial_ci@master'
env: ${{matrix.env}}
|
Use custom pylint configuration file | checks:
python:
code_rating: true
duplicate_code: true
tools:
external_code_coverage: true
pylint:
python_version: 3
config_file: ".pylint.ini"
filter:
excluded_paths:
- "*/tests/*"
| build:
tests:
override:
- pylint-run --rcfile=.pylint.ini
checks:
python:
code_rating: true
duplicate_code: true
filter:
excluded_paths:
- "*/tests/*"
|
Update from Hackage at 2017-08-03T09:37:58Z | homepage: https://github.com/biocad/bio-sequence
changelog-type: ''
hash: 9bb6cff2af494fbe530420d93358f6811faeef22a70111be08ed8da0612c9203
test-bench-deps:
bytestring: -any
base: -any
hspec: -any
bio-sequence: -any
QuickCheck: -any
maintainer: yakovlev@biocad.ru
synopsis: Initial project template from stack
changelog: ''
basic-deps:
bytestring: -any
base: ! '>=4.7 && <5'
text: -any
parsec: -any
array: -any
template-haskell: -any
all-versions:
- '0.1.0.0'
author: Pavel Yakovlev
latest: '0.1.0.0'
description-type: markdown
description: ! '# sequence
TODO
'
license-name: BSD3
| |
Update from Hackage at 2017-08-10T01:10:07Z | homepage: https://github.com/qfpl/papa
changelog-type: markdown
hash: 8bb49884e55332ed0badf36691c66c2bb09c988f530e3578b0b1b74774afe348
test-bench-deps:
base: <5 && >=3
filepath: ! '>=1.3'
doctest: ! '>=0.9.7'
QuickCheck: ! '>=2.0'
template-haskell: ! '>=2.8'
directory: ! '>=1.1'
maintainer: Queensland Functional Programming Lab <oᴉ˙ldɟb@llǝʞsɐɥ>
synopsis: useful functions reimplemented
changelog: ! '0.2.1
* `minimum` and `maximum` functions.
0.2.0
* Initial release.
'
basic-deps:
base: ! '>=4.8 && <5'
all-versions:
- '0.3.0'
author: Queensland Functional Programming Lab <oᴉ˙ldɟb@llǝʞsɐɥ>
latest: '0.3.0'
description-type: haddock
description: ! '<<http://i.imgur.com/uZnp9ke.png>>
useful functions reimplemented'
license-name: BSD3
| |
Add package size limit action | name: Package Size Report
on:
pull_request:
branches: [ master, develop ]
jobs:
pkg-size-report:
name: Package Size Report
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Package size report
uses: pkg-size/action@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
| |
Update from Hackage at 2018-01-02T22:28:22Z | homepage: https://github.com/koterpillar/simpleconfig#readme
changelog-type: ''
hash: 31214c4d628d1017d4b05bde57e7944d6773e866d60bc7f3f6ff88ff81145d14
test-bench-deps:
base: -any
text: -any
generic-deriving: -any
containers: -any
simpleconfig: -any
lens: -any
maintainer: a@koterpillar.com
synopsis: Short description of your package
changelog: ''
basic-deps:
base: ! '>=4.7 && <5'
containers: -any
lens: -any
all-versions:
- '0.0.1'
author: Alexey Kotlyarov
latest: '0.0.1'
description-type: markdown
description: ! '# simpleconfig
### Releasing
* Install [bumpversion](https://github.com/peritus/bumpversion): `pip install bumpversion`.
* Run `bumpversion major|minor|patch`.
* Run `git push --tags`.
'
license-name: BSD3
| |
Update from Hackage at 2017-03-10T14:41:21Z | homepage: https://github.com/szehk/Haskell-Carbonara-Library
changelog-type: markdown
hash: dd9ff006eeb6b5e3ccb4694aa971daaec1886c296fbdbcf4d85dd8f133176c9e
test-bench-deps: {}
maintainer: Chris Sze <cris@graduate.hku.hk>
synopsis: some spaghetti code
changelog: ! '
0.0.1
* Initial release
'
basic-deps:
split: ! '>=0.2.3'
base: ! '>=4.9.1 && <10'
time: ! '>=1.6.0'
postgresql-simple: ! '>=0.5.2'
mysql-simple: ! '>=0.4.0'
all-versions:
- '0.0.1'
author: Chris Sze
latest: '0.0.1'
description-type: haddock
description: ! '@carbonara@ is a set of handy tools for tuples, database'
license-name: BSD3
| |
Update from Hackage at 2017-07-26T07:44:57Z | homepage: https://github.com/clintonmead/fast-mult#readme
changelog-type: ''
hash: e1e7f128d0ab7b182506ad541f430eeb975543166faf76c4b0e79a2d1d5aa2ba
test-bench-deps: {}
maintainer: clintonmead@gmail.com
synopsis: Numeric type with asymptotically faster multiplications.
changelog: ''
basic-deps:
base: ! '>=4.9 && <5'
integer-gmp: -any
ghc-prim: -any
strict-base: -any
all-versions:
- '0.1.0.0'
author: Clinton Mead
latest: '0.1.0.0'
description-type: markdown
description: ! '# fast-mult
'
license-name: BSD3
| |
Update from Hackage at 2017-05-25T01:13:09Z | homepage: ''
changelog-type: ''
hash: affba040236344c799a6afbed82ecedc802386a0d2cef481ccc4bb0d57ba17d4
test-bench-deps:
hourglass: -any
base: -any
hspec: -any
chronologique: -any
QuickCheck: -any
maintainer: Andrew Cowie <andrew@operationaldynamics.com>
synopsis: Time to manipulate time
changelog: ''
basic-deps:
hourglass: -any
base: ! '>=4.9 && <5'
time: -any
all-versions:
- '0.2.1.0'
author: Andrew Cowie <andrew@operationaldynamics.com>
latest: '0.2.1.0'
description-type: haddock
description: ! 'A simple type useful for representing timestamps as generated by system
events, along with conveniences for converting between time types from common
Haskell time libraries.
Our original use was wanting to conveniently measure things happening on
distributed computer systems. Since machine clock cycles are in units of
nanoseconds, this has the nice property that, assuming the system clock is not
corrupted, two subsequent events from the same source process are likely to
have monotonically increasing timestamps. And even if the system clock has
skew, they''re still decently likely to be unique per device. These TimeStamps
thus make good keys when building Maps.
The core type is in "Chrono.TimeStamp", see there for full documentation.'
license-name: BSD3
| |
Add example docker compose for load balance configuration | #
# Docker compose file to demonstrate multiple load balanced instances of services
#
version: '2'
services:
discovery:
# Expose port 8671 to host. Not for the application but
# handy to view the Eureka info page from the host machine.
ports:
- "8761:8761"
image: hotblac/disco:latest
restart: always
database:
environment:
MYSQL_ROOT_PASSWORD: my-secret-pw
image: hotblac/spanners-database:latest
restart: always
api1:
image: hotblac/spanners-api:latest
links:
- database:spanners-database
restart: always
api2:
image: hotblac/spanners-api:latest
links:
- database:spanners-database
restart: always
api3:
image: hotblac/spanners-api:latest
links:
- database:spanners-database
restart: always
users1:
image: hotblac/spanners-users:latest
links:
- database:spanners-database
restart: always
users2:
image: hotblac/spanners-users:latest
links:
- database:spanners-database
restart: always
users3:
image: hotblac/spanners-users:latest
links:
- database:spanners-database
restart: always
mvc1:
image: hotblac/spanners-mvc:latest
ports:
- "8080:8080"
restart: always
mvc2:
image: hotblac/spanners-mvc:latest
ports:
- "8081:8080"
restart: always | |
Update from Forestry.io - Updated Forestry configuration | ---
hide_body: false
fields:
- name: title
label: Title
type: text
hidden: false
default: ''
- name: type
label: Type
type: text
hidden: false
default: ''
- name: location
label: Location
type: text
hidden: false
default: ''
- name: website
label: Website
type: text
hidden: false
default: ''
| |
Add code climate config file | engines:
duplication:
enabled: true
config:
languages:
- php
fixme:
enabled: true
phpmd:
enabled: true
ratings:
paths:
- "**.php"
exclude_paths:
- config/
| |
Add nginx frontend in docker-compose | redash:
image: redash
ports:
- "5000:5000"
- "9001:9001"
links:
- redis
- postgres
env_file: .env
redis:
image: redis:2.8
ports:
- "6379:6379"
postgres:
image: postgres:9.3
ports:
- "5432:5432"
| redash:
image: redash
ports:
- "5000:5000"
- "9001:9001"
links:
- redis
- postgres
env_file: .env
redis:
image: redis:2.8
ports:
- "6379:6379"
postgres:
image: postgres:9.3
ports:
- "5432:5432"
redash-nginx:
image: redash-nginx:1.0
ports:
- "80:80"
volumes:
- "../redash-nginx/nginx.conf:/etc/nginx/nginx.conf"
links:
- redash
|
Add bob.bio.face recipe [skip appveyor] | {% set version = "2.0.4" %}
package:
name: bob.bio.face
version: {{ version }}
source:
fn: bob.bio.face-{{ version }}.zip
url: https://pypi.python.org/packages/source/b/bob.bio.face/bob.bio.face-{{ version }}.zip
md5: 6ee54e0020fac3cdbc7f99bb59bf84fc
build:
entry_points:
- baselines.py = bob.bio.face.script.baselines:main
number: 0
skip: true # [not linux]
script: python -B setup.py install --single-version-externally-managed --record record.txt
requirements:
build:
- python
- setuptools
- bob.extension
- bob.blitz
- bob.core
- bob.io.base
- bob.io.image
- bob.learn.activation
- bob.math
- bob.sp
- bob.ip.base
- bob.ip.color
- bob.ip.draw
- bob.ip.gabor
- bob.learn.linear
- bob.learn.em
- bob.measure
- bob.db.base
- bob.db.verification.utils
- bob.db.verification.filelist
- bob.db.atnt
- bob.bio.base
- bob.learn.boosting
- bob.ip.facedetect
- bob.ip.flandmark
- matplotlib
run:
- python
- bob.extension
- bob.blitz
- bob.core
- bob.io.base
- bob.io.image
- bob.learn.activation
- bob.math
- bob.sp
- bob.ip.base
- bob.ip.color
- bob.ip.draw
- bob.ip.gabor
- bob.learn.linear
- bob.learn.em
- bob.measure
- bob.db.base
- bob.db.verification.utils
- bob.db.verification.filelist
- bob.db.atnt
- bob.bio.base
- bob.learn.boosting
- bob.ip.facedetect
- bob.ip.flandmark
- matplotlib
test:
commands:
- baselines.py --help
- nosetests -sv bob.bio.face
imports:
- bob
- bob.bio
- bob.bio.face
- bob.bio.face.algorithm
- bob.bio.face.config
- bob.bio.face.config.algorithm
- bob.bio.face.config.database
- bob.bio.face.config.extractor
- bob.bio.face.config.preprocessor
- bob.bio.face.extractor
- bob.bio.face.preprocessor
- bob.bio.face.script
- bob.bio.face.test
requires:
- nose
about:
home: https://www.github.com/bioidiap/bob.bio.face
license: GNU General Public License v3 (GPLv3)
summary: Tools for running face recognition experiments
extra:
recipe-maintainers:
- 183amir
| |
Comment from MelissaEnari on creating-a-mean-prototype-6 | _id: 7472f680-d468-11ea-90eb-078ee7ca9bfb
message: 'You are my heart: http://clickfrm.com/zbZL'
name: MelissaEnari
date: 1596335568
| |
Update from Hackage at 2018-06-13T08:44:01Z | homepage: https://github.com/iij-ii/wss-client
changelog-type: ''
hash: 61376f6c088b0e40e9949e8c08b2c3aed0663b5db56625173cd1d3560168c16f
test-bench-deps:
base: -any
hspec: -any
wss-client: -any
QuickCheck: -any
maintainer: yuji-yamamoto@iij.ad.jp
synopsis: A-little-higher-level WebSockets client.
changelog: ''
basic-deps:
http-client: ! '>=0.5.13'
bytestring: -any
base: ! '>=4.7 && <5'
websockets: ! '>=0.12.0 && <0.13'
http-client-tls: -any
network-uri: -any
wss-client: -any
all-versions:
- '0.1.0.0'
author: Yuji Yamamoto
latest: '0.1.0.0'
description-type: markdown
description: ! "# wss-client\n\nA-little-higher-level WebSocket client library. \nThanks
to [http-client](https://hackage.haskell.org/package/http-client) and [http-client-tls](https://hackage.haskell.org/package/http-client-tls),
this package supports `HTTP_PROXY` environment variable and TLS.\n\n## TODO\n\n-
Support non-TLS connection via an HTTP proxy server (I have to modify the [websockets](https://hackage.haskell.org/package/websockets)
package to do that).\n- Add APIs to modify config of both http-client and websockets.\n-
Test with a mock server.\n\n<!-- Uncomment after uploading on Hackage.\n\n## Example\n\nAn
example program is here: [app/sample.hs](app/sample.hs). \nBuild the executable
by enabling build-sample flag:\n\n```bash\nstack unpack wss-client\nstack install
wss-client --flag wss-client:build-sample\n```\n-->\n\n"
license-name: Apache-2.0
| |
Add a route to kibana | apiVersion: v1
kind: Route
metadata:
labels:
name: eparis-kibana
name: eparis-kibana
spec:
port:
targetPort: http
tls:
insecureEdgeTerminationPolicy: Redirect
termination: edge
to:
kind: Service
name: eparis-kibana-logging
| |
Add to test in GitHub Actions | name: Tests
on:
push:
branches:
- '*'
jobs:
build:
runs-on: ubuntu-18.04
strategy:
matrix:
python-version: [3.7]
phonopy-version: [2.7.0, 2.14.0]
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Before_install
run: |
sudo apt-get -y install ghostscript inkscape
- name: Install dependencies
run: |
python -m pip install --upgrade pip
python -m pip install matplotlib==${{ matrix.phonopy-version }}
python -m pip install pytest>=4.6
python -m pip install pytest-cov
python -m pip install codecov coverage
- name: Test with pytest
run: |
python -mpytest -ra --cov=./ --log-level=DEBUG tests
| |
Add Travis configuration with PHP 7.1 build for SS 3.6 | # See https://github.com/silverstripe/silverstripe-travis-support for setup details
sudo: false
language: php
php:
- 5.3
- 5.4
- 5.5
env:
- DB=MYSQL CORE_RELEASE=3.5
matrix:
include:
- php: 5.6
env: DB=PGSQL CORE_RELEASE=3
- php: 5.6
env: DB=PGSQL CORE_RELEASE=3.4
- php: 7.1
env: DB=MYSQL CORE_RELEASE=3.6
before_script:
- composer self-update || true
- git clone git://github.com/silverstripe/silverstripe-travis-support.git ~/travis-support
- php ~/travis-support/travis_setup.php --source `pwd` --target ~/builds/ss --require silverstripe/cms:^3.4
- cd ~/builds/ss
script:
- vendor/bin/phpunit siteconfig/tests
| |
Add mounted postfix compose script | version: '2'
services:
db:
image: mysql:5.6 # mimics recommendations for AWS RDS
environment:
- MYSQL_ROOT_PASSWORD
- MYSQL_DATABASE=drupal
- MYSQL_USER=drupal
- MYSQL_PASSWORD
ports:
- "3306:3306"
web:
image: plainlychrist/site-buildbarbuda:unstable
environment:
- MYSQL_DATABASE=drupal
- MYSQL_USER=drupal
- MYSQL_PASSWORD
- WEB_ADMIN_PASSWORD
- POSTFIX_DOMAIN
- POSTFIX_RELAY_HOST
- POSTFIX_USER
- POSTFIX_PASSWORD
command: ['--use-mysql', '--use-postfix', '--trust-this-ec2-host', '--trust-host-pattern', '^localhost$$']
ports:
- "443:443"
depends_on:
- db
volumes:
- ~/site.root.history:/root/.bash_history
- ~/site.drupaladmin.history:/home/drupaladmin/.bash_history
- ~/var-lib-site-storage-config:/var/lib/site/storage-config
| |
Add closed issue message github action | name: Closed Issue Message
on:
issues:
types: [closed]
jobs:
auto_comment:
runs-on: ubuntu-latest
steps:
- uses: aws-actions/closed-issue-message@v1
with:
# These inputs are both required
repo-token: "${{ secrets.GITHUB_TOKEN }}"
message: |
### ⚠️COMMENT VISIBILITY WARNING⚠️
Comments on closed issues are hard for our team to see.
If you need more assistance, please either tag a team member or open a new issue that references this one.
If you wish to keep having a conversation with other community members under this issue feel free to do so.
| |
Add build and test ci with gh actions | name: Build and test
on:
branches:
- master
pull_request:
branches:
- master
jobs:
test:
name: Test on node ${{ matrix.node_version }}
runs-on: ${{ matrix.os }}
strategy:
matrix:
node_version: [8, 10, 12]
os: [ubuntu-latest, windows-latest, macOS-latest]
steps:
- uses: actions/checkout@master
- name: Use Node.js ${{ matrix.node_version }}
uses: actions/setup-node@v1
with:
version: ${{ matrix.node_version }}
- name: npm install, build, and test
run: |
npm install
npm test
| |
Update from Hackage at 2019-05-17T20:09:39Z | homepage: https://github.com/andrewthad/primitive-unlifted
changelog-type: markdown
hash: 374860a58684a751112ec3a2783419007c1252cce4c87fc4924be5e8f743267d
test-bench-deps:
stm: -any
base: -any
primitive-unlifted: -any
primitive: -any
maintainer: andrew.thaddeus@gmail.com
synopsis: Primitive GHC types with unlifted types inside
changelog: |
# Changelog
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/)
and this project adheres to the [Haskell Package Versioning Policy](https://pvp.haskell.org/).
## 0.1.0.0 -- YYYY-mm-dd
* First version. Released on an unsuspecting world.
basic-deps:
base: ! '>=4.11.1.0 && <5'
primitive: ! '>=0.7 && <0.8'
all-versions:
- 0.1.0.0
author: Andrew Martin
latest: 0.1.0.0
description-type: haddock
description: ''
license-name: BSD-3-Clause
| |
Use py37 env for test. | version: 2
jobs:
py3test:
working_directory: ~/custodian
docker:
- image: materialsvirtuallab/circle-ci-pmg-py3:0.0.2
steps:
- checkout
- run:
command: |
export PATH=$HOME/miniconda3/bin:$PATH
conda create --quiet --yes --name test_env python=3.6
source activate test_env
conda install --quiet --yes numpy scipy matplotlib sympy pandas
conda install --quiet --yes -c openbabel openbabel
conda update --quiet --all
mkdir coverage_reports
pip install --upgrade pip
pip install --quiet --ignore-installed -r requirements.txt -r requirements-ci.txt
# Add executables and path.
export MPLBACKEND=Agg
pip install --quiet -e .
nosetests --config=nose.cfg --cover-html --cover-html-dir=coverage_reports
no_output_timeout: 3600
- store_artifacts:
path: coverage_reports/
destination: tr1
- store_test_results:
path: coverage_reports/
workflows:
version: 2
build_and_test:
jobs:
- py3test
| version: 2
jobs:
py3test:
working_directory: ~/custodian
docker:
- image: materialsvirtuallab/circle-ci-pmg-py3:0.0.2
steps:
- checkout
- run:
command: |
export PATH=$HOME/miniconda3/bin:$PATH
conda create --quiet --yes --name test_env python=3.7
source activate test_env
conda install --quiet --yes numpy scipy matplotlib sympy pandas
conda install --quiet --yes -c openbabel openbabel
conda update --quiet --all
mkdir coverage_reports
pip install --upgrade pip
pip install --quiet --ignore-installed -r requirements.txt -r requirements-ci.txt
# Add executables and path.
export MPLBACKEND=Agg
pip install --quiet -e .
nosetests --config=nose.cfg --cover-html --cover-html-dir=coverage_reports
no_output_timeout: 3600
- store_artifacts:
path: coverage_reports/
destination: tr1
- store_test_results:
path: coverage_reports/
workflows:
version: 2
build_and_test:
jobs:
- py3test
|
Update SHA256 after tag update | {% set repo_name = "whitebox-geospatial-analysis-tools" %}
{% set name = "whitebox_tools" %}
{% set version = "0.1.1" %}
{% set sha256 = "9eafecddb122479b66e9945e0611e5e9a3858aa0455480f3686b6d5c536507e9" %}
package:
name: {{ name|lower }}
version: {{ version }}
source:
path: {{ name }}
fn: {{ name|lower }}-{{ version }}.tar.gz
url: https://github.com/ContinuumIO/{{ repo_name }}/archive/v{{ version }}.tar.gz
sha256: {{ sha256 }}
build:
number: 0
skip: True # [win or py2k]
requirements:
build:
- python
- setuptools
- rust
- openssl
- toolchain
run:
- python
- rust
test:
imports:
- whitebox_tools
about:
home: http://github.com/ContinuumIO/{{ repo_name }}/tree/v{{ version }}/{{ name }}
license: MIT
license_family: MIT
license_file: {{ name }}/LICENSE.txt
summary: 'A source of plugin tools for _Whitebox GAT_, an open-source GIS and remote sensing package.'
extra:
recipe-maintainers:
- gbrener
| {% set repo_name = "whitebox-geospatial-analysis-tools" %}
{% set name = "whitebox_tools" %}
{% set version = "0.1.1" %}
{% set sha256 = "1a5b80b6f0fffaae67aa8d22e0c8696f4d8368365a0962d5436395e93eb7bb80" %}
package:
name: {{ name|lower }}
version: {{ version }}
source:
path: {{ name }}
fn: {{ name|lower }}-{{ version }}.tar.gz
url: https://github.com/ContinuumIO/{{ repo_name }}/archive/v{{ version }}.tar.gz
sha256: {{ sha256 }}
build:
number: 0
skip: True # [win or py2k]
requirements:
build:
- python
- setuptools
- rust
- openssl
- toolchain
run:
- python
- rust
test:
imports:
- whitebox_tools
about:
home: http://github.com/ContinuumIO/{{ repo_name }}/tree/v{{ version }}/{{ name }}
license: MIT
license_family: MIT
license_file: {{ name }}/LICENSE.txt
summary: 'A source of plugin tools for _Whitebox GAT_, an open-source GIS and remote sensing package.'
extra:
recipe-maintainers:
- gbrener
|
Set up CI with Azure Pipelines | # Python package
# Create and test a Python package on multiple Python versions.
# Add steps that analyze code, save the dist with the build record, publish to a PyPI-compatible index, and more:
# https://docs.microsoft.com/azure/devops/pipelines/languages/python
trigger:
branches:
include:
- master
- staging
- trying
paths:
include:
- pyvisa_py
- setup.py
- azure-pipelines.yml
pr:
- master
variables:
PYVISA_KEYSIGHT_VIRTUAL_INSTR: 1
pool:
name: default
demands: KEYSIGHT -equals TCPIP
steps:
- script: |
echo Activate conda
call $(CONDA_PATH)\activate.bat
echo Create environment
conda create -n test_ python=3.7 numpy --yes
displayName: 'Create environment'
- script: |
echo Activate conda
call $(CONDA_PATH)\activate.bat
echo Activate environment
call conda activate test_
echo Install project
pip install -e .
displayName: 'Install dependencies'
- script: |
echo Activate conda
call $(CONDA_PATH)\activate.bat
echo Activate environment
call conda activate test_
echo Install pytest and co
pip install pytest pytest-azurepipelines pytest-cov
echo Run pytest
python -X dev -m pytest --pyargs pyvisa --cov pyvisa --cov-report xml
displayName: 'Run tests'
- script: |
echo Activate conda
call $(CONDA_PATH)\activate.bat
echo Activate environment
call conda activate test_
echo Install codecov
pip install codecov
echo Run codecov
codecov --file coverage.xml --token $(CODECOV_TOKEN) --env PYVISA_KEYSIGHT_VIRTUAL_INSTR --tries 5 --required -F unittest --name codecov-umbrella
displayName: 'Upload test coverage results'
- script: |
call $(CONDA_PATH)\activate.bat
conda remove -n test_ --all --yes
displayName: 'Remove test environment'
condition: always()
| |
Update from Hackage at 2015-07-31T14:19:04+0000 | homepage: https://github.com/tibbe/ekg-json
changelog-type: ''
hash: 36572f70392960b874000598bf21238e7ccffd8a17219835a90f705de013f7bd
test-bench-deps: {}
maintainer: johan.tibell@gmail.com
synopsis: JSON encoding of ekg metrics
changelog: ''
basic-deps:
ekg-core: ! '>=0.1 && <0.2'
base: ! '>=4.5 && <4.9'
unordered-containers: <0.3
text: <1.3
aeson: <0.11
all-versions:
- '0.1.0.0'
author: Johan Tibell
latest: '0.1.0.0'
description-type: haddock
description: ! 'Encodes ekg metrics as JSON, using the same encoding as used by the
ekg package, thus allowing ekg metrics to be served by other HTTP
servers than the one used by the ekg package.'
license-name: BSD3
| |
Deploy assets to S3 via GitHub Action | name: Deploy
on:
push:
branches: ["master"]
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
jobs:
deploy:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: us-west-2
- name: Deploy static site to S3 bucket
# bucket name: `servo-builds2`
run: aws s3 sync . s3://servo-builds2 --delete --acl=public_read
| |
Update from Hackage at 2015-10-06T08:17:30+0000 | homepage: https://github.com/ocean0yohsuke/deepcontrol
changelog-type: ''
hash: ffb89821e78fce97e48fc82ab6b7102e653b0d11e5d28c50e3786b59e13ef808
test-bench-deps:
base: -any
deepcontrol: -any
doctest: ! '>=0.9.3'
HUnit: ! '>=1.3.0'
QuickCheck: ! '>=2.8.1'
maintainer: ocean0yohsuke@gmail.com
synopsis: Enable deeper level style of programming than the usual control provides
changelog: ''
basic-deps:
base: ! '>=4.8 && <4.9'
mtl: ! '>=2.2 && <2.3'
all-versions:
- '0.1.0.0'
author: KONISHI Yohsuke
latest: '0.1.0.0'
description-type: haddock
description: This module enables deeper level style of programming than the usual
control provides, especially for Applicative and Monad.
license-name: BSD3
| |
Add a playbook for archiving Cassandra logs | ---
- hosts: kv
vars:
cassandra_home: /opt/apache-cassandra
tasks:
- name: creating a temporary directory
tempfile:
state: directory
register: result
- name: archiving the database logs
archive:
path: "{{ cassandra_home }}/logs"
dest: "{{ result.path }}/{{ inventory_hostname }}.zip"
format: zip
- name: downloading the remote logs
fetch:
src: "{{ result.path }}/{{ inventory_hostname }}.zip"
dest: ../
flat: yes
- name: removing temporary files
file: path={{ result.path }} state=absent
| |
Use AppVeyor as our CI. | # gitversion will change the version number
version: x-{build}
# Install scripts. (runs after repo cloning)
install:
- choco install gitversion.portable -pre -y
before_build:
- npm install
- ps: gitversion /output buildserver
# Post-install test scripts.
test_script:
- npm test
# Don't actually build.
build: off
pull_requests:
do_not_increment_build_number: true
assembly_info:
patch: false
| |
Set up CI with Azure Pipelines | # Node.js
# Build a general Node.js project with npm.
# Add steps that analyze code, save build artifacts, deploy, and more:
# https://docs.microsoft.com/azure/devops/pipelines/languages/javascript
pool:
vmImage: 'Ubuntu 16.04'
steps:
- task: NodeTool@0
inputs:
versionSpec: '8.x'
displayName: 'Install Node.js'
- script: |
yarn
yarn test
displayName: 'yarn install and test'
| |
Update from Hackage at 2022-08-08T11:43:13Z | homepage: https://github.com/stla/jackpolynomials#readme
changelog-type: markdown
hash: f973ec71af44f6b63fac4382105716d124779cf43a9db8b9dd258b680bbe1cce
test-bench-deps: {}
maintainer: laurent_step@outlook.fr
synopsis: Jack, zonal, and Schur polynomials
changelog: |-
1.0.0.0
-------
* initial release
basic-deps:
base: '>=4.7 && <5'
array: '>=0.5.4.0 && <0.6'
mpolynomials: '>=0.1.0.0 && <0.2'
lens: '>=5.0.1 && <5.1'
math-functions: '>=0.3.4.2 && <0.4'
ilist: '>=0.4.0.1 && <0.5'
numeric-prelude: '>=0.4.4 && <0.5'
all-versions:
- 1.0.0.0
author: Stéphane Laurent
latest: 1.0.0.0
description-type: markdown
description: "# jackpolynomials\n\nSchur polynomials have applications in combinatorics
and zonal polynomials have\napplications in multivariate statistics. They are particular
cases of\n[Jack polynomials](https://en.wikipedia.org/wiki/Jack_function). This
package\nallows to evaluate these polynomials. It can also compute their symbolic
form.\n\n```haskell\nimport Math.Algebra.Jack\nimport Data.Ratio\njackPol [1, 1]
[3, 1] (2%1)\n-- 48 % 1\n```\n\n```haskell\nimport Math.Algebra.JackPol\nimport
Data.Ratio\nimport Math.Algebra.MultiPol\njp = jackPol 2 [3, 1] (2%1)\njp\n-- (M
(Monomial {coefficient = 18 % 1, powers = fromList [1,3]}) \n-- :+: \n-- M (Monomial
{coefficient = 12 % 1, powers = fromList [2,2]})) \n-- :+: \n-- M (Monomial {coefficient
= 18 % 1, powers = fromList [3,1]})\nprettyPol show \"x\" jp\n-- \"(18 % 1) * x^(1,
3) + (12 % 1) * x^(2, 2) + (18 % 1) * x^(3, 1)\"\nevalPoly jp [1, 1]\n-- 48 % 1\n```\n"
license-name: GPL-3.0-only
| |
Update from Hackage at 2015-12-31T09:35:47+0000 | homepage: http://github.com/ylilarry/fcache#readme
changelog-type: ''
hash: f74d13173494b3dc2e4a25ad6c111cec31712cb9434f32ea4135e12d9ba2ac39
test-bench-deps:
base: -any
hspec: -any
fcache: -any
mtl: -any
maintainer: ylilarry@gmail.com
synopsis: Cache a function (a -> b)
changelog: ''
basic-deps:
base: ! '>=4.7 && <5'
unordered-containers: -any
containers: -any
mtl: -any
hashable: -any
all-versions:
- '0.1.0.0'
author: Yu Li
latest: '0.1.0.0'
description-type: haddock
description: Please see README.md
license-name: BSD3
| |
Disable Firefox beta in Travis tests until unbranded build is available | sudo: false
language: cpp
compiler:
- gcc
env:
- FX_CHANNEL=""
- FX_CHANNEL="-esr"
- FX_CHANNEL="-beta"
matrix:
fast_finish: true
allow_failures:
- env: FX_CHANNEL="-beta"
notifications:
email: false
install:
- wget -O tarball "https://download.mozilla.org/?product=firefox${FX_CHANNEL}-latest&os=linux64&lang=en-US"
- tar xf tarball
before_script:
- export DISPLAY=:99.0
- sh -e /etc/init.d/xvfb start
script:
- test/runtests.sh -x firefox/firefox
| sudo: false
language: cpp
compiler:
- gcc
env:
- FX_CHANNEL=""
- FX_CHANNEL="-esr"
# Disabled until unbranded build is available
#- FX_CHANNEL="-beta"
matrix:
fast_finish: true
allow_failures:
- env: FX_CHANNEL="-beta"
notifications:
email: false
install:
- wget -O tarball "https://download.mozilla.org/?product=firefox${FX_CHANNEL}-latest&os=linux64&lang=en-US"
- tar xf tarball
before_script:
- export DISPLAY=:99.0
- sh -e /etc/init.d/xvfb start
script:
- test/runtests.sh -x firefox/firefox
|
Update from Hackage at 2018-03-21T20:10:01Z | homepage: https://github.com/identicalsnowflake/hlrdb
changelog-type: ''
hash: 1fe6b06dcbe251c3173eeef37e5d4189478c8a922675a38857fe5fbeffb4faef
test-bench-deps: {}
maintainer: identicalsnowflake@protonmail.com
synopsis: High-level Redis Database
changelog: ''
basic-deps:
bytestring: ^>=0.10.8.1
time-exts: ^>=3.0
base: ! '>=4.9 && <5.0'
base64-bytestring: ^>=1.0.0.1
unordered-containers: ^>=0.2.8.0
hlrdb-core: ^>=0.1
memory: ^>=0.14.8
store: ^>=0.4.3.2
cryptonite: ^>=0.24
hashable: ^>=1.2.6.1
random: ^>=1.1
hedis: ^>=0.10.1
all-versions:
- '0.1.0.0'
author: Identical Snowflake
latest: '0.1.0.0'
description-type: haddock
description: A library for type-driven interaction with Redis
license-name: MIT
| |
Add sphinx-testing recipe from conda skeleton pypi | {% set name = "sphinx-testing" %}
{% set version = "1.0.1" %}
package:
name: "{{ name|lower }}"
version: "{{ version }}"
source:
url: "https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/{{ name }}-{{ version }}.tar.gz"
sha256: "ef661775b5722d7b00f67fc229104317d35637a4fb4434bf2c005afdf1da4d09"
build:
number: 0
script: "{{ PYTHON }} -m pip install . --no-deps --ignore-installed -vv "
requirements:
host:
- pip
- python
- six
- sphinx
run:
- python
- six
- sphinx
test:
imports:
- sphinx_testing
about:
home: "https://github.com/sphinx-doc/sphinx-testing"
license: "BSD"
license_family: "BSD"
license_file: ""
summary: "testing utility classes and functions for Sphinx extensions"
doc_url: ""
dev_url: ""
extra:
recipe-maintainers:
- your-github-id-here
| |
Update from Hackage at 2016-01-13T18:36:00+0000 | homepage: https://github.com/s9gf4ult/dom-parser
changelog-type: markdown
hash: 8d509db218b80863aa5b9a23474f27caaf894b028c11a9f0aae2e1d7722e94a7
test-bench-deps:
shakespeare: -any
dom-parser: -any
xml-conduit: -any
base: -any
hspec: -any
text: -any
data-default: -any
maintainer: s9gf4ult@gmail.com
synopsis: Simple monad for parsing DOM
changelog: ! '# CHANGELOG
## 0.0.1
* First working version
'
basic-deps:
shakespeare: -any
xml-conduit: -any
base: ! '>=4.7 && <5'
text: -any
semigroups: -any
lens: -any
mtl: -any
transformers: -any
all-versions:
- '0.0.1'
author: Aleksey Uimanov
latest: '0.0.1'
description-type: haddock
description: ''
license-name: BSD3
| |
Add a recipe for packaging. | {% set name = "packaging" %}
{% set version = "16.7" %}
package:
name: {{ name }}
version: {{ version }}
source:
fn: {{ name }}-{{ version }}.tar.gz
url: https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/{{ name }}-{{ version }}.tar.gz
md5: 5bfeb52de8dee2fcc95a003b0ebe9011
build:
number: 0
script: python setup.py install --single-version-externally-managed --record=record.txt
requirements:
build:
- python
- setuptools
- pyparsing
- six
run:
- python
- pyparsing
- six
test:
imports:
- packaging
about:
home: https://github.com/pypa/packaging
license: Apache 2.0 or BSD 2-Clause
summary: Core utilities for Python packages
extra:
recipe-maintainers:
- jakirkham
| |
Remove API jar from CircleCI release artifacts |
checkout:
post:
# clone the Glowkit submodule (see https://circleci.com/docs/configuration)
- git submodule sync
- git submodule update --init
# setup for mvn dependency:resolve (see http://stackoverflow.com/questions/7772556/maven-fails-to-resolve-dependency)
- (cd Glowkit && mvn install)
test:
post:
# release artifacts for API jar and server
- rm Glowkit/target/*shaded*
- cp Glowkit/target/*.jar $CIRCLE_ARTIFACTS/
- cp -r target/glowstone*.jar $CIRCLE_ARTIFACTS/
|
checkout:
post:
# clone the Glowkit submodule (see https://circleci.com/docs/configuration)
- git submodule sync
- git submodule update --init
# setup for mvn dependency:resolve (see http://stackoverflow.com/questions/7772556/maven-fails-to-resolve-dependency)
- (cd Glowkit && mvn install)
test:
post:
- cp -r target/glowstone*.jar $CIRCLE_ARTIFACTS/
|
Update from Hackage at 2020-08-30T20:16:33Z | homepage: https://hackage.haskell.org/package/uniqueness-periods-vector-common
changelog-type: markdown
hash: c222b702d909f9334642af53a294fbd86b6ec9a2d9b38ce619e29cf12324f43e
test-bench-deps: {}
maintainer: olexandr543@yahoo.com
synopsis: Generalization of the dobutokO-poetry-general package functionality
changelog: |
# Revision history for uniqueness-periods-vector-common
## 0.1.0.0 -- 2020-08-30
* First version. Released on an unsuspecting world.
basic-deps:
base: '>=4.7 && <4.15'
vector: '>=0.11 && <0.14'
all-versions:
- 0.1.0.0
author: OleksandrZhabenko
latest: 0.1.0.0
description-type: haddock
description: Generalization of the dobutokO-poetry-general package functionality.
Can be used to rearrange 7 sublists in the list to obtain somewhat more suitable
list for some purposes.
license-name: MIT
| |
Update from Hackage at 2019-04-17T13:34:24Z | homepage: https://github.com/groscoe/simple-units#readme
changelog-type: ''
hash: 3b7a85c47b2ede1972e4bdc50c783f4a5d621229ce46137cc8af75fc16fab86e
test-bench-deps: {}
maintainer: gustavo@gustavoroscoe.com
synopsis: Simple arithmetic with SI units using type-checked dimensional analysis.
changelog: ''
basic-deps:
first-class-families: ! '>=0.5 && <0.6'
base: ! '>=4.9 && <5'
all-versions:
- 1.0.0.0
author: Gustavo Roscoe
latest: 1.0.0.0
description-type: markdown
description: |
# simple-units
A Haskell library for simple arithmetic with SI units using type-checked dimensional analysis.
```haskell
>>> let newton = kilogram .* meter ./ (second .* second)
>>> 23*newton
23.0 kg*m/s^2
>>> let g = 6.67408e-11 * newton .* (meter .* meter) ./ (kilogram .* kilogram)
>>> g -- gravitational constant
6.67408e-11 m^3/kg*s^2
>>> let gravity m1 m2 r = g .* (m1 * kilogram) .* (m2 * kilogram) ./ (r*meter .* r*meter)
>>> let earth_mass = 5.972e24 * kilogram
>>> let mars_mass = 6.417e23 * kilogram
>>> let earth_radius = 6371e3 * meter
>>> let mars_radius = 3389.5e3 * meter
>>> let weight_on_earth mass = gravity mass earth_mass earth_radius
>>> let weight_on_mars mass = gravity mass mars_mass mars_radius
>>> weight_on_earth (80 * kilogram)
785.5719790179963 kg*m/s^2
>>> weight_on_mars (80 * kilogram)
298.22370259533704 kg*m/s^2
>>> weight_on_mars 1 ./ weight_on_earth 1
0.3796261966575378 <adimensional>
```
license-name: MIT
| |
Add docker compose file for all services for public USSD line | version: '3'
services:
junebug:
image: praekeltfoundation/junebug:alpine
ports:
- '80:80'
- '9001:9001'
links:
- rabbitmq
- redis
environment:
- AMQP_HOST=rabbitmq
- AMQP_VHOST=/
- REDIS_HOST=redis
command: jb --channel 'telnet_addr:vumi.transports.telnet.AddressedTelnetServerTransport'
restart: always
public_ussd:
build: ../
volumes:
- ./:/config
environment:
- AMQP_HOST=rabbitmq
- CONFIG_FILE=/config/public.yaml
links:
- identitystore
- hub
- sbm
- messagesender
- redis
- rabbitmq
restart: always
redis:
image: redis:alpine
restart: always
rabbitmq:
image: rabbitmq:alpine
restart: always
postgres:
image: postgres:alpine
environment:
- POSTGRES_PASSWORD=postgres
restart: always
identitystore:
image: praekeltfoundation/seed-identity-store
links:
- postgres
environment:
- IDENTITIES_DATABASE=postgres://postgres:postgres@postgres/postgres
- BROKER_URL=amqp://guest:guest@rabbitmq//
restart: always
hub:
image: praekeltfoundation/ndoh-hub
links:
- postgres
environment:
- HUB_DATABASE=postgres://postgres:postgres@postgres/postgres
- BROKER_URL=amqp://guest:guest@rabbitmq//
restart: always
sbm:
image: praekeltfoundation/seed-stage-based-messaging
links:
- postgres
environment:
- STAGE_BASED_MESSAGING_DATABASE=postgres://postgres:postgres@postgres/postgres
- BROKER_URL=amqp://guest:guest@rabbitmq//
restart: always
messagesender:
image: praekeltfoundation/seed-message-sender
links:
- postgres
environment:
- MESSAGE_SENDER_DATABASE=postgres://postgres:postgres@postgres/postgres
- BROKER_URL=amqp://guest:guest@rabbitmq//
restart: always | |
Add file format to carry claim mappings. | ###############################################################################
# Copyright (c) 2016, WSO2 Inc. (http://www.wso2.org) All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
---
# Configurations related to claim mappings
claimMappings:
applications:
-
dialectURI: "http://application1.com"
name: "Application1"
mappings:
name : "http://wso2.org/claims/username"
role : "http://wso2.org/claims/role"
mobile: "http://wso2.org/claims/mobile"
email : "http://wso2.org/claims/email"
identityProviders:
-
dialectURI: "http://identityprovider1.com"
name: "IDP1"
mappings:
idp-name : "http://wso2.org/claims/username"
idp-role : "http://wso2.org/claims/role"
idp-mobile: "http://wso2.org/claims/mobile"
idp-email : "http://wso2.org/claims/email"
standards:
-
dialectURI: "urn:scim:schemas:core:1.0"
name: "SCIM"
mappings:
username : "http://wso2.org/claims/username"
role : "http://wso2.org/claims/role"
mobile: "http://wso2.org/claims/mobile"
email : "http://wso2.org/claims/email" | |
Set up CI with Azure Pipelines | pr:
- master
pool:
vmImage: "ubuntu-latest"
steps:
- task: NodeTool@0
inputs:
versionSpec: "10.x"
displayName: "Install NodeJS"
- script: |
npm install -g vsce
displayName: "Install VSCE"
- script: |
npm install
displayName: "NPM Install"
- script: |
vsce package --out $(Build.ArtifactStagingDirectory)
displayName: "VSCE Package"
- task: PublishBuildArtifacts@1
inputs:
pathtoPublish: $(Build.ArtifactStagingDirectory)
artifactName: drop
| |
Add CI via Github Actions | name: ci
on: [push, pull_request]
jobs:
autotools:
runs-on: ubuntu-latest
steps:
- name: Prepare
run: |
sudo apt update -qq
sudo apt install -qq check
pip install cpp-coveralls
- uses: actions/checkout@v2
- name: Build
run: |
./autogen.sh
./configure --enable-check --enable-debug --enable-gcov
make V=1
- name: Install
run: sudo make install
- name: Run tests
run: make check
- name: Upload coverage
if: github.repository == 'c9s/r3'
run: coveralls --exclude php
cmake:
runs-on: ubuntu-latest
steps:
- name: Prepare
run: |
sudo apt update -qq
sudo apt install -qq check ninja-build
- uses: actions/checkout@v2
- name: Build and test
run: |
mkdir build && cd build
cmake -GNinja ..
ninja -v
ctest --verbose
sanitizers:
name: ${{ matrix.sanitizer }}-sanitizer [${{ matrix.compiler }}]
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
compiler: [gcc, clang]
sanitizer: [thread, undefined, leak, address]
steps:
- name: Prepare
run: |
sudo apt update -qq
sudo apt install -qq check
- uses: actions/checkout@v2
- name: Build
env:
CC: ${{ matrix.compiler }}
run: |
mkdir build && cd build
CFLAGS="-fsanitize=${{ matrix.sanitizer }} -fno-sanitize-recover=all -fno-omit-frame-pointer" cmake ..
VERBOSE=1 make all
- name: Test
run: |
cd build
ctest --verbose
| |
Set up CI with Azure Pipelines | # Node.js with gulp
# Build a Node.js project using the gulp task runner.
# Add steps that analyze code, save build artifacts, deploy, and more:
# https://docs.microsoft.com/azure/devops/pipelines/languages/javascript
pool:
vmImage: 'Ubuntu 16.04'
steps:
- task: NodeTool@0
inputs:
versionSpec: '8.x'
displayName: 'Install Node.js'
- script: |
npm install
gulp default --gulpfile gulpfile.js
displayName: 'npm install and run gulp'
| |
Set up CI with Azure Pipelines | # Universal Windows Platform
# Build a Universal Windows Platform project using Visual Studio.
# Add steps that test and distribute an app, save build artifacts, and more:
# https://aka.ms/yaml
trigger:
- master
pool:
vmImage: 'VS2017-Win2016'
variables:
solution: '**/*.sln'
buildPlatform: 'x86|x64|ARM'
buildConfiguration: 'Release'
appxPackageDir: '$(build.artifactStagingDirectory)\AppxPackages\\'
steps:
- task: NuGetToolInstaller@0
- task: NuGetCommand@2
inputs:
restoreSolution: '$(solution)'
- task: VSBuild@1
inputs:
platform: 'x86'
solution: '$(solution)'
configuration: '$(buildConfiguration)'
msbuildArgs: '/p:AppxBundlePlatforms="$(buildPlatform)" /p:AppxPackageDir="$(appxPackageDir)" /p:AppxBundle=Always /p:UapAppxPackageBuildMode=StoreUpload'
| |
Create template for logobatch home dotfile | default_email: example_email@gmail.com
storage:
- name: remote1
hostname: remote1_nfs.rc.uni.edu
- name: cluster_shared
hostname: cluster_nfs.nfs.rc.uni.edu
default_path: /mnt/cluster_shared
compute:
- name: Compute1
type: compute
hostname: compute1.rc.uni.edu
fast_storage: ['/disk/scratch', '/disk/scratch2']
remote_storage:
- name: remote1
path: /share/remote_nfs
- name: Compute2
type: compute
hostname: compute2.rc.uni.edu
fast_storage: ['/disk/scratch', '/disk/scratch2']
remote_storage:
- name: remote1
path: /mnt/remote_nfs
- name: Cluster1
type: cluster_slurm
hostname: cluster1.rc.uni.edu
fast_storage: /disk/scratch
remote_storage:
- name: cluster_shared
- name: Cluster2
type: cluster_sge
hostname: cluster2.rc.uni.edu
remote_storage:
- name: cluster_shared
| |
Add circleCI for build test | version: 2
aliases:
docker-cp: &docker-cp
run:
name: Create Docker Volume
command: |
docker create -v /${WORKING_DIR} --name ${VOLUME} --privileged tizenrt/tizenrt:${DOCKER_IMG_VERSION} /bin/bash
docker cp ./. ${VOLUME}:/${WORKING_DIR}
docker run -d --rm -it --name ${BUILDER} --volumes-from=${VOLUME} -w /${WORKING_DIR}/os --privileged tizenrt/tizenrt:${DOCKER_IMG_VERSION} /bin/bash
arm-version: &arm-version
run:
name: ARM GCC Version
command: |
docker exec ${BUILDER} arm-none-eabi-gcc --version
run-test: &run-test
run:
name: TizenRT Build Test
command: |
docker exec -it ${BUILDER} bash -c "cd tools; ./build_test.sh"
jobs:
checkout_code:
machine: true
working_directory: ~/TizenRT
steps:
- checkout
- persist_to_workspace:
root: ~/TizenRT
paths:
- ./
build_test:
machine: true
working_directory: ~/TizenRT
steps:
- attach_workspace:
at: ~/TizenRT
- *docker-cp
- *run-test
workflows:
version: 2
build-tizenrt:
jobs:
- checkout_code
- build_test:
requires:
- checkout_code
| |
Add thoughtbot-specific SCSS style guide | scss_files: "**/*.scss"
linters:
BangFormat:
enabled: true
space_before_bang: true
space_after_bang: false
BorderZero:
enabled: false
ColorKeyword:
enabled: true
severity: warning
Comment:
enabled: true
DebugStatement:
enabled: true
DeclarationOrder:
enabled: true
DuplicateProperty:
enabled: true
ElsePlacement:
enabled: true
style: same_line
EmptyLineBetweenBlocks:
enabled: true
ignore_single_line_blocks: true
EmptyRule:
enabled: true
FinalNewline:
enabled: true
present: true
HexLength:
enabled: false
style: short
HexNotation:
enabled: true
style: lowercase
HexValidation:
enabled: true
IdSelector:
enabled: true
ImportPath:
enabled: true
leading_underscore: false
filename_extension: false
Indentation:
enabled: true
character: space
width: 2
LeadingZero:
enabled: true
style: include_zero
MergeableSelector:
enabled: true
force_nesting: true
NameFormat:
enabled: true
allow_leading_underscore: true
convention: hyphenated_lowercase
NestingDepth:
enabled: true
max_depth: 4
severity: warning
PlaceholderInExtend:
enabled: false
PropertySortOrder:
enabled: true
ignore_unspecified: false
severity: warning
PropertySpelling:
enabled: true
extra_properties: []
QualifyingElement:
enabled: true
allow_element_with_attribute: false
allow_element_with_class: false
allow_element_with_id: false
severity: warning
SelectorDepth:
enabled: true
max_depth: 2
severity: warning
SelectorFormat:
enabled: true
convention: hyphenated_lowercase
Shorthand:
enabled: true
severity: warning
SingleLinePerProperty:
enabled: true
allow_single_line_rule_sets: true
SingleLinePerSelector:
enabled: true
SpaceAfterComma:
enabled: true
SpaceAfterPropertyColon:
enabled: true
style: one_space
SpaceAfterPropertyName:
enabled: true
SpaceBeforeBrace:
enabled: true
style: space
allow_single_line_padding: false
SpaceBetweenParens:
enabled: true
spaces: 0
StringQuotes:
enabled: true
style: double_quotes
TrailingSemicolon:
enabled: true
TrailingZero:
enabled: false
UnnecessaryMantissa:
enabled: true
UnnecessaryParentReference:
enabled: true
UrlFormat:
enabled: true
UrlQuotes:
enabled: true
VendorPrefixes:
enabled: true
identifier_list: bourbon
include: []
exclude: []
ZeroUnit:
enabled: true
severity: warning
Compass::*:
enabled: false
| |
Update from Hackage at 2019-06-17T19:09:54Z | homepage: https://github.com/haskell-primitive/primitive-ffi
changelog-type: markdown
hash: 194a02c3465fbc8eed5dd2f22f7d5d22201b3a33b873358c221028b1b3f4b8dc
test-bench-deps:
primitive-foreign: -any
base: -any
QuickCheck: -any
primitive: -any
maintainer: chessai <chessai1996@gmail.com>
synopsis: using the `Prim` interface for the FFI
changelog: |-
# Changelog
`primitive-ffi` uses [PVP Versioning][1].
The changelog is available [on GitHub][2].
0.0.0
=====
* Initially created.
[1]: https://pvp.haskell.org
[2]: https://github.com/haskell-primitive/primitive-ffi/releases
basic-deps:
base: ! '>=4.10.1 && <4.13'
primitive: ! '>=0.6.4 && <0.8'
all-versions:
- '0.1'
author: chessai
latest: '0.1'
description-type: markdown
description: "# primitive-foreign\n\n[](https://hackage.haskell.org/package/primitive-foreign)\n[](LICENSE)\n[](https://travis-ci.com/haskell-primitive/primitive-foreign)\n
\nThe goal of this library is to make it possible to avoid the duplicated code between
`Storable` and `Prim` APIs when one is working mostly with the `primitive` or `contiguous`
APIs, by using the `Prim` interface to facilitate marshalling of values in memory.\n"
license-name: BSD-3-Clause
| |
Add recipe for simmer package | {% set name = "simmer" %}
{% set version = "0.3.7" %}
package:
name: "{{ name|lower }}"
version: "{{ version }}"
source:
url: "https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/{{ name }}-{{ version }}.tar.gz"
sha256: 7b6bb13189ebcec123e290163dce18d98c697c7fa9d8a78112cce2cdea53379e
build:
number: 0
skip: True # [py<36]
script: "{{ PYTHON }} -m pip install . --no-deps -vv"
requirements:
host:
- astropy >=3.1.2
- cerberus >=1.3.2
- matplotlib >=3.0.2
- nbconvert ==5.6.1
- nbsphinx ==0.5.1
- numpy
- openpyxl >=2.5.12
- pandas ==1.0
- pillow >=6.2.0
- pip
- pygments >=2.4.2
- python
- pyyaml >=5.3.1
- scikit-image >=0.16.2
- scipy >=1.1.0
- tqdm
- xlrd >=1.2.0
run:
- astropy >=3.1.2
- cerberus >=1.3.2
- matplotlib >=3.0.2
- nbconvert ==5.6.1
- nbsphinx ==0.5.1
- numpy
- openpyxl >=2.5.12
- pandas ==1.0
- pillow >=6.2.0
- pygments >=2.4.2
- python
- pyyaml >=5.3.1
- scikit-image >=0.16.2
- scipy >=1.1.0
- tqdm
- xlrd >=1.2.0
test:
imports:
- simmer
- simmer.Examples
- simmer.Examples.PHARO
- simmer.Examples.Shane
- simmer.schemas
- simmer.tests
about:
home: https://github.com/arjunsavel/SImMER
license: MIT
license_family: MIT
license_file: LICENSE.md
summary: "An open-source astronomical image reduction pipeline."
doc_url: https://simmer.readthedocs.io
dev_url: https://github.com/arjunsavel/SImMER
extra:
recipe-maintainers:
- arjunsavel
| |
Add CircleCI configuration to repo | machine:
java:
version: openjdk8
dependencies:
pre:
- echo y | android -s update sdk -u -a -t "tools"
- mkdir -p $ANDROID_HOME/licenses
- echo -e "8933bad161af4178b1185d1a37fbf41ea5269c55" > $ANDROID_HOME/licenses/android-sdk-license
- echo -e "84831b9409646a918e30573bab4c9c91346d8abd" > $ANDROID_HOME/licenses/android-sdk-preview-license
- $ANDROID_HOME/tools/bin/sdkmanager "platform-tools"
- $ANDROID_HOME/tools/bin/sdkmanager "extras;m2repository;com;android;support;constraint;constraint-layout;1.0.2"
- $ANDROID_HOME/tools/bin/sdkmanager "extras;m2repository;com;android;support;constraint;constraint-layout-solver;1.0.2"
cache_directories:
- /usr/local/android-sdk-linux/build-tools/25.0.3
- /usr/local/android-sdk-linux/tools/
override:
- ./gradlew app:dependencies
compile:
override:
- ./gradlew clean assembleStandardRelease -PdisablePreDex
test:
override:
- ./gradlew testStandardRelease -PdisablePreDex | |
Add daemonset that uses Canonical provided installer on GKE. | # Copyright 2017 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
apiVersion: apps/v1
kind: DaemonSet
metadata:
name: nvidia-driver-installer
namespace: kube-system
labels:
k8s-app: nvidia-driver-installer
spec:
selector:
matchLabels:
k8s-app: nvidia-driver-installer
updateStrategy:
type: RollingUpdate
template:
metadata:
labels:
name: nvidia-driver-installer
k8s-app: nvidia-driver-installer
spec:
affinity:
nodeAffinity:
requiredDuringSchedulingIgnoredDuringExecution:
nodeSelectorTerms:
- matchExpressions:
- key: cloud.google.com/gke-accelerator
operator: Exists
tolerations:
- key: "nvidia.com/gpu"
effect: "NoSchedule"
operator: "Exists"
volumes:
- name: dev
hostPath:
path: /dev
- name: boot
hostPath:
path: /boot
- name: root-mount
hostPath:
path: /
initContainers:
- image: ubuntu-nvidia-installer:fixed
name: nvidia-driver-installer
resources:
requests:
cpu: 0.15
securityContext:
privileged: true
volumeMounts:
- name: boot
mountPath: /boot
- name: dev
mountPath: /dev
- name: root-mount
mountPath: /root
containers:
- image: "gcr.io/google-containers/pause:2.0"
name: pause
| |
Update from Hackage at 2021-09-01T01:52:08Z | homepage: https://github.com/iand675/thread-utils#readme
changelog-type: markdown
hash: a8435240bfc0ae96c94704d2986699a11a395c496f9a7f2e5f5d729a0b967549
test-bench-deps:
base: '>=4.7 && <5'
ghc-prim: -any
thread-utils-finalizers: -any
maintainer: ian@iankduncan.com
synopsis: Perform finalization for threads.
changelog: |
# Changelog for thread-finalizers
## Unreleased changes
basic-deps:
base: '>=4.7 && <5'
ghc-prim: -any
all-versions:
- 0.1.0.0
author: Ian Duncan
latest: 0.1.0.0
description-type: markdown
description: |
# thread-finalizers
license-name: BSD-3-Clause
| |
Add build script for Travis CI | language: csharp
solution: ./Extension Library/Extension Library.sln
install:
- nuget restore ./Extension Library/Extension Library.sln
- nuget install xunit.runners -Version 1.9.2 -OutputDirectory testrunner
script:
- xbuild /p:Configuration=Release ./Extension Library/Extension Library.sln
- mono ./testrunner/xunit.runners.1.9.2/tools/xunit.console.clr4.exe ./Extension_Library.Tests/bin/Release/MyProject.Tests.dll
branches:
only:
- master | |
Update from Hackage at 2019-09-16T22:38:25Z | homepage: https://github.com/andys8/network-manager-tui#readme
changelog-type: ''
hash: 6509358d96588d8b9d19daa28b33f5b76b1a06cc0c1ab6c88d4c2bd0c168cd22
test-bench-deps:
split: -any
base: ! '>=4.7 && <5'
hspec: -any
vty: -any
process: -any
network-manager-tui-lib: -any
microlens: -any
brick: -any
vector: -any
maintainer: andys8@users.noreply.github.com
synopsis: network-manager tui
changelog: ''
basic-deps:
split: -any
base: ! '>=4.7 && <5'
vty: -any
process: -any
network-manager-tui-lib: -any
microlens: -any
brick: -any
vector: -any
all-versions:
- 0.1.0.0
author: andys8
latest: 0.1.0.0
description-type: markdown
description: |
# network-manager-tui
A simple network manager command-line tool

## Dependencies
`nmcli` has to be installed
## Installation
### Clone repository
```sh
git clone https://github.com/andys8/network-manager-tui.git
cd network-manager-tui
stack install
```
## Usage
```sh
nmt
```
license-name: BSD-3-Clause
| |
Stop gs-pages from running under ci | # CircleCI by default tests every push to every branch. This mean automated release from source to gp-pages will trigger CI testing gp-pages.
# we disable that. Peace and quiet: https://circleci.com/docs/1.0/configuration/#branches
# this also prevents pull requests from triggering the CI and thrashing e.g. the crowdin translation logic in our main circle.yml
# gp-pages is cleared out on every push, so we can't manually put a circle.yml there. So we piggy back off static/ being copied over to gp-pages during deployment.
general:
branches:
ignore:
- gh-pages
| |
Add linting as a CI step | name: CI
on:
push:
branches: [ main ]
pull_request:
branches: [ main ]
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
jobs:
lint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Use Node.js v14.15.4
uses: actions/setup-node@v1
with:
node-version: '14.15.4'
- name: Install packages and run tests
run: |
yarn
yarn lint
| |
Add Github Actions as defined in jaraco/skeleton | name: Automated Tests
on: [push, pull_request]
jobs:
test:
strategy:
matrix:
python: [3.6, 3.7, 3.8]
platform: [ubuntu-latest, macos-latest, windows-latest]
runs-on: ${{ matrix.platform }}
steps:
- uses: actions/checkout@v2
- name: Setup Python
uses: actions/setup-python@v1
with:
python-version: ${{ matrix.python }}
- name: Install tox
run: |
python -m pip install tox
- name: Run tests
run: tox
release:
needs: test
if: github.event_name == 'push' && contains(github.ref, 'refs/tags/')
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Setup Python
uses: actions/setup-python@v1
with:
python-version: 3.8
- name: Install tox
run: |
python -m pip install tox
- name: Release
run: tox -e release
env:
TWINE_PASSWORD: ${{ secrets.PYPI_TOKEN }}
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
| |
Update from Hackage at 2022-09-09T17:19:06Z | homepage: https://github.com/TristanCacqueray/haskell-xstatic#readme
changelog-type: markdown
hash: f4903d1e5900c074553bb7b5078b22c08d95983ad01be010d4533ece0cd35eb6
test-bench-deps: {}
maintainer: tdecacqu@redhat.com
synopsis: XStatic adapter for servant
changelog: |
# Changelog
## 0.1.0
- Initial release
basic-deps:
base: <5
servant-server: -any
servant: -any
xstatic: -any
all-versions:
- 0.1.0
author: Tristan Cacqueray
latest: 0.1.0
description-type: haddock
description: Use this library to serve XStatic through servant.
license-name: BSD-3-Clause
| |
Add GitHub actions for PyPI release. | name: Publish tfds-nightly to PyPI
on:
# Event manually triggered (in the Github UI)
workflow_dispatch:
inputs:
git-ref:
description: Git ref (e.g. SHA or tag) (Optional)
default: ""
required: false
jobs:
publish-job:
# Prevents action from running on forks.
if: github.repository == 'tensorflow/datasets'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
ref: ${{ github.event.inputs.git-ref }}
- name: Setup Python.
uses: actions/setup-python@v2
with:
python-version: 3.7
- name: Install deps.
run: pip install setuptools wheel twine
- name: Create the source distribution and build the wheels.
run: |
python setup.py sdist --nightly
python setup.py bdist_wheel --nightly
- name: Publish the package.
run: |
twine check dist/*
twine upload dist/*
env:
TWINE_USERNAME: __token__
TWINE_PASSWORD: ${{ secrets.NIGHTLY_PYPI_PASSWORD }}
| |
Set up CI with Azure Pipelines | # Maven
# Build your Java project and run tests with Apache Maven.
# Add steps that analyze code, save build artifacts, deploy, and more:
# https://docs.microsoft.com/azure/devops/pipelines/languages/java
trigger:
- master
pool:
vmImage: 'Ubuntu-16.04'
steps:
- task: Maven@3
inputs:
mavenPomFile: 'pom.xml'
mavenOptions: '-Xmx3072m'
javaHomeOption: 'JDKVersion'
jdkVersionOption: '1.11'
jdkArchitectureOption: 'x64'
publishJUnitResults: false
testResultsFiles: '**/surefire-reports/TEST-*.xml'
goals: 'package'
| |
Migrate "At least one ELB node is not responding" monitor to Prometheus | # Source: paas-metrics
---
- type: replace
path: /instance_groups/name=prometheus2/jobs/name=prometheus2/properties/prometheus/custom_rules?/-
value:
name: AWSELBUnhealthyNodes
rules:
- alert: AWSELBUnhealthyNodes_Warning
expr: max_over_time(paas_aws_elb_unhealthy_node_count[5m]) > 0
labels:
severity: warning
notify: email
annotations:
summary: "At least one ELB node is not responding"
description: Requests to the healthcheck app via {{ $value | printf \"%.0f\" }} of the ELB IP addresses failed.
url: https://team-manual.cloud.service.gov.uk/incident_management/responding_to_alerts/#intermittent-elb-failures
| |
Update from Hackage at 2019-11-07T23:12:39Z | homepage: ''
changelog-type: ''
hash: 15c4ecc86d6d71a93adf99b5893d7b5587c31f88f405d95fd47b8f3d3b16d27d
test-bench-deps: {}
maintainer: strake888@gmail.com
synopsis: Store of values of arbitrary types
changelog: ''
basic-deps:
base-unicode-symbols: -any
base: ! '>=4.7 && <5'
containers: -any
util: -any
key: -any
all-versions:
- 0.1.0.0
author: M Farkas-Dyck
latest: 0.1.0.0
description-type: markdown
description: |
# key-vault
license-name: BSD-3-Clause
| |
Add release note for Aggregate[Core|Ram|Disk]Filter change | ---
upgrade:
- |
Starting in Ocata, there is a behavior change where aggregate-based
overcommit ratios will no longer be honored during scheduling for the
FilterScheduler. Instead, overcommit values must be set on a
per-compute-node basis in the Nova configuration files.
If you have been relying on per-aggregate overcommit, during your upgrade,
you must change to using per-compute-node overcommit ratios in order for
your scheduling behavior to stay consistent. Otherwise, you may notice
increased NoValidHost scheduling failures as the aggregate-based overcommit
is no longer being considered.
You can safely remove the AggregateCoreFilter, AggregateRamFilter, and
AggregateDiskFilter from your ``[filter_scheduler]enabled_filters`` and you
do not need to replace them with any other core/ram/disk filters. The
placement query in the FilterScheduler takes care of the core/ram/disk
filtering, so CoreFilter, RamFilter, and DiskFilter are redundant.
Please see the mailing list thread for more information:
http://lists.openstack.org/pipermail/openstack-operators/2018-January/014748.html
| |
Update from Hackage at 2017-10-06T22:47:37Z | homepage: https://github.com/masterdezign/hmep#readme
changelog-type: ''
hash: e9232c9c15db26c3afb3a65e0bb7f2f5db560ce68801385fbbabb11b62970060
test-bench-deps:
base: -any
HUnit: -any
containers: -any
hmep: -any
vector: -any
maintainer: dev at penkovsky dot com
synopsis: ! 'HMEP Multi Expression Programming –
a genetic programming variant'
changelog: ''
basic-deps:
mersenne-random-pure64: -any
base: ! '>=4.7 && <5'
containers: -any
hmep: -any
random: -any
monad-mersenne-random: -any
hmatrix: -any
vector: -any
all-versions:
- '0.0.0'
author: Bogdan Penkovsky
latest: '0.0.0'
description-type: markdown
description: ! "# Multi Expression Programming\n\nYou say, Haskell has not enough
machine learning libraries?\n\nHere is yet another one!\n\n## History\n\nThere exist
many other Genetic Algorithm (GA) Haskell packages.\nPersonally I have used\n[simple
genetic algorithm](http://hackage.haskell.org/package/moo),\n[GA](http://hackage.haskell.org/package/moo),\nand
[moo](http://hackage.haskell.org/package/moo) for quite a long time.\nThe last package
was the most preferred, but the other two are\nalso great.\n\nHowever, when I came
up with this\n[MEP paper](http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.5.4352&rep=rep1&type=pdf),\nto
my surprise there was no MEP realization in Haskell.\nSoon I realized that existing
GA packages are limited,\nand it would be more efficient to implement MEP from scratch.\n\nThat
is how this package was started. I also wish to say thank you\nto the authors of
the [moo](http://hackage.haskell.org/package/moo) \nGA library, which inspired the
present \n[hmep](http://github.com/masterdezign/hmep) package.\n\n## About MEP\n\nMulti
Expression Programming is a genetic programming variant encoding multiple\nsolutions
in the same chromosome. A chromosome is a computer program.\nEach gene is featuring
[code reuse](https://en.wikipedia.org/wiki/Code_reuse).\nFor more details, please
check http://mepx.org/papers.html and\nhttps://en.wikipedia.org/wiki/Multi_expression_programming.\n"
license-name: BSD3
| |
Create GitHub Action to build+test on push or PR | # This workflow will build a Java project with Gradle and cache/restore any dependencies to improve the workflow execution time
# For more information see: https://help.github.com/actions/language-and-framework-guides/building-and-testing-java-with-gradle
name: Java CI with Gradle
on:
push:
branches: [ "master" ]
pull_request:
branches: [ "master" ]
permissions:
contents: read
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- name: Set up JDK 11
uses: actions/setup-java@v3
with:
java-version: '11'
distribution: 'temurin'
- name: Build with Gradle
uses: gradle/gradle-build-action@67421db6bd0bf253fb4bd25b31ebb98943c375e1
with:
arguments: build
| |
Add bob.db.frgc recipe [skip appveyor] | {% set version = "2.0.3" %}
package:
name: bob.db.frgc
version: {{ version }}
source:
fn: bob.db.frgc-{{ version }}.zip
md5: 078eb06307c7ef586ec7564ec5efdad5
url: https://pypi.python.org/packages/source/b/bob.db.frgc/bob.db.frgc-{{ version }}.zip
build:
number: 0
skip: true # [not linux]
script: python -B setup.py install --single-version-externally-managed --record record.txt
requirements:
build:
- python
- setuptools
- six
- bob.db.base
- bob.db.verification.utils
run:
- python
- six
- bob.db.base
- bob.db.verification.utils
test:
commands:
- nosetests -sv bob.db.frgc
imports:
- bob
- bob.db
- bob.db.frgc
requires:
- nose
about:
home: http://github.com/bioidiap/bob.db.frgc
license: GNU General Public License v3 (GPLv3)
summary: Database Access API of the Face Recognition Grand Challenge (FRGC) ver2.0 image database for Bob
extra:
recipe-maintainers:
- 183amir
| |
Send code coverage reports to Codecov | osx_image: xcode9.2
language: objective-c
xcode_workspace: Iterable-iOS-SDK.xcworkspace
xcode_scheme: Iterable-iOS-SDKTests
xcode_sdk: iphonesimulator
before_install:
- gem install xcpretty xcpretty-travis-formatter
script:
- xcodebuild test -sdk iphonesimulator -destination 'platform=iOS Simulator,name=iPhone X' -workspace Iterable-iOS-SDK.xcworkspace -scheme Iterable-iOS-SDKTests CODE_SIGNING_REQUIRED=NO | xcpretty -f `xcpretty-travis-formatter`
- pod lib lint | osx_image: xcode9.2
language: objective-c
xcode_workspace: Iterable-iOS-SDK.xcworkspace
xcode_scheme: Iterable-iOS-SDKTests
xcode_sdk: iphonesimulator
before_install:
- gem install xcpretty xcpretty-travis-formatter
script:
- xcodebuild test -sdk iphonesimulator -destination 'platform=iOS Simulator,name=iPhone X' -workspace Iterable-iOS-SDK.xcworkspace -scheme Iterable-iOS-SDKTests CODE_SIGNING_REQUIRED=NO GCC_INSTRUMENT_PROGRAM_FLOW_ARCS=YES GCC_GENERATE_TEST_COVERAGE_FILES=YES | xcpretty -f `xcpretty-travis-formatter`
- pod lib lint
after_success:
- bash <(curl -s https://codecov.io/bash) |
Fix issue with no NAT on Travis | ---
# Test IPv4 network
subnetwork_ipv4: '192.168.128.1'
| ---
# Test IPv4 network
subnetwork_ipv4: '192.168.128.1/24'
# Test IPv6 network
subnetwork_ipv6: [ '2001:db8:1234:5678::1/64' ]
# NAT is disabled on Travis-CI
# https://github.com/travis-ci/travis-ci/issues/1341
subnetwork_ipv4_nat: False
|
Test against latest Ruby 2.1 release | before_install: gem update bundler
bundler_args: --without development
language: ruby
rvm:
- 1.9.2
- 1.9.3
- 2.0.0
- 2.1.0
- rbx-2
- ruby-head
matrix:
include:
allow_failures:
- rvm: ruby-head
fast_finish: true
script: bundle exec rake default
| before_install: gem update bundler
bundler_args: --without development
language: ruby
rvm:
- 1.9.2
- 1.9.3
- 2.0.0
- 2.1
- rbx-2
- ruby-head
matrix:
include:
allow_failures:
- rvm: rbx-2
- rvm: ruby-head
fast_finish: true
script: bundle exec rake default
|
Add configuration file for Travis | language: python
python:
- "2.6"
- "2.7"
- "3.2"
- "3.3"
- "3.4"
- "3.5"
| |
Add config for Travis CI | language: python
python:
- "2.5"
- "2.6"
- "2.7"
install:
- pip install unittest2 mock nose --use-mirrors
- pip install . --use-mirrors
script: nosetests
| |
Add integration to Travis CI | language: node_js
notifications:
email:
on_success: never
on_failure: change
node_js:
- stable
before_install:
- npm install -g typings gulp && typings install
script:
- npm test
| |
Update from Hackage at 2019-04-16T15:24:45Z | homepage: https://github.com/chessai/dura
changelog-type: markdown
hash: d89958cf89ffd463465ffa348720447498a5f6a7bf5749ddf3295b903b1d9f23
test-bench-deps: {}
maintainer: chessai <chessai1996@gmail.com>
synopsis: durable/atomic file system writes (from rio package)
changelog: |-
# Changelog
`dura` uses [PVP Versioning][1].
The changelog is available [on GitHub][2].
0.0.0
=====
* Initially created.
[1]: https://pvp.haskell.org
[2]: https://github.com/chessai/dura/releases
basic-deps:
bytestring: ! '>=0.10 && <0.11'
base: ! '>=4.10.1.0 && <4.13'
filepath: ! '>=1.4 && <1.5'
directory: ! '>=1.3 && <1.4'
all-versions:
- '0.1'
author: chessai
latest: '0.1'
description-type: markdown
description: |-
# dura
[](https://hackage.haskell.org/package/dura)
[](LICENSE)
durable/atomic file system writes (from rio package)
license-name: BSD-3-Clause
| |
Add some Rust related CI jobs | name: Rust
on:
pull_request: {}
push:
branches:
- master
jobs:
cargo_bloat:
strategy:
fail-fast: false
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Install Rust
uses: actions-rs/toolchain@v1
with:
toolchain: stable
- name: Run cargo bloat
uses: orf/cargo-bloat-action@v1
with:
token: ${{ secrets.GITHUB_TOKEN }}
rustfmt:
strategy:
fail-fast: false
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Install Rust
uses: actions-rs/toolchain@v1
with:
toolchain: stable
- name: Run rustfmt
run: |
git ls-files '*.rs' | xargs rustfmt --check
| |
Add Staticman comment from JoeHx | _id: 9eee6ff0-0536-11e9-9a4e-a50effefcdf5
replyto: entry1545375446
msg: "Hi Kim!\r\n\r\nPrint-on-demand is where the product isn't printed until a sale is made. In the case of t-shirts, I provide a design to be printed on the t-shirt, and then when the website sells a t-shirt with that design, they'll print the design on a t-shirt and ship it to the customer. I don't have to keep an inventory of any physical products at all, and the website (Redbubble, Amazon, etc) shares the profit of the sale with me."
name: JoeHx
email: e591f3aa7c8cbd7dd9e3389958949ed3
url: 'http://hendrixjoseph.github.io/'
date: 1545406777
| |
Use Appveyor for Windows CI | install:
- ps: Start-FileDownload 'https://static.rust-lang.org/dist/rust-nightly-i686-pc-windows-gnu.exe'
- rust-nightly-i686-pc-windows-gnu.exe /VERYSILENT /NORESTART /DIR="C:\Program Files (x86)\Rust"
- SET PATH=%PATH%;C:\Program Files (x86)\Rust\bin
- rustc -V
- cargo -V
- git submodule update --init --recursive
build: false
test_script:
- cargo test --verbose
| |
Update from Hackage at 2016-09-16T13:51:51+0000 | homepage: ''
changelog-type: ''
hash: c4e4a28c405311df73a5c398bdd7303a6a0c5e436a9fc9bbeed66a237faf552d
test-bench-deps: {}
maintainer: haskelldevelopers@chordify.net
synopsis: Functionality for reporting function progress.
changelog: ''
basic-deps:
base: ! '>=4.6 && <4.10'
time: ! '>=1.4 && <1.7'
mtl: ! '>=2.1 && <2.3'
deepseq: ! '>=1.3 && <1.5'
all-versions:
- '1.0.0'
author: Jeroen Bransen
latest: '1.0.0'
description-type: haddock
description: ! 'This module provides a set of functions for writing
functions that report their own progress in a monadic
context, which can be evaluated to get progress reports
while time-consuming functions are running.'
license-name: LGPL-3
| |
Set up CI with Azure Pipelines | # Jekyll site
# Package your Jekyll site using the jekyll/builder Docker container image.
# Add steps that build, test, save build artifacts, deploy, and more:
# https://aka.ms/yaml
trigger:
- master
pool:
vmImage: 'ubuntu-latest'
steps:
- task: Docker@0
displayName: 'Run Jekyll'
inputs:
containerRegistryType: 'Container Registry'
action: 'Run an image'
imageName: 'jekyll/builder:latest'
volumes: |
$(build.sourcesDirectory):/srv/jekyll
$(build.binariesDirectory):/srv/jekyll/_site
containerCommand: 'jekyll build --future'
detached: false
| |
Update from Hackage at 2019-01-10T15:26:48Z | homepage: https://github.com/vabal/vabal
changelog-type: markdown
hash: 1bfabb8cdac506262f636743f211b45a3710d9f3ecb06421927d75543be07137
test-bench-deps:
Cabal: -any
base: -any
containers: -any
vabal-lib: -any
maintainer: franciman12@gmail.com
synopsis: Core algorithms and datatypes used by vabal
changelog: |
# Revision history for vabal-lib
## 2.0.0 -- 2019-01-10
* First version. Released on an unsuspecting world.
basic-deps:
bytestring: ! '>=0.10.8 && <0.11'
Cabal: ! '>=2.2 && <2.5'
base: ! '>=4.11 && <4.13'
containers: ! '>=0.5.11 && <0.7'
cassava: ! '>=0.5.1 && <0.6'
vector: ! '>=0.12.0 && <0.13'
all-versions:
- 2.0.0
author: Francesco Magliocca
latest: 2.0.0
description-type: markdown
description: |
This is the library powering [vabal](https://github.com/vabal/vabal).
For rendered documentation, see http://hackage.haskell.org/package/vabal-lib
or generate the haddock by running:
> $ cabal new-haddock
in the directory where you downloaded `vabal-lib`.
license-name: MIT
| |
Configure Insolvency Service legacy domain on transition | ---
site: bis_insolvency
whitehall_slug: insolvency-service
title: The Insolvency Service
redirection_date: 30th April 2014
homepage: https://www.gov.uk/government/organisations/insolvency-service
tna_timestamp: 20140111212538
host: www.insolvency.gov.uk
furl: www.gov.uk/insolvency-service
aliases:
- insolvency.gov.uk
global: =410
| |
Update from Forestry.io - Updated Forestry configuration | ---
new_page_extension: md
auto_deploy: false
admin_path:
webhook_url:
sections:
- type: jekyll-pages
label: Pages
create: all
- type: jekyll-posts
label: Posts
create: all
upload_dir: uploads
public_path: "/uploads"
front_matter_path: ''
use_front_matter_path: false
file_template: ":filename:"
build:
preview_command: bundle exec jekyll build --drafts --unpublished --future -d _site
publish_command: bundle exec jekyll build -d _site
preview_env:
- JEKYLL_ENV=staging
publish_env:
- JEKYLL_ENV=production
preview_output_directory: _site
output_directory: _site
instant_preview_command: bundle exec jekyll serve --drafts --unpublished --future
--port 8080 --host 0.0.0.0 -d _site
| |
Add HHVM to test targets | language: php
env:
- DB=sqlite
php:
- 5.3
- 5.4
- 5.5
before_script:
- cd tests
script: phpunit
| language: php
env:
- DB=sqlite
php:
- 5.3
- 5.4
- 5.5
- hhvm
before_script:
- cd tests
script: phpunit
|
Update from Hackage at 2016-01-17T08:35:31+0000 | homepage: http://github.com/mstksg/configurator-export
changelog-type: markdown
hash: ad6d0ac555b0f6d1851895a7803fcc22dba5395a3b94ddf3e9d3e850d44c9a86
test-bench-deps:
base: -any
configurator-export: -any
maintainer: justin@jle.im
synopsis: ! 'Pretty printer and exporter for configurations from
the "configurator" library.'
changelog: ! '0.1.0.0
-------
<https://github.com/mstksg/configurator-export/releases/tag/v0.1.0.0>
* Initial release!
'
basic-deps:
base: ! '>=4.7 && <5'
configurator: -any
unordered-containers: -any
text: -any
semigroups: -any
pretty: -any
all-versions:
- '0.1.0.0'
author: Justin Le
latest: '0.1.0.0'
description-type: markdown
description: ! "configurator-export\n===================\n\nPretty printers and exporters
for 'Config's from the great\n*[configurator](http://hackage.haskell.org/package/configurator)*
library.\n\nAll results are intended to be valid parsing files in the configuration
file\nsyntax of the library.\n\nFor a full round trip:\n\n~~~haskell\nmain = do\n
\ cfg <- load [Required \"config.cfg\"]\n writeConf \"config.cfg\" cfg\n~~~\n\nThis
should load the config file, parse it, and then re-export it, rewriting\nthe original
config file. The result should be an identical configuration\nfile (with keys potentially
re-arranged and re-sorted, comments removed, etc.)\n\nCan also export/print any
`HashMap Name Value`, in the form exported from a\n`Config` using `getMap`. Modify
a map yourself to dynically\ngenerate/customize configuration files!\n\nSample output:\n\n~~~haskell\nfoo
{\n bar {\n baz1 = true\n baz2 = [1, 0.6, \"hello\", true]\n
\ }\n aardvark = \"banana\"\n monkey = [true, false, 1.9e-3]\n zebra
\ = 24\n}\n\nfoo2 {\n bar = 8.1e-8\n}\n\napple = [\"cake\", true]\norange
\ = 8943\n~~~\n\nFurther configuration on sorting of keys, displaying of bools and
floats, etc.\nis possible by passing in custom `ConfStyle` style option values.\n\n\n"
license-name: BSD3
| |
Add ec2 example with regions and stuff | plugin: aws_ec2
regions:
- us-west-1
groups:
ec2: true
keyed_groups:
- prefix: ""
separator: ""
key: placement.region
- prefix: "unsafe_region_"
separator: ""
key: placement.region
unsafe: true
| |
Update CI configs to v0.7.3 | name: "CodeQL"
on:
workflow_dispatch:
schedule:
- cron: '23 5 * * 0'
jobs:
analyze:
name: Analyze
runs-on: ubuntu-latest
permissions:
actions: read
contents: read
security-events: write
steps:
- name: Checkout repo
uses: actions/checkout@v3
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
with:
languages: 'go'
- name: CodeQL Analysis
uses: github/codeql-action/analyze@v2
| |
Update from Hackage at 2020-01-09T21:05:00Z | homepage: https://github.com/ejconlon/blanks#readme
changelog-type: ''
hash: 7c2c3c7f8752503226abc1125394cbc389e598240a87d31c72dbaa4163f955e5
test-bench-deps:
base: ! '>=4.12 && <5'
tasty-discover: -any
adjunctions: ! '>=4.4'
distributive: ! '>=0.6'
containers: ! '>=0.6'
mtl: ! '>=2.2'
tasty-hunit: -any
tasty: -any
blanks: -any
maintainer: ejconlon@gmail.com
synopsis: Fill-in-the-blanks - A library factoring out substitution from ASTs
changelog: ''
basic-deps:
base: ! '>=4.12 && <5'
adjunctions: ! '>=4.4'
distributive: ! '>=0.6'
containers: ! '>=0.6'
mtl: ! '>=2.2'
blanks: -any
all-versions:
- 0.3.0
author: Eric Conlon
latest: 0.3.0
description-type: markdown
description: |
# blanks
[](https://circleci.com/gh/ejconlon/blanks/tree/master)
Fill-in-the-blanks - A library factoring out substitution from ASTs. (Not production ready!)
license-name: BSD-3-Clause
| |
Add a GitHub Packages Release workflow | ---
jobs:
github_packages:
runs-on: "ubuntu-latest"
steps:
- uses: "actions/checkout@v1"
- uses: "actions/setup-ruby@v1"
with:
ruby-version: "2.7"
- name: "Publish to GitHub"
uses: "jstastny/publish-gem-to-github@v1.1"
with:
owner: "envylabs"
token: "${{ secrets.GITHUB_TOKEN }}"
name: "Release"
"on":
push:
tags:
- "v*"
| |
Support for the HDArea.co tracker. | ---
site: hdarea
name: HDArea
language: en-us
links:
- http://www.hdarea.co/
caps:
categories:
402: TV #TV Series
403: TV #TV Shows
410: Movies/HD #FullHD
410: TV/HD #FullHD
411: Movies/HD #HD
411: TV/HD #HD
413: Movies/HD #HDTV
413: TV/HD #HDTV
401: Movies/HD #BluRay
401: TV/HD #BluRay
414: TV/SD #DVD
414: Movies/DVD #DVD
415: Movies/BluRay #Movies/TV Remux
416: Movies/3D
412: TV/WEB-DL #Web-dl
412: Movies/WEBDL #Web-dl
417: TV/OTHER #iPad
417: Movies/Other #iPad
404: TV/Documentary
405: TV/Anime
406: Audio/Video #Music Videos
407: TV/Sport
409: Other/Misc
408: Audio/Lossless
modes:
search: [q]
login:
path: /takelogin.php
method: post
inputs:
username: "{{ .Config.username }}"
password: "{{ .Config.password }}"
error:
path: /takelogin.php
message:
selector: td.text
test:
path: /messages.php
search:
path: /torrents.php
inputs:
$raw: "search={{ .Query.Keywords }}"
rows:
selector: table.torrents tbody tr.nonstick_outer_bg
fields:
category:
selector: td:nth-child(1) > a:nth-child(1)
attribute: href
filters:
- name: regexp
args: "\\?cat=(\\d+)"
title:
selector: td:nth-child(2) table.torrentname tbody td a
details:
selector: td:nth-child(1) > a:nth-child(1)
attribute: href
download:
selector: td:nth-child(4) > a:nth-child(1)
attribute: href
size:
selector: td:nth-child(5)
date:
selector: td:nth-child(4) > span:nth-child(1)
attribute: title
seeders:
selector: td:nth-child(6)
leechers:
selector: td:nth-child(7) | |
Update from Hackage at 2017-07-16T09:37:33Z | homepage: https://github.com/jystic/transformers-bifunctors
changelog-type: markdown
hash: 67d1d3a860f8db3cc87983c76f79cb24078b8d837f6b46e331a3038ab0b8fbc6
test-bench-deps: {}
maintainer: Jacob Stanley <jacob@stanley.io>
synopsis: Bifunctors over monad transformers.
changelog: ! '## Version 0.1 (2017-07-16)
* First version. Released on an unsuspecting world.
'
basic-deps:
base: ! '>=3 && <5'
mmorph: ! '>=1.0 && <1.2'
transformers: ! '>=0.3 && <0.6'
all-versions:
- '0.1'
author: Jacob Stanley
latest: '0.1'
description-type: markdown
description: ! "transformers-bifunctors [![Hackage][hackage-shield]][hackage]\n=======================\n\nBifunctors
over monad transformers.\n\n[hackage]:\n http://hackage.haskell.org/package/transformers-bifunctors\n[hackage-shield]:\n
\ https://img.shields.io/hackage/v/transformers-bifunctors.svg\n"
license-name: BSD3
| |
Migrate project Nailgun off AppVeyor CI | version: 2.1
orbs:
win: circleci/windows@2.2.0
windows_environment: &windows_environment
PLATFORM: "windows"
NAILGUN_ROOT: "C:\\Users\\circleci\\nailgun"
jobs:
windows_build_test:
environment:
<<: *windows_environment
working_directory: "C:\\Users\\circleci\\nailgun"
executor: win/default
steps:
- checkout
- run:
name: Install OpenJDK8
command: choco install adoptopenjdk8
shell: cmd.exe
- run:
name: Install Maven
command: choco install maven -y -f -i
shell: cmd.exe
- run:
name: Build Script
command: cd %NAILGUN_ROOT% && refreshenv && mvn package
shell: cmd.exe
- run:
name: Test Script
command: cd %NAILGUN_ROOT% && refreshenv && python nailgun-client\py\test_ng.py
shell: cmd.exe
workflows:
version: 2.1
windows_jobs:
jobs:
windows_build_test
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.