code
stringlengths 38
801k
| repo_path
stringlengths 6
263
|
|---|---|
site_name: doing CLI for Azure DevOps
site_description: 'Speed up work with Azure Devops'
edit_uri: ''
copyright: 'Copyright © 2021'
repo_url: https://github.com/ing-bank/doing-cli
site_url: 'https://ing-bank.github.io/doing-cli/'
theme:
name: material
palette:
# Light mode
- media: "(prefers-color-scheme: light)"
scheme: default
primary: white
accent: deep orange
toggle:
icon: material/toggle-switch-off-outline
name: Switch to dark mode
# Dark mode
- media: "(prefers-color-scheme: dark)"
scheme: slate
primary: deep orange
accent: deep orange
toggle:
icon: material/toggle-switch
name: Switch to light mode
font: false
icon:
logo: octicons/terminal-16
repo: fontawesome/brands/github
favicon: assets/images/cli.png
features:
- navigation.tracking
- navigation.sections
- header.autohide
- navigation.tabs
- navigation.top
nav:
- index.md
- Getting started:
- get_started/install.md
- get_started/setup_auth.md
- get_started/setup_project.md
- How to:
- Workflow new issues: howto/workflow_new_item.md
- Workflow existing issues: howto/workflow_existing_item.md
- Workflow sprints: howto/workflow_sprints.md
- Workflow bulk edit issues: howto/workflow_bulk_edit_items.md
- Python scripting: howto/python_scripting.md
- Shell completion: howto/command_completion.md
- Connection problems: howto/connection_problems.md
- Commands:
- init: reference/manual/init.md
- list: reference/manual/list.md
- issue:
- create: reference/manual/issue_create.md
- close: reference/manual/issue_close.md
- list: reference/manual/issue_list.md
- pr:
- create: reference/manual/pr_create.md
- close: reference/manual/pr_close.md
- checkout: reference/manual/pr_checkout.md
- list: reference/manual/pr_list.md
- workon: reference/manual/workon.md
- open: reference/manual/open.md
- Config:
- Config file: config/config_file.md
- Using env vars: config/env_config.md
- Discussion:
- One Project Setup: discussion/oneproject_setup.md
- contributing.md
- discussion/upgrading.md
plugins:
- search
- macros:
module_name: src/doing/utils
# Markdown extensions used to render the docs pages.
markdown_extensions:
  - abbr
  - meta
  - admonition
  - pymdownx.keys
  - pymdownx.highlight
  - pymdownx.inlinehilite
  - pymdownx.snippets
  - pymdownx.details
  - pymdownx.tabbed
  - pymdownx.emoji:
      emoji_index: !!python/name:materialx.emoji.twemoji
      emoji_generator: !!python/name:materialx.emoji.to_svg
      options:
        custom_icons:
          - site/overrides/.icons
  # NOTE(review): 'pymdownx.superfences' was previously listed twice — once
  # bare and once with config. Duplicate list entries register the extension
  # twice and make it ambiguous which settings apply; only the configured
  # entry is kept.
  - pymdownx.superfences:
      custom_fences:
        - name: mermaid
          class: mermaid
          format: !!python/name:pymdownx.superfences.fence_div_format
extra_javascript:
- 'https://unpkg.com/mermaid@8.8.4/dist/mermaid.min.js'
- 'assets/js/termynal.js'
- 'assets/js/custom.js'
extra_css:
- 'assets/css/termynal.css'
- 'assets/css/custom.css'
|
mkdocs.yml
|
api_name: []
items:
- children:
- azure.mgmt.web.models.MySqlMigrationType.local_to_remote
- azure.mgmt.web.models.MySqlMigrationType.remote_to_local
class: azure.mgmt.web.models.MySqlMigrationType
fullName: azure.mgmt.web.models.MySqlMigrationType
inheritance:
- inheritance:
- type: builtins.object
type: builtins.str
- inheritance:
- type: builtins.object
type: enum.Enum
langs:
- python
module: azure.mgmt.web.models
name: MySqlMigrationType
source:
id: MySqlMigrationType
path: azure-mgmt-web\azure\mgmt\web\models\web_site_management_client_enums.py
remote:
branch: master
path: azure-mgmt-web\azure\mgmt\web\models\web_site_management_client_enums.py
repo: https://github.com/Azure/azure-sdk-for-python.git
startLine: 480
summary: 'An enumeration.
'
syntax: {}
type: class
uid: azure.mgmt.web.models.MySqlMigrationType
- class: azure.mgmt.web.models.MySqlMigrationType
fullName: azure.mgmt.web.models.MySqlMigrationType.local_to_remote
langs:
- python
module: azure.mgmt.web.models
name: local_to_remote
syntax:
content: local_to_remote = 'LocalToRemote'
type: attribute
uid: azure.mgmt.web.models.MySqlMigrationType.local_to_remote
- class: azure.mgmt.web.models.MySqlMigrationType
fullName: azure.mgmt.web.models.MySqlMigrationType.remote_to_local
langs:
- python
module: azure.mgmt.web.models
name: remote_to_local
syntax:
content: remote_to_local = 'RemoteToLocal'
type: attribute
uid: azure.mgmt.web.models.MySqlMigrationType.remote_to_local
references:
- fullName: azure.mgmt.web.models.MySqlMigrationType.local_to_remote
isExternal: false
name: local_to_remote
parent: azure.mgmt.web.models.MySqlMigrationType
uid: azure.mgmt.web.models.MySqlMigrationType.local_to_remote
- fullName: azure.mgmt.web.models.MySqlMigrationType.remote_to_local
isExternal: false
name: remote_to_local
parent: azure.mgmt.web.models.MySqlMigrationType
uid: azure.mgmt.web.models.MySqlMigrationType.remote_to_local
|
docs-ref-autogen/azure-mgmt-web/azure.mgmt.web.models.MySqlMigrationType.yml
|
name: build
on: [push]
jobs:
black:
name: black
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@master
- uses: docker://kiwicom/black
with:
args: black --check --diff .
pylint:
name: pylint
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@master
- uses: actions/setup-python@v1
with:
python-version: '3.7'
- run: pip install poetry
- run: poetry add pylint
- run: poetry install
- run: poetry run pylint src/schemathesis
mypy:
name: mypy
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@master
- uses: docker://kiwicom/mypy
with:
args: mypy src/schemathesis
py36:
name: py36
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@master
- uses: actions/setup-python@v1
with:
python-version: '3.6'
- run: pip install tox coverage
- run: tox -e py36
- run: coverage combine
- run: coverage report
- run: coverage xml -i
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v1.0.2
with:
token: ${{secrets.CODECOV_TOKEN}}
file: ./coverage.xml
flags: unittests
name: codecov-py36
py37:
name: py37
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@master
- uses: actions/setup-python@v1
with:
python-version: '3.7'
- run: pip install tox coverage
- run: tox -e py37
- run: coverage combine
- run: coverage report
- run: coverage xml -i
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v1.0.2
with:
token: ${{secrets.CODECOV_TOKEN}}
file: ./coverage.xml
flags: unittests
name: codecov-py37
py38:
name: py38
runs-on: ubuntu-latest
steps:
- run: sudo add-apt-repository ppa:deadsnakes/ppa
- run: sudo apt-get update
- run: sudo apt-get install -y --no-install-recommends python3.8-dev python3.8-distutils
- uses: actions/checkout@master
- uses: actions/setup-python@v1
with:
python-version: '3.7'
- run: pip install tox coverage
- run: tox -e py38
- run: coverage combine
- run: coverage report
- run: coverage xml -i
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v1.0.2
with:
token: ${{secrets.CODECOV_TOKEN}}
file: ./coverage.xml
flags: unittests
name: codecov-py38
|
.github/workflows/build.yml
|
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
name: spostgres
namespace: scheduler
spec:
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 10Gi
---
apiVersion: v1
kind: Service
metadata:
name: spostgres
namespace: scheduler
spec:
selector:
name: spostgres
type: NodePort
ports:
- name: "5432"
port: 5432
targetPort: 5432
nodePort: 30432
protocol: TCP
---
apiVersion: apps/v1
kind: Deployment
metadata:
name: spostgres
namespace: scheduler
annotations:
description: pg数据库
spec:
selector:
matchLabels:
name: spostgres
replicas: 1
strategy:
type: Recreate
template:
metadata:
labels:
name: spostgres
spec:
containers:
- name: spostgres
image: postgres:12.5
imagePullPolicy: IfNotPresent
env:
- name: POSTGRES_USER
value: postgres
- name: POSTGRES_PASSWORD
value: <PASSWORD>
- name: POSTGRES_DB
value: db_task
- name: PGDATA
value: /var/lib/postgresql/data/pgdata
livenessProbe:
tcpSocket:
port: 5432
initialDelaySeconds: 30
timeoutSeconds: 5
successThreshold: 1
failureThreshold: 5
periodSeconds: 10
readinessProbe:
tcpSocket:
port: 5432
initialDelaySeconds: 30
timeoutSeconds: 5
successThreshold: 1
failureThreshold: 5
periodSeconds: 10
resources: { }
volumeMounts:
- mountPath: /var/lib/postgresql/data
name: spostgres
- mountPath: /etc/localtime
name: localtime
readOnly: true
- mountPath: /dev/shm
name: dshm
volumes:
- persistentVolumeClaim:
claimName: spostgres
name: spostgres
- hostPath:
path: /etc/localtime
name: localtime
- name: dshm
emptyDir:
medium: Memory
sizeLimit: 1Gi
---
apiVersion: v1
kind: PersistentVolumeClaim
metadata:
name: sredis
namespace: scheduler
spec:
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 5Gi
---
apiVersion: v1
kind: Service
metadata:
name: sredis
namespace: scheduler
spec:
selector:
name: sredis
type: ClusterIP
ports:
- name: sredis
port: 6379
targetPort: 6379
protocol: TCP
---
apiVersion: apps/v1
kind: Deployment
metadata:
name: sredis
namespace: scheduler
annotations:
description: redis缓存
spec:
selector:
matchLabels:
name: sredis
replicas: 1
strategy:
type: Recreate
template:
metadata:
labels:
name: sredis
spec:
containers:
- name: sredis
image: redis:alpine
imagePullPolicy: IfNotPresent
args:
- --appendonly
- 'yes'
- --requirepass
- '<PASSWORD>'
livenessProbe:
tcpSocket:
port: 6379
initialDelaySeconds: 30
timeoutSeconds: 5
successThreshold: 1
failureThreshold: 5
periodSeconds: 10
# Readiness: PING the server via redis-cli and require a "PONG" reply.
# NOTE(review): the probe authenticates with the hard-coded password "12345",
# while --requirepass above is set from a different (redacted/templated)
# value — the probe will fail unless the two actually match. Confirm they
# come from the same secret.
readinessProbe:
exec:
command:
- /bin/sh
- -i
- -c
- test "$(redis-cli -h 127.0.0.1 -a 12345 ping)" == "PONG"
failureThreshold: 3
initialDelaySeconds: 5
periodSeconds: 10
successThreshold: 1
timeoutSeconds: 1
resources: { }
volumeMounts:
- mountPath: /data
name: sredis
volumes:
- persistentVolumeClaim:
claimName: sredis
name: sredis
|
.k3s/db.yml
|
# apps/v1beta2 was removed in Kubernetes 1.16; apps/v1 (GA since 1.9) accepts
# the identical Deployment spec — selector.matchLabels is already declared
# below, which is the only field apps/v1 additionally requires.
apiVersion: apps/v1
kind: Deployment
metadata:
name: openpitrix-hyperpitrix-deployment
namespace: ${NAMESPACE}
labels:
app: openpitrix
component: openpitrix-hyperpitrix
version: ${VERSION}
spec:
selector:
matchLabels:
app: openpitrix
component: openpitrix-hyperpitrix
replicas: 1
template:
metadata:
labels:
app: openpitrix
component: openpitrix-hyperpitrix
version: ${VERSION}
spec:
initContainers:
- name: wait-mysql
image: ${BUSYBOX}
imagePullPolicy: IfNotPresent
command: ['sh', '-c', 'until nc -z ${DB_SERVICE} 3306; do echo "waiting for mysql"; sleep 2; done;']
- name: wait-etcd
image: ${BUSYBOX}
imagePullPolicy: IfNotPresent
command: ['sh', '-c', 'until nc -z ${ETCD_SERVICE} 2379; do echo "waiting for etcd"; sleep 2; done;']
hostAliases:
- ip: 127.0.0.1
hostnames:
- openpitrix-task-manager
- openpitrix-runtime-manager
- openpitrix-repo-indexer
- openpitrix-repo-manager
- openpitrix-job-manager
- openpitrix-isv-manager
- openpitrix-cluster-manager
- openpitrix-attachment-manager
- openpitrix-category-manager
- openpitrix-app-manager
- openpitrix-api-gateway
- openpitrix-rp-manager
containers:
- name: hyperpitrix
image: ${IMAGE}
imagePullPolicy: ${IMAGE_PULL_POLICY}
command:
- hyperpitrix
# One containerPort per OpenPitrix micro-service multiplexed into this single
# hyperpitrix process (hostAliases above maps every service hostname to
# 127.0.0.1 so they all resolve in-pod).
ports:
- containerPort: 9100
name: api-gateway
- containerPort: 9102
name: app-manager
# NOTE(review): "cateogory" is a typo for "category". Port names may be
# referenced by Service targetPort fields not visible here, so the rename
# must be coordinated with those manifests — flagged, not changed.
- containerPort: 9108
name: cateogory-mgr
- containerPort: 9122
name: attachment-mgr
- containerPort: 9104
name: cluster-mgr
- containerPort: 9118
name: isv-mgr
- containerPort: 9106
name: job-mgr
- containerPort: 9101
name: repo-mgr
# NOTE(review): containerPort 9108 is already declared above (cateogory-mgr).
# Duplicate port/protocol pairs in one container are rejected by the API
# server — confirm the correct repo-indexer port against upstream defaults.
- containerPort: 9108
name: repo-indexer
- containerPort: 9121
name: rp-mgr
- containerPort: 9103
name: runtime-mgr
- containerPort: 9107
name: task-mgr
env:
- name: OPENPITRIX_GRPC_SHOW_ERROR_CAUSE
value: "${GRPC_SHOW_ERROR_CAUSE}"
- name: OPENPITRIX_LOG_LEVEL
value: ${OPENPITRIX_LOG_LEVEL}
- name: OPENPITRIX_ETCD_ENDPOINTS
value: "${OPENPITRIX_ETCD_ENDPOINTS}"
- name: OPENPITRIX_MYSQL_HOST
value: "${OPENPITRIX_MYSQL_HOST}"
- name: OPENPITRIX_ATTACHMENT_ENDPOINT
value: "${OPENPITRIX_ATTACHMENT_ENDPOINT}"
- name: OPENPITRIX_ATTACHMENT_BUCKET_NAME
value: "${OPENPITRIX_ATTACHMENT_BUCKET_NAME}"
- name: OPENPITRIX_MYSQL_PASSWORD
valueFrom:
secretKeyRef:
key: password.txt
name: mysql-pass
resources:
limits:
cpu: ${CPU_LIMITS}m
memory: ${MEMORY_LIMITS}Mi
requests:
cpu: ${CPU_REQUESTS}m
memory: ${MEMORY_REQUESTS}Mi
|
roles/openpitrix/files/openpitrix/hyperpitrix-kubernetes/openpitrix/openpitrix-hyperpitrix.yaml
|
name: Build and Test
on:
pull_request:
branches: [ master ]
env:
dotnet-version: 6.0.x # SDK Version for building Dafny
jobs:
singletons:
runs-on: ubuntu-18.04
steps:
- name: Setup dotnet
uses: actions/setup-dotnet@v1.9.0
with:
dotnet-version: ${{env.dotnet-version}}
- name: Checkout Dafny
uses: actions/checkout@v2
with:
path: dafny
- name: Restore tools
working-directory: dafny
run: dotnet tool restore
- name: Get XML
id: boogieVersion
uses: QwerMike/xpath-action@v1
with:
filename: 'dafny/Source/Dafny/DafnyPipeline.csproj'
expression: "//PackageReference[@Include='Boogie.ExecutionEngine']/@Version"
- uses: actions-ecosystem/action-regex-match@v2
id: regex-match
with:
text: ${{ steps.boogieVersion.outputs.result }}
regex: ' Version="([\d\.]+)"'
- name: Attempt custom Boogie patch
working-directory: dafny
run: git apply customBoogie.patch
- name: Checkout Boogie
uses: actions/checkout@v2
with:
repository: boogie-org/boogie
path: dafny/boogie
ref: v${{ steps.regex-match.outputs.group1 }}
- name: Build Dafny with local Boogie
working-directory: dafny
run: dotnet build Source/Dafny.sln
- name: Check whitespace and style
working-directory: dafny
run: dotnet tool run dotnet-format -w -s error --check Source/Dafny.sln --exclude Dafny/Scanner.cs --exclude Dafny/Parser.cs
- name: Create NuGet package (just to make sure it works)
run: dotnet pack --no-build dafny/Source/Dafny.sln
check-deep-tests:
uses: ./.github/workflows/check-deep-tests-reusable.yml
with:
branch: master
integration-tests:
needs: check-deep-tests
if: always() && (needs.check-deep-tests.result == 'success' || contains(github.event.pull_request.labels.*.name, 'run-deep-tests'))
uses: ./.github/workflows/integration-tests-reusable.yml
with:
# By default run only on one platform, but run on all platforms if the PR has the "run-deep-tests"
# label, and skip checking the nightly build above.
# This is the best way to fix an issue in master that was only caught by the nightly build.
all_platforms: ${{ contains(github.event.pull_request.labels.*.name, 'run-deep-tests') }}
num_shards: 5
|
.github/workflows/msbuild.yml
|
title: arloou
# 网站标题后缀
suffix: website
# 作者名称(会显示在首页文章下面)
author: Luzz
# 首页简介(显示在首页顶部Logo的那句话)
home_describe: 一個無聊的網站
# 网站关键字
keyword: luzz,arloou,blog
is_article_img: true #false是否开启文章背景图显示(随机图片)
# 网站描述
web_description: 描述
# 导航名称
menus_title:
home: 首頁
archive: 歸檔
categories: 分類
tags: 標籤
links: 鏈接
about: 關於
# 导航 (不建议修改)
menus:
home: /
categories: /categories
tags: /tags
archive: /archives
about: /about
# logo
logo: https://cdn.jsdelivr.net/gh/luzizheng/images@master/img/avatar.png
# 网页标签上那个小图标 一定要用本地存储
icon: /image/favicon.ico
# 每个页面上面的图表
topIcon:
homeIcon: "https://cdn.jsdelivr.net/gh/luzizheng/images@master/img/drink_tea.png"
categoriesIcon: "https://cdn.jsdelivr.net/gh/luzizheng/images@master/img/milktea_icon.png"
archivesIcon: "https://cdn.jsdelivr.net/gh/luzizheng/images@master/img/wine_icon.png"
tagIcon: "https://cdn.jsdelivr.net/gh/luzizheng/images@master/img/juhua_icon.png"
linksIcon: "https://cdn.jsdelivr.net/gh/luzizheng/images@master/img/avatar.png"
# 标签页显示多少个标签
tagsNum: 1000
# 网页访问统计
web_analytics:
enable: false #需要改为true
baidu: 4b5fe1472f22fa # (替换)百度统计的 Key,参见 https://tongji.baidu.com/sc-web/10000033910/home/site/getjs?siteId=13751376 代码获取中 hm.js? 后边的字符串
# 底部显示的图标(github 或者其他)
bottomIcon:
#可以多个
- {
name: 'email',
iconLink: 'https://cdn.jsdelivr.net/gh/luzizheng/images@master/img/email-fill.png',
toUrl: 'mailto:<EMAIL>'
}
# - {
# #描述名称
# name: 'Github',
# #图标
# iconLink: 'https://cdn.jsdelivr.net/gh/luzizheng/images@master/img/github-fill.png',
# #跳转链接
# toUrl: 'https://github.com/luzizheng'
# }
# - {
# name: '微博',
# iconLink: 'https://cdn.jsdelivr.net/gh/luzizheng/images@master/img/weibo-circle-fill.png',
# toUrl: 'https://weibo.com/u/1806986113'
# }
# - {
# name: 'Instagram',
# iconLink: 'https://cdn.jsdelivr.net/gh/luzizheng/images@master/img/instagram-fill.png',
# toUrl: 'https://www.instagram.com/luzizheng/'
# }
# - {
# name: '知乎',
# iconLink: 'https://cdn.jsdelivr.net/gh/luzizheng/images@master/img/zhihu-circle-fill.png',
# toUrl: 'https://www.zhihu.com/people/frank-lu-73'
# }
# - {
# name: '微信',
# iconLink: 'https://cdn.jsdelivr.net/gh/luzizheng/images@master/img/WeChat.png',
# toUrl: ''
# }
# - {
# name: '网易云音乐',
# iconLink: 'https://cdn.jsdelivr.net/gh/luzizheng/images@master/img/neteasymusic_icon.png',
# toUrl: 'https://music.163.com/#/user/home?id=18065540'
# }
# 友情链接
linksList:
- {
title: 'Gmail',
intro: '<EMAIL>',
link: 'mailto:<EMAIL>',
image: 'https://cdn.jsdelivr.net/gh/luzizheng/images@master/img/Mail.png'
}
- {
title: '微信',
intro: '@arloou',
link: 'https://cdn.jsdelivr.net/gh/luzizheng/images@master/img/2241638164989_.pic.jpg',
image: 'https://cdn.jsdelivr.net/gh/luzizheng/images@master/img/social-wechat.png'
}
- {
title: '新浪微博',
intro: '@_Luzz_',
link: 'https://weibo.com/u/1806986113/',
image: 'https://cdn.jsdelivr.net/gh/luzizheng/images@master/img/social-weibo.png'
}
- {
title: 'GitHub',
intro: '@luzizheng',
link: 'https://github.com/luzizheng',
image: 'https://cdn.jsdelivr.net/gh/luzizheng/images@master/img/social-github.png'
}
- {
title: 'Instagram',
intro: '@luzizheng',
link: 'https://www.instagram.com/luzizheng/',
image: 'https://cdn.jsdelivr.net/gh/luzizheng/images@master/img/Instagram-Logo.png'
}
- {
title: 'Bandcamp',
intro: '@luzz0616',
link: 'https://bandcamp.com/luzz0616',
image: 'https://cdn.jsdelivr.net/gh/luzizheng/images@master/img/bandcamp.png'
}
# 文字内点击图片放大相关配置
picture:
# 蒙版的颜色也就是 打开图片背景 显示的颜色
colour: '#FFFDD0'
# 透明度
opacity: 0.5
# 图片角度 就是显示图片的角度
imgRadius: "10px"
#默认的cover (默认缩略图) 在没有填写(这里可以替换(可以是api也可以是固定的图))
default_cover: https://api.ixiaowai.cn/gqapi/gqapi.php
|
luzizheng.github.io/themes/quiet/_config.yml
|
name: 'build-test'
on: # rebuild any PRs and main branch changes
pull_request:
push:
branches:
- main
- 'releases/*'
jobs:
build: # make sure build/ci work properly
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- run: |
npm install
- run: |
npm run all
test: # make sure the action works on a clean machine without building
runs-on: ubuntu-latest
env:
FAROS_API_URL: https://dev.api.faros.ai
FAROS_DRY_RUN: 1
steps:
- uses: actions/checkout@v2
# ::set-output was deprecated and is disabled by GitHub Actions; step outputs
# must be written to the $GITHUB_OUTPUT file instead.
- name: Set build start time
  id: set_start_time
  run: |
    # Current time in milliseconds: https://serverfault.com/a/151112
    echo "started-at=$(date +%s%N | cut -b1-13)" >> "$GITHUB_OUTPUT"
- name: Set build end time
  id: set_end_time
  run: |
    # Current time in milliseconds: https://serverfault.com/a/151112
    echo "ended-at=$(date +%s%N | cut -b1-13)" >> "$GITHUB_OUTPUT"
- uses: ./
name: Send CI event to Faros using commit
id: send-ci-event-commit
with:
api-key: fake_key
event: CI
run-status: ${{ job.status }}
run-started-at: ${{ steps.set_start_time.outputs.started-at }}
run-ended-at: ${{ steps.set_end_time.outputs.ended-at }}
- uses: ./
name: Send CI event to Faros using artifact
id: send-ci-event-artifact
with:
api-key: fake_key
event: CI
artifact: ${{ format('Docker://farosai/emitter/{0}', github.sha) }}
run-status: ${{ job.status }}
run-started-at: ${{ steps.set_start_time.outputs.started-at }}
run-ended-at: ${{ steps.set_end_time.outputs.ended-at }}
- uses: ./
name: Send CD event to Faros using commit
id: send-cd-event-commit
with:
api-key: fake_key
event: CD
deploy: ${{ format('CodeDeploy://Emitter/Prod/{0}', github.run_id) }}
deploy-status: Success
deploy-started-at: ${{ steps.set_start_time.outputs.started-at }}
deploy-ended-at: ${{ steps.set_end_time.outputs.ended-at }}
run-status: ${{ job.status }}
- uses: ./
name: Send CD event to Faros using artifact
id: send-cd-event-artifact
with:
api-key: fake_key
event: CD
deploy: ${{ format('CodeDeploy://Emitter/Prod/{0}', github.run_id) }}
deploy-status: Success
deploy-started-at: ${{ steps.set_start_time.outputs.started-at }}
deploy-ended-at: ${{ steps.set_end_time.outputs.ended-at }}
artifact: ${{ format('Docker://farosai/emitter/{0}', github.sha) }}
run-status: ${{ job.status }}
|
.github/workflows/test.yml
|
version: '3.5'
volumes:
apollo-data:
postgres-data:
services:
oauthproxy:
image: veupathdb/oauthproxy:${OAUTH_TAG:-latest}
environment:
OIDCProviderMetadataURL: ${OIDCProviderMetadataURL:-https://eupathdb.org/oauth/discovery}
OIDCRedirectURI: ${OIDCRedirectURI:-http://apollo-dev.apidb.org/annotator/openid/eupathdb}
OIDCClientID: ${OIDCClientID}
OIDCClientSecret: ${OIDCClientSecret}
OIDCCryptoPassphrase: ${OIDCCryptoPassphrase}
networks:
- internal
- traefik
labels:
- "traefik.docker.network=traefik"
- "traefik.http.routers.${ROUTER:-apollo-dev}.rule=Host(`${DOMAIN:-apollo-dev.apidb.org}`)"
- "traefik.http.routers.${ROUTER:-apollo-dev}.tls=true"
- "traefik.http.routers.${ROUTER:-apollo-dev}.entrypoints=websecure"
- "traefik.http.services.${ROUTER:-apollo-dev}.loadbalancer.server.port=${PROXY_PORT:-80}"
- "com.centurylinklabs.watchtower.enable=${OAUTH_WATCHTOWER:-false}"
apollo:
image: veupathdb/apollo:${APOLLO_TAG:-latest}
volumes:
- apollo-data:/data
environment:
WEBAPOLLO_DB_HOST: ${WEBAPOLLO_DB_HOST:-postgres}
WEBAPOLLO_DB_PASSWORD: ${WEBAPOLLO_DB_PASSWORD}
CHADO_DB_HOST: ${CHADO_DB_HOST:-postgres}
CHADO_DB_PASSWORD: ${CHADO_DB_PASSWORD}
APOLLO_ADMIN_PASSWORD: ${APOLLO_ADMIN_PASSWORD}
WEBAPOLLO_START_POSTGRES: ${WEBAPOLLO_START_POSTGRES:-false}
WEBAPOLLO_REMOTE_USER_AUTH: ${WEBAPOLLO_REMOTE_USER_AUTH:-false}
labels:
- "traefik.docker.network=traefik"
- "traefik.http.middlewares.ipwhitelist-${ROUTER:-apollo-dev}.ipwhitelist.sourcerange=${IPWHITELIST}"
- "traefik.http.routers.admin-${ROUTER:-apollo-dev}.rule=Host(`${ADMIN_DOMAIN:-apollo-dev.local.apidb.org}`)"
- "traefik.http.routers.admin-${ROUTER:-apollo-dev}.tls=true"
- "traefik.http.routers.admin-${ROUTER:-apollo-dev}.entrypoints=local"
- "traefik.http.routers.api-${ROUTER:-apollo-dev}.middlewares=ipwhitelist-${ROUTER:-apollo-dev}"
- "traefik.http.routers.api-${ROUTER:-apollo-dev}.rule=Host(`${API_DOMAIN:-apollo-dev-api.local.apidb.org}`)"
- "traefik.http.routers.api-${ROUTER:-apollo-dev}.tls=true"
- "traefik.http.routers.api-${ROUTER:-apollo-dev}.entrypoints=websecure"
- "traefik.http.services.api-${ROUTER:-apollo-dev}.loadbalancer.server.port=${API_PORT:-8080}"
- "com.centurylinklabs.watchtower.enable=${APOLLO_WATCHTOWER:-false}"
depends_on:
- postgres
networks:
- internal
- traefik
postgres:
image: veupathdb/postgres-apollo:${POSTGRES_TAG:-latest}
volumes:
- postgres-data:/var/lib/postgresql/data
environment:
WEBAPOLLO_DB_HOST: ${WEBAPOLLO_DB_HOST:-postgres}
WEBAPOLLO_DB_PASSWORD: ${WEBAPOLLO_DB_PASSWORD}
CHADO_DB_HOST: ${CHADO_DB_HOST:-postgres}
CHADO_DB_PASSWORD: ${CHADO_DB_PASSWORD}
WEBAPOLLO_DB_NAME: ${WEBAPOLLO_DB_NAME:-apollo}
WEBAPOLLO_DB_USERNAME: ${WEBAPOLLO_DB_USERNAME:-apollo}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD}
labels:
- "traefik.enable=false"
- "com.centurylinklabs.watchtower.enable=${POSTGRES_WATCHTOWER:-false}"
networks:
- internal
networks:
internal:
external: false
traefik:
external: true
|
docker-compose.yml
|
apiVersion: apps/v1
kind: Deployment
metadata:
labels:
app: jbserver
name: jbserver
namespace: jb
annotations:
kubernetes.io/change-cause: "增加定单读取与用户信息读取断路器功能"
spec:
revisionHistoryLimit: 10
replicas: 3
strategy:
type: RollingUpdate
rollingUpdate:
maxSurge: 25%
maxUnavailable: 25%
selector:
matchLabels:
app: jbserver
template:
metadata:
labels:
app: jbserver
actorSystemName: jbserver
annotations:
k8s.aliyun.com/eci-use-specs: "ecs.c5.large"
k8s.aliyun.com/eci-spot-strategy: "SpotAsPriceGo"
k8s.aliyun.com/eci-with-eip: "true"
spec:
volumes:
- name: jb-pvc-nas
persistentVolumeClaim:
claimName: jb-pvc-nas
- name: workdir
emptyDir: {}
containers:
- name: jbserver
image: registry-vpc.cn-hangzhou.aliyuncs.com/xinghan-docker/jbserver:latest
livenessProbe:
httpGet:
path: /alive
port: management
periodSeconds: 10
failureThreshold: 5
initialDelaySeconds: 20
readinessProbe:
httpGet:
path: /ready
port: management
periodSeconds: 10
failureThreshold: 3
initialDelaySeconds: 10
ports:
- name: http
containerPort: 30000
protocol: TCP
- name: remote
containerPort: 25520
protocol: TCP
- name: proxy
containerPort: 30001
protocol: TCP
- name: management
containerPort: 8558
protocol: TCP
volumeMounts:
- name: jb-pvc-nas
mountPath: "/alllog"
- name: workdir
mountPath: "/root"
env:
- name: REQUIRED_CONTACT_POINT_NR
value: "3"
---
apiVersion: v1
kind: Service
metadata:
name: jbserver-service
namespace: jb
spec:
selector:
app: jbserver
ports:
- name: http
port: 30000
protocol: TCP
targetPort: http
type: ClusterIP
---
# NOTE(review): extensions/v1beta1 Ingress is deprecated and was removed in
# Kubernetes 1.22. Before upgrading the cluster past 1.21, migrate to
# networking.k8s.io/v1 (requires `pathType` and the `backend.service.name` /
# `backend.service.port` structure).
apiVersion: extensions/v1beta1
kind: Ingress
metadata:
name: jbserver-ingress
namespace: jb
spec:
rules:
- host: jbserver.61week.com
http:
paths:
- path: /
backend:
serviceName: jbserver-service
servicePort: 30000
---
kind: Role
apiVersion: rbac.authorization.k8s.io/v1
metadata:
name: pod-reader
namespace: jb
rules:
- apiGroups: [""] # "" indicates the core API group
resources: ["pods"]
verbs: ["get", "watch", "list"]
---
# Grants the pod-reader Role to the default ServiceAccount of namespace "jb".
kind: RoleBinding
apiVersion: rbac.authorization.k8s.io/v1
metadata:
  name: read-pods
  namespace: jb
subjects:
  # The original subject was `kind: User` with the service-account user name
  # and no apiGroup; rbac.authorization.k8s.io/v1 requires
  # `apiGroup: rbac.authorization.k8s.io` on User subjects, so that form was
  # rejected by validation. Binding to the ServiceAccount directly is the
  # canonical equivalent and needs no apiGroup.
  - kind: ServiceAccount
    name: default
    namespace: jb
roleRef:
  kind: Role
  name: pod-reader
  apiGroup: rbac.authorization.k8s.io
|
kubernetes/akka-cluster-pro.yaml
|
GoetasWebservices\Client\SalesforceEnterprise\Sobject\RelationshipInfoType:
properties:
childSobject:
expose: true
access_type: public_method
serialized_name: ChildSobject
xml_element:
namespace: 'urn:sobject.enterprise.soap.sforce.com'
accessor:
getter: getChildSobject
setter: setChildSobject
type: GoetasWebservices\Client\SalesforceEnterprise\Sobject\EntityDefinitionType
childSobjectId:
expose: true
access_type: public_method
serialized_name: ChildSobjectId
xml_element:
namespace: 'urn:sobject.enterprise.soap.sforce.com'
accessor:
getter: getChildSobjectId
setter: setChildSobjectId
type: string
durableId:
expose: true
access_type: public_method
serialized_name: DurableId
xml_element:
namespace: 'urn:sobject.enterprise.soap.sforce.com'
accessor:
getter: getDurableId
setter: setDurableId
type: string
field:
expose: true
access_type: public_method
serialized_name: Field
xml_element:
namespace: 'urn:sobject.enterprise.soap.sforce.com'
accessor:
getter: getField
setter: setField
type: GoetasWebservices\Client\SalesforceEnterprise\Sobject\FieldDefinitionType
fieldId:
expose: true
access_type: public_method
serialized_name: FieldId
xml_element:
namespace: 'urn:sobject.enterprise.soap.sforce.com'
accessor:
getter: getFieldId
setter: setFieldId
type: string
isCascadeDelete:
expose: true
access_type: public_method
serialized_name: IsCascadeDelete
xml_element:
namespace: 'urn:sobject.enterprise.soap.sforce.com'
accessor:
getter: getIsCascadeDelete
setter: setIsCascadeDelete
type: bool
isDeprecatedAndHidden:
expose: true
access_type: public_method
serialized_name: IsDeprecatedAndHidden
xml_element:
namespace: 'urn:sobject.enterprise.soap.sforce.com'
accessor:
getter: getIsDeprecatedAndHidden
setter: setIsDeprecatedAndHidden
type: bool
isRestrictedDelete:
expose: true
access_type: public_method
serialized_name: IsRestrictedDelete
xml_element:
namespace: 'urn:sobject.enterprise.soap.sforce.com'
accessor:
getter: getIsRestrictedDelete
setter: setIsRestrictedDelete
type: bool
junctionIdListNames:
expose: true
access_type: public_method
serialized_name: JunctionIdListNames
xml_element:
namespace: 'urn:sobject.enterprise.soap.sforce.com'
accessor:
getter: getJunctionIdListNames
setter: setJunctionIdListNames
type: array<string>
xml_list:
inline: false
entry_name: names
skip_when_empty: true
namespace: 'urn:enterprise.soap.sforce.com'
relationshipDomains:
expose: true
access_type: public_method
serialized_name: RelationshipDomains
xml_element:
namespace: 'urn:sobject.enterprise.soap.sforce.com'
accessor:
getter: getRelationshipDomains
setter: setRelationshipDomains
type: GoetasWebservices\Client\SalesforceEnterprise\Types\QueryResultType
|
metadata/Sobject.RelationshipInfoType.yml
|
file_version: stressor#0
# The run configuration defines how the `sequences` are executed in parallel
# sessions.
config:
name: Test WebDAV
# tag: Not set (pass `-o tag:TAG_NAME` to override)
details: |
This scenario runs some tests against a WebDAV server.
We use it to test stressor against a locally running WsgiDAV server:
- Open a terminal and run
$ wsgidav --root tests/stressor/htdocs --host 127.0.0.1 --port 8082 --auth anonymous --no-config
- Open a second terminal and run
$ stressor run tests/stressor/test_rw -q
verbose: 3
base_url: http://127.0.0.1:8082
timeout: 1
# Initial context value definitions.
# All used parameters must be listed here in the form:
# parameter_name: default_value
# Set `null` value to make it mandatory, i.e. must be passed by caller.
context:
sessions:
users: $load(users.yaml)
count: 10
# basic_auth: true
# Define what actions should be performed by every session
scenario:
- sequence: init
- sequence: main
# repeat: 3
duration: 30
- sequence: end
# List of named action sequences. Used as building blocks for scenarios
sequences:
# 'init' is the reserved name for the set-up sequence.
init:
# Log-in
- activity: HTTPRequest
url: /
method: OPTIONS
assert_match_headers: ".*'DAV'.*"
- activity: GetRequest
url: /mock_login_response.json
assert_json:
result.user_guid: "{abc123}"
store_json:
user_guid: "result.user_guid"
# - activity: $debug()
# Other sections can have arbitrary names and are executed in order of appearance
main:
# Test dirbrowser for root URL
- activity: GetRequest
url: /
assert_match: ".*Index of /.*"
assert_html:
"//*[@class='logo']": true
# Plain GET (all sessions read the same file)
- activity: GetRequest
url: /private/test.html
auth: $(user.auth)
# assert_match: ".*Secret text.*"
monitor: true
# debug: true
# PUT + GET (all sessions create their own file)
- activity: PutRequest
url: /temp/wsgidav_test_file~$(session_id).txt
data: "Test"
assert_max_time: 0.5
monitor: true
# debug: true
- activity: GetRequest
url: /temp/wsgidav_test_file~$(session_id).txt
assert_match: "Test"
monitor: true
# 'end' is the reserved name for the tear-down sequence
end:
- activity: $sleep(0.1)
- activity: DeleteRequest
url: /temp/wsgidav_test_file~$(session_id).txt
|
tests/stressor/test_rw.yaml
|
server:
port: ${services.orca.port:8083}
address: ${services.orca.host:localhost}
default:
bake:
account: default
securityGroups: []
vpc:
securityGroups: []
front50:
enabled: true
baseUrl: ${services.front50.baseUrl:http://localhost:8080}
tide:
baseUrl: http://localhost:8090
oort:
baseUrl: ${services.clouddriver.baseUrl:http://localhost:7002}
mort:
baseUrl: ${services.clouddriver.baseUrl:http://localhost:7002}
kato:
baseUrl: ${services.clouddriver.baseUrl:http://localhost:7002}
echo:
enabled: true
baseUrl: ${services.echo.baseUrl:http://localhost:8089}
igor:
enabled: true
baseUrl: ${services.igor.baseUrl:http://localhost:8088}
bakery:
enabled: true
baseUrl: ${services.rosco.baseUrl:http://localhost:8087}
kayenta:
enabled: ${services.kayenta.enabled:false}
baseUrl: ${services.kayenta.baseUrl:http://localhost:8090}
redis:
connection: ${services.redis.baseUrl:redis://localhost:6379}
keiko:
queue:
fillExecutorEachCycle: true
redis:
queueName: orca.task.queue
deadLetterQueueName: orca.task.deadLetterQueue
tasks:
useWaitForAllNetflixAWSInstancesDownTask: false
executionWindow:
timezone: ${global.spinnaker.timezone:America/Los_Angeles}
logging:
config: classpath:logback-defaults.xml
resilience4j.retry:
instances:
# TODO(rz): These defaults were just copied from the resilience4j website. They still need to be tuned.
default:
maxRetryAttempts: 2
waitDuration: 10s
enableExponentialBackoff: false
# katoRetrofitServiceWriter is any write operation to Clouddriver using Retrofit.
katoRetrofitServiceWriter:
retryExceptions:
- retrofit.RetrofitError
integrations:
gremlin:
enabled: false
baseUrl: https://api.gremlin.com/v1
# This configuration lets you configure Webhook stages that will appear as native stages in the Deck UI.
# Properties that are set here will not be displayed in the GUI
#webhook:
# preconfigured:
# - label: Some Webhook
# description: This is a webhook stage, but it appears as a native stage in Spinnaker
# type: customWebhook # Should be unique
# # The following properties are all optional:
# enabled: true # default true
# url: https://my.webhook.com/event
# customHeaders:
# Accept:
# - application/json
# AnotherHeader:
# - valueOne
# - valueTwo
# method: POST
# payload: |
# {
# "text": "Version ${trigger.buildInfo.artifacts[0].version} deployed"
# }
# failFastStatusCodes:
# - 404
# - 501
# signalCancellation: true
# cancelEndpoint: https://my.webhook.com/cancel
# cancelMethod: POST
# cancelPayload: "{}"
# waitForCompletion: true
# # The rest of the properties are only used if waitForCompletion == true
# statusUrlResolution: webhookResponse # getMethod, locationHeader, webhookResponse
# statusUrlJsonPath: $.statusUrl # Only used if statusUrlResolution == webhookResponse
# statusJsonPath: $.status
# progressJsonPath: $.progress
# successStatuses: SUCCESS,COMPLETED
# canceledStatuses: CANCELED
# terminalStatuses: TERMINATED
# - label: Another Webhook
# description: This is also a webhook stage, but it has no properties set
# type: anotherCustomWebhook
# enabled: false
# This configuration lets you configure runJob stages that will appear as native stages in the Deck UI.
# Properties that are set here will not be displayed in the GUI
# job:
# preconfigured:
# - label: Some runJob
# description: This is a runJob stage, but it appears as a native stage in Spinnaker
# type: customRunJob # Should be unique
# cloudProvider: titus
# cluster:
# imageId: alpine:latest
# credentials: test
# region: us-east-1
# enabled: true # default true
# waitForCompletion: true
# parameters:
# - name: source
# mapping: cluster.env.source
# defaultValue: mysource
# order: 0
# - name: destination
# mapping: cluster.env.destination
# defaultValue: final
# order: 1
|
orca-web/config/orca.yml
|
items:
- uid: '@azure/cosmos.LocationRouting'
name: LocationRouting
fullName: LocationRouting
children:
- '@azure/cosmos.LocationRouting.clearRouteToLocation'
- '@azure/cosmos.LocationRouting.ignorePreferredLocation'
- '@azure/cosmos.LocationRouting.locationEndpointToRoute'
- '@azure/cosmos.LocationRouting.locationIndexToRoute'
- '@azure/cosmos.LocationRouting.routeToLocation_1'
- '@azure/cosmos.LocationRouting.routeToLocation'
langs:
- typeScript
type: class
summary: ''
package: '@azure/cosmos'
- uid: '@azure/cosmos.LocationRouting.clearRouteToLocation'
name: clearRouteToLocation()
children: []
type: method
langs:
- typeScript
summary: ''
syntax:
content: function clearRouteToLocation()
parameters: []
package: '@azure/cosmos'
- uid: '@azure/cosmos.LocationRouting.ignorePreferredLocation'
name: ignorePreferredLocation
fullName: ignorePreferredLocation
children: []
langs:
- typeScript
type: property
summary: ''
syntax:
content: 'ignorePreferredLocation: boolean'
return:
type:
- boolean
package: '@azure/cosmos'
- uid: '@azure/cosmos.LocationRouting.locationEndpointToRoute'
name: locationEndpointToRoute
fullName: locationEndpointToRoute
children: []
langs:
- typeScript
type: property
summary: ''
syntax:
content: 'locationEndpointToRoute: string'
return:
type:
- string
package: '@azure/cosmos'
- uid: '@azure/cosmos.LocationRouting.locationIndexToRoute'
name: locationIndexToRoute
fullName: locationIndexToRoute
children: []
langs:
- typeScript
type: property
summary: ''
syntax:
content: 'locationIndexToRoute: number'
return:
type:
- number
package: '@azure/cosmos'
- uid: '@azure/cosmos.LocationRouting.routeToLocation_1'
name: 'routeToLocation(number, boolean)'
children: []
type: method
langs:
- typeScript
summary: ''
syntax:
content: >-
function routeToLocation(locationIndex: number, ignorePreferredLocation:
boolean)
parameters:
- id: locationIndex
type:
- number
description: ''
- id: ignorePreferredLocation
type:
- boolean
description: ''
package: '@azure/cosmos'
- uid: '@azure/cosmos.LocationRouting.routeToLocation'
name: routeToLocation(string)
children: []
type: method
langs:
- typeScript
summary: ''
syntax:
content: 'function routeToLocation(locationEndpoint: string)'
parameters:
- id: locationEndpoint
type:
- string
description: ''
package: '@azure/cosmos'
|
docs-ref-autogen/@azure/cosmos/LocationRouting.yml
|
author: mongoose-os
description: DHT sensor usage example (JavaScript)
version: 1.0
libs_version: ${mos.version}
modules_version: ${mos.version}
mongoose_os_version: ${mos.version}
sources:
- src
filesystem:
- fs
config_schema:
- ["rpc.uart.uart_no", -1]
# - ["debug.stdout_uart", 0]
# - ["debug.stderr_uart", 0]
- ["wifi.sta.enable", true]
- ["wifi.sta.ssid", "GL_Guest"]
- ["wifi.sta.pass", "<PASSWORD>#"]
- ["wifi.ap.enable", false]
- ["device.id", "esp8266_0EA1CD"]
# - ["mqtt.enable", true]
# - ["mqtt.server", "IoTGetStartedAS.azure-devices.net:8883"]
# - ["mqtt.client_id", "esp8266_0EA1CD"]
# - ["mqtt.user", "IoTGetStartedAS.azure-devices.net:8883/esp8266_0EA1CD"]
# - ["mqtt.pass", "<PASSWORD>Signature sr=IoTGetStartedAS.azure-devices.net%2Fdevices%2Fesp8266_0EA1CD&sig=McxocEQckhaa5Ede%2BeWuRTvFPbkcdtZUcD%2FAmdR9SZg%3D&se=1536323031"]
# - ["mqtt.ssl_ca_cert", "ca.pem"]
# - ["debug.level", 2]
libs:
# common mgos libs
- origin: https://github.com/mongoose-os-libs/ca-bundle
# - origin: https://github.com/mongoose-os-libs/i2c
- origin: https://github.com/mongoose-os-libs/rpc-service-config
- origin: https://github.com/mongoose-os-libs/rpc-service-fs
- origin: https://github.com/mongoose-os-libs/rpc-uart
#-------------------------------------------------------------
# - origin: https://github.com/mongoose-os-libs/http-server
# - origin: https://github.com/mongoose-os-libs/ota-http-server
- origin: https://github.com/mongoose-os-libs/rpc-common
- origin: https://github.com/mongoose-os-libs/rpc-loopback
# - origin: https://github.com/mongoose-os-libs/rpc-mqtt
- origin: https://github.com/mongoose-os-libs/rpc-service-ota
# - origin: https://github.com/mongoose-os-libs/spi
# libs necessary for the current app
- origin: https://github.com/mongoose-os-libs/wifi
- origin: https://github.com/mongoose-os-libs/mjs
- origin: https://github.com/mongoose-os-libs/mqtt
- origin: https://github.com/mongoose-os-libs/rpc-uart
tags:
- js
- hw
manifest_version: 2017-05-18
|
mos.yml
|
---
#DO NOT TRANSLATE THIS FILE DIRECTLY
#If you want to contribute translations, visit https://translate.glucocheck.app
assistant_responses:
blood_sugar:
long;with_trend;with_time: >
El nivel de azúcar en sangre es {{value}} y {{trend}} desde hace {{time}}.
long;no_trend;with_time: >
El azúcar en sangre es {{value}} desde hace {{time}}.
long;with_trend;no_time: >
El azúcar en sangre es {{value}} y {{trend}}.
long;no_trend;no_time: >
El azúcar en sangre es {{value}}.
short;no_trend;with_time: >
{{value}} desde hace {{time}}.
short;with_trend;with_time: >
{{value}} y {{trend}} desde hace {{time}}.
trends:
    DoubleUp: "subiendo rápidamente"
SingleUp: "subiendo"
SlightUp: "subiendo lentamente"
Stable: "estable"
SlightDown: "bajando lentamente"
SingleDown: "bajando"
    DoubleDown: "bajando rápidamente"
carbs_on_board:
long;with_time: >
      Hay {{value}} hidratos de carbono activos desde hace {{time}}.
long;no_time: >
Hay {{value}} hidratos de carbono activos.
short;with_time: >
      Hay {{value}} hidratos de carbono activos desde hace {{time}}.
insulin_on_board:
long;with_time: >
      Hay {{value}} unidades de insulina desde hace {{time}}.
long;no_time: >
Hay {{value}} unidades de insulina activas.
short;with_time: >
{{value}} unidades de insulina activas desde hace {{time}}.
cannula_age: >
La cánula se insertó hace {{time}}.
sensor_age: >
El sensor se insertó hace {{time}}.
pump_battery: >
La batería de la bomba está a {{percent}}.
pump_reservoir: >
La bomba contiene {{reservoir}} unidades restantes.
errors:
User not found: >
¡Hola! Gracias por probar Gluco Check. Antes de que pueda leer su información, primero deberá vincular su sitio de Nightscout. Para hacer esto, visite nuestro sitio web: {{gc_url}}
Nightscout Unavailable: >
Lo sentimos, parece que hay un problema con tu sitio de Nightscout. No pude obtener tu información.
Nightscout Unauthorized: >
Lo siento, parece que no puedo leer su sitio de Nightscout. Para dejarme entrar, primero debes crear un token. Para hacer esto, visite nuestro sitio web: {{gc_url}}
DmMetric Not Found: >
No pude encontrar tu {{metric}}.
common:
disclaimer:
assistant_IANAD: >
Solo un recordatorio: no soy médico. Si necesita consejo médico, consulte a su doctor/a.
metrics:
blood sugar: azúcar en sangre
insulin on board: Insulina activa
carbs on board: Hidratos de carbono activos
sensor age: Días uso del sensor
cannula age: Días de uso de la cánula
pump battery: Batería de la Bomba
pump reservoir: Reservorio de la Bomba
|
gluco-check-common/strings/es-ES/assistant-responses.yaml
|
items:
- uid: com.microsoft.azure.management.storage.BlobServiceProperties.DefinitionStages
id: DefinitionStages
artifact: com.microsoft.azure:azure-mgmt-storage:1.33.1
parent: com.microsoft.azure.management.storage
children:
- com.microsoft.azure.management.storage.BlobServiceProperties.DefinitionStages.Blank
- com.microsoft.azure.management.storage.BlobServiceProperties.DefinitionStages.WithCors
- com.microsoft.azure.management.storage.BlobServiceProperties.DefinitionStages.WithCreate
- com.microsoft.azure.management.storage.BlobServiceProperties.DefinitionStages.WithDefaultServiceVersion
- com.microsoft.azure.management.storage.BlobServiceProperties.DefinitionStages.WithDeleteRetentionPolicy
- com.microsoft.azure.management.storage.BlobServiceProperties.DefinitionStages.WithStorageAccount
langs:
- java
name: BlobServiceProperties.DefinitionStages
nameWithType: BlobServiceProperties.DefinitionStages
fullName: com.microsoft.azure.management.storage.BlobServiceProperties.DefinitionStages
type: Interface
package: com.microsoft.azure.management.storage
summary: Grouping of BlobServiceProperties definition stages.
syntax:
content: public static interface BlobServiceProperties.DefinitionStages
references:
- uid: com.microsoft.azure.management.storage.BlobServiceProperties.DefinitionStages.Blank
name: BlobServiceProperties.DefinitionStages.Blank
nameWithType: BlobServiceProperties.DefinitionStages.Blank
fullName: com.microsoft.azure.management.storage.BlobServiceProperties.DefinitionStages.Blank
- uid: com.microsoft.azure.management.storage.BlobServiceProperties.DefinitionStages.WithStorageAccount
name: BlobServiceProperties.DefinitionStages.WithStorageAccount
nameWithType: BlobServiceProperties.DefinitionStages.WithStorageAccount
fullName: com.microsoft.azure.management.storage.BlobServiceProperties.DefinitionStages.WithStorageAccount
- uid: com.microsoft.azure.management.storage.BlobServiceProperties.DefinitionStages.WithCors
name: BlobServiceProperties.DefinitionStages.WithCors
nameWithType: BlobServiceProperties.DefinitionStages.WithCors
fullName: com.microsoft.azure.management.storage.BlobServiceProperties.DefinitionStages.WithCors
- uid: com.microsoft.azure.management.storage.BlobServiceProperties.DefinitionStages.WithDefaultServiceVersion
name: BlobServiceProperties.DefinitionStages.WithDefaultServiceVersion
nameWithType: BlobServiceProperties.DefinitionStages.WithDefaultServiceVersion
fullName: com.microsoft.azure.management.storage.BlobServiceProperties.DefinitionStages.WithDefaultServiceVersion
- uid: com.microsoft.azure.management.storage.BlobServiceProperties.DefinitionStages.WithDeleteRetentionPolicy
name: BlobServiceProperties.DefinitionStages.WithDeleteRetentionPolicy
nameWithType: BlobServiceProperties.DefinitionStages.WithDeleteRetentionPolicy
fullName: com.microsoft.azure.management.storage.BlobServiceProperties.DefinitionStages.WithDeleteRetentionPolicy
- uid: com.microsoft.azure.management.storage.BlobServiceProperties.DefinitionStages.WithCreate
name: BlobServiceProperties.DefinitionStages.WithCreate
nameWithType: BlobServiceProperties.DefinitionStages.WithCreate
fullName: com.microsoft.azure.management.storage.BlobServiceProperties.DefinitionStages.WithCreate
|
docs-ref-autogen/com.microsoft.azure.management.storage.BlobServiceProperties.DefinitionStages.yml
|
Bundler/OrderedGems:
Exclude:
- 'Gemfile'
# Offense count: 6
# Cop supports --auto-correct.
Layout/EmptyLineAfterGuardClause:
Exclude:
- 'bin/bundle'
# Offense count: 1
# Cop supports --auto-correct.
Layout/EmptyLines:
Exclude:
- 'config/environments/development.rb'
# Offense count: 1
# Cop supports --auto-correct.
# Configuration parameters: EnforcedStyle.
# SupportedStyles: empty_lines, no_empty_lines
Layout/EmptyLinesAroundBlockBody:
Exclude:
- 'Gemfile'
# Offense count: 1
# Cop supports --auto-correct.
# Configuration parameters: AllowForAlignment, AllowBeforeTrailingComments, ForceEqualSignAlignment.
Layout/ExtraSpacing:
Exclude:
- 'config/environments/production.rb'
# Offense count: 1
# Cop supports --auto-correct.
# Configuration parameters: EnforcedStyle, IndentationWidth.
# SupportedStyles: aligned, indented
Layout/MultilineOperationIndentation:
Exclude:
- 'bin/bundle'
# Offense count: 1
# Cop supports --auto-correct.
# Configuration parameters: AllowForAlignment, EnforcedStyleForExponentOperator.
# SupportedStylesForExponentOperator: space, no_space
Layout/SpaceAroundOperators:
Exclude:
- 'config/environments/production.rb'
# Offense count: 2
# Cop supports --auto-correct.
# Configuration parameters: EnforcedStyle, EnforcedStyleForEmptyBrackets.
# SupportedStyles: space, no_space, compact
# SupportedStylesForEmptyBrackets: space, no_space
Layout/SpaceInsideArrayLiteralBrackets:
Exclude:
- 'config/environments/production.rb'
# Offense count: 1
# Configuration parameters: IgnoredMethods.
Metrics/CyclomaticComplexity:
Max: 15
# Offense count: 1
# Configuration parameters: CountComments, CountAsOne, ExcludedMethods.
Metrics/MethodLength:
Max: 30
# Offense count: 1
# Configuration parameters: IgnoredMethods.
Metrics/PerceivedComplexity:
Max: 15
# Offense count: 3
Style/Documentation:
Exclude:
- 'spec/**/*'
- 'test/**/*'
- 'app/mailers/application_mailer.rb'
- 'app/models/application_record.rb'
- 'config/application.rb'
# Offense count: 3
# Cop supports --auto-correct.
Style/ExpandPathArguments:
Exclude:
- 'bin/bundle'
- 'bin/rails'
- 'bin/rake'
# Offense count: 30
# Cop supports --auto-correct.
# Configuration parameters: EnforcedStyle.
# SupportedStyles: always, always_true, never
Style/FrozenStringLiteralComment:
Enabled: false
# Offense count: 2
# Cop supports --auto-correct.
Style/IfUnlessModifier:
Exclude:
- 'bin/bundle'
# Offense count: 1
# Cop supports --auto-correct.
Style/PerlBackrefs:
Exclude:
- 'bin/bundle'
# Offense count: 1
# Cop supports --auto-correct.
# Configuration parameters: .
# SupportedStyles: use_perl_names, use_english_names
Style/SpecialGlobalVars:
EnforcedStyle: use_perl_names
# Offense count: 37
# Cop supports --auto-correct.
# Configuration parameters: EnforcedStyle, ConsistentQuotesInMultiline.
# SupportedStyles: single_quotes, double_quotes
Style/StringLiterals:
Exclude:
- 'bin/bundle'
- 'config/application.rb'
- 'config/environments/production.rb'
- 'config/puma.rb'
- 'config/spring.rb'
# Offense count: 2
# Cop supports --auto-correct.
# Configuration parameters: MinSize.
# SupportedStyles: percent, brackets
Style/SymbolArray:
EnforcedStyle: brackets
# Offense count: 2
# Cop supports --auto-correct.
# Configuration parameters: AutoCorrect, AllowHeredoc, AllowURI, URISchemes, IgnoreCopDirectives, IgnoredPatterns.
# URISchemes: http, https
Layout/LineLength:
Max: 200
Rails/UniqueValidationWithoutIndex:
Enabled: false
RSpec/InstanceVariable:
Enabled: false
RSpec/ExampleLength:
Enabled: false
RSpec/RepeatedExampleGroupDescription:
Enabled: false
RSpec/MultipleExpectations:
Enabled: false
RSpec/DescribeClass:
Enabled: false
RSpec/NestedGroups:
Enabled: false
RSpec/RepeatedDescription:
Enabled: false
Rails/FilePath:
Enabled: false
Lint/MissingSuper:
Enabled: false
|
api/.rubocop_todo.yml
|
- name: Stop Tic-Smart Api Service - {{ item.name }}
become: yes
docker_swarm_service:
name: "{{item.name}}"
state: absent
networks:
- "{{swarm_network}}"
loop:
- "{{tic_smart_api}}"
when: tic_smart_api.switch == "on" and inventory_hostname in groups.swarm_manager_prime
# Clean tic-smart-api files folders, if they exist
- name: Clean tic-smart-api files folders, if they exist
become: yes
file:
path: "/root/hlft-store/tic_event_listener/"
state: absent
when: tic_smart_api.switch == "on"
# git clone tic-smart-api files
- name: Git clone tic-smart-api files
become: yes
shell: git clone --single-branch --branch automateSchemaCreation {{tic_smart_api.git_repository}} /root/hlft-store/tic_event_listener
# Template copy Tic-Smart Api config.json file
- name: Template copy Tic-Smart Api app_config.json file
become: yes
template:
src: "tic_smart_api_config.json"
dest: "/root/hlft-store/tic_event_listener/app_config.json"
mode: "0750"
force: yes
when: tic_smart_api.switch == "on"
# Build Docker Image
- name: Build Docker Image for Tic-Smart Api
become: yes
shell: cd /root/hlft-store/tic_event_listener && bash build_docker_image.sh
ignore_errors: yes
# Run as docker service with replicas
# - name: Run as docker service with replicas
# become: yes
# shell: docker service create --name tic-smart-api-service --replicas 1 -p {{tic_smart_api.port}}:{{tic_smart_api.port}} tic-smart-api:latest
# Run as docker service with replicas
- name: Tic-Smart Api Docker Service
become: yes
docker_swarm_service:
name: "{{tic_smart_api.name}}"
hostname: "{{tic_smart_api.name}}"
networks:
- "{{swarm_network}}"
image: "{{tic_smart_api.image}}:{{tic_smart_api.tag}}"
mode: replicated
replicas: "{{tic_smart_api.replicas}}"
publish:
- published_port: "{{tic_smart_api.port}}"
target_port: "{{tic_smart_api.port}}"
protocol: "tcp"
force_update: yes
when: tic_smart_api.switch == "on" and inventory_hostname in groups.swarm_manager_prime
|
roles/tic_smart_api/tasks/main.yml
|
timeout_in: 30m
task:
# this task should fail fast or rely on 'depends_on' for all other tasks
name: Tests (Unit)
container:
image: cirrusci/flutter:latest
pub_cache:
folder: ~/.pub-cache
activate_coverage_script: pub global activate coverage
tests_script: ./scripts/runTests.sh
task:
name: Integration Tests for $app_arch (Linux)
# don't run for PRs
only_if: $CIRRUS_PR == ''
skip: '!changesInclude(".cirrus.yml", "$app_arch/*", "$app_arch/**/*")'
env:
EMULATOR_API_LEVEL: 22
ANDROID_ABI: "default;armeabi-v7a"
matrix:
app_arch: bloc_flutter
app_arch: bloc_library
app_arch: built_redux
app_arch: firestore_redux
app_arch: frideos_library
app_arch: inherited_widget
app_arch: mvc
app_arch: mvi_flutter
app_arch: mvu
app_arch: redurx
app_arch: redux
app_arch: scoped_model
app_arch: simple_bloc_flutter
app_arch: vanilla
container:
image: cirrusci/flutter:latest
allow_failures: $app_arch == "mvu" || $app_arch == "redurx"
install_images_script: sdkmanager "system-images;android-$EMULATOR_API_LEVEL;$ANDROID_ABI"
create_device_script:
echo no | avdmanager create avd --force -n test -k "system-images;android-$EMULATOR_API_LEVEL;$ANDROID_ABI"
start_emulator_background_script:
$ANDROID_HOME/emulator/emulator -avd test -no-audio -no-window
pub_cache:
folder: ~/.pub-cache
wait_for_emulator_script:
- ./scripts/android-wait-for-emulator.sh
- adb shell input keyevent 82
doctor_script: flutter doctor -v
devices_script: flutter devices
ci_script: ./scripts/ci.sh ./$app_arch || ./scripts/ci.sh ./$app_arch
task:
name: Integration Tests for $app_arch (macOS)
# don't run for PRs
only_if: $CIRRUS_PR == ''
skip: '!changesInclude(".cirrus.yml", "$app_arch/*", "$app_arch/**/*")'
env:
matrix:
app_arch: bloc_flutter
app_arch: bloc_library
app_arch: built_redux
app_arch: firestore_redux
app_arch: frideos_library
app_arch: inherited_widget
app_arch: mvc
app_arch: mvi_flutter
app_arch: mvu
app_arch: redurx
app_arch: redux
app_arch: scoped_model
app_arch: simple_bloc_flutter
app_arch: vanilla
allow_failures: $app_arch == "mvu"
osx_instance:
image: mojave-flutter
simulator_script:
- xcrun simctl list devicetypes
- xcrun simctl list runtimes
# create simulator
- udid=$(xcrun simctl create "iPhone X" com.apple.CoreSimulator.SimDeviceType.iPhone-X com.apple.CoreSimulator.SimRuntime.iOS-12-1)
# boot simulator
- xcrun simctl boot $udid
doctor_script: flutter doctor -v
devices_script: flutter devices
ci_script: ./scripts/ci.sh ./$app_arch || ./scripts/ci.sh ./$app_arch
|
.cirrus.yml
|
backend:
name: git-gateway
branch: master
media_folder: static/img
public_folder: /img
collections:
- name: "schedule"
label: "Schedule"
description: "Conference schedule"
folder: "src/pages/schedule"
create: true
fields:
- { label: "Template Key", name: "templateKey", widget: "hidden", default: "schedule" }
- { label: "Title", name: "title", widget: "string" }
- {
label: Schedule,
name: schedule,
required: true,
widget: list,
fields:
[
{ label: "Publish Date", name: "date", widget: "datetime" },
{ label: Name, name: name, required: true, widget: string },
{
label: Presentation Title,
name: presentationTitle,
required: false,
widget: string,
},
{
label: Image,
name: image,
required: false,
hint: "If an image isn't specified, a default headshot placeholder image will be used",
widget: image,
},
{ label: Text, name: text, required: true, widget: text },
],
}
- name: "speakers"
label: "Speaker"
create: true
folder: "src/pages/speakers"
slug: "speakers/{{slug}}"
fields:
- {label: "Template Key", name: "templateKey", widget: "hidden", default: "speaker"}
- {label: "Name", name: "name", widget: "string"}
- {label: "Title", name: "title", widget: "string"}
- {label: "Company", name: "company", widget: "string"}
- {label: "bio", name: "bio", widget: "text"}
- {label: "Github", name: "github", widget: "string"}
- {label: "Twitter", name: "twitter", widget: "string"}
- {label: "Discord", name: "discord", widget: "string"}
- {label: "Profile Image", name: "image", widget: image}
- {label: "Publish Date", name: "date", widget: "datetime"}
- name: "pages"
label: "Pages"
files:
- file: "src/pages/index.md"
label: "Home Page"
name: "index"
fields:
- {label: "Template Key", name: "templateKey", widget: "hidden", default: "index-page"}
- {label: Title, name: title, widget: string}
- {label: Image, name: image, widget: image}
- {label: Heading, name: heading, widget: string}
- {label: Subheading, name: subheading, widget: string}
- {label: Mainpitch, name: mainpitch, widget: object, fields: [{label: Title, name: title, widget: string}, {label: Description, name: description, widget: text}]}
- {label: Description, name: description, widget: string}
- {label: Intro, name: intro, widget: object, fields: [{label: Heading, name: heading, widget: string}, {label: Description, name: description, widget: text}, {label: Blurbs, name: blurbs, widget: list, fields: [{label: Image, name: image, widget: image}, {label: Text, name: text, widget: text}]}]}
- {label: Main, name: main, widget: object, fields: [{label: Heading, name: heading, widget: string}, {label: Description, name: description, widget: text}, {label: Image1, name: image1, widget: object, fields: [{label: Image, name: image, widget: image}, {label: Alt, name: alt, widget: string}]}, {label: Image2, name: image2, widget: object, fields: [{label: Image, name: image, widget: image}, {label: Alt, name: alt, widget: string}]}, {label: Image3, name: image3, widget: object, fields: [{label: Image, name: image, widget: image}, {label: Alt, name: alt, widget: string}]}]}
- file: "src/pages/code-of-conduct/index.md"
label: "Code of Conduct"
name: "code-of-conduct"
fields:
- {label: "Template Key", name: "templateKey", widget: "hidden", default: "coc-page"}
- {label: "Title", name: "title", widget: "string"}
- {label: "Body", name: "body", widget: "markdown"}
- file: "src/pages/about/index.md"
label: "About"
name: "about"
fields:
- {label: "Template Key", name: "templateKey", widget: "hidden", default: "about-page"}
- {label: "Title", name: "title", widget: "string"}
- {label: "Body", name: "body", widget: "markdown"}
|
static/admin/config.yml
|
grafana:
rbac:
pspEnabled: false
image:
repository: gcr.io/pluralsh/grafana/grafana
initChownData:
image:
repository: gcr.io/pluralsh/busybox
tag: latest
downloadDashboardsImage:
repository: gcr.io/pluralsh/curlimages/curl
grafana.ini:
security:
allow_embedding: true
admin:
existingSecret: grafana-credentials
plugins:
- grafana-worldmap-panel
- natel-discrete-panel
# dashboardProviders:
# dashboardproviders.yaml:
# apiVersion: 1
# providers:
# - name: default
# orgId: 1
# folder: ''
# type: file
# updateIntervalSeconds: 10
# disableDeletion: true
# editable: true
# options:
# path: /var/lib/grafana/dashboards/default
# - name: sidecar
# orgId: 1
# folder: ''
# type: file
# updateIntervalSeconds: 10
# disableDeletion: true
# editable: true
# options:
# path: /var/lib/grafana/dashboards/sidecar
# dashboards:
# default:
# kubernetes-overview:
# gnetId: 10856
# revision: 1
# uid: 'kubernetes-overview'
# datasource: Prometheus
# kubernetes-deepdive:
# gnetId: 8588
# revision: 1
# uid: 'kubernetes-deepdive'
# datasource: Prometheus
# node-overview:
# gnetId: 1860
# revision: 16
# uid: 'node-overview'
# datasource: Prometheus
service:
type: NodePort
persistence:
enabled: true
type: statefulset
inMemory:
enabled: false
sidecar:
dashboards:
provider:
name: sidecar
allowUiUpdates: true
foldersFromFilesStructure: true
enabled: true
searchNamespace: ALL
folderAnnotation: k8s-sidecar-target-directory
datasources:
enabled: true
searchNamespace: ALL
ingress:
enabled: true
path: /.*
annotations:
kubernetes.io/tls-acme: "true"
kubernetes.io/ingress.class: "nginx"
cert-manager.io/cluster-issuer: letsencrypt-prod
nginx.ingress.kubernetes.io/force-ssl-redirect: 'true'
nginx.ingress.kubernetes.io/use-regex: "true"
|
grafana/helm/grafana/values.yaml
|
sorting-type: GROUPS
sorting-placeholder: "%some_level_maybe?%"
#by default, names are sorted as A-Z _ a-z
#by disabling you will make it Aa-Zz
#this also applies for everything else, such as sorting by placeholder (factions, ..)
case-sentitive-sorting: true
#list of lines with adaptive height, in that order.
#after adding a new property you can give it to someone in config just like any other properties
unlimited-nametag-mode-dynamic-lines:
- abovename
- nametag #the original one, combination of tagprefix + customtagname + tagsuffix
- belowname
- another
#lines with static height. Dynamic is using 0.22 blocks per line, starting at 0. With 3 lines, they are as: 0, 0.22, 0.44
#can also be negative, but be careful when configuring value so it won't end up inside of the player
unlimited-nametag-mode-static-lines:
myCustomLine: 0.66
##############################################
################# SCOREBOARD #################
##############################################
#Limitations:
#1.8 - 1.12 clients: scoreboard title 32 characters, lines 30 characters
#1.13+ clients: no limits
#maximum 15 lines, client can't display more
#for bungee mode it counts as "server" instead of "world", such as disable in servers
scoreboard:
enabled: false
toggle-command: /sb
#using 1-15 instead of 0s everywhere
use-numbers: false
disable-in-worlds:
- disabledworld
#set to NONE to have no default scoreboard
default-scoreboard: "scoreboard3"
refresh-interval-ticks: 1
per-world:
testworld: "twscoreboard"
scoreboards:
scoreboard3:
#permission is "tab.scoreboard.<scoreboard name>", in this case tab.scoreboard.scoreboard3
permission-required: true
#if player is missing the permission, display a different scoreboard
#if no other should be displayed / permission required is false, delete the line entirely
if-permission-missing: scoreboard2
title: "Super board"
lines:
- "Scoreboard for people with permission"
- "tab.scoreboard.scoreboard3"
scoreboard2:
#here the permission is "tab.scoreboard.scoreboard2"
permission-required: true
if-permission-missing: scoreboard1
title: "Average board"
lines:
- "Scoreboard for people with permission"
- "tab.scoreboard.scoreboard2"
- "AND MISSING tab.scoreboard.scoreboard3"
scoreboard1:
permission-required: false
title: "Default"
lines:
- "Scoreboard for people with none of"
- "tab.scoreboard.scoreboard3 or"
- "tab.scoreboard.scoreboard2 "
- "permission"
twscoreboard:
permission-required: false
title: "TestWorld"
lines:
- "This is scoreboard for everyone in testworld world"
#toggle messages
  scoreboard-on: "&2Scoreboard enabled"
scoreboard-off: "&7Scoreboard disabled"
|
premiumconfig.yml
|
tosca_definitions_version: alien_dsl_2_0_0
#
# Ystia Forge
# Copyright (C) 2018 <NAME>. - Bull, <NAME>, B.P.68, 78340, Les Clayes-sous-Bois, France.
# Use of this source code is governed by Apache 2 LICENSE that can be found in the LICENSE file.
#
metadata:
template_name: org.ystia.samples.topologies.welcome_monitoring
template_version: 2.3.0-SNAPSHOT
template_author: Ystia
imports:
- tosca-normative-types:1.0.0-ALIEN20
- yorc-types:1.1.0
- org.ystia.common:2.3.0-SNAPSHOT
- org.ystia.samples.welcome.linux.bash:2.3.0-SNAPSHOT
- org.ystia.samples.tcpecho.ansible:2.3.0-SNAPSHOT
topology_template:
description: A monitored topology template for the Welcome sample
node_templates:
# Network
Network:
type: tosca.nodes.Network
properties:
ip_version: 4
# VM for MntWelcome
MntCompute:
type: tosca.nodes.Compute
requirements:
- network:
node: Network
relationship: tosca.relationships.Network
MntWelcome:
type: org.ystia.samples.welcome.linux.bash.nodes.Welcome
requirements:
- host:
node: MntCompute
MntTCPEcho:
type: org.ystia.samples.tcpecho.ansible.nodes.TCPEcho
requirements:
- host:
node: MntCompute
policies:
- HTTPMonitoring:
type: yorc.policies.monitoring.HTTPMonitoring
targets: [ MntWelcome ]
properties:
scheme: http
port: 8111
time_interval: 5s
- TCPMonitoring:
type: yorc.policies.monitoring.TCPMonitoring
targets: [ MntTCPEcho ]
properties:
port: 9111
time_interval: 5s
# Output property for MntWelcome URL
outputs:
url:
description: The URL to access the Welcome page
value: { get_attribute: [ MntWelcome, url ] }
tcp_echo_url:
description: The URL to access the Echo Server
value: { get_attribute: [ MntTCPEcho, url ] }
workflows:
killWebServer:
steps:
Welcome_stop:
target: MntWelcome
activities:
- call_operation: Standard.stop
stopWebServer:
steps:
Welcome_stopping:
target: MntWelcome
activities:
- set_state: stopping
on_success:
- Welcome_stop
Welcome_stopped:
target: MntWelcome
activities:
- set_state: stopped
Welcome_stop:
target: MntWelcome
activities:
- call_operation: Standard.stop
on_success:
- Welcome_stopped
startWebServer:
steps:
Welcome_started:
target: MntWelcome
activities:
- set_state: started
Welcome_start:
target: MntWelcome
activities:
- call_operation: Standard.start
on_success:
- Welcome_started
Welcome_starting:
target: MntWelcome
activities:
- set_state: starting
on_success:
- Welcome_start
killTCPEchoServer:
steps:
        TCPEcho_stop:
target: MntTCPEcho
activities:
- call_operation: Standard.stop
stopTCPEchoServer:
steps:
TCPEcho_stopping:
target: MntTCPEcho
activities:
- set_state: stopping
on_success:
- TCPEcho_stop
TCPEcho_stopped:
target: MntTCPEcho
activities:
- set_state: stopped
TCPEcho_stop:
target: MntTCPEcho
activities:
- call_operation: Standard.stop
on_success:
- TCPEcho_stopped
startTCPEchoServer:
steps:
TCPEcho_started:
target: MntTCPEcho
activities:
- set_state: started
TCPEcho_start:
target: MntTCPEcho
activities:
- call_operation: Standard.start
on_success:
- TCPEcho_started
TCPEcho_starting:
target: MntTCPEcho
activities:
- set_state: starting
on_success:
- TCPEcho_start
|
org/ystia/samples/topologies/welcome_monitoring/types.yml
|
uid: "com.azure.resourcemanager.authorization.fluent.models.ApplicationsOrderby"
fullName: "com.azure.resourcemanager.authorization.fluent.models.ApplicationsOrderby"
name: "ApplicationsOrderby"
nameWithType: "ApplicationsOrderby"
summary: "Defines values for ApplicationsOrderby."
inheritances:
- "<xref href=\"java.lang.Object\" data-throw-if-not-resolved=\"False\" />"
- "<xref href=\"com.azure.core.util.ExpandableStringEnum\" data-throw-if-not-resolved=\"False\" />"
inheritedMembers:
- "com.azure.core.util.ExpandableStringEnum.<T>fromString(java.lang.String,java.lang.Class<T>)"
- "com.azure.core.util.ExpandableStringEnum.<T>values(java.lang.Class<T>)"
- "com.azure.core.util.ExpandableStringEnum.equals(java.lang.Object)"
- "com.azure.core.util.ExpandableStringEnum.hashCode()"
- "com.azure.core.util.ExpandableStringEnum.toString()"
- "java.lang.Object.clone()"
- "java.lang.Object.finalize()"
- "java.lang.Object.getClass()"
- "java.lang.Object.notify()"
- "java.lang.Object.notifyAll()"
- "java.lang.Object.wait()"
- "java.lang.Object.wait(long)"
- "java.lang.Object.wait(long,int)"
syntax: "public final class ApplicationsOrderby extends ExpandableStringEnum<ApplicationsOrderby>"
constructors:
- "com.azure.resourcemanager.authorization.fluent.models.ApplicationsOrderby.ApplicationsOrderby()"
fields:
- "com.azure.resourcemanager.authorization.fluent.models.ApplicationsOrderby.APP_DISPLAY_NAME"
- "com.azure.resourcemanager.authorization.fluent.models.ApplicationsOrderby.APP_DISPLAY_NAME_DESC"
- "com.azure.resourcemanager.authorization.fluent.models.ApplicationsOrderby.DATA_TYPE"
- "com.azure.resourcemanager.authorization.fluent.models.ApplicationsOrderby.DATA_TYPE_DESC"
- "com.azure.resourcemanager.authorization.fluent.models.ApplicationsOrderby.DELETED_DATE_TIME"
- "com.azure.resourcemanager.authorization.fluent.models.ApplicationsOrderby.DELETED_DATE_TIME_DESC"
- "com.azure.resourcemanager.authorization.fluent.models.ApplicationsOrderby.ID"
- "com.azure.resourcemanager.authorization.fluent.models.ApplicationsOrderby.ID_DESC"
- "com.azure.resourcemanager.authorization.fluent.models.ApplicationsOrderby.IS_SYNCED_FROM_ON_PREMISES"
- "com.azure.resourcemanager.authorization.fluent.models.ApplicationsOrderby.IS_SYNCED_FROM_ON_PREMISES_DESC"
- "com.azure.resourcemanager.authorization.fluent.models.ApplicationsOrderby.NAME"
- "com.azure.resourcemanager.authorization.fluent.models.ApplicationsOrderby.NAME_DESC"
- "com.azure.resourcemanager.authorization.fluent.models.ApplicationsOrderby.TARGET_OBJECTS"
- "com.azure.resourcemanager.authorization.fluent.models.ApplicationsOrderby.TARGET_OBJECTS_DESC"
methods:
- "com.azure.resourcemanager.authorization.fluent.models.ApplicationsOrderby.fromString(java.lang.String)"
- "com.azure.resourcemanager.authorization.fluent.models.ApplicationsOrderby.values()"
type: "class"
metadata: {}
package: "com.azure.resourcemanager.authorization.fluent.models"
artifact: com.azure.resourcemanager:azure-resourcemanager-authorization:2.2.0
|
docs-ref-autogen/com.azure.resourcemanager.authorization.fluent.models.ApplicationsOrderby.yml
|
--- !<MAP>
contentType: "MAP"
firstIndex: "2018-10-16 18:16"
game: "Unreal Tournament"
name: "CTF-Carrier"
author: "<NAME>\"a.k.a.Bubba\""
description: "u.s.s.Nemisis"
releaseDate: "2001-04"
attachments:
- type: "IMAGE"
name: "CTF-Carrier_shot_1.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament/Maps/Capture%20The%20Flag/C/CTF-Carrier_shot_1.png"
originalFilename: "ctf-carrier.zip"
hash: "1f0118211c84b042f7f269f3c6d0ea3491ef389b"
fileSize: 2659993
files:
- name: "richrig.utx"
fileSize: 2234223
hash: "e2115772791315c91cdf6ab99e687d2f94e5685d"
- name: "warship.utx"
fileSize: 618933
hash: "f174b35e86b07e98b66f94f8ee30e9692bc90097"
- name: "CTF-Carrier.unr"
fileSize: 1894276
hash: "ce87e0ba311fba727b47b81988644d7c99763d16"
otherFiles: 3
dependencies:
CTF-Carrier.unr:
- status: "OK"
name: "richrig"
- status: "OK"
name: "warship"
downloads:
- url: "https://f002.backblazeb2.com/file/unreal-archive-files/Unreal%20Tournament/Maps/Capture%20The%20Flag/C/ctf-carrier.zip"
main: true
repack: false
state: "OK"
- url: "http://www.ut-files.com/index.php?dir=Maps/CTF/MapsC/&file=ctf-carrier.zip"
main: false
repack: false
state: "OK"
- url: "http://www.ut-files.com/index.php?dir=Maps/CTF/&file=ctf-carrier.zip"
main: false
repack: false
state: "OK"
- url: "http://medor.no-ip.org/index.php?dir=Maps/CTF&file=ctf-carrier.zip"
main: false
repack: false
state: "OK"
- url: "http://uttexture.com/UT/Downloads/Maps/CTF/MapsC/ctf-carrier.zip"
main: false
repack: false
state: "OK"
- url: "https://files.vohzd.com/unrealarchive/Unreal%20Tournament/Maps/Capture%20The%20Flag/C/1/f/011821/ctf-carrier.zip"
main: false
repack: false
state: "OK"
- url: "https://unreal-archive-files.eu-central-1.linodeobjects.com/Unreal%20Tournament/Maps/Capture%20The%20Flag/C/1/f/011821/ctf-carrier.zip"
main: false
repack: false
state: "OK"
deleted: false
gametype: "Capture The Flag"
title: "Carrier"
playerCount: "8-16"
themes:
Industrial: 1.0
bots: true
|
content/Unreal Tournament/Maps/Capture The Flag/C/1/f/011821/ctf-carrier_[1f011821].yml
|
items:
- uid: com.microsoft.azure.management.network.ApplicationGatewayProbe.DefinitionStages.Blank
id: Blank
parent: com.microsoft.azure.management.network
href: com.microsoft.azure.management.network.ApplicationGatewayProbe.DefinitionStages.Blank.yml
langs:
- java
name: ApplicationGatewayProbe.DefinitionStages.Blank<ParentT>
nameWithType: ApplicationGatewayProbe.DefinitionStages.Blank<ParentT>
fullName: com.microsoft.azure.management.network.ApplicationGatewayProbe.DefinitionStages.Blank<ParentT>
type: Interface
source:
remote:
path: azure-mgmt-network/src/main/java/com/microsoft/azure/management/network/ApplicationGatewayProbe.java
branch: v1.3.0
repo: https://github.com/Azure/azure-sdk-for-java
path: azure-mgmt-network/src/main/java/com/microsoft/azure/management/network/ApplicationGatewayProbe.java
startLine: 59
package: com.microsoft.azure.management.network
summary: "<p>The first stage of an application gateway probe definition. </p>"
syntax:
content: public interface Blank<ParentT> extends ApplicationGatewayProbe.DefinitionStages.WithHost<ParentT>
typeParameters:
- id: ParentT
type: 18a275b1
description: <p>the stage of the parent application gateway definition to return to after attaching this definition </p>
inheritedMembers:
- com.microsoft.azure.management.network.ApplicationGatewayProbe.DefinitionStages.WithHost.withHost(String)
references:
- uid: 18a275b1
spec.java:
- name: ParentT
fullName: ParentT
- uid: com.microsoft.azure.management.network.ApplicationGatewayProbe.DefinitionStages.WithHost.withHost(String)
parent: com.microsoft.azure.management.network.ApplicationGatewayProbe.DefinitionStages.WithHost
href: com.microsoft.azure.management.network.ApplicationGatewayProbe.DefinitionStages.WithHost.yml
name: withHost(String host)
nameWithType: ApplicationGatewayProbe.DefinitionStages.WithHost<ParentT>.withHost(String host)
fullName: com.microsoft.azure.management.network.ApplicationGatewayProbe.DefinitionStages.WithHost<ParentT>.withHost(String host)
type: Method
summary: "<p>Specifies the host name to send the probe to. </p>"
syntax:
content: public WithPath<ParentT> withHost(String host)
parameters:
- id: host
type: "26831127"
description: <p>a host name </p>
return:
type: com.microsoft.azure.management.network.ApplicationGatewayProbe.DefinitionStages.WithPathda8a2936
description: <p>the next stage of the definition </p>
- uid: com.microsoft.azure.management.network.ApplicationGatewayProbe.DefinitionStages.WithHost
parent: com.microsoft.azure.management.network
href: com.microsoft.azure.management.network.ApplicationGatewayProbe.DefinitionStages.WithHost.yml
name: ApplicationGatewayProbe.DefinitionStages.WithHost<ParentT>
nameWithType: ApplicationGatewayProbe.DefinitionStages.WithHost<ParentT>
fullName: com.microsoft.azure.management.network.ApplicationGatewayProbe.DefinitionStages.WithHost<ParentT>
type: Interface
summary: "<p>Stage of an application gateway probe definition allowing to specify the host to send the probe to. </p>"
syntax:
content: public interface WithHost<ParentT>
typeParameters:
- id: ParentT
type: 18a275b1
description: <p>the stage of the parent application gateway definition to return to after attaching this definition </p>
- uid: com.microsoft.azure.management.network.ApplicationGatewayProbe.Definition
parent: com.microsoft.azure.management.network
href: com.microsoft.azure.management.network.ApplicationGatewayProbe.Definition.yml
name: ApplicationGatewayProbe.Definition<ParentT>
nameWithType: ApplicationGatewayProbe.Definition<ParentT>
fullName: com.microsoft.azure.management.network.ApplicationGatewayProbe.Definition<ParentT>
type: Interface
summary: "<p>The entirety of an application gateway probe definition. </p>"
syntax:
content: public interface Definition<ParentT>
typeParameters:
- id: ParentT
type: 18a275b1
description: <p>the stage of the parent application gateway definition to return to after attaching this definition </p>
|
docs-ref-autogen/com.microsoft.azure.management.network.ApplicationGatewayProbe.DefinitionStages.Blank.yml
|
title: Dokumentation zu Visual Studio-Abonnements
summary: 'Visual Studio-Abonnements bieten u. a. folgende Leistungen: Zugriff auf Entwicklertools, Clouddienste, Software, Schulungen sowie Unterstützung für Ihre Entwicklungs- und Testanforderungen.'
brand: visual-studio
metadata:
title: Visual Studio-Abonnements
description: 'Visual Studio-Abonnements sind eine umfassende Sammlung von Ressourcen zum Erstellen, Bereitstellen und Verwalten großartiger Anwendungen auf Ihren bevorzugten Plattformen und Geräten, einschließlich Windows, iOS, Android und Linux. Abonnements bieten u.a. folgende Vorteile: Zugriff auf Entwicklertools, Clouddienste, Software, Schulungen sowie Unterstützung für Ihre Entwicklungs- und Testanforderungen.'
ms.topic: hub-page
author: evanwindom
ms.author: lank
manager: lank
ms.date: 12/04/2019
ms.openlocfilehash: bde855a1550f304df07c22262d7cb07094b5bc9e
ms.sourcegitcommit: f8e3715c64255b476520bfa9267ceaf766bde3b0
ms.translationtype: HT
ms.contentlocale: de-DE
ms.lasthandoff: 03/21/2020
ms.locfileid: "74883832"
highlightedContent:
items:
- title: Neuerungen in Visual Studio-Abonnements
itemType: whats-new
url: ~/whats-new-in-subscriptions.md
- title: Verwenden des Abonnentenportals
itemType: how-to-guide
url: ~/using-the-subscriber-portal.md
- title: Verwenden des Administratorportals
itemType: how-to-guide
url: ~/using-admin-portal.md
conceptualContent:
title: Erste Schritte
items:
- title: Nutzen von Leistungen
links:
- text: Azure-Gutschrift
url: ~/vs-azure.md
itemType: get-started
- text: Softwaredownloads
url: ~/software-download-list.md
itemType: get-started
footerLink:
url: ~/vs-azure.md
text: Weitere Informationen
- title: Beliebte Ressourcen
links:
- text: Vergleich von Optionen und Preisen
url: https://visualstudio.microsoft.com/vs/pricing
itemType: overview
- text: Support für die Verwaltung von Visual Studio und Abonnements
url: https://visualstudio.microsoft.com/support/support-overview-vs
itemType: overview
- title: Zugriff auf das Abonnementportal
links:
- text: Abonnenten
url: https://my.visualstudio.com/
itemType: get-started
- text: Administratoren
url: https://manage.visualstudio.com/
itemType: get-started
tools:
title: Verfügbare Produkte
summary: Dies sind nur einige der Produkte, die über Visual Studio-Abonnements verfügbar sind
items:
- title: Azure
imageSrc: https://docs.microsoft.com/media/logos/logo_azure.svg
url: /azure/
- title: Visual Studio-IDE
imageSrc: https://docs.microsoft.com/media/logos/logo_vs-ide.svg
url: /visualstudio/windows/
- title: Azure DevOps Services
imageSrc: https://docs.microsoft.com/media/logos/logo_vs-team-services.svg
url: /azure/devops/
- title: Visual Studio Code
imageSrc: https://docs.microsoft.com/media/logos/logo_vs-code.svg
url: https://code.visualstudio.com/docs
- title: Visual Studio für Mac
imageSrc: https://docs.microsoft.com/media/logos/logo_vs-mac.svg
url: /visualstudio/mac/
- title: Visual Studio App Center
imageSrc: https://docs.microsoft.com/media/logos/logo_vs-mobile-center.svg
url: /appcenter/
additionalContent:
sections:
- title: Details zur Leistung
summary: Ihr Abonnement kann folgende Leistungen umfassen
items:
- title: Entwicklertools
links:
- text: Visual Studio
url: ~/vs-ide-benefit.md
note: (einschließlich Visual Studio für Mac)
- title: Clouddienste
links:
- text: Azure Dev/Test
url: ~/vs-azure-payg.md
- text: Visual Studio App Center
url: ~/vs-visual-studio-app-center.md
- text: Azure DevOps Server und CAL
url: ~/vs-vsts.md
- text: Office 365-Entwicklerabonnement
url: ~/vs-office-dev.md
- text: Windows-Entwicklerkonto
url: ~/vs-windows-dev.md
- title: Schulungen und Support
links:
- text: LinkedIn Premium
url: ~/vs-linkedin-learning.md
note: (geschäftlich, technisch und kreativ)
- text: Pluralsight
url: ~/vs-pluralsight.md
note: (Cloud, Design, mobile Lösungen und Daten)
- text: Data Camp
url: ~/vs-datacamp.md
note: (Data Science und künstliche Intelligenz)
- text: Professioneller technischer Support von Microsoft bei Incidents
url: ~/vs-tech-support.md
- text: Azure-Community
url: ~/vs-azure-community.md
|
subscriptions/index.yml
|
variables:
autogen: |
export NOCONFIGURE=1;
if [ -x %{conf-cmd} ]; then true;
elif [ -x %{conf-root}/autogen ]; then %{conf-root}/autogen;
elif [ -x %{conf-root}/autogen.sh ]; then %{conf-root}/autogen.sh;
elif [ -x %{conf-root}/bootstrap ]; then %{conf-root}/bootstrap;
elif [ -x %{conf-root}/bootstrap.sh ]; then %{conf-root}/bootstrap.sh;
else autoreconf -ivf %{conf-root};
fi
# Project-wide extra arguments to be passed to `configure`
conf-global: ''
# Element-specific extra arguments to be passed to `configure`.
conf-local: ''
# For backwards compatibility only, do not use.
conf-extra: ''
conf-cmd: "%{conf-root}/configure"
conf-args: |
--prefix=%{prefix} \
--exec-prefix=%{exec_prefix} \
--bindir=%{bindir} \
--sbindir=%{sbindir} \
--sysconfdir=%{sysconfdir} \
--datadir=%{datadir} \
--includedir=%{includedir} \
--libdir=%{libdir} \
--libexecdir=%{libexecdir} \
--localstatedir=%{localstatedir} \
--sharedstatedir=%{sharedstatedir} \
--mandir=%{mandir} \
--infodir=%{infodir} %{conf-extra} %{conf-global} %{conf-local}
configure: |
%{conf-cmd} %{conf-args}
make: make
make-install: make -j1 DESTDIR="%{install-root}" install
# Set this if the sources cannot handle parallelization.
#
# notparallel: True
# Automatically remove libtool archive files
#
# Set remove-libtool-modules to "true" to remove .la files for
# modules intended to be opened with lt_dlopen()
#
# Set remove-libtool-libraries to "true" to remove .la files for
# libraries
#
# Value must be "true" or "false"
remove-libtool-modules: "false"
remove-libtool-libraries: "false"
delete-libtool-archives: |
if %{remove-libtool-modules} || %{remove-libtool-libraries}; then
find "%{install-root}" -name "*.la" -print0 | while read -d '' -r file; do
if grep '^shouldnotlink=yes$' "${file}" &>/dev/null; then
if %{remove-libtool-modules}; then
echo "Removing ${file}."
rm "${file}"
else
echo "Not removing ${file}."
fi
else
if %{remove-libtool-libraries}; then
echo "Removing ${file}."
rm "${file}"
else
echo "Not removing ${file}."
fi
fi
done
fi
config:
# Commands for configuring the software
#
configure-commands:
- |
%{autogen}
- |
%{configure}
# Commands for building the software
#
build-commands:
- |
%{make}
# Commands for installing the software into a
# destination folder
#
install-commands:
- |
%{make-install}
- |
%{delete-libtool-archives}
# Commands for stripping debugging information out of
# installed binaries
#
strip-commands:
- |
%{strip-binaries}
# Use max-jobs CPUs for building and enable verbosity
environment:
MAKEFLAGS: -j%{max-jobs}
V: 1
# And don't consider MAKEFLAGS or V as something which may
# affect build output.
environment-nocache:
- MAKEFLAGS
- V
|
src/buildstream/plugins/elements/autotools.yaml
|
name: Verify Examples
on:
push:
branches:
- main
pull_request:
schedule:
- cron: "0 0 * * 0"
defaults:
run:
shell: bash
env:
PUB_ENVIRONMENT: bot.github
jobs:
verify_examples:
runs-on: ubuntu-latest
strategy:
matrix:
example_directory:
- examples/hello
- examples/json
- examples/raw_cloudevent
- examples/fullstack/backend
defaults:
run:
working-directory: ${{ matrix.example_directory }}
steps:
- name: Cache Pub hosted dependencies
uses: actions/cache@v2
with:
path: "~/.pub-cache/hosted"
key: "os:ubuntu-latest;pub-cache-hosted;dart:dev;packages:${{ matrix.example_directory }}"
restore-keys: |
os:ubuntu-latest;pub-cache-hosted;dart:dev
os:ubuntu-latest;pub-cache-hosted
os:ubuntu-latest
- uses: dart-lang/setup-dart@v1.2
with:
sdk: stable
- uses: actions/checkout@v2
- run: dart --version
- id: pub_get
run: dart pub get
- if: "always() && steps.pub_get.conclusion == 'success'"
run: dart format --output=none --set-exit-if-changed .
- if: "always() && steps.pub_get.conclusion == 'success'"
run: dart analyze --fatal-infos .
- if: "always() && steps.pub_get.conclusion == 'success'"
run: dart test
- if: "always() && steps.pub_get.conclusion == 'success'"
run: |
dart run build_runner build --delete-conflicting-outputs
export DIFF_OUTPUT="$(git diff)"
if [[ $DIFF_OUTPUT ]]; then
echo "::error::Build needs to be re-run!"
echo "::group::git diff"
echo "$DIFF_OUTPUT"
echo "::endgroup::"
exit 1
else
echo "Build output is all good!"
fi
notify:
name: Notify failure
runs-on: ubuntu-latest
if: "(github.event_name == 'push' || github.event_name == 'schedule') && failure()"
steps:
- run: |
          curl -H "Content-Type: application/json" -X POST -d \
            "{\"text\":\"Build failed! ${GITHUB_SERVER_URL}/${GITHUB_REPOSITORY}/actions/runs/${GITHUB_RUN_ID}\"}" \
            "${CHAT_WEBHOOK_URL}"
env:
CHAT_WEBHOOK_URL: "${{ secrets.CHAT_WEBHOOK_URL }}"
needs:
- verify_examples
|
.github/workflows/verify_examples.yml
|
---
result: FAILURE
failure_category: code
failure_cause: 'build Licensing: windows; NetFetcher rust: unix'
timestamp: 2016-05-24 22:19:09 UTC
url: http://manhattan.ci.chef.co/job/chefdk-build/482/
trigger_url: http://manhattan.ci.chef.co/job/chefdk-trigger-ad_hoc/336/
duration: 26m30s
runs:
windows-2008r2-i386:
result: FAILURE
failure_category: code
failure_cause: build Licensing
failed_in:
step: build Licensing
url: http://manhattan.ci.chef.co/job/chefdk-build/architecture=i386,platform=windows-2008r2,project=chefdk,role=builder/482/
duration: 13m41s
debian-6:
result: FAILURE
failure_category: code
failure_cause: NetFetcher rust
failed_in:
step: NetFetcher rust
url: http://manhattan.ci.chef.co/job/chefdk-build/architecture=x86_64,platform=debian-6,project=chefdk,role=builder/482/
duration: 1m25s
el-6:
result: FAILURE
failure_category: code
failure_cause: NetFetcher rust
failed_in:
step: NetFetcher rust
url: http://manhattan.ci.chef.co/job/chefdk-build/architecture=x86_64,platform=el-6,project=chefdk,role=builder/482/
duration: 1m38s
el-7:
result: FAILURE
failure_category: code
failure_cause: NetFetcher rust
failed_in:
step: NetFetcher rust
url: http://manhattan.ci.chef.co/job/chefdk-build/architecture=x86_64,platform=el-7,project=chefdk,role=builder/482/
duration: 1m16s
mac_os_x-10.9:
result: FAILURE
failure_category: code
failure_cause: NetFetcher rust
failed_in:
step: NetFetcher rust
url: http://manhattan.ci.chef.co/job/chefdk-build/architecture=x86_64,platform=mac_os_x-10.9,project=chefdk,role=builder/482/
duration: 58s
ubuntu-12.04:
result: FAILURE
failure_category: code
failure_cause: NetFetcher rust
failed_in:
step: NetFetcher rust
url: http://manhattan.ci.chef.co/job/chefdk-build/architecture=x86_64,platform=ubuntu-12.04,project=chefdk,role=builder/482/
duration: 1m14s
|
reports/stages/manhattan.ci.chef.co/job/chefdk-build/482.yaml
|
os: linux
dist: xenial
language: python
env:
global:
# Doctr deploy key for NSLS-II/NSLS-II.github.io
    - secure: "<KEY>"
# Doctr deploy key for bluesky/bluesky.github.io
- secure: "<KEY>="
- MPLBACKEND: Qt5Agg
cache:
directories:
- $HOME/.cache/pip
- $HOME/.cache/matplotlib
addons:
apt:
packages:
- qtbase5-dev
jobs:
fast_finish: true
include:
- os: linux
python: 3.7
env: FLAKE_8=1
- os: linux
python: 3.6
env: PUBLISH_DOCS=1
- os: linux
python: 3.7
- os: linux
python: 3.8
- os: linux
python: 3.8
env: OPHYD_MASTER=true
# - os: osx
# language: generic
# env: PYTHON=3.6
# - os: osx
# language: generic
# env: PYTHON=3.7
- os: linux
python: nightly
allow_failures:
- python: nightly
before_install:
- |
if [ "$TRAVIS_OS_NAME" == "osx" ]; then
pushd ${HOME}
wget https://repo.anaconda.com/miniconda/Miniconda3-latest-MacOSX-x86_64.sh
bash Miniconda3-latest-MacOSX-x86_64.sh -b -p ${HOME}/conda
rm -fv Miniconda*.sh
popd
export PATH=${HOME}/conda/bin:$PATH
conda create -n py${PYTHON} python=${PYTHON} -y
source activate py${PYTHON}
conda env list
conda info
conda list
fi
install:
- if [ "$FLAKE_8" ]; then
pip install flake8;
flake8 .;
let res=$?;
echo "The project code was verified by 'flake8'. Exit code ($res).";
exit $res;
fi
- env | sort -u
- export GIT_FULL_HASH=`git rev-parse HEAD`
- pip install --upgrade pip
- pip install --upgrade cython
- pip install --upgrade numpy
- pip install --quiet --upgrade --global-option='--without-libyaml' pyyaml
- pip install -r requirements.txt
- pip install -r requirements-dev.txt
# Install ophyd master branch if build settings indicate to do so.
- |
if [ "$OPHYD_MASTER" = true ] ; then
pip install -U git+https://github.com/bluesky/ophyd@master
fi
- pip install codecov
- python setup.py install
# Need to clean the python build directory (and other cruft) or pytest is
# going to find the build directory and get confused why there are two sets
# of every test file
- git clean -xfd
# make sure the sqlite file exists to avoid race conditions
- python -c "from bluesky.utils import get_history; get_history()"
- pip list
script:
# ensure that callbacks import without matplotlib
- python -c 'import sys, bluesky.callbacks; assert "matplotlib" not in sys.modules'
- coverage run -m pytest -v
- coverage report -m
- set -e
- make -C doc html
# Publish docs.
- |
if [ $PUBLISH_DOCS ]; then
# Publish docs.
doctr deploy --deploy-repo NSLS-II/NSLS-II.github.io --deploy-branch-name master bluesky;
doctr deploy --deploy-repo bluesky/bluesky.github.io --deploy-branch-name master bluesky;
fi
after_success:
- codecov
|
.travis.yml
|
AF: Affganistan
AL: Albania
DZ: Algeria
AD: Andorra
AO: Angola
AI: Anguilla
AQ: Antarctica
AG: 'Antigwa a Barbuda'
AM: Armenia
AW: Aruba
AU: Awstralia
AT: Awstria
AZ: Azerbaijan
BH: Bahrain
BD: Bangladesh
BB: Barbados
BY: Belarws
BZ: Belize
BJ: Benin
BM: Bermwda
BT: Bhwtan
BO: Bolifia
BA: 'Bosnia a Herzegovina'
BW: Botswana
BR: Brasil
BN: Brunei
BF: 'Burkina Faso'
BI: Burundi
BG: Bwlgaria
NC: 'Caledonia Newydd'
KH: Cambodia
CA: Canada
CV: 'Cape Verde'
KE: Cenia
TD: Chad
CL: Chile
KG: Cirgistan
CU: Ciwba
CO: Colombia
KM: Comoros
CG: Congo
CR: 'Costa Rica'
KW: Coweit
HR: Croatia
CY: Cyprus
CI: 'Côte d’Ivoire'
ZA: 'De Affrica'
KR: 'De Corea'
DK: Denmarc
DJ: Djibouti
DM: Dominica
EC: Ecwador
SV: 'El Salfador'
AE: 'Emiraethau Arabaidd Unedig'
ER: Eritrea
EE: Estonia
ET: Ethiopia
FR: Ffrainc
VN: Fietnam
FJ: Fiji
GA: Gabon
GM: Gambia
GE: Georgia
GH: Ghana
GF: 'Giana Ffrengig'
GI: Gibraltar
GN: Gini
GQ: 'Gini Gyhydeddol'
KP: 'Gogledd Corea'
EH: 'Gorllewin Sahara'
GD: Grenada
GP: Guadeloupe
GU: Guam
GT: Guatemala
GW: Guinea-Bissau
GY: Guyana
CF: 'Gweriniaeth Canol Affrica'
CD: 'Gweriniaeth Ddemocrataidd y Congo'
CZ: 'Gweriniaeth Tsiec'
BE: 'Gwlad Belg'
GR: 'Gwlad Groeg'
PL: 'Gwlad Pwyl'
TH: 'Gwlad Thai'
IS: 'Gwlad yr Iâ'
HT: Haiti
HN: Hondwras
HK: 'Hong Kong S.A.R., Tseina'
HU: Hwngari
IN: India
ID: Indonesia
IQ: Irac
IR: Iran
IL: Israel
IE: Iwerddon
JM: Jamaica
KZ: Kazakhstan
KI: Kiribati
LA: Laos
LV: Latfia
LS: Lesotho
LB: Libanus
LR: Liberia
LY: Libia
LI: Liechtenstein
LT: Lithwania
LU: Lwcsembwrg
MO: 'Macao S.A.R., Tseina'
MK: Macedonia
MG: Madagascar
MW: Malawi
MY: Malaysia
MV: Maldives
ML: Mali
MT: Malta
MQ: Martinique
MU: Mawrisiws
MR: Mawritania
YT: Mayotte
MX: Mecsico
FM: Micronesia
MD: Moldofa
MC: Monaco
MN: Mongolia
MS: Montserrat
MA: Moroco
MZ: Mozambique
MM: Myanmar
UM: 'Mân Ynysoedd Pellenig yr Unol Daleithiau'
NA: Namibia
NR: Nawrw
NP: Nepal
NI: Nicaragwa
NE: Niger
NG: Nigeria
NU: Niue
NO: Norwy
OM: Oman
PK: Pacistan
PW: Palau
PA: Panama
PG: 'Pap<NAME>'
PY: Paraguay
PE: Perw
PH: Philipinau
PN: Pitcairn
PF: 'Polynesia Ffrainc'
PT: Portiwgal
GB: '<NAME>'
PR: 'Puerto Rico'
QA: Qatar
RW: Rwanda
RO: Rwmania
RU: Rwsia
RE: Réunion
SH: 'S<NAME>'
KN: 'Saint Kitts a Nevis'
LC: 'Saint Lucia'
PM: 'Saint Pierre a Miquelon'
VC: 'Saint Vincent a’r Grenadines'
ZM: Sambia
WS: Samoa
AS: 'Samoa Americanaidd'
SM: 'San Marino'
ST: 'Sao Tome a Principe'
SA: Sawdi-Arabia
ES: Sbaen
NZ: 'Seland Newydd'
SN: Senegal
SC: Seychelles
JP: Siapan
SL: 'Sierra Leone'
ZW: Simbabwe
SG: Singapore
SK: Slofacia
SI: Slofenia
SO: Somalia
LK: 'Sri Lanka'
SJ: 'Svalbard a Jan Mayen'
SZ: Swaziland
SE: Sweden
SR: Swrinam
SY: Syria
TW: Taiwan
TJ: Tajicistan
TZ: Tansanïa
TL: Timor-Leste
IO: 'Tiriogaeth Cefnfor India Prydain'
PS: 'Tiriogaeth Palesteina'
TF: 'Tiriogaethau Ffrengig y De'
TN: Tiwnisia
TG: Togo
TK: Tokelau
TO: Tonga
TT: 'Trinidad a Thobago'
CN: Tseina
TV: Twfalw
TR: Twrci
TM: Tyrcmenistan
UG: Uganda
UY: Uruguay
VU: Vanuatu
VE: Venezuela
WF: 'Wallis a Futuna'
UA: Wcráin
UZ: Wsbecistan
BS: 'Y Bahamas'
CM: 'Y Camerŵn'
VA: 'Y Fatican'
FI: 'Y Ffindir'
SD: 'Y Swdan'
CH: 'Y Swistir'
DO: 'Y Weriniaeth Ddominicaidd'
YE: Yemen
BV: 'Ynys Bouvet'
HM: 'Ynys Heard ac Ynysoedd McDonald'
IM: 'Ynys Manaw'
NF: 'Ynys Norfolk'
CX: 'Ynys y Nadolig'
AX: 'Ynysoedd Aland'
VI: 'Ynysoedd Americanaidd y Wyryf'
AN: 'Ynysoedd Caribî yr Iseldiroedd'
KY: 'Ynysoedd Cayman'
CC: 'Ynysoedd Cocos (Keeling)'
CK: 'Ynysoedd Cook'
GS: 'Ynysoedd De Georgia a De Sandwich'
FO: 'Ynysoedd Ffaröe'
MP: 'Ynysoedd Gogledd Mariana'
MH: 'Ynysoedd Marshall'
VG: 'Ynysoedd Prydeinig y Wyryf'
SB: 'Ynysoedd Solomon'
TC: 'Ynysoedd Turks a Caicos'
FK: 'Ynysoedd y Falkland'
EG: 'Yr Aifft'
DE: 'Yr Almaen'
AR: 'Yr Ariannin'
IT: 'Yr Eidal'
JO: 'Yr Iorddonen'
NL: 'Yr Iseldiroedd'
US: 'Yr Unol Daleithiau'
GL: 'Yr Ynys Las'
|
country/cldr/cy_GB/country.yaml
|
lv:
welcome:
there_are_no_users: Vēl nav neviena lietotāja, līdz ar to mēs Jums izveidosim pirmo.
remember_admin_location: "Atcerieties, Jūsu Refinery administrācijas panelis atrodas šeit:"
lets_start: Tātad turpināsim, lai Jūs varētu pieslēgties...
continue: Turpināt...
admin:
menu:
reorder_menu: Pārkārtot izvēlni
reorder_menu_done: Esmu beidzis pārkārtot izvēlni
dialogs:
show:
save: Saglabāt
cancel: Atcelt
refinery:
crudify:
      created: '%{what} tika veiksmīgi izveidots.'
      updated: '%{what} tika veiksmīgi atjaunots.'
      destroyed: '%{what} tika veiksmīgi izdzēsts.'
shared:
site_bar:
log_out: Iziet
switch_to_your_website: Pārslēgties uz Jūsu saitu
switch_to_your_website_editor: Pārslēgties uz Jūsu saita administrācijas paneli
admin:
continue_editing:
save_and_continue_editing: Saglabāt un turpināt rediģēt
form_actions:
save: Saglabāt
cancel: Atcelt
cancel_lose_changes: Atceļot tiks zaudētas visas tekošās izmaiņas, kas šeit tika veiktas
delete: Dzēst
previous: Iepriekšējā
next: Nākamā
close: Aizvērt
image_picker:
none_selected: Pašlaik nav izvēlēta neviena %{what}. Lūdzu, spied šeit, lai pievienotu.
remove_current: Dzēst pašreizējo %{what}
change: Spied šeit, lai izvēlētos bildi
show: Rādīt
resource_picker:
download_current: Lejuplādēt pašreizējo %{what}
opens_in_new_window: Atvērsies jaunā logā
remove_current: Dzēst pašreizējo %{what}
search:
button_text: Meklēt
results_for: Meklēšanas rezultāti pēc '%{query}'
no_results: Atvainojiet, nekas netika atrasts
delete:
message: Vai tiešām vēlaties dzēst '%{title}'?
error_messages:
problems_in_following_fields: Problēmas sekojošos ievades laukos
help: palīdzība
message:
close: Aizvērt
close_this_message: Aizvērt šo paziņojumu
draft_page_message:
not_live: Šī lapa NAV publiskai apskatei.
footer:
copyright: Visas tiesības paturētas ©
|
vendor/refinerycms/core/config/locales/lv.yml
|
- name: "Random - 3"
date: "7 April 2020"
description: |
embed_url: ""
links:
- label: "Sketch"
url: "/assets/img/sketches/23.jpg"
- name: "Random - 2"
date: "7 April 2020"
description: |
embed_url: ""
links:
- label: "Sketch"
url: "/assets/img/sketches/22.jpg"
- name: "Random - 1"
date: "7 April 2020"
description: |
embed_url: ""
links:
- label: "Sketch"
url: "/assets/img/sketches/21.jpg"
- name: "Sun(set)(rise) ! :)"
date: "18 Mar 2018"
description: |
embed_url: ""
links:
- label: "Sketch"
url: "/assets/img/sketches/20.jpg"
- name: "Urban Side"
date: "06 Mar 2018"
description: |
embed_url: ""
links:
- label: "Sketch"
url: "/assets/img/sketches/19.jpg"
- name: "Light Yagami and L"
date: "04 Mar 2018"
description: |
embed_url: ""
links:
- label: "Sketch"
url: "/assets/img/sketches/18.jpg"
- name: "Just an Abstract"
date: "18 Feb 2018"
description: |
embed_url: ""
links:
- label: "Sketch"
url: "/assets/img/sketches/17.jpg"
- name: "Just another Landscape Scribble! :)"
date: "11 Feb 2018"
description: |
embed_url: ""
links:
- label: "Sketch"
url: "/assets/img/sketches/16.jpg"
- name: "Fall"
date: "26 Jan 2018"
description: |
embed_url: ""
links:
- label: "Sketch"
url: "/assets/img/sketches/15.jpg"
- name: "Rubik's Cube Cheat! :P"
date: "06 May 2017"
description: |
embed_url: ""
links:
- label: "Sketch"
url: "/assets/img/sketches/14.jpg"
- name: "Nature's Call"
date: "07 Nov 2016"
description: |
embed_url: ""
links:
- label: "Sketch"
url: "/assets/img/sketches/13.jpg"
- name: "Imaginary Black Hole :)"
date: "05 Sep 2016"
description: |
embed_url: ""
links:
- label: "Sketch"
url: "/assets/img/sketches/12.jpg"
- name: "Snowy Landscape"
date: "12 Jul 2016"
description: |
embed_url: ""
links:
- label: "Sketch"
url: "/assets/img/sketches/11.jpg"
- name: "Beach Watching :)"
date: "02 Jul 2016"
description: |
embed_url: ""
links:
- label: "Sketch"
url: "/assets/img/sketches/10.jpg"
- name: "Sleeeeeepy MONDAY! (-_-) zZZZ!!"
date: "06 Jun 2016"
description: |
embed_url: ""
links:
- label: "Sketch"
url: "/assets/img/sketches/9.jpg"
- name: "Yachting! :)"
date: "29 May 2016"
description: |
embed_url: ""
links:
- label: "Sketch"
url: "/assets/img/sketches/8.jpg"
- name: "Country Side"
date: "16 Jan 2016"
description: |
embed_url: ""
links:
- label: "Sketch"
url: "/assets/img/sketches/7.jpg"
- name: "Imaginary Aurora Borealis! :)"
date: "10 Nov 2015"
description: |
embed_url: ""
links:
- label: "Sketch"
url: "/assets/img/sketches/6.jpg"
- name: "ET :D"
date: "27 Sep 2015"
description: |
embed_url: ""
links:
- label: "Sketch"
url: "/assets/img/sketches/5.jpg"
- name: "Oil Pastels - Abstract"
date: "06 Sep 2015"
description: |
embed_url: ""
links:
- label: "Sketch"
url: "/assets/img/sketches/4.jpg"
- name: "Drawing is always fun :D !"
date: "18 May 2015"
description: |
embed_url: ""
links:
- label: "Sketch"
url: "/assets/img/sketches/3.jpg"
- name: "Mixed Emotions"
date: "11 Feb 2015"
description: |
embed_url: ""
links:
- label: "Sketch"
url: "/assets/img/sketches/2.jpg"
- name: "Calvin and Hobbes :D"
date: "11 Jan 2015"
description: |
embed_url: ""
links:
- label: "Sketch"
url: "/assets/img/sketches/1.jpg"
|
_data/sketches.yml
|
---
# Defaults vars file for php-fpm role
# Packages management
php_fpm_apt_update_cache: True
php_fpm_apt_cache_valid_time: 3600
php_fpm_packages: "{{ _php_fpm_packages }}"
# Binaries
php_fpm_binary_name: "{{ _php_fpm_binary_name }}"
# Service management
php_fpm_disable_default_service: False
php_fpm_init_file_set_user: False
php_fpm_init_file_process_user: "{{ php_fpm_instance.fpm_pools[0].user }}"
php_fpm_init_file_timeout: 30
# Paths
php_fpm_binary_check_config_file_path: "{{ _php_fpm_binary_check_config_file_path }}"
php_fpm_binary_file_path: "{{ _php_fpm_binary_file_path }}"
php_fpm_config_base_path: "{{ _php_fpm_config_base_path }}"
php_fpm_init_base_path: '/etc/init.d'
php_fpm_init_file_path: "{{ _php_fpm_init_file_path }}"
php_fpm_log_base_path: "{{ _php_fpm_log_base_path }}"
php_fpm_run_base_path: "{{ _php_fpm_run_base_path }}"
php_fpm_systemd_base_path: "{{ _php_fpm_systemd_base_path | default('') }}"
# Files
php_fpm_error_log_file_path: "{{ _php_fpm_error_log_file_path }}"
php_fpm_pid_file_path: "{{ _php_fpm_pid_file_path }}"
# Permissions
php_fpm_config_owner: 'root'
php_fpm_config_group: 'root'
php_fpm_config_directories_mode: '0700'
php_fpm_config_files_mode: '0644'
php_fpm_init_files_mode: '0755'
# Instance management
php_fpm_instance:
name: 'fpm'
service_name: "{{ _php_fpm_service_name }}"
fpm_config:
- section: 'global'
option: 'pid'
value: "{{ php_fpm_pid_file_path }}"
- section: 'global'
option: 'error_log'
value: "{{ php_fpm_error_log_file_path }}"
- section: 'global'
option: 'include'
value: "{{ php_fpm_config_base_path }}/fpm/pool.d/*.conf"
fpm_pools:
- name: 'www'
user: 'www-data'
group: 'www-data'
listen: "/var/run/{{ _php_fpm_service_name }}.sock"
listen.owner: 'www-data'
listen.group: 'www-data'
chdir: '/'
php_config: []
php_modules: []
# php.ini configuration file configuration
php_fpm_shared_php_enabled: True
php_fpm_shared_php_force_unlink: False
php_fpm_shared_php_master_file: "{{ php_fpm_config_base_path }}/fpm/php.ini"
php_fpm_shared_php_master_confd: "{{ php_fpm_config_base_path }}/fpm/conf.d"
# Pools default settings
php_fpm_pool_defaults:
pm: dynamic
pm.max_children: 5
pm.start_servers: 2
pm.min_spare_servers: 1
pm.max_spare_servers: 3
pm.status_path: /status
# Logrotate configuration
php_fpm_manage_logrotate_config: True
php_fpm_logrotate_config:
filename: "/etc/logrotate.d/{{ php_fpm_instance.service_name }}"
log_pattern: "{{ php_fpm_error_log_file_path }}"
options:
- 'rotate 54'
- 'weekly'
- 'missingok'
- 'notifempty'
- 'compress'
- 'delaycompress'
- 'postrotate'
- "[ -r '{{ php_fpm_pid_file_path }}' ] && kill -USR1 $(cat '{{ php_fpm_pid_file_path }}') > /dev/null"
- 'endscript'
|
defaults/main.yml
|
name: Format check with Prettier
description: Check that files are formatted with Prettier.
inputs:
parser:
required: true
description: "Which parser to use."
paths:
required: false
description: "Paths to check target files."
prettier-version:
default: "2.5.1"
required: false
description: "The Prettier version."
cache:
default: "true"
required: false
description: "The flag to enable/disable cache. Specify false, if you wish disabling cache."
runs:
using: composite
steps:
- name: Create cache parameter
shell: bash
id: cache-param
if: ${{ inputs.cache == 'true' }}
env:
RUNNER_OS: ${{ runner.os }}
RUNNER_ARCH: ${{ runner.arch }}
PRETTIER_VERSION: ${{ inputs.prettier-version }}
run: |
cache_key="${RUNNER_OS}-${RUNNER_ARCH}-node-$(node --version)-$(date "+%Y-%m")-version-${PRETTIER_VERSION}"
echo "::set-output name=cache-key::${cache_key}"
echo "::set-output name=cache-dir::$(npm config get cache)"
- name: Cache
uses: actions/cache@v2
if: ${{ inputs.cache == 'true' }}
with:
path: ${{ steps.cache-param.outputs.cache-dir }}
key: ${{ steps.cache-param.outputs.cache-key }}
- name: Install
shell: bash
env:
PRETTIER_VERSION: ${{ inputs.prettier-version }}
run: |
npm install prettier@"${PRETTIER_VERSION}"
- name: Display version
shell: bash
run: |
npx prettier --version
- name: Create default paths from parser
shell: bash
id: default-paths
env:
PARSER: ${{ inputs.parser }}
PATHS: ${{ inputs.paths }}
run: |
if [[ "${PATHS}" != "" ]]; then
echo "::set-output name=paths::${PATHS}"
exit 0
fi
case "${PARSER}" in
"yaml" ) echo '::set-output name=paths::**/*.y*ml' ;;
"markdown" ) echo '::set-output name=paths::**/*.md' ;;
* ) echo 'error: undefined paths'; exit 1 ;;
esac
- name: Run prettier
shell: bash
env:
PARSER: ${{ inputs.parser }}
PATHS: ${{ steps.default-paths.outputs.paths }}
run: |
IFS=$' ' read -ra path_array <<< "$(printenv PATHS)"
npx prettier --parser="${PARSER}" --check "${path_array[@]}"
# https://docs.github.com/en/actions/creating-actions/metadata-syntax-for-github-actions#branding
branding:
icon: "align-left"
color: "green"
|
action.yml
|
name: TestKeys
uid: '@azure/arm-appplatform.TestKeys'
package: '@azure/arm-appplatform'
summary: Test keys payload
fullName: TestKeys
remarks: ''
isPreview: false
isDeprecated: false
type: interface
properties:
- name: enabled
uid: '@azure/arm-appplatform.TestKeys.enabled'
package: '@azure/arm-appplatform'
summary: Indicates whether the test endpoint feature enabled or not
fullName: enabled
remarks: ''
isPreview: false
isDeprecated: false
syntax:
content: 'enabled?: undefined | false | true'
return:
type: undefined | false | true
description: ''
- name: primaryKey
uid: '@azure/arm-appplatform.TestKeys.primaryKey'
package: '@azure/arm-appplatform'
summary: Primary key
fullName: primaryKey
remarks: ''
isPreview: false
isDeprecated: false
syntax:
content: 'primaryKey?: undefined | string'
return:
type: undefined | string
description: ''
- name: primaryTestEndpoint
uid: '@azure/arm-appplatform.TestKeys.primaryTestEndpoint'
package: '@azure/arm-appplatform'
summary: Primary test endpoint
fullName: primaryTestEndpoint
remarks: ''
isPreview: false
isDeprecated: false
syntax:
content: 'primaryTestEndpoint?: undefined | string'
return:
type: undefined | string
description: ''
- name: secondaryKey
uid: '@azure/arm-appplatform.TestKeys.secondaryKey'
package: '@azure/arm-appplatform'
summary: Secondary key
fullName: secondaryKey
remarks: ''
isPreview: false
isDeprecated: false
syntax:
content: 'secondaryKey?: undefined | string'
return:
type: undefined | string
description: ''
- name: secondaryTestEndpoint
uid: '@azure/arm-appplatform.TestKeys.secondaryTestEndpoint'
package: '@azure/arm-appplatform'
summary: Secondary test endpoint
fullName: secondaryTestEndpoint
remarks: ''
isPreview: false
isDeprecated: false
syntax:
content: 'secondaryTestEndpoint?: undefined | string'
return:
type: undefined | string
description: ''
|
docs-ref-autogen/@azure/arm-appplatform/TestKeys.yml
|
pool:
name: Hosted macOS
variables:
- name: release
value: false
steps:
- task: ArchiveFiles@1
displayName: 'Archive source '
inputs:
rootFolder: '$(Build.SourcesDirectory)'
includeRootFolder: false
archiveType: tar
archiveFile: '$(Build.BinariesDirectory)/source.tar.gz'
- task: PublishBuildArtifacts@1
displayName: 'Publish Artifact: source'
inputs:
PathtoPublish: '$(Build.BinariesDirectory)/source.tar.gz'
ArtifactName: source
- task: NodeTool@0
displayName: 'Use Node 14.x'
inputs:
versionSpec: 14.X
- task: Npm@1
displayName: 'Use npm 6.14.8'
inputs:
command: custom
verbose: false
customCommand: 'install -g npm@6.14.8'
- task: Npm@1
displayName: 'npm install'
inputs:
verbose: false
- task: Npm@1
displayName: 'npm custom'
inputs:
command: custom
verbose: false
customCommand: 'install -g gulp-cli'
- task: gulp@0
displayName: 'gulp install'
inputs:
targets: install
enabled: false
- bash: |
export BUILDMACHINE=true
export VsMsSqlEnv=dev
node node_modules/gulp/bin/gulp.js build --gulpfile $(Build.SourcesDirectory)/gulpfile.js
displayName: 'gulp build (dev)'
condition: and(succeeded(), eq(variables['release'], false))
- bash: |
export BUILDMACHINE=true
export VsMsSqlEnv=production
node node_modules/gulp/bin/gulp.js build --gulpfile $(Build.SourcesDirectory)/gulpfile.js
displayName: 'gulp build (release)'
condition: and(succeeded(), eq(variables['release'], true))
- bash: 'gulp lint'
displayName: 'gulp lint'
- bash: 'npm install -g vsce'
displayName: 'npm install vsce'
- task: gulp@0
displayName: 'gulp package:offline'
inputs:
targets: 'package:offline'
- task: gulp@0
displayName: 'gulp package:online'
inputs:
targets: 'package:online'
- task: CopyFiles@2
displayName: 'Copy Files to: $(Build.ArtifactStagingDirectory)'
inputs:
SourceFolder: '$(Build.SourcesDirectory)'
Contents: '*.vsix'
TargetFolder: '$(Build.ArtifactStagingDirectory)'
- task: PublishBuildArtifacts@1
displayName: 'Publish Artifact: drop'
- script: |
brew update
brew install --cask visual-studio-code
displayName: 'Install VS Code for testing'
- task: gulp@0
displayName: 'gulp ext:install-service'
inputs:
targets: 'ext:install-service'
- task: gulp@0
displayName: 'gulp cover:jenkins'
inputs:
targets: 'cover:jenkins'
publishJUnitResults: true
testResultsFiles: '$(Build.SourcesDirectory)/test-reports/*.xml'
enabled: false
continueOnError: true
- task: PublishCodeCoverageResults@1
displayName: 'Publish code coverage from $(Build.SourcesDirectory)/coverage/cobertura-coverage.xml'
inputs:
codeCoverageTool: Cobertura
summaryFileLocation: '$(Build.SourcesDirectory)/coverage/cobertura-coverage.xml'
enabled: false
- task: ms.vss-governance-buildtask.governance-build-task-component-detection.ComponentGovernanceComponentDetection@0
displayName: 'Component Detection'
inputs:
failOnAlert: true
|
build/build.yml
|
{{$NODES_PER_NAMESPACE := 100}}
{{$PODS_PER_NODE := 30}}
#Variables
{{$totalPods := MultiplyInt $PODS_PER_NODE .Nodes}}
{{$namespaces := DivideInt (SubtractInt .Nodes 1) $NODES_PER_NAMESPACE | AddInt 1}}
{{$replicas := DivideInt $totalPods $namespaces}}
{{$totalPodsRounded := MultiplyInt $namespaces $replicas}}
#Test
name: density
automanagedNamespaces: {{$namespaces}}
tuningSets:
- name: Uniform100qps
qpsLoad:
qps: 100
- name: Uniform5qps
qpsLoad:
qps: 5
steps:
- measurements:
- Identifier: APIResponsiveness
Method: APIResponsiveness
Params:
action: reset
- Identifier: SchedulingMetrics
Method: SchedulingMetrics
Params:
action: reset
# Create saturation pods
- phases:
- namespaceRange:
min: 1
max: {{$namespaces}}
replicasPerNamespace: 1
tuningSet: Uniform100qps
objectBundle:
- basename: saturation-rc
objectTemplatePath: rc.yaml
templateFillMap:
Replicas: {{$replicas}}
PodKind: saturationPod
- measurements:
- Identifier: WaitForSaturationRunning
Method: WaitForRunningPods
Params:
desiredPodCount: {{$totalPodsRounded}}
labelSelector: kind = saturationPod
# timeout = (totalPodsRounded /20)s + 3m
timeout: {{DivideInt $totalPodsRounded 20 | AddInt 180}}s
- measurements:
- Identifier: PodStartupLatency
Method: PodStartupLatency
Params:
action: start
labelSelector: kind = latencyPod
- name: Creating saturation pods
# Create latency pods
- phases:
- namespaceRange:
min: 1
max: {{$namespaces}}
replicasPerNamespace: {{DivideInt .Nodes $namespaces}}
tuningSet: Uniform5qps
objectBundle:
- basename: latency-pod-rc
objectTemplatePath: rc.yaml
templateFillMap:
Replicas: 1
PodKind: latencyPod
- measurements:
- Identifier: WaitForLatencyPodRunning
Method: WaitForRunningPods
Params:
desiredPodCount: {{.Nodes}}
labelSelector: kind = latencyPod
timeout: 10m
- measurements:
- Identifier: PodStartupLatency
Method: PodStartupLatency
Params:
action: gather
- name: Creating latency pods
# Delete pods
- phases:
- namespaceRange:
min: 1
max: {{$namespaces}}
replicasPerNamespace: 0
tuningSet: Uniform100qps
objectBundle:
- basename: latency-pod-rc
objectTemplatePath: rc.yaml
- name: Deleting latancy pods
- phases:
- namespaceRange:
min: 1
max: {{$namespaces}}
replicasPerNamespace: 0
tuningSet: Uniform100qps
objectBundle:
- basename: saturation-rc
objectTemplatePath: rc.yaml
- name: Deleting saturation pods
- measurements:
- Identifier: APIResponsiveness
Method: APIResponsiveness
Params:
action: gather
- Identifier: SchedulingMetrics
Method: SchedulingMetrics
Params:
action: gather
|
clusterloader2/testing/density/config.yaml
|
name: client
on:
pull_request:
paths:
- '**/Cargo.toml'
- '**/*.rs'
- .github/workflows/client.yml
permissions:
contents: read
env:
CARGO_ACTION_FMT_VERSION: v0.1.3
CARGO_INCREMENTAL: 0
CARGO_NET_RETRY: 10
K3D_VERSION: v5.3.0
RUST_BACKTRACE: short
RUST_VERSION: 1.56.1
RUSTUP_MAX_RETRIES: 10
jobs:
local:
timeout-minutes: 10
runs-on: ubuntu-latest
steps:
# Build the examples
- name: Install rust
run: |
rm -rf $HOME/.cargo
curl --proto =https --tlsv1.3 -fLsSv https://sh.rustup.rs | sh -s -- -y --default-toolchain=${RUST_VERSION}
source $HOME/.cargo/env
echo "PATH=$PATH" >> $GITHUB_ENV
cargo version
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
- run: cargo build --package=kubert-examples --examples
# Setup a cluster
- run: curl --proto =https --tlsv1.3 -fLsSv https://raw.githubusercontent.com/k3d-io/k3d/${K3D_VERSION}/install.sh | bash
- run: k3d --version
- run: k3d cluster create --no-lb --k3s-arg '--no-deploy=local-storage,traefik,servicelb,metrics-server@server:*'
- run: kubectl version
# Run the example locally
- name: Run watch-pods
run: |
cargo run --package=kubert-examples --example=watch-pods -- \
--exit \
--log-level=debug
- name: Setup RBAC
run: |
kubectl create namespace kubert-test
kubectl create serviceaccount --namespace=kubert-test watch-pods
kubectl create clusterrole watch-pods --verb=get,list,watch --resource=pods
kubectl create clusterrolebinding watch-pods --clusterrole=watch-pods --serviceaccount=kubert-test:watch-pods
- name: Run watch-pods with impersonation
run: |
cargo run --package=kubert-examples --example=watch-pods -- \
--exit \
--log-level=debug \
--as=system:serviceaccount:kubert-test:watch-pods \
--kubeconfig=$HOME/.kube/config
in-cluster:
timeout-minutes: 10
runs-on: ubuntu-latest
steps:
# Setup a cluster
- run: curl --proto =https --tlsv1.3 -fLsSv https://raw.githubusercontent.com/k3d-io/k3d/${K3D_VERSION}/install.sh | bash
- run: k3d --version
- run: k3d cluster create --no-lb --k3s-arg "--no-deploy=local-storage,traefik,servicelb,metrics-server@server:*"
- run: kubectl version
- name: Setup RBAC
run: |
kubectl create namespace kubert-test
kubectl create serviceaccount --namespace=kubert-test watch-pods
kubectl create clusterrole watch-pods --verb=get,list,watch --resource=pods
kubectl create clusterrolebinding watch-pods --clusterrole=watch-pods --serviceaccount=kubert-test:watch-pods
# Build a docker image with the examples
- uses: actions/checkout@2541b1294d2704b0964813337f33b291d3f8596b
- uses: docker/setup-buildx-action@dc7b9719a96d48369863986a06765841d7ea23f6
- run: docker buildx build . -f examples/Dockerfile --tag kubert-examples:test --load
- run: k3d image import kubert-examples:test
# Run the example in-cluster
- name: Run kubectl run watch-pods
run: |
kubectl run watch-pods \
--attach \
--command \
--image=kubert-examples:test \
--image-pull-policy=Never \
--labels=olix0r.net/kubert-test=watch-pods \
--namespace=kubert-test \
--overrides='{"spec": {"serviceAccount": "watch-pods"}}' \
--quiet \
--restart=Never \
--rm \
-- \
watch-pods --exit --log-level=debug --selector=olix0r.net/kubert-test=watch-pods
|
.github/workflows/client.yml
|
nameWithType: MqttRejectedProtocolVersionException.MqttRejectedProtocolVersionException
type: constructor
members:
- fullName: com.microsoft.azure.sdk.iot.device.transport.mqtt.exceptions.MqttRejectedProtocolVersionException.MqttRejectedProtocolVersionException()
name: MqttRejectedProtocolVersionException()
nameWithType: MqttRejectedProtocolVersionException.MqttRejectedProtocolVersionException()
syntax: public MqttRejectedProtocolVersionException()
uid: com.microsoft.azure.sdk.iot.device.transport.mqtt.exceptions.MqttRejectedProtocolVersionException.MqttRejectedProtocolVersionException()
- fullName: com.microsoft.azure.sdk.iot.device.transport.mqtt.exceptions.MqttRejectedProtocolVersionException.MqttRejectedProtocolVersionException(String message)
name: MqttRejectedProtocolVersionException(String message)
nameWithType: MqttRejectedProtocolVersionException.MqttRejectedProtocolVersionException(String message)
parameters:
- name: message
type: <xref href="String?alt=String&text=String" data-throw-if-not-resolved="False"/>
syntax: public MqttRejectedProtocolVersionException(String message)
uid: com.microsoft.azure.sdk.iot.device.transport.mqtt.exceptions.MqttRejectedProtocolVersionException.MqttRejectedProtocolVersionException(String)
- fullName: com.microsoft.azure.sdk.iot.device.transport.mqtt.exceptions.MqttRejectedProtocolVersionException.MqttRejectedProtocolVersionException(String message, Throwable cause)
name: MqttRejectedProtocolVersionException(String message, Throwable cause)
nameWithType: MqttRejectedProtocolVersionException.MqttRejectedProtocolVersionException(String message, Throwable cause)
parameters:
- name: message
type: <xref href="String?alt=String&text=String" data-throw-if-not-resolved="False"/>
- name: cause
type: <xref href="Throwable?alt=Throwable&text=Throwable" data-throw-if-not-resolved="False"/>
syntax: public MqttRejectedProtocolVersionException(String message, Throwable cause)
uid: com.microsoft.azure.sdk.iot.device.transport.mqtt.exceptions.MqttRejectedProtocolVersionException.MqttRejectedProtocolVersionException(String,Throwable)
- fullName: com.microsoft.azure.sdk.iot.device.transport.mqtt.exceptions.MqttRejectedProtocolVersionException.MqttRejectedProtocolVersionException(Throwable cause)
name: MqttRejectedProtocolVersionException(Throwable cause)
nameWithType: MqttRejectedProtocolVersionException.MqttRejectedProtocolVersionException(Throwable cause)
parameters:
- name: cause
type: <xref href="Throwable?alt=Throwable&text=Throwable" data-throw-if-not-resolved="False"/>
syntax: public MqttRejectedProtocolVersionException(Throwable cause)
uid: com.microsoft.azure.sdk.iot.device.transport.mqtt.exceptions.MqttRejectedProtocolVersionException.MqttRejectedProtocolVersionException(Throwable)
uid: com.microsoft.azure.sdk.iot.device.transport.mqtt.exceptions.MqttRejectedProtocolVersionException.MqttRejectedProtocolVersionException*
fullName: com.microsoft.azure.sdk.iot.device.transport.mqtt.exceptions.MqttRejectedProtocolVersionException.MqttRejectedProtocolVersionException
name: MqttRejectedProtocolVersionException
package: com.microsoft.azure.sdk.iot.device.transport.mqtt.exceptions
metadata: {}
|
legacy/docs-ref-autogen/com.microsoft.azure.sdk.iot.device.transport.mqtt.exceptions.MqttRejectedProtocolVersionException.MqttRejectedProtocolVersionException.yml
|
version: 2.1
orbs:
crystal: manastech/crystal@1.0.0
commands:
shards-make-test:
steps:
- run:
name: git config
command: |
git config --global user.email "<EMAIL>"
git config --global user.name "<NAME>"
git config --global column.ui always
- crystal/version
- checkout
- run: shards install --ignore-crystal-version
- run: make
- run: make test
- run: crystal tool format --check src spec
with-brew-cache:
parameters:
steps:
type: steps
steps:
- restore_cache:
keys:
- brew-cache-v1-{{ .Branch }}
- brew-cache-v1-
- steps: <<parameters.steps>>
- save_cache:
key: brew-cache-v1-{{ .Branch }}-{{ epoch }}
paths:
- /usr/local/Homebrew
- ~/Library/Caches/Homebrew
jobs:
build-manpages:
docker:
- image: asciidoctor/docker-asciidoctor
steps:
- checkout
- run:
name: Build manpages
command: make manpages
- store_artifacts:
path: man
test:
docker:
- image: crystallang/crystal:latest
steps:
- run:
name: Install mercurial
command: apt-get update && apt-get install mercurial -y
- shards-make-test
test-on-osx:
macos:
xcode: 13.2.1
steps:
- with-brew-cache:
steps:
- run:
name: Install Crystal and Mercurial
command: brew install crystal mercurial
- shards-make-test
test-on-nightly:
docker:
- image: crystallang/crystal:nightly
steps:
- run:
name: Install mercurial
command: apt-get update && apt-get install mercurial -y
- shards-make-test
workflows:
version: 2
ci:
jobs:
- build-manpages
- test
- test-on-osx
- test-on-nightly
nightly:
triggers:
- schedule:
cron: '0 2 * * *'
filters:
branches:
only:
- master
jobs:
- test-on-nightly
|
.circleci/config.yml
|
name: Build/release
on:
push:
tags:
- 'v*'
jobs:
build_windows:
runs-on: windows-latest
steps:
- name: Check out Git repository
uses: actions/checkout@v1
- name: Install Node.js, NPM and Yarn
uses: actions/setup-node@v1
with:
node-version: 12
- name: Build app on Windows
env:
GH_TOKEN: ${{ secrets.github_token }}
run: |
npm install
npm run electron:windows
- name: Package artifacts
run: |
mkdir artifact-setup
mv release/*-setup.exe* artifact-setup/
- uses: actions/upload-artifact@master
name: Upload installer
with:
name: Windows
path: artifact-setup
build_linux:
runs-on: ubuntu-latest
steps:
- name: Check out Git repository
uses: actions/checkout@v1
- name: Install Node.js, NPM and Yarn
uses: actions/setup-node@v1
with:
node-version: 12
- name: Build app on Linux
env:
GH_TOKEN: ${{ secrets.github_token }}
run: |
npm install
npm run electron:linux
- name: Package artifacts
run: |
mkdir artifact-linux
mv release/*.deb artifact-linux/
- uses: actions/upload-artifact@master
name: Upload linux artifacts
with:
name: Linux
path: artifact-linux
build_macos:
runs-on: macOS-latest
steps:
- name: Check out Git repository
uses: actions/checkout@v1
- name: Install Node.js, NPM and Yarn
uses: actions/setup-node@v1
with:
node-version: 12
- name: Build app on macOS
env:
GH_TOKEN: ${{ secrets.github_token }}
run: |
npm install
npm run electron:mac
- name: Package artifacts
run: |
mkdir artifact-macos
mv release/*.pkg artifact-macos/
- uses: actions/upload-artifact@master
name: Upload macos artifacts
with:
name: MacOS
path: artifact-macos
release_all:
runs-on: ubuntu-latest
needs: [build_windows, build_linux, build_macos]
steps:
- name: Checkout
uses: actions/checkout@v2
- uses: actions/download-artifact@v1
name: Download windows artifact
with:
name: Windows
- uses: actions/download-artifact@v1
with:
name: Linux
- uses: actions/download-artifact@v1
with:
name: MacOS
- uses: ncipollo/release-action@v1
with:
artifacts: "Linux/*,Windows/*,MacOS/*"
bodyFile: "WHATSNEW.md"
token: ${{ secrets.GITHUB_TOKEN }}
|
.github/workflows/release.yml
|
image:
repository: tccr.io/truecharts/makemkv
tag: v1.21.3@sha256:f118ce074c75f8544913c1ed1f2354613e3a8838061aa7d44c323a52a811f23d
pullPolicy: IfNotPresent
securityContext:
readOnlyRootFilesystem: false
runAsNonRoot: false
podSecurityContext:
runAsUser: 0
runAsGroup: 0
secretEnv:
VNC_PASSWORD: ""
envFrom:
- configMapRef:
name: '{{ include "tc.common.names.fullname" . }}-makemkv'
makemkv:
# General Settings
KEEP_APP_RUNNING: false
# GUI Settings
DISPLAY_WIDTH: 1280
DISPLAY_HEIGHT: 768
# App Settings
MAKEMKV_KEY: "BETA"
AUTO_DISC_RIPPER_INTERVAL: 5
AUTO_DISC_RIPPER_BD_MODE: "mkv"
AUTO_DISC_RIPPER_MAKEMKV_PROFILE: ""
SECURE_CONNECTION: false
AUTO_DISC_RIPPER: false
AUTO_DISC_RIPPER_EJECT: false
AUTO_DISC_RIPPER_PARALLEL_RIP: false
AUTO_DISC_RIPPER_NO_GUI_PROGRESS: false
AUTO_DISC_RIPPER_FORCE_UNIQUE_OUTPUT_DIR: false
service:
main:
ports:
main:
port: 10180
targetPort: 5800
vnc:
enabled: true
ports:
vnc:
enabled: true
port: 10181
targetPort: 5900
configmap:
makemkv:
enabled: true
data:
KEEP_APP_RUNNING: "{{ ternary \"1\" \"0\" .Values.makemkv.KEEP_APP_RUNNING }}"
SECURE_CONNECTION: "{{ ternary \"1\" \"0\" .Values.makemkv.SECURE_CONNECTION }}"
DISPLAY_WIDTH: "{{ .Values.makemkv.DISPLAY_WIDTH }}"
DISPLAY_HEIGHT: "{{ .Values.makemkv.DISPLAY_HEIGHT }}"
MAKEMKV_KEY: "{{ .Values.makemkv.MAKEMKV_KEY }}"
AUTO_DISC_RIPPER_INTERVAL: "{{ .Values.makemkv.AUTO_DISC_RIPPER_INTERVAL }}"
AUTO_DISC_RIPPER_BD_MODE: "{{ .Values.makemkv.AUTO_DISC_RIPPER_BD_MODE }}"
AUTO_DISC_RIPPER_MAKEMKV_PROFILE: "{{ .Values.makemkv.AUTO_DISC_RIPPER_MAKEMKV_PROFILE }}"
AUTO_DISC_RIPPER: "{{ ternary \"1\" \"0\" .Values.makemkv.AUTO_DISC_RIPPER }}"
AUTO_DISC_RIPPER_EJECT: "{{ ternary \"1\" \"0\" .Values.makemkv.AUTO_DISC_RIPPER_EJECT }}"
AUTO_DISC_RIPPER_PARALLEL_RIP: "{{ ternary \"1\" \"0\" .Values.makemkv.AUTO_DISC_RIPPER_PARALLEL_RIP }}"
AUTO_DISC_RIPPER_NO_GUI_PROGRESS: "{{ ternary \"1\" \"0\" .Values.makemkv.AUTO_DISC_RIPPER_NO_GUI_PROGRESS }}"
AUTO_DISC_RIPPER_FORCE_UNIQUE_OUTPUT_DIR: "{{ ternary \"1\" \"0\" .Values.makemkv.AUTO_DISC_RIPPER_FORCE_UNIQUE_OUTPUT_DIR }}"
persistence:
config:
enabled: true
mountPath: "/config"
storage:
enabled: true
mountPath: "/storage"
output:
enabled: true
mountPath: "/output"
|
charts/stable/makemkv/values.yaml
|
name: Go
on:
push:
branches: [ master ]
jobs:
build:
name: Build
runs-on: ubuntu-20.04
steps:
- name: Set up Go 1.x
uses: actions/setup-go@v2
with:
go-version: ^1.13
id: go
- name: Check out code into the Go module directory
uses: actions/checkout@v2
- name: Get dependencies
run: go get -v -t -d ./...
- name: Install cross-compilers
run: |
sudo apt-get install -y \
binutils \
upx-ucl \
gcc-aarch64-linux-gnu binutils-aarch64-linux-gnu \
gcc-arm-linux-gnueabi binutils-arm-linux-gnueabi \
gcc-arm-linux-gnueabihf binutils-arm-linux-gnueabihf \
gcc-mingw-w64 binutils-mingw-w64-i686
- name: Build (linux/amd64)
env:
CGO_ENABLED: 1
GOOS: linux
GOARCH: amd64
run: |
go build -v -o ./bin/luxaudio_linux-amd64 ./cmd/luxaudio
strip ./bin/luxaudio_linux-amd64
upx -9 ./bin/luxaudio_linux-amd64
- name: Build (linux/aarch64)
env:
CGO_ENABLED: 1
GOOS: linux
GOARCH: arm64
CC: aarch64-linux-gnu-gcc
run: |
go build -v -o ./bin/luxaudio_linux-aarch64 ./cmd/luxaudio
aarch64-linux-gnu-strip ./bin/luxaudio_linux-aarch64
upx -9 ./bin/luxaudio_linux-aarch64
- name: Build (linux/armv7)
env:
CGO_ENABLED: 1
GOOS: linux
GOARCH: arm
GOARM: 7
CC: arm-linux-gnueabihf-gcc
run: |
go build -v -o ./bin/luxaudio_linux-armv7 ./cmd/luxaudio
arm-linux-gnueabihf-strip ./bin/luxaudio_linux-armv7
upx -9 ./bin/luxaudio_linux-armv7
- name: Build (linux/armv6)
env:
CGO_ENABLED: 1
GOOS: linux
GOARCH: arm
GOARM: 6
CC: arm-linux-gnueabi-gcc
run: |
go build -v -o ./bin/luxaudio_linux-armv6 ./cmd/luxaudio
arm-linux-gnueabihf-strip ./bin/luxaudio_linux-armv6
upx -9 ./bin/luxaudio_linux-armv6
- name: Build (windows/amd64)
env:
CGO_ENABLED: 1
GOOS: windows
GOARCH: amd64
CC: x86_64-w64-mingw32-gcc
run: |
go build -v -o ./bin/luxaudio_windows-amd64.exe ./cmd/luxaudio
x86_64-w64-mingw32-strip ./bin/luxaudio_windows-amd64.exe
upx -9 ./bin/luxaudio_windows-amd64.exe
- name: Create Release
id: create_release
uses: actions/create-release@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
tag_name: rc${{ github.run_number }}
release_name: Release rc${{ github.run_number }}
draft: false
prerelease: true
- name: Upload Release Asset
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.create_release.outputs.upload_url }}
asset_path: ./bin/luxaudio_linux-amd64
asset_name: luxaudio_linux-amd64
asset_content_type: application/x-elf
- name: Upload Release Asset
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.create_release.outputs.upload_url }}
asset_path: ./bin/luxaudio_linux-aarch64
asset_name: luxaudio_linux-aarch64
asset_content_type: application/x-elf
- name: Upload Release Asset
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.create_release.outputs.upload_url }}
asset_path: ./bin/luxaudio_linux-armv7
asset_name: luxaudio_linux-armv7
asset_content_type: application/x-elf
- name: Upload Release Asset
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.create_release.outputs.upload_url }}
asset_path: ./bin/luxaudio_linux-armv6
asset_name: luxaudio_linux-armv6
asset_content_type: application/x-elf
- name: Upload Release Asset
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.create_release.outputs.upload_url }}
asset_path: ./bin/luxaudio_windows-amd64.exe
asset_name: luxaudio_windows-amd64.exe
asset_content_type: application/vnd.microsoft.portable-executable
|
.github/workflows/go.yml
|
site_name: Arista Devops Community
site_author: Arista Ansible Team
site_description: A List of contribution in devops world
docs_dir: docs/
copyright: Copyright © 2019 - 2022 Arista Networks
repo_name: 'Arista Netdevops Community'
repo_url: https://github.com/aristanetworks/netdevops-examples
use_directory_urls: true
nav:
- Arista Netdevops Community: index.md
- Arista Networks: arista.md
- Arista EOS+: eosplus.md
- Existing Netdevops Examples: github.md
- About:
- Ansible Cloudvision: https://cvp.avd.sh
- Arista Validated Design: https://www.avd.sh
- Arista Automation Community: https://github.com/arista-netdevops-community
theme:
name: material
features:
- navigation.instant
- navigation.top
highlightjs: true
hljs_languages:
- yaml
- python
- shell
icon:
repo: fontawesome/brands/github
logo: fontawesome/solid/book
favicon: docs/_media/favicon.ico
font:
code: Fira Mono
language: en
include_search_page: false
search_index_only: true
palette:
- media: "(prefers-color-scheme: light)"
scheme: default
toggle:
icon: material/weather-sunny
name: Switch to dark mode
- media: "(prefers-color-scheme: dark)"
scheme: slate
toggle:
icon: material/weather-night
name: Switch to light mode
extra_css:
- docs/stylesheets/extra.material.css
- stylesheets/extra.css
extra_javascript:
- https://cdnjs.cloudflare.com/ajax/libs/tablesort/5.2.1/tablesort.min.js
- stylesheets/tables.js
- https://cdnjs.cloudflare.com/ajax/libs/highlight.js/10.7.2/highlight.min.js
- stylesheets/highlight.js
plugins:
- search:
lang: en
- git-revision-date-localized:
type: date
# Deactivated due to https://github.com/tikitu/jsmin/issues/33
# Issue in progress: https://github.com/byrnereese/mkdocs-minify-plugin/issues/15
# - minify:
# minify_html: true
# minify_js: true
markdown_extensions:
- mdx_truly_sane_lists
- smarty
- pymdownx.arithmatex
- pymdownx.betterem:
smart_enable: all
- pymdownx.caret
- pymdownx.critic
- pymdownx.details
- pymdownx.inlinehilite
- pymdownx.magiclink
- pymdownx.mark
- pymdownx.smartsymbols
- pymdownx.superfences
- pymdownx.tasklist:
custom_checkbox: true
- pymdownx.tilde
- fontawesome_markdown
- admonition
- codehilite:
guess_lang: true
- toc:
separator: "-"
# permalink: "#"
permalink: true
baselevel: 3
  - pymdownx.highlight
  # (duplicate pymdownx.superfences entry removed — already listed above)
- pymdownx.snippets:
base_path: 'ansible_collections/arista/cvp/'
|
mkdocs.yml
|
items:
- uid: com.microsoft.azure.management.sql.SqlDatabaseOperations.DefinitionStages.WithAllDifferentOptions
id: WithAllDifferentOptions
artifact: com.microsoft.azure:azure-mgmt-sql:1.33.1
parent: com.microsoft.azure.management.sql
langs:
- java
name: SqlDatabaseOperations.DefinitionStages.WithAllDifferentOptions
nameWithType: SqlDatabaseOperations.DefinitionStages.WithAllDifferentOptions
fullName: com.microsoft.azure.management.sql.SqlDatabaseOperations.DefinitionStages.WithAllDifferentOptions
type: Interface
package: com.microsoft.azure.management.sql
summary: The SQL database interface with all starting options for definition.
syntax:
content: public static interface SqlDatabaseOperations.DefinitionStages.WithAllDifferentOptions extends SqlDatabaseOperations.DefinitionStages.WithElasticPoolName, SqlDatabaseOperations.DefinitionStages.WithRestorableDroppedDatabase, SqlDatabaseOperations.DefinitionStages.WithImportFrom, SqlDatabaseOperations.DefinitionStages.WithRestorePointDatabase, SqlDatabaseOperations.DefinitionStages.WithSampleDatabase, SqlDatabaseOperations.DefinitionStages.WithSourceDatabaseId, SqlDatabaseOperations.DefinitionStages.WithCreateAllOptions
implements:
- com.microsoft.azure.management.sql.SqlDatabaseOperations.DefinitionStages.WithElasticPoolName
- com.microsoft.azure.management.sql.SqlDatabaseOperations.DefinitionStages.WithRestorableDroppedDatabase
- com.microsoft.azure.management.sql.SqlDatabaseOperations.DefinitionStages.WithImportFrom
- com.microsoft.azure.management.sql.SqlDatabaseOperations.DefinitionStages.WithRestorePointDatabase
- com.microsoft.azure.management.sql.SqlDatabaseOperations.DefinitionStages.WithSampleDatabase
- com.microsoft.azure.management.sql.SqlDatabaseOperations.DefinitionStages.WithSourceDatabaseId
- com.microsoft.azure.management.sql.SqlDatabaseOperations.DefinitionStages.WithCreateAllOptions
references:
- uid: com.microsoft.azure.management.sql.SqlDatabaseOperations.DefinitionStages.WithSourceDatabaseId
name: SqlDatabaseOperations.DefinitionStages.WithSourceDatabaseId
nameWithType: SqlDatabaseOperations.DefinitionStages.WithSourceDatabaseId
fullName: com.microsoft.azure.management.sql.SqlDatabaseOperations.DefinitionStages.WithSourceDatabaseId
- uid: com.microsoft.azure.management.sql.SqlDatabaseOperations.DefinitionStages.WithImportFrom
name: SqlDatabaseOperations.DefinitionStages.WithImportFrom
nameWithType: SqlDatabaseOperations.DefinitionStages.WithImportFrom
fullName: com.microsoft.azure.management.sql.SqlDatabaseOperations.DefinitionStages.WithImportFrom
- uid: com.microsoft.azure.management.sql.SqlDatabaseOperations.DefinitionStages.WithCreateAllOptions
name: SqlDatabaseOperations.DefinitionStages.WithCreateAllOptions
nameWithType: SqlDatabaseOperations.DefinitionStages.WithCreateAllOptions
fullName: com.microsoft.azure.management.sql.SqlDatabaseOperations.DefinitionStages.WithCreateAllOptions
- uid: com.microsoft.azure.management.sql.SqlDatabaseOperations.DefinitionStages.WithRestorePointDatabase
name: SqlDatabaseOperations.DefinitionStages.WithRestorePointDatabase
nameWithType: SqlDatabaseOperations.DefinitionStages.WithRestorePointDatabase
fullName: com.microsoft.azure.management.sql.SqlDatabaseOperations.DefinitionStages.WithRestorePointDatabase
- uid: com.microsoft.azure.management.sql.SqlDatabaseOperations.DefinitionStages.WithRestorableDroppedDatabase
name: SqlDatabaseOperations.DefinitionStages.WithRestorableDroppedDatabase
nameWithType: SqlDatabaseOperations.DefinitionStages.WithRestorableDroppedDatabase
fullName: com.microsoft.azure.management.sql.SqlDatabaseOperations.DefinitionStages.WithRestorableDroppedDatabase
- uid: com.microsoft.azure.management.sql.SqlDatabaseOperations.DefinitionStages.WithSampleDatabase
name: SqlDatabaseOperations.DefinitionStages.WithSampleDatabase
nameWithType: SqlDatabaseOperations.DefinitionStages.WithSampleDatabase
fullName: com.microsoft.azure.management.sql.SqlDatabaseOperations.DefinitionStages.WithSampleDatabase
- uid: com.microsoft.azure.management.sql.SqlDatabaseOperations.DefinitionStages.WithElasticPoolName
name: SqlDatabaseOperations.DefinitionStages.WithElasticPoolName
nameWithType: SqlDatabaseOperations.DefinitionStages.WithElasticPoolName
fullName: com.microsoft.azure.management.sql.SqlDatabaseOperations.DefinitionStages.WithElasticPoolName
|
docs-ref-autogen/com.microsoft.azure.management.sql.SqlDatabaseOperations.DefinitionStages.WithAllDifferentOptions.yml
|
name: Release
on:
push:
tags:
- 'release/*'
jobs:
package:
strategy:
matrix:
architecture: [amd64, arm64, arm]
include:
- architecture: amd64
target: x86_64-unknown-linux-musl
strip: strip
- architecture: arm64
target: aarch64-unknown-linux-musl
strip: aarch64-linux-gnu-strip
- architecture: arm
target: armv7-unknown-linux-musleabihf
strip: arm-linux-gnueabihf-strip
name: Package (${{ matrix.architecture }})
runs-on: ubuntu-latest
timeout-minutes: 40
steps:
- name: git co
uses: actions/checkout@v2
- name: meta
id: release-tag-meta
uses: ./.github/actions/release-tag-meta
with:
git-ref: ${{ github.ref }}
#- run: echo "${{ toJSON(steps.release-tag-meta) }}"
- name: package
env:
CARGO: cross
CARGO_RELEASE: "1"
CROSS_DOCKER_IN_DOCKER: true
PACKAGE_VERSION: ${{ steps.release-tag-meta.outputs.name }}
CARGO_TARGET: ${{ matrix.target }}
STRIP: ${{ matrix.strip }}
ARCH: ${{ matrix.architecture }}
RUST_BACKTRACE: 1
uses: ./.github/actions/package
with:
entrypoint: make
args: release
- name: upload artifacts
uses: actions/upload-artifact@v2
with:
name: ${{ matrix.architecture }}-artifacts
path: release/*
release:
needs: [package]
name: GitHub Release
runs-on: ubuntu-latest
timeout-minutes: 5
steps:
- name: git co
uses: actions/checkout@v2
- name: meta
id: release-tag-meta
uses: ./.github/actions/release-tag-meta
with:
git-ref: ${{ github.ref }}
- name: download artifacts
uses: actions/download-artifact@v2
with:
path: artifacts
- name: list artifacts
run: find artifacts -type f -ls
- name: release
uses: softprops/action-gh-release@affa18ef97bc9db20076945705aba8c516139abd
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
name: ${{ steps.release-tag-meta.outputs.name }}
files: artifacts/**/*
|
.github/workflows/release.yml
|
- name: lite
version: 3.5.6
- name: common-core-services
version: 3.5.9
- name: data-refinery
version: 3.5.8
- name: wkc-adm
version: 3.5.1602
- name: 0071-wkc-prereqs
version: 3.5.1602
images:
- name: wdp-db2
version: 3.1.70
gitLog: https://github.ibm.com/wdp-gov/wdp-db2/commits/3.1.70
- name: wkc-init-container
version: 1.0.216
gitLog: https://github.ibm.com/wdp-gov/wkc-init-container/commits/1.0.216
- name: 0075-wkc-lite
version: 3.5.1623
images:
- name: dataconn-engine-opdiscovery
version: 3.0.159
gitLog: https://github.ibm.com/dataconnect/wdp-opdiscovery/commits/3.0.159
- name: dataconn-engine-service
version: 5.0.190
gitLog: https://github.ibm.com/dataconnect/dataconn-engine/commits/5.0.190
- name: dataconn-engine-spark-cluster
version: 5.0.190
gitLog: https://github.ibm.com/dataconnect/dataconn-engine/commits/5.0.190
- name: wdp-activities
version: 3.0.123
gitLog: https://github.ibm.com/dataconnect/wdp-activities/commits/3.0.123
- name: wkc-gov-ui
version: 3.5.1131
gitLog: https://github.ibm.com/wdp-gov/wkc-gov-ui/commits/3.5.1131
- name: wkc-glossary-service
version: 3.5.808
gitLog: https://github.ibm.com/wdp-gov/glossary-service/commits/3.5.808
- name: wdp-search
version: 1.0.546
- name: wdp-profiling
version: 3.5.145
gitLog: https://github.ibm.com/dataconnect/wdp-profiling/commits/3.5.145
- name: wdp-profiling-messaging
version: 3.5.116
- name: wdp-profiling-ui
version: 3.5.180
- name: wkc-init-container
version: 1.0.216
gitLog: https://github.ibm.com/wdp-gov/wkc-init-container/commits/1.0.216
- name: wkc-workflow-service
version: 3.7.9
gitLog: https://github.ibm.com/wdp-gov/wkc-workflow-service/commits/3.7.9
- name: metadata-discovery
version: 3.5.27
gitLog: https://github.ibm.com/wdp-gov/discovery/commits/3.5.27
- name: wkc-metadata-imports-ui
version: 3.5.253
gitLog: https://github.ibm.com/wdp-gov/wkc-metadata-imports-ui/commits/3.5.253
- name: wdp-policy-service
version: 3.5.802
gitLog: https://github.ibm.com/dataconnect/wdp-policy-service/commits/3.5.802
- name: wdp-lineage
version: 3.5.193
gitLog: https://github.ibm.com/wdp-gov/wdp-lineage/commits/3.5.193
|
repo/cpd/3.5/assembly/wkc/x86_64/3.5.9/wkc-core-image-info.yaml
|
en:
notifications:
new_user_registration:
subject: 'Welcome to Catarse!'
header: 'Welcome to Catarse!'
backer_canceled_after_confirmed:
subject: "Backer canceled after confirmed"
header: "Backer canceled after confirmed"
project_owner_backer_confirmed:
subject: "Hooray! Another new supporter backed your project"
header: "A new back has just been confirmed for your project"
confirm_backer:
subject: "Thank you! Your back for %{project_name} was confirmed!"
header: "Back confirmed"
      # NOTE(review): the subjects/headers for payment_slip and
      # processing_payment below are Portuguese inside the `en` locale —
      # presumably copied from pt-BR; confirm and translate.
      payment_slip:
        subject: "Status do apoio para %{project_name}: boleto impresso"
        header: "Informações sobre pagamentos por boleto no Catarse"
processing_payment:
subject: "Seu apoio para %{project_name} foi recebido e está aguardando confirmação do %{payment_method}"
header: "Apoio Em Análise - o que é isso?"
updates:
subject: "Project UPDATE#%{update_id} - %{project_name}"
header: "Project UPDATE#%{update_id} - %{project_name}"
project_in_wainting_funds:
subject: "O prazo do seu projeto acabou, saiba quais são os próximos passos"
header: "Acabou o prazo!"
project_success:
subject: "Project successful: some necessary information"
header: "Project successful"
project_unsuccessful:
subject: "Your project was not funded on Catarse, but we would like your feedback x)"
header: "Project was not successfully funded ;)"
backer_project_successful:
subject: "Hooray! The project %{project_name} has succeeded on Catarse"
header: "Hooray! The project you backed has succeeded on Catarse"
backer_project_unsuccessful:
subject: "The project %{project_name} was not funded - some useful information"
header: "Information on credits and refunds ;)"
pending_backer_project_unsuccessful:
subject: "You started to back the project %{project_name}, which was not funded - some useful information"
header: "Information about confirmation of support, credits and refunds ;)"
project_received:
subject: "Your project was received by Catarse"
header: "Your project was received by the Catarse team"
project_received_channel:
subject: "Your project was received by the Wings Channel"
header: "Your project was welcomed by the staff of the Wings Channel"
new_draft_project_channel:
subject: "The project %{project_name} has just been created"
header: "A new project is awaiting review"
project_visible:
subject: "Your project is now online at Catarse"
header: "Now it's for real"
project_rejected:
subject: "Your project does not fit Catarse's focus"
header: "Your project was not accepted ;("
new_draft_project:
subject: "New project created - %{project_name}"
header: "New project created"
new_features:
subject: "Ajuda para tese de doutorado sobre crowdfunding"
header: "Ajuda para tese de doutorado sobre crowdfunding"
adm_project_deadline:
subject: "The project %{project_name} has reached its deadline"
header: "The project reached its deadline"
credits_warning:
subject: "You have R$ %{amount} in credits on Catarse"
header: "Why not back another project?"
backer_confirmed_after_project_was_closed:
subject: "New backer confirmed on finished project '%{project_name}' "
header: "New backer confirmed."
temporary_password:
subject: "Temporary password for Catarse"
header: "Log on to change your password"
|
config/locales/notifications.en.yml
|
- position: 1
driverNumber: 7
driverId: kimi-raikkonen
constructorId: ferrari
engineManufacturerId: ferrari
tyreManufacturerId: pirelli
lap: 71
time: "1:06.957"
gap:
interval:
- position: 2
driverNumber: 5
driverId: sebastian-vettel
constructorId: ferrari
engineManufacturerId: ferrari
tyreManufacturerId: pirelli
lap: 67
time: "1:07.082"
gap: "+0.125"
interval: "+0.125"
- position: 3
driverNumber: 44
driverId: lewis-hamilton
constructorId: mercedes
engineManufacturerId: mercedes
tyreManufacturerId: pirelli
lap: 58
time: "1:07.241"
gap: "+0.284"
interval: "+0.159"
- position: 4
driverNumber: 33
driverId: max-verstappen
constructorId: red-bull
engineManufacturerId: tag-heuer
tyreManufacturerId: pirelli
lap: 70
time: "1:07.442"
gap: "+0.485"
interval: "+0.201"
- position: 5
driverNumber: 3
driverId: daniel-ricciardo
constructorId: red-bull
engineManufacturerId: tag-heuer
tyreManufacturerId: pirelli
lap: 46
time: "1:07.591"
gap: "+0.634"
interval: "+0.149"
- position: 6
driverNumber: 9
driverId: marcus-ericsson
constructorId: sauber
engineManufacturerId: ferrari
tyreManufacturerId: pirelli
lap: 67
time: "1:08.216"
gap: "+1.259"
interval: "+0.625"
- position: 7
driverNumber: 20
driverId: kevin-magnussen
constructorId: haas
engineManufacturerId: ferrari
tyreManufacturerId: pirelli
lap: 70
time: "1:08.476"
gap: "+1.519"
interval: "+0.260"
- position: 8
driverNumber: 11
driverId: sergio-perez
constructorId: force-india
engineManufacturerId: mercedes
tyreManufacturerId: pirelli
lap: 66
time: "1:08.504"
gap: "+1.547"
interval: "+0.028"
- position: 9
driverNumber: 14
driverId: fernando-alonso
constructorId: mclaren
engineManufacturerId: renault
tyreManufacturerId: pirelli
lap: 69
time: "1:08.661"
gap: "+1.704"
interval: "+0.157"
- position: 10
driverNumber: 55
driverId: carlos-sainz-jr
constructorId: renault
engineManufacturerId: renault
tyreManufacturerId: pirelli
lap: 37
time: "1:08.766"
gap: "+1.809"
interval: "+0.105"
- position: 11
driverNumber: 31
driverId: esteban-ocon
constructorId: force-india
engineManufacturerId: mercedes
tyreManufacturerId: pirelli
lap: 64
time: "1:08.850"
gap: "+1.893"
interval: "+0.084"
- position: 12
driverNumber: 2
driverId: stoffel-vandoorne
constructorId: mclaren
engineManufacturerId: renault
tyreManufacturerId: pirelli
lap: 63
time: "1:08.894"
gap: "+1.937"
interval: "+0.044"
- position: 13
driverNumber: 35
driverId: sergey-sirotkin
constructorId: williams
engineManufacturerId: mercedes
tyreManufacturerId: pirelli
lap: 64
time: "1:08.971"
gap: "+2.014"
interval: "+0.077"
- position: 14
driverNumber: 16
driverId: charles-leclerc
constructorId: sauber
engineManufacturerId: ferrari
tyreManufacturerId: pirelli
lap: 68
time: "1:09.006"
gap: "+2.049"
interval: "+0.035"
- position: 15
driverNumber: 77
driverId: valtteri-bottas
constructorId: mercedes
engineManufacturerId: mercedes
tyreManufacturerId: pirelli
lap: 12
time: "1:09.044"
gap: "+2.087"
interval: "+0.038"
- position: 16
driverNumber: 8
driverId: romain-grosjean
constructorId: haas
engineManufacturerId: ferrari
tyreManufacturerId: pirelli
lap: 17
time: "1:09.071"
gap: "+2.114"
interval: "+0.027"
- position: 17
driverNumber: 28
driverId: brendon-hartley
constructorId: toro-rosso
engineManufacturerId: honda
tyreManufacturerId: pirelli
lap: 42
time: "1:09.171"
gap: "+2.214"
interval: "+0.100"
- position: 18
driverNumber: 18
driverId: lance-stroll
constructorId: williams
engineManufacturerId: mercedes
tyreManufacturerId: pirelli
lap: 68
time: "1:09.203"
gap: "+2.246"
interval: "+0.032"
- position: 19
driverNumber: 10
driverId: pierre-gasly
constructorId: toro-rosso
engineManufacturerId: honda
tyreManufacturerId: pirelli
lap: 38
time: "1:09.295"
gap: "+2.338"
interval: "+0.092"
- position: 20
driverNumber: 27
driverId: nico-hulkenberg
constructorId: renault
engineManufacturerId: renault
tyreManufacturerId: pirelli
lap: 7
time: "1:10.380"
gap: "+3.423"
interval: "+1.085"
|
src/data/seasons/2018/races/09-austria/fastest-laps.yml
|
nl:
refinery:
plugins:
refinery_users:
title: Gebruikers
description: Gebruikers beheren
admin:
users:
delete: Deze gebruiker definitief verwijderen
edit: Deze gebruiker bewerken
update:
cannot_remove_user_plugin_from_current_user: "U kunt de plugin 'Gebruikers' niet deactiveren voor het account waarmee u op dit moment bent ingelogd."
form:
blank_password_keeps_current: 'Als u dit veld leeg laat, blijft het huidige wachtwoord behouden'
plugin_access: Toegang tot plugins
role_access: Toegang tot rollen
enable_all: alles aanvinken
actions:
create_new_user: Een nieuwe gebruiker toevoegen
user:
email_user: Deze gebruiker e-mailen
preview: '(%{who}) is toegevoegd op %{created_at}'
sessions:
new:
hello_please_sign_in: Hallo! Log in om verder te gaan.
sign_in: Inloggen
forgot_password: <PASSWORD>
user_mailer:
reset_notification:
subject: Link om een nieuw wachtwoord aan te vragen
reset_request_received_for: 'Aanvraag voor een nieuw wachtwoord voor %{username}'
visit_this_url: Bezoek dit adres om een nieuw wachtwoord in te stellen
remain_same_if_no_action: 'Als u niet op dit bericht reageert blijft uw wachtwoord ongewijzigd'
users:
new:
fill_form: 'Vul het onderstaande formulier in, om te kunnen beginnen.'
sign_up: Registreer
create:
welcome: 'Welkom bij Refinery, %{who}'
forgot:
email_address: E-mailadres
enter_email_address: Vul het e-mailadres van uw account in.
reset_password: <PASSWORD>wo<PASSWORD> aanvragen
blank_email: Uw heeft geen e-mailadres ingevuld.
email_not_associated_with_account_html: "Sorry, '%{email}' is niet gekoppeld aan een account. <br/>Weet u zeker dat u het juiste e-mailadres heeft ingevuld?"
email_reset_sent: Er is een e-mail naar u verzonden met een link om een nieuw wachtwoord in te stellen.
password_encryption: 'U moet een nieuw wachtwoord instellen omdat de wachtwoordversleuteling van Refinery is verbeterd, hiermee wordt uw wachtwoord nog veiliger opgeslagen.'
reset:
code_invalid: "Het spijt ons, maar deze code is verlopen of ongeldig. U kunt het probleem wellicht verhelpen door de link direct vanuit de e-mail te kopieren en in uw browser te plakken, of door het proces opnieuw te starten."
successful: "Het instellen van een nieuw wachtwoord voor %{email} is gelukt."
pick_new_password_for: 'Kies een nieuw wachtwoord voor %{email}'
reset_password: <PASSWORD>
roles:
superuser: Beheerder
refinery: Refinery-gebruiker
devise:
failure:
unauthenticated: U moet inloggen om verder te kunnen
invalid: 'Sorry, uw wachtwoord, of gebruikersnaam is onjuist.'
sessions:
signed_in: U bent ingelogd.
activerecord:
models:
refinery/user: gebruiker
attributes:
refinery/user:
login: Gebruikersnaam
username: Gebruikersnaam
password: <PASSWORD>
password_confirmation: <PASSWORD>
email: E-mail
remember_me: Onthoud mijn gegevens
|
authentication/config/locales/nl.yml
|
--- !<MAP>
contentType: "MAP"
firstIndex: "2018-10-16 18:27"
game: "Unreal Tournament"
name: "CTF-KungFu][v2"
author: "Korrupt & DarkStar"
description: "Prison Bitch Dojo"
releaseDate: "2001-08"
attachments:
- type: "IMAGE"
name: "CTF-KungFu][v2_shot_1.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament/Maps/Capture%20The%20Flag/K/CTF-KungFu%5D%5Bv2_shot_1.png"
originalFilename: "ctf-kungfu][v2.zip"
hash: "f2a46f68d9c67d2e4f676076a292a1ccc8c3e34c"
fileSize: 1633346
files:
- name: "CTF-KungFu][v2.unr"
fileSize: 3320025
hash: "d73b05658780e11f78316c7c25750461f6308912"
- name: "Bsin.uax"
fileSize: 350822
hash: "d9cc0d42832bb57e8a1613556e47955603b845a0"
- name: "KungFu.utx"
fileSize: 915208
hash: "ac04f16f8ad4f086a39a0ddcb9a5949d319408a3"
otherFiles: 1
dependencies:
CTF-KungFu][v2.unr:
- status: "OK"
name: "Bsin"
- status: "OK"
name: "KungFu"
downloads:
- url: "https://f002.backblazeb2.com/file/unreal-archive-files/Unreal%20Tournament/Maps/Capture%20The%20Flag/K/ctf-kungfu%5D%5Bv2.zip"
main: true
repack: false
state: "OK"
- url: "http://www.ut-files.com/index.php?dir=Maps/CTF/MapsK/&file=ctf-kungfu%5D%5Bv2.zip"
main: false
repack: false
state: "OK"
- url: "http://medor.no-ip.org/index.php?dir=Maps/CTF&file=ctf-kungfu%5D%5Bv2-2.zip"
main: false
repack: false
state: "OK"
- url: "http://medor.no-ip.org/index.php?dir=Maps/CTF&file=ctf-kungfu%5D%5Bv2.zip"
main: false
repack: false
state: "OK"
- url: "http://uttexture.com/UT/Downloads/Maps/CTF/MapsK/ctf-kungfu%5d%5bv2.zip"
main: false
repack: false
state: "OK"
- url: "https://files.vohzd.com/unrealarchive/Unreal%20Tournament/Maps/Capture%20The%20Flag/K/f/2/a46f68/ctf-kungfu%255D%255Bv2.zip"
main: false
repack: false
state: "OK"
- url: "https://unreal-archive-files.eu-central-1.linodeobjects.com/Unreal%20Tournament/Maps/Capture%20The%20Flag/K/f/2/a46f68/ctf-kungfu%255D%255Bv2.zip"
main: false
repack: false
state: "OK"
deleted: false
gametype: "Capture The Flag"
title: "<NAME>"
playerCount: "4-12"
themes:
Industrial: 1.0
bots: true
|
content/Unreal Tournament/Maps/Capture The Flag/K/f/2/a46f68/ctf-kungfuv2_[f2a46f68].yml
|
name: Build, test, analyze and publish
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
paths-ignore:
- '**/*.md'
- '**/*.png'
jobs:
# Build docker image from branch and run tests
build:
name: Build and test
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v2
- name: Build lowlighter/metrics:${{ github.head_ref || 'master' }}
run: docker build -t lowlighter/metrics:${{ github.head_ref || 'master' }} .
- name: Run tests
run: docker run --workdir=/metrics --entrypoint="" lowlighter/metrics:${{ github.head_ref || 'master' }} npm test
# Run CodeQL on branch
analyze:
name: Analyze code
runs-on: ubuntu-latest
needs: [ build ]
steps:
- name: Checkout repository
uses: actions/checkout@v2
- name: Setup CodeQL
uses: github/codeql-action/init@v1
with:
languages: javascript
config-file: ./.github/config/codeql.yml
- name: Analyze code
uses: github/codeql-action/analyze@v1
# Build docker image from master and publish it to GitHub registry
docker-master:
name: Publish master to GitHub registry
runs-on: ubuntu-latest
needs: [ build ]
if: github.event_name == 'push' && github.ref == 'refs/heads/master'
steps:
- name: Checkout repository
uses: actions/checkout@v2
- name: Login to GitHub registry
run: echo ${{ secrets.CONTAINER_REGISTRY_TOKEN }} | docker login ghcr.io -u ${{ github.actor }} --password-stdin
- name: Build docker image
run: docker build -t ghcr.io/lowlighter/metrics:master .
- name: Publish to GitHub registry
run: docker push ghcr.io/lowlighter/metrics:master
# Test lowlighter/metrics@master
action-master-test:
name: Test lowlighter/metrics@master
runs-on: ubuntu-latest
needs: [ docker-master ]
if: github.event_name == 'push' && github.ref == 'refs/heads/master'
steps:
- name: Run tests
uses: lowlighter/metrics@master
with:
token: MOCKED_TOKEN
plugins_errors_fatal: yes
dryrun: yes
use_mocked_data: yes
verify: yes
use_prebuilt_image: master
# Build docker image from master and publish it to GitHub registry with release tag
docker-release:
name: Publish release to GitHub registry
runs-on: ubuntu-latest
needs: [ build, analyze, action-master-test ]
if: github.event_name == 'push' && github.ref == 'refs/heads/master' && contains(github.event.head_commit.message, '[release]')
steps:
- name: Checkout repository
uses: actions/checkout@v2
- name: Login to GitHub registry
run: echo ${{ secrets.CONTAINER_REGISTRY_TOKEN }} | docker login ghcr.io -u ${{ github.actor }} --password-stdin
- name: Pull docker image (master)
run: docker pull ghcr.io/lowlighter/metrics:master
- name: Tag docker image (release)
run: docker tag ghcr.io/lowlighter/metrics:master ghcr.io/lowlighter/metrics:$(echo '${{ github.event.head_commit.message }}' | grep -Po 'v\d+[.]\d+')
- name: Publish release to GitHub registry
run: docker push ghcr.io/lowlighter/metrics:$(echo '${{ github.event.head_commit.message }}' | grep -Po 'v\d+[.]\d+')
- name: Tag docker image (latest)
run: docker tag ghcr.io/lowlighter/metrics:master ghcr.io/lowlighter/metrics:latest
- name: Publish latest to GitHub registry
run: docker push ghcr.io/lowlighter/metrics:latest
# Test lowlighter/metrics@latest
  # (typo fixed: "lastest" -> "latest"; no other job references this id)
  action-latest-test:
name: Test lowlighter/metrics@latest
runs-on: ubuntu-latest
needs: [ docker-release ]
if: github.event_name == 'push' && github.ref == 'refs/heads/master' && contains(github.event.head_commit.message, '[release]')
steps:
- name: Run tests
uses: lowlighter/metrics@latest
with:
token: MOCKED_TOKEN
plugins_errors_fatal: yes
dryrun: yes
use_mocked_data: yes
verify: yes
|
.github/workflows/workflow.yml
|
on:
push:
pull_request:
workflow_dispatch:
jobs:
pre_job:
# continue-on-error: true # Uncomment once integration is finished
runs-on: ubuntu-latest
# Map a step output to a job output
outputs:
should_skip: ${{ steps.skip_check.outputs.should_skip }}
steps:
- id: skip_check
uses: the-iea/skip-duplicate-actions@master
with:
# All of these options are optional, so you can remove them if you are happy with the defaults
concurrent_skipping: 'never'
skip_after_successful_duplicate: 'true'
paths_ignore: '["**/README.md", "**/docs/**"]'
do_not_skip: '["pull_request", "workflow_dispatch", "schedule"]'
- run: echo "${{ toJSON(steps.skip_check.outputs) }}"
main_job:
needs: pre_job
if: ${{ needs.pre_job.outputs.should_skip != 'true' }}
runs-on: ubuntu-latest
steps:
- run: echo "Running slow tests..." && sleep 30
skip_individual_steps_job:
runs-on: ubuntu-latest
steps:
- id: skip_check
uses: the-iea/skip-duplicate-actions@master
with:
cancel_others: 'false'
paths: '["src/**", "dist/**"]'
- if: ${{ steps.skip_check.outputs.should_skip != 'true' }}
run: |
echo "${{ toJSON(steps.skip_check.outputs) }}"
echo "Run only if src/ or dist/ changed..." && sleep 30
echo "Do other stuff..."
private_action:
runs-on: ubuntu-latest
steps:
# To use the Action from the current branch, we checkout the repository
- name: Checkout
uses: actions/checkout@v2
- run: |
yarn install
- id: skip_check
uses: ./ # Uses this Action in the root directory
with:
github_token: ${{ github.token }}
paths_ignore: '["**/*.md"]'
cancel_others: 'true'
concurrent_skipping: 'outdated_runs'
skip_after_successful_duplicate: 'true'
do_not_skip: '["pull_request", "workflow_dispatch", "schedule"]'
- if: ${{ steps.skip_check.outputs.should_skip == 'false' }}
run: |
echo "${{ toJSON(steps.skip_check.outputs) }}"
echo "Do stuff..." && sleep 30
|
.github/workflows/test.yml
|
items:
- uid: com.microsoft.azure.management.cosmosdb.CosmosDBAccount.Definition
id: Definition
artifact: com.microsoft.azure:azure-mgmt-cosmosdb:1.36.3
parent: com.microsoft.azure.management.cosmosdb
langs:
- java
name: CosmosDBAccount.Definition
nameWithType: CosmosDBAccount.Definition
fullName: com.microsoft.azure.management.cosmosdb.CosmosDBAccount.Definition
type: Interface
package: com.microsoft.azure.management.cosmosdb
summary: Grouping of cosmos db definition stages.
syntax:
content: public static interface CosmosDBAccount.Definition extends CosmosDBAccount.DefinitionStages.Blank, CosmosDBAccount.DefinitionStages.WithGroup, CosmosDBAccount.DefinitionStages.WithKind, CosmosDBAccount.DefinitionStages.WithWriteReplication, CosmosDBAccount.DefinitionStages.WithReadReplication, CosmosDBAccount.DefinitionStages.WithCreate
implements:
- com.microsoft.azure.management.cosmosdb.CosmosDBAccount.DefinitionStages.Blank
- com.microsoft.azure.management.cosmosdb.CosmosDBAccount.DefinitionStages.WithGroup
- com.microsoft.azure.management.cosmosdb.CosmosDBAccount.DefinitionStages.WithKind
- com.microsoft.azure.management.cosmosdb.CosmosDBAccount.DefinitionStages.WithWriteReplication
- com.microsoft.azure.management.cosmosdb.CosmosDBAccount.DefinitionStages.WithReadReplication
- com.microsoft.azure.management.cosmosdb.CosmosDBAccount.DefinitionStages.WithCreate
references:
- uid: com.microsoft.azure.management.cosmosdb.CosmosDBAccount.DefinitionStages.WithCreate
name: CosmosDBAccount.DefinitionStages.WithCreate
nameWithType: CosmosDBAccount.DefinitionStages.WithCreate
fullName: com.microsoft.azure.management.cosmosdb.CosmosDBAccount.DefinitionStages.WithCreate
- uid: com.microsoft.azure.management.cosmosdb.CosmosDBAccount.DefinitionStages.WithWriteReplication
name: CosmosDBAccount.DefinitionStages.WithWriteReplication
nameWithType: CosmosDBAccount.DefinitionStages.WithWriteReplication
fullName: com.microsoft.azure.management.cosmosdb.CosmosDBAccount.DefinitionStages.WithWriteReplication
- uid: com.microsoft.azure.management.cosmosdb.CosmosDBAccount.DefinitionStages.WithReadReplication
name: CosmosDBAccount.DefinitionStages.WithReadReplication
nameWithType: CosmosDBAccount.DefinitionStages.WithReadReplication
fullName: com.microsoft.azure.management.cosmosdb.CosmosDBAccount.DefinitionStages.WithReadReplication
- uid: com.microsoft.azure.management.cosmosdb.CosmosDBAccount.DefinitionStages.WithGroup
name: CosmosDBAccount.DefinitionStages.WithGroup
nameWithType: CosmosDBAccount.DefinitionStages.WithGroup
fullName: com.microsoft.azure.management.cosmosdb.CosmosDBAccount.DefinitionStages.WithGroup
- uid: com.microsoft.azure.management.cosmosdb.CosmosDBAccount.DefinitionStages.WithKind
name: CosmosDBAccount.DefinitionStages.WithKind
nameWithType: CosmosDBAccount.DefinitionStages.WithKind
fullName: com.microsoft.azure.management.cosmosdb.CosmosDBAccount.DefinitionStages.WithKind
- uid: com.microsoft.azure.management.cosmosdb.CosmosDBAccount.DefinitionStages.Blank
name: CosmosDBAccount.DefinitionStages.Blank
nameWithType: CosmosDBAccount.DefinitionStages.Blank
fullName: com.microsoft.azure.management.cosmosdb.CosmosDBAccount.DefinitionStages.Blank
|
docs-ref-autogen/com.microsoft.azure.management.cosmosdb.CosmosDBAccount.Definition.yml
|
name: Upload test coverage to codecov
on:
  push:
    # "branch" is not a valid push-event filter key; GitHub Actions requires
    # "branches" with a list — as written the filter was ignored and the
    # workflow ran on every push.
    branches:
      - dev
jobs:
build-environment:
name: Build environment
runs-on: ubuntu-latest
steps:
- name: Checkout repository
uses: actions/checkout@v2
# See: https://github.com/marketplace/actions/setup-conda
- name: Setup anaconda
uses: s-weigand/setup-conda@v1
with:
conda-channels: "conda-forge"
# Build cache of environment
- name: Cache conda environment
id: cache-environment
uses: actions/cache@v2
# Conda environment build step depends on two files,
# so we ensure that the hash key contains both their hashes.
with:
path: |
pyjanitor-dev.tar.gz
key: ${{ runner.os }}-env.${{ hashFiles('environment-dev.yml') }}
- name: Build environment
if: steps.cache-environment.outputs.cache-hit != 'true'
run: bash ./scripts/ci/build_environment.sh
- name: Install conda-pack
if: steps.cache-environment.outputs.cache-hit != 'true'
run: conda install -c conda-forge conda-pack
- name: Run conda-pack
if: steps.cache-environment.outputs.cache-hit != 'true'
run: conda pack -n pyjanitor-dev -o pyjanitor-dev.tar.gz
# See: https://github.com/actions/upload-artifact
- name: Upload environment
uses: actions/upload-artifact@v2
with:
name: pyjanitor-dev-tarball
path: pyjanitor-dev.tar.gz
unit-tests:
name: Run unit tests
runs-on: ubuntu-latest
needs: build-environment
steps:
- name: Checkout repository
uses: actions/checkout@v2
# https://github.com/actions/download-artifact
- name: Download environment tarball
uses: actions/download-artifact@v2
with:
name: pyjanitor-dev-tarball
- name: Unpack environment and activate it
run: bash scripts/ci/unpack_environment.sh
- name: Run tests
run: |
source /tmp/pyjanitor-dev_env/bin/activate
python -m pip install -e .
pytest
- name: Upload code coverage
run: |
source /tmp/pyjanitor-dev_env/bin/activate
bash <(curl -s https://codecov.io/bash)
|
.github/workflows/codecov.yml
|
apiVersion: apps/v1
kind: Deployment
metadata:
name: stats-cache
spec:
selector:
matchLabels:
name: stats-cache
template:
metadata:
labels:
name: stats-cache
spec:
nodeSelector:
kubernetes.io/os: linux
containers:
- name: cache
image: redis:5-alpine
---
apiVersion: v1
kind: Service
metadata:
name: stats-cache
spec:
selector:
name: stats-cache
ports:
- port: 6379
targetPort: 6379
---
apiVersion: apps/v1
kind: Deployment
metadata:
name: stats-queue
spec:
selector:
matchLabels:
name: stats-queue
template:
metadata:
labels:
name: stats-queue
spec:
nodeSelector:
kubernetes.io/os: linux
containers:
- name: queue
image: rabbitmq:3-alpine
---
apiVersion: v1
kind: Service
metadata:
name: stats-queue
spec:
selector:
name: stats-queue
ports:
- port: 5672
targetPort: 5672
---
apiVersion: apps/v1
kind: Deployment
metadata:
name: stats-worker
spec:
selector:
matchLabels:
name: stats-worker
template:
metadata:
labels:
name: stats-worker
spec:
nodeSelector:
kubernetes.io/os: linux
containers:
- name: stats-worker
image: azdspublic/todo-app-stats-worker
env:
- name: STATS_QUEUE_URI
value: amqp://stats-queue
- name: REDIS_HOST
value: stats-cache
- name: REDIS_PORT
value: "6379"
---
apiVersion: apps/v1
kind: Deployment
metadata:
name: stats-api
spec:
selector:
matchLabels:
name: stats-api
template:
metadata:
labels:
name: stats-api
spec:
nodeSelector:
kubernetes.io/os: linux
containers:
- name: stats-api
image: azdspublic/todo-app-stats-api
livenessProbe:
httpGet:
path: /hello
port: 80
initialDelaySeconds: 5
periodSeconds: 3
readinessProbe:
httpGet:
path: /hello
port: 80
initialDelaySeconds: 15
periodSeconds: 5
---
apiVersion: v1
kind: Service
metadata:
name: stats-api
spec:
selector:
name: stats-api
ports:
- port: 80
targetPort: 80
---
apiVersion: apps/v1
kind: Deployment
metadata:
name: todos-db
spec:
selector:
matchLabels:
name: todos-db
template:
metadata:
labels:
name: todos-db
spec:
nodeSelector:
kubernetes.io/os: linux
containers:
- name: todos-db
image: mongo:4
---
apiVersion: v1
kind: Service
metadata:
name: todos-db
spec:
selector:
name: todos-db
ports:
- port: 27017
targetPort: 27017
---
apiVersion: apps/v1
kind: Deployment
metadata:
name: frontend
spec:
selector:
matchLabels:
name: frontend
template:
metadata:
labels:
name: frontend
spec:
nodeSelector:
kubernetes.io/os: linux
containers:
- name: frontend
image: azdspublic/todo-app-frontend
env:
- name: STATS_QUEUE_URI
value: amqp://stats-queue
---
apiVersion: v1
kind: Service
metadata:
name: frontend
spec:
type: LoadBalancer
selector:
name: frontend
ports:
- port: 80
targetPort: 80
---
apiVersion: apps/v1
kind: Deployment
metadata:
name: database-api
spec:
selector:
matchLabels:
name: database-api
template:
metadata:
labels:
name: database-api
spec:
nodeSelector:
kubernetes.io/os: linux
containers:
- name: database-api
image: azdspublic/todo-app-database-api
env:
- name: MONGO_CONNECTION_STRING
value: mongodb://todos-db
---
apiVersion: v1
kind: Service
metadata:
name: database-api
spec:
selector:
name: database-api
ports:
- port: 80
targetPort: 80
|
samples/todo-app/deployment.yaml
|
{% set version = "0.15" %}
{% set posix = 'm2-' if win else '' %}
{% set native = 'm2w64-' if win else '' %}
package:
name: r-xfun
version: {{ version|replace("-", "_") }}
source:
url:
- {{ cran_mirror }}/src/contrib/xfun_{{ version }}.tar.gz
- {{ cran_mirror }}/src/contrib/Archive/xfun/xfun_{{ version }}.tar.gz
sha256: 7221a6b9c9d870c654913e11231ce84a1615733281a876c2b27f154233bfb1b9
build:
merge_build_host: true # [win]
number: 0
noarch: generic
rpaths:
- lib/R/lib/
- lib/
requirements:
build:
- {{ posix }}zip # [win]
host:
- r-base
run:
- r-base
test:
commands:
- $R -e "library('xfun')" # [not win]
- "\"%R%\" -e \"library('xfun')\"" # [win]
about:
home: https://github.com/yihui/xfun
license: MIT
summary: Miscellaneous functions commonly used in other packages maintained by 'Yihui Xie'.
license_family: MIT
license_file:
- {{ environ["PREFIX"] }}/lib/R/share/licenses/MIT
- LICENSE
extra:
recipe-maintainers:
- conda-forge/r
- bsennblad
- philip-khor
# Package: xfun
# Type: Package
# Title: Miscellaneous Functions by 'Yihui Xie'
# Version: 0.10
# Authors@R: c( person("Yihui", "Xie", role = c("aut", "cre", "cph"), email = "<EMAIL>", comment = c(ORCID = "0000-0003-0645-5666")), person("Daijiang", "Li", role = "ctb"), person("Xianying", "Tan", role = "ctb"), person("Salim", "Bruggemann", role = "ctb", email = "<EMAIL>"), person() )
# Description: Miscellaneous functions commonly used in other packages maintained by 'Yihui Xie'.
# Imports: stats, tools
# Suggests: testit, parallel, rstudioapi, tinytex, mime, markdown, knitr, htmltools, base64enc, remotes, rmarkdown
# License: MIT + file LICENSE
# URL: https://github.com/yihui/xfun
# BugReports: https://github.com/yihui/xfun/issues
# Encoding: UTF-8
# LazyData: true
# RoxygenNote: 6.1.1
# VignetteBuilder: knitr
# NeedsCompilation: no
# Packaged: 2019-10-01 15:28:13 UTC; yihui
# Author: <NAME> [aut, cre, cph] (<https://orcid.org/0000-0003-0645-5666>), <NAME> [ctb], <NAME> [ctb], <NAME> [ctb]
# Maintainer: <NAME> <<EMAIL>>
# Repository: CRAN
# Date/Publication: 2019-10-01 18:20:02 UTC
|
recipe/meta.yaml
|
- title: "General"
children:
- title: "Overview"
url: "/General"
- title: "Why TCP?"
url: "/General/WhyTCP"
- title: "Getting Started"
url: "/General/Start"
- title: "Migration Guide"
url: "/General/Migration"
- title: "Deprecations"
url: "/General/Deprecations"
- title: "Support"
url: "/General/Support"
- title: "Concepts"
children:
- title: "Overview"
url: "/Concepts"
- title: "Authority"
url: "/Concepts/Authority"
- title: "About IDs"
url: "/Concepts/IDs"
- title: "Clients and Servers"
url: "/Concepts/ClientsServers"
- title: "Conversion"
url: "/Concepts/Conversion"
- title: "Debugging"
url: "/Concepts/Debugging"
- title: "HLAPI"
url: "/Concepts/HLAPI"
- title: "Clock Synchronization"
url: "/Concepts/ClockSync"
- title: "Mobile Tips"
url: "/Concepts/MobileTips"
- title: "Project Setup"
url: "/Concepts/Setup"
- title: "State Syncronization"
url: "/Concepts/StateSync"
- title: "Advanced State Syncronization"
url: "/Concepts/StateSyncAdvanced"
- title: "Visibility"
url: "/Concepts/Visibility"
- title: "Custom Visibility"
url: "/Concepts/VisibilityCustom"
- title: "Communications"
children:
- title: "Overview"
url: "/Concepts/Communications"
- title: "Remote Actions"
url: "/Concepts/Communications/RemoteActions"
- title: "Network Manager Callbacks"
url: "/Concepts/Communications/NetworkManager"
- title: "Network Behavior Callbacks"
url: "/Concepts/Communications/NetworkBehavior"
- title: "Network Messages"
url: "/Concepts/Communications/NetworkMessages"
- title: "GameObjects"
children:
- title: "Overview"
url: "/Concepts/GameObjects"
- title: "Spawn Player"
url: "/Concepts/GameObjects/SpawnPlayer"
- title: "Spawn Player - Custom"
url: "/Concepts/GameObjects/SpawnPlayerCustom"
- title: "Spawn Object"
url: "/Concepts/GameObjects/SpawnObject"
- title: "Spawn Object - Custom"
url: "/Concepts/GameObjects/SpawnObjectCustom"
- title: "Scene Objects"
url: "/Concepts/GameObjects/SceneObjects"
- title: "Components"
children:
- title: "Overview"
url: "/Components"
- title: "NetworkManager"
url: "/Components/NetworkManager"
- title: "NetworkManagerHUD"
url: "/Components/NetworkManagerHUD"
- title: "NetworkIdentity"
url: "/Components/NetworkIdentity"
- title: "NetworkStartPosition"
url: "/Components/NetworkStartPosition"
- title: "NetworkProximityChecker"
url: "/Components/NetworkProximityChecker"
- title: "NetworkTransform"
url: "/Components/NetworkTransform"
- title: "NetworkTransformChild"
url: "/Components/NetworkTransformChild"
- title: "NetworkAnimator"
url: "/Components/NetworkAnimator"
- title: "NetworkNavMeshAgent"
url: "/Components/NetworkNavMeshAgent"
- title: "NetworkController"
url: "/Components/NetworkController"
- title: "NetworkRigidbody"
url: "/Components/NetworkRigidbody"
- title: "Classes"
children:
- title: "Overview"
url: "/Classes"
- title: "NetworkServer"
url: "/Classes/NetworkServer"
- title: "NetworkConnection"
url: "/Classes/NetworkConnection"
- title: "NetworkClient"
url: "/Classes/NetworkClient"
- title: "NetworkBehavior"
url: "/Classes/NetworkBehavior"
- title: "Attributes"
url: "/Classes/Attributes"
- title: "SyncLists"
url: "/Classes/SyncLists"
- title: "Events"
children:
- title: "Application Events"
url: "/Events/Application"
- title: "Server Events"
url: "/Events/Server"
- title: "Client Events"
url: "/Events/Client"
- title: "Other Events"
url: "/Events/Other"
- title: "Messages"
children:
- title: "Overview"
url: "/Messages"
- title: "Transports"
children:
- title: "Overview"
url: "/Transports"
- title: "TCP - Telepathy"
url: "/Transports/Telepathy"
- title: "UDP - Ignorance"
url: "/Transports/Ignorance"
- title: "WebGL - Groove"
url: "/Transports/Groove"
- title: "Steam - Fizzy"
url: "/Transports/Fizzy"
- title: "Socket - Insight"
url: "/Transports/Insight"
- title: "Sample Projects"
children:
- title: "Overview"
url: "/Samples"
- title: "Pong"
url: "/Samples/Pong"
- title: "Services"
children:
- title: "Overview"
url: "/Services"
- title: "Match Service"
url: "/Services/Match"
- title: "Chat Service"
url: "/Services/Chat"
- title: "NAT Service"
url: "/Services/NAT"
- title: "Relay Service"
url: "/Services/Relay"
|
docs/_data/navigation.yml
|
field: b2c6e66f-5fcd-4220-b710-2624b1ef531d # Accordion Matrix
fieldLayouts:
e5166699-4103-4dbe-8f15-7722d35cd4fb:
tabs:
-
elements:
-
fieldUid: 6bc4604d-7a4d-4a0c-ad53-c3a6adc1a6e8 # Question
instructions: null
label: null
required: false
tip: null
type: craft\fieldlayoutelements\CustomField
warning: null
width: 100
-
fieldUid: f85e63c0-1f6c-495c-95b7-6b70d338bd07 # Answer
instructions: null
label: null
required: false
tip: null
type: craft\fieldlayoutelements\CustomField
warning: null
width: 100
name: Content
sortOrder: 1
fields:
6bc4604d-7a4d-4a0c-ad53-c3a6adc1a6e8: # Question
contentColumnType: text
fieldGroup: null
handle: question
instructions: ''
name: Question
searchable: false
settings:
byteLimit: null
charLimit: null
code: ''
columnType: null
initialRows: '4'
multiline: ''
placeholder: null
uiMode: normal
translationKeyFormat: null
translationMethod: none
type: craft\fields\PlainText
f85e63c0-1f6c-495c-95b7-6b70d338bd07: # Answer
contentColumnType: text
fieldGroup: null
handle: answer
instructions: ''
name: Answer
searchable: false
settings:
availableTransforms: '*'
availableVolumes: '*'
cleanupHtml: true
columnType: text
configSelectionMode: choose
defaultTransform: ''
manualConfig: ''
purifierConfig: ''
purifyHtml: '1'
redactorConfig: ''
removeEmptyTags: '1'
removeInlineStyles: '1'
removeNbsp: '1'
showHtmlButtonForNonAdmins: ''
showUnpermittedFiles: false
showUnpermittedVolumes: false
uiMode: enlarged
translationKeyFormat: null
translationMethod: none
type: craft\redactor\Field
handle: item
name: Item
sortOrder: 1
|
config/project/matrixBlockTypes/item--80190ffa-9de6-4824-befb-1c21b98c9752.yaml
|
---
#
# Install the OpenAFS server with RPM package files. The package
# files are transferred from the controller to a temporary directory
# on the remote node and then installed.
#
- name: Verify the path to the installation tarball is specified
assert:
that:
- afs_install_archive is defined
- afs_install_archive != ''
- name: Ensure tar is installed
become: yes
yum:
state: present
name:
- tar
- gzip
- bzip2
when: ansible_pkg_mgr in ('yum', 'dnf')
- debug:
msg: "Installing {{ afs_install_archive }}"
- name: Create directory
file:
state: directory
path: "{{ afs_tmpdir }}/install"
- name: Upload package files
unarchive:
src: "{{ afs_install_archive }}"
dest: "{{ afs_tmpdir }}/install/"
- name: Find server package filenames
find:
path: "{{ afs_tmpdir }}/install/"
file_type: file
recurse: yes
use_regex: yes
patterns:
- "openafs-v?[0-9].*[.]rpm$"
- "openafs-debuginfo-.*[.]rpm$"
- "openafs-debugsource-.*[.]rpm$"
- "openafs-docs-.*[.]rpm$"
- "openafs-krb5-.*[.]rpm$"
- "openafs-server-.*[.]rpm$"
exclude: ".*[.]src[.]rpm$"
register: find_packages
- set_fact:
server_packages: "{{ find_packages.files | map(attribute='path') | list }}"
- name: Verify the package files are present
assert:
that:
- server_packages | select('search', '.*/openafs-v?[0-9][^/]*[.]rpm$') | list | count == 1
- server_packages | select('search', '.*/openafs-server-v?[0-9][^/]*[.]rpm$') | list | count == 1
- name: Install OpenAFS server packages
become: yes
yum:
state: present
disable_gpg_check: yes
name: "{{ server_packages }}"
- name: Get installation paths
openafs_contrib.openafs.openafs_get_install_paths:
package_manager_type: rpm
register: install_results
- name: Store installation facts
become: yes
openafs_contrib.openafs.openafs_store_facts:
state: update
facts:
install_method: "{{ afs_install_method }}"
server_installed: yes
server_service_name: openafs-server
bins: "{{ install_results.bins }}"
dirs: "{{ install_results.dirs }}"
when: not ansible_check_mode
- name: Set the bosserver startup options
become: yes
lineinfile:
path: /etc/sysconfig/openafs
regexp: '^BOSSERVER_ARGS='
line: 'BOSSERVER_ARGS="{{ afs_bosserver_opts }}"'
state: present
notify:
- Restart OpenAFS servers
|
roles/openafs_server/tasks/install/packages-redhat.yaml
|
whitelist_rules:
- deployment_target
- discouraged_direct_init
- duplicate_enum_cases
- dynamic_inline
- generic_type_name
- inert_defer
- is_disjoint
- large_tuple
- legacy_hashing
- nesting
- nsobject_prefer_isequal
- private_unit_test
- protocol_property_accessors_order
- reduce_boolean
- redundant_set_access_control
- unused_capture_list
- unused_control_flow_label
- valid_ibinspectable
- xctfail_message
- unneeded_parentheses_in_closure_argument
- for_where
- implicit_getter
- no_fallthrough_only
- shorthand_operator
- redundant_string_enum_value
- unneeded_break_in_switch
- unused_enumerated
- unused_optional_binding
- vertical_parameter_alignment
- type_name
- syntactic_sugar
- compiler_protocol_init
- closure_parameter_position
- control_statement
- class_delegate_protocol
- duplicate_imports
- legacy_constructor
- switch_case_alignment
- empty_count
- operator_whitespace
- redundant_objc_attribute
- force_unwrapping
- conditional_returns_on_newline
- explicit_type_interface
- force_cast
- cyclomatic_complexity
- file_length
- force_try
- function_body_length
- function_parameter_count
- type_body_length
- closure_body_length
- trailing_whitespace
- trailing_newline
- leading_whitespace
- return_arrow_whitespace
- vertical_whitespace
- operator_usage_whitespace
- vertical_whitespace_closing_braces
- vertical_whitespace_opening_braces
- colon
- comma
- closing_brace
- redundant_void_return
- statement_position
- empty_parameters
- empty_parentheses_with_trailing_closure
- legacy_cggeometry_functions
- legacy_constant
- legacy_nsgeometry_functions
- no_space_in_method_call
- opening_brace
- mark
- redundant_discardable_let
- redundant_optional_initialization
- trailing_comma
- trailing_semicolon
- void_return
- closure_spacing
- literal_expression_end_indentation
- empty_enum_arguments
- unused_closure_parameter
included:
- ../Source
# disabled_rules:
# - discarded_notification_center_observer
# Default Rules
# block_based_kvo: error
class_delegate_protocol: error
closure_parameter_position: error
compiler_protocol_init: error
control_statement: error
cyclomatic_complexity:
warning: 10
error: 76
deployment_target: error
discouraged_direct_init: error
duplicate_enum_cases: error
duplicate_imports:
dynamic_inline: error
file_length:
warning: 600
error: 1155
for_where: error
force_cast: error
force_try: warning
function_body_length:
warning: 50
error: 254
function_parameter_count:
warning: 5
error: 8
generic_type_name: error
# identifier_name: error
implicit_getter: error
inert_defer: error
is_disjoint: error
large_tuple: error
legacy_constructor: error
legacy_hashing: error
# line_length:
# error: 300
# multiple_closures_with_trailing_closure: error
nesting:
no_fallthrough_only: error
# notification_center_detachment: error
nsobject_prefer_isequal: error
operator_whitespace: error
# private_over_fileprivate:
# severity: error
private_unit_test: error
protocol_property_accessors_order: error
reduce_boolean: error
redundant_objc_attribute: error
redundant_set_access_control: error
redundant_string_enum_value: error
shorthand_operator: error
# superfluous_disable_command: error
switch_case_alignment:
severity: error
syntactic_sugar: error
# todo: warning
type_body_length:
warning: 400
error: 704
type_name: error
unneeded_break_in_switch: error
unused_capture_list: error
unused_control_flow_label: error
unused_enumerated: error
unused_optional_binding:
severity: error
unused_setter_value:
severity: error
valid_ibinspectable: error
vertical_parameter_alignment: error
# weak_delegate: error
xctfail_message: error
#----------------- Added rules ---------------------
# anyobject_protocol: error
closure_body_length:
warning: 20
error: 91
empty_count: error
force_unwrapping: error
# missing_docs:
# severity: error
unneeded_parentheses_in_closure_argument: error
# explicit_acl: error
conditional_returns_on_newline:
severity: error
explicit_type_interface:
severity: error
custom_rules:
no_hard_coded_strings:
regex: '[a-zA-Z0-9][a-zA-Z0-9! ]+'
excluded: ".*(Model|Value).*\\.swift"
match_kinds:
- string
message: "Hard-coded string is never a good practice"
severity: warning
|
Example/.swiftlint.yml
|
title: 빅데이터 처리 - 2021 2학기
subtitle: 빅데이터 처리 기말 프로젝트
description: >- # this means to ignore newlines until "baseurl:"
devlopr-jekyll is a beautiful Jekyll Theme Built For Developers, which is optimized for speed and readability.
url: "https://suyoung-jeon.github.io" # the base hostname & protocol for your site, e.g. https://example.github.io
baseurl: "" # the subpath of your site, e.g. /blog
# Navigation
urls:
- text: Home
url: /
- text: About
url: /about
- text: Blog
url: /blog
- text: Gallery
url: /gallery
- # text: Shop
# url : /shop
- text: Contact
url : /contact
# Edit Author details (For multi authors check _data/authors.yml)
author_logo: profile.png
author: a good boy
author_bio: This is a blog of producing the final test project
author_email: "<EMAIL>"
author_location: South Korea
author_website_url: "http://Suyoung-Jeon.com"
hero_cover_img: sample_cover.jpg # replace this for changing homepage cover (eg. try cover.jpeg). Image should be in /assets/img
typewrite-text: 이장연, 이진우, 전수영 블로그에 오신 것을 환영합니다.
# Education
author_education_details:
- college_logo: konyang.png
college_name: Konynag University
college_url: https://www.konyang.ac.kr/kor.do
college_degree: 융합IT학
visibility: true
# social links
twitter_username: null
github_username: null
facebook_username: null
linkedin_username: null
behance_username: null
instagram_username: null
medium_username: null
telegram_username: null
dribbble_username: null
flickr_username: null
#for comments ( we got Disqus and Hyvor Commenting, uncomment the one you want to use )
disqus_shortname: suyoung-jeon-github-io
hyvor_talk_website_id: 476
# wakatime username (coding activity)
wakatime_username: sujaykundu777
# mailchimp embedded form url (newsletter):
mailchimp_form_url: https://sujaykundu.us10.list-manage.com/subscribe/post?u=50bab1c85eae24ecfb0f68361&id=3a2dd721d0
# contact form - (choose formspress or getform)
# getform_endpoint: 83b703c3-9e47-4df4-ac55-e24d7eb02abc
formspree_email: 2
# releases - widget can be (sidebar, modal or embed)
olvy_organization: devlopr
olvy_widget_type: sidebar
# syntax highlighter
markdown: kramdown
highlighter: rouge
permalink: pretty
# Choose what to show ( can be true or false)
show_author_work_experiences: true
show_author_education_details: true
show_author_project_details: true
# pagination of posts
paginate: 4
per_page: 4
paginate_path: "/blog/page/:num/"
# minify
# compress_html:
# clippings: all
# comments: ["<!-- ", " -->"]
# endings: all
# ignore:
# envs: ['development']
# Archives
# jekyll-archives:
# enabled:
# - categories
# layout: archive
# permalinks:
# category: '/category/:name/'
collections:
products:
output: true
authors:
output: true
defaults:
-
scope:
path: "gallery"
values:
permalink: /:path/:basename:output_ext
-
scope:
path: ""
type: authors
values:
layout: author
permalink: /blog/authors/:slug
# Build settings
plugins:
- jekyll-paginate
- jekyll-gist
- jekyll-seo-tag
- jekyll-sitemap
- jekyll-admin
# Jekyll Admin Config
jekyll_admin:
# hidden_links:
# - posts
# - pages
# - staticfiles
# - datafiles
# - configuration
homepage: "posts"
# Exclude from processing.
# The following items will not be processed, by default.
# Any item listed under the `exclude:` key here will be automatically added to
# the internal "default list".
#
# Excluded items can be processed by explicitly listing the directories or
# their entries' file path in the `include:` list.
### Switching Deployment Strategy (in DEPLOY_STRATEGY) file change the key accordingly:
# none - For no default
# gh-pages - For Github Pages
# firebase - For Firebase Hosting
exclude:
- .sass-cache/
- .jekyll-cache/
- gemfiles/
- Gemfile
- Gemfile.lock
- node_modules/
- vendor/bundle/
- vendor/cache/
- vendor/gems/
- vendor/ruby/
destination: ./build
|
_config.yml
|
$schema: "http://json-schema.org/draft-04/schema#"
id: "read_group"
title: Read Group
type: object
description: "Sequencing reads from one lane of an NGS experiment."
namespace: https://dcp.bionimbus.org/
category: biospecimen
validators: null
project: '*'
program: '*'
additionalProperties: false
submittable: true
systemProperties:
- id
- project_id
- created_datetime
- updated_datetime
- state
links:
- name: aliquots
label: derived_from
target_type: aliquot
multiplicity: many_to_one
required: true
backref: read_groups
required:
- type
- submitter_id
- aliquots
uniqueKeys:
- [ id ]
- [ project_id, submitter_id ]
properties:
$ref: "_definitions.yaml#/ubiquitous_properties"
experiment_name:
term:
$ref: "_terms.yaml#/experiment_name"
type: string
sequencing_center:
term:
$ref: "_terms.yaml#/sequencing_center"
type: string
sequencing_date:
$ref: "_definitions.yaml#/datetime"
platform:
term:
$ref: "_terms.yaml#/platform"
enum:
- Illumina
- SOLiD
- LS454
- Ion Torrent
- Complete Genomics
- PacBio
- Other
- Affymetrix
instrument_model:
terms:
$ref: "_terms.yaml#/instrument_model"
enum:
- 454 GS FLX Titanium
- AB SOLiD 4
- AB SOLiD 2
- AB SOLiD 3
- Complete Genomics
- Illumina HiSeq X Ten
- Illumina HiSeq X Five
- Illumina Genome Analyzer II
- Illumina Genome Analyzer IIx
- Illumina HiSeq 2000
- Illumina HiSeq 2500
- Illumina HiSeq 4000
- Illumina MiSeq
- Illumina NextSeq
- Ion Torrent PGM
- Ion Torrent Proton
- PacBio RS
- Ion S5 XL System, Ion 530 Chip
- Other
library_strategy:
term:
$ref: "_terms.yaml#/library_strategy"
enum:
- WGS
- WES
- RNA-Seq
- ChIP-Seq
- miRNA-Seq
- Bisulfite-Seq
- Validation
- Amplicon
- OMNI
- Exclude
- Other
RIN:
term:
$ref: "_terms.yaml#/RIN"
type: number
flow_cell_barcode:
term:
$ref: "_terms.yaml#/flow_cell_barcode"
type: string
includes_spike_ins:
term:
$ref: "_terms.yaml#/includes_spike_ins"
type: boolean
spike_ins_fasta:
term:
$ref: "_terms.yaml#/spike_ins_fasta"
type: string
spike_ins_concentration:
term:
$ref: "_terms.yaml#/spike_ins_concentration"
type: string
library_selection:
term:
$ref: "_terms.yaml#/library_selection"
enum:
- Hybrid_Selection
- PCR
- Affinity_Enrichment
- Poly-T_Enrichment
- RNA_Depletion
- Other
library_preparation_kit_name:
term:
$ref: "_terms.yaml#/library_preparation_kit_name"
type: string
library_preparation_kit_vendor:
term:
$ref: "_terms.yaml#/library_preparation_kit_vendor"
type: string
library_preparation_kit_catalog_number:
term:
$ref: "_terms.yaml#/library_preparation_kit_catalog_number"
type: string
library_preparation_kit_version:
term:
$ref: "_terms.yaml#/library_preparation_kit_version"
type: string
library_name:
term:
$ref: "_terms.yaml#/library_name"
type: string
target_capture_kit_name: # conditionally required for WXS etc
term:
$ref: "_terms.yaml#/target_capture_kit_name"
type: string
target_capture_kit_vendor:
term:
$ref: "_terms.yaml#/target_capture_kit_vendor"
type: string
target_capture_kit_catalog_number:
term:
$ref: "_terms.yaml#/target_capture_kit_catalog_number"
type: string
target_capture_kit_version:
term:
$ref: "_terms.yaml#/target_capture_kit_version"
type: string
target_capture_kit_target_region:
term:
$ref: "_terms.yaml#/target_capture_kit_target_region"
type: string
size_selection_range:
term:
$ref: "_terms.yaml#/size_selection_range"
type: string
adapter_name:
term:
$ref: "_terms.yaml#/adapter_name"
type: string
adapter_sequence:
term:
$ref: "_terms.yaml#/adapter_sequence"
type: string
to_trim_adapter_sequence:
term:
$ref: "_terms.yaml#/to_trim_adapter_sequence"
type: boolean
library_strand:
term:
$ref: "_terms.yaml#/library_strand"
enum:
- Unstranded
- First_Stranded
- Second_Stranded
base_caller_name:
term:
$ref: "_terms.yaml#/base_caller_name"
type: string
base_caller_version:
term:
$ref: "_terms.yaml#/base_caller_version"
type: string
is_paired_end:
term:
$ref: "_terms.yaml#/is_paired_end"
type: boolean
read_length:
type: integer
read_group_name: # it may be good to assign UUID to read group
description: "Read Group Name"
type: string
barcoding_applied:
description: "True/False: was barcoding applied?"
type: boolean
aliquots:
$ref: "_definitions.yaml#/to_one"
|
dictionary/gtex/gdcdictionary/schemas/read_group.yaml
|
pub_rate: 1.0
base_path: '' # Optional, prepended to all diagnostic output
analyzers:
sound_play:
type: diagnostic_aggregator/GenericAnalyzer
path: SoundPlay
startswith: sound_play
breakers:
type: diagnostic_aggregator/AnalyzerGroup
path: Breakers
analyzers:
base_breaker:
type: diagnostic_aggregator/GenericAnalyzer
path: Base Breaker
contains: base_breaker
remove_prefix: "robot_driver: "
battery_breaker:
type: diagnostic_aggregator/GenericAnalyzer
path: Battery Breaker
contains: battery_breaker
remove_prefix: "robot_driver: "
computer_breaker:
type: diagnostic_aggregator/GenericAnalyzer
path: Computer Breaker
contains: computer_breaker
remove_prefix: "robot_driver: "
supply_breaker:
type: diagnostic_aggregator/GenericAnalyzer
path: Supply Breaker
contains: supply_breaker
remove_prefix: "robot_driver: "
arm_breaker:
type: diagnostic_aggregator/GenericAnalyzer
path: Arm Breaker
contains: arm_breaker
remove_prefix: "robot_driver: "
gripper_breaker:
type: diagnostic_aggregator/GenericAnalyzer
path: Gripper Breaker
contains: gripper_breaker
remove_prefix: "robot_driver: "
motors:
type: diagnostic_aggregator/AnalyzerGroup
path: Motor Control Boards
analyzers:
l_wheel:
type: diagnostic_aggregator/GenericAnalyzer
path: Left Wheel
startswith: l_wheel
r_wheel:
type: diagnostic_aggregator/GenericAnalyzer
path: Right Wheel
startswith: r_wheel
shoulder_pan:
type: diagnostic_aggregator/GenericAnalyzer
path: Shoulder Pan Joint
startswith: shoulder_pan
shoulder_lift:
type: diagnostic_aggregator/GenericAnalyzer
path: Shoulder Lift Joint
startswith: shoulder_lift
upperarm_roll:
type: diagnostic_aggregator/GenericAnalyzer
path: Upperarm Roll Joint
startswith: upperarm_roll
elbow_flex:
type: diagnostic_aggregator/GenericAnalyzer
path: Elbow Flex Joint
startswith: elbow_flex
forearm_roll:
type: diagnostic_aggregator/GenericAnalyzer
path: Forearm Roll Joint
startswith: forearm_roll
wrist_flex:
type: diagnostic_aggregator/GenericAnalyzer
path: Wrist Flex Joint
startswith: wrist_flex
wrist_roll:
type: diagnostic_aggregator/GenericAnalyzer
path: Wrist Roll Joint
startswith: wrist_roll
head_tilt:
type: diagnostic_aggregator/GenericAnalyzer
path: Head Tilt Joint
startswith: head_tilt
head_pan:
type: diagnostic_aggregator/GenericAnalyzer
path: Head Pan Joint
startswith: head_pan
torso_lift:
type: diagnostic_aggregator/GenericAnalyzer
path: Torso Lift Joint
startswith: torso_lift
sensors:
type: diagnostic_aggregator/AnalyzerGroup
path: Sensors
analyzers:
base_breaker:
type: diagnostic_aggregator/GenericAnalyzer
path: Sick TIM551 Laser
startswith: sick_tim
#head_camera:
# type: diagnostic_aggregator/GenericAnalyzer
# path:
# startswith:
peripherals:
type: diagnostic_aggregator/AnalyzerGroup
path: Peripherals
analyzers:
joy:
type: diagnostic_aggregator/GenericAnalyzer
path: PS3 Controller
find_and_remove_prefix: "joy:"
system:
type: diagnostic_aggregator/AnalyzerGroup
path: System
analyzers:
mainboard:
type: diagnostic_aggregator/GenericAnalyzer
path: Mainboard
startswith: Mainboard
charger:
type: diagnostic_aggregator/GenericAnalyzer
path: Charger
startswith: Charger
|
simulation_helpers/config/analyzers.yaml
|
--- !<MAP>
contentType: "MAP"
firstIndex: "2019-05-24 10:35"
game: "Unreal Tournament"
name: "DM-Deck16][MonsterMadness"
author: "Elliot \"Myscha\" Cannon"
description: "Return to the Deck!"
releaseDate: "2009-04"
attachments:
- type: "IMAGE"
name: "DM-Deck16][MonsterMadness_shot_8.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament/Maps/DeathMatch/D/DM-Deck16%5D%5BMonsterMadness_shot_8.png"
- type: "IMAGE"
name: "DM-Deck16][MonsterMadness_shot_3.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament/Maps/DeathMatch/D/DM-Deck16%5D%5BMonsterMadness_shot_3.png"
- type: "IMAGE"
name: "DM-Deck16][MonsterMadness_shot_1.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament/Maps/DeathMatch/D/DM-Deck16%5D%5BMonsterMadness_shot_1.png"
- type: "IMAGE"
name: "DM-Deck16][MonsterMadness_shot_4.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament/Maps/DeathMatch/D/DM-Deck16%5D%5BMonsterMadness_shot_4.png"
- type: "IMAGE"
name: "DM-Deck16][MonsterMadness_shot_7.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament/Maps/DeathMatch/D/DM-Deck16%5D%5BMonsterMadness_shot_7.png"
- type: "IMAGE"
name: "DM-Deck16][MonsterMadness_shot_11.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament/Maps/DeathMatch/D/DM-Deck16%5D%5BMonsterMadness_shot_11.png"
- type: "IMAGE"
name: "DM-Deck16][MonsterMadness_shot_5.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament/Maps/DeathMatch/D/DM-Deck16%5D%5BMonsterMadness_shot_5.png"
- type: "IMAGE"
name: "DM-Deck16][MonsterMadness_shot_10.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament/Maps/DeathMatch/D/DM-Deck16%5D%5BMonsterMadness_shot_10.png"
- type: "IMAGE"
name: "DM-Deck16][MonsterMadness_shot_6.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament/Maps/DeathMatch/D/DM-Deck16%5D%5BMonsterMadness_shot_6.png"
- type: "IMAGE"
name: "DM-Deck16][MonsterMadness_shot_2.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament/Maps/DeathMatch/D/DM-Deck16%5D%5BMonsterMadness_shot_2.png"
- type: "IMAGE"
name: "DM-Deck16][MonsterMadness_shot_9.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament/Maps/DeathMatch/D/DM-Deck16%5D%5BMonsterMadness_shot_9.png"
originalFilename: "MonsterMadness-Beta.zip"
hash: "edbbd5391700a340ff193e52824ceceb1cff4494"
fileSize: 4318771
files:
- name: "DM-Deck16][MonsterMadness.unr"
fileSize: 881016
hash: "e9d762d7abb3c32aebeaa7d3dc96ff842de87441"
- name: "txMonsterMadness.utx"
fileSize: 3617484
hash: "d936d1f7e7edb8fba26583ef7eeb578df630c655"
- name: "MonsterMadness.u"
fileSize: 1420755
hash: "a7a21c018e75445876de361ce3a8add196c0b69b"
otherFiles: 11
dependencies:
DM-Deck16][MonsterMadness.unr:
- status: "OK"
name: "MonsterMadness"
MonsterMadness.u:
- status: "OK"
name: "txMonsterMadness"
downloads:
- url: "http://www.unrealplayground.com/forums/downloads.php?do=file&id=7405"
main: false
repack: false
state: "MISSING"
- url: "https://f002.backblazeb2.com/file/unreal-archive-files/Unreal%20Tournament/Maps/DeathMatch/D/MonsterMadness-Beta.zip"
main: true
repack: false
state: "OK"
- url: "https://files.vohzd.com/unrealarchive/Unreal%20Tournament/Maps/DeathMatch/D/e/d/bbd539/MonsterMadness-Beta.zip"
main: false
repack: false
state: "OK"
- url: "https://unreal-archive-files.eu-central-1.linodeobjects.com/Unreal%20Tournament/Maps/DeathMatch/D/e/d/bbd539/MonsterMadness-Beta.zip"
main: false
repack: false
state: "OK"
deleted: false
gametype: "DeathMatch"
title: "Deck16]["
playerCount: "2-16"
themes:
Skaarj Tech: 1.0
bots: true
|
content/Unreal Tournament/Maps/DeathMatch/D/e/d/bbd539/dm-deck16monstermadness_[edbbd539].yml
|
apiVersion: v1
kind: ConfigMap
metadata:
name: fluentd-config
data:
fluent.conf: |
<source>
@type tail
path /var/log/containers/*.log
pos_file /var/log/fluentd-containers.log.pos
tag kubernetes.*
read_from_head true
<parse>
@type regexp
expression /^(?<time>.+) (?<stream>stdout|stderr)( (?<logtag>.))? (?<log>.*)$/
time_format %Y-%m-%dT%H:%M:%S.%NZ
</parse>
exclude_path ["/var/log/containers/fluentd*"]
</source>
<label @FLUENT_LOG>
<match fluent.*>
@type null
</match>
</label>
<match kubernetes.var.log.containers.**fluentd**.log>
@type null
</match>
<match kubernetes.var.log.containers.**kube-system**.log>
@type null
</match>
<match kubernetes.var.log.containers.**kibana**.log>
@type null
</match>
<match kubernetes.var.log.containers.**elasticsearch**.log>
@type null
</match>
<filter kubernetes.**>
@type kubernetes_metadata
@id filter_kube_metadata
</filter>
<filter kubernetes.var.log.containers.**>
@type parser
<parse>
@type json
json_parser oj
time_format %Y-%m-%dT%H:%M:%S
</parse>
key_name log
replace_invalid_sequence true
emit_invalid_record_to_error false
reserve_data true
</filter>
<match kubernetes.**>
@type elasticsearch
@log_level debug
host "#{ENV['FLUENT_ELASTICSEARCH_HOST']}"
port "#{ENV['FLUENT_ELASTICSEARCH_PORT']}"
scheme "#{ENV['FLUENT_ELASTICSEARCH_SCHEME'] || 'http'}"
ssl_verify "#{ENV['FLUENT_ELASTICSEARCH_SSL_VERIFY'] || 'true'}"
user "#{ENV['FLUENT_ELASTICSEARCH_USER']}" # remove these lines if not needed
password "#{ENV['FLUENT_ELASTICSEARCH_PASSWORD']}" # remove these lines if not needed
logstash_format true
logstash_prefix fluentd
logstash_dateformat %Y%m%d
include_tag_key true
reload_connections true
log_es_400_reason true
<buffer>
flush_thread_count 8
flush_interval 5s
chunk_limit_size 2M
queue_limit_length 32
retry_max_interval 30
retry_forever true
</buffer>
</match>
|
dotnet/MSc-Workflows/deploy/logging/fluentd-config-map-containerd.yml
|
--- !<MAP>
contentType: "MAP"
firstIndex: "2018-10-20 13:29"
game: "Unreal Tournament 2004"
name: "CTF-UCMP-Aquatica"
author: "Jos '<NAME>"
description: "The sea, rough and ruthless on the surface, tranquil and serene within.\
\ Th"
releaseDate: "2005-03"
attachments:
- type: "IMAGE"
name: "CTF-UCMP-Aquatica_shot_1.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament%202004/Maps/Capture%20The%20Flag/U/CTF-UCMP-Aquatica_shot_1.png"
originalFilename: "ctf-ucmp-aquatica.zip"
hash: "5b94779f9a01a512354da2a19232f0bb5ec0aeae"
fileSize: 32693276
files:
- name: "CTF-UCMP-Aquatica.ut2"
fileSize: 30904835
hash: "0ee98c18d55da3dd75fb325e75a317fbf16c7451"
- name: "Aquatica_Tex.utx"
fileSize: 904778
hash: "a0389534c09be55d3cfaa46412ed3331cef5dfa8"
- name: "HourDinoraTex.utx"
fileSize: 18741403
hash: "22ca7dbe0faa1ef3f7117a71a93dfaa4e86136e5"
- name: "AmbModern.uax"
fileSize: 6112404
hash: "dac845ee6fd628e223a2c36e66d866b145bb5444"
- name: "HourDinora.usx"
fileSize: 4056062
hash: "2d579af82f16c14cf290446669a2a155ffc4a81a"
- name: "Fish.utx"
fileSize: 1774295
hash: "3a73335418404c3e80f043ccb1f745377334af17"
- name: "Aquatica.ogg"
fileSize: 5598739
hash: "dadf922cfb8014e494c0190a60ba9f6aec3e7303"
otherFiles: 1
dependencies:
CTF-UCMP-Aquatica.ut2:
- status: "OK"
name: "HourDinora"
- status: "OK"
name: "Aquatica_Tex"
- status: "OK"
name: "AmbModern"
- status: "OK"
name: "Fish"
- status: "OK"
name: "HourDinoraTex"
HourDinora.usx:
- status: "OK"
name: "HourDinoraTex"
downloads:
- url: "https://f002.backblazeb2.com/file/unreal-archive-files/Unreal%20Tournament%202004/Maps/Capture%20The%20Flag/U/5/b/94779f/ctf-ucmp-aquatica.zip"
main: true
repack: false
state: "OK"
- url: "https://files.vohzd.com/unrealarchive/Unreal%20Tournament%202004/Maps/Capture%20The%20Flag/U/5/b/94779f/ctf-ucmp-aquatica.zip"
main: false
repack: false
state: "OK"
- url: "https://unreal-archive-files.eu-central-1.linodeobjects.com/Unreal%20Tournament%202004/Maps/Capture%20The%20Flag/U/5/b/94779f/ctf-ucmp-aquatica.zip"
main: false
repack: false
state: "OK"
deleted: false
gametype: "Capture The Flag"
title: "Aquatica"
playerCount: "8-12"
themes: {}
bots: true
|
content/Unreal Tournament 2004/Maps/Capture The Flag/U/5/b/94779f/ctf-ucmp-aquatica_[5b94779f].yml
|
form:
badge:
errors:
parent:
loop: 'Ce badge est de niveau supérieur (%hierarchy%), vous ne pouvez donc pas l''utiliser en badge de niveau inférieur.'
synonym: 'Ce badge est un synonyme de "%name%", utilisez "%name%" directement.'
synonym:
has_parent: 'Le badge "%name%" a un badge de niveau précédent, il ne peut donc pas remplacé par un autre badge.'
has_synonyms: 'Le badge "%name%" a des synonymes, vous devez d''abord les supprimer avant de lui-même le marquer comme un synonyme.'
visible: 'Le badge "%name%" est activé, vous ne pouvez donc pas le remplacer par un autre badge.'
campaign:
errors:
message:
empty: 'Veuillez renseigner un message'
type:
empty: 'Sélectionner un type de déclenchement'
volunteers:
min: 'Sélectionner au moins un bénévole'
communication:
errors:
call_multiple: 'Les réponses multiples ne sont pas possibles par téléphone.'
email_geolocation: 'Pas de géolocalisation possible lors d''un envoi par email.'
invalid_shortcut: 'Ce raccourci n''existe pas'
no_subject: 'Vous devez définir un sujet au mail'
prefix_already_used: 'Le préfixe "{{ string }}" est déjà utilisé dans une communication d''un déclenchement en cours.'
too_large_call: 'Ce message vocal est trop long.'
too_large_choice: 'La taille d''une ou plusieurs réponses est trop grande.'
too_large_email: 'Cet email est trop long.'
too_large_sms: 'Ce SMS est trop long.'
operation:
does_not_exist: 'Cette opération est introuvable sur Minutis.'
phone_card:
error_duplicate: 'Vous avez saisi plusieurs fois le même numéro.'
error_multi_preferred: 'Vous ne pouvez avoir qu''un seul numéro de téléphone principal.'
error_no_preferred: 'Vous n''avez pas choisi de numéro principal.'
structure:
parent:
loop: 'Vous êtes en train de créer une boucle dans la hiérarchie des structures (%hierarchy%), vous ne pouvez pas mettre une structure enfant comme structure parente.'
volunteer:
errors:
redcall_user: 'Ce bénévole est rattaché à un utilisateur RedCall, supprimez d''abord l''utilisateur afin de pouvoir désactiver le bénévole.'
|
symfony/translations/validators.fr.yml
|
trigger:
- master
jobs:
# - job: Windows
# pool:
# name: Hosted VS2017
# demands: npm
# steps:
# - task: NodeTool@0
# displayName: 'Use Node 8.x'
# inputs:
# versionSpec: 8.x
# - task: Npm@1
# displayName: 'Install dependencies'
# inputs:
# verbose: false
# - task: Npm@1
# displayName: 'Compile sources'
# inputs:
# command: custom
# verbose: false
# customCommand: 'run compile'
# - script: 'node node_modules/vscode/bin/test'
# displayName: 'Run tests'
# - job: macOS
# pool:
# name: Hosted macOS
# demands: npm
# steps:
# - task: NodeTool@0
# displayName: 'Use Node 8.x'
# inputs:
# versionSpec: 8.x
# - task: Npm@1
# displayName: 'Install dependencies'
# inputs:
# verbose: false
# - task: Npm@1
# displayName: 'Compile sources'
# inputs:
# command: custom
# verbose: false
# customCommand: 'run compile'
# - script: 'node node_modules/vscode/bin/test'
# displayName: 'Run tests'
# - job: Linux
# pool:
# name: Hosted Ubuntu 1604
# demands: npm
# steps:
# - task: NodeTool@0
# displayName: 'Use Node 8.x'
# inputs:
# versionSpec: 8.x
# - task: Npm@1
# displayName: 'Install dependencies'
# inputs:
# verbose: false
# - task: Npm@1
# displayName: 'Compile sources'
# inputs:
# command: custom
# verbose: false
# customCommand: 'run compile'
# - script: |
# set -e
# /usr/bin/Xvfb :10 -ac >> /tmp/Xvfb.out 2>&1 &
# disown -ar
# displayName: 'Start xvfb'
# - script: 'node node_modules/vscode/bin/test'
# displayName: 'Run tests'
# env:
# DISPLAY: :10
- job: Mocha_Windows
pool:
name: Hosted VS2017
demands: npm
steps:
- task: NodeTool@0
displayName: 'Use Node 8.x'
inputs:
versionSpec: 8.x
- task: Npm@1
displayName: 'Install dependencies'
inputs:
verbose: false
- task: Npm@1
displayName: 'Compile sources'
inputs:
command: custom
verbose: false
customCommand: 'run compile'
- task: Npm@1
displayName: 'Run Tests'
inputs:
command: custom
verbose: false
customCommand: 'run mocha'
- job: Mocha_Linux
pool:
name: Hosted Ubuntu 1604
demands: npm
steps:
- task: NodeTool@0
displayName: 'Use Node 8.x'
inputs:
versionSpec: 8.x
- task: Npm@1
displayName: 'Install dependencies'
inputs:
verbose: false
- task: Npm@1
displayName: 'Compile sources'
inputs:
command: custom
verbose: false
customCommand: 'run compile'
- task: Npm@1
displayName: 'Run Tests'
inputs:
command: custom
verbose: false
customCommand: 'run mocha'
- job: Mocha_macOS
pool:
name: Hosted macOS
demands: npm
steps:
- task: NodeTool@0
displayName: 'Use Node 8.x'
inputs:
versionSpec: 8.x
- task: Npm@1
displayName: 'Install dependencies'
inputs:
verbose: false
- task: Npm@1
displayName: 'Compile sources'
inputs:
command: custom
verbose: false
customCommand: 'run compile'
- task: Npm@1
displayName: 'Run Tests'
inputs:
command: custom
verbose: false
customCommand: 'run azure'
- task: PublishTestResults@2
inputs:
testResultsFiles: '**/TEST-RESULTS.xml'
testRunTitle: 'Test results for Unit Tests'
condition: succeededOrFailed()
- task: PublishCodeCoverageResults@1
inputs:
codeCoverageTool: Cobertura
summaryFileLocation: '$(System.DefaultWorkingDirectory)/**/*coverage.xml'
reportDirectory: '$(System.DefaultWorkingDirectory)/**/coverage'
|
azure-pipelines.yml
|
name: 🐛 Bug report
description: Open an issue about a bug that needs fixing.
labels: Issue-Bug
body:
- type: checkboxes
attributes:
label: Prerequisites
options:
- label: I have written a descriptive issue title.
required: true
- label: I have searched all [issues](https://github.com/PowerShell/vscode-powershell/issues?q=is%3Aissue) to ensure it has not already been reported.
- label: I have read the [troubleshooting](https://github.com/PowerShell/vscode-powershell/blob/master/docs/troubleshooting.md) guide.
- label: I am sure this issue is with the _extension itself_ and does not reproduce in a standalone [PowerShell](https://github.com/PowerShell/PowerShell/issues/new/choose) instance.
- label: I have verified that I am using the latest version of Visual Studio Code and the PowerShell extension.
- label: If this is a security issue, I have read the [security issue reporting guidance](https://github.com/PowerShell/vscode-powershell/blob/master/SECURITY.md).
- type: textarea
attributes:
label: Summary
description: Explain the problem briefly below.
placeholder: I am experiencing a problem with X. I think Y should be happening but Z is actually happening.
validations:
required: true
- type: textarea
attributes:
label: PowerShell Version
description: Paste verbatim output from `$PSVersionTable` below. Please double-check that this is the PowerShell version that VS Code is set to use.
render: console
placeholder: |
PS> $PSVersionTable
Name Value
---- -----
PSVersion 7.1.3
PSEdition Core
GitCommitId 7.1.3
OS Darwin 20.4.0 Darwin Kernel
Platform Unix
PSCompatibleVersions {1.0, 2.0, 3.0, 4.0…}
PSRemotingProtocolVersion 2.3
SerializationVersion 172.16.58.3
WSManStackVersion 3.0
validations:
required: true
- type: textarea
attributes:
label: Visual Studio Code Version
description: Paste verbatim output from `code --version` below.
render: console
placeholder: |
PS> code --version
1.57.1
507ce72a4466fbb27b715c3722558bb15afa9f48
arm64
validations:
required: true
- type: textarea
attributes:
label: Extension Version
description: Paste verbatim output from `code --list-extensions --show-versions | Select-String powershell` below.
render: console
placeholder: |
PS> code --list-extensions --show-versions | Select-String powershell
ms-vscode.powershell@2021.8.0
validations:
required: true
- type: textarea
attributes:
label: Steps to Reproduce
description: List of steps, sample code, failing test or link to a project that reproduces the behavior. Make sure you place a stack trace inside a code (```) block to avoid linking unrelated issues.
validations:
required: true
- type: textarea
attributes:
label: Visuals
description: Please upload images or animations that can be used to reproduce issues in the area below. Try the [Steps Recorder](https://support.microsoft.com/en-us/windows/record-steps-to-reproduce-a-problem-46582a9b-620f-2e36-00c9-04e25d784e47) on Windows or [Screenshot](https://support.apple.com/en-us/HT208721) on macOS.
- type: textarea
attributes:
label: Logs
description: Please upload logs collected by following these [instructions](https://github.com/PowerShell/vscode-powershell/blob/master/docs/troubleshooting.md#logs) in the area below. Be careful to scrub sensitive information!
|
.github/ISSUE_TEMPLATE/bug-report.yml
|
items:
- uid: com.microsoft.azure.storage.blob._lease_status
id: _lease_status
parent: com.microsoft.azure.storage.blob
children:
- com.microsoft.azure.storage.blob._lease_status.LOCKED
- com.microsoft.azure.storage.blob._lease_status.parse(final String)
- com.microsoft.azure.storage.blob._lease_status.UNSPECIFIED
href: com.microsoft.azure.storage.blob._lease_status.yml
langs:
- java
name: LeaseStatus
nameWithType: LeaseStatus
fullName: com.microsoft.azure.storage.blob.LeaseStatus
type: Enum
source:
remote: &o0
path: microsoft-azure-storage/src/com/microsoft/azure/storage/blob/LeaseStatus.java
branch: master
repo: https://github.com/Azure/azure-storage-java
path: microsoft-azure-storage/src/com/microsoft/azure/storage/blob/LeaseStatus.java
startLine: 26
package: com.microsoft.azure.storage.blob
summary: >-
<p>Specifies the lease status of a blob. </p>
<p>You can check the lease status of a blob to determine whether it currently has an active lease (locked for exclusive-write access), or whether it is available for exclusive-write access. </p>
syntax: &o1
content: public enum LeaseStatus
inheritance:
- java.lang.Object
- java.lang.Enum<LeaseStatus>
inheritedMembers:
- com.microsoft.azure.storage.blob._lease_status_1a3ec0ca4d0accf678f33cc078b453bcc9
- uid: com.microsoft.azure.storage.blob._lease_status.LOCKED
id: LOCKED
parent: com.microsoft.azure.storage.blob._lease_status
href: com.microsoft.azure.storage.blob._lease_status.yml
langs:
- java
name: LOCKED
nameWithType: LeaseStatus.LOCKED
fullName: com.microsoft.azure.storage.blob.LeaseStatus.LOCKED
type: Field
source:
remote: *o0
path: microsoft-azure-storage/src/com/microsoft/azure/storage/blob/LeaseStatus.java
startLine: 35
package: com.microsoft.azure.storage.blob
summary: <p>Specifies the blob is locked for exclusive-write access. </p>
syntax:
content: public LOCKED
- uid: com.microsoft.azure.storage.blob._lease_status.parse(final String)
id: parse(final String)
parent: com.microsoft.azure.storage.blob._lease_status
href: com.microsoft.azure.storage.blob._lease_status.yml
langs:
- java
name: parse(final String typeString)
nameWithType: LeaseStatus.parse(final String typeString)
fullName: com.microsoft.azure.storage.blob.LeaseStatus.parse(final String typeString)
overload: com.microsoft.azure.storage.blob._lease_status.parse*
type: Method
source:
remote: *o0
path: microsoft-azure-storage/src/com/microsoft/azure/storage/blob/LeaseStatus.java
startLine: 50
package: com.microsoft.azure.storage.blob
summary: >-
<p>Parses a lease status from the given string.</p>
<p></p>
syntax:
content: protected static LeaseStatus parse(final String typeString)
parameters:
- id: typeString
type: 3d93591e
description: <p>A <code>String</code> which contains the lease status to parse.</p>
return:
type: com.microsoft.azure.storage.blob._lease_status
description: <p>A <code>LeaseStatus</code> value that represents the lease status. </p>
- uid: com.microsoft.azure.storage.blob._lease_status.UNSPECIFIED
id: UNSPECIFIED
parent: com.microsoft.azure.storage.blob._lease_status
href: com.microsoft.azure.storage.blob._lease_status.yml
langs:
- java
name: UNSPECIFIED
nameWithType: LeaseStatus.UNSPECIFIED
fullName: com.microsoft.azure.storage.blob.LeaseStatus.UNSPECIFIED
type: Field
source:
remote: *o0
path: microsoft-azure-storage/src/com/microsoft/azure/storage/blob/LeaseStatus.java
startLine: 30
package: com.microsoft.azure.storage.blob
summary: <p>Specifies the lease status is not specified. </p>
syntax:
content: public UNSPECIFIED
references:
- uid: 3d93591e
spec.java:
- name: final String
fullName: final String
- uid: com.microsoft.azure.storage.blob._lease_status.parse*
name: parse
nameWithType: LeaseStatus.parse
fullName: com.microsoft.azure.storage.blob.LeaseStatus.parse
package: com.microsoft.azure.storage.blob
- uid: com.microsoft.azure.storage.blob._lease_status_1a3ec0ca4d0accf678f33cc078b453bcc9
- uid: com.microsoft.azure.storage.blob._lease_status
parent: com.microsoft.azure.storage.blob
href: com.microsoft.azure.storage.blob._lease_status.yml
name: LeaseStatus
nameWithType: LeaseStatus
fullName: com.microsoft.azure.storage.blob.LeaseStatus
type: Enum
summary: >-
<p>Specifies the lease status of a blob. </p>
<p>You can check the lease status of a blob to determine whether it currently has an active lease (locked for exclusive-write access), or whether it is available for exclusive-write access. </p>
syntax: *o1
|
docs-ref-autogen/com.microsoft.azure.storage.blob._lease_status.yml
|
---
web-app:
servlet:
- servlet-name: "cofaxCDS"
servlet-class: "org.cofax.cds.CDSServlet"
init-param:
configGlossary:installationAt: "Philadelphia, PA"
configGlossary:adminEmail: "<EMAIL>"
configGlossary:poweredBy: "Cofax"
configGlossary:poweredByIcon: "/images/cofax.gif"
configGlossary:staticPath: "/content/static"
templateProcessorClass: "org.cofax.WysiwygTemplate"
templateLoaderClass: "org.cofax.FilesTemplateLoader"
templatePath: "templates"
templateOverridePath: ""
defaultListTemplate: "listTemplate.htm"
defaultFileTemplate: "articleTemplate.htm"
useJSP: false
jspListTemplate: "listTemplate.jsp"
jspFileTemplate: "articleTemplate.jsp"
cachePackageTagsTrack: 200
cachePackageTagsStore: 200
cachePackageTagsRefresh: 60
cacheTemplatesTrack: 100
cacheTemplatesStore: 50
cacheTemplatesRefresh: 15
cachePagesTrack: 200
cachePagesStore: 100
cachePagesRefresh: 10
cachePagesDirtyRead: 10
searchEngineListTemplate: "forSearchEnginesList.htm"
searchEngineFileTemplate: "forSearchEngines.htm"
searchEngineRobotsDb: "WEB-INF/robots.db"
useDataStore: true
dataStoreClass: "org.cofax.SqlDataStore"
redirectionClass: "org.cofax.SqlRedirection"
dataStoreName: "cofax"
dataStoreDriver: "com.microsoft.jdbc.sqlserver.SQLServerDriver"
dataStoreUrl: "jdbc:microsoft:sqlserver://LOCALHOST:1433;DatabaseName=goon"
dataStoreUser: "sa"
dataStorePassword: "<PASSWORD>"
dataStoreTestQuery: "SET NOCOUNT ON;select test='test';"
dataStoreLogFile: "/usr/local/tomcat/logs/datastore.log"
dataStoreInitConns: 10
dataStoreMaxConns: 100
dataStoreConnUsageLimit: 100
dataStoreLogLevel: "debug"
maxUrlLength: 500
- servlet-name: "cofaxEmail"
servlet-class: "org.cofax.cds.EmailServlet"
init-param:
mailHost: "mail1"
mailHostOverride: "mail2"
- servlet-name: "cofaxAdmin"
servlet-class: "org.cofax.cds.AdminServlet"
- servlet-name: "fileServlet"
servlet-class: "org.cofax.cds.FileServlet"
- servlet-name: "cofaxTools"
servlet-class: "org.cofax.cms.CofaxToolsServlet"
init-param:
templatePath: "toolstemplates/"
log: 1
logLocation: "/usr/local/tomcat/logs/CofaxTools.log"
logMaxSize: ""
dataLog: 1
dataLogLocation: "/usr/local/tomcat/logs/dataLog.log"
dataLogMaxSize: ""
removePageCache: "/content/admin/remove?cache=pages&id="
removeTemplateCache: "/content/admin/remove?cache=templates&id="
fileTransferFolder: "/usr/local/tomcat/webapps/content/fileTransferFolder"
lookInContext: 1
adminGroupID: 4
betaServer: true
servlet-mapping:
cofaxCDS: "/"
cofaxEmail: "/cofaxutil/aemail/*"
cofaxAdmin: "/admin/*"
fileServlet: "/static/*"
cofaxTools: "/tools/*"
taglib:
taglib-uri: "cofax.tld"
taglib-location: "/WEB-INF/tlds/cofax.tld"
|
src/test/resources/web-app.yml
|
---
result: FAILURE
url: http://manhattan.ci.chef.co/job/chefdk-trigger-release/52/
failure_category: code
failure_cause: 'chef_acceptance[top-cookbooks::provision]: acceptance: chefdk-test'
timestamp: 2016-05-06 15:39:47 UTC
duration: 1h49m59s
triggered_by: chef-jenkins
active_duration: 1h49m44s
parameters:
GIT_REF: v0.14.11
EXPIRE_CACHE: false
change:
git_remote: https://github.com/chef/chef-dk.git
git_commit: <PASSWORD>
project: chefdk
version: 0.14.11
stages:
chefdk-test:
result: FAILURE
failure_category: code
failure_cause: 'chef_acceptance[top-cookbooks::provision]: acceptance'
url: http://manhattan.ci.chef.co/job/chefdk-test/155/
duration: 1h4m48s
runs:
acceptance:
result: FAILURE
failure_category: code
failure_cause: chef_acceptance[top-cookbooks::provision]
failed_in:
chef_acceptance:
- top-cookbooks::provision
step: CHEF-ACCEPTANCE
url: http://manhattan.ci.chef.co/job/chefdk-test/architecture=x86_64,platform=acceptance,project=chefdk,role=tester/155/
duration: 1h4m45s
chef_acceptance_timing:
- top-cookbooks:
provision: 59m51s
force-destroy: 30s
Total: 1h3m43s
Run:
Total: 1h3m43s
windows-2008r2-i386:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chefdk-test/architecture=i386,platform=windows-2008r2,project=chefdk,role=tester/155/
duration: 9m40s
debian-6:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chefdk-test/architecture=x86_64,platform=debian-6,project=chefdk,role=tester/155/
duration: 16m58s
debian-7:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chefdk-test/architecture=x86_64,platform=debian-7,project=chefdk,role=tester/155/
duration: 12m28s
debian-8:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chefdk-test/architecture=x86_64,platform=debian-8,project=chefdk,role=tester/155/
duration: 13m14s
el-6:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chefdk-test/architecture=x86_64,platform=el-6,project=chefdk,role=tester/155/
duration: 27m26s
el-7:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chefdk-test/architecture=x86_64,platform=el-7,project=chefdk,role=tester/155/
duration: 12m43s
mac_os_x-10.10:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chefdk-test/architecture=x86_64,platform=mac_os_x-10.10,project=chefdk,role=tester/155/
duration: 9m22s
mac_os_x-10.11:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chefdk-test/architecture=x86_64,platform=mac_os_x-10.11,project=chefdk,role=tester/155/
duration: 7m31s
mac_os_x-10.9:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chefdk-test/architecture=x86_64,platform=mac_os_x-10.9,project=chefdk,role=tester/155/
duration: 8m59s
ubuntu-12.04:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chefdk-test/architecture=x86_64,platform=ubuntu-12.04,project=chefdk,role=tester/155/
duration: 11m35s
ubuntu-14.04:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chefdk-test/architecture=x86_64,platform=ubuntu-14.04,project=chefdk,role=tester/155/
duration: 15m3s
chefdk-build:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chefdk-build/379/
duration: 44m52s
runs:
windows-2008r2-i386:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chefdk-build/architecture=i386,platform=windows-2008r2,project=chefdk,role=builder/379/
duration: 34m38s
debian-6:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chefdk-build/architecture=x86_64,platform=debian-6,project=chefdk,role=builder/379/
duration: 37m45s
el-6:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chefdk-build/architecture=x86_64,platform=el-6,project=chefdk,role=builder/379/
duration: 44m47s
el-7:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chefdk-build/architecture=x86_64,platform=el-7,project=chefdk,role=builder/379/
duration: 32m4s
mac_os_x-10.9:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chefdk-build/architecture=x86_64,platform=mac_os_x-10.9,project=chefdk,role=builder/379/
duration: 15m7s
ubuntu-12.04:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chefdk-build/architecture=x86_64,platform=ubuntu-12.04,project=chefdk,role=builder/379/
duration: 36m22s
chefdk-trigger-release:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chefdk-trigger-release/52/
duration: 2s
|
reports/manhattan.ci.chef.co/job/chefdk-trigger-release/52.yaml
|
homepage: https://gitlab.com/lysxia/hakyll-alectryon
changelog-type: markdown
hash: 7c1eb30f95e09cd500fcd70cd69a55bd7517057e27855d45550320d6ea6a3c1e
test-bench-deps: {}
maintainer: <EMAIL>
synopsis: Hakyll extension for rendering Coq code using Alectryon
changelog: |
# 0.1.1.0
- Fix Pygments cache
# 0.1.0.0
* Create hakyll-alectryon
basic-deps:
bytestring: -any
base: '>=4.9 && <5'
text: -any
filepath: -any
process: -any
pandoc-types: -any
pandoc: -any
mtl: -any
optparse-applicative: -any
hakyll: '>=4'
aeson: -any
all-versions:
- 0.1.0.0
- 0.1.1.0
author: <NAME>
latest: 0.1.1.0
description-type: markdown
description: |
Hakyll plugin for Alectryon [](https://hackage.haskell.org/package/hakyll-alectryon)
===========================
[Alectryon][alectryon] is a tool for pretty-printing Coq proofs,
notably rendering proof states between tactics.
This package, *hakyll-alectryon*, integrates Alectryon with the Hakyll site
generator.
[alectryon]: https://github.com/cpitclaudel/alectryon
[pygments]: https://pygments.org
## Dependencies
To use this package, first install [Alectryon][alectryon].
The executables `alectryon` and `python3` must be on your `$PATH`.
(Pygments is also used by this package, and is required by Alectryon anyway.)
## Usage
The simplest way to use this package is to stick the `tryTransform_` function
in a compiler for Markdown blog posts:
```haskell
-- Main.hs
import qualified Hakyll.Alectryon as Alectryon
main :: IO ()
main = hakyll $ do
(...)
match "blog/*.md" $ do
(...)
compile $ do
(...)
Alectryon.tryTransform_ doc >>= (...)
```
This will process all `alectryon` and `coq` code blocks using Alectryon and
Pygments, respectively.
- `alectryon` code blocks are the actual parts of the literate program which
will be interpreted. Interactive proof states will be rendered.
- `coq` code blocks are just for show. They will only go through syntax
highlighting using Pygments, in roughly the same style as Alectryon.
Options can be passed to Alectryon to find Coq dependencies,
via the metadata header of each post:
```
---
title: My awesome post
alectryon: ["-Q", "my/coq/lib", "MyCoqLib"]
---
```
The compiled `.vo` files must already be present.
### Modular usage
You can also allow your blog to be built without requiring those
external dependencies, by caching the output of Alectryon and Pygments
and checking it into version control (git).
Create a cache directory for each document that uses hakyll-alectryon,
and write its path in the `alectryon-cache` field of the document.
The `alectryon` field must also be set; use the empty list by default.
```
---
title: My awesome post
alectryon: []
alectryon-cache: "blog/my-awesome-post/cache"
---
```
The Hakyll site generator must also be modified to add a command-line option to
generate the cache or to use the cache. Replace `Hakyll.hakyll` with
`Alectryon.hakyll`, and pass the option to `Alectryon.tryTransform`:
```haskell
-- Main.hs
import qualified Hakyll.Alectryon as Alectryon
main :: IO ()
main = Alectryon.hakyll $ \opts -> do
(...)
match "blog/*.md" $ do
(...)
compile $ do
(...)
Alectryon.tryTransform opts doc >>= (...)
```
When writing a post, build your site with the option `--run-alectryon` to interpret
your literate Coq file with Alectryon.
```
# Whenever 'coq' and 'alectryon' code blocks change or are reordered
cabal exec mysite -- build --run-alectryon
```
When the post is finished, add the cached outputs to version control.
These are two files `alectryon.html` and `pygments.html`.
```
# If the cache is set to "alectryon-cache: "blog/my-awesome-post/cache"
git add blog/my-awesome-post/cache/*.html
git commit
```
As long as you don't modify the code blocks, the site can be compiled normally,
without any dependency on Alectryon, Coq, or Python.
```
# As long as the 'coq' and 'alectryon' code blocks haven't changed
cabal exec mysite -- build
```
If the code blocks are modified, you must enable `--run-alectryon` again to
reprocess them and update the cache.
See also the [`example/`](./example) directory for a minimal example.
license-name: MIT
|
packages/ha/hakyll-alectryon.yaml
|
version: "3.0"
nlu:
- intent: greet
examples: |
- hey
- hello
- hi
- hello there
- good morning
- good evening
- moin
- hey there
- let's go
- hey dude
- goodmorning
- goodevening
- good afternoon
- intent: goodbye
examples: |
- cu
- good by
- cee you later
- good night
- bye
- goodbye
- have a nice day
- see you around
- bye bye
- see you later
- intent: thank
examples: |
- thank you
- thanks
- thanks a lot
- thanks for your help
- thank you so much
- thank you for your help
- thanks a ton
- thanks a bunch
- thank you!
- intent: affirm
examples: |
- yes
- y
- indeed
- of course
- that sounds good
- correct
- intent: deny
examples: |
- no
- n
- never
- I don't think so
- don't like that
- no way
- not really
- intent: bot_challenge
examples: |
- are you a bot?
- are you a human?
- am I talking to a bot?
- am I talking to a human?
- intent: productivity_unit
examples: |
- bot, set a [timer](productivity_unit_type)
- hey, can you set a [timer](productivity_unit_type) please?
- make a [timer](productivity_unit_type)
- set [timer](productivity_unit_type)
- I want you to set a [timer](productivity_unit_type) for [4 minutes](timer_duration)
- can you set [timer](productivity_unit_type) of [2 minutes](timer_duration)?
- set a [timer](productivity_unit_type) of [4 hours](timer_duration)?
- set a [timer](productivity_unit_type) for [5 minutes](timer_duration) please
- set a [timer](productivity_unit_type) for [1 hour](timer_duration)
- can you set a [timer](productivity_unit_type) for [30 minutes](timer_duration)
- please set a [reminder](productivity_unit_type)
- set a [reminder](productivity_unit_type)
- can you please [remind](productivity_unit_type) me about something later?
- will you [remind](productivity_unit_type) me to [wash the dishes](reminder_name)?
- set a [reminder](productivity_unit_type) to [check email](reminder_name)
- set a [reminder](productivity_unit_type) on [6th december](reminder_datetime)
- set [reminder](productivity_unit_type) at [3pm](reminder_datetime)
- [remind](productivity_unit_type) me to [pick dad up from the hospital](reminder_name) at [7pm today](reminder_datetime)?
- can you set a [timer](productivity_unit_type) for [2 hours](timer_duration)?
- can you set a [timer](productivity_unit_type) please
- [10 minutes](timer_duration)
|
src/data/nlu.yml
|
api_name: []
items:
- children:
- azure.servicefabric.models.service_fabric_client_ap_is_enums.DataLossMode.full_data_loss
- azure.servicefabric.models.service_fabric_client_ap_is_enums.DataLossMode.invalid
- azure.servicefabric.models.service_fabric_client_ap_is_enums.DataLossMode.partial_data_loss
class: azure.servicefabric.models.service_fabric_client_ap_is_enums.DataLossMode
fullName: azure.servicefabric.models.service_fabric_client_ap_is_enums.DataLossMode
inheritance:
- inheritance:
- type: builtins.object
type: builtins.str
- inheritance:
- type: builtins.object
type: enum.Enum
langs:
- python
module: azure.servicefabric.models.service_fabric_client_ap_is_enums
name: DataLossMode
source:
id: DataLossMode
path: azure-servicefabric\azure\servicefabric\models\service_fabric_client_ap_is_enums.py
remote:
branch: master
path: azure-servicefabric\azure\servicefabric\models\service_fabric_client_ap_is_enums.py
repo: https://github.com/Azure/azure-sdk-for-python.git
startLine: 934
summary: 'An enumeration.
'
syntax: {}
type: class
uid: azure.servicefabric.models.service_fabric_client_ap_is_enums.DataLossMode
- class: azure.servicefabric.models.service_fabric_client_ap_is_enums.DataLossMode
fullName: azure.servicefabric.models.service_fabric_client_ap_is_enums.DataLossMode.full_data_loss
langs:
- python
module: azure.servicefabric.models.service_fabric_client_ap_is_enums
name: full_data_loss
source:
id: full_data_loss
path: null
remote:
branch: master
path: null
repo: https://github.com/Azure/azure-sdk-for-python.git
startLine: null
summary: 'FullDataLoss option will drop all the replicas which means that all the
data will be lost.
'
syntax:
content: full_data_loss = 'FullDataLoss'
type: attribute
uid: azure.servicefabric.models.service_fabric_client_ap_is_enums.DataLossMode.full_data_loss
- class: azure.servicefabric.models.service_fabric_client_ap_is_enums.DataLossMode
fullName: azure.servicefabric.models.service_fabric_client_ap_is_enums.DataLossMode.invalid
langs:
- python
module: azure.servicefabric.models.service_fabric_client_ap_is_enums
name: invalid
source:
id: invalid
path: null
remote:
branch: master
path: null
repo: https://github.com/Azure/azure-sdk-for-python.git
startLine: null
summary: 'Reserved. Do not pass into API.
'
syntax:
content: invalid = 'Invalid'
type: attribute
uid: azure.servicefabric.models.service_fabric_client_ap_is_enums.DataLossMode.invalid
- class: azure.servicefabric.models.service_fabric_client_ap_is_enums.DataLossMode
fullName: azure.servicefabric.models.service_fabric_client_ap_is_enums.DataLossMode.partial_data_loss
langs:
- python
module: azure.servicefabric.models.service_fabric_client_ap_is_enums
name: partial_data_loss
source:
id: partial_data_loss
path: null
remote:
branch: master
path: null
repo: https://github.com/Azure/azure-sdk-for-python.git
startLine: null
summary: 'PartialDataLoss option will cause a quorum of replicas to go down, triggering
an OnDataLoss event in the system for the given partition.
'
syntax:
content: partial_data_loss = 'PartialDataLoss'
type: attribute
uid: azure.servicefabric.models.service_fabric_client_ap_is_enums.DataLossMode.partial_data_loss
references:
- fullName: azure.servicefabric.models.service_fabric_client_ap_is_enums.DataLossMode.full_data_loss
isExternal: false
name: full_data_loss
parent: azure.servicefabric.models.service_fabric_client_ap_is_enums.DataLossMode
uid: azure.servicefabric.models.service_fabric_client_ap_is_enums.DataLossMode.full_data_loss
- fullName: azure.servicefabric.models.service_fabric_client_ap_is_enums.DataLossMode.invalid
isExternal: false
name: invalid
parent: azure.servicefabric.models.service_fabric_client_ap_is_enums.DataLossMode
uid: azure.servicefabric.models.service_fabric_client_ap_is_enums.DataLossMode.invalid
- fullName: azure.servicefabric.models.service_fabric_client_ap_is_enums.DataLossMode.partial_data_loss
isExternal: false
name: partial_data_loss
parent: azure.servicefabric.models.service_fabric_client_ap_is_enums.DataLossMode
uid: azure.servicefabric.models.service_fabric_client_ap_is_enums.DataLossMode.partial_data_loss
|
docs-ref-autogen/azure-servicefabric/azure.servicefabric.models.service_fabric_client_ap_is_enums.DataLossMode.yml
|
$schema: "http://json-schema.org/draft-04/schema#"
id: "blood_pressure_test"
title: Blood Pressure Test
type: object
namespace: http://dcp.bionimbus.org/
category: clinical
program: '*'
project: '*'
description: >
Resting blood pressure will be measured in the right arm after five minutes in the seated position. An automated oscillometric method (Dinamap) and appropriate cuff size will be used. Three readings will be taken; the second and third readings will be averaged to obtain the blood pressure levels used in analyses. An additional systolic blood pressure will be measured in both the right and left brachial, posterior tibial, and dorsalis pedis arteries with a Doppler instrument.
additionalProperties: false
submittable: true
validators: null
systemProperties:
- id
- project_id
- state
- created_datetime
- updated_datetime
links:
- name: subjects
backref: blood_pressure_test
label: describes
target_type: subject
multiplicity: one_to_one
required: true
uniqueKeys:
- [id]
- [project_id, submitter_id]
required:
- submitter_id
- type
- subjects
properties:
$ref: "_definitions.yaml#/ubiquitous_properties"
# HARMONIZED VARIABLES ACROSS GTEX AND TOPMED PROJECTS
bp_diastolic:
description: >
Resting diastolic blood pressure from the upper arm in a clinical setting. (mmHg) (HARMONIZED)
type: number
bp_systolic:
description: >
Resting systolic blood pressure from the upper arm in a clinical setting. (mmHg) (HARMONIZED)
type: number
age_at_bp_systolic:
description: >
age at measurement of bp_systolic (years) (HARMONIZED)
type: number
unit_bp_systolic:
description: >
harmonization unit for bp_systolic (A "harmonization unit" is a defined group of subjects whose phenotypes can be similarly processed.) (HARMONIZED)
type: string
age_at_bp_diastolic:
description: >
age at measurement of bp_diastolic (years) (HARMONIZED)
type: number
unit_bp_diastolic:
description: >
harmonization unit for bp_diastolic (A "harmonization unit" is a defined group of subjects whose phenotypes can be similarly processed.)
type: string
subjects:
$ref: "_definitions.yaml#/to_one"
|
dictionary/gtex/gdcdictionary/schemas/blood_pressure_test.yaml
|
id: 689a9475-440b-4e69-8ab1-a5e241685f39
name: Preview - TI map File entity to WireData Event
description: |
'Identifies a match in WireData Event data from any FileName IOC from TI.
As File name matches can create noise, this is best as hunting query'
requiredDataConnectors:
- connectorId: AzureMonitor(WireData)
dataTypes:
- WireData
- connectorId: ThreatIntelligence
dataTypes:
- ThreatIntelligenceIndicator
- connectorId: ThreatIntelligenceTaxii
dataTypes:
- ThreatIntelligenceIndicator
tactics:
- Impact
query: |
let starttime = todatetime('{{StartTimeISO}}');
let endtime = todatetime('{{EndTimeISO}}');
let ioc_lookBack = 14d;
ThreatIntelligenceIndicator
| where TimeGenerated >= ago(ioc_lookBack) and ExpirationDateTime > now()
| where Active == true
| where isnotempty(FileName)
// using innerunique to keep perf fast and result set low, we only need one match to indicate potential malicious activity that needs to be investigated
| join kind=innerunique (
WireData
| where TimeGenerated between(starttime..endtime)
| where isnotempty(ProcessName)
| extend Process =reverse(substring(reverse(ProcessName), 0, indexof(reverse(ProcessName), "\\")))
| extend WireData_TimeGenerated = TimeGenerated
)
on $left.FileName == $right.Process
| where WireData_TimeGenerated < ExpirationDateTime
| summarize WireData_TimeGenerated = arg_max(WireData_TimeGenerated, *) by IndicatorId, Process
| project WireData_TimeGenerated, Description, ActivityGroupNames, IndicatorId, ThreatType, Url, ExpirationDateTime, ConfidenceScore,
FileName, Computer, Direction, LocalIP, RemoteIP, LocalPortNumber, RemotePortNumber
| extend timestamp = WireData_TimeGenerated, HostCustomEntity = Computer, IPCustomEntity = RemoteIP, URLCustomEntity = Url
entityMappings:
- entityType: Host
fieldMappings:
- identifier: FullName
columnName: HostCustomEntity
- entityType: IP
fieldMappings:
- identifier: Address
columnName: IPCustomEntity
- identifier: Address
columnName: LocalIP
- entityType: URL
fieldMappings:
- identifier: Url
columnName: URLCustomEntity
|
Hunting Queries/ThreatIntelligenceIndicator/FileEntity_WireData.yaml
|
language: cpp
sudo: false
matrix:
include:
- os: osx
osx_image: xcode8
compiler: clang
env: CXX_COMPILER='clang++' C_COMPILER='clang' Fortran_COMPILER='gfortran' BUILD_TYPE='Debug'
- os: osx
osx_image: xcode8
compiler: clang
env: CXX_COMPILER='clang++' C_COMPILER='clang' Fortran_COMPILER='gfortran' BUILD_TYPE='Release'
PROJECT_DEPLOY_TARGET="osx"
install:
- DEPS_DIR="${TRAVIS_BUILD_DIR}/deps"
- mkdir ${DEPS_DIR} && cd ${DEPS_DIR}
- |
if [[ "${TRAVIS_OS_NAME}" == "linux" ]]; then
CMAKE_URL="http://www.cmake.org/files/v3.3/cmake-3.3.2-Linux-x86_64.tar.gz"
mkdir cmake && travis_retry wget --no-check-certificate --quiet -O - ${CMAKE_URL} | tar --strip-components=1 -xz -C cmake
export PATH=${DEPS_DIR}/cmake/bin:${PATH}
echo "The compiler is: ${CXX_COMPILER}"
echo "The CMake path is: ${DEPS_DIR}/cmake/bin"
CMAKE_EXECUTABLE=${DEPS_DIR}/cmake/bin/cmake
else
brew update
brew install cmake
CMAKE_EXECUTABLE=cmake
fi
before_script:
- export CXX=${CXX_COMPILER}
- export CC=${C_COMPILER}
- export FC=${Fortran_COMPILER}
- export PATH=$HOME/.local/bin:/usr/local/bin:/usr/bin:$PATH
script:
- cd ${TRAVIS_BUILD_DIR}
- mkdir build
- cd build
- CXX=${CXX_COMPILER} CC=${C_COMPILER} FC=${Fortran_COMPILER} $CMAKE_EXECUTABLE -G
Xcode .. -DCMAKE_BUILD_TYPE=${BUILD_TYPE} -DCMAKE_OSX_ARCHITECTURES="i386;x86_64"
- cd ..
- cmake --build build --config $BUILD_TYPE
after_success:
- |
if [ -n "${PROJECT_DEPLOY_TARGET+set}" ]; then
echo "Deploying package $PROJECT_DEPLOY_TARGET"
mkdir build/$PROJECT_DEPLOY_TARGET
mkdir build/deploy
cp -r build/$BUILD_TYPE/*.vst build/$PROJECT_DEPLOY_TARGET/.
cp -r build/$BUILD_TYPE/*.vst3 build/$PROJECT_DEPLOY_TARGET/.
cp -r build/$BUILD_TYPE/*.component build/$PROJECT_DEPLOY_TARGET/.
cd build
zip -r ${PROJECT_DEPLOY_TARGET}.zip $PROJECT_DEPLOY_TARGET
mv ${PROJECT_DEPLOY_TARGET}.zip deploy
cd ..
fi
before_deploy:
- export FILE_TO_UPLOAD=$(ls build/deploy/*.zip)
deploy:
provider: releases
api_key:
secure: <KEY>
file_glob: true
file: ${FILE_TO_UPLOAD}
skip_cleanup: true
overwrite: true
on:
branch: master
condition: -n "${PROJECT_DEPLOY_TARGET+set}"
tags: true
|
.travis.yml
|
{{- if semverCompare ">= 1.23" .Capabilities.KubeVersion.GitVersion }}
---
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
name: system:cloud-controller-manager
annotations:
rbac.authorization.kubernetes.io/autoupdate: "true"
labels:
k8s-app: cloud-controller-manager
rules:
- apiGroups:
- ""
resources:
- events
verbs:
- create
- patch
- update
- apiGroups:
- ""
resources:
- nodes
verbs:
- "*"
- apiGroups:
- ""
resources:
- nodes/status
verbs:
- patch
- apiGroups:
- ""
resources:
- services
verbs:
- list
- patch
- update
- watch
- apiGroups:
- ""
resources:
- services/status
verbs:
- list
- patch
- update
- watch
- apiGroups:
- ""
resources:
- serviceaccounts
verbs:
- create
- get
- list
- watch
- update
- apiGroups:
- ""
resources:
- persistentvolumes
verbs:
- get
- list
- update
- watch
- apiGroups:
- ""
resources:
- endpoints
verbs:
- create
- get
- list
- watch
- update
- apiGroups:
- ""
resources:
- secrets
verbs:
- get
- list
- watch
- apiGroups:
- coordination.k8s.io
resources:
- leases
verbs:
- get
- create
- update
---
kind: ClusterRoleBinding
apiVersion: rbac.authorization.k8s.io/v1
metadata:
name: system:cloud-controller-manager
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: ClusterRole
name: system:cloud-controller-manager
subjects:
- kind: User
name: system:cloud-controller-manager
- kind: User
name: cloud-controller-manager
{{- else }}
---
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRole
metadata:
name: system:azure-cloud-provider
rules:
- apiGroups: [""]
resources: ["events"]
verbs:
- create
- patch
- update
---
apiVersion: rbac.authorization.k8s.io/v1
kind: ClusterRoleBinding
metadata:
name: system:azure-cloud-provider
roleRef:
apiGroup: rbac.authorization.k8s.io
kind: ClusterRole
name: system:azure-cloud-provider
subjects:
- kind: ServiceAccount
name: azure-cloud-provider
namespace: kube-system
{{- end }}
|
charts/internal/shoot-system-components/charts/cloud-controller-manager/templates/rbac-cloud-controller.yaml
|
type: object
required:
- launchSpec
properties:
launchSpec:
type: object
required:
- oceanId
- name
- imageId
properties:
id:
type: string
readOnly: true
example: "ols-1234abcd"
oceanId:
type: string
description: >
          The Ocean cluster identifier. Required to create a virtual node group.
example: "o-1234abcd"
name:
type: string
description: >
User given name of the virtual node group.
example: "mySpecLauncher"
imageId:
type: string
description: >
Set image identifier. Can be null.
example: "img2"
userData:
type: string
format: byte
description: >
Base64-encoded MIME user data to make available to the instances.
example: "dXNlcmJhc2g2NGVuY29kZWQ="
securityGroupIds:
type: array
description: >
Set security groups. Each element in array should be a security group identifier.
items:
type: string
example: "sg1, sg3"
instanceTypes:
type: array
description: >
An array of supported instance types for the VNG.
items:
type: string
example: "c3.2xlarge, c4.2xlarge"
subnetIds:
type: array
description: >
Set the subnets in the VNG. Each element in the array should be a subnet identifier.
items:
type: string
example: "subnet-01234566789abcdef, subnet-9876543210ghijkl"
iamInstanceProfile:
type: array
description: >
The instance profile iamRole object.
items:
oneOf:
- title: 'arn'
type: string
description: >
The iamRole ARN. If set, do not set launchSpec.iamInstanceProfile.name
example: "arn:aws:iam::123456789123:instance-profile"
- title: 'name'
type: string
description: >
The iamRole name. If set, do not set launchSpec.iamInstanceProfile.arn
example: 'my-iamInstance-Profile'
attributes:
description: >
Add attributes to the cluster.
type: array
items:
type: object
required:
- key
- value
properties:
key:
description: >
Give the attribute a key.
type: string
example: "key1"
value:
description: >
Set the value of the attribute.
type: string
example: "value1"
autoScale:
type: object
properties:
headrooms:
description: >
Set custom headroom per VNG. Provide a list of launchSpec.autoscale.headroom objects.
type: array
items:
type: object
properties:
cpuPerUnit:
description: >
Configure the number of CPUs to allocate the headroom. CPUs are denoted in millicores, where 1000 millicores = 1 vCPU.
type: integer
example: 1024
memoryPerUnit:
description: >
Configure the amount of memory (MiB) to allocate the headroom.
type: integer
example: 512
numOfUnits:
description: >
The number of units to retain as headroom, where each unit has the defined headroom CPU and memory.
type: integer
example: 2
tags:
description: >
                List of key-value pairs of tags.
type: array
items:
type: object
properties:
tagKey:
type: string
description: >
Set the tag key.
example: "Creator"
tagValue:
type: string
description: >
Set the tag value.
example: "<NAME>"
blockDeviceMappings:
$ref: "../schemas/launchSpec-blockDeviceMappings.yaml"
restrictScaleDown:
type: boolean
description: >
When set to True, the VNG container instances will be treated as if all tasks running have the [restrict-scale-down label](https://docs.spot.io/ocean/features/labels-and-taints).
Therefore, Ocean will not scale down container instances unless they are empty.
example: false
instanceMetadataOptions:
$ref: "./oceanInstanceMetadataOptions.yaml"
createdAt:
type: string
readOnly: true
format: date-time
example: "2019-06-03T08:13:08.000Z"
updatedAt:
type: string
readOnly: true
format: date-time
example: "2019-06-10T15:06:13.000Z"
|
api/services/ocean/ecs/schemas/launchSpec.yaml
|
---
GN:
- name: African Development Bank
swift: ADBGGNG1XXX
swift8: ADBGGNG1
branch_name: Head Office
branch_address: 37 Cite Des Nations Conakry
branch_code: XXX
city: Conakry
country: Guinea
- name: Afriland First Bank Guinee S A
swift: CCEIGNGNXXX
swift8: CCEIGNGN
branch_name: Head Office
branch_address: Commune De Kaloum 324 324 Conakry
branch_code: XXX
city: Conakry
country: Guinea
- name: Banque Centrale De La Republique De Guinee
swift: REPCGNGAXXX
swift8: REPCGNGA
branch_name: No branch name
branch_address: No branch address
branch_code: XXX
city: Conakry
country: Guinea
- name: Banque Centrale De La Republique De Guinee
swift: REPCGNGNXXX
swift8: REPCGNGN
branch_name: Head Office
branch_address: 12 Boulevard Du Commerce Conakry
branch_code: XXX
city: Conakry
country: Guinea
- name: Banque Internationale Pour Le Commerce Et L'industrie De La Guinee
swift: BICIGNCXXXX
swift8: BICIGNCX
branch_name: Head Office
branch_address: Avenue De La Republique 1484 Conakry
branch_code: XXX
city: Conakry
country: Guinea
- name: Banque Islamique De Guinee
swift: ISGUGNGNXXX
swift8: ISGUGNGN
branch_name: Head Office
branch_address: Immeuble Nafaya Conakry
branch_code: XXX
city: Conakry
country: Guinea
- name: Banque Populaire Maroco-guineenne Sa
swift: POMBGNGNXXX
swift8: POMBGNGN
branch_name: Head Office
branch_address: Banque Populaire Maroco-Guineenne Conakry
branch_code: XXX
city: Conakry
country: Guinea
- name: Banque Pour Le Commerce Et L'industrie De Guinee
swift: COLIGNGNXXX
swift8: COLIGNGN
branch_name: Head Office
branch_address: Siege Bci 359 Conakry
branch_code: XXX
city: Conakry
country: Guinea
- name: <NAME>
swift: BSGNGNGNXXX
swift8: BSGNGNGN
branch_name: Head Office
branch_address: Fawaz Building 224 Conakry
branch_code: XXX
city: Conakry
country: Guinea
- name: <NAME>
swift: ECOCGNCNXXX
swift8: ECOCGNCN
branch_name: Head Office
branch_address: Immeuble Al Iman Conakry
branch_code: XXX
city: Conakry
country: Guinea
- name: First International Bank Sa
swift: FIBLGNGNXXX
swift8: FIBLGNGN
branch_name: Head Office
branch_address: Avenue De La Republique 557 Conakry
branch_code: XXX
city: Conakry
country: Guinea
- name: International Commercial Bank Sa
swift: ICMOGNCNXXX
swift8: ICMOGNCN
branch_name: Head Office
branch_address: Ex-Cite Chemins De Fer Conakry
branch_code: XXX
city: Conakry
country: Guinea
- name: <NAME>
swift: BIAOGNGNXXX
swift8: BIAOGNGN
branch_name: Head Office
branch_address: Almamya 483 Conakry
branch_code: XXX
city: Conakry
country: Guinea
- name: <NAME>
swift: ORBKGNGNXXX
swift8: ORBKGNGN
branch_name: Head Office
branch_address: 5 Bld., Angle 6e Av. Centre Ville Conakry
branch_code: XXX
city: Conakry
country: Guinea
- name: Skye Bank Guinea Limited S.a.
swift: SKYEGNGNXXX
swift8: SKYEGNGN
branch_name: Head Office
branch_address: Skye House Conakry
branch_code: XXX
city: Conakry
country: Guinea
- name: Societe Generale De Banques En Guinee
swift: SGGNGNGNXXX
swift8: SGGNGNGN
branch_name: Head Office
branch_address: Avenue De La Republique Conakry
branch_code: XXX
city: Conakry
country: Guinea
- name: United Bank For Africa Guinea
swift: UBAGGNCNXXX
swift8: UBAGGNCN
branch_name: Head Office
branch_address: Immeuble Sonoco 1198 Conakry
branch_code: XXX
city: Conakry
country: Guinea
|
data/banks/GN.yml
|
name: External Links
version: 1.2.2
description: "This plugin adds small icons to external and mailto links, informing users the link will take them to a new site or open their email client."
icon: external-link
author:
name: Sommerregen
email: <EMAIL>
homepage: https://github.com/sommerregen/grav-plugin-external-links
keywords: [external, links, filter, formatter, plugin]
docs: https://github.com/sommerregen/grav-plugin-external-links/blob/master/README.md
bugs: https://github.com/sommerregen/grav-plugin-external-links/issues
license: MIT
form:
validation: strict
fields:
enabled:
type: toggle
label: Plugin Status
highlight: 1
default: 0
options:
1: Enabled
0: Disabled
validate:
type: bool
built_in_css:
type: toggle
label: Use built in CSS
highlight: 1
default: 1
options:
1: Enabled
0: Disabled
validate:
type: bool
weight:
type: text
label: Set the weight (order of execution)
default: 0
validate:
type: int
min: -100
max: 100
exclude.classes:
type: text
label: Exclude all links with this class
help: Comma separated list.
validate:
type: commalist
exclude.domains:
type: text
label: A list of domains to be excluded (any regular expression can be used)
help: Comma separated list of domains e.g. _localhost/*_.
validate:
type: commalist
process:
type: toggle
label: Filter external links
highlight: 1
default: 1
options:
1: Enabled
0: Disabled
validate:
type: bool
no_follow:
type: toggle
label: Add `rel="nofollow"` to all external links
highlight: 1
default: 1
options:
1: Enabled
0: Disabled
validate:
type: bool
target:
type: text
label: Set target attribute of the link.
default: _blank
placeholder: _blank
|
user/plugins/external_links/blueprints.yaml
|
name: Publish Alpha
on:
push:
branches:
- alpha
jobs:
build:
runs-on: ${{ matrix.os }}
strategy:
matrix:
os: [ubuntu-18.04]
include:
- os: ubuntu-18.04
TARGET: linux
steps:
- name: Install Linux dependencies
if: startsWith(matrix.os, 'ubuntu')
run: |
sudo apt-get update
sudo apt-get install -y libgtk-3-dev libx11-dev pkg-config libreadline-dev
# Set up Flutter and add it to the path.
- name: Clone Flutter repository with stable channel
uses: actions/checkout@v2
with:
repository: 'flutter/flutter'
ref: '2.0.6'
path: 'flutter'
fetch-depth: 0
- name: Add Flutter to the PATH for Unix
run: echo "$GITHUB_WORKSPACE/flutter/bin" >> $GITHUB_PATH
if: runner.os != 'Windows'
- name: Add Flutter to the PATH for Windows
run: echo "${env:GITHUB_WORKSPACE}\flutter\bin" >> ${env:GITHUB_PATH}
if: runner.os == 'Windows'
- name: Add Dart to the PATH for Unix
run: echo "$GITHUB_WORKSPACE/flutter/bin/cache/dart-sdk/bin" >> $GITHUB_PATH
if: runner.os != 'Windows'
- name: Add Dart to the PATH for Windows
run: echo "${env:GITHUB_WORKSPACE}\flutter\bin\cache\dart-sdk\bin" >> ${env:GITHUB_PATH}
if: runner.os == 'Windows'
- name: Run Flutter doctor
run: flutter doctor -v
- name: Node and NPM Versions
run: node -v && npm -v
- name: Clone Hydro-SDK
uses: actions/checkout@v2
with:
token: ${{ secrets.WAVEFORM_BOT_PAT }}
path: 'hydro'
- name: Get packages for Hydro-SDK JS
run: npm install
working-directory: hydro
- name: Get packages for Hydro-SDK Dart
run: flutter pub get
working-directory: hydro
- name: Fetch Tags
run: git fetch --all --tags
working-directory: hydro
- name: Configure Git Identity
env:
PUBLISH_EMAIL: ${{secrets.PUBLISH_EMAIL}}
PUBLISH_USERNAME: ${{secrets.PUBLISH_USERNAME}}
run: |
git config --global user.email "$PUBLISH_EMAIL"
git config --global user.name "$PUBLISH_USERNAME"
working-directory: hydro
- name: Run Tsc
shell: bash
run: ./node_modules/.bin/tsc
working-directory: hydro
- name: Publish Alpha
env:
NPM_ORG_TOKEN: ${{secrets.NPM_ORG_TOKEN}}
run: export NPM_TOKEN=$NPM_ORG_TOKEN; flutter pub run waveform:tagAndPublishAlpha
working-directory: hydro
- name: Push Tags
run: git push --tags
working-directory: hydro
|
.github/workflows/publish-alpha.yml
|
---
- name: install required software
package: name={{ item }} state=present
loop:
- msmtp-mta
- mutt
- notmuch-mutt
- offlineimap
- openssl
become: true
- set_fact:
config_dir: "{{ lookup('env', 'HOME') }}/.config"
cache_dir: "{{ lookup('env', 'HOME') }}/.cache"
email_dir: "{{ lookup('env', 'HOME') }}/Private/mail/basealt"
basealt_email_user: "{{ lookup('env', 'USER') }}"
- set_fact:
offlineimap_dir: "{{ config_dir }}/offlineimap"
mutt_dir: "{{ config_dir }}/mutt"
msmtp_dir: "{{ config_dir }}/msmtp"
notmuch_dir: "{{ config_dir }}/notmuch"
- name: figure out real name of user
shell: >
getent passwd {{ basealt_email_user }} | cut -d':' -f5 | sed -re 's/,.*$//'
register: email_user_getent_realname
- set_fact:
basealt_email_realname: "{{ email_user_getent_realname.stdout.strip() }}"
when: basealt_email_realname is undefined
- name: figure out server certificate fingerprint
shell: >
msmtp --serverinfo --host={{ basealt_smtp_server }} --tls=on --tls-certcheck=off |
sed -rne 's/^\s*SHA256:\s*([0-9a-fA-F:]+)\s*$/\1/p'
register: smtp_cert_fingerprint
when: basealt_smtp_server_cert_fingerprint is undefined
- set_fact:
basealt_smtp_server_cert_fingerprint: "{{ smtp_cert_fingerprint.stdout.strip() }}"
when: basealt_smtp_server_cert_fingerprint is undefined
- name: create msmtp config dir
file:
state: directory
path: "{{ msmtp_dir }}"
mode: 0755
- name: generate msmtp config file
template:
src: ./templates/msmtp.config.j2
dest: "{{ msmtp_dir }}/config"
mode: 0644
- name: create email directory
file:
path: "{{ email_dir }}"
state: directory
mode: 0755
- name: figure out IMAP server certificate
shell: >
openssl s_client -connect {{ basealt_imap_server }}:{{ basealt_imap_port }} </dev/null |
sed -rne '/^-----BEGIN CERTIFICATE-----/,/^-----END CERTIFICATE-----/p' |
openssl x509 -noout -in - -fingerprint -sha512 |
sed -rne 's/^SHA512 Fingerprint=([0-9a-fA-F:]+)\s*$/\1/p' |
tr -d ':' |
tr '[A-F]' '[a-f]'
register: imap_server_cert_fingerprint
when: basealt_imap_server_cert_fingerprint is undefined
- set_fact:
basealt_imap_server_cert_fingerprint: "{{ imap_server_cert_fingerprint.stdout.strip() }}"
when: basealt_imap_server_cert_fingerprint is undefined
- name: create offlineimap config directory
file:
path: "{{ offlineimap_dir }}"
state: directory
mode: 0755
- name: create offlineimap config
template:
src: ./templates/offlineimap.conf.j2
dest: "{{ offlineimap_dir }}/offlineimap.conf"
mode: 0644
- name: install offlineimap python code
copy:
src: ./files/offlineimap.py
dest: "{{ offlineimap_dir }}/offlineimap.py"
mode: 0644
- name: create mutt config directory
file:
path: "{{ mutt_dir }}"
state: directory
mode: 0755
- name: install mutt config file
template:
src: ./templates/muttrc.j2
dest: "{{ mutt_dir }}/muttrc"
mode: 0644
- name: create notmuch config dir
file:
path: "{{ notmuch_dir }}"
state: directory
mode: 0755
- name: generate notmuch config file
template:
src: ./templates/notmuch.config.j2
dest: "{{ notmuch_dir }}/config"
mode: 0644
- name: create notmuch cache dir
file:
path: "{{ cache_dir }}/notmuch/mutt"
state: directory
mode: 0755
|
roles/basealt-email/tasks/main.yml
|
games:
# Trunk games
- id: normal-web-trunk
name: Play Crawl (development branch)
crawl_binary: /crawl-git/bin/crawl
rcfile_path: /data/rcs/git
macro_path: /data/rcs/git
morgue_path: /data/morgue/%n/
socket_path: /websockets
dir_path: /data/logfiles/git
inprogress_path: /data/inprogress/git
ttyrec_path: /data/ttyrec/%n
client_path: /crawl-git
morgue_url: https://crawl.kirbytoso.xyz/morgue/%n/
show_save_info: True
send_json_options: True
- id: tut-web-trunk
name: Tutorial
crawl_binary: /crawl-git/bin/crawl
rcfile_path: /data/rcs/git
macro_path: /data/rcs/git
morgue_path: /data/morgue/%n/
inprogress_path: /data/inprogress/git
ttyrec_path: /data/ttyrec/%n
socket_path: /websockets
client_path: /crawl-git
dir_path: /data/logfiles/git
morgue_url: https://crawl.kirbytoso.xyz/morgue/%n/
show_save_info: True
send_json_options: True
options:
- -tutorial
- id: sprint-web-trunk
name: Sprint
crawl_binary: /crawl-git/bin/crawl
rcfile_path: /data/rcs/git
macro_path: /data/rcs/git
morgue_path: /data/morgue/%n/
inprogress_path: /data/inprogress/git
ttyrec_path: /data/ttyrec/%n
socket_path: /websockets
client_path: /crawl-git
dir_path: /data/logfiles/git
morgue_url: https://crawl.kirbytoso.xyz/morgue/%n/
show_save_info: True
send_json_options: True
options:
- -sprint
# v27 games
- id: normal-web-27
name: Play Crawl (v27)
crawl_binary: /crawl-27/bin/crawl
rcfile_path: /data/rcs/27
macro_path: /data/rcs/27
morgue_path: /data/morgue/%n/
socket_path: /websockets
dir_path: /data/logfiles/27
inprogress_path: /data/inprogress/27
ttyrec_path: /data/ttyrec/%n
client_path: /crawl-27
morgue_url: https://crawl.kirbytoso.xyz/morgue/%n/
show_save_info: True
send_json_options: True
- id: tut-web-27
name: Tutorial
crawl_binary: /crawl-27/bin/crawl
rcfile_path: /data/rcs/27
macro_path: /data/rcs/27
morgue_path: /data/morgue/%n/
inprogress_path: /data/inprogress/27
ttyrec_path: /data/ttyrec/%n
socket_path: /websockets
client_path: /crawl-27
dir_path: /data/logfiles/27
morgue_url: https://crawl.kirbytoso.xyz/morgue/%n/
show_save_info: True
send_json_options: True
options:
- -tutorial
- id: sprint-web-27
name: Sprint
crawl_binary: /crawl-27/bin/crawl
rcfile_path: /data/rcs/27
macro_path: /data/rcs/27
morgue_path: /data/morgue/%n/
inprogress_path: /data/inprogress/27
ttyrec_path: /data/ttyrec/%n
socket_path: /websockets
client_path: /crawl-27
dir_path: /data/logfiles/27
morgue_url: https://crawl.kirbytoso.xyz/morgue/%n/
show_save_info: True
send_json_options: True
options:
- -sprint
# v26 games
- id: normal-web-26
name: Play Crawl (v26)
crawl_binary: /crawl-26/bin/crawl
rcfile_path: /data/rcs/26
macro_path: /data/rcs/26
morgue_path: /data/morgue/%n/
socket_path: /websockets
dir_path: /data/logfiles/26
inprogress_path: /data/inprogress/26
ttyrec_path: /data/ttyrec/%n
client_path: /crawl-26
morgue_url: https://crawl.kirbytoso.xyz/morgue/%n/
show_save_info: True
send_json_options: True
- id: tut-web-26
name: Tutorial
crawl_binary: /crawl-26/bin/crawl
rcfile_path: /data/rcs/26
macro_path: /data/rcs/26
morgue_path: /data/morgue/%n/
inprogress_path: /data/inprogress/26
ttyrec_path: /data/ttyrec/%n
socket_path: /websockets
client_path: /crawl-26
dir_path: /data/logfiles/26
morgue_url: https://crawl.kirbytoso.xyz/morgue/%n/
show_save_info: True
send_json_options: True
options:
- -tutorial
- id: sprint-web-26
name: Sprint
crawl_binary: /crawl-26/bin/crawl
rcfile_path: /data/rcs/26
macro_path: /data/rcs/26
morgue_path: /data/morgue/%n/
inprogress_path: /data/inprogress/26
ttyrec_path: /data/ttyrec/%n
socket_path: /websockets
client_path: /crawl-26
dir_path: /data/logfiles/26
morgue_url: https://crawl.kirbytoso.xyz/morgue/%n/
show_save_info: True
send_json_options: True
options:
- -sprint
|
crawl/webserver/games.d/base.yaml
|
AWSTemplateFormatVersion: 2010-09-09
Transform: AWS::Serverless-2016-10-31
Description: Siebel eScript Memory Leak Analyzer - Serverless application
Resources:
## S3 buckets
S3UploadBucket:
Type: AWS::S3::Bucket
Properties:
BucketName: !Ref UploadBucketName
CorsConfiguration:
CorsRules:
- AllowedHeaders:
- "*"
AllowedMethods:
- GET
- PUT
- HEAD
AllowedOrigins:
- "*"
S3OutputBucket:
Type: AWS::S3::Bucket
Properties:
BucketName: memleak-output
CorsConfiguration:
CorsRules:
- AllowedHeaders:
- "*"
AllowedMethods:
- GET
- PUT
- HEAD
AllowedOrigins:
- "*"
# HTTP API
MyApi:
Type: AWS::Serverless::HttpApi
Properties:
# CORS configuration - this is open for development only and should be restricted in prod.
# See https://docs.aws.amazon.com/serverless-application-model/latest/developerguide/sam-property-httpapi-httpapicorsconfiguration.html
CorsConfiguration:
AllowMethods:
- GET
- POST
- DELETE
- OPTIONS
AllowHeaders:
- "*"
AllowOrigins:
- "https://jpmota.net/"
MaxAge: 3600
## Lambda functions
UploadRequestFunction:
# More info about Function Resource: https://github.com/awslabs/serverless-application-model/blob/master/versions/2016-10-31.md#awsserverlessfunction
Type: AWS::Serverless::Function
Properties:
FunctionName: s3-get-signed-url
CodeUri: getSignedURL/
Handler: app.handler
Runtime: nodejs12.x
Timeout: 3
MemorySize: 128
Environment:
Variables:
UploadBucket: !Ref UploadBucketName
Policies:
- S3WritePolicy:
BucketName: !Ref UploadBucketName
## This permission allows the Lambda function to request signed URLs
## for objects that will be publicly readable. Uncomment if you want this ACL.
# - Statement:
# - Effect: Allow
# Resource: !Sub 'arn:aws:s3:::${S3UploadBucket}/'
# Action:
# - s3:putObjectAcl
Events:
UploadAssetAPI:
Type: HttpApi
Properties:
Path: /uploads
Method: get
ApiId: !Ref MyApi
## Lambda functions
MemLeakAnalyzer:
Type: AWS::Serverless::Function
Properties:
FunctionName: escript-memleak-analyzer
CodeUri: MemLeakAnalyzer/
Handler: memleak_lambda.lambda_handler
Runtime: python3.8
Timeout: 90
MemorySize: 128
Environment:
Variables:
UploadBucket: memleak-upload
Policies:
- AWSLambdaBasicExecutionRole
- S3ReadPolicy:
BucketName: !Ref UploadBucketName
- S3FullAccessPolicy:
BucketName: !Ref S3OutputBucket
Events:
UploadAssetAPI:
Type: HttpApi
Properties:
Path: /escript-memleak-analyzer
Method: POST
ApiId: !Ref MyApi
# S3Event:
# Type: S3
# Properties:
# Bucket: !Ref S3UploadBucket
# Events: s3:ObjectCreated:*
# ## This Permission is required to have Lambda trigger created
# ## More info: https://github.com/aws/serverless-application-model/issues/300
# LambdaInvokePermission:
# Type: 'AWS::Lambda::Permission'
# Properties:
# FunctionName: !GetAtt MemLeakAnalyzer.Arn
# Action: 'lambda:InvokeFunction'
# Principal: 's3.amazonaws.com'
# SourceAccount: !Sub ${AWS::AccountId}
# SourceArn: !GetAtt S3UploadBucket.Arn
## Parameters
Parameters:
UploadBucketName:
Default: memleak-upload
Type: String
## Take a note of the outputs for deploying the workflow templates in this sample application
Outputs:
APIendpoint:
Description: "HTTP API endpoint URL - Update frontend html"
Value: !Sub "https://${MyApi}.execute-api.${AWS::Region}.amazonaws.com"
S3UploadBucketName:
Description: "S3 bucket for application uploads"
Value: !Ref UploadBucketName
|
template.yaml
|
version: 2
references:
node_image: &node_image
docker:
- image: circleci/python:3.7-stretch-node-browsers
attach_workspace: &attach_workspace
attach_workspace:
at: .
master_branch: &master_branch
filters:
branches:
only:
- master
  not_master_branch: &not_master_branch
filters:
branches:
ignore:
- master
restore_cache: &restore_cache
restore_cache:
keys:
# when lock file changes, use increasingly general patterns to restore cache
- npm-repo-v1-{{ .Branch }}-{{ checksum "yarn.lock" }}
- npm-repo-v1-{{ .Branch }}-
- npm-repo-v1
jobs:
install-and-build:
<<: *node_image
steps:
- checkout
- *restore_cache
- run: pip install awscli --upgrade --user
- run: yarn install
- run: yarn build
- persist_to_workspace:
root: .
paths:
- yarn.lock
- public
- save_cache:
paths:
- ~/.local
- ~/project/node_modules
key: npm-repo-v1-{{ .Branch }}-{{ checksum "yarn.lock" }}
deploy-to-prod:
<<: *node_image
steps:
- *attach_workspace
- *restore_cache
- run:
name: Update PATH
command: |
echo 'export PATH=~/.local/bin/:$PATH' >> $BASH_ENV
source $BASH_ENV
- run:
name: Upload files to S3
command: aws s3 cp --recursive ./public/ s3://www.pristineclean.com.au
- run:
name: Invalidate cloudfront cache
command: aws configure set preview.cloudfront true; aws cloudfront create-invalidation --distribution-id E1TG52R0R5X2CD --paths /
workflows:
version: 2
build-and-deploy:
jobs:
- install-and-build
- unlock-prod-deploy:
<<: *master_branch
type: approval
requires:
- install-and-build
- deploy-to-prod:
<<: *master_branch
requires:
- unlock-prod-deploy
|
.circleci/config.yml
|
project:
name: quickstart-ibm-maximo
owner: <EMAIL>
package_lambda: false
lambda_source_path: functions/source
lambda_zip_path: functions/packages
s3_regional_buckets: true
shorten_stack_name: true
regions:
- us-east-1
- us-east-2
- us-west-2
- eu-central-1
- eu-west-1
- eu-west-2
- ap-northeast-1
- ap-southeast-1
- ap-southeast-2
s3_bucket: ''
parameters:
AvailabilityZones: $[taskcat_genaz_3]
DBPassword: $[taskcat_genpass_8A]
KeyPairName: $[taskcat_getkeypair]
MaximoFilesLocation: override
QSS3BucketName: $[taskcat_autobucket]
QSS3BucketRegion: $[taskcat_current_region]
RemoteAccessCIDR: 10.0.0.0/16
WebAccessCIDR: 10.0.0.0/16
ALBSSLCertificateARN: ''
WASNodesMinSize: '2'
WASNodesMaxSize: '4'
WASNodesDesiredCapacity: '2'
ORAVersion: 'Standard-Edition-Two-18.0-License-Included'
SiteDomain: ''
Route53HostedZoneId: ''
WASAdminUsername: wasadmin$[taskcat_genpass_4A]
  WASAdminPassword: $[taskcat_genpass_8A]
tests:
maximo-core:
parameters:
DeployModules: 'Core'
DBName: 'maxcore'
DBInstanceIdentifier: 'MAXCORE'
regions:
- us-east-1
template: templates/maximo-main.template.yaml
maximo-hse:
parameters:
DeployModules: 'HSE'
DBName: 'maxhse'
DBInstanceIdentifier: 'MAXHSE'
regions:
- us-east-2
template: templates/maximo-main.template.yaml
maximo-nuclear:
parameters:
DeployModules: 'Nuclear'
DBName: 'maxnuc'
DBInstanceIdentifier: 'MAXNUC'
regions:
- us-west-2
template: templates/maximo-main.template.yaml
maximo-scheduler:
parameters:
DeployModules: 'Scheduler'
DBName: 'maxsched'
DBInstanceIdentifier: 'MAXSCHED'
regions:
- eu-west-1
template: templates/maximo-main.template.yaml
maximo-transport:
parameters:
DeployModules: 'Transportation'
DBName: 'maxtxp'
DBInstanceIdentifier: 'MAXTXP'
regions:
- eu-west-2
template: templates/maximo-main.template.yaml
maximo-utilities:
parameters:
DeployModules: 'Energy-and-Utilities'
DBName: 'maxene'
DBInstanceIdentifier: 'MAXENE'
regions:
- eu-central-1
template: templates/maximo-main.template.yaml
|
.taskcat.yml
|
funcs:
- name: fixSliceType
type: resolver
body: |
function(sliceType) {
sliceType = sliceType.split(' ').join('');
// This check is for empty interface or struct, which include `{}` as part of their type definition
if (sliceType.substring(sliceType.length - 4, sliceType.length) == "{}{}" && (sliceType.indexOf("interface") !== 0 || sliceType.indexOf("struct") !== 0))
return sliceType.substring(0, sliceType.indexOf("{")) + "{}"
return sliceType.substring(0, sliceType.indexOf("{"))
}
tenets:
- name: declare-empty-slice
flows:
codelingo/review:
comment: Declare {{sliceName}} as a nil slice, with `var {{sliceName}} {{fixSliceType(sliceType)}} [as specified in Code Review Comments](https://github.com/golang/go/wiki/CodeReviewComments#declaring-empty-slices)
codelingo/rewrite:
codelingo/docs:
title: Declaring Empty Slices
body: |
When declaring an empty slice, prefer
```go
var t []string
```
over
```go
t := []string{}
```
          The former declares a nil slice value, while the latter is non-nil but zero-length. They are functionally equivalent — their `len` and `cap` are both zero — but the nil slice is the preferred style.
Note that there are limited circumstances where a non-nil but zero-length slice is preferred, such as when encoding JSON objects (a `nil` slice encodes to `null`, while `[]string{}` encodes to the JSON array `[]`).
When designing interfaces, avoid making a distinction between a nil slice and a non-nil, zero-length slice, as this can lead to subtle programming errors.
          For more discussion about nil in Go see Francesc Campoy's talk [Understanding Nil](https://www.youtube.com/watch?v=ynoY2xz-F8s).
query: |
import codelingo/ast/go
@review comment
@rewrite --replace "var {{sliceName}} {{fixSliceType(sliceType)}}"
go.assign_stmt(depth = any):
tok == ":="
go.lhs:
go.ident:
name as sliceName
go.rhs:
raw as sliceType
start_offset
end_offset
go.composite_lit:
child_count == 1
go.array_type
|
tenets/codelingo/code-review-comments/declare-empty-slice/codelingo.yaml
|
project:
name: project
collection: edgecontainer.projects
attributes:
- &project
parameter_name: projectsId
attribute_name: project
help: The project name.
property: core/project
location:
name: location
collection: edgecontainer.projects.locations
attributes:
- *project
- &location
parameter_name: locationsId
attribute_name: location
help: The global location name.
property: edge_container/location
disable_auto_completers: false
zone:
name: zone
collection: edgecontainer.projects.locations
attributes:
- *project
- &zone
parameter_name: locationsId
attribute_name: zone
help: The name of the Edge Container zone.
disable_auto_completers: false
cluster:
name: cluster
collection: edgecontainer.projects.locations.clusters
request_id_field: clusterId
attributes:
- *project
- *location
- &cluster
parameter_name: clustersId
attribute_name: cluster
help: Kubernetes cluster.
disable_auto_completers: false
nodePool:
name: node pool
collection: edgecontainer.projects.locations.clusters.nodePools
request_id_field: nodePoolId
attributes:
- *project
- *location
- *cluster
- &nodePool
parameter_name: nodePoolsId
attribute_name: node_pool
help: Pool of Kubernetes nodes with similar properties.
disable_auto_completers: false
machine:
name: machine
collection: edgecontainer.projects.locations.machines
request_id_field: machineId
attributes:
- *project
- *location
- &machine
parameter_name: machinesId
attribute_name: machine
help: Machines represent compute entities which can assume the role of a node in a cluster.
disable_auto_completers: false
vpnConnection:
name: vpn connection
collection: edgecontainer.projects.locations.vpnConnections
request_id_field: vpnConnectionId
attributes:
- *project
- *location
- &vpnConnection
parameter_name: vpnConnectionsId
attribute_name: vpn_connection
help: VPN connection between cluster and GCP VPC.
disable_auto_completers: false
operation:
name: operation
collection: edgecontainer.projects.locations.operations
attributes:
- *project
- *location
- parameter_name: operationsId
attribute_name: operation
help: Edge-container long running operation.
disable_auto_completers: false
|
lib/googlecloudsdk/command_lib/edge_container/resources.yaml
|
app_shop_partial:
resource: "shop/partial.yml"
prefix: /_partial
app_shop_search:
resource: "shop/search.yml"
app_shop_cart:
resource: "shop/cart.yml"
app_shop_manufacturer:
resource: "shop/manufacturer.yml"
app_shop_tag:
resource: "shop/tag.yml"
app_shop_page:
resource: "shop/page.yml"
###################
# Contact Request #
###################
# app_shop_partial_contact_request:
# path: /contact-form
# methods: [GET]
# defaults:
# _controller: app.controller.footer_contact:requestAction
# _sylius:
# template: "@SyliusShop/Contact/footerForm.html.twig"
# form: AppBundle\Form\Type\FooterContactType
app_shop_contact_request:
path: /send-contact-form
methods: [POST]
defaults:
_controller: app.controller.footer_contact:requestAction
# _sylius:
# redirect: referer
# form: AppBundle\Form\Type\FooterContactType
sylius_shop_product_show:
path: /produkt/{slug}
methods: [GET]
defaults:
_controller: sylius.controller.product:showSaveSessionAction
_sylius:
template: "@SyliusShop/Product/show.html.twig"
repository:
method: findOneByChannelAndSlug
arguments:
- "expr:service('sylius.context.channel').getChannel()"
- "expr:service('sylius.context.locale').getLocaleCode()"
- $slug
sylius_shop_product_index:
path: /kategorie/{slug}
methods: [GET]
defaults:
_controller: sylius.controller.product:indexAction
_sylius:
template: "@SyliusShop/Product/index.html.twig"
grid: sylius_shop_product
requirements:
slug: .+
sylius_shop_partial_product_index_by_tag_with_variant:
path: /by-tag/{count}
methods: [GET]
defaults:
_controller: sylius.controller.product:indexAction
_sylius:
template: $template
repository:
method: findByTagByChannelWithVariant
arguments:
- "expr:service('sylius.context.channel').getChannel()"
- "expr:service('sylius.context.locale').getLocaleCode()"
- "expr:service('app.repository.tag').findOneBySlug($tagSlug, service('sylius.context.locale').getLocaleCode())"
- !!int $count
sylius_shop_partial_random_product_index_by_tag_with_variant:
path: /random-by-tag/{count}
methods: [GET]
defaults:
_controller: sylius.controller.product:indexAction
_sylius:
template: $template
repository:
method: findRandomByTagByChannelWithVariant
arguments:
- "expr:service('sylius.context.channel').getChannel()"
- "expr:service('sylius.context.locale').getLocaleCode()"
- "expr:service('app.repository.tag').findOneBySlug($tagSlug, service('sylius.context.locale').getLocaleCode())"
- !!int $count
sylius_shop_partial_product_index_by_taxon_with_variant:
path: /by-taxon/{count}
methods: [GET]
defaults:
_controller: sylius.controller.product:indexAction
_sylius:
template: $template
repository:
method: findByTaxonByChannelWithVariant
arguments:
- "expr:service('sylius.context.channel').getChannel()"
- "expr:service('sylius.repository.taxon').findOneBySlug($taxonSlug, service('sylius.context.locale').getLocaleCode())"
- "expr:service('sylius.context.locale').getLocaleCode()"
- !!int $count
sylius_shop_partial_random_product_index_by_taxon_with_variant:
path: /random-by-taxon/{count}
methods: [GET]
defaults:
_controller: sylius.controller.product:indexAction
_sylius:
template: $template
repository:
method: findRandomByTaxonByChannelWithVariant
arguments:
- "expr:service('sylius.context.channel').getChannel()"
- "expr:service('sylius.repository.taxon').findOneBySlug($taxonSlug, service('sylius.context.locale').getLocaleCode())"
- "expr:service('sylius.context.locale').getLocaleCode()"
- !!int $count
|
app/config/app/routing/shop.yml
|
---
result: SUCCESS
url: http://wilson.ci.chef.co/job/supermarket-trigger-release/31/
timestamp: 2016-07-25 23:37:41 UTC
duration: 2h55m47s
triggered_by: nellshamrell
active_duration: 2h55m28s
parameters:
GIT_REF: 2.8.1
EXPIRE_CACHE: false
change:
git_remote: <EMAIL>:chef/supermarket.git
git_commit: <PASSWORD>
project: supermarket
version: 2.8.1
stages:
supermarket-promote:
result: SUCCESS
url: http://wilson.ci.chef.co/job/supermarket-promote/324/
duration: 7s
supermarket-test:
result: SUCCESS
url: http://wilson.ci.chef.co/job/supermarket-test/334/
duration: 19m25s
runs:
el-5:
result: SUCCESS
url: http://wilson.ci.chef.co/job/supermarket-test/architecture=x86_64,platform=el-5,project=supermarket,role=tester/334/
duration: 19m24s
el-6:
result: SUCCESS
url: http://wilson.ci.chef.co/job/supermarket-test/architecture=x86_64,platform=el-6,project=supermarket,role=tester/334/
duration: 11m10s
el-7:
result: SUCCESS
url: http://wilson.ci.chef.co/job/supermarket-test/architecture=x86_64,platform=el-7,project=supermarket,role=tester/334/
duration: 6m16s
ubuntu-12.04:
result: SUCCESS
url: http://wilson.ci.chef.co/job/supermarket-test/architecture=x86_64,platform=ubuntu-12.04,project=supermarket,role=tester/334/
duration: 7m20s
ubuntu-14.04:
result: SUCCESS
url: http://wilson.ci.chef.co/job/supermarket-test/architecture=x86_64,platform=ubuntu-14.04,project=supermarket,role=tester/334/
duration: 10m25s
ubuntu-16.04:
result: SUCCESS
url: http://wilson.ci.chef.co/job/supermarket-test/architecture=x86_64,platform=ubuntu-16.04,project=supermarket,role=tester/334/
duration: 9m41s
supermarket-build:
result: SUCCESS
url: http://wilson.ci.chef.co/job/supermarket-build/375/
duration: 2h35m49s
runs:
el-5:
result: SUCCESS
url: http://wilson.ci.chef.co/job/supermarket-build/architecture=x86_64,platform=el-5,project=supermarket,role=builder/375/
duration: 2h9m36s
el-6:
result: SUCCESS
url: http://wilson.ci.chef.co/job/supermarket-build/architecture=x86_64,platform=el-6,project=supermarket,role=builder/375/
duration: 1h8m26s
el-7:
result: SUCCESS
url: http://wilson.ci.chef.co/job/supermarket-build/architecture=x86_64,platform=el-7,project=supermarket,role=builder/375/
duration: 48m12s
ubuntu-12.04:
result: SUCCESS
url: http://wilson.ci.chef.co/job/supermarket-build/architecture=x86_64,platform=ubuntu-12.04,project=supermarket,role=builder/375/
duration: 2h35m42s
supermarket-trigger-release:
result: SUCCESS
url: http://wilson.ci.chef.co/job/supermarket-trigger-release/31/
duration: 6s
|
reports/wilson.ci.chef.co/job/supermarket-trigger-release/31.yaml
|
---
# Copyright 2014, Rackspace US, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
uca_enable: True
uca_apt_repo_url: "http://ubuntu-cloud.archive.canonical.com/ubuntu"
# If you want to use your own keys for UCA or RDO (instead of ubuntu or
# CentOS defaults), please define user_external_repo_key dict with
# keys/values corresponding to the ansible module arguments for your
# distribution.
#
# For CentOS you'd define the following:
#user_external_repo_key:
# key: https://my-repo.example.com/signing-key.asc
# You could also use key: <keyid> from a keyserver, see ansible rpm_key doc.
# Validate cert option from the module is also supported.
# In CentOS, refusing to use the RDO package also forces you to add your own
# repository. See below to know how to include your own repository.
#
# For Ubuntu, you'd define the following:
#user_external_repo_key:
# url: https://my-repo.example.com/signing-key.asc
# This leverages apt_key module, and passes the id, keyserver, and url argument.
# Therefore, you can ensure the id of the key you want to import with id: <keyid>
# or replace the source url with a keyserver.
# If you have defined another set of keys you want to include, the chances are
# high you want to give also your own repository.
# For CentOS, define the following dict+key/values:
#user_external_repo:
# name: "mymirror"
# baseurl: "http://mymirrorurl/baseurl/"
#See also gpgcheck, gpgkey, description of the Ansible yum_repository module
# For Ubuntu, define something like the following:
#user_external_repo:
# repo: "deb http://mymirrorurl/ubuntu/ xenial main"
# filename: "mymirror"
# If your mirror includes UCA mirroring, you may then want to disable using uca
# by setting in your user variables uca_enable: False
# Set the package install state for distribution packages
# Options are 'present' and 'latest'
pip_install_package_state: "latest"
## Path to pip download/installation script.
pip_upstream_url: https://bootstrap.pypa.io/get-pip.py
pip_fallback_url: https://raw.githubusercontent.com/pypa/get-pip/master/get-pip.py
# Allow the deployer to force pip to download locally to the deployment host
# and copy it to the remote container for installation. Useful for environments
# where the containers lack internet access.
pip_offline_install: false
pip_tmp_packages: /tmp/pip_install
# Additional options that you might want to pass to "get-pip.py" when installing pip.
# Default `pip_get_pip_options` is an empty string.
pip_get_pip_options: ""
pip_source_install_options: ""
# The URL/path of a constraints file to use when installing the additional pip packages.
#pip_install_upper_constraints: "http://git.openstack.org/cgit/openstack/requirements/plain/upper-constraints.txt"
pip_install_distro_packages: "{{ pip_install_build_packages | ternary(pip_install_distro_build_packages, []) }}"
pip_install_remove_distro_packages: []
pip_install_distro_build_packages: []
# Some pip installations require the system to have a compiler present
# Toggling this will install a compiler and python-dev libraries to the host
pip_install_build_packages: yes
# This list var can be used to specify specific versions of pip, setuptools,
# wheel and any other packages which must be installed when pip installs.
pip_packages:
- pip
pip_required_pip_packages: []
## APT Cache options
cache_timeout: 600
# Validate Certificates when downloading pip. May be set to "no" when proxy server
# is intercepting the certificates.
pip_validate_certs: "yes"
pip_lock_to_internal_repo: False
# Options for pip global
pip_enable_pre_releases: true
pip_timeout: 120
# Options for pip install
pip_upgrade: true
# Drop link files to lock down pip
# Example:
# pip_links:
# - name: "openstack_release"
# link: "{{ openstack_repo_url }}/os-releases/{{ openstack_release }}/"
pip_links: []
## Tunable overrides
pip_global_conf_overrides: {}
## Additional options to pass to pip
# Example:
# pip_install_options: "--cert /etc/ssl/certs/ca-certificates.crt"
#
# See the documentation at https://pip.pypa.io/en/stable/reference/pip
# for details.
pip_install_options: ""
# Always force getting the latest get-pip script
pip_get_pip_force: yes
# Set the default python details
pip_py_default: py2
pip_py_defaults:
py2:
pip: pip2
python: python2
py3:
pip: pip3
python: python3
# Software collection installed. Currently this is only used on
# RHEL installations when Python3 is enabled.
pip_install_distro_scl_packages: []
|
defaults/main.yml
|
---
name: pr_test
on:
pull_request:
branches:
- trunk
- release-*
jobs:
proto_unchanged:
name: validate protobuf files unchanged
runs-on: ubuntu-latest
env:
GOVER: 1.17
steps:
- uses: actions/checkout@v2
- name: Set up Go
uses: actions/setup-go@v2
with:
go-version: ${{ env.GOVER }}
- name: Make venv for proto gen
working-directory: ai/src
run: make venv-gen-proto
- name: Run gen-proto check diff
run: |
wget https://github.com/protocolbuffers/protobuf/releases/download/v3.19.3/protoc-3.19.3-linux-x86_64.zip
unzip protoc-3.19.3-linux-x86_64.zip -d protoc
sudo cp -r protoc/include/google/ /usr/local/include/
sudo chmod -R 755 /usr/local/include/google
sudo cp protoc/bin/protoc /usr/local/bin/
sudo chmod +x /usr/local/bin/protoc
rm -r protoc protoc-3.19.3-linux-x86_64.zip
make init-proto
export PATH="$PATH:$(go env GOPATH)/bin"
make gen-proto check-proto-diff
spiced-and-spice:
name: go test spiced & spice
runs-on: ubuntu-latest
env:
GOVER: 1.17
steps:
- uses: actions/checkout@v2
- name: Set up Go
uses: actions/setup-go@v2
with:
go-version: ${{ env.GOVER }}
- name: Build and copy dashboard
working-directory: dashboard
run: make
- name: Test dashboard
run: yarn test
working-directory: dashboard
- name: go vet
run: go vet ./...
- name: go test
run: go test -v -count=10 -shuffle=on ./...
working-directory: pkg
aiengine:
name: test aiengine
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up Python 3.8
uses: actions/setup-python@v1
with:
python-version: 3.8
- name: Cache PIP
id: cache-pip
uses: actions/cache@v2
with:
path: ~/.cache/pip
key: ${{ runner.os }}-${{ hashFiles('ai/src/requirements/development.txt') }}
- name: Install Python dependencies
working-directory: ai/src
run: make venv-dev
- name: Run tests
working-directory: ai/src
run: make test
|
.github/workflows/pr_test.yml
|
name: Bug report
description: Create a report to help us improve
title: "[BUG]: "
labels: ["type/bug", "needs-triage"]
body:
- type: markdown
attributes:
value: |
Thank you for taking the time to create a Container Storage Modules (CSM) bug report. To better help us investigate this bug, please provide the information below.
- type: textarea
id: bug-description
attributes:
label: Bug Description
description: A clear and concise description of what the bug is.
validations:
required: true
- type: textarea
id: files
attributes:
label: Logs
description: Copy/paste any relevant log output here or Upload log files by dragging and dropping the files into the text area. Please be sure to remove all sensitive data such as IP addresses, host names, credentials, etc.
validations:
required: true
- type: textarea
id: screenshots
attributes:
label: Screenshots
description: Upload any relevant screenshots here by dragging and dropping the files into the text area.
validations:
required: false
- type: textarea
id: info
attributes:
label: Additional Environment Information
description: Provide additional environment information such as a listing of pods, PVs, PVCs, VolumeAttachments, Events, etc.
validations:
required: false
- type: textarea
id: steps
attributes:
label: Steps to Reproduce
      description: What steps can be taken to reproduce this bug?
validations:
required: true
- type: textarea
id: expected
attributes:
label: Expected Behavior
description: A clear and concise description of what you expected to happen.
validations:
required: true
- type: textarea
id: driver
attributes:
label: CSM Driver(s)
description: Please list the CSI drivers and versions that are applicable to this bug.
placeholder: |
CSI Driver for PowerFlex v2.1
CSI Driver for PowerScale v2.1
CSI Driver for PowerMax v2.1
CSI Driver for PowerStore v2.1
CSI Driver for Unity XT v2.1
validations:
required: true
- type: input
id: install
attributes:
label: Installation Type
description: What installation type was used? Operator or Helm? Please provide the version as well.
placeholder: Operator v1.6.1
validations:
required: false
- type: textarea
id: module
attributes:
label: Container Storage Modules Enabled
description: Please list the Container Storage Modules that are enabled along with the image versions.
placeholder: |
Resiliency (Podmon) v1.1.0
Replication v1.1.0
Authorization v1.1.0
Observability v1.1.0
Volume Group Snapshotter v4.2.0
validations:
required: false
- type: input
id: co
attributes:
label: Container Orchestrator
description: Which container orchestrator is being used? Please provide the version as well.
placeholder: Kubernetes 1.23.1
validations:
required: true
- type: input
id: os
attributes:
label: Operating System
description: Which operating system is being used? Please provide the version as well.
placeholder: RHEL 7.6
validations:
required: true
|
.github/ISSUE_TEMPLATE/bug_report.yml
|
---
groups:
- name: bosh
jobs:
- start-job
- unit-1.9
- unit-2.1
- integration-1.9-postgres
- integration-2.1-mysql
- integration-2.1-postgres
- publish-coverage
- promote-candidate
- name: 1.9
jobs:
- start-job
- unit-1.9
- integration-1.9-postgres
- publish-coverage
- promote-candidate
- name: 2.1
jobs:
- start-job
- unit-2.1
- integration-2.1-mysql
- integration-2.1-postgres
- publish-coverage
- promote-candidate
- name: mysql
jobs:
- start-job
- unit-1.9
- unit-2.1
- integration-2.1-mysql
- publish-coverage
- promote-candidate
- name: postgres
jobs:
- start-job
- unit-1.9
- unit-2.1
- integration-1.9-postgres
- integration-2.1-postgres
- publish-coverage
- promote-candidate
jobs:
- name: start-job
public: true
serial: true
plan:
- { get: interval-trigger, trigger: true }
- { get: bosh-src }
- name: unit-1.9
public: true
serial: true
plan:
- { get: bosh-src, trigger: true, passed: [start-job] }
- task: test
file: bosh-src/ci/tasks/test-unit.yml
config:
params:
RUBY_VERSION: 1.9.3
- name: unit-2.1
public: true
serial: true
plan:
- { get: bosh-src, trigger: true, passed: [start-job] }
- task: test
file: bosh-src/ci/tasks/test-unit.yml
config:
params:
RUBY_VERSION: 2.1.7
- name: integration-1.9-postgres
public: true
serial: true
plan:
- { get: bosh-src, trigger: true, passed: [start-job] }
- task: test
privileged: true
file: bosh-src/ci/tasks/test-integration.yml
config:
tags: ["bosh-integration"]
params:
DB: postgresql
RUBY_VERSION: 1.9.3
NUM_GROUPS: 16
- name: integration-2.1-mysql
public: true
serial: true
plan:
- { get: bosh-src, trigger: true, passed: [start-job] }
- task: test
privileged: true
file: bosh-src/ci/tasks/test-integration.yml
config:
tags: ["bosh-integration"]
params:
DB: mysql
RUBY_VERSION: 2.1.7
NUM_GROUPS: 16
- name: integration-2.1-postgres
public: true
serial: true
plan:
- { get: bosh-src, trigger: true, passed: [start-job] }
- task: test
privileged: true
file: bosh-src/ci/tasks/test-integration.yml
config:
tags: ["bosh-integration"]
params:
DB: postgresql
RUBY_VERSION: 2.1.7
NUM_GROUPS: 16
- name: publish-coverage
public: true
serial: true
plan:
- trigger: true
passed: [integration-2.1-mysql, integration-1.9-postgres, integration-2.1-postgres, unit-2.1, unit-1.9]
get: bosh-src
- task: publish
file: bosh-src/ci/tasks/publish-coverage.yml
config:
params:
CODECLIMATE_REPO_TOKEN: {{codeclimate_token}}
- name: promote-candidate
public: true
serial: true
plan:
- trigger: true
passed: [integration-2.1-mysql, integration-1.9-postgres, integration-2.1-postgres, unit-2.1, unit-1.9]
get: bosh-src
- {put: bosh-candidate, params: {repository: bosh-src}}
resources:
- name: interval-trigger
type: time
source:
interval: 45m # average build time for the integration tests
- name: bosh-src
type: git
source:
uri: https://github.com/cloudfoundry/bosh.git
branch: {{branch}}
- name: bosh-candidate
type: git
source:
uri: <EMAIL>:cloudfoundry/bosh.git
branch: candidate
private_key: {{github_deployment_key}}
|
ci/pipeline.yml
|