- position: 1
driverNumber: 6
driverId: kimi-raikkonen
constructorId: ferrari
engineManufacturerId: ferrari
tyreManufacturerId: bridgestone
time: "1:31.396"
gap:
interval:
laps: 18
- position: 2
driverNumber: 2
driverId: lewis-hamilton
constructorId: mclaren
engineManufacturerId: mercedes
tyreManufacturerId: bridgestone
time: "1:31.627"
gap: "+0.231"
interval: "+0.231"
laps: 12
- position: 3
driverNumber: 1
driverId: fernando-alonso
constructorId: mclaren
engineManufacturerId: mercedes
tyreManufacturerId: bridgestone
time: "1:32.039"
gap: "+0.643"
interval: "+0.412"
laps: 11
- position: 4
driverNumber: 10
driverId: robert-kubica
constructorId: bmw-sauber
engineManufacturerId: bmw
tyreManufacturerId: bridgestone
time: "1:32.039"
gap: "+0.643"
interval: "+0.000"
laps: 18
- position: 5
driverNumber: 5
driverId: felipe-massa
constructorId: ferrari
engineManufacturerId: ferrari
tyreManufacturerId: bridgestone
time: "1:32.217"
gap: "+0.821"
interval: "+0.178"
laps: 18
- position: 6
driverNumber: 16
driverId: nico-rosberg
constructorId: williams
engineManufacturerId: toyota
tyreManufacturerId: bridgestone
time: "1:32.344"
gap: "+0.948"
interval: "+0.127"
laps: 16
- position: 7
driverNumber: 9
driverId: nick-heidfeld
constructorId: bmw-sauber
engineManufacturerId: bmw
tyreManufacturerId: bridgestone
time: "1:32.581"
gap: "+1.185"
interval: "+0.237"
laps: 20
- position: 8
driverNumber: 15
driverId: mark-webber
constructorId: red-bull
engineManufacturerId: renault
tyreManufacturerId: bridgestone
time: "1:32.632"
gap: "+1.236"
interval: "+0.051"
laps: 16
- position: 9
driverNumber: 14
driverId: david-coulthard
constructorId: red-bull
engineManufacturerId: renault
tyreManufacturerId: bridgestone
time: "1:32.679"
gap: "+1.283"
interval: "+0.047"
laps: 18
- position: 10
driverNumber: 11
driverId: ralf-schumacher
constructorId: toyota
engineManufacturerId: toyota
tyreManufacturerId: bridgestone
time: "1:32.788"
gap: "+1.392"
interval: "+0.109"
laps: 21
- position: 11
driverNumber: 18
driverId: vitantonio-liuzzi
constructorId: toro-rosso
engineManufacturerId: ferrari
tyreManufacturerId: bridgestone
time: "1:32.841"
gap: "+1.445"
interval: "+0.053"
laps: 20
- position: 12
driverNumber: 7
driverId: jenson-button
constructorId: honda
engineManufacturerId: honda
tyreManufacturerId: bridgestone
time: "1:32.869"
gap: "+1.473"
interval: "+0.028"
laps: 18
- position: 13
driverNumber: 12
driverId: jarno-trulli
constructorId: toyota
engineManufacturerId: toyota
tyreManufacturerId: bridgestone
time: "1:32.936"
gap: "+1.540"
interval: "+0.067"
laps: 20
- position: 14
driverNumber: 19
driverId: scott-speed
constructorId: toro-rosso
engineManufacturerId: ferrari
tyreManufacturerId: bridgestone
time: "1:32.974"
gap: "+1.578"
interval: "+0.038"
laps: 14
- position: 15
driverNumber: 17
driverId: alexander-wurz
constructorId: williams
engineManufacturerId: toyota
tyreManufacturerId: bridgestone
time: "1:33.154"
gap: "+1.758"
interval: "+0.180"
laps: 16
- position: 16
driverNumber: 3
driverId: giancarlo-fisichella
constructorId: renault
engineManufacturerId: renault
tyreManufacturerId: bridgestone
time: "1:33.214"
gap: "+1.818"
interval: "+0.060"
laps: 17
- position: 17
driverNumber: 8
driverId: rubens-barrichello
constructorId: honda
engineManufacturerId: honda
tyreManufacturerId: bridgestone
time: "1:33.229"
gap: "+1.833"
interval: "+0.015"
laps: 19
- position: 18
driverNumber: 4
driverId: heikki-kovalainen
constructorId: renault
engineManufacturerId: renault
tyreManufacturerId: bridgestone
time: "1:33.484"
gap: "+2.088"
interval: "+0.255"
laps: 13
- position: 19
driverNumber: 23
driverId: anthony-davidson
constructorId: super-aguri
engineManufacturerId: honda
tyreManufacturerId: bridgestone
time: "1:33.792"
gap: "+2.396"
interval: "+0.308"
laps: 20
- position: 20
driverNumber: 22
driverId: takuma-sato
constructorId: super-aguri
engineManufacturerId: honda
tyreManufacturerId: bridgestone
time: "1:33.945"
gap: "+2.549"
interval: "+0.153"
laps: 19
- position: 21
driverNumber: 20
driverId: adrian-sutil
constructorId: spyker
engineManufacturerId: ferrari
tyreManufacturerId: bridgestone
time: "1:34.423"
gap: "+3.027"
interval: "+0.478"
laps: 20
- position: 22
driverNumber: 21
driverId: markus-winkelhock
constructorId: spyker
engineManufacturerId: ferrari
tyreManufacturerId: bridgestone
time: "1:36.090"
gap: "+4.694"
interval: "+1.667"
laps: 19
|
src/data/seasons/2007/races/10-europe/free-practice-3-results.yml
|
---
result: SUCCESS
timestamp: 2016-07-13 17:30:53 UTC
url: http://manhattan.ci.chef.co/job/chef-build/134/
trigger_url: http://manhattan.ci.chef.co/job/chef-trigger-ad_hoc/66/
duration: 1h3m24s
active_duration: 35m14s
retries: 2
retry_delay: 28m9s
runs:
debian-6-i386:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chef-build/architecture=i386,platform=debian-6,project=chef,role=builder/134/
duration: 5m54s
el-5-i386:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chef-build/architecture=i386,platform=el-5,project=chef,role=builder/134/
duration: 6m30s
el-6-i386:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chef-build/architecture=i386,platform=el-6,project=chef,role=builder/134/
duration: 6m44s
freebsd-10-i386:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chef-build/architecture=i386,platform=freebsd-10,project=chef,role=builder/134/
duration: 5m55s
freebsd-9-i386:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chef-build/architecture=i386,platform=freebsd-9,project=chef,role=builder/134/
duration: 6m5s
ubuntu-12.04-i386:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chef-build/architecture=i386,platform=ubuntu-12.04,project=chef,role=builder/134/
duration: 6m11s
windows-2008r2-i386:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chef-build/architecture=i386,platform=windows-2008r2,project=chef,role=builder/134/
duration: 8m25s
solaris-10-i86pc:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chef-build/architecture=i86pc,platform=solaris-10,project=chef,role=builder/134/
duration: 7m29s
aix-6.1-powerpc:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chef-build/architecture=powerpc,platform=aix-6.1,project=chef,role=builder/134/
duration: 16m38s
el-7-ppc64:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chef-build/architecture=ppc64,platform=el-7,project=chef,role=builder/134/
duration: 7m59s
el-7-ppc64le:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chef-build/architecture=ppc64le,platform=el-7,project=chef,role=builder/134/
duration: 8m14s
ubuntu-14.04-ppc64le:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chef-build/architecture=ppc64le,platform=ubuntu-14.04,project=chef,role=builder/134/
duration: 3m28s
solaris-10-sun4v:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chef-build/architecture=sun4v,platform=solaris-10,project=chef,role=builder/134/
duration: 32m25s
debian-6:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chef-build/architecture=x86_64,platform=debian-6,project=chef,role=builder/134/
duration: 7m51s
el-5:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chef-build/architecture=x86_64,platform=el-5,project=chef,role=builder/134/
duration: 8m33s
el-6:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chef-build/architecture=x86_64,platform=el-6,project=chef,role=builder/134/
duration: 9m13s
el-7:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chef-build/architecture=x86_64,platform=el-7,project=chef,role=builder/134/
duration: 6m37s
freebsd-10:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chef-build/architecture=x86_64,platform=freebsd-10,project=chef,role=builder/134/
duration: 8m15s
freebsd-9:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chef-build/architecture=x86_64,platform=freebsd-9,project=chef,role=builder/134/
duration: 9m16s
ios_xr-6:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chef-build/architecture=x86_64,platform=ios_xr-6,project=chef,role=builder/134/
duration: 5m7s
mac_os_x-10.9:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chef-build/architecture=x86_64,platform=mac_os_x-10.9,project=chef,role=builder/134/
duration: 4m
nexus-7:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chef-build/architecture=x86_64,platform=nexus-7,project=chef,role=builder/134/
duration: 6m24s
ubuntu-12.04:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chef-build/architecture=x86_64,platform=ubuntu-12.04,project=chef,role=builder/134/
duration: 7m33s
windows-2008r2:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/chef-build/architecture=x86_64,platform=windows-2008r2,project=chef,role=builder/134/
duration: 11m49s
|
reports/stages/manhattan.ci.chef.co/job/chef-build/134.yaml
|
---
# vars file for jenkins-server
openjdk_package: java-1.8.0-openjdk
# Certificate
cert_file: ci_sagrid_ac_za
jenkins_init_file: /etc/sysconfig/jenkins
jenkins_http_port_param: JENKINS_PORT
jenkins_java_options_env_var: JENKINS_JAVA_OPTIONS
jenkins_plugins:
- analysis-core
- git
- pam-auth
- build-user-vars-plugin
- ssh-credentials
- git-client
- ansible
- docker-plugin
- ldap
- plain-credentials
- conditional-buildstep
- github
- mailer
- ssh-slaves
- ansicolor
- github-api
- project-build-times
- credentials
- ant
- project-stats-plugin
- antisamy-markup-formatter
- mapdb-api
- subversion
- run-condition
- cvs
- github-oauth
- matrix-auth
- token-macro
- saml
- build-failure-analyzer
- global-build-stats
- dashboard-view
- gravatar
- depgraph-view
- script-security
- icon-shim
- maven-plugin
- embeddable-build-status
- warnings
- javadoc
- sidebar-link
- envinject
- modernstatus
- buildgraph-view
- simple-theme-plugin
- external-monitor-job
- jquery
- monitoring
- build-metrics
- slack
- ghprb
- jquery-ui
- naginator
- build-monitor-plugin
- ssh-agent
- buildtriggerbadge
- junit
jenkins_plugin_configs:
# - hudson.plugins.git.GitSCM.xml
- jenkins.plugins.slack.SlackNotifier.xml
# - hudson.plugins.git.GitTool.xml
- hudson.plugins.google.analytics.GoogleAnalyticsPageDecorator.xml
- org.jenkins_ci.plugins.flexible_publish.FlexiblePublisher.xml
# This adds the Bootstrap theme styling.
- org.codefirst.SimpleThemeDecorator.xml
# These job names are kept in the JenkinsConfig repo on GitHub.
# Are you special? Then you can see them :) Else - GTFO
jenkins_jobs:
- abinit-deploy
- abyss-deploy
- add-scores-deploy
- ASR template
- atlas-deploy
- autodock-deploy
- boost-deploy
- bzlib-deploy
- clhep-deploy
- cmake-deploy
- curl-deploy
- Fermionic-Molecular-Dynamics-deploy
- fftw2-deploy
- fftw3-deploy
- fitsio-deploy
- freetype-deploy
- gadget-deploy
- gcc-deploy
- gmp-deploy
- gnuplot-deploy
- gromacs-deploy
- gsl-deploy
- hdf5-mpich-deploy
- hdf5-openmpi-deploy
- heasoft-deploy
- hmmer-deploy
- htk-deploy
- jasper-deploy
- java-openjdk-deploy
- lapack-deploy
- libpng-deploy
- libsvm-deploy
- libxml2-deploy
- llvm-deploy
- lua-deploy
- matlab-runtime-deploy
- matplotlib-deploy
- mothur-deploy
- mpc-deploy
- mpfr-deploy
- mpich-deploy
- ncurses-deploy
- netcdf-deploy
- numpy-deploy
- oases-deploy
- openblas-deploy
- OpenFOAM-deploy
- OpenMPI
- openssl-deploy
- plink-deploy
- proj-deploy
- python-deploy
- quantum-espresso-noparallel-deploy
- readline-deploy
- repast-deploy
- repast-hpc-deploy
- repo transaction
- rlang-deploy
- root-deploy
- Sakhile-first-deploy
- scalapack-deploy
- scipy-deploy
- shapeit-deploy
- sparsehash-deploy
- sqlite-deploy
- suitesparse-deploy
- tcltk-deploy
- template-deploy
- torque-deploy
- velvet-deploy
- weka-deploy
- wrf-deploy
- zlib-deploy
|
vars/main.yml
|
---
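# Note on structure: this sanity test leans on YAML anchors and aliases to avoid
# repetition. &idNNN names a task body or assertion the first time it appears,
# and *idNNN replays it verbatim later: *id001 re-runs the same config for the
# idempotence check, while *id002/*id004 are the "changed"/"unchanged" asserts.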
- debug: msg="START connection={{ ansible_connection }} nxos_ntp_options sanity
test"
- name: Apply default ntp config
ignore_errors: true
cisco.nxos.nxos_ntp_options: &id007
state: absent
- block:
- name: Configure ntp with master and default stratum
register: result
cisco.nxos.nxos_ntp_options: &id001
master: true
logging: true
state: present
- assert: &id002
that:
- result.changed == true
- name: Check Idempotence - Configure ntp with master and default stratum
register: result
cisco.nxos.nxos_ntp_options: *id001
- assert: &id004
that:
- result.changed == false
- name: Configure ntp with master and non-default stratum
register: result
cisco.nxos.nxos_ntp_options: &id003
master: true
stratum: 10
state: present
- assert: *id002
- name: Check Idempotence - Configure ntp with master and non-default stratum
register: result
cisco.nxos.nxos_ntp_options: *id003
- assert: *id004
- name: Configure ntp with master and no logging
register: result
cisco.nxos.nxos_ntp_options: &id005
master: true
stratum: 10
logging: false
state: present
- assert: *id002
- name: Check Idempotence - Configure ntp with master and no logging
register: result
cisco.nxos.nxos_ntp_options: *id005
- assert: *id004
- name: Configure ntp with logging and no master
register: result
cisco.nxos.nxos_ntp_options: &id006
master: false
logging: true
state: present
- assert: *id002
- name: Check Idempotence - Configure ntp with logging and no master
register: result
cisco.nxos.nxos_ntp_options: *id006
- assert: *id004
- name: Configure ntp with master and non-default stratum again
register: result
cisco.nxos.nxos_ntp_options: *id003
- assert: *id002
- name: Remove ntp options
register: result
cisco.nxos.nxos_ntp_options: *id007
- assert: *id002
- name: Check Idempotence - Remove
register: result
cisco.nxos.nxos_ntp_options: *id007
- assert: *id004
always:
- name: Cleanup ntp config
register: result
cisco.nxos.nxos_ntp_options: *id007
- debug: msg="END connection={{ ansible_connection }} nxos_ntp_options sanity
test"
|
venv/lib/python3.6/site-packages/ansible_collections/cisco/nxos/tests/integration/targets/nxos_ntp_options/tests/common/sanity.yaml
|
id: ocd-organization/e49e9b65-166c-503d-83d6-3133c4d510fb
jurisdiction: ocd-jurisdiction/country:us/government
classification: committee
name: Senate Committee on Small Business and Entrepreneurship
chamber: upper
sources:
- url: https://theunitedstates.io/
links:
- url: http://www.sbc.senate.gov/
note: homepage
members:
- name: <NAME>
role: Chairman
person_id: ocd-person/18b53a95-c658-5a23-8c3e-074a7e0d27e3
- name: <NAME>
role: Member
person_id: ocd-person/46c3b77a-7d00-5ecc-83d0-04c8fa05bf04
- name: <NAME>
role: Member
person_id: ocd-person/6d497134-2c53-5987-add8-6fffeaa96cda
- name: <NAME>
role: Member
person_id: ocd-person/bceb5ccd-31b0-5d03-894d-03cf2cbb2771
- name: <NAME>
role: Member
person_id: ocd-person/d0bb2662-40b6-52d2-94e6-2bf9d588c954
- name: <NAME>
role: Member
person_id: ocd-person/792faee4-b972-54ed-b42d-47b41a22ccb3
- name: <NAME>
role: Member
person_id: ocd-person/ee89a520-227a-575d-8379-5d36b6e6b2a2
- name: <NAME>
role: Member
person_id: ocd-person/c02182c1-3275-5f3e-9c66-2f0873cc72c7
- name: <NAME>
role: Member
person_id: ocd-person/2f7f8ec5-f652-5c47-8dcb-06d7bc54557c
- name: <NAME>
role: Member
person_id: ocd-person/e4ea61a6-63ea-53e6-b61e-4c735a0b7818
- name: <NAME>
role: Ranking Member
person_id: ocd-person/7a1c13f9-1aac-5461-aa2a-11642749828d
- name: <NAME>
role: Member
person_id: ocd-person/014b069e-c444-50fe-99ef-bf72ae5aecd5
- name: <NAME>
role: Member
person_id: ocd-person/e8bb2501-1603-543f-9667-9089f781aa97
- name: <NAME>
role: Member
person_id: ocd-person/3bb4a99e-9448-5b35-86cd-114bd035535e
- name: <NAME>
role: Member
person_id: ocd-person/86133add-b3ff-52c2-a71e-442b9afeff6b
- name: <NAME>
role: Member
person_id: ocd-person/8342033b-f6c4-5986-b2bf-2ebcd2b0047a
- name: <NAME>
role: Member
person_id: ocd-person/7ec39cbb-df0e-5970-aba1-5722964f55bb
- name: <NAME>
role: Member
person_id: ocd-person/7a996578-ee76-59c8-856c-4f63861a62da
- name: <NAME>
role: Member
person_id: ocd-person/0a7d370e-88bd-5b83-9b93-c8607c036d7a
- name: <NAME>
role: Member
person_id: ocd-person/09a67947-f66a-55ac-b4ba-5c656ad7f67d
extras:
type: senate
|
data/us/committees/Senate-Committee-on-Small-Business-and-Entrepreneurship-e49e9b65-166c-503d-83d6-3133c4d510fb.yml
|
schema_version: 0.3
type: filter
identifier: lightshow
title: Light Show
version: 1
copyright: Meltytech, LLC
creator: <NAME> <<EMAIL>>
license: LGPLv2.1
language: en
tags:
- Video
description: >
An audio visualization filter that colors the image proportional to the
magnitude of the audio spectrum.
parameters:
- identifier: frequency_low
title: Low Frequency
type: integer
description: >
The low end of the frequency range to be used to influence the image
motion.
mutable: yes
readonly: no
default: 20
unit: Hz
- identifier: frequency_high
title: High Frequency
type: integer
description: >
The high end of the frequency range to be used to influence the image
motion.
mutable: yes
readonly: no
default: 20000
unit: Hz
- identifier: threshold
title: Level Threshold
type: float
description: >
The minimum amplitude of sound that must occur within the frequency range
to cause the image to move.
mutable: yes
readonly: no
default: -30
minimum: -100
maximum: 0
unit: dB
- identifier: osc
title: Oscillation
type: float
description: >
Oscillation can be useful to make the image move back and forth during
long periods of sound.
A value of 0 specifies no oscillation.
mutable: yes
readonly: no
default: 5
minimum: 0
unit: Hz
- identifier: color.*
title: Light Color
type: color
description: |
The color of the light.
Multiple colors can be specified with incrementing suffixes to cause the
waveform to be drawn in a circular gradient. color.1 is the inside of the
circle. Subsequent colors will produce a gradient toward the outside.
By default, the filter has one color defined:
color.1=0xffffffff
This results in the image being lightened.
To create a gradient, define more colors:
color.2=green color.3=0x77777777 color.4=0x00000000
A color value is a hexadecimal representation of RGB plus alpha channel
as 0xrrggbbaa. Colors can also be the words: white, black, red, green,
or blue. You can also use HTML-style color values #rrggbb or #aarrggbb.
readonly: no
mutable: yes
widget: color
- identifier: rect
title: Rectangle
description: >
Defines the rectangle that the color should be drawn in.
Format is: "X Y W H".
X, Y, W, H are assumed to be pixel units unless they have the suffix '%'.
type: rect
default: "0 0 100% 100%"
readonly: no
mutable: yes
- identifier: window_size
title: Window Size
type: integer
description: >
The number of samples that the FFT will be performed on. If
window_size is less than the number of samples in a frame, extra samples
will be ignored. If window_size is more than the number of samples in a
frame, samples will be buffered from previous frames to fill the window.
The buffering is performed as a sliding window so that the most recent
samples are always transformed.
mutable: no
readonly: no
default: 2048
|
bin/linux/x64/share/mlt/qt/filter_lightshow.yml
|
---
# tasks file for consul(On Linux)
# Since Ansible 2.0, the 'src' parameter of the 'unarchive' module accepts an external URL (e.g. http://example.com/file.zip).
# But this role doesn't use that feature, because the zip file would always be downloaded even if the same file already exists locally.
# http://docs.ansible.com/ansible/unarchive_module.html
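# For reference, a minimal sketch (hypothetical, not used by this role) of the
# remote-URL variant described above might look like the following; the
# 'creates' guard is an assumption, without it the archive is fetched every run:
#
# - name: Download and unarchive Consul in one step
#   unarchive:
#     src: "{{ consul_download_url }}"
#     dest: "{{ consul_bin_dir }}"
#     remote_src: yes
#     creates: "{{ consul_bin_dir }}/consul"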
- block:
- name: Download Consul archive
get_url:
url: "{{ consul_download_url }}"
checksum: "sha256:{{ consul_sha256 }}"
dest: "{{ consul_download_tmppath }}"
register: dl_result
- name: Unarchive Consul
unarchive:
src: "{{ consul_download_tmppath }}"
dest: "{{ consul_bin_dir }}"
copy: no
when: dl_result.changed
- name: Ensure execute bit of Consul binary
file:
path: "{{ consul_bin_dir }}/consul"
mode: 'a+x'
- name: Add CAP_NET_BIND_SERVICE to Consul binary
capabilities:
path: "{{ consul_bin_dir }}/consul"
capability: cap_net_bind_service+ep
state: present
when: (consul_daemon_cap_net_bind is defined) and (consul_daemon_cap_net_bind | bool)
become: yes
- block:
- name: Ensure consul_group exists
group:
name: "{{ consul_group }}"
system: yes
- name: Ensure consul_user exists and belongs to consul_group
user:
name: "{{ consul_owner }}"
group: "{{ consul_group }}"
system: yes
become: yes
when: ansible_os_family != 'Alpine'
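# On Alpine the role shells out to addgroup/adduser instead of the group/user
# modules. The failed_when expressions below treat a non-zero exit as success
# when stderr reports the group/user name as already "in use", which keeps the
# raw commands idempotent across repeated runs.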
- block:
- name: Ensure consul_group exists(on Alpine Linux)
command: "addgroup -S {{ consul_owner }}"
register: cmd_consul_group
changed_when: cmd_consul_group.rc == 0
failed_when: cmd_consul_group.rc != 0 and (cmd_consul_group.stderr.find('in use') == -1 or cmd_consul_group.stderr.find(consul_group) == -1)
- name: Ensure consul_user exists and belongs to consul_group(on Alpine Linux)
command: "adduser -G {{ consul_group }} -H -S -D {{ consul_owner }}"
register: cmd_consul_user
changed_when: cmd_consul_user.rc == 0
failed_when: cmd_consul_user.rc != 0 and (cmd_consul_user.stderr.find('in use') == -1 or cmd_consul_user.stderr.find(consul_owner) == -1)
become: yes
when: ansible_os_family == 'Alpine'
|
tasks/Linux.yml
|
---
- name: Get the target instance zone name
uri:
url: http://169.254.169.254/latest/meta-data/placement/availability-zone
return_content: yes
register: response
- name: Chown directory to user other than root
file:
path: "/var/www/spring_app_{{ aws_profile }}_{{ item }}"
owner: "{{ ansible_user }}"
recurse: yes
mode: u=rwX,g=rX,o=rX
- name: Set fact for target instance zone name
set_fact:
zone_name: "{{ response.content }}"
- name: Set fact for the application version
set_fact:
app_version: "{{ ansible_date_time.epoch }}-{{ aws_profile }}-{{ branch }}"
when: app_version is not defined
- name: Set a new version number for our Spring App
lineinfile:
dest: "/var/www/spring_app_{{ aws_profile }}_{{ item }}/pom.xml"
regexp: "<version>"
line: "<version>{{ app_version }}</version>"
state: present
when: app_version is defined
- name: Configure our application.properties to include the zone name property as well as other properties
template:
src: application.properties
dest: "/var/www/spring_app_{{ aws_profile }}_{{ item }}/src/main/resources/application-{{ aws_profile }}.properties"
- name: Set our Maven profile in pom.xml
blockinfile:
dest: "/var/www/spring_app_{{ aws_profile }}_{{ item }}/pom.xml"
insertafter: "<profiles>"
marker: "<!-- ANSIBLE MANAGED PROFILE BLOCK {{ aws_profile }} -->"
content: |
<profile>
<id>{{ aws_profile }}</id>
<activation>
<activeByDefault>true</activeByDefault>
</activation>
</profile>
- name: Package our Spring App
command: "mvn clean package -f /var/www/spring_app_{{ aws_profile }}_{{ item }}/pom.xml --activate-profiles {{ aws_profile }}"
- name: Get Spring App name
command: mvn -q -Dexec.executable=echo -Dexec.args='${project.name}' --non-recursive exec:exec -f /var/www/spring_app_{{ aws_profile }}_{{ item }}/pom.xml
register: app_name
- name: Link our jar file to services
file:
path: "/etc/init.d/spring_app_{{ aws_profile }}_{{ item }}"
state: link
src: "/var/www/spring_app_{{ aws_profile }}_{{ item }}/target/{{ app_name.stdout }}-{{ app_version }}.jar"
follow: no
- name: Chown directory to user other than root
file:
path: "/var/www/spring_app_{{ aws_profile }}_{{ item }}"
owner: "{{ ansible_user }}"
recurse: yes
mode: u=rwX,g=rX,o=rX
- name: Create a custom init.d script that takes our custom arguments
template:
dest: "/etc/init.d/spring_app_{{ aws_profile }}_{{ item }}_args"
src: "spring-with-args"
mode: u=rwx,g=rX,o=rX
- name: Chmod init.d script with +x
file:
path: "/etc/init.d/spring_app_{{ aws_profile }}_{{ item }}_args"
mode: "a+x"
- name: Auto start our spring app on boot on defaults
command: "update-rc.d spring_app_{{ aws_profile }}_{{ item }}_args defaults"
args:
chdir: "/etc/init.d/"
- name: Auto start our spring app on boot enable 5
command: "update-rc.d spring_app_{{ aws_profile }}_{{ item }}_args enable 5"
args:
chdir: "/etc/init.d/"
|
roles/deploy_spring_app/tasks/package-app.yml
|
---
- name: Setting Variables
set_fact:
dlipy3_pip: dlipy3_pip_post_install.txt
dlipy3_conda: dlipy3_conda_post_install.txt
dlipy2_pip: dlipy2_pip_post_install.txt
dlipy2_conda: dlipy2_conda_post_install.txt
dlinsights_pip: dlinsights_pip_post_install.txt
dlinsights_conda: dlinsights_conda_post_install.txt
- name: Set dependencies directory variable
set_fact:
dependencies_dir: "{{ hostvars['localhost']['deps_path_local'] }}"
- name: Set installation directory variable
set_fact:
install_dir: "/opt/anaconda3"
- name: Get route to client
command: "{{ hostvars['localhost']['python_executable_local'] }} \
{{ hostvars['localhost']['scripts_path_local'] }}/python/ip_route_get_to.py \
{{ inventory_hostname }}"
delegate_to: localhost
register: host_ip
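# Pattern repeated for each environment below: skip the snapshot if the
# post-install file already exists, otherwise activate the env, dump the
# package list, and fetch the result back to the deployer.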
#dlipy3 env
- name: Check if dlipy3 post data exists (PIP)
stat:
path: "{{ ansible_env.HOME }}/{{ dlipy3_pip }}"
register: dlipy3_pip_result
- name: Activate dlipy3 environment and gather post data (PIP)
shell: "source {{ install_dir }}/bin/activate dlipy3 && {{ item }}"
loop:
- "conda list > {{ vars.dlipy3_pip }} "
become: yes
when: not dlipy3_pip_result.stat.exists
- name: Copy data back to deployer (PIP)
fetch:
src: "{{ ansible_env.HOME }}/{{ dlipy3_pip }}"
dest: "{{ dependencies_dir }}"
when: not dlipy3_pip_result.stat.exists
- name: Check if dlipy3 post data exists (Conda)
stat:
path: "{{ ansible_env.HOME }}/{{ dlipy3_conda }}"
register: dlipy3_conda_result
- name: Activate dlipy3 environment and gather post data (Conda)
shell: "source {{ install_dir }}/bin/activate dlipy3 && {{ item }}"
loop:
- "conda list --explicit > {{ vars.dlipy3_conda }}"
become: yes
when: not dlipy3_conda_result.stat.exists
- name: Copy data back to deployer (Conda)
fetch:
src: "{{ ansible_env.HOME }}/{{ dlipy3_conda }}"
dest: "{{ dependencies_dir }}"
when: not dlipy3_conda_result.stat.exists
#dlipy2 env
- name: Check if dlipy2 post data exists (PIP)
stat:
path: "{{ ansible_env.HOME }}/{{ dlipy2_pip }}"
register: dlipy2_pip_result
- name: Activate dlipy2 environment and gather post data (PIP)
shell: "source {{ install_dir }}/bin/activate dlipy2 && {{ item }}"
loop:
- "conda list > {{ vars.dlipy2_pip }}"
become: yes
when: not dlipy2_pip_result.stat.exists
- name: Copy data back to deployer (PIP)
fetch:
src: "{{ ansible_env.HOME }}/{{ dlipy2_pip }}"
dest: "{{ dependencies_dir }}"
when: not dlipy2_pip_result.stat.exists
- name: Check if dlipy2 post data exists (Conda)
stat:
path: "{{ ansible_env.HOME }}/{{ dlipy2_conda }}"
register: dlipy2_conda_result
- name: Activate dlipy2 environment and gather post data (Conda)
shell: "source {{ install_dir }}/bin/activate dlipy2 && {{ item }}"
loop:
- "conda list --explicit > {{ vars.dlipy2_conda }}"
become: yes
when: not dlipy2_conda_result.stat.exists
- name: Copy data back to deployer (Conda)
fetch:
src: "{{ ansible_env.HOME }}/{{ dlipy2_conda }}"
dest: "{{ dependencies_dir }}"
when: not dlipy2_conda_result.stat.exists
#dlinsights env
- name: Check if dlinsights post data exists (PIP)
stat:
path: "{{ ansible_env.HOME }}/{{ dlinsights_pip }}"
register: dlinsights_pip_result
- name: Activate dlinsights and gather post data (PIP)
shell: "source {{ install_dir }}/bin/activate dlinsights && {{ item }}"
loop:
- "conda list > {{ vars.dlinsights_pip }}"
become: yes
when: not dlinsights_pip_result.stat.exists
- name: Copy data back to deployer (PIP)
fetch:
src: "{{ ansible_env.HOME }}/{{ dlinsights_pip }}"
dest: "{{ dependencies_dir }}"
when: not dlinsights_pip_result.stat.exists
- name: Check if dlinsights post data exists (Conda)
stat:
path: "{{ ansible_env.HOME }}/{{ dlinsights_conda }}"
register: dlinsights_conda_result
- name: Activate dlinsights and gather post data (Conda)
shell: "source {{ install_dir }}/bin/activate dlinsights && {{ item }}"
loop:
- "conda list --explicit > {{ vars.dlinsights_conda }}"
become: yes
when: not dlinsights_conda_result.stat.exists
- name: Copy data back to deployer (Conda)
fetch:
src: "{{ ansible_env.HOME }}/{{ dlinsights_conda }}"
dest: "{{ dependencies_dir }}"
when: not dlinsights_conda_result.stat.exists
|
software/wmla120_ansible/engr_mode_env_post_install_gather.yml
|
homepage: https://github.com/GianlucaGuarini/executor
changelog-type: markdown
hash: d83a3d61ba4735ac6954c5da6bdb1c55ca289786aec9283db2bd802edc69031b
test-bench-deps:
executor: -any
base: ==4.9.*
hspec: ! '>=2.2'
process: ! '>=1.4 && <1.5'
async: ! '>=2.1 && <2.2'
doctest: ! '>=0.8.0'
maintainer: <EMAIL>
synopsis: Shell helpers
changelog: ! '# Revision history for executor
## 0.0.4 -- 2017-08-27
* Add better unit test
* Deprecate `execSync` and `execListSync` in favor of `exec` and `execSequenceSync`
* Return IO (String) to display the result of the commands triggered
## 0.0.3 -- 2017-08-20
* Reducing unecessary code
## 0.0.2 -- 2017-08-20
* Make the code a bit cleaner and consistent
## 0.0.1 -- 2017-08-19
* First version. Released on an unsuspecting world.
'
basic-deps:
base: ! '>=4.9 && <4.10'
process: ! '>=1.4 && <1.5'
async: ! '>=2.1 && <2.2'
all-versions:
- 0.0.1
- 0.0.2
- 0.0.3
- 0.0.4
author: <NAME>
latest: 0.0.4
description-type: markdown
description: ! "# executor\nHaskell module to execute single or multiple shell commands\n\n[![Build
Status][travis-image]][travis-url]\n[![MIT License][license-image]][license-url]\n\n#
API\n\n## exec\n\nExecute a single shell command returning its output\n\n```hs\nimport
Executor (exec)\n\nmain = do\n -- execute a simple `ls` in the current folder\n
\ res <- exec \"echo hi\"\n -- hi\\n\n```\n\n## execSequenceSync\n\nExecute a list
of shell commands in sequence synchronously returning their results in a list\n\n```hs\nimport
Executor (execSequenceSync)\n\nmain = do\n -- execute synchronously the following
commands\n res <- execSequenceSync [\n \"echo hi\",\n \"sleep 1\",\n
\ \"echo goodbye\"\n ]\n -- [\"hi\\n\", \"\", \"goodbye\\n\"]\n```\n\n[travis-image]:https://img.shields.io/travis/GianlucaGuarini/executor.svg?style=flat-square\n[travis-url]:https://travis-ci.org/GianlucaGuarini/executor\n\n[license-image]:http://img.shields.io/badge/license-MIT-000000.svg?style=flat-square\n[license-url]:LICENSE\n"
license-name: MIT
|
packages/ex/executor.yaml
|
name: ISCSIServer
uid: '@azure/arm-storsimple1200series.ISCSIServer'
package: '@azure/arm-storsimple1200series'
summary: ''
fullName: ISCSIServer
remarks: ''
isPreview: false
isDeprecated: false
type: interface
properties:
- name: backupScheduleGroupId
uid: '@azure/arm-storsimple1200series.ISCSIServer.backupScheduleGroupId'
package: '@azure/arm-storsimple1200series'
summary: ''
fullName: backupScheduleGroupId
remarks: ''
isPreview: false
isDeprecated: false
syntax:
content: 'backupScheduleGroupId: string'
return:
description: ''
type: string
- name: chapId
uid: '@azure/arm-storsimple1200series.ISCSIServer.chapId'
package: '@azure/arm-storsimple1200series'
summary: ''
fullName: chapId
remarks: ''
isPreview: false
isDeprecated: false
syntax:
content: 'chapId?: undefined | string'
return:
description: ''
type: undefined | string
- name: description
uid: '@azure/arm-storsimple1200series.ISCSIServer.description'
package: '@azure/arm-storsimple1200series'
summary: ''
fullName: description
remarks: ''
isPreview: false
isDeprecated: false
syntax:
content: 'description?: undefined | string'
return:
description: ''
type: undefined | string
- name: id
uid: '@azure/arm-storsimple1200series.ISCSIServer.id'
package: '@azure/arm-storsimple1200series'
summary: ''
fullName: id
remarks: ''
isPreview: false
isDeprecated: false
syntax:
content: 'id?: undefined | string'
return:
description: ''
type: undefined | string
- name: name
uid: '@azure/arm-storsimple1200series.ISCSIServer.name'
package: '@azure/arm-storsimple1200series'
summary: ''
fullName: name
remarks: ''
isPreview: false
isDeprecated: false
syntax:
content: 'name?: undefined | string'
return:
description: ''
type: undefined | string
- name: reverseChapId
uid: '@azure/arm-storsimple1200series.ISCSIServer.reverseChapId'
package: '@azure/arm-storsimple1200series'
summary: ''
fullName: reverseChapId
remarks: ''
isPreview: false
isDeprecated: false
syntax:
content: 'reverseChapId?: undefined | string'
return:
description: ''
type: undefined | string
- name: storageDomainId
uid: '@azure/arm-storsimple1200series.ISCSIServer.storageDomainId'
package: '@azure/arm-storsimple1200series'
summary: ''
fullName: storageDomainId
remarks: ''
isPreview: false
isDeprecated: false
syntax:
content: 'storageDomainId: string'
return:
description: ''
type: string
- name: type
uid: '@azure/arm-storsimple1200series.ISCSIServer.type'
package: '@azure/arm-storsimple1200series'
summary: ''
fullName: type
remarks: ''
isPreview: false
isDeprecated: false
syntax:
content: 'type?: undefined | string'
return:
description: ''
type: undefined | string
extends: <xref uid="@azure/arm-storsimple1200series.BaseModel" />
|
docs-ref-autogen/@azure/arm-storsimple1200series/ISCSIServer.yml
|
documentType: LandingData
title: Azure AD Conditional Access documentation
metadata:
document_id: null
title: 'Azure Active Directory Conditional Access documentation – tutorials, quickstarts, concepts, and references | Microsoft Docs'
description: 'Learn how to configure and test Azure Active Directory Conditional Access.'
services: active-directory
ms.subservice: conditional-access
author: MicrosoftGuyJFlo
manager: daveba
ms.service: active-directory
ms.tgt_pltfrm: na
ms.devlang: na
ms.topic: landing-page
ms.date: 08/14/2018
ms.author: joflore
abstract:
description: 'Learn how to strike a balance between security and productivity by configuring Azure AD Conditional Access. Conditional Access policies let you take into account how your cloud apps are being accessed when making access decisions. For example, access decisions can consider the following questions:<ul><li>What is the network location of the user?</li><li>Was the access attempt made from a managed device?</li><li>Which client application initiated the connection attempt?</li><li>Was the access attempt made with a risky sign-in?</li></ul>The following resources help you get up to speed quickly.'
aside:
image:
alt: null
height: 110
src: ./media/index/video.png
width: 246
title: Understanding Conditional Access
href: 'https://www.youtube.com/watch?v=XruceejcCKQ'
width: 246
sections:
- title: 5-minute quickstarts
items:
- type: paragraph
text: 'Learn how to configure Conditional Access policies for common access scenarios.'
- type: list
style: icon48
items:
- image:
src: ./media/index/i_security-management.png
text: Require MFA for specific apps
href: app-based-mfa.md
- image:
src: ./media/index/i_security-management.png
text: Block access when session risk is detected
href: app-sign-in-risk.md
- image:
src: ./media/index/i_security-management.png
text: Require acceptance of terms of use
href: require-tou.md
- title: Step-by-step guides
items:
- type: paragraph
text: 'Learn how to migrate classic Conditional Access policies.'
- type: list
style: unordered
items:
- html: <a href="/azure/active-directory/conditional-access/policy-migration-mfa">Migrate a classic policy that requires multi-factor authentication</a>
|
articles/active-directory/conditional-access/index.yml
|
{{- if .Values.autoIdler.enabled -}}
apiVersion: apps/v1
kind: Deployment
metadata:
name: {{ include "lagoon-core.autoIdler.fullname" . }}
labels:
{{- include "lagoon-core.autoIdler.labels" . | nindent 4 }}
spec:
selector:
matchLabels:
{{- include "lagoon-core.autoIdler.selectorLabels" . | nindent 6 }}
template:
metadata:
annotations:
checksum/api.secret: {{ include (print $.Template.BasePath "/api.secret.yaml") . | sha256sum }}
checksum/secret: {{ include (print $.Template.BasePath "/secret.yaml") . | sha256sum }}
{{- with .Values.autoIdler.podAnnotations }}
{{- toYaml . | nindent 8 }}
{{- end }}
labels:
{{- include "lagoon-core.autoIdler.selectorLabels" . | nindent 8 }}
spec:
{{- with .Values.imagePullSecrets }}
imagePullSecrets:
{{- toYaml . | nindent 8 }}
{{- end }}
securityContext:
{{- toYaml (coalesce .Values.autoIdler.podSecurityContext .Values.podSecurityContext) | nindent 8 }}
containers:
- name: auto-idler
securityContext:
{{- toYaml .Values.autoIdler.securityContext | nindent 10 }}
image: "{{ .Values.autoIdler.image.repository }}:{{ coalesce .Values.autoIdler.image.tag .Values.imageTag .Chart.AppVersion }}"
imagePullPolicy: {{ .Values.autoIdler.image.pullPolicy }}
env:
- name: ELASTICSEARCH_URL
value: {{ .Values.elasticsearchURL }}
- name: JWTSECRET
valueFrom:
secretKeyRef:
name: {{ include "lagoon-core.fullname" . }}-jwtsecret
key: JWTSECRET
- name: LOGSDB_ADMIN_PASSWORD
valueFrom:
secretKeyRef:
name: {{ include "lagoon-core.api.fullname" . }}
key: LOGSDB_ADMIN_PASSWORD
- name: GRAPHQL_ENDPOINT
value: http://{{ include "lagoon-core.api.fullname" . }}:{{ .Values.api.service.port }}/graphql
- name: CRONJOBS
value: |-
30 * * * * /idle-clis.sh
0 */4 * * * /idle-services.sh
# for historical reasons this variable must be set
- name: LAGOON_ENVIRONMENT_TYPE
value: production
resources:
{{- toYaml .Values.autoIdler.resources | nindent 10 }}
{{- with .Values.autoIdler.nodeSelector }}
nodeSelector:
{{- toYaml . | nindent 8 }}
{{- end }}
{{- with .Values.autoIdler.affinity }}
affinity:
{{- toYaml . | nindent 8 }}
{{- end }}
{{- with .Values.autoIdler.tolerations }}
tolerations:
{{- toYaml . | nindent 8 }}
{{- end }}
{{- end }}
|
argocd/lagoon-core/templates/auto-idler.deployment.yaml
|
name: test
on: [pull_request]
jobs:
test:
strategy:
fail-fast: false
matrix:
rust_version: [stable]
platform:
- { target: x86_64-pc-windows-msvc, os: windows-latest }
- { target: x86_64-unknown-linux-gnu, os: ubuntu-latest }
- { target: x86_64-apple-darwin, os: macos-latest }
- { target: aarch64-apple-ios, os: macos-latest }
- { target: aarch64-linux-android, os: ubuntu-latest }
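# Each platform entry pairs a Rust --target triple with the GitHub-hosted
# runner OS that builds it; the mobile targets (ios/android) are cross-compiled
# only, and the test steps below skip them.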
runs-on: ${{ matrix.platform.os }}
steps:
- uses: actions/checkout@v2
- name: install stable
uses: actions-rs/toolchain@v1
with:
toolchain: stable
target: ${{ matrix.platform.target }}
- name: install webkit2gtk (ubuntu only)
if: contains(matrix.platform.target, 'gnu')
run: |
sudo apt-get update
sudo apt-get install -y webkit2gtk-4.0 libgtksourceview-3.0-dev libayatana-appindicator3-dev
- name: install webview2 (windows only)
if: contains(matrix.platform.target, 'windows')
shell: pwsh
run: |
Invoke-WebRequest https://go.microsoft.com/fwlink/p/?LinkId=2124703 -OutFile installwebview.exe -UseBasicParsing
cmd /C start /wait installwebview.exe /silent /install
- name: Get current date
run: echo "CURRENT_DATE=$(date +'%Y-%m-%d')" >> $GITHUB_ENV
- name: Cache cargo registry
uses: actions/cache@v2.1.4
with:
path: ~/.cargo/registry
# Add date to the cache to keep it up to date
key: ${{ matrix.platform }}-stable-cargo-registry-${{ hashFiles('Cargo.toml') }}-${{ env.CURRENT_DATE }}
# Restore from outdated cache for speed
restore-keys: |
${{ matrix.platform }}-stable-cargo-registry-${{ hashFiles('Cargo.toml') }}
${{ matrix.platform }}-stable-cargo-registry-
- name: Cache cargo index
uses: actions/cache@v2.1.4
with:
path: ~/.cargo/git
# Add date to the cache to keep it up to date
key: ${{ matrix.platform }}-stable-cargo-index-${{ hashFiles('Cargo.toml') }}-${{ env.CURRENT_DATE }}
# Restore from outdated cache for speed
restore-keys: |
${{ matrix.platform }}-stable-cargo-index-${{ hashFiles('Cargo.toml') }}
${{ matrix.platform }}-stable-cargo-index-
- name: Cache cargo target
uses: actions/cache@v2
with:
path: target
# Add date to the cache to keep it up to date
key: ${{ matrix.platform }}-stable-cargo-core-${{ hashFiles('Cargo.toml') }}-${{ env.CURRENT_DATE }}
# Restore from outdated cache for speed
restore-keys: |
${{ matrix.platform }}-stable-cargo-core-${{ hashFiles('Cargo.toml') }}
${{ matrix.platform }}-stable-cargo-core-
- name: build wry
run: cargo build --features tray --target ${{ matrix.platform.target }}
- name: build tests and examples
shell: bash
if: (
!contains(matrix.platform.target, 'android') &&
!contains(matrix.platform.target, 'ios'))
run: cargo test --no-run --verbose --features tray --target ${{ matrix.platform.target }}
- name: run tests
if: (
!contains(matrix.platform.target, 'android') &&
!contains(matrix.platform.target, 'ios'))
run: cargo test --verbose --features tray --target ${{ matrix.platform.target }}
|
.github/workflows/build.yml
|
---
http_interactions:
- request:
method: get
uri: https://graph.facebook.com/v2.4/search?access_token=DUMMY_TOKEN&q=disney&type=adinterest
body:
encoding: US-ASCII
string: ''
headers:
User-Agent:
- Faraday v0.8.9
Accept-Encoding:
- gzip;q=1.0,deflate;q=0.6,identity;q=0.3
Accept:
- ! '*/*'
response:
status:
code: 200
message: !binary |-
T0s=
headers:
!binary "QWNjZXNzLUNvbnRyb2wtQWxsb3ctT3JpZ2lu":
- !binary |-
Kg==
!binary "Q29udGVudC1UeXBl":
- !binary |-
YXBwbGljYXRpb24vanNvbjsgY2hhcnNldD1VVEYtOA==
!binary "WC1GYi1UcmFjZS1JZA==":
- !binary |-
RDMzcTV0aWFKVTQ=
!binary "WC1GYi1SZXY=":
- !binary |-
MTkyODE1NA==
!binary "RXRhZw==":
- !binary |-
IjNhZDFhYmUwNGRmZGQ4NGJjOGJjMjlhYTg4MjNhODFhNjQzYjFjZmQi
!binary "UHJhZ21h":
- !binary |-
bm8tY2FjaGU=
!binary "Q2FjaGUtQ29udHJvbA==":
- !binary |-
cHJpdmF0ZSwgbm8tY2FjaGUsIG5vLXN0b3JlLCBtdXN0LXJldmFsaWRhdGU=
!binary "RmFjZWJvb2stQXBpLVZlcnNpb24=":
- !binary |-
djIuNA==
!binary "RXhwaXJlcw==":
- !binary |-
U2F0LCAwMSBKYW4gMjAwMCAwMDowMDowMCBHTVQ=
!binary "VmFyeQ==":
- !binary |-
QWNjZXB0LUVuY29kaW5n
!binary "Q29udGVudC1FbmNvZGluZw==":
- !binary |-
Z3ppcA==
!binary "WC1GYi1EZWJ1Zw==":
- !binary |-
dlhlTm01TCtWeGljVFFiblVOaVdHTUpnQ3BFWmsrV0JCbGt5Z1FENUZVOHRo
ckxuazhoTGV4UU8zeGpTVEp6dkF5WVBCUWtkUkhla1l6WmlNTmpSVFE9PQ==
!binary "RGF0ZQ==":
- !binary |-
VGh1LCAxMCBTZXAgMjAxNSAwOTo0NzoxNCBHTVQ=
!binary "Q29ubmVjdGlvbg==":
- !binary |-
a2VlcC1hbGl2ZQ==
!binary "Q29udGVudC1MZW5ndGg=":
- !binary |-
Mzg0
body:
encoding: ASCII-8BIT
string: !binary |-
H4sIAAAAAAAAA62Ty07DMBBFf8XyCqQs/LbpjseSRwVILFCFrMTQEYkTxU5R
qfrvuKpKWzUtWbCzrJm5Z+bOLHBho8Wj1wWGAo+wIoQzTSRjxEicYW8rl76f
pw692DKiGwjezdF1XTXWz1OA7QpwPndvAb5TJFVSGWYYyXBj4zQVnmS4cCFv
oYlQezzyXVlmONYN5KnwVRfAuxCQ9QUCX3QhtnO8zHZxjFDaKEq3ODsohwgq
JSmihxKMXd2Ubl+SSMK1VlT1SqIx5LFrXejR5heMST5Ue0D3yQ3Kkxdii7Lx
YGq9d2UPBFFcEDEU4t59rQGcj66NFnyVXnsUmqSuqGGKHV+Jp5go6oDOCphB
SDrnh2SSaJ4WZCjZkPFQowll1Bxxyraf6/RHF+o29lgm07IQ9p+WrW7HcCUu
tky3ECKq33/PZ20demjhA7wt0V09g759EsJQSuVpuP2BaClEOhjdP5BLD5Vd
pW786tUkWpnTmn8OZLL8AZ7kr85bBAAA
http_version:
recorded_at: Thu, 10 Sep 2015 09:47:14 GMT
recorded_with: VCR 2.3.0
|
spec/fixtures/ad_interest_search_disney.yml
|
- name: Obtain the mount path for the application
shell: >
kubectl get pods -n {{ app_ns }} -l {{ app_label }}
-o custom-columns=:.spec.containers[].volumeMounts[].mountPath --no-headers
args:
executable: /bin/bash
register: mount
- name: Record the mount path for the application
set_fact:
mount_path: "{{ mount.stdout }}"
- name: Get the first backup name which is full backup by default
shell: velero get backup | grep {{ schedule_name }} | tail -n1 | awk '{print $1}'
args:
executable: /bin/bash
register: first_full_bkp
- name: Record the first full backup name
set_fact:
first_full_backup: "{{ first_full_bkp.stdout }}"
- name: Get the first incremental backup name
shell: velero get backup | grep {{ schedule_name }} | tail -n2 | head -n1 | awk '{print $1}'
args:
executable: /bin/bash
register: first_incr_bkp
- name: Record the first incremental backup name
set_fact:
first_incremental_backup: "{{ first_incr_bkp.stdout }}"
- name: Get the second incremental backup name
shell: velero get backup | grep {{ schedule_name }} | tail -n3 | head -n1 | awk '{print $1}'
args:
executable: /bin/bash
register: second_incr_bkp
- name: Record the second incremental backup name
set_fact:
second_incremental_backup: "{{ second_incr_bkp.stdout }}"
- name: Get the last full backup name which is after two incremental backups
shell: velero get backup | grep {{ schedule_name }} | tail -n4 | head -n1 | awk '{print $1}'
args:
executable: /bin/bash
register: last_full_bkp
- name: Record the last full backup name
set_fact:
last_full_backup: "{{ last_full_bkp.stdout }}"
- name: Restore the first incremental backup
include: "./restore.yml"
vars:
velero_backup_name: "{{ first_incremental_backup }}"
app_ns_new: "first-incr-restore-ns"
- name: Check the data consistency
shell: >
kubectl exec -ti {{ restore_app_pod }} -n first-incr-restore-ns
-- sh -c "cd {{ mount_path }} && ls"
args:
executable: /bin/bash
register: data_status
failed_when: "'incr-file1' not in data_status.stdout"
- name: Restore the second incremental backup
include: "./restore.yml"
vars:
velero_backup_name: "{{ second_incremental_backup }}"
app_ns_new: "second-incr-restore-ns"
- name: Check the data consistency
shell: >
kubectl exec -ti {{ restore_app_pod }} -n second-incr-restore-ns
-- sh -c "cd {{ mount_path }} && ls"
args:
executable: /bin/bash
register: data_status
failed_when:
- "'incr-file1' not in data_status.stdout"
- "'incr-file2' not in data_status.stdout"
|
experiments/zfs-localpv/functional/backup_and_restore/incremental_restore.yml
|
require: rubocop-rspec
Rails:
Enabled: true
AllCops:
TargetRubyVersion: 2.4
# RuboCop has a bunch of cops enabled by default. This setting tells RuboCop
# to ignore them, so only the ones explicitly set in this file are enabled.
# DisabledByDefault: true
Exclude:
- '**/templates/**/*'
- '**/vendor/**/*'
- 'actionpack/lib/action_dispatch/journey/parser.rb'
- 'railties/test/fixtures/tmp/**/*'
- 'db/schema.rb'
- 'config/initializers/devise.rb'
- 'node_modules/**/*'
- 'bin/webpack'
- 'bin/yarn'
- 'bin/webpack-dev-server'
- 'db/migrate/20190401222059_devise_create_admins.rb'
- 'config/initializers/simple_form.rb'
- 'config/initializers/simple_form_bootstrap.rb'
Style/Documentation:
Enabled: false
Style/FrozenStringLiteralComment:
Enabled: false
Bundler/OrderedGems:
Enabled: false
Style/ClassAndModuleChildren:
Enabled: false
Rails/LexicallyScopedActionFilter:
Exclude:
Style/SymbolArray:
EnforcedStyle: brackets
Metrics/LineLength:
Max: 100
Exclude:
- 'config/environments/production.rb'
- 'config/initializers/backtrace_silencers.rb'
- 'config/initializers/content_security_policy.rb'
Metrics/MethodLength:
Exclude:
- 'db/migrate/*'
- 'lib/tasks/populate.rake'
Metrics/AbcSize:
Exclude:
- 'lib/tasks/populate.rake'
Style/MixinUsage:
Exclude:
- 'bin/*'
Style/ExpandPathArguments:
Exclude:
- 'bin/*'
- 'spec/rails_helper.rb'
Style/StringLiterals:
Exclude:
- 'bin/*'
- 'config/puma.rb'
- 'config/environments/production.rb'
Style/StderrPuts:
Exclude:
- 'bin/*'
Style/BlockComments:
Exclude:
- 'spec/spec_helper.rb'
Layout/SpaceInsideArrayLiteralBrackets:
Exclude:
- 'config/environments/production.rb'
Layout/Tab:
Enabled: false
Style/FormatString:
EnforcedStyle: 'percent'
Layout/AlignHash:
Exclude:
- 'config/initializers/simple_form_bootstrap.rb'
- 'config/initializers/simple_form.rb'
Metrics/BlockLength:
Exclude:
- 'config/environments/development.rb'
- 'spec/**/*'
- 'lib/tasks/populate.rake'
- 'config/routes.rb'
- 'config/initializers/simple_form_bootstrap.rb'
Naming/VariableNumber:
EnforcedStyle: non_integer
RSpec/MultipleExpectations:
Max: 3
Exclude:
- 'spec/features/**/*'
RSpec/ExampleLength:
Max: 30
RSpec/HookArgument:
EnforcedStyle: each
RSpec/LetSetup:
Enabled: false
Style/Lambda:
Exclude:
- 'config/initializers/simple_form_bootstrap.rb'
Lint/UnusedBlockArgument:
Exclude:
- 'config/initializers/simple_form_bootstrap.rb'
Rails/SkipsModelValidations:
Exclude:
- 'app/controllers/admins/sections_controller.rb'
|
.rubocop.yml
|
uid: "com.azure.search.documents.implementation.converters.SuggestOptionsConverter.map*"
fullName: "com.azure.search.documents.implementation.converters.SuggestOptionsConverter.map"
name: "map"
nameWithType: "SuggestOptionsConverter.map"
members:
- uid: "com.azure.search.documents.implementation.converters.SuggestOptionsConverter.map(com.azure.search.documents.implementation.models.SuggestOptions)"
fullName: "com.azure.search.documents.implementation.converters.SuggestOptionsConverter.map(SuggestOptions obj)"
name: "map(SuggestOptions obj)"
nameWithType: "SuggestOptionsConverter.map(SuggestOptions obj)"
summary: "Maps from <xref uid=\"com.azure.search.documents.implementation.models.SuggestOptions\" data-throw-if-not-resolved=\"false\" data-raw-source=\"com.azure.search.documents.implementation.models.SuggestOptions\"></xref> to <xref uid=\"com.azure.search.documents.models.SuggestOptions\" data-throw-if-not-resolved=\"false\" data-raw-source=\"SuggestOptions\"></xref>."
parameters:
- name: "obj"
type: "<xref href=\"com.azure.search.documents.implementation.models.SuggestOptions?alt=com.azure.search.documents.implementation.models.SuggestOptions&text=SuggestOptions\" data-throw-if-not-resolved=\"False\" />"
syntax: "public static SuggestOptions map(SuggestOptions obj)"
returns:
type: "<xref href=\"com.azure.search.documents.models.SuggestOptions?alt=com.azure.search.documents.models.SuggestOptions&text=SuggestOptions\" data-throw-if-not-resolved=\"False\" />"
- uid: "com.azure.search.documents.implementation.converters.SuggestOptionsConverter.map(com.azure.search.documents.models.SuggestOptions)"
fullName: "com.azure.search.documents.implementation.converters.SuggestOptionsConverter.map(SuggestOptions obj)"
name: "map(SuggestOptions obj)"
nameWithType: "SuggestOptionsConverter.map(SuggestOptions obj)"
summary: "Maps from <xref uid=\"com.azure.search.documents.models.SuggestOptions\" data-throw-if-not-resolved=\"false\" data-raw-source=\"SuggestOptions\"></xref> to <xref uid=\"com.azure.search.documents.implementation.models.SuggestOptions\" data-throw-if-not-resolved=\"false\" data-raw-source=\"com.azure.search.documents.implementation.models.SuggestOptions\"></xref>."
parameters:
- name: "obj"
type: "<xref href=\"com.azure.search.documents.models.SuggestOptions?alt=com.azure.search.documents.models.SuggestOptions&text=SuggestOptions\" data-throw-if-not-resolved=\"False\" />"
syntax: "public static SuggestOptions map(SuggestOptions obj)"
returns:
type: "<xref href=\"com.azure.search.documents.implementation.models.SuggestOptions?alt=com.azure.search.documents.implementation.models.SuggestOptions&text=SuggestOptions\" data-throw-if-not-resolved=\"False\" />"
type: "method"
metadata: {}
package: "com.azure.search.documents.implementation.converters"
artifact: com.azure:azure-search-documents:11.2.0-beta.3
|
preview/docs-ref-autogen/com.azure.search.documents.implementation.converters.SuggestOptionsConverter.map.yml
|
{% set version = "1.3.1" %}
{% set posix = 'm2-' if win else '' %}
{% set native = 'm2w64-' if win else '' %}
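# Note: the trailing "# [win]" / "# [not win]" markers below are conda-build
# line selectors; each marked line is included only when the selector matches
# the build platform.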
package:
name: r-seriation
version: {{ version|replace("-", "_") }}
source:
url:
- {{ cran_mirror }}/src/contrib/seriation_{{ version }}.tar.gz
- {{ cran_mirror }}/src/contrib/Archive/seriation/seriation_{{ version }}.tar.gz
sha256: ce63b4b116f1cd4d8ef8865e9c1b9aacd206071d130d3ed72696b0de873d8321
build:
merge_build_host: true # [win]
number: 0
rpaths:
- lib/R/lib/
- lib/
requirements:
build:
- {{ compiler('c') }} # [not win]
- {{ compiler('m2w64_c') }} # [win]
- {{ compiler('fortran') }} # [not win]
- {{ compiler('m2w64_fortran') }} # [win]
- {{ posix }}filesystem # [win]
- {{ posix }}make
- {{ posix }}sed # [win]
- {{ posix }}coreutils # [win]
- {{ posix }}zip # [win]
host:
- r-base
- r-mass
- r-tsp
- r-cluster
- r-colorspace
- r-dendextend
- r-gclus
- r-gplots
- r-qap
- r-registry
run:
- r-base
- {{ native }}gcc-libs # [win]
- r-mass
- r-tsp
- r-cluster
- r-colorspace
- r-dendextend
- r-gclus
- r-gplots
- r-qap
- r-registry
test:
commands:
- $R -e "library('seriation')" # [not win]
- "\"%R%\" -e \"library('seriation')\"" # [win]
about:
home: https://github.com/mhahsler/seriation
license: GPL-3.0-only
summary: Infrastructure for ordering objects with an implementation of several seriation/sequencing/ordination techniques to reorder matrices, dissimilarity matrices, and dendrograms. Also provides (optimally) reordered heatmaps, color images and clustering visualizations like dissimilarity plots, and visual assessment
of cluster tendency plots (VAT and iVAT).
license_family: GPL3
license_file:
- {{ environ["PREFIX"] }}/lib/R/share/licenses/GPL-3
extra:
recipe-maintainers:
- conda-forge/r
# Package: seriation
# Type: Package
# Title: Infrastructure for Ordering Objects Using Seriation
# Version: 1.2-8
# Date: 2019-08-27
# Authors@R: c( person("Michael", "Hahsler", role = c("aut", "cre", "cph"), email = "<EMAIL>"), person("Christian", "Buchta", role = c("aut", "cph")), person("Kurt", "Hornik", role = c("aut", "cph")), person("Fionn", "Murtagh", role = c("ctb", "cph")), person("Michael", "Brusco", role = c("ctb", "cph")), person("Stephanie", "Stahl", role = c("ctb", "cph")), person("Hans-Friedrich", "Koehn", role = c("ctb", "cph")))
# Description: Infrastructure for ordering objects with an implementation of several seriation/sequencing/ordination techniques to reorder matrices, dissimilarity matrices, and dendrograms. Also provides (optimally) reordered heatmaps, color images and clustering visualizations like dissimilarity plots, and visual assessment of cluster tendency plots (VAT and iVAT).
# Classification/ACM: G.1.6, G.2.1, G.4
# URL: https://github.com/mhahsler/seriation
# BugReports: https://github.com/mhahsler/seriation/issues
# Depends: R (>= 2.14.0)
# Imports: TSP, qap, grid, cluster, gclus, dendextend, colorspace, MASS, gplots, registry, methods, stats, grDevices
# Suggests: biclust, testthat, DendSer, GA
# License: GPL-3
# Copyright: The code in src/bea.f is Copyright (C) 1991 <NAME>; src/bbwrcg.f, src/arsa.f and src/bburcg.f are Copyright (C) 2005 <NAME>, <NAME>, and <NAME>. All other code is Copyright (C) <NAME>, <NAME>, and <NAME>.
# NeedsCompilation: yes
# Packaged: 2019-08-27 17:04:01 UTC; hahsler
# Author: <NAME> [aut, cre, cph], <NAME> [aut, cph], <NAME> [aut, cph], <NAME> [ctb, cph], <NAME> [ctb, cph], <NAME> [ctb, cph], <NAME> [ctb, cph]
# Maintainer: <NAME> <<EMAIL>>
# Repository: CRAN
# Date/Publication: 2019-08-27 17:50:02 UTC
|
recipe/meta.yaml
|
openapi: 3.0.0
info:
version: 1.2.1
title: An include file to define NetworkProvider endpoints
license:
name: Apache 2.0
paths:
/api/networkProviders:
post:
operationId: createNetworkProvider
summary: Create a NetworkProvider
description: Create a NetworkProvider, available only to system administrators
parameters: []
security:
- bearer_token: []
tags:
- NetworkProvider
requestBody:
$ref: '#/components/requestBodies/CreateNetworkProviderInput'
responses:
'200':
description: NetworkProvider created
content:
application/json:
schema:
$ref: '../api.yml#/components/schemas/ResourceCreatedResponse'
'403':
description: Authenticated user not allowed to perform this operation
'400':
description: Bad request
get:
operationId: listNetworkProviders
summary: List NetworkProviders
description: List NetworkProviders, available only to system administrators
parameters:
- $ref: '../api.yml#/components/parameters/offsetParam'
- $ref: '../api.yml#/components/parameters/limitParam'
- $ref: '../api.yml#/components/parameters/nameSearch'
security:
- bearer_token: []
tags:
- NetworkProvider
responses:
'200':
description: Query response with totalCount and list of records
content:
application/json:
schema:
$ref: '#/components/schemas/ListNetworkProviderResponse'
'403':
description: Authenticated user not allowed to perform this operation
'400':
description: Bad request
/api/networkProviders/{id}:
parameters:
- $ref: '../api.yml#/components/parameters/idParam'
get:
operationId: loadNetworkProvider
summary: Get NetworkProvider
description: Get NetworkProvider
parameters: []
security:
- bearer_token: []
tags:
- NetworkProvider
responses:
'200':
description: NetworkProvider record
content:
application/json:
schema:
$ref: '#/components/schemas/NetworkProviderResponse'
'403':
description: Authenticated user not allowed to perform this operation
'404':
description: NetworkProvider not found
put:
operationId: updateNetworkProvider
summary: Update NetworkProvider
description: Update NetworkProvider, available only to system administrators
parameters: []
security:
- bearer_token: []
tags:
- NetworkProvider
requestBody:
$ref: '#/components/requestBodies/UpdateNetworkProviderInput'
responses:
'204':
description: NetworkProvider update succeeded
'400':
description: Bad request
'403':
description: Authenticated user not allowed to perform this operation
'404':
description: NetworkProvider not found
delete:
operationId: deleteNetworkProvider
summary: Delete NetworkProvider
description: Delete NetworkProvider, available only to system administrators
parameters: []
security:
- bearer_token: []
tags:
- NetworkProvider
responses:
'204':
description: NetworkProvider record was deleted
'403':
description: Authenticated user not allowed to perform this operation
'404':
description: NetworkProvider not found
components:
schemas:
NetworkProvider:
type: object
required:
- name
properties:
name:
type: string
NetworkProviderResponse:
allOf:
- $ref: '#/components/schemas/NetworkProvider'
- type: object
properties:
id:
type: string
ListNetworkProviderResponse:
allOf:
- $ref: '../api.yml#/components/schemas/ListResponse'
- type: object
properties:
records:
type: array
items:
$ref: '#/components/schemas/NetworkProviderResponse'
default: []
requestBodies:
CreateNetworkProviderInput:
content:
application/json:
schema:
$ref: '#/components/schemas/NetworkProvider'
description: NetworkProvider to be created
required: true
UpdateNetworkProviderInput:
content:
application/json:
schema:
$ref: '#/components/schemas/NetworkProvider'
description: NetworkProvider properties to be updated
required: true
|
docs/openapi/endpoints/network-provider.yml
|
name: ubuntu
on: [push]
jobs:
build-linux:
runs-on: ubuntu-20.04
defaults:
run:
shell: bash -l {0}
strategy:
matrix:
python-version: [3.8]
steps:
- uses: actions/checkout@v2
name: Check out repository
- name: Setup Miniconda
uses: conda-incubator/setup-miniconda@v2.1.1
with:
auto-activate-base: true
activate-environment: ""
- name: Install system dependencies
run: |
sudo apt-get -y install libhdf5-serial-dev
sudo apt-get -y install libeigen3-dev
sudo apt-get -y install lcov
sudo apt-get -y install doxygen
sudo apt-get -y install cmake
- name: Build and test OpenCAP
run: |
cd opencap; mkdir build;
cd build
cmake -DCODE_COVERAGE=ON -DCMAKE_BUILD_TYPE=RelWithDebInfo ..
make
ctest --output-on-failure
cd ../../
- name: Build and test PyOpenCAP
run: |
conda install -c psi4/label/dev psi4
pip install pytest
pip install pytest-cov
pip install h5py==3.2.1
pip install numpy
pip install pyscf
pip install codecov
pip install numgrid
pip install pandas
pip install .
cd pyopencap
pytest --cov=pyopencap --cov-report=xml
cd ..
- name: Upload reports
run: |
cd opencap/build
lcov --directory . --capture --output-file coverage.info
lcov --remove coverage.info '/usr/*' "${HOME}"'/.cache/*' --output-file coverage.info
lcov --remove coverage.info '*eigen*' "${HOME}"'/.cache/*' --output-file coverage.info
lcov --remove coverage.info '*_deps*' "${HOME}"'/.cache/*' --output-file coverage.info
lcov --remove coverage.info '*tests*' "${HOME}"'/.cache/*' --output-file coverage.info
lcov --list coverage.info
bash <(curl -s https://codecov.io/bash) -f coverage.info || echo "Codecov did not collect coverage C++ reports"
cd ../../pyopencap
bash <(curl -s https://codecov.io/bash) -f coverage.xml || echo "Codecov did not collect coverage Python reports"
|
.github/workflows/ubuntu.yml
|
name: cftime test
on:
pull_request:
push:
branches: [master]
jobs:
run:
runs-on: ${{ matrix.os }}
strategy:
matrix:
python-version: [ "3.7", "3.8", "3.9"]
os: [windows-latest, ubuntu-latest, macos-latest]
platform: [x64, x32]
# debug on a single os/platform/python version
# python-version: [ "3.9"]
# os: [ubuntu-latest]
# platform: [x64]
exclude:
- os: macos-latest
platform: x32
env:
TRAVIS_BUILD_DIR: ${{ github.workspace }}
TRAVIS_REPO_SLUG: ${{ github.repository }}
steps:
- uses: actions/checkout@v2
- name: Setup Conda
uses: s-weigand/setup-conda@v1
with:
activate-conda: false
conda-channels: conda-forge
- name: Setup Conda Env
shell: bash -l {0}
run: |
conda create --name TEST python=${{ matrix.python-version }} --file requirements.txt --file requirements-dev.txt
source activate TEST
        # enabling coverage slows down the tests dramatically
#CYTHON_COVERAGE=1 pip install -v -e . --no-deps --force-reinstall
pip install -v -e . --no-deps --force-reinstall
conda info --all
conda list
- name: Run Tests
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
shell: bash -l {0}
run: |
source activate TEST
py.test -vv test
# - name: Coveralls
# env:
# GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# COVERALLS_REPO_TOKEN: ${{ secrets.GITHUB_TOKEN }}
# if: ${{ matrix.os =='ubuntu-latest' && matrix.python-version == '3.9' && matrix.platform == 'x64' }}
# run: |
# source activate TEST
# coveralls --service=github-actions
- name: Tarball
if: startsWith(matrix.os,'ubuntu')
shell: bash -l {0}
run: |
source activate TEST
python setup.py --version ;
pip wheel . -w dist --no-deps ;
check-manifest --verbose ;
twine check dist/* ;
- name: Docs
if: startsWith(matrix.os,'ubuntu')
shell: bash -l {0}
run: |
source activate TEST
pushd docs && make html linkcheck O=-W && popd;
#rm -f docs/build/html/_static/jquery-*.js
#rm -f docs/build/html/_static/underscore-*.js
#rm -f docs/build/html/.buildinfo
#sh ./ci/deploy_docs.sh
run_aarch64:
name: "run (aarch64, ${{ matrix.pyver }})"
strategy:
matrix:
pyver: [cp37-cp37m, cp38-cp38, cp39-cp39]
fail-fast: false
runs-on: ubuntu-latest
env:
py: /opt/python/${{ matrix.pyver }}/bin/python
img: quay.io/pypa/manylinux2014_aarch64
python-version: ${{ matrix.pyver }}
steps:
- uses: actions/checkout@v2
- name: Set up QEMU
id: qemu
uses: docker/setup-qemu-action@v1
- name: Build and Test
run: |
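          # Build and test inside an aarch64 manylinux container, emulated via the QEMU handlers registered in the previous step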
docker run --rm -v ${{ github.workspace }}:/ws:rw --workdir=/ws \
${{ env.img }} \
bash -exc 'yum install -y wget && \
echo "Set Up Conda Env" && \
wget https://github.com/conda-forge/miniforge/releases/download/4.8.2-1/Miniforge3-4.8.2-1-Linux-aarch64.sh -O miniconda.sh && \
export MINICONDA_PATH=/root/miniconda && \
chmod +x miniconda.sh && ./miniconda.sh -b -p $MINICONDA_PATH && \
export PATH=$MINICONDA_PATH/bin:$PATH && \
export Python=${{ env.python-version }} && \
conda create --name TEST python=${Python:2:1}.${Python:3:1} --file requirements.txt --file requirements-dev.txt && \
source activate TEST && \
pip install -v -e . --no-deps --force-reinstall && \
conda info --all && \
conda list && \
py.test -vv test && \
python setup.py --version && \
pip wheel . -w dist --no-deps && \
check-manifest --verbose && \
pip install readme-renderer && \
twine check dist/* && \
deactivate'
|
.github/workflows/miniconda.yml
|
name: ci
on:
push:
pull_request:
jobs:
build:
runs-on: ubuntu-latest
strategy:
matrix:
node-version: [12.x, 14.x]
steps:
- uses: actions/checkout@v2
- name: Use Node.js ${{ matrix.node-version }}
uses: actions/setup-node@v1
with:
node-version: ${{ matrix.node-version }}
- run: npm install
- run: npm run build
- name: Upload artifacts
uses: actions/upload-artifact@v2
with:
name: index.js
path: index.js
deploy:
needs: ['build']
if: success() && github.event_name == 'push' && github.repository_owner == 'Neur0toxine' && github.ref == 'refs/heads/master'
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Unshallow repository and determine previous version
run: |
git fetch origin --unshallow --tags
export PREVIOUS_VERSION=`git describe --abbrev=0 --tags || true`
echo PREVIOUS_VERSION=$PREVIOUS_VERSION >> $GITHUB_ENV
echo Previous version is $PREVIOUS_VERSION
- uses: actions/download-artifact@v2
with:
name: index.js
- name: Determine new version
run: |
export CURRENT_VERSION=`cat index.js | grep '@version' | awk '{print $3;}'`
echo CURRENT_VERSION=$CURRENT_VERSION >> $GITHUB_ENV
echo Current version is $CURRENT_VERSION
- name: Create Release
id: create_release
uses: actions/create-release@v1
if: env.PREVIOUS_VERSION != env.CURRENT_VERSION
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
tag_name: ${{ env.CURRENT_VERSION }}
release_name: ${{ env.CURRENT_VERSION }}
body: ''
draft: false
prerelease: false
- name: Upload Release Asset
id: upload-release-asset
if: env.PREVIOUS_VERSION != env.CURRENT_VERSION
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
upload_url: ${{ steps.create_release.outputs.upload_url }}
asset_path: index.js
asset_name: index.js
asset_content_type: text/javascript
|
.github/workflows/ci.yml
|
name: Like expression
tests:
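  # In LIKE patterns, % matches any (possibly empty) sequence and _ matches exactly one character; \% and \_ match the literal characters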
- name: Exact match (1)
expression: "'abc' LIKE 'abc'"
result: true
- name: Exact match (2)
expression: "'ab\\c' LIKE 'ab\\c'"
result: true
- name: Exact match (negate)
expression: "'abc' NOT LIKE 'abc'"
result: false
- name: Percentage operator (1)
expression: "'abc' LIKE 'a%b%c'"
result: true
- name: Percentage operator (2)
expression: "'azbc' LIKE 'a%b%c'"
result: true
- name: Percentage operator (3)
expression: "'azzzbzzzc' LIKE 'a%b%c'"
result: true
- name: Percentage operator (4)
expression: "'a%b%c' LIKE 'a%b%c'"
result: true
- name: Percentage operator (5)
expression: "'ac' LIKE 'abc'"
result: false
- name: Percentage operator (6)
expression: "'' LIKE 'abc'"
result: false
- name: Percentage operator (7)
expression: "'.ab.cde.' LIKE '.%.%.'"
result: true
- name: Percentage operator (8)
expression: "'ab.cde' LIKE '.%.%.'"
result: false
- name: Underscore operator (1)
expression: "'abc' LIKE 'a_b_c'"
result: false
- name: Underscore operator (2)
expression: "'a_b_c' LIKE 'a_b_c'"
result: true
- name: Underscore operator (3)
expression: "'abzc' LIKE 'a_b_c'"
result: false
- name: Underscore operator (4)
expression: "'azbc' LIKE 'a_b_c'"
result: false
- name: Underscore operator (5)
expression: "'azbzc' LIKE 'a_b_c'"
result: true
- name: Underscore operator (6)
expression: "'.a.b.' LIKE '._._.'"
result: true
- name: Underscore operator (7)
expression: "'abcd.' LIKE '._._.'"
result: false
- name: Escaped underscore wildcards (1)
expression: "'a_b_c' LIKE 'a\\_b\\_c'"
result: true
- name: Escaped underscore wildcards (2)
expression: "'a_b_c' NOT LIKE 'a\\_b\\_c'"
result: false
- name: Escaped underscore wildcards (3)
expression: "'azbzc' LIKE 'a\\_b\\_c'"
result: false
- name: Escaped underscore wildcards (4)
expression: "'abc' LIKE 'a\\_b\\_c'"
result: false
- name: Escaped percentage wildcards (1)
expression: "'abc' LIKE 'a\\%b\\%c'"
result: false
- name: Escaped percentage wildcards (2)
expression: "'a%b%c' LIKE 'a\\%b\\%c'"
result: true
- name: Escaped percentage wildcards (3)
expression: "'azbzc' LIKE 'a\\%b\\%c'"
result: false
- name: Escaped percentage wildcards (4)
expression: "'abc' LIKE 'a\\%b\\%c'"
result: false
- name: With access to event attributes
expression: "myext LIKE 'abc%123\\%456\\_d_f'"
eventOverrides:
myext: "abc123123%456_dzf"
result: true
- name: With access to event attributes (negated)
expression: "myext NOT LIKE 'abc%123\\%456\\_d_f'"
eventOverrides:
myext: "abc123123%456_dzf"
result: false
|
cesql_tck/like_expression.yaml
|
name: TestCaseAssociatedResult
uid: azure-devops-extension-api.TestCaseAssociatedResult
package: azure-devops-extension-api
summary: ''
fullName: TestCaseAssociatedResult
remarks: ''
isPreview: false
isDeprecated: false
type: interface
properties:
- name: completedDate
uid: azure-devops-extension-api.TestCaseAssociatedResult.completedDate
package: azure-devops-extension-api
summary: ''
fullName: completedDate
remarks: ''
isPreview: false
isDeprecated: false
syntax:
content: 'completedDate: Date'
return:
description: ''
type: Date
- name: configuration
uid: azure-devops-extension-api.TestCaseAssociatedResult.configuration
package: azure-devops-extension-api
summary: ''
fullName: configuration
remarks: ''
isPreview: false
isDeprecated: false
syntax:
content: 'configuration: TestConfigurationReference'
return:
description: ''
type: <xref uid="azure-devops-extension-api.TestConfigurationReference" />
- name: outcome
uid: azure-devops-extension-api.TestCaseAssociatedResult.outcome
package: azure-devops-extension-api
summary: ''
fullName: outcome
remarks: ''
isPreview: false
isDeprecated: false
syntax:
content: 'outcome: UserFriendlyTestOutcome'
return:
description: ''
type: <xref uid="azure-devops-extension-api.UserFriendlyTestOutcome" />
- name: plan
uid: azure-devops-extension-api.TestCaseAssociatedResult.plan
package: azure-devops-extension-api
summary: ''
fullName: plan
remarks: ''
isPreview: false
isDeprecated: false
syntax:
content: 'plan: TestPlanReference'
return:
description: ''
type: <xref uid="azure-devops-extension-api.TestPlanReference" />
- name: pointId
uid: azure-devops-extension-api.TestCaseAssociatedResult.pointId
package: azure-devops-extension-api
summary: ''
fullName: pointId
remarks: ''
isPreview: false
isDeprecated: false
syntax:
content: 'pointId: number'
return:
description: ''
type: number
- name: resultId
uid: azure-devops-extension-api.TestCaseAssociatedResult.resultId
package: azure-devops-extension-api
summary: ''
fullName: resultId
remarks: ''
isPreview: false
isDeprecated: false
syntax:
content: 'resultId: number'
return:
description: ''
type: number
- name: runBy
uid: azure-devops-extension-api.TestCaseAssociatedResult.runBy
package: azure-devops-extension-api
summary: ''
fullName: runBy
remarks: ''
isPreview: false
isDeprecated: false
syntax:
content: 'runBy: IdentityRef'
return:
description: ''
type: <xref uid="azure-devops-extension-api.IdentityRef" />
- name: runId
uid: azure-devops-extension-api.TestCaseAssociatedResult.runId
package: azure-devops-extension-api
summary: ''
fullName: runId
remarks: ''
isPreview: false
isDeprecated: false
syntax:
content: 'runId: number'
return:
description: ''
type: number
- name: suite
uid: azure-devops-extension-api.TestCaseAssociatedResult.suite
package: azure-devops-extension-api
summary: ''
fullName: suite
remarks: ''
isPreview: false
isDeprecated: false
syntax:
content: 'suite: TestSuiteReference'
return:
description: ''
type: <xref uid="azure-devops-extension-api.TestSuiteReference" />
- name: tester
uid: azure-devops-extension-api.TestCaseAssociatedResult.tester
package: azure-devops-extension-api
summary: ''
fullName: tester
remarks: ''
isPreview: false
isDeprecated: false
syntax:
content: 'tester: IdentityRef'
return:
description: ''
type: <xref uid="azure-devops-extension-api.IdentityRef" />
|
docs-ref-autogen/azure-devops-extension-api/TestCaseAssociatedResult.yml
|
AWSTemplateFormatVersion: 2010-09-09
Description: Billing-BillingSubscriptions Template.
This creates Standard Subscriptions to Billing SNS Topics.
Metadata:
AWS::CloudFormation::Interface:
ParameterGroups:
- Label:
default: Stack Dependencies
Parameters:
- BillingTopicsStackName
- Label:
default: Subscription Configuration
Parameters:
- BillsTopicEmailEndpoint
- UrgentBillsTopicEmailEndpoint
- UrgentBillsTopicPhoneEndpoint
ParameterLabels:
BillingTopicsStackName:
default: Billing Topics Stack Name
BillsTopicEmailEndpoint:
default: Bills Topic Email Endpoint
UrgentBillsTopicEmailEndpoint:
default: UrgentBills Topic Email Endpoint
UrgentBillsTopicPhoneEndpoint:
default: UrgentBills Topic Phone Endpoint
Parameters:
BillingTopicsStackName:
Description: Name of the CloudFormation Stack containing Billing Topics
Type: String
MinLength: 2
MaxLength: 64
Default: BillingTopics
AllowedPattern: ^[A-Z][-a-zA-Z0-9]*$
ConstraintDescription: must begin with an upper case letter and contain alphanumeric characters and dashes.
BillsTopicEmailEndpoint:
Description: Email Endpoint for Bills Topic. If blank, no standard subscription will be created
Type: String
Default: ''
AllowedPattern: (^$|[^\s@]+@[^\s@]+\.[^\s@]+$)
ConstraintDescription: must be a valid email address.
UrgentBillsTopicEmailEndpoint:
Description: Email Endpoint for UrgentBills Topic. If blank, no standard subscription will be created
Type: String
Default: ''
AllowedPattern: (^$|[^\s@]+@[^\s@]+\.[^\s@]+$)
ConstraintDescription: must be a valid email address.
UrgentBillsTopicPhoneEndpoint:
Description: Phone Endpoint for UrgentBills Topics. If blank, no standard subscription will be created
Type: String
Default: ''
AllowedPattern: (^$|^\+\d{11,12}$)
ConstraintDescription: must be a valid phone number.
Rules:
ValidateRegion:
Assertions:
- Assert: !Equals [ !Ref 'AWS::Region', us-east-1 ]
AssertDescription: This Template can only be used in Region us-east-1.
Conditions:
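  # Each subscription resource is created only when its corresponding endpoint parameter is non-empty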
ConfigureBillsTopicEmailSubscription: !Not [ !Equals [ !Ref BillsTopicEmailEndpoint, '' ]]
ConfigureUrgentBillsTopicEmailSubscription: !Not [ !Equals [ !Ref UrgentBillsTopicEmailEndpoint, '' ]]
ConfigureUrgentBillsTopicPhoneSubscription: !Not [ !Equals [ !Ref UrgentBillsTopicPhoneEndpoint, '' ]]
Resources:
BillsTopicEmailSubscription:
Type: AWS::SNS::Subscription
Properties:
TopicArn: !ImportValue
Fn::Sub: ${BillingTopicsStackName}-BillsTopic
Protocol: email
Endpoint: !Ref BillsTopicEmailEndpoint
Condition: ConfigureBillsTopicEmailSubscription
UrgentBillsTopicEmailSubscription:
Type: AWS::SNS::Subscription
Properties:
TopicArn: !ImportValue
Fn::Sub: ${BillingTopicsStackName}-UrgentBillsTopic
Protocol: email
Endpoint: !Ref UrgentBillsTopicEmailEndpoint
Condition: ConfigureUrgentBillsTopicEmailSubscription
UrgentBillsTopicPhoneSubscription:
Type: AWS::SNS::Subscription
Properties:
TopicArn: !ImportValue
Fn::Sub: ${BillingTopicsStackName}-UrgentBillsTopic
Protocol: sms
Endpoint: !Ref UrgentBillsTopicPhoneEndpoint
Condition: ConfigureUrgentBillsTopicPhoneSubscription
|
Billing-BillingSubscriptions.yaml
|
name: Push to Docker Registries
on:
push:
branches:
- master
paths-ignore:
- README.md
workflow_dispatch:
env:
DOCKER_ORG: unfor19
DOCKER_REPO: aws-webui
jobs:
docker:
runs-on: ubuntu-20.04
strategy:
matrix:
include:
- DOCKERFILE_PATH: Dockerfile
DOCKER_BUILD_TARGET: server
DOCKER_TAG_ONLY: latest
NODE_VERSION: 14
ALPINE_VERSION: 3.14
steps:
- uses: actions/checkout@v2
- name: Prepare
id: prepare
run: |
set -x
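          # Derive two image tags: a moving tag (e.g. latest) and an immutable release tag pinned to the short commit SHA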
DOCKER_TAG_LATEST=${DOCKER_ORG}/${DOCKER_REPO}:${{ matrix.DOCKER_TAG_ONLY }}
echo ::set-output name=docker_tag_latest::${DOCKER_TAG_LATEST}
DOCKER_TAG_RELEASE=${DOCKER_ORG}/${DOCKER_REPO}:${{ matrix.DOCKER_TAG_ONLY }}-${GITHUB_SHA:0:8}
echo ::set-output name=docker_tag_release::${DOCKER_TAG_RELEASE}
set +x
- name: Set up QEMU
uses: docker/setup-qemu-action@v1
- name: Set up Docker Buildx
id: buildx
uses: docker/setup-buildx-action@master
with:
install: true
- name: Cache Docker layers
uses: actions/cache@v2
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-v2-${{ github.sha }}
restore-keys: |
${{ runner.os }}-buildx-v2
- name: Build Docker Image
uses: docker/build-push-action@v2
with:
context: .
platforms: linux/amd64
builder: ${{ steps.buildx.outputs.name }}
          file: ${{ matrix.DOCKERFILE_PATH }}
build-args: |
NODE_VERSION=${{ matrix.NODE_VERSION }}
ALPINE_VERSION=${{ matrix.ALPINE_VERSION }}
target: ${{ matrix.DOCKER_BUILD_TARGET }}
push: false
tags: |
${{ steps.prepare.outputs.docker_tag_latest }}
${{ steps.prepare.outputs.docker_tag_release }}
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,mode=max,dest=/tmp/.buildx-cache-new
load: true
- name: Login to DockerHub
uses: docker/login-action@v1
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Push to DockerHub
uses: docker/build-push-action@v2
with:
context: .
platforms:
linux/amd64,linux/arm64
push: true
tags: |
${{ steps.prepare.outputs.docker_tag_latest }}
${{ steps.prepare.outputs.docker_tag_release }}
- name: Move cache
run: |
rm -rf /tmp/.buildx-cache
mv /tmp/.buildx-cache-new /tmp/.buildx-cache
|
.github/workflows/docker-latest.yml
|
uuid: 5065e260-db35-4ec8-b82f-cb72e2239ff8
langcode: en
status: true
dependencies:
config:
- page_manager.page.home_page_right_sidebar
- views.view.em_editors_picks
- views.view.em_example_10
- views.view.em_featured_center_mode_slider
- views.view.em_popular_content
module:
- panels
- system
- views
id: home_page_right_sidebar-panels_variant-1
label: '2 Columns Grid'
variant: panels_variant
variant_settings:
blocks:
3fc2ecb7-9a90-4f8b-bf2c-41ec3335cba8:
id: 'views_block:em_example_10-block_1'
label: ''
provider: views
label_display: '0'
views_label: ''
items_per_page: none
context_mapping: { }
region: middle_above_first
uuid: 3fc2ecb7-9a90-4f8b-bf2c-41ec3335cba8
weight: 1
6a48c1ca-a4f0-4d35-9d98-a2758963fd97:
id: 'views_block:em_editors_picks-block_1'
label: ''
provider: views
label_display: visible
views_label: ''
items_per_page: none
context_mapping: { }
region: top
uuid: 6a48c1ca-a4f0-4d35-9d98-a2758963fd97
weight: 1
8aa62d78-34c4-47fa-b98d-69ec2dd57e8e:
id: 'views_block:em_popular_content-block_3'
label: ''
provider: views
label_display: visible
views_label: ''
items_per_page: none
context_mapping: { }
region: middle_above_second
uuid: 8aa62d78-34c4-47fa-b98d-69ec2dd57e8e
weight: 1
6f9cc9e6-782f-4a70-b936-846acabf6131:
id: 'views_block:em_featured_center_mode_slider-block_2'
label: ''
provider: views
label_display: '0'
views_label: ''
items_per_page: none
context_mapping: { }
region: featured
uuid: 6f9cc9e6-782f-4a70-b936-846acabf6131
weight: 1
id: panels_variant
uuid: 4fdaf814-9801-44ce-9f3e-9f583da6064a
label: null
weight: 0
layout: home_1
layout_settings: { }
page_title: '2 Columns Grid'
storage_type: page_manager
storage_id: home_page_right_sidebar-panels_variant-1
builder: ipe
page: home_page_right_sidebar
weight: 0
selection_criteria:
-
id: request_path
pages: /right-sidebar/two-columns-grid
negate: false
context_mapping: { }
selection_logic: and
static_context: { }
|
config/sync-em/page_manager.page_variant.home_page_right_sidebar-panels_variant-1.yml
|
name: Circuit Parachain Build & Test CI
on:
pull_request:
paths:
- circuit-parachain/**/*.rs
- circuit-parachain/**/Cargo.toml
- circuit-parachain/**/Cargo.lock
- .github/workflows/circuit-parachain.yml
env:
RUST_BACKTRACE: 1
CARGO_TERM_COLOR: always
jobs:
format:
runs-on: self-hosted
steps:
      - name: ☁️ Checkout git repo
        uses: actions/checkout@master
      - name: ⚙️ Get nightly rust toolchain with wasm target
uses: actions-rs/toolchain@v1
with:
toolchain: nightly-2021-09-29
profile: minimal
components: rustfmt
override: true
- name: 📜 Format code
uses: actions-rs/cargo@v1
with:
command: fmt
args: --all --manifest-path circuit-parachain/Cargo.toml -- --check
lint:
runs-on: self-hosted
steps:
      - name: ☁️ Checkout git repo
        uses: actions/checkout@master
      - name: ⚙️ Get nightly rust toolchain with wasm target
uses: actions-rs/toolchain@v1
with:
toolchain: nightly-2021-09-29
target: wasm32-unknown-unknown
components: clippy
override: true
- name: 🕒 Cache Rust binaries and packages
uses: actions/cache@v2
id: cache-rust
with:
path: |
~/.cargo/registry
~/.cargo/git
circuit-parachain/target
key: ${{ runner.os }}-cargo-${{ hashFiles('circuit-parachain/Cargo.lock') }}
- name: 📑 Lint code
uses: actions-rs/cargo@v1
with:
command: clippy
args: --all --manifest-path circuit-parachain/Cargo.toml
build:
runs-on: self-hosted
needs: [format, lint]
steps:
      - name: ☁️ Checkout git repo
        uses: actions/checkout@master
      - name: ⚙️ Get nightly rust toolchain with wasm target
uses: actions-rs/toolchain@v1
with:
toolchain: nightly-2021-09-29
target: wasm32-unknown-unknown
override: true
- name: 🕒 Cache Rust binaries and packages
uses: actions/cache@v2
id: cache-rust
with:
path: |
~/.cargo/registry
~/.cargo/git
circuit-parachain/target
key: ${{ runner.os }}-cargo-${{ hashFiles('circuit-parachain/Cargo.lock') }}
- name: 🏭 Build circuit
uses: actions-rs/cargo@v1
continue-on-error: false
with:
command: build
args: --locked --manifest-path circuit-parachain/Cargo.toml
- name: 📤 Upload binary
uses: actions/upload-artifact@v2
with:
name: ${{ github.sha }}-parachain-artifacts
path: circuit-parachain/target/debug/circuit-collator
if-no-files-found: error
test:
runs-on: self-hosted
needs: [build]
steps:
      - name: ☁️ Checkout git repo
        uses: actions/checkout@master
      - name: ⚙️ Get nightly rust toolchain with wasm target
uses: actions-rs/toolchain@v1
with:
toolchain: nightly-2021-09-29
target: wasm32-unknown-unknown
override: true
- name: 🕒 Cache Rust binaries and packages
uses: actions/cache@v2
id: cache-rust
with:
path: |
~/.cargo/registry
~/.cargo/git
circuit-parachain/target
key: ${{ runner.os }}-cargo-${{ hashFiles('circuit-parachain/Cargo.lock') }}
- name: Download circuit
uses: actions/download-artifact@v2
with:
name: ${{ github.sha }}-parachain-artifacts
path: circuit-parachain/target/debug/
- name: 📼 Run unit tests
continue-on-error: false
run: |
chmod +x circuit-parachain/target/debug/circuit-collator
circuit-parachain/target/debug/circuit-collator --alice --log=main,debug --tmp > /dev/null 2>&1 &
cargo test --workspace --locked --manifest-path circuit-parachain/Cargo.toml
|
.github/workflows/circuit-parachain.yml
|
api_name: []
items:
- children:
- azure.mgmt.hanaonazure.models.Display
- azure.mgmt.hanaonazure.models.ErrorResponse
- azure.mgmt.hanaonazure.models.ErrorResponseException
- azure.mgmt.hanaonazure.models.Operation
- azure.mgmt.hanaonazure.models.ProviderInstance
- azure.mgmt.hanaonazure.models.ProxyResource
- azure.mgmt.hanaonazure.models.Resource
- azure.mgmt.hanaonazure.models.SapMonitor
- azure.mgmt.hanaonazure.models.Tags
- azure.mgmt.hanaonazure.models.TrackedResource
- azure.mgmt.hanaonazure.models.OperationPaged
- azure.mgmt.hanaonazure.models.SapMonitorPaged
- azure.mgmt.hanaonazure.models.ProviderInstancePaged
- azure.mgmt.hanaonazure.models.HanaProvisioningStatesEnum
fullName: azure.mgmt.hanaonazure.models
kind: import
langs:
- python
module: azure.mgmt.hanaonazure.models
name: models
type: package
uid: azure.mgmt.hanaonazure.models
references:
- fullName: azure.mgmt.hanaonazure.models.Display
isExternal: false
name: Display
parent: azure.mgmt.hanaonazure.models
uid: azure.mgmt.hanaonazure.models.Display
- fullName: azure.mgmt.hanaonazure.models.ErrorResponse
isExternal: false
name: ErrorResponse
parent: azure.mgmt.hanaonazure.models
uid: azure.mgmt.hanaonazure.models.ErrorResponse
- fullName: azure.mgmt.hanaonazure.models.ErrorResponseException
isExternal: false
name: ErrorResponseException
parent: azure.mgmt.hanaonazure.models
uid: azure.mgmt.hanaonazure.models.ErrorResponseException
- fullName: azure.mgmt.hanaonazure.models.Operation
isExternal: false
name: Operation
parent: azure.mgmt.hanaonazure.models
uid: azure.mgmt.hanaonazure.models.Operation
- fullName: azure.mgmt.hanaonazure.models.ProviderInstance
isExternal: false
name: ProviderInstance
parent: azure.mgmt.hanaonazure.models
uid: azure.mgmt.hanaonazure.models.ProviderInstance
- fullName: azure.mgmt.hanaonazure.models.ProxyResource
isExternal: false
name: ProxyResource
parent: azure.mgmt.hanaonazure.models
uid: azure.mgmt.hanaonazure.models.ProxyResource
- fullName: azure.mgmt.hanaonazure.models.Resource
isExternal: false
name: Resource
parent: azure.mgmt.hanaonazure.models
uid: azure.mgmt.hanaonazure.models.Resource
- fullName: azure.mgmt.hanaonazure.models.SapMonitor
isExternal: false
name: SapMonitor
parent: azure.mgmt.hanaonazure.models
uid: azure.mgmt.hanaonazure.models.SapMonitor
- fullName: azure.mgmt.hanaonazure.models.Tags
isExternal: false
name: Tags
parent: azure.mgmt.hanaonazure.models
uid: azure.mgmt.hanaonazure.models.Tags
- fullName: azure.mgmt.hanaonazure.models.TrackedResource
isExternal: false
name: TrackedResource
parent: azure.mgmt.hanaonazure.models
uid: azure.mgmt.hanaonazure.models.TrackedResource
- fullName: azure.mgmt.hanaonazure.models.OperationPaged
isExternal: false
name: OperationPaged
parent: azure.mgmt.hanaonazure.models
uid: azure.mgmt.hanaonazure.models.OperationPaged
- fullName: azure.mgmt.hanaonazure.models.SapMonitorPaged
isExternal: false
name: SapMonitorPaged
parent: azure.mgmt.hanaonazure.models
uid: azure.mgmt.hanaonazure.models.SapMonitorPaged
- fullName: azure.mgmt.hanaonazure.models.ProviderInstancePaged
isExternal: false
name: ProviderInstancePaged
parent: azure.mgmt.hanaonazure.models
uid: azure.mgmt.hanaonazure.models.ProviderInstancePaged
- fullName: azure.mgmt.hanaonazure.models.HanaProvisioningStatesEnum
isExternal: false
name: HanaProvisioningStatesEnum
parent: azure.mgmt.hanaonazure.models
uid: azure.mgmt.hanaonazure.models.HanaProvisioningStatesEnum
|
docs-ref-autogen/azure-mgmt-hanaonazure/azure.mgmt.hanaonazure.models.yml
|
--- !<MAP>
contentType: "MAP"
firstIndex: "2018-10-19 20:58"
game: "Unreal Tournament"
name: "DM-Agressive"
author: "<NAME>"
description: "None"
releaseDate: "2003-12"
attachments:
- type: "IMAGE"
name: "DM-Agressive_shot_1.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament/Maps/DeathMatch/A/DM-Agressive_shot_1.png"
- type: "IMAGE"
name: "DM-Agressive_shot_2.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament/Maps/DeathMatch/A/DM-Agressive_shot_2.png"
originalFilename: "dm-agressive(re).zip"
hash: "de4509cca4f55c29de7f825b6a5b1e3ceb631f4b"
fileSize: 1316059
files:
- name: "Ali's.uax"
fileSize: 268150
hash: "903062f75d4a84ab462b0d289588c04e2fffd778"
- name: "DM-Agressive.unr"
fileSize: 2110668
hash: "da578a84606fcfe26d7a6eccd095d221502b8c68"
otherFiles: 2
dependencies:
DM-Agressive.unr:
- status: "OK"
name: "Ali's"
downloads:
- url: "http://medor.no-ip.org/index.php?dir=Maps/DeathMatch&file=dm-agressive%28re%29.zip"
main: false
repack: false
state: "OK"
- url: "https://f002.backblazeb2.com/file/unreal-archive-files/Unreal%20Tournament/Maps/DeathMatch/A/dm-agressive(re).zip"
main: true
repack: false
state: "OK"
- url: "http://medor.no-ip.org/index.php?dir=Maps/DeathMatch&file=dm-agressive.zip"
main: false
repack: false
state: "OK"
- url: "http://www.unrealplayground.com/forums/downloads.php?do=file&id=4744"
main: false
repack: false
state: "MISSING"
- url: "http://www.unrealplayground.com/forums/downloads.php?do=file&id=2431"
main: false
repack: false
state: "MISSING"
- url: "https://files.vohzd.com/unrealarchive/Unreal%20Tournament/Maps/DeathMatch/A/d/e/4509cc/dm-agressive(re).zip"
main: false
repack: false
state: "OK"
- url: "https://unreal-archive-files.eu-central-1.linodeobjects.com/Unreal%20Tournament/Maps/DeathMatch/A/d/e/4509cc/dm-agressive(re).zip"
main: false
repack: false
state: "OK"
deleted: false
gametype: "DeathMatch"
title: "DM-Agressive"
playerCount: "8-10"
themes:
Ancient: 0.2
Nali Castle: 0.4
  Industrial: 0.4
bots: true
|
content/Unreal Tournament/Maps/DeathMatch/A/d/e/4509cc/dm-agressive_[de4509cc].yml
|
environment:
matrix:
- job_name: Ubuntu Linux builds
appveyor_build_worker_image: Ubuntu2004
- job_name: Windows MinGW builds
appveyor_build_worker_image: Visual Studio 2015
- job_name: Windows VS 2019 builds
appveyor_build_worker_image: Visual Studio 2019
- job_name: MacOS builds
appveyor_build_worker_image: macos-bigsur
for:
-
matrix:
only:
- job_name: Ubuntu Linux builds
install:
- sudo apt-get update
- sudo apt-get -y install libasound2-dev
test_script:
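      # Each configuration builds from a clean tree: git clean -fdx plus removing the small3d directory resets the workspace between builds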
- ./build-all.sh Debug
- git clean -fdx
- rm -rf small3d
- ./build-all.sh Release
- git clean -fdx
- rm -rf small3d
- ./build-all.sh Debug opengl
- git clean -fdx
- rm -rf small3d
- ./build-all.sh Release opengl
-
matrix:
only:
- job_name: Windows MinGW builds
install:
- choco install vulkan-sdk
- refreshenv
- set SELECTED_MINGW=C:\mingw-w64\x86_64-8.1.0-posix-seh-rt_v6-rev0
- set PATH=%PATH%;%SELECTED_MINGW%\mingw64\bin
- rmdir /Q /S C:\msys64
- rmdir /Q /S C:\MinGW
- del "C:\Program Files\Git\usr\bin\sh.exe"
test_script:
- .\build-all-mingw.bat Debug
- git clean -fdx
- rmdir /Q /S small3d
- .\build-all-mingw.bat Release
- git clean -fdx
- rmdir /Q /S small3d
- .\build-all-mingw.bat Debug opengl
- git clean -fdx
- rmdir /Q /S small3d
- .\build-all-mingw.bat Release opengl
-
matrix:
only:
- job_name: Windows VS 2019 builds
install:
- choco install vulkan-sdk
- refreshenv
test_script:
- .\build-all-vs.bat Debug
- git clean -fdx
- rmdir /Q /S small3d
- .\build-all-vs.bat Release
- git clean -fdx
- rmdir /Q /S small3d
- .\build-all-vs.bat Debug opengl
- git clean -fdx
- rmdir /Q /S small3d
- .\build-all-vs.bat Release opengl
-
matrix:
only:
- job_name: MacOS builds
test_script:
- ./build-all.sh Debug opengl
- git clean -fdx
- rm -rf small3d
- ./build-all.sh Release opengl
build: off
|
appveyor.yml
|
pt_BR:
devise:
confirmations:
confirmed: "Sua conta foi confirmada. Por favor, faÁa login."
send_instructions: "Em alguns minutos vocÍ receber· um email com instruÁıes sobre como confirmar sua conta."
send_paranoid_instructions: "Se seu email j· existir em nossa base, vocÍ receber· em alguns minutos uma mensagem de email com instruÁıes sobre como confirmar sua conta."
failure:
already_authenticated: "VocÍ j· fez login"
inactive: "Sua conta ainda n„o foi ativada."
invalid: "Email ou senha inv·lidos."
invalid_token: "Token de autenticaÁ„o inv·lido."
invited: "VocÍ tem um convite pendente. Aceite-o para terminar de criar sua conta."
locked: "Sua conta est· bloqueada."
not_found_in_database: "Email ou senha inv·lidos."
timeout: "Sua seÁ„o expirou; por favor, faÁa login novamente para continuar."
unauthenticated: "VocÍ precisa fazer login ou registrar-se para continuar."
unconfirmed: "VocÍ precisa confirmar sua conta antes de continuar."
mailer:
confirmation_instructions:
subject: "Confirme sua conta DMPonline ??"
reset_password_instructions:
subject: "Restabelecer as instruÁıes sobre senha"
unlock_instructions:
subject: "Desbloquear as instruÁıes"
omniauth_callbacks:
failure: "N„o foi possÌvel autenticar vocÍ de %{kind} porque \"%{reason}\"."
success: "AutenticaÁ„o da conta %{kind} bem-sucedida."
passwords:
no_token: "VocÍ sÛ pode acessar esta p·gina a partir de um email para redefiniÁ„o de senha. Se vocÍ est· vindo de um email de redefiniÁ„o de senha, assegure-se de ter usado toda a URL fornecida."
send_instructions: "Em alguns minutos vocÍ receber· um email com instruÁıes sobre como redefinir sua senha."
send_paranoid_instructions: "Se seu email j· est· em nossa base, vocÍ receber· em alguns minutos um email para recuperaÁ„o de senha."
updated: "Sua senha foi modificada com sucesso. VocÍ j· est· logado."
updated_not_active: "Sua senha foi modificada com sucesso."
registrations:
destroyed: "Adeus! Sua conta foi cancelada com sucesso. Esperamos vÍ-lo novamente em breve."
signed_up: "Bem-vindo! Registro realizado com sucesso."
signed_up_but_inactive: "VocÍ registrou-se com sucesso. PorÈm, n„o conseguimos deix·-lo entrar porque sua conta ainda n„o foi ativada."
signed_up_but_locked: "VocÍ registrou-se com sucesso. PorÈm, n„o conseguimos deix·-lo entrar porque sua conta est· bloqueada."
signed_up_but_unconfirmed: "Uma mensagem com um link de confirmaÁ„o foi enviada para seu email. Por favor, abra o link para ativar sua conta. Se vocÍ n„o receber o email de confirmaÁ„o, verifique seu filtro de spam."
update_needs_confirmation: "Sua conta foi atualizada com sucesso, mas precisamos conferir seu novo email. Por favor, veja seu email e clique no link de confirmaÁ„o para terminar de confirmar seu novo email."
updated: "Sua conta foi atualizada com sucesso."
sessions:
signed_in: "Entrada bem sucedida."
signed_out: "SaÌda bem sucedida."
unlocks:
send_instructions: "VocÍ receber· em poucos minutos um email com intruÁıes para desbloquear sua conta."
send_paranoid_instructions: "Se sua conta j· existe, vocÍ receber· em poucos minutos um email com instruÁıes para desbloque·-la."
unlocked: "Conta desbloqueada com sucesso. Por favor, faÁa login para continuar."
errors:
messages:
already_confirmed: "j· estava confirmado, por favor tente entrar"
confirmation_period_expired: "precisa ser confirmada no prazo de %{period}; por favor, solicite uma nova??"
expired: "expirou, por favor, solicite uma nova"
not_found: "n„o encontrado"
not_locked: "n„o foi bloqueado"
not_saved:
one: "1 erro impediu este %{resource} de ser salvo:"
other: "%{count} erros impediram este %{resource} de ser salvo:"
|
config/locales/devise/devise.pt_BR.yml
|
---
months:
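# Month 0 holds movable (Easter-relative) holidays: each date is easter(year) shifted by function_modifier days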
0:
- name: Fastelavn
regions: [dk]
function: easter(year)
function_modifier: -49
type: informal
- name: Palmesøndag
regions: [dk]
function: easter(year)
function_modifier: -7
type: informal
- name: Skærtorsdag
regions: [dk]
function: easter(year)
function_modifier: -3
- name: Langfredag
regions: [dk]
function: easter(year)
function_modifier: -2
- name: Påskedag
regions: [dk]
function: easter(year)
- name: 2. påskedag
regions: [dk]
function: easter(year)
function_modifier: 1
- name: Store Bededag
regions: [dk]
function: easter(year)
function_modifier: 26
  - name: Kristi Himmelfartsdag
regions: [dk]
function: easter(year)
function_modifier: 39
- name: Pinsedag
regions: [dk]
function: easter(year)
function_modifier: 49
- name: 2. Pinsedag
regions: [dk]
function: easter(year)
function_modifier: 50
1:
- name: Nytårsdag
regions: [dk]
mday: 1
4:
- name: 1. april
regions: [dk]
mday: 1
type: informal
- name: Danmarks besættelse
regions: [dk]
mday: 9
type: informal
- name: Dronningens fødselsdag
regions: [dk]
mday: 16
type: informal
5:
- name: Arbejdernes kampdag
regions: [dk]
mday: 1
type: informal
- name: Danmarks befrielse
regions: [dk]
mday: 5
type: informal
6:
- name: Grundlovsdag
regions: [dk]
mday: 5
type: informal
- name: Valdemarsdag og Genforeningsdag
regions: [dk]
mday: 15
type: informal
  - name: Sankt Hans aften
regions: [dk]
mday: 23
type: informal
11:
- name: Mortensaften
regions: [dk]
mday: 10
type: informal
12:
  - name: Sankta Lucia
regions: [dk]
mday: 13
type: informal
- name: Juleaftensdag
regions: [dk]
mday: 24
type: informal
- name: 1. juledag
regions: [dk]
mday: 25
- name: 2. juledag
regions: [dk]
mday: 26
tests: |
{Date.civil(2007,1,1) => 'Nytårsdag',
Date.civil(2007,2,18) => 'Fastelavn',
Date.civil(2007,4,9) => 'Danmarks besættelse',
Date.civil(2007,4,16) => 'Dronningens fødselsdag',
Date.civil(2007,4,5) => 'Skærtorsdag',
Date.civil(2007,4,6) => 'Langfredag',
Date.civil(2007,4,8) => 'Påskedag',
Date.civil(2007,4,9) => '2. påskedag',
Date.civil(2007,5,1) => 'Arbejdernes kampdag',
Date.civil(2007,5,4) => 'Store Bededag',
   Date.civil(2007,5,17) => 'Kristi Himmelfartsdag',
Date.civil(2007,5,27) => 'Pinsedag',
Date.civil(2007,5,28) => '2. Pinsedag',
Date.civil(2007,6,5) => 'Grundlovsdag',
Date.civil(2007,12,24) => 'Juleaftensdag',
Date.civil(2007,12,25) => '1. juledag',
Date.civil(2007,12,26) => '2. juledag'}.each do |date, name|
assert_equal name, (Holidays.on(date, :dk, :informal)[0] || {})[:name]
end
|
dk.yaml
|
interactions:
- request:
body: !!python/unicode '{"username": "nouser", "authType": "password", "name":
"Example", "useProxy": "false", "enabled": "true", "ip": "127.0.0.1", "managedPlugins":
"false", "agentCapable": "false", "password": "<PASSWORD>", "verifyHost": "false",
"port": 8834}'
headers:
Accept: ['*/*']
Accept-Encoding: ['gzip, deflate']
Connection: [keep-alive]
Content-Length: ['239']
Content-Type: [application/json]
Cookie: [TNS_SESSIONID=SESSIONID]
User-Agent: [pyTenable/0.3.12 (pyTenable/0.3.12; Python/2.7.15)]
X-SecurityCenter: ['0000000000']
method: POST
uri: https://securitycenter.home.cugnet.net/rest/scanner
response:
body: {string: !!python/unicode '{"type":"regular","response":{"id":"9","name":"Example","description":"","ip":"127.0.0.1","port":"8834","useProxy":"false","enabled":"true","verifyHost":"false","managePlugins":"false","authType":"password","cert":null,"username":"nouser","password":"SET","agentCapable":"false","version":null,"webVersion":null,"admin":"false","msp":"false","numScans":"0","numHosts":"0","numSessions":"0","numTCPSessions":"0","loadAvg":"0.0","uptime":-1,"status":"8192","pluginSet":null,"loadedPluginSet":null,"serverUUID":null,"createdTime":"1553266985","modifiedTime":"1553266985","zones":[],"nessusManagerOrgs":[]},"error_code":0,"error_msg":"","warnings":[],"timestamp":1553266985}
'}
headers:
cache-control: ['no-store, no-cache, must-revalidate']
connection: [Keep-Alive]
content-length: ['671']
content-type: [application/json]
date: ['Fri, 22 Mar 2019 15:03:05 GMT']
expires: ['Thu, 19 Nov 1981 08:52:00 GMT']
keep-alive: ['timeout=15, max=100']
pragma: [no-cache]
securitycenter: [5.8.0]
server: [Apache]
strict-transport-security: [max-age=31536000; includeSubDomains]
x-content-type-options: [nosniff]
x-frame-options: [DENY]
x-xss-protection: [1; mode=block]
status: {code: 200, message: OK}
|
tests/sc/cassettes/test_scanners_create_success.yaml
|
--- !ruby/hash:SeasonHash
title: NG騎士ラムネ&40EX ビクビクトライアングル愛の嵐大作戦
watchable: true
thumbnail_url: https://cs1.anime.dmkt-sp.jp/anime_kv/img/10/20/2/10202_1_6.png?1427216400000
outline: あの大冒険から3年後,ラムネは中学生となりハラハラワールドのことも忘れてしまっていた。しかし,突如ミルクが現れ,謎の鉄仮面が命を狙ってくる。アララ王国は大きな危機にさらされていた。今こそ勇者ラムネス復活のとき。
tags: !ruby/array:Hashie::Array
- !ruby/hash:TagHash
name: コメディ/ギャグ
type: genre
- !ruby/hash:TagHash
name: ロボット/メカ
type: genre
- !ruby/hash:TagHash
name: 音楽:りゅうてつし
type: staff
- !ruby/hash:TagHash
name: キャラクターデザイン:菅沼栄治
type: staff
- !ruby/hash:TagHash
name: 音楽:松井忠重
type: staff
- !ruby/hash:TagHash
name: 脚本:隅沢克之
type: staff
- !ruby/hash:TagHash
name: キャラクターデザイン:斎藤卓也
type: staff
- !ruby/hash:TagHash
name: 演出:ますなりこうじ
type: staff
- !ruby/hash:TagHash
name: 監督:ますなりこうじ
type: staff
- !ruby/hash:TagHash
name: NG騎士ラムネ&40シリーズ
type: other
- !ruby/hash:TagHash
name: 製作年代:1990年代
type: other
- !ruby/hash:TagHash
name: 佐藤俊彦
type: other
- !ruby/hash:TagHash
name: 田中英行
type: other
- !ruby/hash:TagHash
name: 製作年:1991年
type: other
- !ruby/hash:TagHash
name: 伊東岳彦
type: other
- !ruby/hash:TagHash
name: ますなりこうじ
type: other
episodes: !ruby/array:Hashie::Array
- !ruby/hash:EpisodeHash
episode_no: ビクビクトライアングル愛の嵐大作戦1
title: 愛ふたたび
description: 草尾毅:ラムネ/横山智佐:ミルク/玉川紗己子:ココア/神代知衣:タマQ/飯塚昭三:妖神ゴブーリキ/矢尾一樹:ダ・サイダー/TARAKO:ヘビメタコ<br><br>監督:ますなりこうじ/演出:ますなりこうじ/絵コンテ:ますなりこうじ/企画:佐藤俊彦/脚本:隅沢克之/キャラクター原案:伊東岳彦/オリジナル・アニメーション・キャラクターデザイン:斎藤卓也/キャラクターデザイン:菅沼栄治/メカデザイン:中原レイ/音楽:松井忠重,りゅうてつし/音響監督:田中英行<br><br>次話→so32145892
length_seconds: 1747
content_id: so32145891
default_thread_id: 1508746536
channel_id: 2632720
thumbnail_url: http://tn.smilevideo.jp/smile?i=32145891
- !ruby/hash:EpisodeHash
episode_no: ビクビクトライアングル愛の嵐大作戦2
title: 愛流されて
description: 草尾毅:ラムネ/横山智佐:ミルク/玉川紗己子:ココア/神代知衣:タマQ/飯塚昭三:妖神ゴブーリキ/矢尾一樹:ダ・サイダー/TARAKO:ヘビメタコ<br><br>監督:ますなりこうじ/演出:ますなりこうじ/絵コンテ:ますなりこうじ/企画:佐藤俊彦/脚本:隅沢克之/キャラクター原案:伊東岳彦/オリジナル・アニメーション・キャラクターデザイン:斎藤卓也/キャラクターデザイン:菅沼栄治/メカデザイン:中原レイ/音楽:松井忠重,りゅうてつし/音響監督:田中英行<br><br>so32145891←前話|次話→so32145943 第一話→so32145891
length_seconds: 1761
content_id: so32145892
default_thread_id: 1508746532
channel_id: 2632720
thumbnail_url: http://tn.smilevideo.jp/smile?i=32145892
- !ruby/hash:EpisodeHash
episode_no: ビクビクトライアングル愛の嵐大作戦3
title: 愛は勝つ
description: 草尾毅:ラムネ/横山智佐:ミルク/玉川紗己子:ココア/神代知衣:タマQ/飯塚昭三:妖神ゴブーリキ/矢尾一樹:ダ・サイダー/TARAKO:ヘビメタコ<br><br>監督:ますなりこうじ/演出:ますなりこうじ/絵コンテ:ますなりこうじ/企画:佐藤俊彦/脚本:隅沢克之/キャラクター原案:伊東岳彦/オリジナル・アニメーション・キャラクターデザイン:斎藤卓也/キャラクターデザイン:菅沼栄治/メカデザイン:中原レイ/音楽:松井忠重,りゅうてつし/音響監督:田中英行<br><br>so32145892←前話 第一話→so32145891
length_seconds: 1744
content_id: so32145943
default_thread_id: 1508746825
channel_id: 2632720
thumbnail_url: http://tn.smilevideo.jp/smile?i=32145943
cast: "[キャスト]<br>草尾毅:ラムネ/横山智佐:ミルク/玉川紗己子:ココア/神代知衣:タマQ/飯塚昭三:妖神ゴブーリキ/矢尾一樹:ダ・サイダー/TARAKO:ヘビメタコ"
staff: "[スタッフ]<br>監督:ますなりこうじ/演出:ますなりこうじ/絵コンテ:ますなりこうじ/企画:佐藤俊彦/脚本:隅沢克之/キャラクター原案:伊東岳彦/オリジナル・アニメーション・キャラクターデザイン:斎藤卓也/キャラクターデザイン:菅沼栄治/メカデザイン:中原レイ/音楽:松井忠重,りゅうてつし/音響監督:田中英行"
produced_year: "[製作年]<br>1991年"
copyright: "(c) PRODUCTION REED 1991"
related_seasons: !ruby/array:Hashie::Array
- !ruby/hash:SeasonHash
title: NG騎士ラムネ&40
- !ruby/hash:SeasonHash
title: VS騎士ラムネ&40 炎
- !ruby/hash:SeasonHash
title: VS騎士ラムネ&40FRESH
- !ruby/hash:SeasonHash
title: NG騎士ラムネ&40DX ワクワク時空 炎の大捜査戦
- !ruby/hash:SeasonHash
title: NG騎士ラムネ&40総集編
|
db/fixtures/seasons/season_01721.yml
|
version: '3.3'
services:
dbsetup:
depends_on:
- mssql
build:
context: .
dockerfile: Dockerfile-dbsetup
environment:
TRAM_DB_SERVER: "mssql"
TRAM_SA_PASSWORD: "<PASSWORD>"
TRAM_DB: "TramDb"
TRAM_SCHEMA: "eventuate"
TRAM_SCHEMA2: "schema1"
eventuatetramtests:
depends_on:
- zookeeper
- kafka
- mssql
- cdcservice1
- cdcservice2
build: .
environment:
KafkaBootstrapServers: "kafka:29092"
ConnectionStrings__EventuateTramDbConnection: "Server=mssql;Database=TramDb;User Id=sa;Password=<PASSWORD>"
volumes:
- ./bin/Release/netcoreapp2.2/publish:/app
zookeeper:
image: confluentinc/cp-zookeeper:latest
environment:
ZOOKEEPER_CLIENT_PORT: 2181
ZOOKEEPER_TICK_TIME: 2000
kafka:
image: confluentinc/cp-kafka:latest
depends_on:
- zookeeper
environment:
KAFKA_BROKER_ID: 1
KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://kafka:29092,PLAINTEXT_HOST://localhost:9092
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
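  # Two CDC pipelines poll the same SQL Server database but different schemas (eventuate and schema1), each with its own reader name and leadership lock path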
cdcservice1:
image: eventuateio/eventuate-tram-cdc-mysql-service:${CDC_SERVICE_DOCKER_VERSION}
depends_on:
- kafka
- mssql
environment:
SPRING_DATASOURCE_URL: jdbc:sqlserver://mssql;databaseName=TramDb
SPRING_DATASOURCE_USERNAME: sa
SPRING_DATASOURCE_PASSWORD: <PASSWORD>
SPRING_DATASOURCE_TEST_ON_BORROW: "true"
SPRING_DATASOURCE_VALIDATION_QUERY: SELECT 1
SPRING_DATASOURCE_DRIVER_CLASS_NAME: com.microsoft.sqlserver.jdbc.SQLServerDriver
SPRING_PROFILES_ACTIVE: EventuatePolling
EVENTUATELOCAL_KAFKA_BOOTSTRAP_SERVERS: kafka:29092
EVENTUATELOCAL_ZOOKEEPER_CONNECTION_STRING: zookeeper:2181
EVENTUATE_DATABASE_SCHEMA: eventuate
EVENTUATELOCAL_CDC_POLLING_INTERVAL_IN_MILLISECONDS: 500
EVENTUATELOCAL_CDC_MAX_EVENTS_PER_POLLING: 1000
EVENTUATELOCAL_CDC_MAX_ATTEMPTS_FOR_POLLING: 100
EVENTUATELOCAL_CDC_POLLING_RETRY_INTERVAL_IN_MILLISECONDS: 500
EVENTUATELOCAL_CDC_READER_NAME: Reader1
EVENTUATELOCAL_CDC_LEADERSHIP_LOCK_PATH: /eventuatelocal/cdc/leader/1
cdcservice2:
image: eventuateio/eventuate-tram-cdc-mysql-service:${CDC_SERVICE_DOCKER_VERSION}
depends_on:
- kafka
- mssql
environment:
SPRING_DATASOURCE_URL: jdbc:sqlserver://mssql;databaseName=TramDb
SPRING_DATASOURCE_USERNAME: sa
SPRING_DATASOURCE_PASSWORD: <PASSWORD>
SPRING_DATASOURCE_TEST_ON_BORROW: "true"
SPRING_DATASOURCE_VALIDATION_QUERY: SELECT 1
SPRING_DATASOURCE_DRIVER_CLASS_NAME: com.microsoft.sqlserver.jdbc.SQLServerDriver
SPRING_PROFILES_ACTIVE: EventuatePolling
EVENTUATELOCAL_KAFKA_BOOTSTRAP_SERVERS: kafka:29092
EVENTUATELOCAL_ZOOKEEPER_CONNECTION_STRING: zookeeper:2181
EVENTUATE_DATABASE_SCHEMA: schema1
EVENTUATELOCAL_CDC_POLLING_INTERVAL_IN_MILLISECONDS: 500
EVENTUATELOCAL_CDC_MAX_EVENTS_PER_POLLING: 1000
EVENTUATELOCAL_CDC_MAX_ATTEMPTS_FOR_POLLING: 100
EVENTUATELOCAL_CDC_POLLING_RETRY_INTERVAL_IN_MILLISECONDS: 500
EVENTUATELOCAL_CDC_READER_NAME: Reader2
EVENTUATELOCAL_CDC_LEADERSHIP_LOCK_PATH: /eventuatelocal/cdc/leader/2
mssql:
image: microsoft/mssql-server-linux:2017-latest
environment:
SA_PASSWORD: "<PASSWORD>"
ACCEPT_EULA: "Y"
MSSQL_MEMORY_LIMIT_MB: "500"
|
IO.Eventuate.Tram.IntegrationTests/docker-compose.yml
|
name: Release
on:
push:
tags:
- v*.*.*
# pull_request:
# branches:
# - master
jobs:
goreleaser:
strategy:
matrix:
platform: [ubuntu-latest, windows-latest]
runs-on: ${{ matrix.platform }}
steps:
-
name: Checkout
uses: actions/checkout@v3
with:
fetch-depth: 0
-
name: Fetch all tags
run: git fetch --force --tags
-
name: Set up Go
uses: actions/setup-go@v3
with:
go-version: 1.18
-
name: Build Dependencies (Linux)
if: matrix.platform == 'ubuntu-latest'
run: |
sudo apt-get update && sudo apt-get install gcc-aarch64-linux-gnu jq snapd --yes
echo 'deb [trusted=yes] https://repo.goreleaser.com/apt/ /' | sudo tee /etc/apt/sources.list.d/goreleaser.list
sudo apt update
sudo apt install nfpm
sudo snap install --classic snapcraft
-
if: matrix.platform == 'ubuntu-latest'
name: Run GoReleaser
uses: goreleaser/goreleaser-action@v3
with:
distribution: goreleaser
version: latest
args: release --rm-dist -f hack/goreleaser/linux.yml
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
if: matrix.platform == 'windows-latest'
name: Run GoReleaser
uses: goreleaser/goreleaser-action@v3
with:
distribution: goreleaser
version: latest
args: release --rm-dist -f hack/goreleaser/windows.yml
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
-
if: matrix.platform == 'ubuntu-latest'
name: Upload assets
uses: actions/upload-artifact@v3
with:
name: disass_linux
path: dist/*
-
if: matrix.platform == 'windows-latest'
name: Upload assets
uses: actions/upload-artifact@v3
with:
name: disass_windows
path: dist/*
|
.github/workflows/release.yml
|
---
- config:
- testset: "Testes Básicos"
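# Each test exercises one SAT-CF-e hub endpoint; expected_status 501 marks methods the hub does not implement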
- test:
- group: "Metodos SAT-CF-e"
- name: "AssociarAssinatura"
- url: "/hub/v1/associarassinatura"
- method: "POST"
- body: "numero_caixa=1&sequencia_cnpj=123456&assinatura_ac=xyz"
- expected_status: [200]
- test:
- group: "Metodos SAT-CF-e"
- name: "AtualizarSoftwareSAT"
- url: "/hub/v1/atualizarsoftwaresat"
- method: "POST"
- body: "numero_caixa=1"
- expected_status: [200]
- test:
- group: "Metodos SAT-CF-e"
- name: "BloquearSAT"
- url: "/hub/v1/bloquearsat"
- method: "POST"
- body: "numero_caixa=1"
- expected_status: [200]
- test:
- group: "Metodos SAT-CF-e"
- name: "CancelarUltimaVenda"
- url: "/hub/v1/cancelarultimavenda"
- method: "POST"
- body: "numero_caixa=1&chave_cfe=CFe123&dados_cancelamento=<CFeCanc>...</CFeCanc>"
- expected_status: [200]
- test:
- group: "Metodos SAT-CF-e"
- name: "ComunicarCertificadoICPBRASIL"
- url: "/hub/v1/comunicarcertificadoicpbrasil"
- method: "POST"
- body: "numero_caixa=1&certificado=CERTIFICADO"
- expected_status: [200]
- test:
- group: "Metodos SAT-CF-e"
- name: "ConfigurarInterfaceDeRede"
- url: "/hub/v1/configurarinterfacederede"
- method: "POST"
- body: "numero_caixa=1&configuracao=<config>...</config>"
- expected_status: [200]
- test:
- group: "Metodos SAT-CF-e"
- name: "ConsultarNumersoSessao"
- url: "/hub/v1/consultarnumerosessao"
- method: "POST"
- body: "numero_caixa=1&numero_sessao=7"
- expected_status: [501]
- test:
- group: "Metodos SAT-CF-e"
- name: "ConsultarSAT"
- url: "/hub/v1/consultarsat"
- method: "POST"
- body: "numero_caixa=1"
- expected_status: [200]
- test:
- group: "Metodos SAT-CF-e"
- name: "ConsultarStatusOperacional"
- url: "/hub/v1/consultarstatusoperacional"
- method: "POST"
- body: "numero_caixa=1"
- expected_status: [200]
- test:
- group: "Metodos SAT-CF-e"
- name: "DesbloquearSAT"
- url: "/hub/v1/desbloquearsat"
- method: "POST"
- body: "numero_caixa=1"
- expected_status: [200]
- test:
- group: "Metodos SAT-CF-e"
- name: "EnviarDadosVenda"
- url: "/hub/v1/enviardadosvenda"
- method: "POST"
- body: "numero_caixa=1&dados_venda=<CFe>...</CFe>"
- expected_status: [200]
- test:
- group: "Metodos SAT-CF-e"
- name: "ExtrairLogs"
- url: "/hub/v1/extrairlogs"
- method: "POST"
- body: "numero_caixa=1"
- expected_status: [200]
- test:
- group: "Metodos SAT-CF-e"
- name: "TesteFimAFim"
- url: "/hub/v1/testefimafim"
- method: "POST"
- body: "numero_caixa=1&dados_venda=<CFe>...</CFe>"
- expected_status: [200]
- test:
- group: "Metodos SAT-CF-e"
- name: "TrocarCodigoDeAtivacao"
- url: "/hub/v1/trocarcodigodeativacao"
- method: "POST"
- body: "numero_caixa=1&opcao=1&novo_codigo=112233445&novo_codigo_confirmacao=112233445"
- expected_status: [501]
|
test/tests/smoke.yaml
|
before_script:
#- yum install which -y
# install ssh-agent if not already installed, it is required by docker
- 'which ssh-agent || ( yum install openssh-clients -y )'
# run ssh-agent (inside the build environment)
- eval $(ssh-agent -s)
# add the ssh key stored in SSH_PRIVATE_KEY variable to the agent store
#- ssh-add <(echo "$SSH_PRIVATE_KEY")
# for docker builds disable host key checking although this can lead to
# mitm attacks; only use this in docker or it will overwrite the host
# ssh config!
#- mkdir -p ~/.ssh
#- '[[ -f /.dockerenv ]] && echo -e "Host *\n\tStrictHostKeyChecking no\n\n" > ~/.ssh/config'
- whoami
- pwd
after_script:
- whoami
- pwd
stages:
- develop
- preprod
- prod
develop:
stage: develop
only:
# This build will run only when something is pushed to the develop branch.
- develop
script:
- pwd && su dev && whoami
- cd /media/disk1/www-demo/botble/
- git pull
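    # The optional steps below run only when the latest commit message mentions the matching keyword (bootstrap, sass or npm)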
- bootstrap="$(git log -1 --pretty=%B | grep bootstrap)" && ([[ $bootstrap ]] && bash bin/bootstrap.sh -e d)
- sass="$(git log -1 --pretty=%B | grep sass)" && ([[ $sass ]] && sass public/themes/main/sass/common.scss:public/themes/main/css/common.css --style compressed)
- npm="$(git log -1 --pretty=%B | grep npm)" && ([[ $npm ]] && npm update && npm run dev)
- php artisan config:clear
- php artisan cache:clear
- php artisan view:clear
- chown -R dev:dev /media/disk1/www-demo/botble/
- pwd
preprod:
stage: preprod
only:
    # This build will run only when something is pushed to the preprod branch.
- preprod
script:
- pwd && whoami
- ssh -tt <EMAIL> "cd /home/giaxd/domains/preprod.giaxd.com/public_html;git pull --no-edit;"
# - bootstrap="$(git log -1 --pretty=%B | grep bootstrap)" && ([[ $bootstrap ]] && bash bin/bootstrap.sh -e d)
- pwd && whoami
prod:
stage: prod
only:
    # This build will run only when something is pushed to the master branch.
- master
script:
- whoami
- ssh -tt <EMAIL> "cd /home/giaxd/domains/giaxd.com/public_html && ls -al && git pull --no-edit"
# - bootstrap="$(git log -1 --pretty=%B | grep bootstrap)" && ([[ $bootstrap ]] && bash bin/bootstrap.sh -e d)
- pwd
- whoami
|
.gitlab-ci.yml
|
# define your jenkins URL here
#unclassified:
# location:
# url: http://192.168.1.10/
jenkins:
# slaveAgentPort: 50000
securityRealm:
local:
allowsSignup: false
enableCaptcha: false
# configure your AD connection below, comment out local if you choose to
#activeDirectory:
#bindPassword:
#customDomain: true
#domains:
#- bindName:
#bindPassword:
#name:
#servers:
#site:
#tlsConfiguration:
#groupLookupStrategy:
# define a fallback user in case AD connection's not working
#internalUsersDatabase:
# jenkinsInternalUser: "admin"
#removeIrrelevantGroups: false
#startTls: true
authorizationStrategy:
#loggedInUsersCanDoAnything:
# allowAnonymousRead: false
projectMatrix:
permissions:
- "Overall/Administer:admin"
- "Overall/Read:authenticated"
# include your cloud configs here
# this is just an initial config to test your connection
# and build images from inside jenkins if you'd want to
clouds:
- docker:
dockerApi:
connectTimeout: 60
dockerHost:
uri: "unix:///var/run/docker.sock"
readTimeout: 60
name: "docker local"
# detailed template for a docker-slave
# - docker:
# dockerApi:
# connectTimeout: 60
# dockerHost:
# uri: "unix:///var/run/docker.sock"
# readTimeout: 60
# name: "docker slave"
# templates:
# - connector:
# attach:
# user: "jenkins"
# dockerTemplateBase:
# cpuPeriod: 0
# cpuQuota: 0
#        image: "address/of/container/image/inside/registry/ansible-slave"
# pullCredentialsId: "your-credentials-id"
# instanceCapStr: "10"
# labelString: "ansible-docker-slave"
# mode: EXCLUSIVE
# name: "docker-slave"
# pullStrategy: PULL_ALWAYS
# pullTimeout: 300
# remoteFs: "/home/jenkins"
|
casc.yml
|
- name: Create the shared and releases directory for the SP dashboard
file: path={{ spdashboard_data_dir }}/{{item}} state=directory owner=root group=root mode=775
with_items:
- releases
- name: Create shared and writeable directories for logs and data
file: path={{ spdashboard_data_dir }}/shared/{{item}} state=directory owner={{ spdashboard_fpm_user }} group=root mode=775
with_items:
- logs
- sessions
- cache
- name: Create spdashboard branch dir
file:
path: "{{spdashboard_branch_dir }}"
state: directory
owner: root
group: root
mode: 755
- name: Install git
yum: name=git state=present
- name: Copy makerelease.sh
copy: src=makeRelease.sh dest={{spdashboard_branch_dir}}/makeRelease.sh mode=770
- name: Make release
command: "./makeRelease.sh {{ spdashboard_branch }}"
environment:
HOME: "{{ openconext_builds_dir }}"
args:
chdir: "{{ spdashboard_branch_dir }}"
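# Deploy flow: unpack the new release, repoint the 'current' symlink, then swap per-release logs, sessions and cache for symlinks into shared/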
- name: Unpack current version
unarchive: src="{{openconext_builds_dir }}/Releases/sp-dashboard-{{ spdashboard_branch | replace('/', '_') }}.tar.gz" dest="{{ spdashboard_data_dir}}/releases" copy=no
- name: Create current symlink
file: src="{{ spdashboard_data_dir}}/releases/sp-dashboard-{{ spdashboard_branch | replace('/', '_') }}" dest="{{ spdashboard_data_dir}}/current" state=link
- name: Delete config
file: path={{spdashboard_data_dir}}/current/app/{{ item }} state=absent
with_items:
- config/parameters.yml
- name: Delete cache, log and sessions
file: path={{spdashboard_data_dir}}/current/var/{{ item }} state=absent
with_items:
- logs
- sessions
- cache
- name: Create symlink to logs,sessions and cache
file: src={{spdashboard_data_dir}}/shared/{{ item }} dest={{ spdashboard_data_dir}}/releases/sp-dashboard-{{ spdashboard_branch | replace('/', '_') }}/var/{{item}} owner=root group=root state=link
with_items:
- logs
- sessions
- cache
- name: Install config file
template: src=parameters.yml.j2 dest={{ spdashboard_data_dir}}/current/app/config/parameters.yml
- name: Clear the cache
command: php72 bin/console cache:clear --env=prod
args:
chdir: "{{ spdashboard_data_dir }}/current/"
- name: Run database migrations
command: php72 bin/console doctrine:migrations:migrate
args:
chdir: "{{ spdashboard_data_dir }}/current/"
- name: Run translation import
command: php72 bin/console lexik:translations:import
args:
chdir: "{{ spdashboard_data_dir }}/current/"
- name: Clean the cache
command: php72 bin/console cache:clear --env=prod
args:
chdir: "{{ spdashboard_data_dir }}/current/"
notify: restart php72-fpm
- name: Chown the cachedir recursively again
file: dest={{spdashboard_data_dir}}/shared/cache owner=spdashboard recurse=yes
|
ansible/roles/spdashboard/tasks/install-branch.yml
|
interactions:
- request:
body: null
headers:
Connection: [keep-alive]
Content-Length: ['0']
User-Agent: [Azure-Storage/1.4.0-1.4.0 (Python CPython 3.7.0; Darwin 18.2.0)]
x-ms-client-request-id: [a11a25e4-e475-11e8-9ac5-acde48001122]
x-ms-date: ['Fri, 09 Nov 2018 23:17:29 GMT']
x-ms-version: ['2018-03-28']
method: PUT
uri: https://storagename.queue.core.windows.net/encryptionqueued9c213ac
response:
body: {string: ''}
headers:
Date: ['Fri, 09 Nov 2018 23:17:29 GMT']
Server: [Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0]
Transfer-Encoding: [chunked]
x-ms-request-id: [5483ab33-1003-0057-7982-7826a6000000]
x-ms-version: ['2018-03-28']
status: {code: 201, message: Created}
- request:
body: '<?xml version=''1.0'' encoding=''utf-8''?>
<QueueMessage><MessageText>{"EncryptedMessageContents": "HFjLgr65gh5NXfbHN0wTRg==",
"EncryptionData": {"WrappedContentKey": {"KeyId": "key1", "EncryptedKey": "<KEY>
"Algorithm": "A256KW"}, "EncryptionAgent": {"Protocol": "1.0", "EncryptionAlgorithm":
"AES_CBC_256"}, "ContentEncryptionIV": "/P/Db72Fy/aL3TezqVvlAQ==", "KeyWrappingMetadata":
{"EncryptionLibrary": "Python 1.4.0"}}}</MessageText></QueueMessage>'
headers:
Connection: [keep-alive]
Content-Length: ['501']
User-Agent: [Azure-Storage/1.4.0-1.4.0 (Python CPython 3.7.0; Darwin 18.2.0)]
x-ms-client-request-id: [a132fdbc-e475-11e8-9ac5-acde48001122]
x-ms-date: ['Fri, 09 Nov 2018 23:17:30 GMT']
x-ms-version: ['2018-03-28']
method: POST
uri: https://storagename.queue.core.windows.net/encryptionqueued9c213ac/messages
response:
body: {string: "\uFEFF<?xml version=\"1.0\" encoding=\"utf-8\"?><QueueMessagesList><QueueMessage><MessageId>2cc1a7d7-94ab-43bc-8454-b92751d58d32</MessageId><InsertionTime>Fri,\
\ 09 Nov 2018 23:17:30 GMT</InsertionTime><ExpirationTime>Fri, 16 Nov 2018\
\ 23:17:30 GMT</ExpirationTime><PopReceipt>AgAAAAMAAAAAAAAAnDrvYoJ41AE=</PopReceipt><TimeNextVisible>Fri,\
\ 09 Nov 2018 23:17:30 GMT</TimeNextVisible></QueueMessage></QueueMessagesList>"}
headers:
Content-Type: [application/xml]
Date: ['Fri, 09 Nov 2018 23:17:29 GMT']
Server: [Windows-Azure-Queue/1.0 Microsoft-HTTPAPI/2.0]
Transfer-Encoding: [chunked]
x-ms-request-id: [5483ab3a-1003-0057-7e82-7826a6000000]
x-ms-version: ['2018-03-28']
status: {code: 201, message: Created}
version: 1
|
tests/recordings/test_queue_encryption.test_put_with_strict_mode.yaml
|
name: Australian News RSS Feeds
feeds:
- name: ABC Australia
feed: 'http://www.abc.net.au/news/feed/2942460/rss.xml'
url: 'https://www.abc.net.au/news/'
- name: 9NEWS
feed: 'http://www.9news.com.au/rss'
url: 'https://www.9news.com.au/'
- name: Daily Telegraph
feed: 'http://www.dailytelegraph.com.au/feed'
url: 'http://www.dailytelegraph.com.au/'
- name: 'SMH'
feed: 'http://feeds.smh.com.au/rssheadlines/top.xml'
url: 'https://www.smh.com.au/'
- name: News.com.au - Australia's News site
feed: 'https://www.news.com.au/feed/'
url: 'https://www.news.com.au/'
- name: The Australian
feed: 'https://www.theaustralian.com.au/feed/'
url: 'https://www.theaustralian.com.au/'
- name: The Age News Headlines
feed: 'http://feeds.theage.com.au/rssheadlines/top.xml'
url: 'https://www.theage.com.au/'
- name: SBS Australia
feed: 'http://www.sbs.com.au/news/rss/Section/Top+Stories'
url: 'https://www.sbs.com.au/'
- name: <NAME>
feed: 'https://www.heraldsun.com.au/rss'
url: 'http://www.heraldsun.com.au/'
  - name: 'Huffington Post Australia'
feed: 'http://www.huffingtonpost.com.au/rss/index.xml'
url: 'https://www.huffingtonpost.com.au/'
- name: The Courier Mail
feed: 'http://www.couriermail.com.au/feed'
url: 'http://www.couriermail.com.au/'
- name: Perth Now
feed: 'http://www.perthnow.com.au/feed'
url: 'https://www.perthnow.com.au/'
- name: 'Brisbane Times'
feed: 'http://feeds.brisbanetimes.com.au/rssheadlines/top.xml'
url: 'https://www.brisbanetimes.com.au/'
- name: The Canberra Times
feed: 'https://www.canberratimes.com.au/rss.xml'
url: 'https://www.canberratimes.com.au/'
- name: WA Today
feed: 'http://feeds.watoday.com.au/rssheadlines/top.xml'
url: 'https://www.watoday.com.au/'
- name: Gold Coast Bulletin
feed: 'http://www.goldcoastbulletin.com.au/feed'
url: 'http://www.goldcoastbulletin.com.au/'
- name: The Mercury
feed: 'http://www.themercury.com.au/feed'
url: 'http://www.themercury.com.au/'
- name: NT News
feed: 'https://www.ntnews.com.au/news/rss'
url: 'http://www.ntnews.com.au/'
- name: 'Crikey'
feed: 'https://www.crikey.com.au/feed/'
url: 'https://www.crikey.com.au/'
- name: The Northern Star
feed: 'https://www.northernstar.com.au/feeds/rss/homepage/'
url: 'https://www.northernstar.com.au/'
- name: InDaily
feed: 'http://indaily.com.au/feed/'
url: 'https://indaily.com.au/'
- name: Independent Australia
feed: 'http://feeds.feedburner.com/IndependentAustralia'
url: 'https://independentaustralia.net/'
  - name: 'Townsville Bulletin'
feed: 'https://www.townsvillebulletin.com.au/news/rss'
url: 'http://www.townsvillebulletin.com.au/'
- name: The Shovel
feed: 'http://www.theshovel.com.au/feed/rss'
url: 'http://www.theshovel.com.au/'
- name: Business News
feed: 'http://www.businessnews.com.au/rssfeed/latest.rss'
url: 'https://www.businessnews.com.au/'
- name: Kalkine Media
feed: 'https://kalkinemedia.com/feed/'
url: 'https://kalkinemedia.com/'
  - name: 'Coffs Coast Advocate'
feed: 'https://www.coffscoastadvocate.com.au/feeds/rss/homepage/'
url: 'https://www.coffscoastadvocate.com.au/'
- name: International Business Times AU
feed: 'https://www.ibtimes.com.au/rss'
url: 'https://www.ibtimes.com.au/'
- name: Daily Examiner
feed: 'https://www.dailyexaminer.com.au/feeds/rss/homepage/'
url: 'https://www.dailyexaminer.com.au/'
- name: Australian Newsagency Blog
feed: 'https://www.newsagencyblog.com.au/feed/'
url: 'https://www.newsagencyblog.com.au/'
- name: Whitsunday Times
feed: 'https://www.whitsundaytimes.com.au/feeds/rss/homepage/'
url: 'https://www.whitsundaytimes.com.au/'
- name: Tasmanian Times
feed: 'https://tasmaniantimes.com/?/feeds/rss'
url: 'https://tasmaniantimes.com/'
- name: Ballina Advocate
feed: 'https://www.ballinaadvocate.com.au/feeds/rss/homepage/'
url: 'https://www.ballinaadvocate.com.au/'
  - name: 'Goulburn Post'
feed: 'https://www.goulburnpost.com.au/rss.xml'
url: 'https://www.goulburnpost.com.au/'
- name: <NAME>
feed: 'https://www.alicespringsnews.com.au/feed/'
url: 'https://www.alicespringsnews.com.au/'
- name: '<NAME>'
feed: 'http://feeds.sydneysun.com/rss/ae0def0d9b645403'
url: 'https://www.sydneysun.com/'
  - name: '16 News'
feed: 'http://www.16news.com.au/index.php/feed/'
url: 'http://www.16news.com.au/'
- name: The Australian Jewish News
feed: 'https://www.jewishnews.net.au/feed'
url: 'https://www.jewishnews.net.au/'
- name: '<NAME>'
feed: 'http://feeds.perthherald.com/rss/12878be9fc2ca79c'
url: 'https://www.perthherald.com/'
- name: Ozzie News
feed: 'https://www.ozzienews.com/feed'
url: 'https://www.ozzienews.com/'
|
local/rss/australia.yaml
|
en:
hyrax:
icons:
physical_instantiation: 'physical-instantiation-icon'
select_type:
physical_instantiation:
name: "Physical Instantiation"
description: "Physical instantiation works"
simple_form:
hints:
physical_instantiation:
holding_organization: 'The organization that manages this copy of the asset.'
local_instantiation_identifier: 'An identifier used locally for this copy of the asset.'
media_type: 'The high-level nature of the content of this copy of the asset.'
format: 'The physical media on which the content is stored.'
location: 'A description of where the item is located.'
generations: 'The version of this copy of the asset.'
digitization_date: 'The date on which this copy of the asset was digitized. Not relevant for all instantiations.'
          date: 'The date on which this copy of the asset was created. Valid date formats are YYYY-MM-DD, YYYY-MM, and YYYY (e.g. 2009-01-30, 2009-01, 2009).'
annotation: 'Any supplementary information or notes about this copy of the asset.'
rights_summary: 'Any information about copyright, usage, or access rights to this specific copy of the asset.'
rights_link: 'A URI pointing to a standardized declaration of rights, such as those from <a href="http://rightsstatements.org" target="_blank">RightsStatements.org</a>.'
dimensions: 'Measurement associated with the physical format, such as a 7 inch audio reel.'
standard: 'Identifies the broadcast standard video signal (e.g. NTSC, PAL) or the audio encoding (e.g. Dolby A, vertical cut).'
duration: 'Identifies the length of the recording, preferably in a timestamp format like HH:MM:SS.SSS or HH:MM:SS;FF.'
time_start: 'Identifies the timecode at which the content begins in this recording, preferably in a timestamp format like HH:MM:SS or HH:MM:SS;FF.'
tracks: 'The number and types of tracks found in this copy of the asset.'
channel_configuration: 'The arrangement or configuration of specific channels or layers of information in this copy of the asset, ex. 2-track mono, 8-track stereo, or video track with alpha channel.'
alternative_modes: 'List of any equivalent alternatives to the primary visual or sound information that exists in this copy of the asset, ex. ClosedCaptions, Subtitles, Language Dubs.'
colors: 'The colors used in the presentation of content in this copy of the asset.'
|
config/locales/physical_instantiation.en.yml
|
name: bcftools_isec
description: Apply set operations to VCF files
keywords:
- variant calling
- intersect
- union
- complement
- VCF
tools:
- isec:
description: |
Computes intersections, unions and complements of VCF files.
homepage: http://samtools.github.io/bcftools/bcftools.html
documentation: http://www.htslib.org/doc/bcftools.html
doi: 10.1093/bioinformatics/btp352
params:
- outdir:
type: string
description: |
The pipeline's output directory. By default, the module will
output files into `$params.outdir/<SOFTWARE>`
- publish_dir_mode:
type: string
description: |
Value for the Nextflow `publishDir` mode parameter.
Available: symlink, rellink, link, copy, copyNoFollow, move.
- enable_conda:
type: boolean
description: |
Run the module with Conda using the software specified
via the `conda` directive
- singularity_pull_docker_container:
type: boolean
description: |
        Force the workflow to pull and convert Docker containers, rather than
        downloading pre-built Singularity images directly
input:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- vcfs:
type: files
description: |
List containing 2 or more vcf files
e.g. [ 'file1.vcf', 'file2.vcf' ]
- tbis:
type: files
description: |
List containing the tbi index files corresponding to the vcfs input files
e.g. [ 'file1.vcf.tbi', 'file2.vcf.tbi' ]
output:
- meta:
type: map
description: |
Groovy Map containing sample information
e.g. [ id:'test', single_end:false ]
- results:
type: directory
      description: Folder containing the results of the set operations performed on the VCF files
pattern: "${prefix}"
- version:
type: file
description: File containing software version
pattern: "*.{version.txt}"
authors:
- "@joseespinosa"
- "@drpatelh"
|
software/bcftools/isec/meta.yml
|
title: Marriage
month: "03"
day: "11"
intro:
text:
- "Как пища есть необходимое условие жизни отдельного человека, так брак есть необходимое условие жизни человечества; и как злоупотребление пищей порождает зло для отдельного человека, так и злоупотребления брака порождают величайший вред для отдельного человека и для человечества."
author:
body:
- index: 1
text:
- "Сожительство, последствием которого может быть деторождение, есть истинный, действительный брак; всякие же обряды, заявления, условия не составляют брака и употребляются большею частью для того, чтобы признать все предшествующие сожительства не браком."
author:
- index: 2
text:
- "Ты можешь пренебречь своею обязанностью перед супругом или супругой, можешь избавиться от той печали, которую дают тебе эти обязанности, можешь уйти. Но что же ты найдешь?"
- "Ту же печаль, но без сознания исполненной обязанности."
author: <NAME>
- index: 3
text:
- "Брак, как условие, есть обязательство двух людей разных полов иметь детей только друг от друга. Нарушение этого условия есть обман, измена и преступление."
author:
- index: 4
text:
- "Великое дело то, когда две души чувствуют, что они соединены навеки с тем, чтобы поддерживать друг друга во всяком труде, во всяком горе, помогать друг другу во всяком страдании и быть соединенным друг с другом в те молчаливые невыразимые минуты последнего прощания."
author: <NAME>
- index: 5
text:
- "Какого великого блага могут достигнуть два любящие супруга, если они поставят своей целью совершенствование и будут помогать в этом друг другу: напоминанием, советом, примером?"
author:
- index: 6
text:
- "И приступили ко Христу фарисеи и, искушая Его, говорили Ему: по всякой ли причине позволительно человеку разводиться с женою своею?"
- "Он сказал им в ответ: не читали ли вы, что Сотворивший в начале мужчину и женщину сотворил их? И сказал: посему оставит человек отца и мать и прилепится к жене своей, и будут два одною плотью (Быт. 1, 27; 2, 24). Так что они уже не двое, но одна плоть. Итак, что Бог сочетал, того человек да не разлучает."
author: Мф., гл. 19, ст. 3, 4, 5, 6
- index: 7
text:
- "Всякий, разводящийся с женою своею и женящийся на другой, прелюбодействует, и всякий, женящийся на разведенной с мужем, прелюбодействует."
author: Лк., гл. 16, ст. 18
conclusion:
text:
- "Соединение мужчины и женщины для продолжения рода человеческого есть дело такое великое и важное и для каждого отдельного человека, и для всего человечества, что делать его нельзя кое-как и как кому вздумается и как кому приятно, а надо делать его так, как решили о нем и обдумали его жившие прежде нас мудрые и святые люди."
author:
|
pages/03/11.yml
|
geocoder:
extends:
file: docker-compose-common.yml
service: geocoder
links:
- parser
- elasticsearch
- influxdb
environment: &MONITORED_JVM_ENV
IS_MONITORED: 'true'
INFLUXDB_HOST: &INFLUXDB_HOST influxdb
INFLUXDB_PORT: &INFLUXDB_PORT 8086
INFLUXDB_USER: &INFLUXDB_USER root
INFLUXDB_PASSWORD: &INFLUXDB_PASSWORD root
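  # The &MONITORED_JVM_ENV and &INFLUXDB_* anchors let other monitored
  # services reuse these values elsewhere via standard YAML aliases,
  # e.g. `environment: *MONITORED_JVM_ENV`.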
parser:
extends:
file: docker-compose-common.yml
service: parser
loader:
extends:
file: docker-compose-common.yml
service: loader
links:
- elasticsearch
elasticsearch:
extends:
file: docker-compose-common.yml
service: elasticsearch
ui:
extends:
file: docker-compose-common.yml
service: ui
links:
- geocoder
# services that are not extended
cadvisor:
image: google/cadvisor:0.16.0
ports:
- "9090:8080"
volumes:
- /:/rootfs:ro
- /var/run:/var/run:rw
- /sys:/sys:ro
- /var/lib/docker/:/var/lib/docker:ro
links:
- influxdb
command: -storage_driver=influxdb -storage_driver_db=cadvisor -storage_driver_host=influxdb:8086
influxdb:
image: tutum/influxdb:0.8.8
ports:
- 8083:8083 # Admin Site
- 8086:8086 # API
environment:
# Extra auto-create magic provided by this image
PRE_CREATE_DB: "cadvisor;metrics"
grafana:
build: docker/grafana
ports:
- 3000:3000
links:
- influxdb
# Logspout setup taken from:
# https://github.com/gliderlabs/logspout#route-all-container-output-to-remote-syslog
logspout:
image: gliderlabs/logspout:master
ports:
- "8000:80"
volumes:
- /var/run/docker.sock:/tmp/docker.sock
links:
- logstash
environment:
LOGSPOUT: ignore
# Logstash only supports the older RFC 3164 syslog protocol
SYSLOG_FORMAT: rfc3164
# Use syslog+tcp:// if UDP is not supported
command: syslog://logstash:5514
logstash:
build: docker/logstash
expose:
- 5514
links:
- elasticsearch
environment:
LOGSPOUT: ignore
command: logstash --verbose -f /opt/grasshopper.conf -e 'input{ syslog{ port => 5514 } } output{ elasticsearch{ protocol => "http" host => "elasticsearch" } }'
kibana:
build: docker/kibana
environment:
LOGSPOUT: ignore
links:
- elasticsearch
ports:
- "5601:5601"
|
docker-compose-full.yml
|
en:
nav:
networks: Networks
networks_description: "Manage Networks"
network_map: "Net-Node Map"
network_map_description: "Show defined relationships and status between node and networks"
interfaces: "Bus Interfaces"
interfaces_description: "Override default bus order for network interfaces"
scaffold:
networks: Networks
networks_description: "Manage Networks"
allocations: "Allocated IPs"
allocations_description: "Track Network IP Assignments"
# UI
nets: &network_names
admin: "Admin"
public: "Public"
private: "Private"
storage: "Storage"
common: &network_common
name: "Name"
description: "Description"
deployment: "Deployment"
router: "Router"
address: "Address"
pref: "Preference"
vlan: "VLAN"
bridge: "Bridge"
team: "Team"
pbr: "PBR"
network: "Network"
pref: "Pref"
conduit: "Conduit(s)"
group: "Group"
category: "Category"
ranges: "Range(s)"
first: "First IP"
last: "Last IP"
v6prefix: "IPv6 Prefix"
role: role
save: "Save"
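  # The labels above are pulled into the view sections below through YAML
  # merge keys (`<<: *network_common`), so every index/show page shares one
  # set of column names.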
roles:
network-server: "Hardware NetBus Map"
network-admin: "Admin Network"
network-public: "Public Network"
network-private: "Private Network"
network-storage: "Storage Network"
network-bmc: "BMC Network"
network-pilot: "Pilot Network"
network-lldpd: "LLDPD Network Service"
networks:
index:
title: "Installed Networks"
add: "Add Network"
generated: "[category]-[group]"
<<: *network_common
show:
<<: *network_common
role_missing: "WARNING: Missing Network Role Mapping!"
map:
title: "Network-Node Map"
node: "Node"
<<: *network_common
network_ranges:
index:
title: "Network Ranges"
network: "Network"
<<: *network_common
network_routers:
index:
title: "Network Routers"
<<: *network_common
show:
title: "Router Detail"
<<: *network_common
roles:
index:
<<: *network_common
interfaces:
index:
title: "Bus Interface Maps"
pattern: "Pattern to Match"
bus_order: "Bus Order"
|
core/rails/config/locales/network/en.yml
|
--- !<MAP>
contentType: "MAP"
firstIndex: "2018-10-20 18:18"
game: "Unreal Tournament 2004"
name: "ONS-FortKneelis_2"
author: "es0terica"
description: "The Liandri Mining Corporation stumbled upon this deserted shrine atop\
\ a set of cliffs. Archaeologists went nuts, and the corporation set up the caverns\
\ for Onslaught play."
releaseDate: "2004-09"
attachments:
- type: "IMAGE"
name: "ONS-FortKneelis_2_shot_1.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament%202004/Maps/Onslaught/F/ONS-FortKneelis_2_shot_1.png"
- type: "IMAGE"
name: "ONS-FortKneelis_2_shot_4.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament%202004/Maps/Onslaught/F/ONS-FortKneelis_2_shot_4.png"
- type: "IMAGE"
name: "ONS-FortKneelis_2_shot_2.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament%202004/Maps/Onslaught/F/ONS-FortKneelis_2_shot_2.png"
- type: "IMAGE"
name: "ONS-FortKneelis_2_shot_3.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament%202004/Maps/Onslaught/F/ONS-FortKneelis_2_shot_3.png"
originalFilename: "ons-fortkneelis_2.zip"
hash: "93b3fa0ea2e3d182e87993f45811cf6a74303499"
fileSize: 2522751
files:
- name: "FK_04.utx"
fileSize: 2198606
hash: "31eaf9809fa87699f472ab3e836b5da4c1932d0c"
- name: "ONS-FortKneelis_2.ut2"
fileSize: 5067856
hash: "c5f6e7d04025a19ab65301c82f9d524cec311908"
otherFiles: 4
dependencies: {}
downloads:
- url: "https://f002.backblazeb2.com/file/unreal-archive-files/Unreal%20Tournament%202004/Maps/Onslaught/F/ons-fortkneelis_2.zip"
main: true
repack: false
state: "OK"
- url: "http://ut2004.ut-files.com/index.php?dir=Maps/Onslaught/&file=ons-fortkneelis_2.zip"
main: false
repack: false
state: "OK"
- url: "https://files.vohzd.com/unrealarchive/Unreal%20Tournament%202004/Maps/Onslaught/F/9/3/b3fa0e/ons-fortkneelis_2.zip"
main: false
repack: false
state: "OK"
- url: "https://unreal-archive-files.eu-central-1.linodeobjects.com/Unreal%20Tournament%202004/Maps/Onslaught/F/9/3/b3fa0e/ons-fortkneelis_2.zip"
main: false
repack: false
state: "OK"
deleted: false
gametype: "Onslaught"
title: "Fort Kneelis"
playerCount: "16-20"
themes:
Industrial: 0.4
Natural: 0.6
bots: true
|
content/Unreal Tournament 2004/Maps/Onslaught/F/9/3/b3fa0e/ons-fortkneelis_2_[93b3fa0e].yml
|
---
#-------------------------------------------------------------------------------
# VARIABLES
#-------------------------------------------------------------------------------
- name: 'include variables'
include_vars: "{{lookup('first_found', params)}}"
vars:
params:
files:
- "vars/{{ansible_distribution_major_version}}.yml"
- 'vars/main.yml'
tags:
- install
- mariadb
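# (The first_found lookup prefers a vars file named after the distribution
#  major version and falls back to vars/main.yml when none matches.)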
#-------------------------------------------------------------------------------
# PACKAGING
#-------------------------------------------------------------------------------
- name: 'package install mariadb'
package:
state: 'present'
name: "{{package_name_mariadb}}"
tags:
- install
- mariadb
- name: 'package install mariadb-server'
package:
state: 'present'
name: "{{package_name_mariadb_server}}"
register: register_package_install_mariadb_server
tags:
- install
- mariadb
#-------------------------------------------------------------------------------
# FILES
#-------------------------------------------------------------------------------
- name: 'file create mariadb_backup.dir'
file:
state: 'directory'
path: "{{file_dst_mariadb_backup_dir}}"
mode: "{{file_mode_mariadb_backup_dir}}"
owner: "{{file_owner_mariadb_backup_dir}}"
group: "{{file_group_mariadb_backup_dir}}"
tags:
- install
- mariadb
- name: 'file create mariadb_etc.dir'
file:
state: 'directory'
path: "{{file_dst_mariadb_etc_dir}}"
mode: "{{file_mode_mariadb_etc_dir}}"
owner: "{{file_owner_mariadb_etc_dir}}"
group: "{{file_group_mariadb_etc_dir}}"
tags:
- install
- mariadb
- name: 'file create credentials.cnf'
template:
src: "{{file_src_credentials_cnf}}"
dest: "{{file_dst_credentials_cnf}}"
mode: "{{file_mode_credentials_cnf}}"
owner: "{{file_owner_credentials_cnf}}"
group: "{{file_group_credentials_cnf}}"
no_log: 'true'
tags:
- install
- mariadb
- name: 'file create mysql-clients.cnf'
template:
src: "{{file_src_mysql_clients_cnf}}"
dest: "{{file_dst_mysql_clients_cnf}}"
mode: "{{file_mode_mysql_clients_cnf}}"
owner: "{{file_owner_mysql_clients_cnf}}"
group: "{{file_group_mysql_clients_cnf}}"
tags:
- install
- mariadb
- name: 'file create mariadb_run-backup'
template:
src: "{{file_src_mariadb_run_backup}}"
dest: "{{file_dst_mariadb_run_backup}}"
mode: "{{file_mode_mariadb_run_backup}}"
owner: "{{file_owner_mariadb_run_backup}}"
group: "{{file_group_mariadb_run_backup}}"
tags:
- install
- mariadb
- name: 'stat file monit.d'
stat:
path: "{{file_dst_monit_d}}"
register: register_monit_d
when: (mariadb_monitor_monit_state is match('true|yes|enable'))
tags:
- install
- mariadb
- name: 'file create monit.d'
file:
state: 'directory'
path: "{{file_dst_monit_d}}"
mode: "{{file_mode_monit_d}}"
owner: "{{file_owner_monit_d}}"
group: "{{file_group_monit_d}}"
when: (mariadb_monitor_monit_state is match('true|yes|enable')) and (register_monit_d.stat.exists == False)
tags:
- install
- mariadb
#-------------------------------------------------------------------------------
# COMMANDS
#-------------------------------------------------------------------------------
- name: 'command check service monit'
shell: "{{command_cmd_check_service_monit}}"
register: register_check_service_monit
changed_when: (register_check_service_monit.rc > 1)
failed_when: (register_check_service_monit.rc > 1)
check_mode: 'no'
when: (mariadb_monitor_monit_state is match('true|yes|enable'))
tags:
- install
- mariadb
|
mariadb/tasks/main/install.yml
|
name: CI
# Controls when the workflow will run
on:
  # Triggers the workflow on push events, but only for the master branch
push:
branches: [master]
# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
# This workflow contains a single job called "build"
build:
# The type of runner that the job will run on
runs-on: ubuntu-latest
# Steps represent a sequence of tasks that will be executed as part of the job
steps:
# Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
- uses: actions/checkout@v2
- name: Use Node.js
uses: actions/setup-node@v1
with:
node-version: "14.x"
      # Cache node_modules to speed up subsequent Node builds
- name: Cache node modules
uses: actions/cache@v1
id: cache
with:
          path: node_modules
key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
restore-keys: |
${{ runner.os }}-node-
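      # (Note: caching node_modules directly only pays off while the Node
      # version and OS stay fixed; caching ~/.npm and re-installing is the
      # more robust pattern.)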
- name: Install Dependencies
if: steps.cache.outputs.cache-hit != 'true'
run: npm i
- name: Generate Pages
run: |
npm run build
- name: Deploy 🚀
uses: JamesIves/github-pages-deploy-action@4.1.4
with:
repository-name: linqing01007/vue-mimail
branch: ghPages # The branch the action should deploy to.
folder: ./dist # The folder the action should deploy.
ssh-key: ${{ secrets.MiMail }}
      # Sync to Gitee
      # - name: Sync to Gitee
      #   uses: wearerequired/git-mirror-action@master
      #   env:
      #     # Note: configure GITEE_RSA_PRIVATE_KEY under Settings->Secrets
      #     SSH_PRIVATE_KEY: ${{ secrets.GITEE_RSA_PRIVATE_KEY }}
      #   with:
      #     # Note: replace with your GitHub source repository address
      #     source-repo: <EMAIL>:doocs/advanced-java.git
      #     # Note: replace with your Gitee destination repository address
      #     destination-repo: <EMAIL>:Doocs/advanced-java.git
      # # Publish Gitee Pages
      # - name: Build Gitee Pages
      #   uses: yanglbme/gitee-pages-action@main
      #   with:
      #     # Note: replace with your Gitee username
      #     gitee-username: yanglbme
      #     # Note: configure GITEE_PASSWORD under Settings->Secrets
      #     gitee-password: ${{ secrets.GITEE_PASSWORD }}
      #     # Note: replace with your Gitee repository; the repository name is strictly case-sensitive, so enter it exactly or the action will fail
      #     gitee-repo: doocs/advanced-java
      #     # Branch to deploy; defaults to master. Other branches must be specified explicitly (and must exist)
      #     branch: main
|
.github/workflows/master.yml
|
name: Utoipa build
on:
push:
paths:
- "**.rs"
- "**Cargo.toml"
pull_request:
branches: [ master ]
paths:
- "**.rs"
- "**Cargo.toml"
env:
CARGO_TERM_COLOR: always
jobs:
test:
strategy:
matrix:
testset:
- utoipa
- utoipa-gen
- utoipa-swagger-ui
fail-fast: true
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
with:
fetch-depth: 2
- uses: actions/cache@v2
with:
path: |
~/.cargo/bin/
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
target/
key: ${{ runner.os }}-cargo-${{ hashFiles('**/Cargo.lock') }}
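      # Work out which crates changed in the pushed commit so that only the
      # affected test sets run below. Note: `::set-output` is the legacy output
      # mechanism; on current runners the equivalent is appending `name=value`
      # lines to "$GITHUB_OUTPUT".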
- name: Resolve changed paths
id: changes
run: |
root_changed=false
gen_changed=false
swagger_changed=false
while read -r change; do
if [[ "$change" == "utoipa-gen" ]]; then
gen_changed=true
elif [[ "$change" == "utoipa-swagger-ui" ]]; then
swagger_changed=true
else
root_changed=true
fi
          done < <(git diff --name-only ${{ github.sha }}~ ${{ github.sha }} | grep '\.rs' | awk -F \/ '{print $1}')
echo "::set-output name=root_changed::$root_changed"
echo "::set-output name=gen_changed::$gen_changed"
echo "::set-output name=swagger_changed::$swagger_changed"
- name: Run tests
run: |
if [[ "${{ matrix.testset }}" == "utoipa" ]] && [[ ${{ steps.changes.outputs.root_changed }} == true ]]; then
cargo test --features uuid
cargo test --test path_response_derive_test_no_serde_json --no-default-features
cargo test --test component_derive_no_serde_json --no-default-features
cargo test --test path_derive_actix --test path_parameter_derive_actix --features actix_extras
cargo test --test component_derive_test --features chrono,decimal,uuid
cargo test --test component_derive_test --features chrono_with_format
cargo test --test path_derive_rocket --features rocket_extras,json
elif [[ "${{ matrix.testset }}" == "utoipa-gen" ]] && [[ ${{ steps.changes.outputs.gen_changed }} == true ]]; then
cargo test -p utoipa-gen --features utoipa/actix_extras
elif [[ "${{ matrix.testset }}" == "utoipa-swagger-ui" ]] && [[ ${{ steps.changes.outputs.swagger_changed }} == true ]]; then
cargo test -p utoipa-swagger-ui --features actix-web,rocket
fi
|
.github/workflows/build.yaml
|
name: Release
on:
push:
branches: [ $default-branch ]
tags:
- "*"
workflow_dispatch:
jobs:
build:
runs-on: macos-10.15
steps:
- uses: actions/checkout@v2
with:
fetch-depth: 0
- name: Setup Rust environment
uses: ATiltedTree/setup-rust@v1.0.4
with:
rust-version: stable
- name: Setup Node.js environment
uses: actions/setup-node@v2.5.0
with:
node-version: 17.x
- uses: actions/cache@v2
with:
path: |
~/.cargo/bin/
~/.cargo/registry/index/
~/.cargo/registry/cache/
~/.cargo/git/db/
src-tauri/target
key: ${{ runner.os }}-cargo
- name: Install Yarn
run: npm install -g yarn
- name: Install Dependencies
run: yarn
- name: Build Front-End
run: yarn build:vite
- name: Build
run: yarn tauri build
- name: Read Utility Version
run: echo "VERSION=$(cat src-tauri/tauri.conf.json | grep version | head -n1 | awk '{ print substr($2, 2, length($2)-2) }')" >> $GITHUB_ENV
- name: Move DMG Image
run: mv "./src-tauri/target/release/bundle/dmg/Tongfang Utility_"$VERSION"_x64.dmg" "./Tongfang-Utility-macOS.dmg"
      - name: Move Update Bundle
run: mv "./src-tauri/target/release/bundle/macos/Tongfang Utility.app.tar.gz" "./Tongfang-Utility-macOS-update.app.tar.gz"
- name: Create Release
id: create_release
uses: ncipollo/release-action@v1.9.0
with:
allowUpdates: true
token: ${{ secrets.token }}
- name: Upload Artifacts
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.token }}
with:
upload_url: ${{ steps.create_release.outputs.upload_url }}
asset_name: Tongfang-Utility-macOS.dmg
asset_path: ./Tongfang-Utility-macOS.dmg
asset_content_type: application/octet-stream
- name: Upload Artifacts
uses: actions/upload-release-asset@v1
env:
GITHUB_TOKEN: ${{ secrets.token }}
with:
upload_url: ${{ steps.create_release.outputs.upload_url }}
asset_name: Tongfang-Utility-macOS-update.app.tar.gz
asset_path: ./Tongfang-Utility-macOS-update.app.tar.gz
asset_content_type: application/gzip
|
.github/workflows/release.yml
|
copyright: Copyright © 2020 Breakside Inc.
kind: class
introduced: 2020.1.0
inherits: JSObject
summary: An action that is included in a `UIAlertController`
topics:
-
name: Creating an Alert Action
members:
-
name: initWithTitle
kind: init
summary: Create an action with given properties
arguments:
-
name: title
type: String
summary: The title for the action button
-
name: style
type: UIAlertAction.Style
summary: The style for the action button
-
name: action
type: function
summary: The function to call when the action button is clicked
-
name: target
type: object
default: undefined
summary: The object to use as `this` when calling the action
function
-
name: Title
members:
-
name: title
type: String
summary: The title of the action
-
name: Style
members:
-
name: Style
kind: enum
suffix: enum
summary: The possible action styles
topics:
-
name: Options
members:
-
name: normal
summary: A normal button style
-
name: default
summary: The button style that indicates this is the default action
-
name: cancel
                    summary: The button style that indicates this is the action used to
                      cancel the request
-
name: destructive
summary: The button style that indicates this action will perform
a destructive operation like deleting data
-
name: style
type: Style
summary: The style of this action
-
name: Action
members:
-
name: action
type: function
summary: The function to call when this action is clicked
-
name: target
            type: object
summary: The object to use as `this` when calling the `action` function
|
Documentation/Code/UIKit/UIAlertAction.doc.yaml
|
documentType: LandingData
title: Azure Sentinel Preview documentation
metadata:
  title: Azure Sentinel Preview documentation - Tutorials, Quickstarts | Microsoft Docs
meta.description: Azure Sentinel is a cloud-native SIEM that provides intelligent security analytics for your entire enterprise at cloud scale.
services: sentinel
author: cmcclister
manager: carolz
ms.service: sentinel
ms.tgt_pltfrm: na
ms.devlang: na
ms.topic: landing-page
ms.date: 02/28/19
ms.author: cmcclister
abstract:
  description: Azure Sentinel Preview is a cloud-native SIEM that provides intelligent security analytics at cloud scale for your entire enterprise. Get limitless cloud speed and scale to help you focus on what really matters. Easily collect data from your cloud or on-premises assets, Office 365, Azure resources, and other clouds. Effectively detect threats with built-in machine learning from Microsoft security analytics experts. Automate threat response with built-in orchestration and automation playbooks.
sections:
- title: 5-Minute Quickstarts
  items:
  - type: paragraph
    text: Learn how to onboard your data to Azure Sentinel, and get visibility into your data and potential threats.
- type: list
style: icon48
items:
- image:
src: media/index/deploy.svg
      text: Onboard Azure Sentinel
href: /azure/sentinel/quickstart-onboard
- image:
src: media/index/get-started.svg
      text: Get started with Azure Sentinel
href: /azure/sentinel/quickstart-get-visibility
- title: Step-by-Step Tutorials
  items:
  - type: paragraph
    text: Learn how to define security policies, protect your resources from malicious activity, and respond to security alerts and incidents.
- type: list
style: ordered
items:
- html: <a href="/azure/sentinel/tutorial-detect-threats">使用 Azure Sentinel 偵測威脅</a>
- html: <a href="/azure/sentinel/tutorial-respond-threats-playbook">設定自動化的威脅回應</a>
- title: 透過 Microsoft Learn 增進您的技巧
items:
- type: list
style: cards
className: cardsFTitle
items:
      - title: Introduction to security in Azure
href: 'https://docs.microsoft.com/learn/azure'
image:
src: media/index/tutorial.svg
href: 'https://docs.microsoft.com/learn/azure'
- type: paragraph
text: <a href="/learn/browse/?term=security">更多互動式學習...</a>
- title: 參考
items:
- type: list
style: cards
className: cardsD
items:
- html: '<a href="https://azure.microsoft.com/updates/?product=sentinel">服務更新</a>'
|
articles/sentinel/index.yml
|
name: continuous-integration
on:
push:
branches:
- main
- master
tags:
- 'v*'
pull_request:
jobs:
test-with-cov:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [3.8]
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install Python dependencies
run: |
python -m pip install --upgrade pip
pip install wheel
pip install -e .[testing]
- name: Run pytest
run: |
pytest --cov=gptables --cov-report=xml --cov-report=term-missing
coverage xml
- name: Upload to Codecov
if: github.repository == 'best-practice-impact/gptables'
uses: codecov/codecov-action@v1
with:
name: gptables-pytests-py3.8
flags: pytests
file: ./coverage.xml
fail_ci_if_error: true
linux:
runs-on: ubuntu-latest
strategy:
matrix:
python-version: [3.6, 3.7, 3.8, 3.9]
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install Python dependencies
run: |
python -m pip install --upgrade pip
pip install wheel
pip install --upgrade-strategy eager -e .[testing]
- name: Run pytest
run: pytest
windows:
name: Tests on Windows
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
      - name: Set up Python 3.8
uses: actions/setup-python@v2
with:
python-version: 3.8
- uses: actions/cache@v2
with:
path: ~\AppData\Local\pip\Cache
key: ${{ runner.os }}-pip-${{ hashFiles('setup.py') }}
restore-keys: |
${{ runner.os }}-pip-
- name: Install Python dependencies
run: |
python -m pip install --upgrade pip
pip install wheel
pip install --upgrade-strategy eager -e .[testing]
- name: Run pytest
run: pytest
build_and_deploy_docs:
name: Build and deploy docs to Pages
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up Python 3.7
uses: actions/setup-python@v2
with:
python-version: 3.7
- uses: actions/cache@v2
with:
          path: ~/.cache/pip
key: ${{ runner.os }}-pip-${{ hashFiles('setup.py') }}
restore-keys: |
${{ runner.os }}-pip-
- name: Install Python dependencies
run: |
python -m pip install --upgrade pip
pip install wheel
pip install --upgrade-strategy eager -e .[docs]
- name: Build the book
run: |
(cd docs && make html)
publish:
name: Publish to PyPi
needs: [linux, windows, test-with-cov]
if: startsWith(github.event.ref, 'refs/tags/v')
runs-on: ubuntu-latest
steps:
- name: Checkout source
uses: actions/checkout@v2
- name: Set up Python 3.7
uses: actions/setup-python@v1
with:
python-version: 3.7
- name: Build package
run: |
pip install wheel
python setup.py sdist bdist_wheel
- name: Publish
uses: pypa/gh-action-pypi-publish@v1.1.0
with:
user: __token__
password: ${{ secrets.PYPI_KEY }}
|
.github/workflows/ci.yml
|
resource_types:
- name: git-branches
type: registry-image
source:
repository: aoldershaw/git-branches-resource
resources:
- name: feature-branches
type: git-branches
source:
uri: https://github.com/aoldershaw/examples
branch_regex: 'feature/(?P<feature>.*)'
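    # The named capture group `feature` is exposed on each detected branch as
    # `groups.feature`; the jobs below use it for pipeline instance vars and
    # Terraform workspace names.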
- name: examples
type: git
source:
uri: https://github.com/aoldershaw/examples
jobs:
- name: set-feature-pipelines
plan:
- in_parallel:
- get: feature-branches
trigger: true
- get: examples
- load_var: branches
file: feature-branches/branches.json
- across:
- var: branch
values: ((.:branches))
set_pipeline: dev
file: examples/pipelines/multi-branch/template.yml
instance_vars: {feature: ((.:branch.groups.feature))}
vars: {branch: ((.:branch.name))}
- name: cleanup-inactive-workspaces
plan:
- get: feature-branches
passed: [set-feature-pipelines]
trigger: true
- load_var: active_branches
file: feature-branches/branches.json
- task: cleanup
config:
platform: linux
image_resource:
type: registry-image
source: {repository: hashicorp/terraform}
params:
ACTIVE_BRANCHES: ((.:active_branches))
TERRAFORM_BACKEND_CONFIG:
terraform:
backend:
gcs:
bucket: concourse-examples
prefix: multi-branch/terraform
credentials: ((concourse_artifacts_json_key))
run:
path: sh
args:
- -c
- |
set -euo pipefail
apk add jq
active_features="$(echo "$ACTIVE_BRANCHES" | jq '[.[].groups.feature]')"
echo "$TERRAFORM_BACKEND_CONFIG" > backend.tf.json
terraform init
active_workspaces="$(terraform workspace list | grep -v '^[*]' | tr -d ' ' | jq --raw-input --slurp 'split("\n") | map(select(. != ""))')"
jq -nr "$active_workspaces - $active_features | .[]" | while read extra_workspace
do
echo "deleting workspace $extra_workspace"
terraform workspace select "$extra_workspace"
terraform init
terraform destroy -auto-approve
terraform workspace select default
terraform workspace delete "$extra_workspace"
done
|
pipelines/multi-branch/tracker.yml
|
name: pytorch-widedeep
on:
push:
branches:
- master
pull_request:
branches:
- master
jobs:
codestyle:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up Python 3.9
uses: actions/setup-python@v2
with:
python-version: 3.9
- name: Install dependencies
run: |
python -m pip install --upgrade pip
python -m pip install black flake8
- name: Code Style (Black/Flake8)
run: |
# Black code style
black --check --diff pytorch_widedeep tests examples setup.py
# Stop the build if there are Python syntax errors or undefined names
flake8 . --count --select=E901,E999,F821,F822,F823 --ignore=E266 --show-source --statistics
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --ignore=E203,E266,E501,E722,F401,F403,F405,F811,W503,C901 --statistics
test:
runs-on: ubuntu-latest
strategy:
fail-fast: true
matrix:
python-version: [3.7, 3.8, 3.9]
steps:
- uses: actions/checkout@v2
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v2
with:
python-version: ${{ matrix.python-version }}
- name: Install dependencies
run: |
python -m pip install --upgrade pip
python -m pip install pytest-cov codecov .
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
- name: Test with pytest
run: |
pytest --doctest-modules pytorch_widedeep --cov-report xml --cov-report term --disable-pytest-warnings --cov=pytorch_widedeep tests/
- name: Upload coverage
uses: actions/upload-artifact@v2
with:
name: coverage${{ matrix.python-version }}
path: .coverage
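  # Each matrix job above uploads its raw .coverage file; the `finish` job
  # downloads them all, combines them, and enforces the 95% threshold before
  # reporting to Codecov.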
finish:
needs: test
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up Python 3.9
uses: actions/setup-python@v2
with:
python-version: 3.9
- name: Install dependencies
run: |
python -m pip install --upgrade pip
python -m pip install coverage
- name: Download all artifacts
# Downloads coverage1, coverage2, etc.
uses: actions/download-artifact@v2
- name: Convert coverage
run: |
coverage combine coverage*/.coverage*
coverage report --fail-under=95
coverage xml
- name: upload coverage to Codecov
uses: codecov/codecov-action@v1
with:
fail_ci_if_error: true
|
.github/workflows/build.yml
|
---
#
# ci/pipeline.yml
#
# Pipeline structure file for a BOSH Release pipeline
#
# DO NOT MAKE CHANGES TO THIS FILE. Instead, modify
# ci/settings.yml and override what needs overridden.
# This uses spruce, so you have some options there.
#
# author: <NAME> <<EMAIL>>
# created: 2016-03-30
meta:
name: (( param "Please name your pipeline" ))
release: (( grab meta.name ))
target: (( param "Please identify the name of the target Concourse CI" ))
url: (( param "Please specify the full url of the target Concourse CI" ))
pipeline: (( concat meta.name "-boshrelease" ))
manifest:
path: (( concat "manifests/" meta.name ".yml" ))
git:
email: (( param "Please provide the git email for automated commits" ))
name: (( param "Please provide the git name for automated commits" ))
image:
name: starkandwayne/concourse
tag: latest
aws:
bucket: (( concat meta.pipeline "-pipeline" ))
region_name: us-east-1
access_key: (( param "Please set your AWS Access Key ID for your pipeline S3 Bucket" ))
secret_key: (( param "Please set your AWS Secret Key ID for your pipeline S3 Bucket" ))
github:
uri: (( concat "<EMAIL>:" meta.github.owner "/" meta.github.repo ))
owner: (( param "Please specify the name of the user / organization that owns the Github repository" ))
repo: (( param "Please specify the name of the Github repository" ))
branch: master
private_key: (( param "Please generate an SSH Deployment Key for this repo and specify it here" ))
access_token: (( param "Please generate a Personal Access Token to be used for creating github releases (do you have a ci-bot?)" ))
bosh-lite:
target: (( param "Please specify the BOSH target URI for the bosh-lite to run test deployments against" ))
cacert: (( param "Please specify the BOSH Director Root CA cert" ))
username: admin
password: (( param "Please specify the BOSH Director admin password" ))
deployment: (( concat meta.name "-testflight" ))
shout:
topic: (( concat meta.name "-pipeline" ))
url: (( param "Please specify the Shout! endpoint" ))
username: (( param "Please specify the Shout! operations username" ))
password: (( param "Please specify the Shout! operations password" ))
metadata:
build-team-name: $BUILD_TEAM_NAME
build-job-name: $BUILD_JOB_NAME
build-pipeline-name: $BUILD_PIPELINE_NAME
links:
pipeline: (( concat meta.url "/teams/$BUILD_TEAM_NAME/pipelines/$BUILD_PIPELINE_NAME" ))
build: (( concat meta.shout.links.pipeline "/jobs/$BUILD_JOB_NAME/builds/$BUILD_NAME" ))
groups:
- name: (( grab meta.pipeline ))
jobs:
- testflight
- pre
- rc
- shipit
- major
- minor
- name: blobs
jobs:
- genesis
- safe
- esuf
- cf
- cf7
- cloudfoundry-utils
- bosh-backup-and-restore
- bosh-cli
- credhub-cli
- fly
- spruce
|
ci/pipelines/base.yml
|
--- !<MAP>
contentType: "MAP"
firstIndex: "2018-11-01 22:19"
variationOf: "e3bdd84850ec5167eaeec87ccb87f163ae0c8f5b"
game: "Unreal Tournament 2004"
name: "ONS-Dinora"
author: "Sjoerd -Hourences- <NAME>"
description: "As the raging storm retreats, an epic battle emerges on the rocky coast\
\ of Dinora island. War erupts between stone outcrops and ancient shipwrecks, whilst\
\ the scorching sun struggles through the heavens, once again capturing the land\
\ beneath."
releaseDate: "2004-09"
attachments:
- type: "IMAGE"
name: "ons-dinora_shot_3f7a9787_1.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament%202004/Maps/Onslaught/D/ons-dinora_shot_3f7a9787_1.png"
originalFilename: "ons-dinora.7z"
hash: "3f7a978741d8af6ab10f4aede3471cd8f282aa38"
fileSize: 10397895
files:
- name: "HourDinoraTex.utx"
fileSize: 18741403
hash: "22ca7dbe0faa1ef3f7117a71a93dfaa4e86136e5"
- name: "ONS-Dinora.ut2"
fileSize: 8668078
hash: "1603691d4af3a18c914673d9cd4a59fa9789d053"
- name: "HourDinora.usx"
fileSize: 4056062
hash: "2d579af82f16c14cf290446669a2a155ffc4a81a"
otherFiles: 1
dependencies:
HourDinora.usx:
- status: "OK"
name: "HourDinoraTex"
ONS-Dinora.ut2:
- status: "OK"
name: "HourDinora"
- status: "OK"
name: "HourDinoraTex"
downloads:
- url: "https://gamebanana.com/maps/download/15370"
main: false
repack: false
state: "OK"
- url: "https://f002.backblazeb2.com/file/unreal-archive-files/Unreal%20Tournament%202004/Maps/Onslaught/D/ons-dinora.7z"
main: true
repack: false
state: "OK"
- url: "https://files.vohzd.com/unrealarchive/Unreal%20Tournament%202004/Maps/Onslaught/D/3/f/7a9787/ons-dinora.7z"
main: false
repack: false
state: "OK"
- url: "https://unreal-archive-files.eu-central-1.linodeobjects.com/Unreal%20Tournament%202004/Maps/Onslaught/D/3/f/7a9787/ons-dinora.7z"
main: false
repack: false
state: "OK"
deleted: false
gametype: "Onslaught"
title: "Dinora Beach Region"
playerCount: "8-16"
themes: {}
bots: true
|
content/Unreal Tournament 2004/Maps/Onslaught/D/3/f/7a9787/ons-dinora_[3f7a9787].yml
|
OpenLoyalty\Component\Campaign\Domain\Campaign:
type: entity
repositoryClass: OpenLoyalty\Component\Campaign\Infrastructure\Persistence\Doctrine\Repository\DoctrineCampaignRepository
table: ol__campaign
id:
campaignId:
type: campaign_id
column: id
fields:
reward:
type: string
moreInformationLink:
type: text
nullable: true
column: more_information_link
pushNotificationText:
type: text
nullable: true
column: push_notification_text
active:
type: boolean
options:
default: 0
pointValue:
type: decimal
scale: 2
precision: 14
nullable: true
costInPoints:
type: decimal
scale: 2
precision: 14
column: cost_in_points
levels:
type: campaign_levels_json_array
segments:
type: campaign_segments_json_array
unlimited:
type: boolean
options:
default: 1
singleCoupon:
type: boolean
options:
default: 1
limit:
type: integer
nullable: true
column: usage_limit
limitPerUser:
type: integer
nullable: true
column: limit_per_user
coupons:
type: campaign_coupons_json_array
rewardValue:
type: decimal
scale: 2
precision: 14
nullable: true
column: reward_value
tax:
type: integer
nullable: true
column: tax
taxPriceValue:
type: decimal
scale: 2
precision: 14
nullable: true
column: tax_price_value
labels:
type: labels_json_array
column: labels
nullable: true
daysInactive:
type: integer
nullable: true
column: days_inactive
daysValid:
type: integer
nullable: true
column: daysValid
transactionPercentageValue:
type: integer
nullable: true
column: transaction_percentage_value
categories:
type: campaign_categories_json_array
nullable: true
featured:
type: boolean
options:
default: 1
public:
type: boolean
options:
default: 0
connectType:
type: string
nullable: true
column: connect_type
earningRuleId:
type: guid
nullable: true
column: earning_rule_id
fulfillmentTracking:
type: boolean
options:
default: false
column: fulfillment_tracking
embedded:
campaignActivity:
class: OpenLoyalty\Component\Campaign\Domain\Model\CampaignActivity
campaignVisibility:
class: OpenLoyalty\Component\Campaign\Domain\Model\CampaignVisibility
brandIcon:
class: OpenLoyalty\Component\Campaign\Domain\Model\CampaignBrandIcon
oneToMany:
photos:
targetEntity: OpenLoyalty\Component\Campaign\Domain\Entity\CampaignPhoto
mappedBy: campaign
fetch: EAGER
cascade: ["persist", "remove"]
orphanRemoval: true
onDelete: CASCADE
|
lib/OpenLoyalty/Component/Campaign/Infrastructure/Persistence/Doctrine/ORM/Campaign.orm.yml
|
---
- name: create workdir
command: mktemp -d /tmp/degoss.XXXXXXXXXX
register: workdir
changed_when: degoss_changed_when
# set play facts
- name: establish facts
set_fact:
degoss_tmp_root: "{{ workdir.stdout.strip() }}"
degoss_test_root: "{{ workdir.stdout.strip() }}/tests"
degoss_goss_install_dir: "{{ workdir.stdout.strip() }}/bin"
degoss_goss_bin: "{{ workdir.stdout.strip() }}/bin/goss"
goss_file_basename: "{{ goss_file.split('/')[-1] }}"
# switch to pinned vs latest
- include_tasks: versions/latest.yml
when: goss_version == "latest"
- include_tasks: versions/pinned.yml
when: goss_version != "latest"
# create goss directories
- name: create goss directories
file: path={{ item }} state=directory
with_items:
- "{{ degoss_tmp_root }}"
- "{{ degoss_test_root }}"
- "{{ degoss_goss_install_dir }}"
changed_when: degoss_changed_when
# download goss
- name: install
get_url:
url: "{{ goss_download_url }}"
dest: "{{ degoss_goss_bin }}-{{ item }}"
mode: 0755
with_items:
- "{{ goss_real_version }}"
changed_when: degoss_changed_when
# symlink
- name: link
file:
state: link
src: "{{ degoss_goss_bin }}-{{ goss_real_version }}"
dest: "{{ degoss_goss_bin }}"
force: true
changed_when: degoss_changed_when
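# (The binary is downloaded once per version as goss-<version>; the bare
#  `goss` name is only a symlink, so switching to a different pinned version
#  just flips the link.)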
# deploy test files including the main and additional test files
- name: deploy test files
copy:
src: |-
{%- if item.startswith('/') -%}
{{ item }}
{%- else -%}
{{ playbook_dir | default(".") }}/{{ item }}
{%- endif -%}
dest: "{{ degoss_test_root }}"
mode: 0644
directory_mode: 0755
setype: user_tmp_t
with_items: "{{ [goss_file] + goss_addtl_files + goss_addtl_dirs }}"
changed_when: degoss_changed_when
# run the tests
- name: run tests
goss:
executable: "{{ degoss_goss_bin }}"
path: "./{{ goss_file_basename }}"
cwd: "{{ degoss_test_root }}"
format: "{{ goss_output_format }}"
env_vars: "{{ goss_env_vars }}"
failed_when: false
register: goss_output
tags: [format_goss_output]
# clean everything up
- name: clean
file: path={{ degoss_tmp_root }} state=absent
when: degoss_clean | bool
changed_when: degoss_changed_when
# our output callback plugin will catch the tag of this and format output accordingly
- name: report errors
fail:
msg:
message: Goss Tests Failed.
rc: "{{ goss_output.rc }}"
tmp_root: "{{ degoss_tmp_root }}"
version: "{{ goss_real_version }}"
when: goss_output.get('goss_failed') | bool
tags: [format_goss_output]
- name: report module errors
fail: msg="Goss Module Run Failed."
when: goss_output.get('msg')|lower == "module failure"
tags: [format_goss_stacktrace]
|
tasks/main.yml
|
---
# splunk_config.yml
plugin: splunkenizer
os:
set_hostname: true
aws:
keypair_name: 'aws_key'
#region: 'eu-central-1'
ami: 'ami-0badcc5b522737046' # Redhat 8 (may need to disable selinux)
#ssh_username: 'ec2-user'
instance_type: 't2.micro'
#instance_type: 'c5.2xlarge'
ssh_private_key_path: '~/.ssh/aws_key.pem'
security_groups: ['Splunk_Basic']
# AWS Storage
block_device_mapping:
- # do not remove this, it is part of the format
DeviceName: "/dev/sda1" # Redhat 8
Ebs.VolumeSize: 50 # Size in GB
#Ebs.DeleteOnTermination: true # Default
#Ebs.VolumeType: "GP2" # General performance - you might want something faster
# Splunk default settings
splunk_defaults:
# splunk_env_name: splk
# splunk_version: '8.2.3'
splunk_download:
splunk: true
splunkforwarder: true
# splunk_admin_password: '<PASSWORD>'
# splunk_license_file: Splunk_Enterprise.lic
# splunk_indexes:
# test1:
# test2_metrics:
# datatype: metric
# Define Indexer Volumes (filesystem must exist)
splunk_indexer_volumes:
hot:
s2volume:
# Create a bucket in AWS and name it here. Make sure the EC2 instances have permission to write to this bucket.
path: "s3://<your_s3_bucketname>/<subfolder>"
storageType: remote
splunk_volume_defaults:
VolumeDataSize_Free_MB: 800 # Will calculate maxVolumeDataSizeMB as 'fs_free - VolumeDataSize_Free_MB'
# Define the volumes to be used for the indexes
homePath: hot
coldPath: hot
remotePath: s2volume
splunk_set_servername: true
splunk_set_default_hostname: true
splunk_loginpage_print_userpw: false
# splunk_loginpage_print_roles: false
splunk_ssl:
web:
enable: true
inputs:
enable: true
outputs:
enable: true
# Indexer Cluster settings
splunk_idxclusters:
- idxc_name: idxc1
idxc_password: <PASSWORD>
idxc_replication_port: 9887
idxc_site_rf: 'origin:2, total:3'
idxc_site_sf: 'origin:2, total:3'
idxc_rf: 2
idxc_sf: 2
# Search Head Cluster settings
splunk_shclusters:
- shc_name: shc1
shc_site: site0
shc_password: <PASSWORD>
shc_replication_port: 9887
# Splunk hosts with its settings
splunk_hosts:
# Deployment Server
- name: ds
roles:
- deployment_server
- deployer
shcluster: shc1
# Cluster Master
- name: cm
roles:
- cluster_master
- monitoring_console
#- license_master
idxcluster: idxc1
site: site0
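  # (An `iter` entry expands into several hosts by combining `prefix` with
  #  each value in the `numbers` range, e.g. idx1..idx2 below.)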
# Cluster Indexers (site1)
- iter:
prefix: idx
numbers: 1..2
roles:
- indexer
idxcluster: idxc1
site: site1
# Cluster Indexers (site2)
- iter:
prefix: idx
numbers: 3..4
roles:
- indexer
idxcluster: idxc1
site: site2
# Search Heads
- iter:
prefix: sh
numbers: 1..3
roles:
- search_head
shcluster: shc1
# Universal Forwarder
- name: uf
roles:
- universal_forwarder
|
examples/cm_4idxc2site_3shc_ds_uf_SmartStore.yml
|
---
result: FAILURE
failure_category: network
failure_cause: 'network reset rubygems.org: el-7'
timestamp: 2016-06-01 16:56:21 UTC
url: http://manhattan.ci.chef.co/job/omnibus-toolchain-build/43/
trigger_url: http://manhattan.ci.chef.co/job/omnibus-toolchain-trigger-git_poll/22/
duration: 1h43m15s
runs:
el-7:
result: FAILURE
failure_category: network
failure_cause: network reset rubygems.org
url: http://manhattan.ci.chef.co/job/omnibus-toolchain-build/architecture=x86_64,platform=el-7,project=omnibus-toolchain,role=builder/43/
duration: 56s
debian-6-i386:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/omnibus-toolchain-build/architecture=i386,platform=debian-6,project=omnibus-toolchain,role=builder/43/
duration: 11m45s
el-5-i386:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/omnibus-toolchain-build/architecture=i386,platform=el-5,project=omnibus-toolchain,role=builder/43/
duration: 12m43s
el-6-i386:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/omnibus-toolchain-build/architecture=i386,platform=el-6,project=omnibus-toolchain,role=builder/43/
duration: 14m17s
freebsd-10-i386:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/omnibus-toolchain-build/architecture=i386,platform=freebsd-10,project=omnibus-toolchain,role=builder/43/
duration: 15m44s
freebsd-9-i386:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/omnibus-toolchain-build/architecture=i386,platform=freebsd-9,project=omnibus-toolchain,role=builder/43/
duration: 19m50s
ubuntu-10.04-i386:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/omnibus-toolchain-build/architecture=i386,platform=ubuntu-10.04,project=omnibus-toolchain,role=builder/43/
duration: 11m
solaris-10-i86pc:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/omnibus-toolchain-build/architecture=i86pc,platform=solaris-10,project=omnibus-toolchain,role=builder/43/
duration: 10m53s
aix-6.1-powerpc:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/omnibus-toolchain-build/architecture=powerpc,platform=aix-6.1,project=omnibus-toolchain,role=builder/43/
duration: 21m49s
solaris-10-sun4v:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/omnibus-toolchain-build/architecture=sun4v,platform=solaris-10,project=omnibus-toolchain,role=builder/43/
duration: 56m26s
debian-6:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/omnibus-toolchain-build/architecture=x86_64,platform=debian-6,project=omnibus-toolchain,role=builder/43/
duration: 19m9s
el-5:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/omnibus-toolchain-build/architecture=x86_64,platform=el-5,project=omnibus-toolchain,role=builder/43/
duration: 19m15s
el-6:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/omnibus-toolchain-build/architecture=x86_64,platform=el-6,project=omnibus-toolchain,role=builder/43/
duration: 23m34s
freebsd-10:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/omnibus-toolchain-build/architecture=x86_64,platform=freebsd-10,project=omnibus-toolchain,role=builder/43/
duration: 31m12s
freebsd-9:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/omnibus-toolchain-build/architecture=x86_64,platform=freebsd-9,project=omnibus-toolchain,role=builder/43/
duration: 46m20s
ios_xr-6:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/omnibus-toolchain-build/architecture=x86_64,platform=ios_xr-6,project=omnibus-toolchain,role=builder/43/
duration: 21m4s
mac_os_x-10.9:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/omnibus-toolchain-build/architecture=x86_64,platform=mac_os_x-10.9,project=omnibus-toolchain,role=builder/43/
duration: 14m47s
nexus-7:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/omnibus-toolchain-build/architecture=x86_64,platform=nexus-7,project=omnibus-toolchain,role=builder/43/
duration: 18m37s
ubuntu-10.04:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/omnibus-toolchain-build/architecture=x86_64,platform=ubuntu-10.04,project=omnibus-toolchain,role=builder/43/
duration: 18m33s
|
reports/stages/manhattan.ci.chef.co/job/omnibus-toolchain-build/43.yaml
|
items:
- uid: azure-arm-iothub - v2.1.0.RoutingEventHubProperties
name: RoutingEventHubProperties
fullName: RoutingEventHubProperties
children:
- azure-arm-iothub - v2.1.0.RoutingEventHubProperties.connectionString
- azure-arm-iothub - v2.1.0.RoutingEventHubProperties.name
- azure-arm-iothub - v2.1.0.RoutingEventHubProperties.resourceGroup
- azure-arm-iothub - v2.1.0.RoutingEventHubProperties.subscriptionId
langs:
- typeScript
type: interface
summary: ''
package: azure-arm-iothub - v2
- uid: azure-arm-iothub - v2.1.0.RoutingEventHubProperties.connectionString
name: connectionString
fullName: connectionString
children: []
langs:
- typeScript
type: property
summary: ''
syntax:
content: 'connectionString: string'
return:
type:
- string
description: ''
package: azure-arm-iothub - v2
- uid: azure-arm-iothub - v2.1.0.RoutingEventHubProperties.name
name: name
fullName: name
children: []
langs:
- typeScript
type: property
summary: ''
syntax:
content: 'name: string'
return:
type:
- string
description: ''
package: azure-arm-iothub - v2
- uid: azure-arm-iothub - v2.1.0.RoutingEventHubProperties.resourceGroup
name: resourceGroup
fullName: resourceGroup
children: []
langs:
- typeScript
type: property
summary: ''
optional: true
syntax:
content: 'resourceGroup?: string'
return:
type:
- string
description: ''
package: azure-arm-iothub - v2
- uid: azure-arm-iothub - v2.1.0.RoutingEventHubProperties.subscriptionId
name: subscriptionId
fullName: subscriptionId
children: []
langs:
- typeScript
type: property
summary: ''
optional: true
syntax:
content: 'subscriptionId?: string'
return:
type:
- string
description: ''
package: azure-arm-iothub - v2
|
typescript/docs-ref-autogen/azure-arm-iothub/1.0.RoutingEventHubProperties.yml
|
title: Managing SharePoint Online
metadata:
title: 'Course MS-300T02-A: Managing SharePoint Online'
description: 'Course MS-300T02-A: Managing SharePoint Online'
uid: course.ms-300t02
courseNumber: 'MS-300T02-A'
hoursToComplete: 24
iconUrl: /media/learn/certification/course.svg
skillsGained:
- skill: Describe how to configure user profile properties
- skill: Manage audiences
- skill: Describe how to create custom apps using the App Catalog
- skill: Create and manage term groups, term sets, and terms
- skill: Describe the differences between modern and classic search experiences
learningPartnersLink: /learn/certifications/partners
locales:
- en
levels:
- intermediate
roles:
- administrator
products:
- m365
- office-sp
- office-onedrive
- office-teams
- office-yammer
exams:
- uid: exam.ms-300
summary: "본 과정에서는 적절한 권한 설정으로 사용자 프로필 및 앱을 관리하는 방법을 학습합니다. 관리되는 메타데이터, Business Connectivity Services를 계획 및 구성하는 방법과 Office 365에서 콘텐츠를 찾고 소비하고 관리할 수 있는 검색 방법을 설명합니다.\n본 과정은 조직에서 팀워크 인프라를 배포하고 관리하는 IT 전문가를 대상으로 합니다.\nTeamwork 관리자 역할을 위한 4가지 일련의 과정 중 두 번째입니다. 본 과정은 시험 MS-300: Microsoft 365 Teamwork 배포 준비에 도움이 됩니다.\n#### 대상 그룹 프로필\nMicrosoft 365 Teamwork Administrator역할을 위한 과정입니다. 효율적이고 효과적인 협업 및 활용에 초점을 맞춘 Office 365 및 Azure 워크로드의 구성, 배포 및 관리를 담당하는 역할입니다. 비즈니스 요구 사항을 충족시키기 위해 앱, 서비스 및 지원 인프라를 관리합니다. 관리자는 제품과 서비스를 인식 및 구별할 수 있지만 각 제품이나 서비스를 직접 관리하지는 않습니다.\nTeamwork Administrator는 SharePoint (Online, 온-프레미스, 하이브리드), OneDrive 및 Teams의 배포, 관리, 마이그레이션 및 보안에 관한 능력이 있어야 합니다. \n\nTeamwork 관리자는 다음과 같은 앱 및 서비스와의 통합 지점을 기본적으로 이해하고 있습니다: Office, PowerApps, Flow, Yammer, Microsoft Graph, Stream, Planner, 및 Project. 관리자는 기간 업무(LOB) 애플리케이션을 비롯한 타사 앱 및 서비스를 통합하는 방법을 알아야 합니다."
prerequisitesSection: |-
Learners should start this course already having the following skills:
- Experience with Windows PowerShell
- Basic understanding of Active Directory and Azure AD
- Basic understanding of mobile device management and alternative operating systems (Android and macOS)
outlineSection: |-
### Module 1: Manage User Profiles and Apps
This module is about user profiles and customization in SharePoint Online. It describes how to manage user profiles in SharePoint Online. It also discusses how to use the App Catalog to provide custom business apps in SharePoint Online.
#### Lessons
- Manage user profiles
- Plan and configure customizations and apps
After completing this module, students will be able to:
- Describe how to configure user profile properties
- Manage audiences
- Describe how to create custom apps using the App Catalog
### Module 2: Plan and Configure Managed Metadata and Business Connectivity
This module is about managed metadata and Business Connectivity Services. It introduces the concepts of managed metadata and the process of importing term sets. It also describes how to manage Business Connectivity Services and how to create external lists.
#### Lessons
- Plan and configure managed metadata
- Plan and configure Business Connectivity Services (BCS) and Secure Store
After completing this module, students will be able to:
- Describe managed metadata functionality
- Create and manage term groups, term sets, and terms
- Describe the key components of a BDC model
- Describe the high-level architecture of BCS
- Explain the purpose of the Secure Store Service
### Module 3: Plan and Implement Search
This module is about the enterprise search service application. It provides details on planning and configuring content sources, crawl schedules, and crawl rules. It also explains how to provision the search management of various search components such as query rules, result types, and display templates.
#### Lessons
- Overview of search in SharePoint Online
- Manage search
After completing this module, students will be able to:
- Describe the differences between modern and classic search experiences
- Describe how to plan and configure content sources
- Describe how to manage the search schema
- Explain the use and deployment of query rules
- Explain the use and deployment of result sources
### Module 4: Course Conclusion
#### Lab: Import Term Sets
|
learn-certs-pr/courses/ms-300t02.yml
|
---
result: FAILURE
failure_category: unknown
failure_cause: 'unknown: mac_os_x'
timestamp: 2016-02-13 08:36:10 UTC
url: http://manhattan.ci.chef.co/job/harmony-build/90/
trigger_url: http://manhattan.ci.chef.co/job/harmony-trigger-nightly-master/66/
duration: 6m39s
runs:
mac_os_x-10.8:
result: FAILURE
failure_category: unknown
failure_cause: unknown
url: http://manhattan.ci.chef.co/job/harmony-build/architecture=x86_64,platform=mac_os_x-10.8,project=harmony,role=builder/90/
duration: 4m5s
solaris-10-i86pc:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/harmony-build/architecture=i86pc,platform=solaris-10,project=harmony,role=builder/90/
duration: 2m12s
aix-6.1-powerpc:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/harmony-build/architecture=powerpc,platform=aix-6.1,project=harmony,role=builder/90/
duration: 2m5s
solaris-10-sun4v:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/harmony-build/architecture=sun4v,platform=solaris-10,project=harmony,role=builder/90/
duration: 6m36s
debian-6:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/harmony-build/architecture=x86_64,platform=debian-6,project=harmony,role=builder/90/
duration: 2m12s
el-5:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/harmony-build/architecture=x86_64,platform=el-5,project=harmony,role=builder/90/
duration: 2m17s
el-6:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/harmony-build/architecture=x86_64,platform=el-6,project=harmony,role=builder/90/
duration: 2m33s
el-7:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/harmony-build/architecture=x86_64,platform=el-7,project=harmony,role=builder/90/
duration: 1m40s
freebsd-10:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/harmony-build/architecture=x86_64,platform=freebsd-10,project=harmony,role=builder/90/
duration: 2m39s
freebsd-9:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/harmony-build/architecture=x86_64,platform=freebsd-9,project=harmony,role=builder/90/
duration: 2m11s
ubuntu-10.04:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/harmony-build/architecture=x86_64,platform=ubuntu-10.04,project=harmony,role=builder/90/
duration: 2m10s
windows-2008r2:
result: SUCCESS
url: http://manhattan.ci.chef.co/job/harmony-build/architecture=x86_64,platform=windows-2008r2,project=harmony,role=builder/90/
duration: 2m18s
|
reports/stages/manhattan.ci.chef.co/job/harmony-build/90.yaml
|
- name: "Create user and group for nginx"
group: name=nginx state=present
- user:
name: nginx
group: nginx
create_home: no
shell: "/sbin/nologin"
state: present
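# ngx_ulimit_params is expected to be a dict (defined in the role vars) mapping limit items such as nofile/nproc to their values.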
- name: Set PAM limits params for nginx user
pam_limits:
domain: "nginx"
limit_type: "-"
limit_item: "{{item.key}}"
value: "{{item.value}}"
dest: "/etc/security/limits.d/nginx.conf"
with_dict: "{{ngx_ulimit_params}}"
- name: Install dependencies
yum:
name: "{{item}}"
state: latest
with_items: "{{ngx_dependencies}}"
- name: Create nginx install directory
file: path={{ngx_install_path}} state=directory
- name: Create nginx log directory
file: path={{ngx_log_path}} owner=nginx group=nginx state=directory mode=0755
- unarchive:
src: "{{ngx_local_package_path}}"
dest: "{{ngx_install_path}}"
owner: nginx
group: nginx
keep_newer: yes
when: ngx_local_package_path is defined
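# When no local package is available, download the tarball once on the control node and push it to each target host.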
- block:
- name: Download nginx package once.
get_url:
url: "{{ngx_remote_package_path}}"
dest: "/tmp/nginx-{{ngx_version}}.tar.gz"
force: yes
register: ngx_pkg_temp_local
run_once: yes
delegate_to: 127.0.0.1
- unarchive:
src: "{{ngx_pkg_temp_local.dest}}"
dest: "{{ngx_install_path}}"
owner: nginx
group: nginx
keep_newer: yes
when: ngx_local_package_path is not defined
- file:
src: "{{ngx_install_path}}/nginx-{{ngx_version}}"
path: "{{ngx_install_path}}/nginx"
force: yes
state: link
- name: Distribute default nginx configuration files to target hosts.
template:
src: "templates/nginx.service.j2"
dest: "/usr/lib/systemd/system/nginx.service"
force: yes
- template:
src: "templates/nginx.conf.j2"
dest: "{{ngx_install_path}}/nginx/conf/nginx.conf"
force: yes
notify:
- restart nginx service
- name: Distribute logrotate config for nginx
template:
src: "templates/nginx_logrotate.j2"
dest: "/etc/logrotate.d/nginx"
force: yes
- name: Ensure nginx service is started and enabled
service:
name: nginx
state: started
daemon_reload: yes
enabled: yes
- include_tasks: exporter.yml
when: ngx_exporter_enable
|
ansible/nginx/tasks/main.yml
|
name: Build
on:
push:
branches:
- dev
- main
- master
pull_request:
jobs:
build:
runs-on: ubuntu-20.04
container: docker://helife/epilight
steps:
- uses: actions/checkout@v2
- name: Build
shell: bash
run: "make 2>&1 | tee build_output"
- name: Send notification
shell: bash
if: ${{ github.event_name == 'push' && always() }}
run: |
checkout=$(git log -1 --pretty=format:"*Checkout:%d: %h %s*")
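# Collect "error: " lines (plus two lines of context) and JSON-escape them for the Discord payload.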
errors=$(bash -c 'cat build_output | grep "error: " -A 2 | python3 -c "import json,sys; print(json.dumps(sys.stdin.read())[1:-1])"')
if [[ -z $errors ]]; then
errors="No errors."
fi
errors_count=$(bash -c 'cat build_output | grep "error: " | wc -l')
echo '{"content": "**'$GITHUB_REPOSITORY'**\n'$checkout'\nCompilation: '$errors_count' errors.\n```\n'$errors'\n```"}' > notification_data
cat notification_data
curl --data "$(cat notification_data)" --header "Content-Type: application/json" https://discord.com/api/webhooks/<KEY>
- uses: actions/upload-artifact@v2
with:
name: build_output
path: build_output
- name: Preparing a clean env
run: make fclean
- name: Build with debug mode
if: always()
shell: bash
run: "EPIDEBUG=1 make 2>&1 | tee build_output_debug"
- name: Send notification
shell: bash
if: ${{ github.event_name == 'push' && always() }}
run: |
checkout=$(git log -1 --pretty=format:"*Checkout:%d: %h %s*")
warnings=$(bash -c 'cat build_output_debug | grep "warning: " -A 2 | python3 -c "import json,sys; print(json.dumps(sys.stdin.read())[1:-1])"')
if [[ -z $warnings ]]; then
warnings="No warnings."
fi
warnings_count=$(bash -c 'cat build_output_debug | grep "warning: " | wc -l')
echo '{"content": "**'$GITHUB_REPOSITORY'**\n'$checkout'\nDebug compilation: '$warnings_count' warnings.\n```\n'$warnings'\n```"}' > notification_data
cat notification_data
curl --data "$(cat notification_data)" --header "Content-Type: application/json" https://discord.com/api/webhooks/<KEY>
- uses: actions/upload-artifact@v2
with:
name: build_output_debug
path: build_output_debug
|
.github/workflows/build.yml
|
globals:
last: c144
includes:
- from: c144.png
to: images/c144.png
- from: t144.png
to: images/t144.png
pages:
- _id: c144
_path: 144.html
_template: page.html
_post_to_facebook: '<NAME> v. the Susan G. Komen Foundation'
prev: c143
next: c145
datetime: 2012-02-06 05:00:00
src: images/c144.png
thumbnail: images/t144.png
width: 845
height: 660
name: Pink
facebook: 1
description: <NAME>'s use of pink comes back to bite her in the tail.
alt: <NAME>'s done looking for the critics, 'cause they're everywhere.
notes: |
<p><b><date></b>. In a way, I can understand why the Susan G. Komen Foundation didn't expect such a huge <a href="http://www.huffingtonpost.com/2012/02/01/susan-g-komen_n_1247262.html">uproar</a> over its decision to cut Planned Parenthood funding. There are lots of breast cancer groups out there that need funding. And if people prefer to give money to Planned Parenthood, they can (<a href="http://www.businessweek.com/news/2012-02-05/web-fury-spurs-komen-reversal-3-million-for-planned-parenthood.html">and did</a>) do so directly, without going through the Komen Foundation.</p>
<p>After all, churches engage in charity all the time. There's some concern about co-mingling the charitable and the evangelical, but for the most part, it's not a big deal. If that bothers you, there are plenty of secular alternatives to religious charities.</p>
<p>But churches don't own poverty in the same way the Komen Foundation owns breast cancer. The search for a cure has been <a href="http://www.huffingtonpost.com/2010/12/07/komen-foundation-charities-cure_n_793176.html">trademarked</a>, <a href="http://www.thenation.com/article/166072/why-komenplanned-parenthood-breakup-while-it-lasted-was-good-feminism">branded</a>, and <a href="http://www.youtube.com/watch?v=3QPZfcYTUaA">monetized</a>. It's ostensibly for a good cause. But by appointing itself guardian of the breast cancer narrative, the Komen Foundation has set itself up for backlash from those whom the Foundation has failed to speak for.</p>
<p>Food for legal thought: <a href="http://caselaw.lp.findlaw.com/scripts/getcase.pl?navby=CASE&court=US&vol=514&page=159"><i>Qualitex Co. v. Jacobson Products Co., Inc.</i></a>, 514 U.S. 159 (1995) (establishing that a color could be trademarked) and <a href="http://www.aclu.org/free-speech-womens-rights/aclu-challenges-patents-breast-cancer-genes-0">the breast cancer gene patents</a>.</p>
<p>Shout-out to Lauren.</p>
transcript: |
<NAME>: You have a package Lop! ;
Negligent Lop: Must be that shirt I ordered. ;
<NAME>: And a letter for
you <NAME>. ;
Tort Bunny: For me? ;
-----
Tort Bunny (reading): Dear Ms. Bunny, ;
Tort Bunny (reading): It has come to our attention that you wear the
color pink while engaging in tortious activities.
As the lawful owners of trademarks covering the
color pink, we hereby demand that you cease and
desist from using the color pink forthwith. ;
Tort Bunny (reading): Sincerely,
The Susan G. Komen Foundation ;
Not shown - "P.S. We know where you live." ;
-----
Tort Bunny: This is ridiculous! You can't own a color.
I wear a pink bow. I own a pink gun. I
use a pink laptop. Hell, my sh!t be pink! ;
Tort Bunny: Do they honestly expect to
take that all away from me? ;
-----
<NAME>: Your poop is pink? ;
Tort Bunny: Pepto-Bismol is a wonderful thing. ;
Negligent Lop is wearing his brand new red "HAMME(RED)" shirt. ;
revision: 144
|
meta/c144.yaml
|
server:
shutdown: graceful
port: 8085
undertow:
buffer-size: 1024
direct-buffers: on
always-set-keep-alive: true
error:
include-exception: true
include-stacktrace: ALWAYS
include-message: ALWAYS
compression:
enabled: true
min-response-size: 1024
mime-types: application/javascript,application/json,application/xml,text/html,text/xml,text/plain,text/css,image/*
spring:
main:
allow-bean-definition-overriding: true
application:
name: sparkzxl-report
aop:
proxy-target-class: true
# FreeMarker configuration
freemarker:
# Template file suffix
suffix: .ftl
# Document content type
content-type: text/html
# Page character encoding
charset: UTF-8
# Disable template caching
cache: false
prefer-file-system-access: false
# Path to the .ftl template files
template-loader-path:
- classpath:/templates
# Paths to static files (js, css, etc.)
mvc:
static-path-pattern: /**
resource:
static-locations: classpath:/static/,classpath:/public/
datasource:
dynamic:
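# Dynamic multi-datasource configuration; "master" (defined below) is the default data source.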
druid: # Global Druid settings; most values match the defaults. (Only the parameters below are supported; do not set values you do not understand.)
# Connection pool configuration
# Initial, minimum, and maximum pool sizes
initial-size: 5
min-idle: 5
maxActive: 20
# Maximum wait time (ms) when acquiring a connection
maxWait: 60000
# Interval (ms) between eviction runs that detect and close idle connections
timeBetweenEvictionRunsMillis: 60000
# Minimum time (ms) a connection must stay idle in the pool before it may be evicted
minEvictableIdleTimeMillis: 300000
validationQuery: SELECT 1 FROM DUAL
testWhileIdle: true
testOnBorrow: false
testOnReturn: false
# Enable PSCache and set its size per connection
poolPreparedStatements: true
maxPoolPreparedStatementPerConnectionSize: 20
# Filters for monitoring/statistics interception; removing them disables SQL stats in the monitoring UI. 'wall' is the SQL firewall.
filters: stat,wall,slf4j
# Enable the mergeSql feature and slow-SQL logging via connectProperties
connectionProperties: druid.stat.mergeSql\=true;druid.stat.slowSqlMillis\=5000
datasource:
master:
url: jdbc:mysql://sparkzxl.mysql.rds.aliyuncs.com/report?characterEncoding=UTF-8&useUnicode=true&useSSL=false&tinyInt1isBit=false&allowPublicKeyRetrieval=true&serverTimezone=Asia/Shanghai
username: zhouxinlei
password: <PASSWORD>
driver-class-name: com.mysql.cj.jdbc.Driver
mybatis-plus:
mapper-locations: classpath*:org/jeecg/modules/**/xml/*Mapper.xml
global-config:
banner: false
db-config:
id-type: ASSIGN_ID
table-underline: true
configuration:
call-setters-on-nulls: true
custom:
mapper-scan: org.jeecg.modules.**.mapper*
# jeecg-specific configuration
jeecg:
# Upload target: local | minio | alioss (Alibaba Cloud OSS)
uploadType: local
path:
# Root directory for file uploads
upload: D://opt//upFiles
# Path to webapp files
webapp: D://opt//webapp
# Alibaba Cloud OSS storage configuration
oss:
endpoint: oss-cn-beijing.aliyuncs.com
accessKey: ??
secretKey: ??
bucketName: jeecgos
staticDomain: ??
# MinIO file upload
minio:
minio_url: http://minio.jeecg.com
minio_name: ??
minio_pass: ??
bucketName: otatest
# Dashboard report (JimuReport) settings
jmreport:
mode: dev
# Whether token verification is required
is_verify_token: false
# Methods that always require verification
verify_methods: remove,delete,save,add,update
knife4j:
group-name: Report Application
base-package: com.github.sparkzxl.report
title: Report Application
description: Report application management
terms-of-service-url: https://www.sparksys.top
version: 1.0
license: Powered By sparkzxl
licenseUrl: https://github.com/sparkzxl
contact:
url: https://github.com/sparkzxl
name: zhouxinlei
email: <EMAIL>
|
sparkzxl-report/src/main/resources/application.yml
|
version: '2'
services:
crawlstreams:
build:
context: .
args:
http_proxy: ${HTTP_PROXY}
https_proxy: ${HTTPS_PROXY}
environment:
- "CDX_SERVER=http://cdxserver:8080/tc"
- "WAYBACK_SERVER=http://pywb:8080/test/"
- "KAFKA_BOOTSTRAP_SERVERS=kafka:9092"
- "WEBHDFS_PREFIX=http://warc-server:8000/by-filename/"
depends_on:
- cdxserver
- kafka
ports:
- "8000:8000"
# OutbackCDX
cdxserver:
image: nlagovau/outbackcdx:0.5.1
command: "java -jar outbackcdx.jar -d /cdx-data -p 8080 -b 0.0.0.0"
ports:
- "9090:8080"
volumes:
- ./integration-testing/outbackcdx-data:/cdx-data
# Kafka
kafka:
image: wurstmeister/kafka:2.12-2.1.0
ports:
- "9094:9094"
environment:
HOSTNAME_COMMAND: "docker info | grep ^Name: | cut -d' ' -f 2"
KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
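# Two listeners: PLAINTEXT on 9092 for traffic inside the compose network, and OUTSIDE on 9094 advertised under the Docker host's name (resolved via HOSTNAME_COMMAND) for external clients.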
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,OUTSIDE:PLAINTEXT
KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://:9092,OUTSIDE://_{HOSTNAME_COMMAND}:9094
KAFKA_LISTENERS: PLAINTEXT://:9092,OUTSIDE://:9094
KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
KAFKA_NUM_RECOVERY_THREADS_PER_DATA_DIR: 10
KAFKA_LOG_RETENTION_HOURS: -1
KAFKA_LOG_RETENTION_BYTES: -1
KAFKA_NUM_PARTITIONS: 4
KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'false'
LOG4J_LOGGER_KAFKA: WARN
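# Topic spec format: "name:partitions:replication-factor".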
KAFKA_CREATE_TOPICS: "fc.tocrawl.npld:16:1,fc.tocrawl.bypm:16:1,fc.crawled:16:1"
# Keep persistent files in consistent locations; each server supplies a /kafka folder.
KAFKA_BROKER_ID: 1
KAFKA_LOG_DIRS: /kafka/kafka-logs/kafka-logs-broker-1
volumes:
- /var/run/docker.sock:/var/run/docker.sock:ro
- ./kafka:/kafka
depends_on:
- zookeeper
# Kafka UI
ui:
image: ukwa/docker-trifecta
ports:
- "9990:9000"
environment:
- "ZK_HOST=zookeeper:2181"
depends_on:
- zookeeper
- kafka
# Needs a Zookeeper too
# ----
zookeeper:
image: zookeeper:3.4
ports:
- "2181:2181"
|
docker-compose.yml
|
name: Build Release Asset - Windows x64
on:
push:
branches: [ master ]
tags:
- 'v*'
jobs:
build:
runs-on: windows-latest
steps:
- uses: actions/checkout@v2
- uses: dorny/paths-filter@v2.2.0
id: filter
with:
filters: |
version:
- 'version.txt'
- name: Cancel workflow
if: steps.filter.outputs.version == 'false'
uses: andymckay/cancel-action@0.2
- name: Setup .NET Core
uses: actions/setup-dotnet@v1
with:
dotnet-version: 3.1.101
- name: Install dependencies
run: dotnet restore
- name: Build
run: dotnet build --configuration Release --no-restore
- name: Test
run: dotnet test --no-restore --verbosity normal
- name: Publish x64 release
run: dotnet publish SwagLyricsGUI --self-contained true -p:PublishTrimmed=true -o Builds\win-x64\ -r win-x64 -c Release
- name: Compile setup script
shell: powershell
run: .\inno_compile.ps1
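# Values for later steps are persisted by appending "NAME=value" lines to $GITHUB_ENV; the older ::set-env workflow command has been disabled by GitHub.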
- name: Set version env
run: echo "RELEASE_VER=$(cat version.txt)" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
- name: Check if release exists
shell: powershell
run: echo "LATEST_TAG=$($WebResponse = curl https://api.github.com/repos/SwagLyrics/SwagLyricsGUI/releases/latest; $WebResponse.Content | ConvertFrom-Json | Select tag_name | foreach {$_.tag_name})" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
- name: Create Release
id: create_release
uses: actions/create-release@v1
if: ${{ env.RELEASE_VER != env.LATEST_TAG }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
tag_name: ${{ env.RELEASE_VER }}
release_name: Release ${{ env.RELEASE_VER }}
draft: false
prerelease: false
- name: Set assets URL
if: ${{ env.RELEASE_VER != env.LATEST_TAG }}
run: echo "UPLOAD_URL=${{ steps.create_release.outputs.upload_url }}" | Out-File -FilePath $env:GITHUB_ENV -Encoding utf8 -Append
- name: Get release URL
id: release_url
shell: powershell
if: ${{ env.RELEASE_VER == env.LATEST_TAG }}
run: echo "::set-env name=UPLOAD_URL::$($WebResponse = curl https://api.github.com/repos/SwagLyrics/SwagLyricsGUI/releases/latest; $WebResponse.Content | ConvertFrom-Json | Select upload_url | foreach {$_.upload_url})"
- name: Upload a Release Asset
id: upload-release-asset
uses: actions/upload-release-asset@v1.0.2
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
# The URL for uploading assets to the release
upload_url: ${{ env.UPLOAD_URL }}
# The path to the asset you want to upload
asset_path: Installer\win-x64\SwagLyricsGUI-setup-x64.exe
# The name of the asset you want to upload
asset_name: SwagLyricsGUI-setup-x64.exe
# The content-type of the asset you want to upload. See the supported media types: https://www.iana.org/assignments/media-types/media-types.xhtml
asset_content_type: application/octet-stream
|
.github/workflows/win-installer-action.yml
|
name: CI
on: [ push, pull_request ]
jobs:
ubuntu18-clang9-dev:
runs-on: ubuntu-18.04
env:
CC: clang-9
CXX: clang++-9
BUILD_TYPE: Dev
WITH_PROJ: ON
steps:
- uses: actions/checkout@v2
with:
submodules: true
- uses: ./.github/actions/install-ubuntu
- uses: ./.github/actions/cmake
- uses: ./.github/actions/build
- uses: ./.github/actions/ctest
ubuntu20-clang10-dev:
runs-on: ubuntu-20.04
env:
CC: clang-10
CXX: clang++-10
BUILD_TYPE: Dev
WITH_PROJ: ON
steps:
- uses: actions/checkout@v2
with:
submodules: true
- uses: ./.github/actions/install-ubuntu
- uses: ./.github/actions/cmake
- uses: ./.github/actions/build
- uses: ./.github/actions/ctest
ubuntu20-clang10-debug-san:
runs-on: ubuntu-20.04
env:
CC: clang-10
CXX: clang++-10
BUILD_TYPE: Debug
WITH_PROJ: ON
CXXFLAGS: -fsanitize=address,undefined,integer -fno-sanitize-recover=all -fno-omit-frame-pointer
LDFLAGS: -fsanitize=address,undefined,integer
steps:
- uses: actions/checkout@v2
with:
submodules: true
- uses: ./.github/actions/install-ubuntu
- uses: ./.github/actions/cmake
- uses: ./.github/actions/build
- uses: ./.github/actions/ctest
ubuntu20-clang10-release:
runs-on: ubuntu-20.04
env:
CC: clang-10
CXX: clang++-10
BUILD_TYPE: Release
WITH_PROJ: ON
steps:
- uses: actions/checkout@v2
with:
submodules: true
- uses: ./.github/actions/install-ubuntu
- uses: ./.github/actions/cmake
- uses: ./.github/actions/build
- uses: ./.github/actions/ctest
ubuntu18-gcc7-dev:
runs-on: ubuntu-18.04
env:
CC: gcc-7
CXX: g++-7
BUILD_TYPE: Dev
WITH_PROJ: ON
steps:
- uses: actions/checkout@v2
with:
submodules: true
- uses: ./.github/actions/install-ubuntu
- uses: ./.github/actions/cmake
- uses: ./.github/actions/build
- uses: ./.github/actions/ctest
ubuntu20-gcc9-dev:
runs-on: ubuntu-20.04
env:
CC: gcc-9
CXX: g++-9
BUILD_TYPE: Dev
WITH_PROJ: ON
steps:
- uses: actions/checkout@v2
with:
submodules: true
- uses: ./.github/actions/install-ubuntu
- uses: ./.github/actions/cmake
- uses: ./.github/actions/build
- uses: ./.github/actions/ctest
ubuntu20-gcc10-dev:
runs-on: ubuntu-20.04
env:
CC: gcc-10
CXX: g++-10
CXXFLAGS: -Wno-stringop-overflow
BUILD_TYPE: Dev
WITH_PROJ: ON
steps:
- uses: actions/checkout@v2
with:
submodules: true
- uses: ./.github/actions/install-ubuntu
- uses: ./.github/actions/cmake
- uses: ./.github/actions/build
- uses: ./.github/actions/ctest
ubuntu20-gcc10-release:
runs-on: ubuntu-20.04
env:
CC: gcc-10
CXX: g++-10
BUILD_TYPE: Release
WITH_PROJ: ON
steps:
- uses: actions/checkout@v2
with:
submodules: true
- uses: ./.github/actions/install-ubuntu
- uses: ./.github/actions/cmake
- uses: ./.github/actions/build
- uses: ./.github/actions/ctest
macos10-dev:
runs-on: macos-10.15
env:
CC: clang
CXX: clang++
BUILD_TYPE: Dev
WITH_PROJ: OFF
steps:
- uses: actions/checkout@v2
with:
submodules: true
- uses: ./.github/actions/install-macos
- uses: ./.github/actions/cmake
- uses: ./.github/actions/build
- uses: ./.github/actions/ctest
macos11-dev:
runs-on: macos-11.0
env:
CC: clang
CXX: clang++
BUILD_TYPE: Dev
WITH_PROJ: OFF
steps:
- uses: actions/checkout@v2
with:
submodules: true
- uses: ./.github/actions/install-macos
- uses: ./.github/actions/cmake
- uses: ./.github/actions/build
- uses: ./.github/actions/ctest
macos11-release:
runs-on: macos-11.0
env:
CC: clang
CXX: clang++
BUILD_TYPE: Release
WITH_PROJ: OFF
steps:
- uses: actions/checkout@v2
with:
submodules: true
- uses: ./.github/actions/install-macos
- uses: ./.github/actions/cmake
- uses: ./.github/actions/build
- uses: ./.github/actions/ctest
|
out/osmcode/libosmium/.github_workflows_ci.yml
|
name: test
on:
push:
branches: [main]
pull_request:
branches: [main]
jobs:
build:
strategy:
matrix:
os: [ubuntu-latest]
fail-fast: false
runs-on: ${{ matrix.os }}
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Setup Node.js
uses: actions/setup-node@v1
with:
node-version: "12.x"
- name: Install Yarn
run: npm install -g yarn
- name: Get yarn cache directory path
id: yarn-cache-dir-path
run: echo "::set-output name=dir::$(yarn cache dir)"
- uses: actions/cache@v2
id: yarn-cache
with:
path: ${{ steps.yarn-cache-dir-path.outputs.dir }}
key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }}
restore-keys: |
${{ runner.os }}-yarn-
- name: Install Deps
run: yarn install --pure-lockfile
- name: Build & Test
run: yarn test
- name: Ensure dist/ folder is up-to-date
run: |
yarn build
if [ "$(git diff --ignore-space-at-eol | wc -l)" -gt "0" ]; then
echo "Detected uncommitted changes after build. See status below:"
git diff
exit 1
fi
create-minio-bucket:
runs-on: ubuntu-latest
steps:
- run: |
wget -q https://dl.min.io/client/mc/release/linux-amd64/mc
chmod a+x mc
./mc mb play/actions-cache || exit 0
test-save:
needs: create-minio-bucket
strategy:
matrix:
os: [ubuntu-latest, windows-latest, macOS-latest]
fail-fast: false
runs-on: ${{ matrix.os }}
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Generate files in working directory
shell: bash
run: src/create-cache-files.sh ${{ runner.os }} test-cache
- name: Generate files outside working directory
shell: bash
run: src/create-cache-files.sh ${{ runner.os }} ~/test-cache
- name: Save cache
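# "uses: ./" runs the cache action from this repository; play.min.io is MinIO's public test server, so the credentials below are publicly documented sandbox keys.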
uses: ./
with:
endpoint: play.min.io
accessKey: "<KEY>"
secretKey: "zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG"
bucket: actions-cache
use-fallback: false
key: test-${{ runner.os }}-${{ github.run_id }}
path: |
test-cache
~/test-cache
test-restore:
needs: test-save
strategy:
matrix:
os: [ubuntu-latest, windows-latest, macOS-latest]
fail-fast: false
runs-on: ${{ matrix.os }}
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Restore cache
uses: ./
with:
endpoint: play.min.io
accessKey: "<KEY>"
secretKey: "zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG"
bucket: actions-cache
use-fallback: false
key: test-${{ runner.os }}-${{ github.run_id }}
path: |
test-cache
~/test-cache
- name: Verify cache files in working directory
shell: bash
run: src/verify-cache-files.sh ${{ runner.os }} test-cache
- name: Verify cache files outside working directory
shell: bash
run: src/verify-cache-files.sh ${{ runner.os }} ~/test-cache
test-restore-keys:
needs: test-save
strategy:
matrix:
os: [ubuntu-latest, windows-latest, macOS-latest]
fail-fast: false
runs-on: ${{ matrix.os }}
steps:
- name: Checkout
uses: actions/checkout@v2
- name: Restore cache
uses: ./
with:
endpoint: play.min.io
accessKey: "<KEY>"
secretKey: "zuf+tfteSlswRu7BJ86wekitnifILbZam1KYY3TG"
bucket: actions-cache
use-fallback: false
key: test-${{ runner.os }}-${{ github.run_id }}-${{ github.sha }}
path: |
test-cache
~/test-cache
restore-keys: test-${{ runner.os }}-
- name: Verify cache files in working directory
shell: bash
run: src/verify-cache-files.sh ${{ runner.os }} test-cache
- name: Verify cache files outside working directory
shell: bash
run: src/verify-cache-files.sh ${{ runner.os }} ~/test-cache
|
.github/workflows/test.yml
|
--- !<MAP_PACK>
contentType: "MAP_PACK"
firstIndex: "2018-11-01 07:21"
game: "Unreal Tournament"
name: "Alornmappack"
author: "TheMadMonk"
description: "None"
releaseDate: "2000-04"
attachments:
- type: "IMAGE"
name: "Alornmappack_shot_3.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament/MapPacks/A/Alornmappack_shot_3.png"
- type: "IMAGE"
name: "Alornmappack_shot_1.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament/MapPacks/A/Alornmappack_shot_1.png"
- type: "IMAGE"
name: "Alornmappack_shot_2.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal%20Tournament/MapPacks/A/Alornmappack_shot_2.png"
originalFilename: "alornmappack.zip"
hash: "1a2e6ec5718d15b1c170bf6041b1f92faeec447f"
fileSize: 2002242
files:
- name: "DM-Alorn.unr"
fileSize: 1886243
hash: "352199b1eff7b6effd52da8d23617d0bf9a5f363"
- name: "CTF-Alorn.unr"
fileSize: 1845980
hash: "a41d3b81528d3df74b4486bd8eea3889821bbf9c"
- name: "DOM-Alorn.unr"
fileSize: 1887741
hash: "02b2abaae0c2cb8e7ba956f008b9689812357c46"
otherFiles: 4
dependencies: {}
downloads:
- url: "https://f002.backblazeb2.com/file/unreal-archive-files/Unreal%20Tournament/MapPacks/A/alornmappack.zip"
main: true
repack: false
state: "OK"
- url: "http://ut99maps.gamezoo.org/maps.html"
main: false
repack: false
state: "OK"
- url: "http://medor.no-ip.org/index.php?dir=Maps/DeathMatch&file=DM-UTCP-AlornMapPack.zip"
main: false
repack: false
state: "OK"
- url: "http://medor.no-ip.org/index.php?dir=Maps/MapPacks&file=alornmappack.zip"
main: false
repack: false
state: "OK"
- url: "http://www.ut-files.com/index.php?dir=Maps/MapPacks/MapsA_B/&file=alornmappack.zip"
main: false
repack: false
state: "OK"
- url: "http://www.ut-files.com/index.php?dir=Maps/MapPacks/&file=alornmappack.zip"
main: false
repack: false
state: "OK"
- url: "http://www.ut-files.com/index.php?dir=Maps/Misc/&file=alornmappack.zip"
main: false
repack: false
state: "OK"
- url: "http://uttexture.com/UT/Downloads/Maps/MapPacks/MapsA_B/alornmappack.zip"
main: false
repack: false
state: "OK"
- url: "https://files.vohzd.com/unrealarchive/Unreal%20Tournament/MapPacks/A/1/a/2e6ec5/alornmappack.zip"
main: false
repack: false
state: "OK"
- url: "https://unreal-archive-files.eu-central-1.linodeobjects.com/Unreal%20Tournament/MapPacks/A/1/a/2e6ec5/alornmappack.zip"
main: false
repack: false
state: "OK"
deleted: false
maps:
- name: "CTF-Alorn"
title: "Alorn Water Temple"
author: "TheMadMonk"
- name: "DM-Alorn"
title: "Alorn Water Temple"
author: "TheMadMonk"
- name: "DOM-Alorn"
title: "Alorn Water Temple"
author: "TheMadMonk"
gametype: "Mixed"
themes:
Natural: 0.2
Nali Castle: 0.8
|
content/Unreal Tournament/MapPacks/A/1/a/2e6ec5/alornmappack_[1a2e6ec5].yml
|
name: Check
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
defaults:
run:
shell: bash
jobs:
check:
runs-on: ${{matrix.os}}
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
build: [Release]
options: [""]
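# The include entries below layer extra variants onto the base matrix: vcpkg toolchain flags on Windows, plus docs, static-lib, no-rdiff, clang+Ninja, and Debug builds on Ubuntu.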
include:
- os: windows-latest
os_options: "-DCMAKE_TOOLCHAIN_FILE=C:/vcpkg/scripts/buildsystems/vcpkg.cmake"
- os: ubuntu-latest
build_docs: true
- os: ubuntu-latest
build: Release
options: "-DBUILD_SHARED_LIBS=OFF"
- os: ubuntu-latest
build: Release
options: "-DBUILD_RDIFF=OFF"
- os: ubuntu-latest
build: Release
options: "-G Ninja -DCMAKE_C_COMPILER=clang"
- os: ubuntu-latest
build: Debug
steps:
- uses: actions/checkout@v2
- name: Install Dependencies
# Unfortunately, the available Marketplace Actions for this are a mess, so we do it manually.
run: |
if [ "$RUNNER_OS" == "Linux" ]; then
sudo apt-get update -y
sudo apt-get install -y libpopt-dev libb2-dev doxygen graphviz ninja-build
elif [ "$RUNNER_OS" == "macOS" ]; then
brew update
brew install popt
elif [ "$RUNNER_OS" == "Windows" ]; then
vcpkg update
vcpkg --triplet x64-windows install libpopt
fi
- name: Configure CMake
# Configure CMake in a 'build' subdirectory.
# See https://cmake.org/cmake/help/latest/variable/CMAKE_BUILD_TYPE.html?highlight=cmake_build_type
run: cmake -B '${{github.workspace}}/build' -DCMAKE_BUILD_TYPE=${{matrix.build}} ${{matrix.os_options}} ${{matrix.options}}
- name: Build all
# Build your program with the given configuration.
run: cmake --build '${{github.workspace}}/build' --config ${{matrix.build}}
- name: Run tests
working-directory: ${{github.workspace}}/build
# Execute tests defined by the CMake configuration.
# See https://cmake.org/cmake/help/latest/manual/ctest.1.html for more detail
run: ctest -C ${{matrix.build}} --output-on-failure
- name: Build install
# Build your program with the given configuration.
run: cmake --install 'build' --config ${{matrix.build}} --prefix 'install'
- name: Build docs
if: ${{matrix.build_docs}}
run: cmake --build '${{github.workspace}}/build' --target doc
- name: Upload build
uses: actions/upload-artifact@v2
with:
name: build results ${{matrix.os}} ${{matrix.build}} ${{matrix.options}}
path: ${{github.workspace}}/build
if-no-files-found: error
- name: Upload install
uses: actions/upload-artifact@v2
with:
name: install results ${{matrix.os}} ${{matrix.build}} ${{matrix.options}}
path: ${{github.workspace}}/install
if-no-files-found: error
|
out/librsync/librsync/.github_workflows_check.yml
|
title: Narendiran Chembu
# logo: "images/logo.svg" # You can add your own logo image.
logo: images/cg_logo.png
description: Personal Website
baseurl: "" # The subpath of your site, e.g. /blog
url: "" # The base hostname & protocol for your site, e.g. http://example.com
# Author Settings
author:
name: <NAME>
# bio: Hi, my name is <NAME>.
# Contact links
# twitter: https://twitter.com/ # Add your Twitter handle
facebook: https://facebook.com/cgnarendiran/ # Add your Facebook handle
# dribbble: https://dribbble.com/ # Add your Dribbble handle
instagram: https://instagram.com/cgnarendiran/ # Add your Instagram handle
# pinterest: https://pinterest.com/ # Add your Pinterest handle
email: <EMAIL> # Add your Email address
linkedin: https://linkedin.com/in/cgnarendiran/
# Hero Section
hero: true # To enable the section hero, use the value true. To turn off use the value false.
hero-title: Narendiran Chembu # Add your hero title
hero-subtitle: |
Trying to understand this resplendent chaos called life.
You can read more <a style="color:turquoise" href="/about/">about me</a> or check out <a style="color:turquoise" href="/blog/">my blog</a>
and <a style="color:turquoise" href="/projects/">projects</a>
# Add your hero subtitle
hero-image: images/ml_bb_6.jpg # Add background image in section hero
# Footer
footer-image: images/ml_bb_6.jpg # Add background image in footer
# Comment Section (Disqus)
# disqus-identifier: cgnarendiran # Add your shortname for Disqus Comment. For example mr-brown
# Google Analytics
google-analytics: "" # Add your identifier. For example UA-99631805-1.
# Newsletter
mailchimp: "" # Add your MailChimp form identifier. For example - blogenjoyfreedom.us16.list-manage.com/subscribe/post?u=cd56155d76ddeeb4c0bc4d080&id=7a526cd13e
# Build settings
markdown: kramdown
permalink: pretty
plugins:
- jekyll-paginate
- jekyll-sitemap
- jekyll/tagging
paginate: 3
# paginate_path: "/page/:num"
paginate_path: "/blog/page:num/"
# Pages Path
# defaults:
# - scope:
# path: '_pages'
# values:
# permalink: /:basename:output_ext
include:
- _pages
collections:
projects:
output: true
defaults:
- scope:
path: ""
type: "projects"
values:
layout: "project"
permalink: "/projects/:title/"
- scope:
path: ""
type: "posts"
values:
layout: "post"
permalink: "/blog/:title/"
# Tags
tag_page_dir: tag
tag_page_layout: tag_page
tag_permalink_style: pretty
sass:
sass_dir: _sass
style: compressed
|
_config.yml
|
wandb_version: 1
_current_progress_remaining:
desc: null
value: 1
_custom_logger:
desc: null
value: 'False'
_episode_num:
desc: null
value: 0
_last_episode_starts:
desc: null
value: '[ True]'
_last_obs:
desc: null
value: "[[0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0\
\ 0\n 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n\
\ 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n \
\ 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n 0\
\ 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n 0 0\
\ 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 1 1 0 0 0 0 0 0 0 0 0 0\n 0 0 0\
\ 0 1 1 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0\n 0 0 0 0]]"
_last_original_obs:
desc: null
value: None
_logger:
desc: null
value: <stable_baselines3.common.logger.Logger object at 0x7fce0c1c56d8>
_n_updates:
desc: null
value: 0
_total_timesteps:
desc: null
value: 50000
_vec_normalize_env:
desc: null
value: None
_wandb:
desc: null
value:
cli_version: 0.12.9
framework: torch
is_jupyter_run: false
is_kaggle_kernel: false
python_version: 3.6.13
start_time: 1641349536
t:
1:
- 1
- 3
2:
- 1
- 3
3:
- 16
- 22
4: 3.6.13
5: 0.12.9
8:
- 5
action_noise:
desc: null
value: None
action_space:
desc: null
value: MultiBinary(9)
algo:
desc: null
value: PPO
batch_size:
desc: null
value: 64
board_size:
desc: null
value:
- 16
- 16
clip_range:
desc: null
value: <function constant_fn.<locals>.func at 0x7fce333991e0>
clip_range_vf:
desc: null
value: None
device:
desc: null
value: cuda
ent_coef:
desc: null
value: 0.0
env:
desc: null
value: <stable_baselines3.common.vec_env.vec_video_recorder.VecVideoRecorder object
at 0x7fce0c845fd0>
env_name:
desc: null
value: ConwayEnv
ep_info_buffer:
desc: null
value: deque([], maxlen=100)
ep_success_buffer:
desc: null
value: deque([], maxlen=100)
eval_env:
desc: null
value: None
gae_lambda:
desc: null
value: 0.95
gamma:
desc: null
value: 0.99
goal_location:
desc: null
value:
- 12
- 12
learning_rate:
desc: null
value: 0.0003
lr_schedule:
desc: null
value: <function constant_fn.<locals>.func at 0x7fce821e72f0>
max_grad_norm:
desc: null
value: 0.5
n_envs:
desc: null
value: 1
n_epochs:
desc: null
value: 10
n_steps:
desc: null
value: 2048
num_timesteps:
desc: null
value: 0
observation_space:
desc: null
value: MultiBinary(256)
policy:
desc: null
value: "ActorCriticPolicy(\n (features_extractor): FlattenExtractor(\n (flatten):\
\ Flatten(start_dim=1, end_dim=-1)\n )\n (mlp_extractor): MlpExtractor(\n \
\ (shared_net): Sequential()\n (policy_net): Sequential(\n (0): Linear(in_features=256,\
\ out_features=64, bias=True)\n (1): Tanh()\n (2): Linear(in_features=64,\
\ out_features=64, bias=True)\n (3): Tanh()\n )\n (value_net): Sequential(\n\
\ (0): Linear(in_features=256, out_features=64, bias=True)\n (1): Tanh()\n\
\ (2): Linear(in_features=64, out_features=64, bias=True)\n (3): Tanh()\n\
\ )\n )\n (action_net): Linear(in_features=64, out_features=9, bias=True)\n\
\ (value_net): Linear(in_features=64, out_features=1, bias=True)\n)"
policy_class:
desc: null
value: <class 'stable_baselines3.common.policies.ActorCriticPolicy'>
policy_kwargs:
desc: null
value: '{}'
policy_type:
desc: null
value: MlpPolicy
rollout_buffer:
desc: null
value: <stable_baselines3.common.buffers.RolloutBuffer object at 0x7fce82459e10>
sde_sample_freq:
desc: null
value: -1
seed:
desc: null
value: None
start_time:
desc: null
value: 1641349543.229862
target_kl:
desc: null
value: None
tensorboard_log:
desc: null
value: ./gol_results/
total_timesteps:
desc: null
value: 50000
use_sde:
desc: null
value: 'False'
verbose:
desc: null
value: 1
vf_coef:
desc: null
value: 0.5
|
wandb/run-20220104_212536-2fahxff6/files/config.yaml
|
title: Ja50n - go ahead
SEOTitle: Ja50n's Blog | go ahead
email: <EMAIL>
description: ""
keyword: ""
url: "https://blog.ulinkmega.com" # your host, for absolute URL
baseurl: "" # for example, '/blog' if your blog hosted on 'host/blog'
rss_url: atom.xml
logo: img/logo.png
author: Ja50n
# SNS settings
RSS: true
weibo_username: TINE59
github_username: Ja50n
tele_username: Ja50nQiu
# twitter_username: TINE4359
# Build settings
# Since 2016, 'pygments' has been unsupported on GitHub Pages; use 'rouge' for highlighting instead.
highlighter: rouge
permalink: pretty
paginate: 6
exclude: ["less","node_modules","Gruntfile.js","package.json","README.md"]
excerpt_separator: <!--more-->
header-img: /img/blog-bg.jpg
anchorjs: true # if you want to customize the anchors, check out line 181 of `post.html`
valine_use: true
gitments: false
# Gems
# from PR#40, to support local preview for Jekyll 3.0
plugins: [jekyll-paginate]
# Markdown settings
# replaced redcarpet with kramdown:
# although redcarpet can auto-highlight code, its lack of header IDs makes a table of contents impossible, so I switched to kramdown
# documentation: http://jekyllrb.com/docs/configuration/#kramdown
markdown: kramdown
kramdown:
input: GFM # use Github Flavored Markdown !important
# Analytics settings
# Baidu Analytics
ba_track_id: c319ee4a1709fab66b5c289fe63aac77
# Google Analytics
ga_track_id: 'UA-130184697-1' # Format: UA-xxxxxx-xx
ga_domain: https://blog.ulinkmega.com
# Sidebar settings
sidebar: true # whether or not using Sidebar.
sidebar-about-description: "I love programming, design, the internet, and good interaction experiences; I work in machine vision."
sidebar-avatar: https://myulinkblog.oss-cn-shenzhen.aliyuncs.com/2018-10.jpg # use absolute URL, seeing it's used in both `/` and `/about/`
# Featured Tags
featured-tags: true # whether or not using Feature-Tags
featured-condition-size: 2 # A tag will be featured if the size of it is more than this condition value
# Friends
friends: [
{
title: "Hux Blog",
href: "http://huangxuan.me"
},{
title: "JASKNi",
href: "http://blog.jaskni.com/"
},{
title: "土木坛子",
href: "https://tumutanzi.com/"
},{
title: "MARKSZのBlog",
href: "https://molunerfinn.com/"
},{
title: "周良粥凉",
href: "https://imzl.com/"
},{
title: "常阳时光",
href: "https://cyhour.com/"
},{
title: "南歌鹿人",
href: "http://www.ma-am.cn/"
},{
title: "Typeblog",
href: "https://typeblog.net/"
},{
title: "K<NAME>",
href: "http://blog.zhowkev.in/"
},{
title: "faw.life",
href: "http://faw.life/"
},{
title: "森纯博客",
href: "https://isenchun.cn/"
},{
title: "浮白载笔",
href: "http://www.winature.com/"
},{
title: "水八口记",
href: "https://blog.shuiba.co/"
},{
title: "随望淡思",
href: "https://www.lushaojun.com/"
}
]
# Custome
strRaw: "{% raw %}"
strEndRaw: "{% endraw %}"
|
_config.yml
|
title: Windows Virtual Desktop documentation
summary: Deliver a virtual desktop experience and remote apps to any device. Bring together Microsoft 365 and Azure to provide users with the only multi-session Windows 10 experience, at exceptional scale and with reduced IT costs.
metadata:
title: Windows Virtual Desktop documentation
description: Windows Virtual Desktop for Azure is a multi-user Windows 10 Enterprise experience virtualized in the cloud.
ms.service: virtual-desktop
services: virtual-desktop
ms.topic: landing-page
manager: femila
author: Heidilohr
ms.author: helohr
ms.date: 03/06/2020
ms.openlocfilehash: 4e0d8bdc1506fc8eb4c6087a6b01011fade3c77f
ms.sourcegitcommit: 56b0c7923d67f96da21653b4bb37d943c36a81d6
ms.translationtype: MT
ms.contentlocale: pl-PL
ms.lasthandoff: 04/06/2021
ms.locfileid: "106446795"
landingContent:
- title: About Windows Virtual Desktop
linkLists:
- linkListType: overview
links:
- text: What is Windows Virtual Desktop?
url: overview.md
- title: Get started with Windows Virtual Desktop
linkLists:
- linkListType: tutorial
links:
- text: Create a host pool with the Azure Marketplace
url: create-host-pools-azure-marketplace.md
- text: Manage app groups for Windows Virtual Desktop
url: manage-app-groups.md
- text: Create a host pool to validate service updates
url: create-validation-host-pool.md
- text: Configure service alerts
url: set-up-service-alerts.md
- title: Get started with Windows Virtual Desktop (classic)
linkLists:
- linkListType: tutorial
links:
- text: Create a tenant in Windows Virtual Desktop
url: ./virtual-desktop-fall-2019/tenant-setup-azure-active-directory.md
- text: Create a host pool with the Azure Marketplace
url: create-host-pools-azure-marketplace.md
- text: Manage app groups for Windows Virtual Desktop
url: manage-app-groups.md
- text: Create service principals and role assignments with PowerShell
url: ./virtual-desktop-fall-2019/create-service-principal-role-powershell.md
- title: Connect to Windows Virtual Desktop
linkLists:
- linkListType: how-to-guide
links:
- text: Windows Desktop client
url: connect-windows-7-10.md
- text: Web client
url: connect-web.md
- text: Android client
url: connect-android.md
- text: macOS client
url: connect-macos.md
- text: iOS client
url: connect-ios.md
|
articles/virtual-desktop/index.yml
|
apiVersion: apiextensions.k8s.io/v1beta1
kind: CustomResourceDefinition
metadata:
name: linstorcsidrivers.piraeus.linbit.com
spec:
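# additionalPrinterColumns defines the extra columns shown in "kubectl get" output for this resource.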
additionalPrinterColumns:
- JSONPath: .status.NodeReady
name: NodeReady
type: boolean
- JSONPath: .status.ControllerReady
name: ControllerReady
type: boolean
group: piraeus.linbit.com
names:
kind: LinstorCSIDriver
listKind: LinstorCSIDriverList
plural: linstorcsidrivers
singular: linstorcsidriver
scope: Namespaced
subresources:
status: {}
validation:
openAPIV3Schema:
description: LinstorCSIDriver is the Schema for the linstorcsidrivers API
properties:
apiVersion:
description: 'APIVersion defines the versioned schema of this representation
of an object. Servers should convert recognized schemas to the latest
internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources'
type: string
kind:
description: 'Kind is a string value representing the REST resource this
object represents. Servers may infer this from the endpoint the client
submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds'
type: string
metadata:
type: object
spec:
description: LinstorCSIDriverSpec defines the desired state of LinstorCSIDriver
properties:
controllerEndpoint:
description: Cluster URL of the linstor controller. If not set, will
be determined from the current resource name.
type: string
csiAttacherImage:
description: Name of the CSI external attacher image. See https://kubernetes-csi.github.io/docs/external-attacher.html
type: string
csiControllerServiceAccountName:
description: Name of the service account used by the CSI controller
pods
type: string
csiNodeDriverRegistrarImage:
description: Name of the CSI node driver registrar image. See https://kubernetes-csi.github.io/docs/node-driver-registrar.html
type: string
csiNodeServiceAccountName:
description: Name of the service account used by the CSI node pods
type: string
csiProvisionerImage:
description: Name of the CSI external provisioner image. See https://kubernetes-csi.github.io/docs/external-provisioner.html
type: string
csiResizerImage:
description: Name of the CSI external resizer image. See https://kubernetes-csi.github.io/docs/external-resizer.html
type: string
csiSnapshotterImage:
description: Name of the CSI external snapshotter image. See https://kubernetes-csi.github.io/docs/external-snapshotter.html
type: string
imagePullSecret:
description: Name of a secret with authentication details for the `LinstorPluginImage`
registry
type: string
linstorHttpsClientSecret:
description: 'Name of the secret containing: (a) `ca.pem`: root certificate
used to validate HTTPS connections with Linstor (PEM format, without
password) (b) `client.key`: client key used by the linstor client
(PEM format, without password) (c) `client.cert`: client certificate
matching the client key (PEM format, without password) If set, HTTPS
is used for connecting and authenticating with linstor'
type: string
linstorPluginImage:
description: Image that contains the linstor-csi driver plugin
type: string
priorityClassName:
description: priorityClassName is the name of the PriorityClass for
the csi driver pods
type: string
required:
- imagePullSecret
- linstorPluginImage
type: object
status:
description: LinstorCSIDriverStatus defines the observed state of LinstorCSIDriver
properties:
ControllerReady:
description: CSI controller ready status
type: boolean
NodeReady:
description: CSI node components ready status
type: boolean
errors:
description: Errors remaining that will trigger reconciliations.
items:
type: string
type: array
required:
- ControllerReady
- NodeReady
- errors
type: object
type: object
version: v1alpha1
versions:
- name: v1alpha1
served: true
storage: true
|
charts/piraeus/crds/piraeus.linbit.com_linstorcsidrivers_crd.yaml
|
---
apiVersion: apiextensions.k8s.io/v1beta1
kind: CustomResourceDefinition
metadata:
creationTimestamp: null
name: tasklaunchers.task.projectriff.io
spec:
group: task.projectriff.io
names:
kind: TaskLauncher
plural: tasklaunchers
scope: ""
validation:
openAPIV3Schema:
description: TaskLauncher is the Schema for the tasklaunchers API
properties:
apiVersion:
description: 'APIVersion defines the versioned schema of this representation
of an object. Servers should convert recognized schemas to the latest
internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#resources'
type: string
kind:
description: 'Kind is a string value representing the REST resource this
object represents. Servers may infer this from the endpoint the client
submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/api-conventions.md#types-kinds'
type: string
metadata:
type: object
spec:
description: TaskLauncherSpec defines the desired state of TaskLauncher
properties:
build:
description: Build resolves the image from a build resource.
properties:
applicationRef:
description: ApplicationRef references an application in this namespace.
type: string
containerRef:
description: ContainerRef references a container in this namespace.
type: string
type: object
type: object
status:
description: TaskLauncherStatus defines the observed state of TaskLauncher
type: object
type: object
version: v1alpha1
versions:
- name: v1alpha1
served: true
storage: true
status:
acceptedNames:
kind: ""
plural: ""
conditions: []
storedVersions: []
|
config/crd/bases/task.projectriff.io_tasklaunchers.yaml
|
language: generic
sudo: true
stages:
- test-ui
addons:
postgresql: '9.6'
env:
global:
- PGPORT=5432
jobs:
include:
- stage: test-ui
name: 'Build, Test and Package UI'
install:
- nvm install 9
language: scala
scala:
- 2.12
jdk: openjdk8
services:
- postgresql
cache:
- sbt
- ivy2
before_script:
- psql -c 'create database archway;' -U postgres
script:
- export ARCHWAY_VERSION=$(date +%Y-%m-%d)-${TRAVIS_COMMIT}
- npm i -g typescript || travis_terminate 1;
- npm i || travis_terminate 1;
- npm run-script prepare || travis_terminate 1;
- npm test
- './flyway/flyway migrate -url="jdbc:postgresql://localhost:5432/archway" -user=postgres -password=<PASSWORD>'
- ./sbt common/test || travis_terminate 1;
- ./sbt api/test || travis_terminate 1;
- ./sbt provisioning/test || travis_terminate 1;
- './sbt "set every test in assembly := {}" api/assembly'
- './sbt "set every test in assembly := {}" integration-test/assembly'
- ./sbt integration-test/test:package || travis_terminate 1;
- ./sbt common/test:package || travis_terminate 1;
- java -jar ./cloudera-integration/validator.jar -p $TRAVIS_BUILD_DIR/cloudera-integration/parcel/archway-meta/parcel.json || travis_terminate 1;
- java -jar ./cloudera-integration/validator.jar -r $TRAVIS_BUILD_DIR/cloudera-integration/parcel/archway-meta/permissions.json || travis_terminate 1;
- java -jar ./cloudera-integration/validator.jar -s $TRAVIS_BUILD_DIR/cloudera-integration/csd/descriptor/service.sdl || travis_terminate 1;
- echo $ARCHWAY_VERSION
deploy:
skip_cleanup: true
provider: script
script: export ARCHWAY_VERSION=${TRAVIS_TAG} && ./publish.sh parcel archway && ./publish.sh manifest && ./publish.sh csd && ./publish.sh ship
on:
tags: true
all_branches: true
|
.travis.yml
|
on:
push:
# Sequence of patterns matched against refs/tags
tags:
- 'v*' # Push events to matching v*, i.e. v1.0, v20.15.10
name: Create Release
env:
# Could potentially parse the bin name
# automatically, but let's do it manually for now.
RELEASE_BIN: cbsh
RELEASE_BIN_WINDOWS: cbsh.exe
# Space separated paths to include in the archive.
# Start relative paths with a dot if you don't want
# paths to be preserved. Use "/" as a delimiter.
RELEASE_ADDS: README.md LICENSE examples
jobs:
build:
name: Build release
runs-on: ${{ matrix.os }}
strategy:
matrix:
build: [linux, macos, windows]
include:
- build: linux
os: ubuntu-latest
rust: stable
- build: macos
os: macos-latest
rust: stable
- build: windows
os: windows-2016
rust: stable
steps:
- uses: actions/checkout@v1
- name: Install Rust (rustup)
run: rustup update ${{ matrix.rust }} --no-self-update && rustup default ${{ matrix.rust }}
if: matrix.os != 'macos-latest'
shell: bash
- name: Install Rust (macos)
run: |
curl https://sh.rustup.rs | sh -s -- -y
echo "$HOME/.cargo/bin" >> $GITHUB_PATH
if: matrix.os == 'macos-latest'
- name: Build
run: cargo build --verbose --release
if: matrix.os == 'ubuntu-latest'
- name: Build
run: cargo build --verbose --release
if: matrix.os == 'windows-2016'
- name: Build (macos)
run: cargo build --verbose --release
env:
MACOSX_DEPLOYMENT_TARGET: 10.9
if: matrix.os == 'macos-latest'
- name: Create artifact directory
run: mkdir artifacts
- name: Create archive for Linux
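# The first 7z writes a tar archive to stdout (-so); the second 7z reads it from stdin (-si) and compresses it into the .tar.gz.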
run: 7z a -ttar -so -an ./target/release/${{ env.RELEASE_BIN }} ${{ env.RELEASE_ADDS }} | 7z a -si ./artifacts/${{ env.RELEASE_BIN }}-linux-x86_64.tar.gz
if: matrix.os == 'ubuntu-latest'
- name: Install p7zip
# 7Zip is not available on macOS, so install p7zip via Homebrew.
run: brew install p7zip
if: matrix.os == 'macos-latest'
- name: Create archive for MacOS
run: 7z a -tzip ./artifacts/${{ env.RELEASE_BIN }}-mac-x86_64.zip ./target/release/${{ env.RELEASE_BIN }} ${{ env.RELEASE_ADDS }}
if: matrix.os == 'macos-latest'
- name: Create archive for Windows
run: |
choco install 7zip
7z a -tzip ./artifacts/${{ env.RELEASE_BIN }}-windows-x86_64.zip ./target/release/${{ env.RELEASE_BIN_WINDOWS }} ${{ env.RELEASE_ADDS }}
if: matrix.os == 'windows-2016'
# This will double-zip
# See - https://github.com/actions/upload-artifact/issues/39
- uses: actions/upload-artifact@v1
name: Upload archive
with:
name: ${{ runner.os }}
path: artifacts/
|
.github/workflows/release.yml
|
swagger: "2.0"
info:
title: Calculator Service
description: HTTP service for adding numbers, a goa teaser
version: ""
host: localhost:8000
consumes:
- application/json
- application/xml
- application/gob
produces:
- application/json
- application/xml
- application/gob
paths:
/add/{a}/{b}:
get:
tags:
- calc
summary: add calc
operationId: calc#add
parameters:
- name: a
in: path
description: Left operand
required: true
type: integer
- name: b
in: path
description: Right operand
required: true
type: integer
responses:
"200":
description: OK response.
schema:
type: integer
format: int64
schemes:
- http
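# Example (assuming the service is running locally): GET http://localhost:8000/add/1/2 returns 3.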
/ui/{filepath}:
get:
tags:
- calc
summary: Download ui/
operationId: calc#/ui/{*filepath}
parameters:
- name: filepath
in: path
description: Relative file path
required: true
type: string
responses:
"200":
description: File downloaded
schema:
type: file
"404":
description: File not found
schema:
$ref: '#/definitions/Error'
schemes:
- http
- https
/ui/swagger.json:
get:
tags:
- calc
summary: Download ./gen/http/openapi.json
operationId: calc#/ui/swagger.json
responses:
"200":
description: File downloaded
schema:
type: file
schemes:
- http
- https
definitions:
Error:
title: 'Mediatype identifier: application/vnd.goa.error; view=default'
type: object
properties:
fault:
type: boolean
description: Is the error a server-side fault?
example: true
id:
type: string
description: ID is a unique identifier for this particular occurrence of the
problem.
example: 123abc
message:
type: string
description: Message is a human-readable explanation specific to this occurrence
of the problem.
example: parameter 'p' must be an integer
name:
type: string
description: Name is the name of this class of errors.
example: bad_request
temporary:
type: boolean
description: Is the error temporary?
example: false
timeout:
type: boolean
description: Is the error a timeout?
example: false
description: Error response result type (default view)
example:
fault: false
id: 123abc
message: parameter 'p' must be an integer
name: bad_request
temporary: false
timeout: true
required:
- name
- id
- message
- temporary
- timeout
- fault
|
gen/http/openapi.yaml
|
displays:
default:
width: 400
height: 300
modes:
- mode1
slides:
video_test:
- type: video
video: mpf_video_small_test
- type: text
text: Video Test
y: bottom+20%
- type: text
text: ""
y: bottom+10%
video_test2:
- type: video
video: mpf_video_small_test
control_events:
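# Each entry maps a posted MPF event to a video player action (play, stop, pause, seek, position, volume).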
- event: play1
action: play
- event: stop1
action: stop
- event: pause1
action: pause
- event: seek1
action: seek
value: .5
- event: position1
action: position
value: 4
- event: mute
action: volume
value: 0
- type: text
text: Video Control Events Test
y: bottom+20%
- type: text
text: ""
y: bottom+10%
video_test3:
- type: video
video: mpf_video_small_test
control_events:
- event: pre_show_slide
action: seek
value: .5
video_test4:
- type: video
video: mpf_video_small_test
control_events:
- event: show_slide
action: seek
value: .5
video_test5:
- type: video
video: mpf_video_small_test
control_events:
- event: pre_slide_leave
action: seek
value: .5
video_test6:
- type: video
video: mpf_video_small_test
control_events:
- event: slide_leave
action: seek
value: .5
video_test7:
- type: video
video: mpf_video_small_test
auto_play: true
end_behavior: loop
volume: .2
control_events:
- event: seek1
action: seek
value: .9
video_test8:
- type: video
video: mpf_video_small_test
auto_play: false
end_behavior: stop
volume: 0.8
control_events:
- event: play1
action: play
- event: seek1
action: seek
value: .9
video_test9:
- type: text
text: Machine slide, no video
slide_player:
show_slide1: video_test
show_slide2: video_test2
show_slide3: video_test3
show_slide4: video_test4
show_slide5: video_test5
show_slide6: video_test6
show_slide7: video_test7
show_slide8: video_test8
show_slide9: video_test9
videos:
mpf_video_small_test:
width: 100
height: 70
|
example_configs/video/config/test_video.yaml
|
--- !<MAP>
contentType: "MAP"
firstIndex: "2019-09-14 12:38"
game: "Unreal"
name: "MH-[SP]FairyValley"
author: "THUNDERBOLT"
description: "None"
releaseDate: "2016-05"
attachments:
- type: "IMAGE"
name: "mh-spfairyvalley_shot_4db413ac_1.png"
url: "https://f002.backblazeb2.com/file/unreal-archive-images/Unreal/Maps/Monster%20Hunt/S/mh-spfairyvalley_shot_4db413ac_1.png"
originalFilename: "MH-[SP]FairyValley.zip"
hash: "4db413acd333765be09484eb56de5455fc9897db"
fileSize: 16078978
files:
- name: "Harmony.umx"
fileSize: 402688
hash: "7bb915168f93755c2662f2735e9165ff0bb291bc"
- name: "ShadowManNature.utx"
fileSize: 5652455
hash: "42285e28708ebd3cc240551ef613dcd674aa3cd4"
- name: "WSWestern_Pack1.u"
fileSize: 4225497
hash: "4e4df1842b5135050c3a14f3d12afb3030330a3e"
- name: "Swaytrees.u"
fileSize: 888616
hash: "299c9d0333473454d32674186652bd2c21124b96"
- name: "MH-[SP]FairyValley.unr"
fileSize: 13950169
hash: "e9341ebd07b9f798098abf4e250be5e7334f9f54"
- name: "Forestdecs.u"
fileSize: 104162
hash: "afde5b1ed66405b9068a99ab667d24f9956735e7"
- name: "Animal_Pack1.u"
fileSize: 2192402
hash: "08067207103da3bef9656aa1ffafd23a105632d2"
- name: "SM_Nature.u"
fileSize: 256477
hash: "2190c782d79b5ef52f37238352526f3d2cc2de41"
- name: "forestpack.utx"
fileSize: 1056777
hash: "31cfc2aa68ee4b9138d0e73fbd15594282be60a8"
- name: "BullPlants2004.u"
fileSize: 410790
hash: "935d52b1489ba4859ac72ce2effc15757f396564"
- name: "TobioTrees.u"
fileSize: 840279
hash: "f8d48a4c4896ecb588c8574dc48166322567dfee"
- name: "VisionsOfHope.umx"
fileSize: 387409
hash: "b0fb9238b24418b183cd3bf5778a3fbedd14df8d"
- name: "DestinationEarth.umx"
fileSize: 1697535
hash: "7fa8b9e30e4e4935ff06f7843a93f5f41ea5ca8a"
otherFiles: 0
dependencies:
MH-[SP]FairyValley.unr:
- status: "OK"
name: "Forestdecs"
- status: "MISSING"
name: "Reaper"
- status: "MISSING"
name: "SP_MonsterHunt"
- status: "MISSING"
name: "SP_XidiaMPack"
- status: "MISSING"
name: "Basilisk"
- status: "OK"
name: "SM_Nature"
- status: "MISSING"
name: "silverback"
- status: "OK"
name: "WSWestern_Pack1"
- status: "OK"
name: "ShadowManNature"
- status: "OK"
name: "BullPlants2004"
- status: "MISSING"
name: "AssaultDeemer"
- status: "MISSING"
name: "SPAngel"
- status: "MISSING"
name: "muton"
- status: "MISSING"
name: "SP_GhandiWeapons"
- status: "MISSING"
name: "AssaultM16"
- status: "OK"
name: "Animal_PACK1"
- status: "MISSING"
name: "AssaultM79"
- status: "MISSING"
name: "SPGargoyle"
- status: "OK"
name: "TobioTrees"
- status: "OK"
name: "Swaytrees"
- status: "MISSING"
name: "Predator"
- status: "MISSING"
name: "ds_ga"
- status: "MISSING"
name: "HellDemon"
Forestdecs.u:
- status: "OK"
name: "forestpack"
downloads:
- url: "https://f002.backblazeb2.com/file/unreal-archive-files/Unreal/Maps/Monster%20Hunt/S/MH-%5BSP%5DFairyValley.zip"
main: true
repack: false
state: "OK"
- url: "https://files.vohzd.com/unrealarchive/Unreal/Maps/Monster%20Hunt/S/4/d/b413ac/MH-%255BSP%255DFairyValley.zip"
main: false
repack: false
state: "OK"
- url: "https://unreal-archive-files.eu-central-1.linodeobjects.com/Unreal/Maps/Monster%20Hunt/S/4/d/b413ac/MH-%255BSP%255DFairyValley.zip"
main: false
repack: false
state: "OK"
deleted: false
gametype: "Monster Hunt"
title: "MH-[SP]FairyValley"
playerCount: "12/16"
themes:
Nali Temple: 0.2
Ancient: 0.1
Natural: 0.4
Nali Castle: 0.3
bots: true
|
content/Unreal/Maps/Monster Hunt/S/4/d/b413ac/mh-spfairyvalley_[4db413ac].yml
|
fixture_1:
raw_form_data:
first_name: Fred
middle_name:
last_name: Regpatient
birth_sex: Male
birth_date: '1953-03-04'
ssn: '666328412'
address_line1: 9371 Signal Station Dr
city: Manassas
state_code: VA
zip_code: '20111-8258'
email_address: <EMAIL>
phone: 888-123-0001
preferred_facility: '523'
fixture_2:
raw_form_data:
first_name: Patty
middle_name:
last_name: Regpatient
birth_sex: Female
birth_date: '1987-03-22'
ssn: '666059255'
address_line1: 3433 THIRD ST
city: ST PETERSBURG
state_code: FL
zip_code: '33701'
email_address: <EMAIL>
phone: 888-123-0002
preferred_facility: '674'
fixture_3:
raw_form_data:
first_name: Sally
middle_name:
last_name: Regpatient
birth_sex: Female
birth_date: '1980-03-11'
ssn: '661059120'
address_line1: 10307 VENITIA REAL AVE APT 202
city: TAMPA
state_code: FL
zip_code: '33647-4010'
email_address: <EMAIL>
phone: 888-123-0003
preferred_facility: '528A7'
fixture_4:
raw_form_data:
first_name: Martin
middle_name:
last_name: Regpatient
birth_sex: Male
birth_date: '1950-01-01'
ssn: '669029484'
address_line1: 10307 Venitia Real Ave Apt 201
city: Tampa
state_code: FL
zip_code: '33647-4010'
email_address: <EMAIL>
phone: 888-123-0004
preferred_facility: '612A4'
fixture_5:
raw_form_data:
first_name: Alfred
middle_name:
last_name: Regpatient
birth_sex: Male
birth_date: '1978-02-11'
ssn: '628503403'
address_line1: 810 Vermont Ave
city: Washington
state_code: DC
zip_code: '20001'
email_address: <EMAIL>
phone: 888-123-0005
preferred_facility: '623'
fixture_6:
raw_form_data:
first_name: Six
middle_name:
last_name: Regpatient
birth_sex: Male
birth_date: '1907-12-31'
ssn: '666384023'
address_line1: 3422 GREENE ST
city: TAMPA
state_code: FL
zip_code: '33615'
email_address: <EMAIL>
phone: 888-123-0006
preferred_facility: '757'
fixture_7:
raw_form_data:
first_name: Joey
middle_name:
last_name: Regpatient
birth_sex: Male
birth_date: '1950-11-18'
ssn: '620589933'
address_line1: 44 DENSTRATH RD
city: EDZELL
state_code: SCOTLAND
zip_code: '0368918'
email_address: <EMAIL>
phone: 888-123-0007
preferred_facility: '648'
fixture_8:
raw_form_data:
first_name: Vetcheck
middle_name:
last_name: Regpatient
birth_sex: Female
birth_date: '1960-03-01'
ssn: '662049004'
address_line1: 123 Some Street
city: Springfield
state_code: IL
zip_code: '62704'
email_address: <EMAIL>
phone: 888-123-0008
preferred_facility: '989'
fixture_9:
raw_form_data:
first_name: Gary
middle_name:
last_name: Regpatient
birth_sex: Male
birth_date: '1975-03-01'
ssn: '661035829'
address_line1: 3305 W Alline Ave
city: Tampa
state_code: FL
zip_code: '33611-2721'
email_address: <EMAIL>
phone: 888-123-0009
preferred_facility: '989'
fixture_10:
raw_form_data:
first_name: Jean-Luc
middle_name:
last_name: Picard
birth_sex: Male
birth_date: '1975-07-13'
ssn: '628785325'
address_line1: 123 Federation Parkway
city: Washington
state_code: DC
zip_code: '20001-2721'
email_address: <EMAIL>
phone: 888-123-0010
preferred_facility: '523'
fixture_11:
raw_form_data:
first_name: Christopher
middle_name:
last_name: Pike
birth_sex: Male
birth_date: '1942-01-01'
ssn: '628782353'
address_line1: 123 Federation Parkway
city: Washington
state_code: DC
zip_code: '20001'
email_address: <EMAIL>
phone: 888-123-0011
preferred_facility: '988'
fixture_12:
raw_form_data:
first_name: Marjorie
middle_name: Jacqueline
last_name: Simpson
birth_sex: Female
birth_date: '1956-03-19'
ssn: '319712341'
address_line1: 742 Evergreen Terrace
city: Springfield
state_code: IL
zip_code: '62704'
email_address: <EMAIL>
phone: 888-123-0012
preferred_facility: '987'
|
modules/covid_vaccine/spec/fixtures/expanded_registration_submissions.yml
|
---
- block:
- name: Create {{ addon.name }} addons directories
when: addon is defined
file: path="{{ addon_dir }}/{{ item }}" state=directory
with_items: "{{ addon.directories }}"
- name: Configure {{ addon.name }} addon deployment dependency files
when: addon is defined and addon.dependencies is defined
template:
src: "{{ item.1 }}.j2"
dest: "{{ addon_dir }}/{{ item.1 }}"
with_subelements:
- "{{ addon.dependencies }}"
- files
register: copy_addon_deps
- name: Create {{ addon.name }} addon dependencies
when: addon is defined and addon.dependencies is defined and copy_addon_deps
command: |
{{ bin_dir }}/kubectl --kubeconfig={{ admin_kubeconfig }} \
apply -f {{ addon_dir }}/{{ item.1 }}
with_subelements:
- "{{ addon.dependencies }}"
- files
register: apply_addon_deps
until: apply_addon_deps.rc == 0
retries: 10
delay: "{{ retry_stagger | random + 2 }}"
- name: Check {{ addon.name }} addon dependency status
when: addon is defined and addon.dependencies is defined and apply_addon_deps
shell: |
{{ bin_dir }}/kubectl -n {{ item.namespace | default('') }} --kubeconfig={{ admin_kubeconfig }} \
get {{ item.kind }} --no-headers -o=custom-columns=NAME:.metadata.name,STATUS:.status.phase | \
grep {{ item.name }} | awk '{ print $2}' | uniq
with_items: "{{ addon.dependencies }}"
register: check_addon_deps
until: check_addon_deps.stdout == 'Running' or check_addon_deps.stdout == 'Active'
retries: 10
delay: "{{ retry_stagger | random + 30 }}"
- name: Configure {{ addon.name }} addon deployment files
when: addon is defined and addon.files is defined
copy:
src: "{{ item }}"
dest: "{{ addon_dir }}/{{ item }}"
with_items: "{{ addon.files }}"
register: copy_addon_files
- name: Generate {{ addon.name }} addon deployment files
when: addon is defined
template:
src: "{{ item }}.j2"
dest: "{{ addon_dir }}/{{ item }}"
with_items: "{{ addon.templates }}"
register: copy_addon_templates
- name: Deploy {{ addon.name }} addon
when: addon is defined and copy_addon_templates
command: |
{{ bin_dir }}/kubectl --kubeconfig={{ admin_kubeconfig }} \
apply -f {{ addon_dir }}/{{ item }}
with_items: "{{ addon.directories }}"
register: apply_addon
until: apply_addon.rc == 0
retries: 1
delay: "{{ retry_stagger | random + 3 }}"
run_once: true
delegate_to: "{{ groups['masters'][0] }}"
|
roles/k8s-addon/tasks/main.yml
|
items:
- uid: \Google\Cloud\Vision\V1\Position
name: Position
id: Position
summary: |-
A 3D position in the image, used primarily for Face detection landmarks.
A valid Position must have both x and y coordinates.
The position coordinates are in the same scale as the original image.
Generated from protobuf message <code>google.cloud.vision.v1.Position</code>
type: class
langs:
- php
children:
- \Google\Cloud\Vision\V1\Position::__construct()
- \Google\Cloud\Vision\V1\Position::getX()
- \Google\Cloud\Vision\V1\Position::setX()
- \Google\Cloud\Vision\V1\Position::getY()
- \Google\Cloud\Vision\V1\Position::setY()
- \Google\Cloud\Vision\V1\Position::getZ()
- \Google\Cloud\Vision\V1\Position::setZ()
properties:
- name: x
description: |-
X coordinate.
Generated from protobuf field <code>float x = 1;</code>
- name: "y"
description: |-
Y coordinate.
Generated from protobuf field <code>float y = 2;</code>
- name: z
description: |-
Z coordinate (or depth).
Generated from protobuf field <code>float z = 3;</code>
- uid: \Google\Cloud\Vision\V1\Position::__construct()
name: __construct
id: __construct
summary: Constructor.
parent: \Google\Cloud\Vision\V1\Position
type: method
langs:
- php
parameters:
- type: array
name: data
description: '{ Optional. Data for populating the Message object. @type
float $x X coordinate. @type float $y Y coordinate. @type
float $z Z coordinate (or depth). }'
- uid: \Google\Cloud\Vision\V1\Position::getX()
name: getX
id: getX
summary: |-
X coordinate.
Generated from protobuf field <code>float x = 1;</code>
parent: \Google\Cloud\Vision\V1\Position
type: method
langs:
- php
- uid: \Google\Cloud\Vision\V1\Position::setX()
name: setX
id: setX
summary: |-
X coordinate.
Generated from protobuf field <code>float x = 1;</code>
parent: \Google\Cloud\Vision\V1\Position
type: method
langs:
- php
parameters:
- type: float
name: var
- uid: \Google\Cloud\Vision\V1\Position::getY()
name: getY
id: getY
summary: |-
Y coordinate.
Generated from protobuf field <code>float y = 2;</code>
parent: \Google\Cloud\Vision\V1\Position
type: method
langs:
- php
- uid: \Google\Cloud\Vision\V1\Position::setY()
name: setY
id: setY
summary: |-
Y coordinate.
Generated from protobuf field <code>float y = 2;</code>
parent: \Google\Cloud\Vision\V1\Position
type: method
langs:
- php
parameters:
- type: float
name: var
- uid: \Google\Cloud\Vision\V1\Position::getZ()
name: getZ
id: getZ
summary: |-
Z coordinate (or depth).
Generated from protobuf field <code>float z = 3;</code>
parent: \Google\Cloud\Vision\V1\Position
type: method
langs:
- php
- uid: \Google\Cloud\Vision\V1\Position::setZ()
name: setZ
id: setZ
summary: |-
Z coordinate (or depth).
Generated from protobuf field <code>float z = 3;</code>
parent: \Google\Cloud\Vision\V1\Position
type: method
langs:
- php
parameters:
- type: float
name: var
|
testdata/golden/V1.Position.yml
|
version: '2'
services:
django:
build:
context: .
dockerfile: Dockerfile-django
container_name: {{ cookiecutter.repo_name }}_django
entrypoint: /usr/bin/wait-for-it.sh postgres:5432 -t 60 --
command: python manage.py runserver 0.0.0.0:8000
# command: /usr/local/bin/gunicorn {{ cookiecutter.repo_name }}.wsgi:application -b :8000 --reload
ports:
- "8000:8000"
environment:
- PYTHONUNBUFFERED=0
volumes:
- "./static/public:/files/public:ro"
- "./{{ cookiecutter.repo_name }}:/app/{{ cookiecutter.repo_name }}"
- "./.data-media:/files/media"
- "./.data-ipython:/root/.ipython"
# Add static files to the container to get the translations
- "./static:/app/static:ro"
depends_on:
- node
- postgres
- redis
postgres:
image: "postgres:9.6.1"
volumes:
- "./.data-postgres:/var/lib/postgresql/data"
celery:
build:
context: .
dockerfile: Dockerfile-django
entrypoint: /usr/bin/wait-for-it.sh postgres:5432 -t 60 --
volumes:
- "./{{ cookiecutter.repo_name }}:/app/{{ cookiecutter.repo_name }}:ro"
depends_on:
- postgres
- redis
command: celery worker --app {{ cookiecutter.repo_name }} --autoscale 10,3 --loglevel INFO
celery_beat:
build:
context: .
dockerfile: Dockerfile-django
volumes:
- "./{{ cookiecutter.repo_name }}:/app/{{ cookiecutter.repo_name }}:ro"
- ".data-celery/:/app/celery"
depends_on:
- redis
# Disable the pidfile by specifying an empty one. We use a fixed container_name, which guarantees a single
# running process, and the lack of a pidfile ensures that Celery Beat starts even if the Docker container
# was killed and then restarted (in which case the pidfile would still be present).
command: celery beat --app {{ cookiecutter.repo_name }} --loglevel INFO --pidfile= --schedule /app/celery/celerybeat-schedule
redis:
image: "redis:3.2.6"
node:
build:
context: .
dockerfile: Dockerfile-node
volumes:
- "./static/public:/static/public"
- "./static/src:/static/src:ro"
command: npm run dev
|
{{cookiecutter.repo_name}}/docker-compose.yml
|
title: journee-mondiale-de-la-population-2010
date: '2010-07-11T00:00:00.000Z'
image: 'https://www.populationdata.net/wp-content/uploads/chine-fillette.jpg'
fr:
title: World Population Day 2010
body: >-
On the occasion of World Population Day, celebrated on 11 July and whose
theme this year is "Everyone counts", the UN recalls how essential it is
to have accurate data on population trends in order to ensure the success
of development policies and the achievement of the Millennium Development
Goals (MDGs).<!--more-->On 11 July 1987, the world population reached 5
billion people. On 11 July 2010, it stands at almost 6.9 billion. On 11
July 2050, it will probably exceed 9 billion. In sixty years, the number
of humans on Earth will have nearly doubled. These projections, like the
demographic trends of the planet's countries, cities, regions and
continents, have a decisive influence on the fight against poverty, the
promotion of development, public health and HIV/AIDS policies, and the
investments to be made in education, the promotion of gender equality and
the empowerment of women. <p>Without reliable demographic data it is
impossible to prepare for the future, so everyone must be counted to make
sure that everyone is taken into account. That is the message of this
2010 World Population Day.</p> <p>In a statement issued for the occasion,
UN Secretary-General Ban Ki-moon particularly stressed this point.
"Access to reliable data is one of the conditions of good governance,
transparency and effective accountability. Demographic data help leaders
and officials shape policies and make decisions," he insisted, also
recalling the importance of solid data for responding effectively to
humanitarian crises.</p> <p>For <NAME>, "to be counted is to be visible",
which is fundamental for the most vulnerable: women, children, young
people and the marginalised. "Accurate data can encourage national
policymakers to take the rights and needs of women and young people into
account and help build a more equitable and prosperous society," he
added, welcoming the increasingly regular and accurate censuses held
around the world, notably in "many countries which, in the past, had
been unable to complete their national census".</p> <p>"Only by taking
into account the needs of all, women, men, girls and boys, will we
achieve the Millennium Development Goals and uphold the common values of
the United Nations," he concluded, echoing the words of the Director of
the United Nations Population Fund (UNFPA), <NAME>.</p> <p>In a message
for this World Day, <NAME> recalls that "population dynamics, including
growth rates, age structure, fertility, mortality and migration, affect
every aspect of human, social and economic development".</p> <p>In Asia,
for example, censuses and surveys have made it possible to recognise
gender imbalances and the drastic decline in the number of girls, a
reality that has raised governments' awareness and led to the
implementation of policies designed to correct the trend. In Europe,
data analysis has prompted an examination of the immigration levels
needed to maintain the population and offset its ageing. In yet other
countries, higher rates of contraceptive use and of skilled attendance
at births have shown progress in maternal health. </p> <p>For <NAME>,
"censuses, surveys and State statistics provide critically important
data to guide the plans, policies and programmes designed to improve the
lives of all".</p> <p>Source: UN (press release)<br /></p>
en:
title: World Population Day 2010
body: ''
|
data/posts/2010-07-10_journee-mondiale-de-la-population-2010.yml
|
tosca_definitions_version: tosca_simple_yaml_1_1_0
policy_types:
onap.policies.optimization.resource.HpaPolicy:
derived_from: onap.policies.optimization.Resource
version: 1.0.0
name: onap.policies.optimization.resource.HpaPolicy
properties:
flavorFeatures:
type: list
required: true
entry_schema:
type: policy.data.flavorFeatures_properties
data_types:
policy.data.flavorFeatures_properties:
derived_from: tosca.nodes.Root
properties:
id:
type: string
required: true
type:
type: string
required: true
directives:
type: list
required: true
entry_schema:
type: policy.data.directives_properties
flavorProperties:
type: list
required: true
entry_schema:
type: policy.data.flavorProperties_properties
policy.data.directives_properties:
derived_from: tosca.nodes.Root
properties:
type:
type: string
attributes:
type: list
entry_schema:
type: policy.data.directives_attributes_properties
policy.data.directives_attributes_properties:
derived_from: tosca.nodes.Root
properties:
attribute_name:
type: string
attribute_value:
type: string
policy.data.flavorProperties_properties:
derived_from: tosca.nodes.Root
properties:
hpa-feature:
type: string
required: true
mandatory:
type: string
required: true
score:
type: string
required: false
architecture:
type: string
required: true
hpa-version:
type: string
required: true
directives:
type: list
required: true
entry_schema:
type: policy.data.directives_properties
hpa-feature-attributes:
type: list
required: true
entry_schema:
type: policy.data.hpa-feature-attributes_properties
policy.data.hpa-feature-attributes_properties:
derived_from: tosca.nodes.Root
properties:
hpa-attribute-key:
type: string
required: true
hpa-attribute-value:
type: string
required: true
operator:
type: list
required: true
entry_schema:
type: string
constraints:
- valid_values:
- <
- <=
- '>'
- '>='
- =
- '!='
- any
- all
- subset
unit:
type: string
required: false
|
models-examples/src/main/resources/policytypes/onap.policies.optimization.resource.HpaPolicy.yaml
|
php-worker:
build:
context: ${DOCKERFILE_PATH_HOST}/php-worker
args:
- CHANGE_SOURCE=${CHANGE_SOURCE}
- LARADOCK_PHP_VERSION=${PHP_VERSION}
- PHALCON_VERSION=${PHALCON_VERSION}
- INSTALL_BZ2=${PHP_WORKER_INSTALL_BZ2}
- INSTALL_GD=${PHP_WORKER_INSTALL_GD}
- INSTALL_IMAGEMAGICK=${PHP_WORKER_INSTALL_IMAGEMAGICK}
- INSTALL_GMP=${PHP_WORKER_INSTALL_GMP}
- INSTALL_GNUPG=${PHP_WORKER_INSTALL_GNUPG}
- INSTALL_LDAP=${PHP_WORKER_INSTALL_LDAP}
- INSTALL_PGSQL=${PHP_WORKER_INSTALL_PGSQL}
- INSTALL_MONGO=${PHP_WORKER_INSTALL_MONGO}
- INSTALL_BCMATH=${PHP_WORKER_INSTALL_BCMATH}
- INSTALL_SOCKETS=${PHP_WORKER_INSTALL_SOCKETS}
- INSTALL_MEMCACHED=${PHP_WORKER_INSTALL_MEMCACHED}
- INSTALL_OCI8=${PHP_WORKER_INSTALL_OCI8}
- INSTALL_PHALCON=${PHP_WORKER_INSTALL_PHALCON}
- INSTALL_SOAP=${PHP_WORKER_INSTALL_SOAP}
- INSTALL_ZIP_ARCHIVE=${PHP_WORKER_INSTALL_ZIP_ARCHIVE}
- INSTALL_MYSQL_CLIENT=${PHP_WORKER_INSTALL_MYSQL_CLIENT}
- INSTALL_AMQP=${PHP_WORKER_INSTALL_AMQP}
- INSTALL_CASSANDRA=${PHP_WORKER_INSTALL_CASSANDRA}
- INSTALL_GEARMAN=${PHP_WORKER_INSTALL_GEARMAN}
- INSTALL_GHOSTSCRIPT=${PHP_WORKER_INSTALL_GHOSTSCRIPT}
- INSTALL_SWOOLE=${PHP_WORKER_INSTALL_SWOOLE}
- INSTALL_TAINT=${PHP_WORKER_INSTALL_TAINT}
- INSTALL_FFMPEG=${PHP_WORKER_INSTALL_FFMPEG}
- INSTALL_AUDIOWAVEFORM=${PHP_WORKER_INSTALL_AUDIOWAVEFORM}
- INSTALL_REDIS=${PHP_WORKER_INSTALL_REDIS}
- INSTALL_IMAP=${PHP_WORKER_INSTALL_IMAP}
- INSTALL_XMLRPC=${PHP_WORKER_INSTALL_XMLRPC}
- PUID=${PHP_WORKER_PUID}
- PGID=${PHP_WORKER_PGID}
- IMAGEMAGICK_VERSION=${PHP_WORKER_IMAGEMAGICK_VERSION}
environment:
- TZ=${TZ}
volumes:
- ${CODE_PATH_HOST}:${CODE_PATH_CONTAINER}${APP_CODE_CONTAINER_FLAG}
- ${DOCKERFILE_PATH_HOST}/php-worker/supervisord.d:/etc/supervisord.d
- ${RUNTIME_PATH_HOST}${PHP_WORKER_LOG}:/var/log
networks:
- protected
|
composes/common/php-worker.yml
|
_id: 6b561360-d82a-11e9-83fc-773c7dcbb421
message: >-
A srk.lprx.zacharythomas.github.io.zra.sc uraemia, gonadotrophins
[URL=http://epochcreations.com/lasuna/ - buy lasuna online[/URL -
[URL=http://tamilappstatus.com/amantadine/ - amantadine[/URL - cheapest
amantadine [URL=http://gardeningwithlarry.com/retin-a/ - retin a[/URL -
[URL=http://meetatsonoma.com/buy-prednisone/ - kanine prednisone order online
10 mg no rx[/URL - [URL=http://theriversidegrove.com/desyrel/ - desyrel[/URL
- [URL=http://meetatsonoma.com/viagra-pills/ - viagra buy in canada[/URL -
[URL=http://solepost.com/buy-propecia-online/ - finasteride[/URL -
[URL=http://sallyrjohnson.com/kamagra/ - kamagra buy online[/URL -
[URL=http://healinghorsessanctuary.com/cleocin/ - cleocin without dr
prescription[/URL - oscillating decompensation: <a
href="http://epochcreations.com/lasuna/">lasuna</a> <a
href="http://tamilappstatus.com/amantadine/">amantadine for sale</a> <a
href="http://gardeningwithlarry.com/retin-a/">buy retin a cream</a> <a
href="http://meetatsonoma.com/buy-prednisone/">prednisone 10 mg</a> <a
href="http://theriversidegrove.com/desyrel/">desyrel lowest price</a> desyrel
<a href="http://meetatsonoma.com/viagra-pills/">sildenafil female</a> <a
href="http://solepost.com/buy-propecia-online/">propecia rebate</a> <a
href="http://sallyrjohnson.com/kamagra/">sildenafil soft</a> kamagra in canada
<a href="http://healinghorsessanctuary.com/cleocin/">online cleocin</a>
cleocin no prescription laparoscopy, bronchoscopic immobile
http://epochcreations.com/lasuna/ lasuna http://tamilappstatus.com/amantadine/
amantadine without a prescription http://gardeningwithlarry.com/retin-a/ buy
tretinoin cream 0.05 http://meetatsonoma.com/buy-prednisone/ buy prednisone
without prescription http://theriversidegrove.com/desyrel/ buy trazodone
http://meetatsonoma.com/viagra-pills/ viagra pills
http://solepost.com/buy-propecia-online/ propecia vs finasteride
http://sallyrjohnson.com/kamagra/ viagra cost canada
http://healinghorsessanctuary.com/cleocin/ cleocin administer tight.
name: akeiyuvedkoce
email: <PASSWORD>
url: 'http://epochcreations.com/lasuna/'
hidden: ''
date: '2019-09-16T02:33:51.697Z'
|
_data/comments/elasticsearch-restore/comment-1568601231699.yml
|
items:
- uid: '@azure/arm-servicemap.PortReference'
name: PortReference
fullName: PortReference
children:
- '@azure/arm-servicemap.PortReference.id'
- '@azure/arm-servicemap.PortReference.ipAddress'
- '@azure/arm-servicemap.PortReference.kind'
- '@azure/arm-servicemap.PortReference.machine'
- '@azure/arm-servicemap.PortReference.name'
- '@azure/arm-servicemap.PortReference.portNumber'
- '@azure/arm-servicemap.PortReference.type'
langs:
- typeScript
type: interface
summary: ''
package: '@azure/arm-servicemap'
- uid: '@azure/arm-servicemap.PortReference.id'
name: id
fullName: id
children: []
langs:
- typeScript
type: property
summary: ''
syntax:
content: 'id: string'
return:
type:
- string
package: '@azure/arm-servicemap'
- uid: '@azure/arm-servicemap.PortReference.ipAddress'
name: ipAddress
fullName: ipAddress
children: []
langs:
- typeScript
type: property
summary: ''
optional: true
syntax:
content: 'ipAddress?: undefined | string'
return:
type:
- undefined | string
package: '@azure/arm-servicemap'
- uid: '@azure/arm-servicemap.PortReference.kind'
name: kind
fullName: kind
children: []
langs:
- typeScript
type: property
summary: ''
syntax:
content: 'kind: "ref:port"'
return:
type:
- '"ref:port"'
package: '@azure/arm-servicemap'
- uid: '@azure/arm-servicemap.PortReference.machine'
name: machine
fullName: machine
children: []
langs:
- typeScript
type: property
summary: ''
optional: true
syntax:
content: 'machine?: MachineReference'
return:
type:
- '@azure/arm-servicemap.MachineReference'
package: '@azure/arm-servicemap'
- uid: '@azure/arm-servicemap.PortReference.name'
name: name
fullName: name
children: []
langs:
- typeScript
type: property
summary: ''
optional: true
syntax:
content: 'name?: undefined | string'
return:
type:
- undefined | string
package: '@azure/arm-servicemap'
- uid: '@azure/arm-servicemap.PortReference.portNumber'
name: portNumber
fullName: portNumber
children: []
langs:
- typeScript
type: property
summary: ''
optional: true
syntax:
content: 'portNumber?: undefined | number'
return:
type:
- undefined | number
package: '@azure/arm-servicemap'
- uid: '@azure/arm-servicemap.PortReference.type'
name: type
fullName: type
children: []
langs:
- typeScript
type: property
summary: ''
optional: true
syntax:
content: 'type?: undefined | string'
return:
type:
- undefined | string
package: '@azure/arm-servicemap'
references:
- uid: '@azure/arm-servicemap.MachineReference'
name: MachineReference
spec.typeScript:
- name: MachineReference
fullName: MachineReference
uid: '@azure/arm-servicemap.MachineReference'
|
docs-ref-autogen/@azure/arm-servicemap/PortReference.yml
|
l_chinese:
mem_eager_traders.1.name:0 "Eager Traders at Our Border"
mem_eager_traders.1.desc:0 "No sooner had a safe route been established between §Y[ruthcap.GetAdj]§! space and our border than we received anxious reports from border customs. Hundreds of heat signatures were detected approaching our border at high speed; they all turned out to be civilian freighters. Faced with this tidal wave of merchants, the local customs officials doubt that their small checkpoint can cope with such enormous numbers of traders."
mem_eager_traders.1.xenophile:0 "Are they really that eager to meet us? Very well then, let them in."
mem_eager_traders.1.xenophobe:0 "Reinforce the border and tell them to leave at once."
mem_eager_traders.1.individualist:0 "These border controls only obstruct trade between our two sides. Let them in immediately."
mem_eager_traders.1.fanindividualist:0 "Excellent, we are preparing to dispatch freighters of our own right away."
mem_eager_traders.2.name:0 "Stealing Each Other's Trade"
mem_eager_traders.2.desc:0 "Our economy appears to have suffered since the influx of §Y[ruthcap.GetAdj]§! merchants a few months ago. The goods these traders sell on our worlds are little more than cheap trinkets and shoddy wares that constantly break, and many have even scared off local businesses through hostile takeovers and extortion, forming monopolies. We estimate that our domestic economy may need several years to recover."
mem_eager_traders.3.name:0 "A Curious Economic Equilibrium"
mem_eager_traders.3.desc:0 "Trade with §Y[ruthcap.GetName]§! is currently going rather well. Both sides are able to sell their surplus goods to the other, and as a result neither has come out of the exchange with a loss or a surplus; even the people of §Y[ruthcap.GetName]§! quietly marvel at it."
mem_eager_traders.1.resigned:0 "Dispatch some officials to ease their burden."
mem_eager_traders.2.annoyed:0 "This turned out worse than we imagined."
mem_eager_traders.2.fascinated:0 "We actually fell for it? How absurd."
mem_eager_traders.4.name:0 "Will Overcomes Materialism"
mem_eager_traders.4.desc:0 "Since the influx of §Y[ruthcap.GetAdj]§! merchants a few months ago, the goods they sell on our worlds have been little more than overpriced luxuries and jewellery. Fortunately our people have no interest in such things, and the merchants departed without doing serious harm to our economy."
mem_eager_traders.4.option:0 "That is worth celebrating!"
mem_eager_traders.5.name:0 "The First [peacefultraders.GetAdj] Traders Arrive"
mem_eager_traders.5.desc:0 "With a safe route now established between §Y[peacefultraders.GetName]§! and §Y[Root.GetName]§!, the first enterprising merchants have begun to probe this new market. Although this first trade route is still finding its feet, we can expect commerce between our two sides to grow ever more prosperous."
mem_eager_traders.5.xenophile:0 "Wonderful, bring in some §Y[peacefultraders.GetAdj]§! specialities at once!"
mem_eager_traders.5.xenophile.tooltip:0 "([Root.GetName]):\n §YXenophile§! pops gain §G+5%§! happiness for §Y12§! months\n §YXenophobe§! pops gain §R-10%§! happiness for §Y12§! months"
mem_eager_traders.5.xenophobe:0 "We must ban all imports from §Y[peacefultraders.GetName]§! to protect our cultural heritage."
mem_eager_traders.5.xenophobe.tooltip:0 "([Root.GetName]):\n §YXenophobe§! pops gain §G+5%§! happiness for §Y12§! months\n §YXenophile§! pops gain §R-10%§! happiness for §Y12§! months\n §YIndividualist§! pops gain §R-10%§! happiness for §Y12§! months"
mem_eager_traders.5.individualist:0 "We need more Energy Credits."
mem_eager_traders.5.mercenary:0 "Kindly merchants? It looks like they could use the services of our mercenaries."
mem_eager_traders.5.peaceful:0 "A war would cost both sides more than either could reckon."
mem_eager_traders.6.name:0 "The Mercenaries Return from §Y[peacefultraders.GetName]§!!"
mem_eager_traders.6.desc:0 "The §Y[Root.GetAdj]§! mercenaries who have worked with the §Y[peacefultraders.GetAdj]§! these past years have been remarkably successful. The people of §Y[Root.GetName]§! are now known as capable warriors and shrewd merchants, and after adventures of every kind many have returned home with full purses."
mem_eager_traders.6.good:0 "True professionals, one might say."
|
mem_sulfuric_transplant/reference_files/mem_stable_20190322/localisation/mem_eager_traders_l_traditional_chinese.yml
|
---
metadata:
title: |
RSPCA Court Summons Prototype
short title: |
Summons Automation
organisation: Flinders University
show login: True
---
default screen parts:
post: |
<div style="display: flex; height: 200px; align-items: flex-end; flex-direction: row-reverse">
<div style="margin: 5px"><img src="https://staff.flinders.edu.au/etc.clientlibs/flinders/clientlibs/clientlib-site/resources/images/flinderuni_main_logo_black.png" height="40">
</div>
<div style="margin: 5px; text-align: right">
<cite><small><i>
Developed by Flinders University in collaboration with RSPCA
</i></small></cite>
</div>
</div>
---
#add files that we need to include (questions, registry lists etc.)
include:
- RSPCA.questions.yml
- RSPCA.registries.yml
#- RSPCA.display_output.yml
---
#objects that we need, e.g. endpoints and logos (can add this later)
objects:
#- placeholder.registries
#- placeholder.RSPCA_logo
- endpoints: DADict
---
#css file (can add this later)
features:
css: X2.01.SACAT.css
debug: True
hide standard menu: False
---
##come back to this later and add in a logo to be displayed on all pages
---
#driver code (create the endpoints to test eligibility)
mandatory: True
code: |
###Can only be two options so only need two endpoints
if RSPCAeligible:
#set the endpoint to the final screen we want to display
endpoints['final_screen']
else:
#not eligible, so show the ineligible screen
endpoints['ineligible']
---
event: endpoints['final_screen']
question: ""
subquestion: |
### This is the final endpoint screen
This screen will display the information that has been entered.
This screen will also display options to save the document as docx/pdf.
This is for form type: ${ RSPCAform }
**Summary of defendant information:**
Defendant|Input
-----------|----------
Name | ${ def_name }
DOB | ${ def_DOB }
Street | ${ def_street }
Suburb | ${ def_suburb }
State | ${ def_state }
Postcode | ${ def_postcode }
Email | ${ def_email }
Phone Number | ${ def_phone }
Drivers Licence | ${ def_licence }
**Summary of hearing details:**
Registry|Details
-----------|----------
Registry | ${ RSPCA_registrieslist[RSPCAcourt].item('registry') }
Address | ${ RSPCA_registrieslist[RSPCAcourt].item('address') }
Email | ${ RSPCA_registrieslist[RSPCAcourt].item('email') }
Phone | ${ RSPCA_registrieslist[RSPCAcourt].item('phone') }
Website | ${ RSPCA_registrieslist[RSPCAcourt].item('website') }
Need to add time/date still
Need to add summary of offence details
Need to add attachment details (pdf/docx)
buttons:
- Exit: exit
url: https://www.rspca.org.au/
- Restart: restart
---
#add attachment details
---
event: endpoints['ineligible']
question: Not Eligible
subquestion:
The disclaimer has not been accepted, or other possible issues (TBC)
buttons:
- Exit: exit
url: https://www.rspca.org.au/
- Restart: restart
---
|
docassemble/LLAW33012021S1RSPCA1/data/questions/RSPCA.main.yml
|