# Generated by Powerlevel10k configuration wizard on 2021-04-09 at 23:20 CEST.
# Based on romkatv/powerlevel10k/config/p10k-lean.zsh, checksum 43791.
# Wizard options: nerdfont-complete + powerline, small icons, unicode, lean, 1 line,
# compact, many icons, fluent, transient_prompt, instant_prompt=quiet.
# Type `p10k configure` to generate another config.
#
# Config for Powerlevel10k with lean prompt style. Type `p10k configure` to generate
# your own config based on it.
#
# Tip: Looking for a nice color? Here's a one-liner to print colormap.
#
# for i in {0..255}; do print -Pn "%K{$i} %k%F{$i}${(l:3::0:)i}%f " ${${(M)$((i%6)):#3}:+$'\n'}; done
# shellcheck disable=SC1073
# shellcheck disable=SC1072
# shellcheck disable=SC2148
# Temporarily change options.
# The builtins below are invoked with every word quoted ('builtin' 'local' ...) so that
# user-defined aliases with the same names cannot hijack them while this file is sourced.
'builtin' 'local' '-a' 'p10k_config_opts'
# Remember which of these options are currently on so the original state can be restored
# later (restoration happens past the end of this chunk; only the save side is visible here).
# The rest of the config relies on no_aliases, no_sh_glob and brace_expand being in effect.
[[ ! -o 'aliases' ]] || p10k_config_opts+=('aliases')
[[ ! -o 'sh_glob' ]] || p10k_config_opts+=('sh_glob')
[[ ! -o 'no_brace_expand' ]] || p10k_config_opts+=('no_brace_expand')
'builtin' 'setopt' 'no_aliases' 'no_sh_glob' 'brace_expand'
() {
emulate -L zsh -o extended_glob
# Unset all configuration options. This allows you to apply configuration changes without
# restarting zsh. Edit ~/.p10k.zsh and type `source ~/.p10k.zsh`.
unset -m '(POWERLEVEL9K_*|DEFAULT_USER)~POWERLEVEL9K_GITSTATUS_DIR'
# Zsh >= 5.1 is required.
autoload -Uz is-at-least && is-at-least 5.1 || return
# The list of segments shown on the left. Fill it with the most important segments.
typeset -g POWERLEVEL9K_LEFT_PROMPT_ELEMENTS=(
os_icon # os identifier
dir # current directory
vcs # git status
prompt_char # prompt symbol
)
# The list of segments shown on the right. Fill it with less important segments.
# Right prompt on the last prompt line (where you are typing your commands) gets
# automatically hidden when the input line reaches it. Right prompt above the
# last prompt line gets hidden if it would overlap with left prompt.
typeset -g POWERLEVEL9K_RIGHT_PROMPT_ELEMENTS=(
status # exit code of the last command
command_execution_time # duration of the last command
background_jobs # presence of background jobs
direnv # direnv status (https://direnv.net/)
asdf # asdf version manager (https://github.com/asdf-vm/asdf)
virtualenv # python virtual environment (https://docs.python.org/3/library/venv.html)
anaconda # conda environment (https://conda.io/)
pyenv # python environment (https://github.com/pyenv/pyenv)
goenv # go environment (https://github.com/syndbg/goenv)
nodenv # node.js version from nodenv (https://github.com/nodenv/nodenv)
nvm # node.js version from nvm (https://github.com/nvm-sh/nvm)
nodeenv # node.js environment (https://github.com/ekalinin/nodeenv)
# node_version # node.js version
# go_version # go version (https://golang.org)
# rust_version # rustc version (https://www.rust-lang.org)
# dotnet_version # .NET version (https://dotnet.microsoft.com)
# php_version # php version (https://www.php.net/)
# laravel_version # laravel php framework version (https://laravel.com/)
# java_version # java version (https://www.java.com/)
# package # name@version from package.json (https://docs.npmjs.com/files/package.json)
rbenv # ruby version from rbenv (https://github.com/rbenv/rbenv)
rvm # ruby version from rvm (https://rvm.io)
fvm # flutter version management (https://github.com/leoafarias/fvm)
luaenv # lua version from luaenv (https://github.com/cehoffman/luaenv)
jenv # java version from jenv (https://github.com/jenv/jenv)
plenv # perl version from plenv (https://github.com/tokuhirom/plenv)
phpenv # php version from phpenv (https://github.com/phpenv/phpenv)
scalaenv # scala version from scalaenv (https://github.com/scalaenv/scalaenv)
haskell_stack # haskell version from stack (https://haskellstack.org/)
kubecontext # current kubernetes context (https://kubernetes.io/)
terraform # terraform workspace (https://www.terraform.io)
aws # aws profile (https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-profiles.html)
aws_eb_env # aws elastic beanstalk environment (https://aws.amazon.com/elasticbeanstalk/)
azure # azure account name (https://docs.microsoft.com/en-us/cli/azure)
gcloud # google cloud cli account and project (https://cloud.google.com/)
google_app_cred # google application credentials (https://cloud.google.com/docs/authentication/production)
context # user@hostname
nordvpn # nordvpn connection status, linux only (https://nordvpn.com/)
ranger # ranger shell (https://github.com/ranger/ranger)
nnn # nnn shell (https://github.com/jarun/nnn)
vim_shell # vim shell indicator (:sh)
midnight_commander # midnight commander shell (https://midnight-commander.org/)
nix_shell # nix shell (https://nixos.org/nixos/nix-pills/developing-with-nix-shell.html)
# vpn_ip # virtual private network indicator
# load # CPU load
# disk_usage # disk usage
# ram # free RAM
# swap # used swap
todo # todo items (https://github.com/todotxt/todo.txt-cli)
timewarrior # timewarrior tracking status (https://timewarrior.net/)
taskwarrior # taskwarrior task count (https://taskwarrior.org/)
# time # current time
# ip # ip address and bandwidth usage for a specified network interface
# public_ip # public IP address
# proxy # system-wide http/https/ftp proxy
# battery # internal battery
# wifi # wifi speed
# example # example user-defined segment (see prompt_example function below)
)
# Defines character set used by powerlevel10k. It's best to let `p10k configure` set it for you.
typeset -g POWERLEVEL9K_MODE=nerdfont-complete
# When set to `moderate`, some icons will have an extra space after them. This is meant to avoid
# icon overlap when using non-monospace fonts. When set to `none`, spaces are not added.
typeset -g POWERLEVEL9K_ICON_PADDING=none
# Basic style options that define the overall look of your prompt. You probably don't want to
# change them.
typeset -g POWERLEVEL9K_BACKGROUND= # transparent background
typeset -g POWERLEVEL9K_{LEFT,RIGHT}_{LEFT,RIGHT}_WHITESPACE= # no surrounding whitespace
typeset -g POWERLEVEL9K_{LEFT,RIGHT}_SUBSEGMENT_SEPARATOR=' ' # separate segments with a space
typeset -g POWERLEVEL9K_{LEFT,RIGHT}_SEGMENT_SEPARATOR= # no end-of-line symbol
# When set to true, icons appear before content on both sides of the prompt. When set
# to false, icons go after content. If empty or not set, icons go before content in the left
# prompt and after content in the right prompt.
#
# You can also override it for a specific segment:
#
# POWERLEVEL9K_STATUS_ICON_BEFORE_CONTENT=false
#
# Or for a specific segment in specific state:
#
# POWERLEVEL9K_DIR_NOT_WRITABLE_ICON_BEFORE_CONTENT=false
typeset -g POWERLEVEL9K_ICON_BEFORE_CONTENT=true
# Add an empty line before each prompt.
typeset -g POWERLEVEL9K_PROMPT_ADD_NEWLINE=false
# Connect left prompt lines with these symbols.
typeset -g POWERLEVEL9K_MULTILINE_FIRST_PROMPT_PREFIX=
typeset -g POWERLEVEL9K_MULTILINE_NEWLINE_PROMPT_PREFIX=
typeset -g POWERLEVEL9K_MULTILINE_LAST_PROMPT_PREFIX=
# Connect right prompt lines with these symbols.
typeset -g POWERLEVEL9K_MULTILINE_FIRST_PROMPT_SUFFIX=
typeset -g POWERLEVEL9K_MULTILINE_NEWLINE_PROMPT_SUFFIX=
typeset -g POWERLEVEL9K_MULTILINE_LAST_PROMPT_SUFFIX=
# The left end of left prompt.
typeset -g POWERLEVEL9K_LEFT_PROMPT_FIRST_SEGMENT_START_SYMBOL=
# The right end of right prompt.
typeset -g POWERLEVEL9K_RIGHT_PROMPT_LAST_SEGMENT_END_SYMBOL=
# Ruler, a.k.a. the horizontal line before each prompt. If you set it to true, you'll
# probably want to set POWERLEVEL9K_PROMPT_ADD_NEWLINE=false above and
# POWERLEVEL9K_MULTILINE_FIRST_PROMPT_GAP_CHAR=' ' below.
typeset -g POWERLEVEL9K_SHOW_RULER=false
typeset -g POWERLEVEL9K_RULER_CHAR='─' # reasonable alternative: '·'
typeset -g POWERLEVEL9K_RULER_FOREGROUND=242
# Filler between left and right prompt on the first prompt line. You can set it to '·' or '─'
# to make it easier to see the alignment between left and right prompt and to separate prompt
# from command output. It serves the same purpose as ruler (see above) without increasing
# the number of prompt lines. You'll probably want to set POWERLEVEL9K_SHOW_RULER=false
# if using this. You might also like POWERLEVEL9K_PROMPT_ADD_NEWLINE=false for more compact
# prompt.
typeset -g POWERLEVEL9K_MULTILINE_FIRST_PROMPT_GAP_CHAR=' '
if [[ $POWERLEVEL9K_MULTILINE_FIRST_PROMPT_GAP_CHAR != ' ' ]]; then
# The color of the filler.
typeset -g POWERLEVEL9K_MULTILINE_FIRST_PROMPT_GAP_FOREGROUND=242
# Add a space between the end of left prompt and the filler.
typeset -g POWERLEVEL9K_LEFT_PROMPT_LAST_SEGMENT_END_SYMBOL=' '
# Add a space between the filler and the start of right prompt.
typeset -g POWERLEVEL9K_RIGHT_PROMPT_FIRST_SEGMENT_START_SYMBOL=' '
# Start filler from the edge of the screen if there are no left segments on the first line.
typeset -g POWERLEVEL9K_EMPTY_LINE_LEFT_PROMPT_FIRST_SEGMENT_END_SYMBOL='%{%}'
# End filler on the edge of the screen if there are no right segments on the first line.
typeset -g POWERLEVEL9K_EMPTY_LINE_RIGHT_PROMPT_FIRST_SEGMENT_START_SYMBOL='%{%}'
fi
#################################[ os_icon: os identifier ]##################################
# OS identifier color.
typeset -g POWERLEVEL9K_OS_ICON_FOREGROUND=
# Custom icon.
# typeset -g POWERLEVEL9K_OS_ICON_CONTENT_EXPANSION='⭐'
################################[ prompt_char: prompt symbol ]################################
# Green prompt symbol if the last command succeeded.
typeset -g POWERLEVEL9K_PROMPT_CHAR_OK_{VIINS,VICMD,VIVIS,VIOWR}_FOREGROUND=76
# Red prompt symbol if the last command failed.
typeset -g POWERLEVEL9K_PROMPT_CHAR_ERROR_{VIINS,VICMD,VIVIS,VIOWR}_FOREGROUND=196
# Default prompt symbol.
typeset -g POWERLEVEL9K_PROMPT_CHAR_{OK,ERROR}_VIINS_CONTENT_EXPANSION='❯'
# Prompt symbol in command vi mode.
typeset -g POWERLEVEL9K_PROMPT_CHAR_{OK,ERROR}_VICMD_CONTENT_EXPANSION='❮'
# Prompt symbol in visual vi mode.
typeset -g POWERLEVEL9K_PROMPT_CHAR_{OK,ERROR}_VIVIS_CONTENT_EXPANSION='V'
# Prompt symbol in overwrite vi mode.
typeset -g POWERLEVEL9K_PROMPT_CHAR_{OK,ERROR}_VIOWR_CONTENT_EXPANSION='▶'
typeset -g POWERLEVEL9K_PROMPT_CHAR_OVERWRITE_STATE=true
# No line terminator if prompt_char is the last segment.
typeset -g POWERLEVEL9K_PROMPT_CHAR_LEFT_PROMPT_LAST_SEGMENT_END_SYMBOL=''
# No line introducer if prompt_char is the first segment.
typeset -g POWERLEVEL9K_PROMPT_CHAR_LEFT_PROMPT_FIRST_SEGMENT_START_SYMBOL=
##################################[ dir: current directory ]##################################
# Default current directory color.
typeset -g POWERLEVEL9K_DIR_FOREGROUND=31
# If directory is too long, shorten some of its segments to the shortest possible unique
# prefix. The shortened directory can be tab-completed to the original.
typeset -g POWERLEVEL9K_SHORTEN_STRATEGY=truncate_to_unique
# Replace removed segment suffixes with this symbol.
typeset -g POWERLEVEL9K_SHORTEN_DELIMITER=
# Color of the shortened directory segments.
typeset -g POWERLEVEL9K_DIR_SHORTENED_FOREGROUND=103
# Color of the anchor directory segments. Anchor segments are never shortened. The first
# segment is always an anchor.
typeset -g POWERLEVEL9K_DIR_ANCHOR_FOREGROUND=39
# Display anchor directory segments in bold.
typeset -g POWERLEVEL9K_DIR_ANCHOR_BOLD=true
# Don't shorten directories that contain any of these files. They are anchors.
local anchor_files=(
.bzr
.citc
.git
.hg
.node-version
.python-version
.go-version
.ruby-version
.lua-version
.java-version
.perl-version
.php-version
.tool-version
.shorten_folder_marker
.svn
.terraform
CVS
Cargo.toml
composer.json
go.mod
package.json
stack.yaml
)
typeset -g POWERLEVEL9K_SHORTEN_FOLDER_MARKER="(${(j:|:)anchor_files})"
# If set to "first" ("last"), remove everything before the first (last) subdirectory that contains
# files matching $POWERLEVEL9K_SHORTEN_FOLDER_MARKER. For example, when the current directory is
# /foo/bar/git_repo/nested_git_repo/baz, prompt will display git_repo/nested_git_repo/baz (first)
# or nested_git_repo/baz (last). This assumes that git_repo and nested_git_repo contain markers
# and other directories don't.
#
# Optionally, "first" and "last" can be followed by ":<offset>" where <offset> is an integer.
# This moves the truncation point to the right (positive offset) or to the left (negative offset)
# relative to the marker. Plain "first" and "last" are equivalent to "first:0" and "last:0"
# respectively.
typeset -g POWERLEVEL9K_DIR_TRUNCATE_BEFORE_MARKER=false
# Don't shorten this many last directory segments. They are anchors.
typeset -g POWERLEVEL9K_SHORTEN_DIR_LENGTH=1
# Shorten directory if it's longer than this even if there is space for it. The value can
# be either absolute (e.g., '80') or a percentage of terminal width (e.g., '50%'). If empty,
# directory will be shortened only when prompt doesn't fit or when other parameters demand it
# (see POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS and POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS_PCT below).
# If set to `0`, directory will always be shortened to its minimum length.
typeset -g POWERLEVEL9K_DIR_MAX_LENGTH=80
# When `dir` segment is on the last prompt line, try to shorten it enough to leave at least this
# many columns for typing commands.
typeset -g POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS=40
# When `dir` segment is on the last prompt line, try to shorten it enough to leave at least
# COLUMNS * POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS_PCT * 0.01 columns for typing commands.
typeset -g POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS_PCT=50
# If set to true, embed a hyperlink into the directory. Useful for quickly
# opening a directory in the file manager simply by clicking the link.
# Can also be handy when the directory is shortened, as it allows you to see
# the full directory that was used in previous commands.
typeset -g POWERLEVEL9K_DIR_HYPERLINK=false
# Enable special styling for non-writable and non-existent directories. See POWERLEVEL9K_LOCK_ICON
# and POWERLEVEL9K_DIR_CLASSES below.
typeset -g POWERLEVEL9K_DIR_SHOW_WRITABLE=v3
# The default icon shown next to non-writable and non-existent directories when
# POWERLEVEL9K_DIR_SHOW_WRITABLE is set to v3.
# typeset -g POWERLEVEL9K_LOCK_ICON='⭐'
# POWERLEVEL9K_DIR_CLASSES allows you to specify custom icons and colors for different
# directories. It must be an array with 3 * N elements. Each triplet consists of:
#
# 1. A pattern against which the current directory ($PWD) is matched. Matching is done with
# extended_glob option enabled.
# 2. Directory class for the purpose of styling.
# 3. An empty string.
#
# Triplets are tried in order. The first triplet whose pattern matches $PWD wins.
#
# If POWERLEVEL9K_DIR_SHOW_WRITABLE is set to v3, non-writable and non-existent directories
# acquire class suffix _NOT_WRITABLE and NON_EXISTENT respectively.
#
# For example, given these settings:
#
# typeset -g POWERLEVEL9K_DIR_CLASSES=(
# '~/work(|/*)' WORK ''
# '~(|/*)' HOME ''
# '*' DEFAULT '')
#
# Whenever the current directory is ~/work or a subdirectory of ~/work, it gets styled with one
# of the following classes depending on its writability and existence: WORK, WORK_NOT_WRITABLE or
# WORK_NON_EXISTENT.
#
# Simply assigning classes to directories doesn't have any visible effects. It merely gives you an
# option to define custom colors and icons for different directory classes.
#
# # Styling for WORK.
# typeset -g POWERLEVEL9K_DIR_WORK_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_DIR_WORK_FOREGROUND=31
# typeset -g POWERLEVEL9K_DIR_WORK_SHORTENED_FOREGROUND=103
# typeset -g POWERLEVEL9K_DIR_WORK_ANCHOR_FOREGROUND=39
#
# # Styling for WORK_NOT_WRITABLE.
# typeset -g POWERLEVEL9K_DIR_WORK_NOT_WRITABLE_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_DIR_WORK_NOT_WRITABLE_FOREGROUND=31
# typeset -g POWERLEVEL9K_DIR_WORK_NOT_WRITABLE_SHORTENED_FOREGROUND=103
# typeset -g POWERLEVEL9K_DIR_WORK_NOT_WRITABLE_ANCHOR_FOREGROUND=39
#
# # Styling for WORK_NON_EXISTENT.
# typeset -g POWERLEVEL9K_DIR_WORK_NON_EXISTENT_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_DIR_WORK_NON_EXISTENT_FOREGROUND=31
# typeset -g POWERLEVEL9K_DIR_WORK_NON_EXISTENT_SHORTENED_FOREGROUND=103
# typeset -g POWERLEVEL9K_DIR_WORK_NON_EXISTENT_ANCHOR_FOREGROUND=39
#
# If a styling parameter isn't explicitly defined for some class, it falls back to the classless
# parameter. For example, if POWERLEVEL9K_DIR_WORK_NOT_WRITABLE_FOREGROUND is not set, it falls
# back to POWERLEVEL9K_DIR_FOREGROUND.
#
# typeset -g POWERLEVEL9K_DIR_CLASSES=()
# Custom prefix.
# typeset -g POWERLEVEL9K_DIR_PREFIX='%fin '
#####################################[ vcs: git status ]######################################
# Branch icon. Set this parameter to '\uF126 ' for the popular Powerline branch icon.
typeset -g POWERLEVEL9K_VCS_BRANCH_ICON='\uF126 '
# Untracked files icon. It's really a question mark, your font isn't broken.
# Change the value of this parameter to show a different icon.
typeset -g POWERLEVEL9K_VCS_UNTRACKED_ICON='?'
# Formatter for Git status.
#
# Example output: master ⇣42⇡42 *42 merge ~42 +42 !42 ?42.
#
# You can edit the function to customize how Git status looks.
#
# VCS_STATUS_* parameters are set by gitstatus plugin. See reference:
# https://github.com/romkatv/gitstatus/blob/master/gitstatus.plugin.zsh.
function my_git_formatter() {
emulate -L zsh
# $1 is 1 when gitstatus data is fresh (up-to-date) and 0 when it is stale or still
# loading; see the VCS CONTENT_EXPANSION / LOADING_CONTENT_EXPANSION parameters below,
# which call my_git_formatter(1) and my_git_formatter(0) respectively.
# The result is written to the global my_git_format rather than returned.
if [[ -n $P9K_CONTENT ]]; then
# If P9K_CONTENT is not empty, use it. It's either "loading" or from vcs_info (not from
# gitstatus plugin). VCS_STATUS_* parameters are not available in this case.
typeset -g my_git_format=$P9K_CONTENT
return
fi
if (( $1 )); then
# Styling for up-to-date Git status.
local meta='%f' # default foreground
local clean='%76F' # green foreground
local modified='%178F' # yellow foreground
local untracked='%39F' # blue foreground
local conflicted='%196F' # red foreground
else
# Styling for incomplete and stale Git status (everything rendered grey).
local meta='%244F' # grey foreground
local clean='%244F' # grey foreground
local modified='%244F' # grey foreground
local untracked='%244F' # grey foreground
local conflicted='%244F' # grey foreground
fi
local res
if [[ -n $VCS_STATUS_LOCAL_BRANCH ]]; then
# The (V) expansion flag makes special/control characters in the branch name visible
# instead of being interpreted by the terminal.
local branch=${(V)VCS_STATUS_LOCAL_BRANCH}
# If local branch name is at most 32 characters long, show it in full.
# Otherwise show the first 12 … the last 12.
# Tip: To always show local branch name in full without truncation, delete the next line.
(( $#branch > 32 )) && branch[13,-13]="…" # <-- this line
# (g::) processes escape sequences in the icon; ${branch//\%/%%} doubles every '%' so
# branch names cannot be misread as prompt escapes.
res+="${clean}${(g::)POWERLEVEL9K_VCS_BRANCH_ICON}${branch//\%/%%}"
fi
if [[ -n $VCS_STATUS_TAG
# Show tag only if not on a branch.
# Tip: To always show tag, delete the next line.
&& -z $VCS_STATUS_LOCAL_BRANCH # <-- this line
]]; then
local tag=${(V)VCS_STATUS_TAG}
# If tag name is at most 32 characters long, show it in full.
# Otherwise show the first 12 … the last 12.
# Tip: To always show tag name in full without truncation, delete the next line.
(( $#tag > 32 )) && tag[13,-13]="…" # <-- this line
res+="${meta}#${clean}${tag//\%/%%}"
fi
# Display the current Git commit (first 8 hex digits) if there is no branch and no tag.
# Tip: To always display the current Git commit, delete the next line.
[[ -z $VCS_STATUS_LOCAL_BRANCH && -z $VCS_STATUS_TAG ]] && # <-- this line
res+="${meta}@${clean}${VCS_STATUS_COMMIT[1,8]}"
# Show tracking branch name if it differs from local branch.
# ${VAR:#pattern} expands to empty when VAR matches the pattern, so this test fires only
# when the remote branch is set and different from the local one.
if [[ -n ${VCS_STATUS_REMOTE_BRANCH:#$VCS_STATUS_LOCAL_BRANCH} ]]; then
res+="${meta}:${clean}${(V)VCS_STATUS_REMOTE_BRANCH//\%/%%}"
fi
# ⇣42 if behind the remote.
(( VCS_STATUS_COMMITS_BEHIND )) && res+=" ${clean}⇣${VCS_STATUS_COMMITS_BEHIND}"
# ⇡42 if ahead of the remote; no leading space if also behind the remote: ⇣42⇡42.
(( VCS_STATUS_COMMITS_AHEAD && !VCS_STATUS_COMMITS_BEHIND )) && res+=" "
(( VCS_STATUS_COMMITS_AHEAD )) && res+="${clean}⇡${VCS_STATUS_COMMITS_AHEAD}"
# ⇠42 if behind the push remote.
(( VCS_STATUS_PUSH_COMMITS_BEHIND )) && res+=" ${clean}⇠${VCS_STATUS_PUSH_COMMITS_BEHIND}"
(( VCS_STATUS_PUSH_COMMITS_AHEAD && !VCS_STATUS_PUSH_COMMITS_BEHIND )) && res+=" "
# ⇢42 if ahead of the push remote; no leading space if also behind: ⇠42⇢42.
(( VCS_STATUS_PUSH_COMMITS_AHEAD )) && res+="${clean}⇢${VCS_STATUS_PUSH_COMMITS_AHEAD}"
# *42 if have stashes.
(( VCS_STATUS_STASHES )) && res+=" ${clean}*${VCS_STATUS_STASHES}"
# 'merge' if the repo is in an unusual state.
[[ -n $VCS_STATUS_ACTION ]] && res+=" ${conflicted}${VCS_STATUS_ACTION}"
# ~42 if have merge conflicts.
(( VCS_STATUS_NUM_CONFLICTED )) && res+=" ${conflicted}~${VCS_STATUS_NUM_CONFLICTED}"
# +42 if have staged changes.
(( VCS_STATUS_NUM_STAGED )) && res+=" ${modified}+${VCS_STATUS_NUM_STAGED}"
# !42 if have unstaged changes.
(( VCS_STATUS_NUM_UNSTAGED )) && res+=" ${modified}!${VCS_STATUS_NUM_UNSTAGED}"
# ?42 if have untracked files. It's really a question mark, your font isn't broken.
# See POWERLEVEL9K_VCS_UNTRACKED_ICON above if you want to use a different icon.
# Remove the next line if you don't want to see untracked files at all.
(( VCS_STATUS_NUM_UNTRACKED )) && res+=" ${untracked}${(g::)POWERLEVEL9K_VCS_UNTRACKED_ICON}${VCS_STATUS_NUM_UNTRACKED}"
# "─" if the number of unstaged files is unknown. This can happen due to
# POWERLEVEL9K_VCS_MAX_INDEX_SIZE_DIRTY (see below) being set to a non-negative number lower
# than the number of files in the Git index, or due to bash.showDirtyState being set to false
# in the repository config. The number of staged and untracked files may also be unknown
# in this case.
(( VCS_STATUS_HAS_UNSTAGED == -1 )) && res+=" ${modified}─"
typeset -g my_git_format=$res
}
functions -M my_git_formatter 2>/dev/null
# Don't count the number of unstaged, untracked and conflicted files in Git repositories with
# more than this many files in the index. Negative value means infinity.
#
# If you are working in Git repositories with tens of millions of files and seeing performance
# sagging, try setting POWERLEVEL9K_VCS_MAX_INDEX_SIZE_DIRTY to a number lower than the output
# of `git ls-files | wc -l`. Alternatively, add `bash.showDirtyState = false` to the repository's
# config: `git config bash.showDirtyState false`.
typeset -g POWERLEVEL9K_VCS_MAX_INDEX_SIZE_DIRTY=-1
# Don't show Git status in prompt for repositories whose workdir matches this pattern.
# For example, if set to '~', the Git repository at $HOME/.git will be ignored.
# Multiple patterns can be combined with '|': '~(|/foo)|/bar/baz/*'.
typeset -g POWERLEVEL9K_VCS_DISABLED_WORKDIR_PATTERN='~'
# Disable the default Git status formatting.
typeset -g POWERLEVEL9K_VCS_DISABLE_GITSTATUS_FORMATTING=true
# Install our own Git status formatter.
typeset -g POWERLEVEL9K_VCS_CONTENT_EXPANSION='${$((my_git_formatter(1)))+${my_git_format}}'
typeset -g POWERLEVEL9K_VCS_LOADING_CONTENT_EXPANSION='${$((my_git_formatter(0)))+${my_git_format}}'
# Enable counters for staged, unstaged, etc.
typeset -g POWERLEVEL9K_VCS_{STAGED,UNSTAGED,UNTRACKED,CONFLICTED,COMMITS_AHEAD,COMMITS_BEHIND}_MAX_NUM=-1
# Icon color.
typeset -g POWERLEVEL9K_VCS_VISUAL_IDENTIFIER_COLOR=76
typeset -g POWERLEVEL9K_VCS_LOADING_VISUAL_IDENTIFIER_COLOR=244
# Custom icon.
# typeset -g POWERLEVEL9K_VCS_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Custom prefix.
typeset -g POWERLEVEL9K_VCS_PREFIX='%fon '
# Show status of repositories of these types. You can add svn and/or hg if you are
# using them. If you do, your prompt may become slow even when your current directory
# isn't in an svn or hg repository.
typeset -g POWERLEVEL9K_VCS_BACKENDS=(git)
# These settings are used for repositories other than Git or when gitstatusd fails and
# Powerlevel10k has to fall back to using vcs_info.
typeset -g POWERLEVEL9K_VCS_CLEAN_FOREGROUND=76
typeset -g POWERLEVEL9K_VCS_UNTRACKED_FOREGROUND=76
typeset -g POWERLEVEL9K_VCS_MODIFIED_FOREGROUND=178
##########################[ status: exit code of the last command ]###########################
# Enable OK_PIPE, ERROR_PIPE and ERROR_SIGNAL status states to allow us to enable, disable and
# style them independently from the regular OK and ERROR state.
typeset -g POWERLEVEL9K_STATUS_EXTENDED_STATES=true
# Status on success. No content, just an icon. No need to show it if prompt_char is enabled as
# it will signify success by turning green.
typeset -g POWERLEVEL9K_STATUS_OK=false
typeset -g POWERLEVEL9K_STATUS_OK_FOREGROUND=70
typeset -g POWERLEVEL9K_STATUS_OK_VISUAL_IDENTIFIER_EXPANSION='✔'
# Status when some part of a pipe command fails but the overall exit status is zero. It may look
# like this: 1|0.
typeset -g POWERLEVEL9K_STATUS_OK_PIPE=true
typeset -g POWERLEVEL9K_STATUS_OK_PIPE_FOREGROUND=70
typeset -g POWERLEVEL9K_STATUS_OK_PIPE_VISUAL_IDENTIFIER_EXPANSION='✔'
# Status when it's just an error code (e.g., '1'). No need to show it if prompt_char is enabled as
# it will signify error by turning red.
typeset -g POWERLEVEL9K_STATUS_ERROR=false
typeset -g POWERLEVEL9K_STATUS_ERROR_FOREGROUND=160
typeset -g POWERLEVEL9K_STATUS_ERROR_VISUAL_IDENTIFIER_EXPANSION='✘'
# Status when the last command was terminated by a signal.
typeset -g POWERLEVEL9K_STATUS_ERROR_SIGNAL=true
typeset -g POWERLEVEL9K_STATUS_ERROR_SIGNAL_FOREGROUND=160
# Use terse signal names: "INT" instead of "SIGINT(2)".
typeset -g POWERLEVEL9K_STATUS_VERBOSE_SIGNAME=false
typeset -g POWERLEVEL9K_STATUS_ERROR_SIGNAL_VISUAL_IDENTIFIER_EXPANSION='✘'
# Status when some part of a pipe command fails and the overall exit status is also non-zero.
# It may look like this: 1|0.
typeset -g POWERLEVEL9K_STATUS_ERROR_PIPE=true
typeset -g POWERLEVEL9K_STATUS_ERROR_PIPE_FOREGROUND=160
typeset -g POWERLEVEL9K_STATUS_ERROR_PIPE_VISUAL_IDENTIFIER_EXPANSION='✘'
###################[ command_execution_time: duration of the last command ]###################
# Show duration of the last command if takes at least this many seconds.
typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_THRESHOLD=3
# Show this many fractional digits. Zero means round to seconds.
typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_PRECISION=0
# Execution time color.
typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_FOREGROUND=101
# Duration format: 1d 2h 3m 4s.
typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_FORMAT='d h m s'
# Custom icon.
# typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Custom prefix.
typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_PREFIX='%ftook '
#######################[ background_jobs: presence of background jobs ]#######################
# Don't show the number of background jobs.
typeset -g POWERLEVEL9K_BACKGROUND_JOBS_VERBOSE=false
# Background jobs color.
typeset -g POWERLEVEL9K_BACKGROUND_JOBS_FOREGROUND=70
# Custom icon.
# typeset -g POWERLEVEL9K_BACKGROUND_JOBS_VISUAL_IDENTIFIER_EXPANSION='⭐'
#######################[ direnv: direnv status (https://direnv.net/) ]########################
# Direnv color.
typeset -g POWERLEVEL9K_DIRENV_FOREGROUND=178
# Custom icon.
# typeset -g POWERLEVEL9K_DIRENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
###############[ asdf: asdf version manager (https://github.com/asdf-vm/asdf) ]###############
# Default asdf color. Only used to display tools for which there is no color override (see below).
# Tip: Override this parameter for ${TOOL} with POWERLEVEL9K_ASDF_${TOOL}_FOREGROUND.
typeset -g POWERLEVEL9K_ASDF_FOREGROUND=66
# There are four parameters that can be used to hide asdf tools. Each parameter describes
# conditions under which a tool gets hidden. Parameters can hide tools but not unhide them. If at
# least one parameter decides to hide a tool, that tool gets hidden. If no parameter decides to
# hide a tool, it gets shown.
#
# Special note on the difference between POWERLEVEL9K_ASDF_SOURCES and
# POWERLEVEL9K_ASDF_PROMPT_ALWAYS_SHOW. Consider the effect of the following commands:
#
# asdf local python 3.8.1
# asdf global python 3.8.1
#
# After running both commands the current python version is 3.8.1 and its source is "local" as
# it takes precedence over "global". If POWERLEVEL9K_ASDF_PROMPT_ALWAYS_SHOW is set to false,
# it'll hide python version in this case because 3.8.1 is the same as the global version.
# POWERLEVEL9K_ASDF_SOURCES will hide python version only if the value of this parameter doesn't
# contain "local".
# Hide tool versions that don't come from one of these sources.
#
# Available sources:
#
# - shell `asdf current` says "set by ASDF_${TOOL}_VERSION environment variable"
# - local `asdf current` says "set by /some/not/home/directory/file"
# - global `asdf current` says "set by /home/username/file"
#
# Note: If this parameter is set to (shell local global), it won't hide tools.
# Tip: Override this parameter for ${TOOL} with POWERLEVEL9K_ASDF_${TOOL}_SOURCES.
typeset -g POWERLEVEL9K_ASDF_SOURCES=(shell local global)
# If set to false, hide tool versions that are the same as global.
#
# Note: The name of this parameter doesn't reflect its meaning at all.
# Note: If this parameter is set to true, it won't hide tools.
# Tip: Override this parameter for ${TOOL} with POWERLEVEL9K_ASDF_${TOOL}_PROMPT_ALWAYS_SHOW.
typeset -g POWERLEVEL9K_ASDF_PROMPT_ALWAYS_SHOW=false
# If set to false, hide tool versions that are equal to "system".
#
# Note: If this parameter is set to true, it won't hide tools.
# Tip: Override this parameter for ${TOOL} with POWERLEVEL9K_ASDF_${TOOL}_SHOW_SYSTEM.
typeset -g POWERLEVEL9K_ASDF_SHOW_SYSTEM=true
# If set to non-empty value, hide tools unless there is a file matching the specified file pattern
# in the current directory, or its parent directory, or its grandparent directory, and so on.
#
# Note: If this parameter is set to empty value, it won't hide tools.
# Note: SHOW_ON_UPGLOB isn't specific to asdf. It works with all prompt segments.
# Tip: Override this parameter for ${TOOL} with POWERLEVEL9K_ASDF_${TOOL}_SHOW_ON_UPGLOB.
#
# Example: Hide nodejs version when there is no package.json and no *.js files in the current
# directory, in `..`, in `../..` and so on.
#
# typeset -g POWERLEVEL9K_ASDF_NODEJS_SHOW_ON_UPGLOB='*.js|package.json'
typeset -g POWERLEVEL9K_ASDF_SHOW_ON_UPGLOB=
# Ruby version from asdf.
typeset -g POWERLEVEL9K_ASDF_RUBY_FOREGROUND=168
# typeset -g POWERLEVEL9K_ASDF_RUBY_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_RUBY_SHOW_ON_UPGLOB='*.foo|*.bar'
# Python version from asdf.
typeset -g POWERLEVEL9K_ASDF_PYTHON_FOREGROUND=37
# typeset -g POWERLEVEL9K_ASDF_PYTHON_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_PYTHON_SHOW_ON_UPGLOB='*.foo|*.bar'
# Go version from asdf.
typeset -g POWERLEVEL9K_ASDF_GOLANG_FOREGROUND=37
# typeset -g POWERLEVEL9K_ASDF_GOLANG_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_GOLANG_SHOW_ON_UPGLOB='*.foo|*.bar'
# Node.js version from asdf.
typeset -g POWERLEVEL9K_ASDF_NODEJS_FOREGROUND=70
# typeset -g POWERLEVEL9K_ASDF_NODEJS_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_NODEJS_SHOW_ON_UPGLOB='*.foo|*.bar'
# Rust version from asdf.
typeset -g POWERLEVEL9K_ASDF_RUST_FOREGROUND=37
# typeset -g POWERLEVEL9K_ASDF_RUST_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_RUST_SHOW_ON_UPGLOB='*.foo|*.bar'
# .NET Core version from asdf.
typeset -g POWERLEVEL9K_ASDF_DOTNET_CORE_FOREGROUND=134
# typeset -g POWERLEVEL9K_ASDF_DOTNET_CORE_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_DOTNET_SHOW_ON_UPGLOB='*.foo|*.bar'
# Flutter version from asdf.
typeset -g POWERLEVEL9K_ASDF_FLUTTER_FOREGROUND=38
# typeset -g POWERLEVEL9K_ASDF_FLUTTER_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_FLUTTER_SHOW_ON_UPGLOB='*.foo|*.bar'
# Lua version from asdf.
typeset -g POWERLEVEL9K_ASDF_LUA_FOREGROUND=32
# typeset -g POWERLEVEL9K_ASDF_LUA_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_LUA_SHOW_ON_UPGLOB='*.foo|*.bar'
# Java version from asdf.
typeset -g POWERLEVEL9K_ASDF_JAVA_FOREGROUND=32
# typeset -g POWERLEVEL9K_ASDF_JAVA_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_JAVA_SHOW_ON_UPGLOB='*.foo|*.bar'
# Perl version from asdf.
typeset -g POWERLEVEL9K_ASDF_PERL_FOREGROUND=67
# typeset -g POWERLEVEL9K_ASDF_PERL_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_PERL_SHOW_ON_UPGLOB='*.foo|*.bar'
# Erlang version from asdf.
typeset -g POWERLEVEL9K_ASDF_ERLANG_FOREGROUND=125
# typeset -g POWERLEVEL9K_ASDF_ERLANG_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_ERLANG_SHOW_ON_UPGLOB='*.foo|*.bar'
# Elixir version from asdf.
typeset -g POWERLEVEL9K_ASDF_ELIXIR_FOREGROUND=129
# typeset -g POWERLEVEL9K_ASDF_ELIXIR_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_ELIXIR_SHOW_ON_UPGLOB='*.foo|*.bar'
# Postgres version from asdf.
typeset -g POWERLEVEL9K_ASDF_POSTGRES_FOREGROUND=31
# typeset -g POWERLEVEL9K_ASDF_POSTGRES_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_POSTGRES_SHOW_ON_UPGLOB='*.foo|*.bar'
# PHP version from asdf.
typeset -g POWERLEVEL9K_ASDF_PHP_FOREGROUND=99
# typeset -g POWERLEVEL9K_ASDF_PHP_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_PHP_SHOW_ON_UPGLOB='*.foo|*.bar'
# Haskell version from asdf.
typeset -g POWERLEVEL9K_ASDF_HASKELL_FOREGROUND=172
# typeset -g POWERLEVEL9K_ASDF_HASKELL_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_HASKELL_SHOW_ON_UPGLOB='*.foo|*.bar'
# Julia version from asdf.
typeset -g POWERLEVEL9K_ASDF_JULIA_FOREGROUND=70
# typeset -g POWERLEVEL9K_ASDF_JULIA_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_ASDF_JULIA_SHOW_ON_UPGLOB='*.foo|*.bar'
##########[ nordvpn: nordvpn connection status, linux only (https://nordvpn.com/) ]###########
# NordVPN connection indicator color.
typeset -g POWERLEVEL9K_NORDVPN_FOREGROUND=39
# Hide NordVPN connection indicator when not connected.
typeset -g POWERLEVEL9K_NORDVPN_{DISCONNECTED,CONNECTING,DISCONNECTING}_CONTENT_EXPANSION=
typeset -g POWERLEVEL9K_NORDVPN_{DISCONNECTED,CONNECTING,DISCONNECTING}_VISUAL_IDENTIFIER_EXPANSION=
# Custom icon.
# typeset -g POWERLEVEL9K_NORDVPN_VISUAL_IDENTIFIER_EXPANSION='⭐'
#################[ ranger: ranger shell (https://github.com/ranger/ranger) ]##################
# Ranger shell color.
typeset -g POWERLEVEL9K_RANGER_FOREGROUND=178
# Custom icon.
# typeset -g POWERLEVEL9K_RANGER_VISUAL_IDENTIFIER_EXPANSION='⭐'
######################[ nnn: nnn shell (https://github.com/jarun/nnn) ]#######################
# Nnn shell color.
typeset -g POWERLEVEL9K_NNN_FOREGROUND=72
# Custom icon.
# typeset -g POWERLEVEL9K_NNN_VISUAL_IDENTIFIER_EXPANSION='⭐'
###########################[ vim_shell: vim shell indicator (:sh) ]###########################
# Vim shell indicator color.
typeset -g POWERLEVEL9K_VIM_SHELL_FOREGROUND=34
# Custom icon.
# typeset -g POWERLEVEL9K_VIM_SHELL_VISUAL_IDENTIFIER_EXPANSION='⭐'
######[ midnight_commander: midnight commander shell (https://midnight-commander.org/) ]######
# Midnight Commander shell color.
typeset -g POWERLEVEL9K_MIDNIGHT_COMMANDER_FOREGROUND=178
# Custom icon.
# typeset -g POWERLEVEL9K_MIDNIGHT_COMMANDER_VISUAL_IDENTIFIER_EXPANSION='⭐'
#[ nix_shell: nix shell (https://nixos.org/nixos/nix-pills/developing-with-nix-shell.html) ]##
# Nix shell color.
typeset -g POWERLEVEL9K_NIX_SHELL_FOREGROUND=74
# Tip: If you want to see just the icon without "pure" and "impure", uncomment the next line.
# typeset -g POWERLEVEL9K_NIX_SHELL_CONTENT_EXPANSION=
# Custom icon.
# typeset -g POWERLEVEL9K_NIX_SHELL_VISUAL_IDENTIFIER_EXPANSION='⭐'
##################################[ disk_usage: disk usage ]##################################
# Colors for different levels of disk usage.
typeset -g POWERLEVEL9K_DISK_USAGE_NORMAL_FOREGROUND=35
typeset -g POWERLEVEL9K_DISK_USAGE_WARNING_FOREGROUND=220
typeset -g POWERLEVEL9K_DISK_USAGE_CRITICAL_FOREGROUND=160
# Thresholds for different levels of disk usage (percentage points).
typeset -g POWERLEVEL9K_DISK_USAGE_WARNING_LEVEL=90
typeset -g POWERLEVEL9K_DISK_USAGE_CRITICAL_LEVEL=95
# If set to true, hide disk usage when below $POWERLEVEL9K_DISK_USAGE_WARNING_LEVEL percent.
typeset -g POWERLEVEL9K_DISK_USAGE_ONLY_WARNING=false
# Custom icon.
# typeset -g POWERLEVEL9K_DISK_USAGE_VISUAL_IDENTIFIER_EXPANSION='⭐'
######################################[ ram: free RAM ]#######################################
# RAM color.
typeset -g POWERLEVEL9K_RAM_FOREGROUND=66
# Custom icon.
# typeset -g POWERLEVEL9K_RAM_VISUAL_IDENTIFIER_EXPANSION='⭐'
#####################################[ swap: used swap ]######################################
# Swap color.
typeset -g POWERLEVEL9K_SWAP_FOREGROUND=96
# Custom icon.
# typeset -g POWERLEVEL9K_SWAP_VISUAL_IDENTIFIER_EXPANSION='⭐'
######################################[ load: CPU load ]######################################
# Show average CPU load over this many last minutes. Valid values are 1, 5 and 15.
typeset -g POWERLEVEL9K_LOAD_WHICH=5
# Load color when load is under 50%.
typeset -g POWERLEVEL9K_LOAD_NORMAL_FOREGROUND=66
# Load color when load is between 50% and 70%.
typeset -g POWERLEVEL9K_LOAD_WARNING_FOREGROUND=178
# Load color when load is over 70%.
typeset -g POWERLEVEL9K_LOAD_CRITICAL_FOREGROUND=166
# Custom icon.
# typeset -g POWERLEVEL9K_LOAD_VISUAL_IDENTIFIER_EXPANSION='⭐'
################[ todo: todo items (https://github.com/todotxt/todo.txt-cli) ]################
# Todo color.
typeset -g POWERLEVEL9K_TODO_FOREGROUND=110
# Hide todo when the total number of tasks is zero.
typeset -g POWERLEVEL9K_TODO_HIDE_ZERO_TOTAL=true
# Hide todo when the number of tasks after filtering is zero.
typeset -g POWERLEVEL9K_TODO_HIDE_ZERO_FILTERED=false
# Todo format. The following parameters are available within the expansion.
#
# - P9K_TODO_TOTAL_TASK_COUNT The total number of tasks.
# - P9K_TODO_FILTERED_TASK_COUNT The number of tasks after filtering.
#
# These variables correspond to the last line of the output of `todo.sh -p ls`:
#
# TODO: 24 of 42 tasks shown
#
# Here 24 is P9K_TODO_FILTERED_TASK_COUNT and 42 is P9K_TODO_TOTAL_TASK_COUNT.
#
# typeset -g POWERLEVEL9K_TODO_CONTENT_EXPANSION='$P9K_TODO_FILTERED_TASK_COUNT'
# Custom icon.
# typeset -g POWERLEVEL9K_TODO_VISUAL_IDENTIFIER_EXPANSION='⭐'
###########[ timewarrior: timewarrior tracking status (https://timewarrior.net/) ]############
# Timewarrior color.
typeset -g POWERLEVEL9K_TIMEWARRIOR_FOREGROUND=110
# If the tracked task is longer than 24 characters, truncate and append "…".
# Tip: To always display tasks without truncation, delete the following parameter.
# Tip: To hide task names and display just the icon when time tracking is enabled, set the
# value of the following parameter to "".
typeset -g POWERLEVEL9K_TIMEWARRIOR_CONTENT_EXPANSION='${P9K_CONTENT:0:24}${${P9K_CONTENT:24}:+…}'
# Custom icon.
# typeset -g POWERLEVEL9K_TIMEWARRIOR_VISUAL_IDENTIFIER_EXPANSION='⭐'
##############[ taskwarrior: taskwarrior task count (https://taskwarrior.org/) ]##############
# Taskwarrior color.
typeset -g POWERLEVEL9K_TASKWARRIOR_FOREGROUND=74
# Taskwarrior segment format. The following parameters are available within the expansion.
#
# - P9K_TASKWARRIOR_PENDING_COUNT The number of pending tasks: `task +PENDING count`.
# - P9K_TASKWARRIOR_OVERDUE_COUNT The number of overdue tasks: `task +OVERDUE count`.
#
# Zero values are represented as empty parameters.
#
# The default format:
#
# '${P9K_TASKWARRIOR_OVERDUE_COUNT:+"!$P9K_TASKWARRIOR_OVERDUE_COUNT/"}$P9K_TASKWARRIOR_PENDING_COUNT'
#
# typeset -g POWERLEVEL9K_TASKWARRIOR_CONTENT_EXPANSION='$P9K_TASKWARRIOR_PENDING_COUNT'
# Custom icon.
# typeset -g POWERLEVEL9K_TASKWARRIOR_VISUAL_IDENTIFIER_EXPANSION='⭐'
##################################[ context: user@hostname ]##################################
# Context color when running with privileges.
typeset -g POWERLEVEL9K_CONTEXT_ROOT_FOREGROUND=178
# Context color in SSH without privileges.
typeset -g POWERLEVEL9K_CONTEXT_{REMOTE,REMOTE_SUDO}_FOREGROUND=180
# Default context color (no privileges, no SSH).
typeset -g POWERLEVEL9K_CONTEXT_FOREGROUND=180
# Context format when running with privileges: bold user@hostname.
typeset -g POWERLEVEL9K_CONTEXT_ROOT_TEMPLATE='%B%n@%m'
# Context format when in SSH without privileges: user@hostname.
typeset -g POWERLEVEL9K_CONTEXT_{REMOTE,REMOTE_SUDO}_TEMPLATE='%n@%m'
# Default context format (no privileges, no SSH): user@hostname.
typeset -g POWERLEVEL9K_CONTEXT_TEMPLATE='%n@%m'
# Don't show context unless running with privileges or in SSH.
# Tip: Remove the next line to always show context.
typeset -g POWERLEVEL9K_CONTEXT_{DEFAULT,SUDO}_{CONTENT,VISUAL_IDENTIFIER}_EXPANSION=
# Custom icon.
# typeset -g POWERLEVEL9K_CONTEXT_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Custom prefix.
typeset -g POWERLEVEL9K_CONTEXT_PREFIX='%fwith '
###[ virtualenv: python virtual environment (https://docs.python.org/3/library/venv.html) ]###
# Python virtual environment color.
typeset -g POWERLEVEL9K_VIRTUALENV_FOREGROUND=37
# Don't show Python version next to the virtual environment name.
typeset -g POWERLEVEL9K_VIRTUALENV_SHOW_PYTHON_VERSION=false
# If set to "false", won't show virtualenv if pyenv is already shown.
# If set to "if-different", won't show virtualenv if it's the same as pyenv.
typeset -g POWERLEVEL9K_VIRTUALENV_SHOW_WITH_PYENV=false
# Separate environment name from Python version only with a space.
typeset -g POWERLEVEL9K_VIRTUALENV_{LEFT,RIGHT}_DELIMITER=
# Custom icon.
# typeset -g POWERLEVEL9K_VIRTUALENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
#####################[ anaconda: conda environment (https://conda.io/) ]######################
# Anaconda environment color.
typeset -g POWERLEVEL9K_ANACONDA_FOREGROUND=37
# Anaconda segment format. The following parameters are available within the expansion.
#
# - CONDA_PREFIX Absolute path to the active Anaconda/Miniconda environment.
# - CONDA_DEFAULT_ENV Name of the active Anaconda/Miniconda environment.
# - CONDA_PROMPT_MODIFIER Configurable prompt modifier (see below).
# - P9K_ANACONDA_PYTHON_VERSION Current python version (python --version).
#
# CONDA_PROMPT_MODIFIER can be configured with the following command:
#
# conda config --set env_prompt '({default_env}) '
#
# The last argument is a Python format string that can use the following variables:
#
# - prefix The same as CONDA_PREFIX.
# - default_env The same as CONDA_DEFAULT_ENV.
# - name The last segment of CONDA_PREFIX.
# - stacked_env Comma-separated list of names in the environment stack. The first element is
# always the same as default_env.
#
# Note: '({default_env}) ' is the default value of env_prompt.
#
# The default value of POWERLEVEL9K_ANACONDA_CONTENT_EXPANSION expands to $CONDA_PROMPT_MODIFIER
# without the surrounding parentheses, or to the last path component of CONDA_PREFIX if the former
# is empty.
typeset -g POWERLEVEL9K_ANACONDA_CONTENT_EXPANSION='${${${${CONDA_PROMPT_MODIFIER#\(}% }%\)}:-${CONDA_PREFIX:t}}'
# Custom icon.
# typeset -g POWERLEVEL9K_ANACONDA_VISUAL_IDENTIFIER_EXPANSION='⭐'
################[ pyenv: python environment (https://github.com/pyenv/pyenv) ]################
# Pyenv color.
typeset -g POWERLEVEL9K_PYENV_FOREGROUND=37
# Hide python version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_PYENV_SOURCES=(shell local global)
# If set to false, hide python version if it's the same as global:
# $(pyenv version-name) == $(pyenv global).
typeset -g POWERLEVEL9K_PYENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide python version if it's equal to "system".
typeset -g POWERLEVEL9K_PYENV_SHOW_SYSTEM=true
# Pyenv segment format. The following parameters are available within the expansion.
#
# - P9K_CONTENT Current pyenv environment (pyenv version-name).
# - P9K_PYENV_PYTHON_VERSION Current python version (python --version).
#
# The default format has the following logic:
#
# 1. Display "$P9K_CONTENT $P9K_PYENV_PYTHON_VERSION" if $P9K_PYENV_PYTHON_VERSION is not
# empty and unequal to $P9K_CONTENT.
# 2. Otherwise display just "$P9K_CONTENT".
typeset -g POWERLEVEL9K_PYENV_CONTENT_EXPANSION='${P9K_CONTENT}${${P9K_PYENV_PYTHON_VERSION:#$P9K_CONTENT}:+ $P9K_PYENV_PYTHON_VERSION}'
# Custom icon.
# typeset -g POWERLEVEL9K_PYENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
################[ goenv: go environment (https://github.com/syndbg/goenv) ]################
# Goenv color.
typeset -g POWERLEVEL9K_GOENV_FOREGROUND=37
# Hide go version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_GOENV_SOURCES=(shell local global)
# If set to false, hide go version if it's the same as global:
# $(goenv version-name) == $(goenv global).
typeset -g POWERLEVEL9K_GOENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide go version if it's equal to "system".
typeset -g POWERLEVEL9K_GOENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_GOENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ nodenv: node.js version from nodenv (https://github.com/nodenv/nodenv) ]##########
# Nodenv color.
typeset -g POWERLEVEL9K_NODENV_FOREGROUND=70
# Hide node version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_NODENV_SOURCES=(shell local global)
# If set to false, hide node version if it's the same as global:
# $(nodenv version-name) == $(nodenv global).
typeset -g POWERLEVEL9K_NODENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide node version if it's equal to "system".
typeset -g POWERLEVEL9K_NODENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_NODENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
##############[ nvm: node.js version from nvm (https://github.com/nvm-sh/nvm) ]###############
# Nvm color.
typeset -g POWERLEVEL9K_NVM_FOREGROUND=70
# Custom icon.
# typeset -g POWERLEVEL9K_NVM_VISUAL_IDENTIFIER_EXPANSION='⭐'
############[ nodeenv: node.js environment (https://github.com/ekalinin/nodeenv) ]############
# Nodeenv color.
typeset -g POWERLEVEL9K_NODEENV_FOREGROUND=70
# Don't show Node version next to the environment name.
typeset -g POWERLEVEL9K_NODEENV_SHOW_NODE_VERSION=false
# Separate environment name from Node version only with a space.
typeset -g POWERLEVEL9K_NODEENV_{LEFT,RIGHT}_DELIMITER=
# Custom icon.
# typeset -g POWERLEVEL9K_NODEENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
##############################[ node_version: node.js version ]###############################
# Node version color.
typeset -g POWERLEVEL9K_NODE_VERSION_FOREGROUND=70
# Show node version only when in a directory tree containing package.json.
typeset -g POWERLEVEL9K_NODE_VERSION_PROJECT_ONLY=true
# Custom icon.
# typeset -g POWERLEVEL9K_NODE_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
#######################[ go_version: go version (https://golang.org) ]########################
# Go version color.
typeset -g POWERLEVEL9K_GO_VERSION_FOREGROUND=37
# Show go version only when in a go project subdirectory.
typeset -g POWERLEVEL9K_GO_VERSION_PROJECT_ONLY=true
# Custom icon.
# typeset -g POWERLEVEL9K_GO_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
#################[ rust_version: rustc version (https://www.rust-lang.org) ]##################
# Rust version color.
typeset -g POWERLEVEL9K_RUST_VERSION_FOREGROUND=37
# Show rust version only when in a rust project subdirectory.
typeset -g POWERLEVEL9K_RUST_VERSION_PROJECT_ONLY=true
# Custom icon.
# typeset -g POWERLEVEL9K_RUST_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
###############[ dotnet_version: .NET version (https://dotnet.microsoft.com) ]################
# .NET version color.
typeset -g POWERLEVEL9K_DOTNET_VERSION_FOREGROUND=134
# Show .NET version only when in a .NET project subdirectory.
typeset -g POWERLEVEL9K_DOTNET_VERSION_PROJECT_ONLY=true
# Custom icon.
# typeset -g POWERLEVEL9K_DOTNET_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
#####################[ php_version: php version (https://www.php.net/) ]######################
# PHP version color.
typeset -g POWERLEVEL9K_PHP_VERSION_FOREGROUND=99
# Show PHP version only when in a PHP project subdirectory.
typeset -g POWERLEVEL9K_PHP_VERSION_PROJECT_ONLY=true
# Custom icon.
# typeset -g POWERLEVEL9K_PHP_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ laravel_version: laravel php framework version (https://laravel.com/) ]###########
# Laravel version color.
typeset -g POWERLEVEL9K_LARAVEL_VERSION_FOREGROUND=161
# Custom icon.
# typeset -g POWERLEVEL9K_LARAVEL_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
####################[ java_version: java version (https://www.java.com/) ]####################
# Java version color.
typeset -g POWERLEVEL9K_JAVA_VERSION_FOREGROUND=32
# Show java version only when in a java project subdirectory.
typeset -g POWERLEVEL9K_JAVA_VERSION_PROJECT_ONLY=true
# Show brief version.
typeset -g POWERLEVEL9K_JAVA_VERSION_FULL=false
# Custom icon.
# typeset -g POWERLEVEL9K_JAVA_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
###[ package: name@version from package.json (https://docs.npmjs.com/files/package.json) ]####
# Package color.
typeset -g POWERLEVEL9K_PACKAGE_FOREGROUND=117
# Package format. The following parameters are available within the expansion.
#
# - P9K_PACKAGE_NAME The value of `name` field in package.json.
# - P9K_PACKAGE_VERSION The value of `version` field in package.json.
#
# typeset -g POWERLEVEL9K_PACKAGE_CONTENT_EXPANSION='${P9K_PACKAGE_NAME//\%/%%}@${P9K_PACKAGE_VERSION//\%/%%}'
# Custom icon.
# typeset -g POWERLEVEL9K_PACKAGE_VISUAL_IDENTIFIER_EXPANSION='⭐'
#############[ rbenv: ruby version from rbenv (https://github.com/rbenv/rbenv) ]##############
# Rbenv color.
typeset -g POWERLEVEL9K_RBENV_FOREGROUND=168
# Hide ruby version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_RBENV_SOURCES=(shell local global)
# If set to false, hide ruby version if it's the same as global:
# $(rbenv version-name) == $(rbenv global).
typeset -g POWERLEVEL9K_RBENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide ruby version if it's equal to "system".
typeset -g POWERLEVEL9K_RBENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_RBENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
#######################[ rvm: ruby version from rvm (https://rvm.io) ]########################
# Rvm color.
typeset -g POWERLEVEL9K_RVM_FOREGROUND=168
# Don't show @gemset at the end.
typeset -g POWERLEVEL9K_RVM_SHOW_GEMSET=false
# Don't show ruby- at the front.
typeset -g POWERLEVEL9K_RVM_SHOW_PREFIX=false
# Custom icon.
# typeset -g POWERLEVEL9K_RVM_VISUAL_IDENTIFIER_EXPANSION='⭐'
###########[ fvm: flutter version management (https://github.com/leoafarias/fvm) ]############
# Fvm color.
typeset -g POWERLEVEL9K_FVM_FOREGROUND=38
# Custom icon.
# typeset -g POWERLEVEL9K_FVM_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ luaenv: lua version from luaenv (https://github.com/cehoffman/luaenv) ]###########
# Lua color.
typeset -g POWERLEVEL9K_LUAENV_FOREGROUND=32
# Hide lua version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_LUAENV_SOURCES=(shell local global)
# If set to false, hide lua version if it's the same as global:
# $(luaenv version-name) == $(luaenv global).
typeset -g POWERLEVEL9K_LUAENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide lua version if it's equal to "system".
typeset -g POWERLEVEL9K_LUAENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_LUAENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
###############[ jenv: java version from jenv (https://github.com/jenv/jenv) ]################
# Java color.
typeset -g POWERLEVEL9K_JENV_FOREGROUND=32
# Hide java version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_JENV_SOURCES=(shell local global)
# If set to false, hide java version if it's the same as global:
# $(jenv version-name) == $(jenv global).
typeset -g POWERLEVEL9K_JENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide java version if it's equal to "system".
typeset -g POWERLEVEL9K_JENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_JENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
###########[ plenv: perl version from plenv (https://github.com/tokuhirom/plenv) ]############
# Perl color.
typeset -g POWERLEVEL9K_PLENV_FOREGROUND=67
# Hide perl version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_PLENV_SOURCES=(shell local global)
# If set to false, hide perl version if it's the same as global:
# $(plenv version-name) == $(plenv global).
typeset -g POWERLEVEL9K_PLENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide perl version if it's equal to "system".
typeset -g POWERLEVEL9K_PLENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_PLENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
############[ phpenv: php version from phpenv (https://github.com/phpenv/phpenv) ]############
# PHP color.
typeset -g POWERLEVEL9K_PHPENV_FOREGROUND=99
# Hide php version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_PHPENV_SOURCES=(shell local global)
# If set to false, hide php version if it's the same as global:
# $(phpenv version-name) == $(phpenv global).
typeset -g POWERLEVEL9K_PHPENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide php version if it's equal to "system".
typeset -g POWERLEVEL9K_PHPENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_PHPENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
#######[ scalaenv: scala version from scalaenv (https://github.com/scalaenv/scalaenv) ]#######
# Scala color.
typeset -g POWERLEVEL9K_SCALAENV_FOREGROUND=160
# Hide scala version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_SCALAENV_SOURCES=(shell local global)
# If set to false, hide scala version if it's the same as global:
# $(scalaenv version-name) == $(scalaenv global).
typeset -g POWERLEVEL9K_SCALAENV_PROMPT_ALWAYS_SHOW=false
# If set to false, hide scala version if it's equal to "system".
typeset -g POWERLEVEL9K_SCALAENV_SHOW_SYSTEM=true
# Custom icon.
# typeset -g POWERLEVEL9K_SCALAENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ haskell_stack: haskell version from stack (https://haskellstack.org/) ]###########
# Haskell color.
typeset -g POWERLEVEL9K_HASKELL_STACK_FOREGROUND=172
# Hide haskell version if it doesn't come from one of these sources.
#
# shell: version is set by STACK_YAML
# local: version is set by stack.yaml up the directory tree
# global: version is set by the implicit global project (~/.stack/global-project/stack.yaml)
typeset -g POWERLEVEL9K_HASKELL_STACK_SOURCES=(shell local)
# If set to false, hide haskell version if it's the same as in the implicit global project.
typeset -g POWERLEVEL9K_HASKELL_STACK_ALWAYS_SHOW=true
# Custom icon.
# typeset -g POWERLEVEL9K_HASKELL_STACK_VISUAL_IDENTIFIER_EXPANSION='⭐'
#############[ kubecontext: current kubernetes context (https://kubernetes.io/) ]#############
# Show kubecontext only when the command you are typing invokes one of these tools.
# Tip: Remove the next line to always show kubecontext.
typeset -g POWERLEVEL9K_KUBECONTEXT_SHOW_ON_COMMAND='kubectl|helm|kubens|kubectx|oc|istioctl|kogito|k9s|helmfile|fluxctl|stern'
# Kubernetes context classes for the purpose of using different colors, icons and expansions with
# different contexts.
#
# POWERLEVEL9K_KUBECONTEXT_CLASSES is an array with even number of elements. The first element
# in each pair defines a pattern against which the current kubernetes context gets matched.
# More specifically, it's P9K_CONTENT prior to the application of context expansion (see below)
# that gets matched. If you unset all POWERLEVEL9K_KUBECONTEXT_*CONTENT_EXPANSION parameters,
# you'll see this value in your prompt. The second element of each pair in
# POWERLEVEL9K_KUBECONTEXT_CLASSES defines the context class. Patterns are tried in order. The
# first match wins.
#
# For example, given these settings:
#
# typeset -g POWERLEVEL9K_KUBECONTEXT_CLASSES=(
# '*prod*' PROD
# '*test*' TEST
# '*' DEFAULT)
#
# If your current kubernetes context is "deathray-testing/default", its class is TEST
# because "deathray-testing/default" doesn't match the pattern '*prod*' but does match '*test*'.
#
# You can define different colors, icons and content expansions for different classes:
#
# typeset -g POWERLEVEL9K_KUBECONTEXT_TEST_FOREGROUND=28
# typeset -g POWERLEVEL9K_KUBECONTEXT_TEST_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_KUBECONTEXT_TEST_CONTENT_EXPANSION='> ${P9K_CONTENT} <'
typeset -g POWERLEVEL9K_KUBECONTEXT_CLASSES=(
# '*prod*' PROD # These values are examples that are unlikely
# '*test*' TEST # to match your needs. Customize them as needed.
'*' DEFAULT)
typeset -g POWERLEVEL9K_KUBECONTEXT_DEFAULT_FOREGROUND=134
# typeset -g POWERLEVEL9K_KUBECONTEXT_DEFAULT_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Use POWERLEVEL9K_KUBECONTEXT_CONTENT_EXPANSION to specify the content displayed by kubecontext
# segment. Parameter expansions are very flexible and fast, too. See reference:
# http://zsh.sourceforge.net/Doc/Release/Expansion.html#Parameter-Expansion.
#
# Within the expansion the following parameters are always available:
#
# - P9K_CONTENT The content that would've been displayed if there was no content
# expansion defined.
# - P9K_KUBECONTEXT_NAME The current context's name. Corresponds to column NAME in the
# output of `kubectl config get-contexts`.
# - P9K_KUBECONTEXT_CLUSTER The current context's cluster. Corresponds to column CLUSTER in the
# output of `kubectl config get-contexts`.
# - P9K_KUBECONTEXT_NAMESPACE The current context's namespace. Corresponds to column NAMESPACE
# in the output of `kubectl config get-contexts`. If there is no
# namespace, the parameter is set to "default".
# - P9K_KUBECONTEXT_USER The current context's user. Corresponds to column AUTHINFO in the
# output of `kubectl config get-contexts`.
#
# If the context points to Google Kubernetes Engine (GKE) or Elastic Kubernetes Service (EKS),
# the following extra parameters are available:
#
# - P9K_KUBECONTEXT_CLOUD_NAME Either "gke" or "eks".
# - P9K_KUBECONTEXT_CLOUD_ACCOUNT Account/project ID.
# - P9K_KUBECONTEXT_CLOUD_ZONE Availability zone.
# - P9K_KUBECONTEXT_CLOUD_CLUSTER Cluster.
#
# P9K_KUBECONTEXT_CLOUD_* parameters are derived from P9K_KUBECONTEXT_CLUSTER. For example,
# if P9K_KUBECONTEXT_CLUSTER is "gke_my-account_us-east1-a_my-cluster-01":
#
# - P9K_KUBECONTEXT_CLOUD_NAME=gke
# - P9K_KUBECONTEXT_CLOUD_ACCOUNT=my-account
# - P9K_KUBECONTEXT_CLOUD_ZONE=us-east1-a
# - P9K_KUBECONTEXT_CLOUD_CLUSTER=my-cluster-01
#
# If P9K_KUBECONTEXT_CLUSTER is "arn:aws:eks:us-east-1:123456789012:cluster/my-cluster-01":
#
# - P9K_KUBECONTEXT_CLOUD_NAME=eks
# - P9K_KUBECONTEXT_CLOUD_ACCOUNT=123456789012
# - P9K_KUBECONTEXT_CLOUD_ZONE=us-east-1
# - P9K_KUBECONTEXT_CLOUD_CLUSTER=my-cluster-01
typeset -g POWERLEVEL9K_KUBECONTEXT_DEFAULT_CONTENT_EXPANSION=
# Show P9K_KUBECONTEXT_CLOUD_CLUSTER if it's not empty and fall back to P9K_KUBECONTEXT_NAME.
POWERLEVEL9K_KUBECONTEXT_DEFAULT_CONTENT_EXPANSION+='${P9K_KUBECONTEXT_CLOUD_CLUSTER:-${P9K_KUBECONTEXT_NAME}}'
# Append the current context's namespace if it's not "default".
POWERLEVEL9K_KUBECONTEXT_DEFAULT_CONTENT_EXPANSION+='${${:-/$P9K_KUBECONTEXT_NAMESPACE}:#/default}'
# Custom prefix.
typeset -g POWERLEVEL9K_KUBECONTEXT_PREFIX='%fat '
################[ terraform: terraform workspace (https://www.terraform.io) ]#################
# Don't show terraform workspace if it's literally "default".
typeset -g POWERLEVEL9K_TERRAFORM_SHOW_DEFAULT=false
# POWERLEVEL9K_TERRAFORM_CLASSES is an array with even number of elements. The first element
# in each pair defines a pattern against which the current terraform workspace gets matched.
# More specifically, it's P9K_CONTENT prior to the application of context expansion (see below)
# that gets matched. If you unset all POWERLEVEL9K_TERRAFORM_*CONTENT_EXPANSION parameters,
# you'll see this value in your prompt. The second element of each pair in
# POWERLEVEL9K_TERRAFORM_CLASSES defines the workspace class. Patterns are tried in order. The
# first match wins.
#
# For example, given these settings:
#
# typeset -g POWERLEVEL9K_TERRAFORM_CLASSES=(
# '*prod*' PROD
# '*test*' TEST
# '*' OTHER)
#
# If your current terraform workspace is "project_test", its class is TEST because "project_test"
# doesn't match the pattern '*prod*' but does match '*test*'.
#
# You can define different colors, icons and content expansions for different classes:
#
# typeset -g POWERLEVEL9K_TERRAFORM_TEST_FOREGROUND=28
# typeset -g POWERLEVEL9K_TERRAFORM_TEST_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_TERRAFORM_TEST_CONTENT_EXPANSION='> ${P9K_CONTENT} <'
typeset -g POWERLEVEL9K_TERRAFORM_CLASSES=(
# '*prod*' PROD # These values are examples that are unlikely
# '*test*' TEST # to match your needs. Customize them as needed.
'*' OTHER)
typeset -g POWERLEVEL9K_TERRAFORM_OTHER_FOREGROUND=38
# typeset -g POWERLEVEL9K_TERRAFORM_OTHER_VISUAL_IDENTIFIER_EXPANSION='⭐'
#[ aws: aws profile (https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-profiles.html) ]#
# Show aws only when the command you are typing invokes one of these tools.
# Tip: Remove the next line to always show aws.
typeset -g POWERLEVEL9K_AWS_SHOW_ON_COMMAND='aws|awless|terraform|pulumi|terragrunt'
# POWERLEVEL9K_AWS_CLASSES is an array with even number of elements. The first element
# in each pair defines a pattern against which the current AWS profile gets matched.
# More specifically, it's P9K_CONTENT prior to the application of context expansion (see below)
# that gets matched. If you unset all POWERLEVEL9K_AWS_*CONTENT_EXPANSION parameters,
# you'll see this value in your prompt. The second element of each pair in
# POWERLEVEL9K_AWS_CLASSES defines the profile class. Patterns are tried in order. The
# first match wins.
#
# For example, given these settings:
#
# typeset -g POWERLEVEL9K_AWS_CLASSES=(
# '*prod*' PROD
# '*test*' TEST
# '*' DEFAULT)
#
# If your current AWS profile is "company_test", its class is TEST
# because "company_test" doesn't match the pattern '*prod*' but does match '*test*'.
#
# You can define different colors, icons and content expansions for different classes:
#
# typeset -g POWERLEVEL9K_AWS_TEST_FOREGROUND=28
# typeset -g POWERLEVEL9K_AWS_TEST_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_AWS_TEST_CONTENT_EXPANSION='> ${P9K_CONTENT} <'
typeset -g POWERLEVEL9K_AWS_CLASSES=(
# '*prod*' PROD # These values are examples that are unlikely
# '*test*' TEST # to match your needs. Customize them as needed.
'*' DEFAULT)
typeset -g POWERLEVEL9K_AWS_DEFAULT_FOREGROUND=208
# typeset -g POWERLEVEL9K_AWS_DEFAULT_VISUAL_IDENTIFIER_EXPANSION='⭐'
#[ aws_eb_env: aws elastic beanstalk environment (https://aws.amazon.com/elasticbeanstalk/) ]#
# AWS Elastic Beanstalk environment color.
typeset -g POWERLEVEL9K_AWS_EB_ENV_FOREGROUND=70
# Custom icon.
# typeset -g POWERLEVEL9K_AWS_EB_ENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ azure: azure account name (https://docs.microsoft.com/en-us/cli/azure) ]##########
# Show azure only when the command you are typing invokes one of these tools.
# Tip: Remove the next line to always show azure.
typeset -g POWERLEVEL9K_AZURE_SHOW_ON_COMMAND='az|terraform|pulumi|terragrunt'
# Azure account name color.
typeset -g POWERLEVEL9K_AZURE_FOREGROUND=32
# Custom icon.
# typeset -g POWERLEVEL9K_AZURE_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ gcloud: google cloud account and project (https://cloud.google.com/) ]###########
# Show gcloud only when the command you are typing invokes one of these tools.
# Tip: Remove the next line to always show gcloud.
typeset -g POWERLEVEL9K_GCLOUD_SHOW_ON_COMMAND='gcloud|gcs'
# Google cloud color.
typeset -g POWERLEVEL9K_GCLOUD_FOREGROUND=32
# Google cloud format. Change the value of POWERLEVEL9K_GCLOUD_PARTIAL_CONTENT_EXPANSION and/or
# POWERLEVEL9K_GCLOUD_COMPLETE_CONTENT_EXPANSION if the default is too verbose or not informative
# enough. You can use the following parameters in the expansions. Each of them corresponds to the
# output of `gcloud` tool.
#
# Parameter | Source
# -------------------------|--------------------------------------------------------------------
# P9K_GCLOUD_CONFIGURATION | gcloud config configurations list --format='value(name)'
# P9K_GCLOUD_ACCOUNT | gcloud config get-value account
# P9K_GCLOUD_PROJECT_ID | gcloud config get-value project
# P9K_GCLOUD_PROJECT_NAME | gcloud projects describe $P9K_GCLOUD_PROJECT_ID --format='value(name)'
#
# Note: ${VARIABLE//\%/%%} expands to ${VARIABLE} with all occurrences of '%' replaced with '%%'.
#
# Obtaining project name requires sending a request to Google servers. This can take a long time
# and even fail. When project name is unknown, P9K_GCLOUD_PROJECT_NAME is not set and gcloud
# prompt segment is in state PARTIAL. When project name gets known, P9K_GCLOUD_PROJECT_NAME gets
# set and gcloud prompt segment transitions to state COMPLETE.
#
# You can customize the format, icon and colors of gcloud segment separately for states PARTIAL
# and COMPLETE. You can also hide gcloud in state PARTIAL by setting
# POWERLEVEL9K_GCLOUD_PARTIAL_VISUAL_IDENTIFIER_EXPANSION and
# POWERLEVEL9K_GCLOUD_PARTIAL_CONTENT_EXPANSION to empty.
typeset -g POWERLEVEL9K_GCLOUD_PARTIAL_CONTENT_EXPANSION='${P9K_GCLOUD_PROJECT_ID//\%/%%}'
typeset -g POWERLEVEL9K_GCLOUD_COMPLETE_CONTENT_EXPANSION='${P9K_GCLOUD_PROJECT_NAME//\%/%%}'
# Send a request to Google (by means of `gcloud projects describe ...`) to obtain project name
# this often. Negative value disables periodic polling. In this mode project name is retrieved
# only when the current configuration, account or project id changes.
typeset -g POWERLEVEL9K_GCLOUD_REFRESH_PROJECT_NAME_SECONDS=60
# Custom icon.
# typeset -g POWERLEVEL9K_GCLOUD_VISUAL_IDENTIFIER_EXPANSION='⭐'
#[ google_app_cred: google application credentials (https://cloud.google.com/docs/authentication/production) ]#
# Show google_app_cred only when the command you are typing invokes one of these tools.
# Tip: Remove the next line to always show google_app_cred.
typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_SHOW_ON_COMMAND='terraform|pulumi|terragrunt'
# Google application credentials classes for the purpose of using different colors, icons and
# expansions with different credentials.
#
# POWERLEVEL9K_GOOGLE_APP_CRED_CLASSES is an array with even number of elements. The first
# element in each pair defines a pattern against which the current Google application credentials
# get matched. More specifically, it's P9K_CONTENT prior to the application of context expansion
# (see below) that gets matched. If you unset all POWERLEVEL9K_GOOGLE_APP_CRED_*CONTENT_EXPANSION
# parameters, you'll see this value in your prompt. The second element of each pair in
# POWERLEVEL9K_GOOGLE_APP_CRED_CLASSES defines the context class. Patterns are tried in order.
# The first match wins.
#
# For example, given these settings:
#
# typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_CLASSES=(
# '*:*prod*:*' PROD
# '*:*test*:*' TEST
# '*' DEFAULT)
#
# If your current Google application credentials is "service_account deathray-testing x@y.com",
# its class is TEST because it doesn't match the pattern '*:*prod*:*' but does match '*:*test*:*'.
#
# You can define different colors, icons and content expansions for different classes:
#
# typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_TEST_FOREGROUND=28
# typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_TEST_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_TEST_CONTENT_EXPANSION='$P9K_GOOGLE_APP_CRED_PROJECT_ID'
typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_CLASSES=(
# '*:*prod*:*' PROD # These values are examples that are unlikely
# '*:*test*:*' TEST # to match your needs. Customize them as needed.
'*' DEFAULT)
typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_DEFAULT_FOREGROUND=32
# typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_DEFAULT_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Use POWERLEVEL9K_GOOGLE_APP_CRED_CONTENT_EXPANSION to specify the content displayed by
# google_app_cred segment. Parameter expansions are very flexible and fast, too. See reference:
# http://zsh.sourceforge.net/Doc/Release/Expansion.html#Parameter-Expansion.
#
# You can use the following parameters in the expansion. Each of them corresponds to one of the
# fields in the JSON file pointed to by GOOGLE_APPLICATION_CREDENTIALS.
#
# Parameter | JSON key file field
# ---------------------------------+---------------
# P9K_GOOGLE_APP_CRED_TYPE | type
# P9K_GOOGLE_APP_CRED_PROJECT_ID | project_id
# P9K_GOOGLE_APP_CRED_CLIENT_EMAIL | client_email
#
# Note: ${VARIABLE//\%/%%} expands to ${VARIABLE} with all occurrences of '%' replaced by '%%'.
typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_DEFAULT_CONTENT_EXPANSION='${P9K_GOOGLE_APP_CRED_PROJECT_ID//\%/%%}'
###############################[ public_ip: public IP address ]###############################
# Public IP color.
typeset -g POWERLEVEL9K_PUBLIC_IP_FOREGROUND=94
# Custom icon.
# typeset -g POWERLEVEL9K_PUBLIC_IP_VISUAL_IDENTIFIER_EXPANSION='⭐'
########################[ vpn_ip: virtual private network indicator ]#########################
# VPN IP color.
typeset -g POWERLEVEL9K_VPN_IP_FOREGROUND=81
# When on VPN, show just an icon without the IP address.
# Tip: To display the private IP address when on VPN, remove the next line.
typeset -g POWERLEVEL9K_VPN_IP_CONTENT_EXPANSION=
# Regular expression for the VPN network interface. Run `ifconfig` or `ip -4 a show` while on VPN
# to see the name of the interface.
typeset -g POWERLEVEL9K_VPN_IP_INTERFACE='(gpd|wg|(.*tun)|tailscale)[0-9]*'
# If set to true, show one segment per matching network interface. If set to false, show only
# one segment corresponding to the first matching network interface.
# Tip: If you set it to true, you'll probably want to unset POWERLEVEL9K_VPN_IP_CONTENT_EXPANSION.
typeset -g POWERLEVEL9K_VPN_IP_SHOW_ALL=false
# Custom icon.
# typeset -g POWERLEVEL9K_VPN_IP_VISUAL_IDENTIFIER_EXPANSION='⭐'
###########[ ip: ip address and bandwidth usage for a specified network interface ]###########
# IP color.
typeset -g POWERLEVEL9K_IP_FOREGROUND=38
# The following parameters are accessible within the expansion:
#
# Parameter | Meaning
# ----------------------+---------------
# P9K_IP_IP | IP address
# P9K_IP_INTERFACE | network interface
# P9K_IP_RX_BYTES | total number of bytes received
# P9K_IP_TX_BYTES | total number of bytes sent
# P9K_IP_RX_RATE | receive rate (since last prompt)
# P9K_IP_TX_RATE | send rate (since last prompt)
typeset -g POWERLEVEL9K_IP_CONTENT_EXPANSION='$P9K_IP_IP${P9K_IP_RX_RATE:+ %70F⇣$P9K_IP_RX_RATE}${P9K_IP_TX_RATE:+ %215F⇡$P9K_IP_TX_RATE}'
# Show information for the first network interface whose name matches this regular expression.
# Run `ifconfig` or `ip -4 a show` to see the names of all network interfaces.
typeset -g POWERLEVEL9K_IP_INTERFACE='[ew].*'
# Custom icon.
# typeset -g POWERLEVEL9K_IP_VISUAL_IDENTIFIER_EXPANSION='⭐'
#########################[ proxy: system-wide http/https/ftp proxy ]##########################
# Proxy color.
typeset -g POWERLEVEL9K_PROXY_FOREGROUND=68
# Custom icon.
# typeset -g POWERLEVEL9K_PROXY_VISUAL_IDENTIFIER_EXPANSION='⭐'
################################[ battery: internal battery ]#################################
# Show battery in red when it's below this level and not connected to power supply.
typeset -g POWERLEVEL9K_BATTERY_LOW_THRESHOLD=20
typeset -g POWERLEVEL9K_BATTERY_LOW_FOREGROUND=160
# Show battery in green when it's charging or fully charged.
typeset -g POWERLEVEL9K_BATTERY_{CHARGING,CHARGED}_FOREGROUND=70
# Show battery in yellow when it's discharging.
typeset -g POWERLEVEL9K_BATTERY_DISCONNECTED_FOREGROUND=178
# Battery pictograms going from low to high level of charge.
typeset -g POWERLEVEL9K_BATTERY_STAGES='\uf58d\uf579\uf57a\uf57b\uf57c\uf57d\uf57e\uf57f\uf580\uf581\uf578'
# Don't show the remaining time to charge/discharge.
typeset -g POWERLEVEL9K_BATTERY_VERBOSE=false
#####################################[ wifi: wifi speed ]#####################################
# WiFi color.
typeset -g POWERLEVEL9K_WIFI_FOREGROUND=68
# Custom icon.
# typeset -g POWERLEVEL9K_WIFI_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Use different colors and icons depending on signal strength ($P9K_WIFI_BARS).
#
# # Wifi colors and icons for different signal strength levels (low to high).
# typeset -g my_wifi_fg=(68 68 68 68 68) # <-- change these values
# typeset -g my_wifi_icon=('WiFi' 'WiFi' 'WiFi' 'WiFi' 'WiFi') # <-- change these values
#
# typeset -g POWERLEVEL9K_WIFI_CONTENT_EXPANSION='%F{${my_wifi_fg[P9K_WIFI_BARS+1]}}$P9K_WIFI_LAST_TX_RATE Mbps'
# typeset -g POWERLEVEL9K_WIFI_VISUAL_IDENTIFIER_EXPANSION='%F{${my_wifi_fg[P9K_WIFI_BARS+1]}}${my_wifi_icon[P9K_WIFI_BARS+1]}'
#
# The following parameters are accessible within the expansions:
#
# Parameter | Meaning
# ----------------------+---------------
# P9K_WIFI_SSID | service set identifier, a.k.a. network name
# P9K_WIFI_LINK_AUTH | authentication protocol such as "wpa2-psk" or "none"; empty if unknown
# P9K_WIFI_LAST_TX_RATE | wireless transmit rate in megabits per second
# P9K_WIFI_RSSI | signal strength in dBm, from -120 to 0
# P9K_WIFI_NOISE | noise in dBm, from -120 to 0
# P9K_WIFI_BARS | signal strength in bars, from 0 to 4 (derived from P9K_WIFI_RSSI and P9K_WIFI_NOISE)
####################################[ time: current time ]####################################
# Current time color.
typeset -g POWERLEVEL9K_TIME_FOREGROUND=66
# Format for the current time: 09:51:02. See `man 3 strftime`.
typeset -g POWERLEVEL9K_TIME_FORMAT='%D{%H:%M:%S}'
# If set to true, time will update when you hit enter. This way prompts for the past
# commands will contain the start times of their commands as opposed to the default
# behavior where they contain the end times of their preceding commands.
typeset -g POWERLEVEL9K_TIME_UPDATE_ON_COMMAND=false
# Custom icon.
# typeset -g POWERLEVEL9K_TIME_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Custom prefix.
typeset -g POWERLEVEL9K_TIME_PREFIX='%fat '
# Example of a user-defined prompt segment. Function prompt_example will be called on every
# prompt if `example` prompt segment is added to POWERLEVEL9K_LEFT_PROMPT_ELEMENTS or
# POWERLEVEL9K_RIGHT_PROMPT_ELEMENTS. It displays an icon and orange text greeting the user.
#
# Type `p10k help segment` for documentation and a more sophisticated example.
function prompt_example() {
p10k segment -f 208 -i '⭐' -t 'hello, %n'
}
# User-defined prompt segments may optionally provide an instant_prompt_* function. Its job
# is to generate the prompt segment for display in instant prompt. See
# https://github.com/romkatv/powerlevel10k/blob/master/README.md#instant-prompt.
#
# Powerlevel10k will call instant_prompt_* at the same time as the regular prompt_* function
# and will record all `p10k segment` calls it makes. When displaying instant prompt, Powerlevel10k
# will replay these calls without actually calling instant_prompt_*. It is imperative that
# instant_prompt_* always makes the same `p10k segment` calls regardless of environment. If this
# rule is not observed, the content of instant prompt will be incorrect.
#
# Usually, you should either not define instant_prompt_* or simply call prompt_* from it. If
# instant_prompt_* is not defined for a segment, the segment won't be shown in instant prompt.
function instant_prompt_example() {
# Since prompt_example always makes the same `p10k segment` calls, we can call it from
# instant_prompt_example. This will give us the same `example` prompt segment in the instant
# and regular prompts.
prompt_example
}
# User-defined prompt segments can be customized the same way as built-in segments.
# typeset -g POWERLEVEL9K_EXAMPLE_FOREGROUND=208
# typeset -g POWERLEVEL9K_EXAMPLE_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Transient prompt works similarly to the builtin transient_rprompt option. It trims down prompt
# when accepting a command line. Supported values:
#
# - off: Don't change prompt when accepting a command line.
# - always: Trim down prompt when accepting a command line.
# - same-dir: Trim down prompt when accepting a command line unless this is the first command
# typed after changing current working directory.
typeset -g POWERLEVEL9K_TRANSIENT_PROMPT=always
# Instant prompt mode.
#
# - off: Disable instant prompt. Choose this if you've tried instant prompt and found
# it incompatible with your zsh configuration files.
# - quiet: Enable instant prompt and don't print warnings when detecting console output
# during zsh initialization. Choose this if you've read and understood
# https://github.com/romkatv/powerlevel10k/blob/master/README.md#instant-prompt.
# - verbose: Enable instant prompt and print a warning when detecting console output during
# zsh initialization. Choose this if you've never tried instant prompt, haven't
# seen the warning, or if you are unsure what this all means.
typeset -g POWERLEVEL9K_INSTANT_PROMPT=quiet
# Hot reload allows you to change POWERLEVEL9K options after Powerlevel10k has been initialized.
# For example, you can type POWERLEVEL9K_BACKGROUND=red and see your prompt turn red. Hot reload
# can slow down prompt by 1-2 milliseconds, so it's better to keep it turned off unless you
# really need it.
typeset -g POWERLEVEL9K_DISABLE_HOT_RELOAD=true
# If p10k is already loaded, reload configuration.
# This works even with POWERLEVEL9K_DISABLE_HOT_RELOAD=true.
(( ! $+functions[p10k] )) || p10k reload
}
# Tell `p10k configure` which file it should overwrite.
typeset -g POWERLEVEL9K_CONFIG_FILE=${${(%):-%x}:a}
(( ${#p10k_config_opts} )) && setopt ${p10k_config_opts[@]}
'builtin' 'unset' 'p10k_config_opts'
|
<gh_stars>0
/**
* @fileoverview Check whether the given variable is an object or not.
*
*/
'use strict';
/**
* Check whether the given variable is an object or not.
* If the given variable is an object, return true.
* @param {*} obj - Target for checking
* @returns {boolean} Is object?
* @memberof module:type
*/
function isObject(obj) {
return obj === Object(obj);
}
module.exports = isObject;
|
/**
* Default JDClient options
* @typedef {Object} DefaultOptions
* @property {string} token Discord api token
* @property {string} [trigger='.'] The string required to invoke a command
* @property {boolean} [autoconnect=true] Indicates if the client should automatically connect
* @property {boolean} [debug=true] False will supress all debug messages
* @property {number} [verbosity=true] Debug verbose level, setting to `2` will display more debug info
*/
module.exports.DefaultOptions = {
token: '',
trigger: '.',
autoconnect: true,
debug: true,
verbosity: 1
} |
<gh_stars>0
/*
* RadioManager
* RadioManager
*
* OpenAPI spec version: 2.0
* Contact: <EMAIL>
*
* NOTE: This class is auto generated by the swagger code generator program.
* https://github.com/swagger-api/swagger-codegen.git
* Do not edit the class manually.
*/
package com.pluxbox.radiomanager.api.models;
import java.util.Objects;
import com.google.gson.TypeAdapter;
import com.google.gson.annotations.JsonAdapter;
import com.google.gson.annotations.SerializedName;
import com.google.gson.stream.JsonReader;
import com.google.gson.stream.JsonWriter;
import com.pluxbox.radiomanager.api.models.Broadcast;
import com.pluxbox.radiomanager.api.models.BroadcastInputOnly;
import io.swagger.annotations.ApiModel;
import io.swagger.annotations.ApiModelProperty;
import java.io.IOException;
import java.time.OffsetDateTime;
import java.util.ArrayList;
import java.util.List;
/**
* BroadcastDataInput
*/
public class BroadcastDataInput {
@SerializedName("program_id")
private Long programId = null;
@SerializedName("model_type_id")
private Long modelTypeId = null;
@SerializedName("station_id")
private Long stationId = null;
@SerializedName("field_values")
private Object fieldValues = null;
@SerializedName("title")
private String title = null;
@SerializedName("start")
private OffsetDateTime start = null;
@SerializedName("stop")
private OffsetDateTime stop = null;
@SerializedName("genre_id")
private Long genreId = null;
@SerializedName("description")
private String description = null;
@SerializedName("short_name")
private String shortName = null;
@SerializedName("medium_name")
private String mediumName = null;
@SerializedName("website")
private String website = null;
@SerializedName("email")
private String email = null;
@SerializedName("recommended")
private Boolean recommended = null;
@SerializedName("language")
private String language = null;
@SerializedName("published")
private Boolean published = null;
@SerializedName("repetition_uid")
private String repetitionUid = null;
/**
* Gets or Sets repetitionType
*/
@JsonAdapter(RepetitionTypeEnum.Adapter.class)
public enum RepetitionTypeEnum {
_1_WEEK("1 week"),
_2_WEEK("2 week"),
_4_WEEK("4 week"),
_1_MONTH("1 month");
private String value;
RepetitionTypeEnum(String value) {
this.value = value;
}
public String getValue() {
return value;
}
@Override
public String toString() {
return String.valueOf(value);
}
public static RepetitionTypeEnum fromValue(String text) {
for (RepetitionTypeEnum b : RepetitionTypeEnum.values()) {
if (String.valueOf(b.value).equals(text)) {
return b;
}
}
return null;
}
public static class Adapter extends TypeAdapter<RepetitionTypeEnum> {
@Override
public void write(final JsonWriter jsonWriter, final RepetitionTypeEnum enumeration) throws IOException {
jsonWriter.value(enumeration.getValue());
}
@Override
public RepetitionTypeEnum read(final JsonReader jsonReader) throws IOException {
String value = jsonReader.nextString();
return RepetitionTypeEnum.fromValue(String.valueOf(value));
}
}
}
@SerializedName("repetition_type")
private RepetitionTypeEnum repetitionType = null;
@SerializedName("repetition_end")
private OffsetDateTime repetitionEnd = null;
@SerializedName("repetition_start")
private OffsetDateTime repetitionStart = null;
@SerializedName("repetition_days")
private String repetitionDays = null;
@SerializedName("pty_code_id")
private Long ptyCodeId = null;
@SerializedName("tags")
private List<Integer> tags = null;
@SerializedName("presenters")
private List<Integer> presenters = null;
public BroadcastDataInput programId(Long programId) {
this.programId = programId;
return this;
}
/**
* Get programId
* @return programId
**/
@ApiModelProperty(example = "1", value = "")
public Long getProgramId() {
return programId;
}
public void setProgramId(Long programId) {
this.programId = programId;
}
public BroadcastDataInput modelTypeId(Long modelTypeId) {
this.modelTypeId = modelTypeId;
return this;
}
/**
* Get modelTypeId
* @return modelTypeId
**/
@ApiModelProperty(example = "2", value = "")
public Long getModelTypeId() {
return modelTypeId;
}
public void setModelTypeId(Long modelTypeId) {
this.modelTypeId = modelTypeId;
}
public BroadcastDataInput stationId(Long stationId) {
this.stationId = stationId;
return this;
}
/**
* Get stationId
* @return stationId
**/
@ApiModelProperty(example = "1", value = "")
public Long getStationId() {
return stationId;
}
public void setStationId(Long stationId) {
this.stationId = stationId;
}
public BroadcastDataInput fieldValues(Object fieldValues) {
this.fieldValues = fieldValues;
return this;
}
/**
* Get fieldValues
* @return fieldValues
**/
@ApiModelProperty(value = "")
public Object getFieldValues() {
return fieldValues;
}
public void setFieldValues(Object fieldValues) {
this.fieldValues = fieldValues;
}
public BroadcastDataInput title(String title) {
this.title = title;
return this;
}
/**
* Get title
* @return title
**/
@ApiModelProperty(example = "FooBar Show", value = "")
public String getTitle() {
return title;
}
public void setTitle(String title) {
this.title = title;
}
public BroadcastDataInput start(OffsetDateTime start) {
this.start = start;
return this;
}
/**
* Get start
* @return start
**/
@ApiModelProperty(example = "2016-01-11T22:01:11+02:00", value = "")
public OffsetDateTime getStart() {
return start;
}
public void setStart(OffsetDateTime start) {
this.start = start;
}
public BroadcastDataInput stop(OffsetDateTime stop) {
this.stop = stop;
return this;
}
/**
* Get stop
* @return stop
**/
@ApiModelProperty(example = "2016-01-11T22:01:11+02:00", value = "")
public OffsetDateTime getStop() {
return stop;
}
public void setStop(OffsetDateTime stop) {
this.stop = stop;
}
public BroadcastDataInput genreId(Long genreId) {
this.genreId = genreId;
return this;
}
/**
* Get genreId
* @return genreId
**/
@ApiModelProperty(example = "2611", value = "")
public Long getGenreId() {
return genreId;
}
public void setGenreId(Long genreId) {
this.genreId = genreId;
}
public BroadcastDataInput description(String description) {
this.description = description;
return this;
}
/**
* Get description
* @return description
**/
@ApiModelProperty(example = "FooBar BarFoo", value = "")
public String getDescription() {
return description;
}
public void setDescription(String description) {
this.description = description;
}
public BroadcastDataInput shortName(String shortName) {
this.shortName = shortName;
return this;
}
/**
* Get shortName
* @return shortName
**/
@ApiModelProperty(example = "foobar", value = "")
public String getShortName() {
return shortName;
}
public void setShortName(String shortName) {
this.shortName = shortName;
}
public BroadcastDataInput mediumName(String mediumName) {
this.mediumName = mediumName;
return this;
}
/**
* Get mediumName
* @return mediumName
**/
@ApiModelProperty(example = "foobarshow", value = "")
public String getMediumName() {
return mediumName;
}
public void setMediumName(String mediumName) {
this.mediumName = mediumName;
}
public BroadcastDataInput website(String website) {
this.website = website;
return this;
}
/**
* Get website
* @return website
**/
@ApiModelProperty(example = "http://example.com/", value = "")
public String getWebsite() {
return website;
}
public void setWebsite(String website) {
this.website = website;
}
public BroadcastDataInput email(String email) {
this.email = email;
return this;
}
/**
* Get email
* @return email
**/
@ApiModelProperty(example = "<EMAIL>", value = "")
public String getEmail() {
return email;
}
public void setEmail(String email) {
this.email = email;
}
public BroadcastDataInput recommended(Boolean recommended) {
this.recommended = recommended;
return this;
}
/**
* Get recommended
* @return recommended
**/
@ApiModelProperty(example = "true", value = "")
public Boolean isRecommended() {
return recommended;
}
public void setRecommended(Boolean recommended) {
this.recommended = recommended;
}
public BroadcastDataInput language(String language) {
this.language = language;
return this;
}
/**
* Get language
* @return language
**/
@ApiModelProperty(example = "English", value = "")
public String getLanguage() {
return language;
}
public void setLanguage(String language) {
this.language = language;
}
public BroadcastDataInput published(Boolean published) {
this.published = published;
return this;
}
/**
* Get published
* @return published
**/
@ApiModelProperty(example = "true", value = "")
public Boolean isPublished() {
return published;
}
public void setPublished(Boolean published) {
this.published = published;
}
public BroadcastDataInput repetitionUid(String repetitionUid) {
this.repetitionUid = repetitionUid;
return this;
}
/**
* Get repetitionUid
* @return repetitionUid
**/
@ApiModelProperty(example = "1234abcd", value = "")
public String getRepetitionUid() {
return repetitionUid;
}
public void setRepetitionUid(String repetitionUid) {
this.repetitionUid = repetitionUid;
}
public BroadcastDataInput repetitionType(RepetitionTypeEnum repetitionType) {
this.repetitionType = repetitionType;
return this;
}
/**
* Get repetitionType
* @return repetitionType
**/
@ApiModelProperty(example = "1 week", value = "")
public RepetitionTypeEnum getRepetitionType() {
return repetitionType;
}
public void setRepetitionType(RepetitionTypeEnum repetitionType) {
this.repetitionType = repetitionType;
}
public BroadcastDataInput repetitionEnd(OffsetDateTime repetitionEnd) {
this.repetitionEnd = repetitionEnd;
return this;
}
/**
* Get repetitionEnd
* @return repetitionEnd
**/
@ApiModelProperty(example = "2016-01-11T22:01:11+02:00", value = "")
public OffsetDateTime getRepetitionEnd() {
return repetitionEnd;
}
public void setRepetitionEnd(OffsetDateTime repetitionEnd) {
this.repetitionEnd = repetitionEnd;
}
public BroadcastDataInput repetitionStart(OffsetDateTime repetitionStart) {
this.repetitionStart = repetitionStart;
return this;
}
/**
* Get repetitionStart
* @return repetitionStart
**/
@ApiModelProperty(example = "2016-01-11T22:01:11+02:00", value = "")
public OffsetDateTime getRepetitionStart() {
return repetitionStart;
}
public void setRepetitionStart(OffsetDateTime repetitionStart) {
this.repetitionStart = repetitionStart;
}
public BroadcastDataInput repetitionDays(String repetitionDays) {
this.repetitionDays = repetitionDays;
return this;
}
/**
* Get repetitionDays
* @return repetitionDays
**/
@ApiModelProperty(value = "")
public String getRepetitionDays() {
return repetitionDays;
}
public void setRepetitionDays(String repetitionDays) {
this.repetitionDays = repetitionDays;
}
public BroadcastDataInput ptyCodeId(Long ptyCodeId) {
this.ptyCodeId = ptyCodeId;
return this;
}
/**
* Get ptyCodeId
* @return ptyCodeId
**/
@ApiModelProperty(example = "1", value = "")
public Long getPtyCodeId() {
return ptyCodeId;
}
public void setPtyCodeId(Long ptyCodeId) {
this.ptyCodeId = ptyCodeId;
}
public BroadcastDataInput tags(List<Integer> tags) {
this.tags = tags;
return this;
}
public BroadcastDataInput addTagsItem(Integer tagsItem) {
if (this.tags == null) {
this.tags = new ArrayList<>();
}
this.tags.add(tagsItem);
return this;
}
/**
* Get tags
* @return tags
**/
@ApiModelProperty(value = "")
public List<Integer> getTags() {
return tags;
}
public void setTags(List<Integer> tags) {
this.tags = tags;
}
public BroadcastDataInput presenters(List<Integer> presenters) {
this.presenters = presenters;
return this;
}
public BroadcastDataInput addPresentersItem(Integer presentersItem) {
if (this.presenters == null) {
this.presenters = new ArrayList<>();
}
this.presenters.add(presentersItem);
return this;
}
/**
* Get presenters
* @return presenters
**/
@ApiModelProperty(value = "")
public List<Integer> getPresenters() {
return presenters;
}
public void setPresenters(List<Integer> presenters) {
this.presenters = presenters;
}
@Override
public boolean equals(java.lang.Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
BroadcastDataInput broadcastDataInput = (BroadcastDataInput) o;
return Objects.equals(this.programId, broadcastDataInput.programId) &&
Objects.equals(this.modelTypeId, broadcastDataInput.modelTypeId) &&
Objects.equals(this.stationId, broadcastDataInput.stationId) &&
Objects.equals(this.fieldValues, broadcastDataInput.fieldValues) &&
Objects.equals(this.title, broadcastDataInput.title) &&
Objects.equals(this.start, broadcastDataInput.start) &&
Objects.equals(this.stop, broadcastDataInput.stop) &&
Objects.equals(this.genreId, broadcastDataInput.genreId) &&
Objects.equals(this.description, broadcastDataInput.description) &&
Objects.equals(this.shortName, broadcastDataInput.shortName) &&
Objects.equals(this.mediumName, broadcastDataInput.mediumName) &&
Objects.equals(this.website, broadcastDataInput.website) &&
Objects.equals(this.email, broadcastDataInput.email) &&
Objects.equals(this.recommended, broadcastDataInput.recommended) &&
Objects.equals(this.language, broadcastDataInput.language) &&
Objects.equals(this.published, broadcastDataInput.published) &&
Objects.equals(this.repetitionUid, broadcastDataInput.repetitionUid) &&
Objects.equals(this.repetitionType, broadcastDataInput.repetitionType) &&
Objects.equals(this.repetitionEnd, broadcastDataInput.repetitionEnd) &&
Objects.equals(this.repetitionStart, broadcastDataInput.repetitionStart) &&
Objects.equals(this.repetitionDays, broadcastDataInput.repetitionDays) &&
Objects.equals(this.ptyCodeId, broadcastDataInput.ptyCodeId) &&
Objects.equals(this.tags, broadcastDataInput.tags) &&
Objects.equals(this.presenters, broadcastDataInput.presenters);
}
@Override
public int hashCode() {
return Objects.hash(programId, modelTypeId, stationId, fieldValues, title, start, stop, genreId, description, shortName, mediumName, website, email, recommended, language, published, repetitionUid, repetitionType, repetitionEnd, repetitionStart, repetitionDays, ptyCodeId, tags, presenters);
}
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("class BroadcastDataInput {\n");
sb.append(" programId: ").append(toIndentedString(programId)).append("\n");
sb.append(" modelTypeId: ").append(toIndentedString(modelTypeId)).append("\n");
sb.append(" stationId: ").append(toIndentedString(stationId)).append("\n");
sb.append(" fieldValues: ").append(toIndentedString(fieldValues)).append("\n");
sb.append(" title: ").append(toIndentedString(title)).append("\n");
sb.append(" start: ").append(toIndentedString(start)).append("\n");
sb.append(" stop: ").append(toIndentedString(stop)).append("\n");
sb.append(" genreId: ").append(toIndentedString(genreId)).append("\n");
sb.append(" description: ").append(toIndentedString(description)).append("\n");
sb.append(" shortName: ").append(toIndentedString(shortName)).append("\n");
sb.append(" mediumName: ").append(toIndentedString(mediumName)).append("\n");
sb.append(" website: ").append(toIndentedString(website)).append("\n");
sb.append(" email: ").append(toIndentedString(email)).append("\n");
sb.append(" recommended: ").append(toIndentedString(recommended)).append("\n");
sb.append(" language: ").append(toIndentedString(language)).append("\n");
sb.append(" published: ").append(toIndentedString(published)).append("\n");
sb.append(" repetitionUid: ").append(toIndentedString(repetitionUid)).append("\n");
sb.append(" repetitionType: ").append(toIndentedString(repetitionType)).append("\n");
sb.append(" repetitionEnd: ").append(toIndentedString(repetitionEnd)).append("\n");
sb.append(" repetitionStart: ").append(toIndentedString(repetitionStart)).append("\n");
sb.append(" repetitionDays: ").append(toIndentedString(repetitionDays)).append("\n");
sb.append(" ptyCodeId: ").append(toIndentedString(ptyCodeId)).append("\n");
sb.append(" tags: ").append(toIndentedString(tags)).append("\n");
sb.append(" presenters: ").append(toIndentedString(presenters)).append("\n");
sb.append("}");
return sb.toString();
}
/**
* Convert the given object to string with each line indented by 4 spaces
* (except the first line).
*/
private String toIndentedString(java.lang.Object o) {
if (o == null) {
return "null";
}
return o.toString().replace("\n", "\n ");
}
}
|
<reponame>weareopensource/roMEANet-SOOS
(function () {
'use strict';
angular
.module('core')
.controller('SidenavController', SidenavController);
SidenavController.$inject = ['$scope', '$state', 'Authentication', 'sideNavs'];
function SidenavController($scope, $state, Authentication, sideNavs) {
var vm = this;
vm.authentication = Authentication;
vm.$state = $state;
vm.sideNav = sideNavs.getSideNav('sidebar');
// close mobile menu when user click
vm.menuClick = function () {
vm.menuToogle = false;
};
/* Initial sideNav, open or not */
if (localStorage.sideblock) {
vm.sideblock = (localStorage.sideblock === 'true');
} else {
vm.sideblock = false;
localStorage.setItem('sideblock', vm.sideblock);
}
if (vm.sideblock) $('.wrapper').toggleClass('toggled');
vm.setSideblock = function () {
vm.sideblock = !vm.sideblock;
localStorage.sideblock = vm.sideblock;
$('.wrapper').toggleClass('toggled');
};
}
}());
|
public class Bird {
private String name;
private int wingspan;
private int age;
public Bird(String name, int wingspan, int age) {
this.name = name;
this.wingspan = wingspan;
this.age = age;
}
public String getName() {
return name;
}
public int getWingspan() {
return wingspan;
}
public int getAge() {
return age;
}
} |
<gh_stars>0
package gov.samhsa.c2s.trypolicy.service.dto;
import lombok.Data;
import java.nio.charset.StandardCharsets;
@Data
public class UploadedDocumentDto {
private Long id;
private String patientMrn;
private byte[] contents;
private String fileName;
private String documentName;
private String contentType;
private String description;
private Long documentTypeCodeId;
private String documentTypeDisplayName;
@Override
public String toString() {
return this.contents == null ? "" : new String(this.getContents(), StandardCharsets.UTF_8);
}
} |
package com.ddd_example.domain;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.stream.Collectors;
/**
 * A shopping cart that merges duplicate items by quantity and keeps a
 * history of removed items.
 */
public class Cart {
    private final List<CartItem> cartItems = new ArrayList<>();
    private final List<CartItem> removedCartItems = new ArrayList<>();
    private boolean isCheckedOut = false;

    /** @return whether {@link #checkout()} has been called on this cart. */
    public boolean isCheckedOut() {
        return isCheckedOut;
    }

    /** Adds each of the given items, merging quantities with existing entries. */
    public void addToCart(CartItem... cartItems) {
        Arrays.asList(cartItems).forEach(this::addToCart);
    }

    /**
     * Adds a single item. If an equal item is already present, its quantity
     * is incremented; otherwise the item is appended.
     */
    public void addToCart(CartItem cartItem) {
        List<CartItem> match = this.cartItems.stream()
                .filter(item -> item.equals(cartItem))
                .collect(Collectors.toList());
        if (!match.isEmpty()) {
            // The matched element is the one already stored in the list, so
            // incrementing its quantity mutates it in place; the previous
            // remove/re-insert at the same index was a no-op and is dropped.
            match.get(0).incrementQuantity(cartItem.getQuantity());
        } else {
            this.cartItems.add(cartItem);
        }
    }

    /**
     * Removes the given item from the cart and records it in the
     * removed-items history. A no-op when the item is not present
     * (previously this called get(-1) and threw IndexOutOfBoundsException).
     */
    public void removeFromCart(CartItem cartItem) {
        int index = this.cartItems.indexOf(cartItem);
        if (index < 0) {
            return; // item not in cart; nothing to remove
        }
        CartItem removedCartItem = this.cartItems.get(index);
        addToRemovedList(removedCartItem);
        this.cartItems.remove(cartItem);
    }

    /** Records an item in the removed-items history. */
    private void addToRemovedList(CartItem cartItem) {
        this.removedCartItems.add(cartItem);
    }

    /** Prints the current cart contents to stdout. */
    public void summary() {
        System.out.println("=====Cart Summary=======");
        cartItems.forEach(System.out::println);
    }

    /** Prints the removed-items history to stdout. */
    public void removedItemsSummary() {
        System.out.println("======Removed Cart Summary======");
        removedCartItems.forEach(System.out::println);
    }

    /** Marks the cart as checked out. */
    public void checkout() {
        this.isCheckedOut = true;
    }

    /** @return a defensive copy of the current cart contents. */
    public List<CartItem> getCartItems() {
        return new ArrayList<>(this.cartItems);
    }
}
|
from enum import Enum
# Display names for the two buildable improvement cards.
HOUSE = 'House'
HOTEL = 'Hotel'


class Card:
    """Base type for every card in the deck: a stable deck index plus a cash value."""

    def __init__(self, index, value):
        self.index = index  # unique position/id of the card within the deck
        self.value = value  # monetary value of the card
class Cashable(Card):
    """Marker subclass for cards that can be banked as money (cash, action, rent)."""
    pass
class CashCard(Cashable):
    """Pure money card; its only use is being banked for its face value."""

    def __repr__(self):
        return '<CashCard (${})>'.format(self.value)
class ActionCard(Cashable):
    """Playable action card; may alternatively be banked for its cash value."""

    def __init__(self, index, value, action_type, description):
        super().__init__(index, value)
        self.action_type = action_type  # an ActionType member
        self.description = description  # rules text shown to the player

    def __repr__(self):
        return '<ActionCard: {} (${})>'.format(self.action_type.value, self.value)
class PropertyCard(Card):
    """A property (or building/wild property) that can be laid down in a set."""

    def __init__(self, index, value, name, colors, rent, buildable):
        super().__init__(index, value)
        self.name = name            # display name of the property
        self.colors = colors        # set of Color members this card can count as
        self.rent = rent            # rent ladder, indexed by set size
        self.buildable = buildable  # whether buildings may be added

    def __repr__(self):
        return '<PropertyCard: {} (${})>'.format(self.name, self.value)
class RentCard(Cashable):
    """Charges rent for the listed colors; the wild variant targets one player."""

    def __init__(self, index, value, colors, wild):
        super().__init__(index, value)
        self.colors = colors  # Set of Color members the rent applies to
        self.wild = wild      # Boolean - True for the any-color, single-target variant

    def __repr__(self):
        return '<RentCard: {} (${})>'.format(self.colors, self.value)
class ActionType(Enum):
    """Kinds of playable action cards; values are the printed card titles."""
    BDAY = "It's my birthday!"
    DOUBLE_THE_RENT = "Double the Rent"
    DEAL_BREAKER = "Deal Breaker"
    JUST_SAY_NO = "Just Say No!"
    DEBT_COLLECTOR = "Debt Collector"
    SLY_DEAL = "Sly Deal"
    FORCED_DEAL = "Forced Deal"
    PASS_GO = "Pass Go"
class Color(Enum):
    """Property set colors. RR/UTIL are the railroad/utility pseudo-colors;
    ALL marks fully wild cards."""
    RED = "red"
    DBLUE = "darkblue"
    LBLUE = "lightblue"
    PURPLE = "purple"
    GREEN = "green"
    ORANGE = "orange"
    YELLOW = "yellow"
    BROWN = "brown"
    RR = "railroad"
    UTIL = "utility"
    ALL = "all"

    def __repr__(self):
        # Show the plain color string in card reprs instead of <Color.RED: 'red'>.
        return self.value
# The full deck, keyed by card index (1-108).
deck = {
    # Buildings (houses and hotels).
    1: PropertyCard(1, 3, HOUSE, {Color.ALL}, [], False),
    2: PropertyCard(2, 3, HOUSE, {Color.ALL}, [], False),
    3: PropertyCard(3, 3, HOUSE, {Color.ALL}, [], False),
    4: PropertyCard(4, 4, HOTEL, {Color.ALL}, [], False),
    5: PropertyCard(5, 4, HOTEL, {Color.ALL}, [], False),
    6: PropertyCard(6, 4, HOTEL, {Color.ALL}, [], False),
    # Action cards.
    7: ActionCard(7, 2, ActionType.BDAY, "All players give you $2M as a gift."),
    8: ActionCard(8, 2, ActionType.BDAY, "All players give you $2M as a gift."),
    9: ActionCard(9, 2, ActionType.BDAY, "All players give you $2M as a gift."),
    10: ActionCard(10, 1, ActionType.DOUBLE_THE_RENT, "Needs to be played with a rent card."),
    11: ActionCard(11, 1, ActionType.DOUBLE_THE_RENT, "Needs to be played with a rent card."),
    12: ActionCard(12, 5, ActionType.DEAL_BREAKER, "Steal a complete set from any player (includes any buildings)"),
    13: ActionCard(13, 5, ActionType.DEAL_BREAKER, "Steal a complete set from any player (includes any buildings)"),
    14: ActionCard(14, 4, ActionType.JUST_SAY_NO, "Use any time when an action card is played against you."),
    15: ActionCard(15, 4, ActionType.JUST_SAY_NO, "Use any time when an action card is played against you."),
    16: ActionCard(16, 4, ActionType.JUST_SAY_NO, "Use any time when an action card is played against you."),
    17: ActionCard(17, 3, ActionType.DEBT_COLLECTOR, "Force any player to pay you $5M"),
    18: ActionCard(18, 3, ActionType.DEBT_COLLECTOR, "Force any player to pay you $5M"),
    19: ActionCard(19, 3, ActionType.DEBT_COLLECTOR, "Force any player to pay you $5M"),
    20: ActionCard(20, 3, ActionType.SLY_DEAL, "Steal a property from a player of your choice (cannot be a part of a full set)!"),
    21: ActionCard(21, 3, ActionType.SLY_DEAL, "Steal a property from a player of your choice (cannot be a part of a full set)!"),
    22: ActionCard(22, 3, ActionType.SLY_DEAL, "Steal a property from a player of your choice (cannot be a part of a full set)!"),
    23: ActionCard(23, 3, ActionType.FORCED_DEAL, "Swap any property with another player (cannot be part of a full set)!"),
    24: ActionCard(24, 3, ActionType.FORCED_DEAL, "Swap any property with another player (cannot be part of a full set)!"),
    25: ActionCard(25, 3, ActionType.FORCED_DEAL, "Swap any property with another player (cannot be part of a full set)!"),
    26: ActionCard(26, 3, ActionType.FORCED_DEAL, "Swap any property with another player (cannot be part of a full set)!"),
    27: ActionCard(27, 1, ActionType.PASS_GO, "Draw two extra cards!"),
    28: ActionCard(28, 1, ActionType.PASS_GO, "Draw two extra cards!"),
    29: ActionCard(29, 1, ActionType.PASS_GO, "Draw two extra cards!"),
    30: ActionCard(30, 1, ActionType.PASS_GO, "Draw two extra cards!"),
    31: ActionCard(31, 1, ActionType.PASS_GO, "Draw two extra cards!"),
    32: ActionCard(32, 1, ActionType.PASS_GO, "Draw two extra cards!"),
    33: ActionCard(33, 1, ActionType.PASS_GO, "Draw two extra cards!"),
    34: ActionCard(34, 1, ActionType.PASS_GO, "Draw two extra cards!"),
    35: ActionCard(35, 1, ActionType.PASS_GO, "Draw two extra cards!"),
    36: ActionCard(36, 1, ActionType.PASS_GO, "Draw two extra cards!"),
    # Named properties. Rent lists mirror property_set_rents below.
    37: PropertyCard(37, 2, "Electric Company", {Color.UTIL}, [1, 2], True),
    38: PropertyCard(38, 2, "Waterworks", {Color.UTIL}, [1, 2], True),
    39: PropertyCard(39, 2, "Pennsylvania Railroad", {Color.RR}, [1, 2, 3, 4], True),
    40: PropertyCard(40, 2, "Reading Railroad", {Color.RR}, [1, 2, 3, 4], True),
    41: PropertyCard(41, 2, "B. & O. Railroad", {Color.RR}, [1, 2, 3, 4], True),
    42: PropertyCard(42, 2, "Short Line Railroad", {Color.RR}, [1, 2, 3, 4], True),
    43: PropertyCard(43, 1, "Baltic Avenue", {Color.BROWN}, [1, 2], True),
    44: PropertyCard(44, 1, "Mediterranean Avenue", {Color.BROWN}, [1, 2], True),
    45: PropertyCard(45, 1, "Oriental Avenue", {Color.LBLUE}, [1, 2, 3], True),
    46: PropertyCard(46, 1, "Connecticut Avenue", {Color.LBLUE}, [1, 2, 3], True),
    47: PropertyCard(47, 1, "Vermont Avenue", {Color.LBLUE}, [1, 2, 3], True),
    48: PropertyCard(48, 2, "States Avenue", {Color.PURPLE}, [1, 2, 4], True),
    49: PropertyCard(49, 2, "Virginia Avenue", {Color.PURPLE}, [1, 2, 4], True),
    50: PropertyCard(50, 2, "St. Charles Place", {Color.PURPLE}, [1, 2, 4], True),
    51: PropertyCard(51, 2, "St. James Place", {Color.ORANGE}, [1, 3, 5], True),
    52: PropertyCard(52, 2, "Tennessee Avenue", {Color.ORANGE}, [1, 3, 5], True),
    53: PropertyCard(53, 2, "New York Avenue", {Color.ORANGE}, [1, 3, 5], True),
    54: PropertyCard(54, 3, "Indiana Avenue", {Color.RED}, [2, 3, 6], True),
    55: PropertyCard(55, 3, "Illinois Avenue", {Color.RED}, [2, 3, 6], True),
    56: PropertyCard(56, 3, "Kentucky Avenue", {Color.RED}, [2, 3, 6], True),
    57: PropertyCard(57, 3, "Atlantic Avenue", {Color.YELLOW}, [2, 4, 6], True),
    # NOTE(review): "<NAME>" is a redaction artifact (likely "Marvin Gardens") —
    # restore from an authoritative card list before shipping.
    58: PropertyCard(58, 3, "<NAME>", {Color.YELLOW}, [2, 4, 6], True),
    59: PropertyCard(59, 3, "Ventnor Avenue", {Color.YELLOW}, [2, 4, 6], True),
    60: PropertyCard(60, 4, "Pennsylvania Avenue", {Color.GREEN}, [2, 4, 7], True),
    61: PropertyCard(61, 4, "Pacific Avenue", {Color.GREEN}, [2, 4, 7], True),
    62: PropertyCard(62, 4, "North Carolina Avenue", {Color.GREEN}, [2, 4, 7], True),
    63: PropertyCard(63, 4, "Park Place", {Color.DBLUE}, [3, 8], True),
    64: PropertyCard(64, 4, "Boardwalk", {Color.DBLUE}, [3, 8], True),
    # Wild properties.
    # NOTE(review): {Color.ALL, Color.ALL} collapses to a one-element set —
    # presumably {Color.ALL} was intended; confirm and simplify.
    65: PropertyCard(65, 0, "Wild", {Color.ALL, Color.ALL}, [], True),
    66: PropertyCard(66, 0, "Wild", {Color.ALL, Color.ALL}, [], True),
    67: PropertyCard(67, 4, "Wild", {Color.RR, Color.LBLUE}, [], True),
    68: PropertyCard(68, 2, "Wild", {Color.RR, Color.UTIL}, [], True),
    69: PropertyCard(69, 4, "Wild", {Color.RR, Color.GREEN}, [], True),
    70: PropertyCard(70, 4, "Wild", {Color.GREEN, Color.DBLUE}, [], True),
    71: PropertyCard(71, 3, "Wild", {Color.YELLOW, Color.RED}, [], True),
    72: PropertyCard(72, 3, "Wild", {Color.YELLOW, Color.RED}, [], True),
    73: PropertyCard(73, 1, "Wild", {Color.LBLUE, Color.BROWN}, [], True),
    74: PropertyCard(74, 2, "Wild", {Color.PURPLE, Color.ORANGE}, [], True),
    75: PropertyCard(75, 2, "Wild", {Color.PURPLE, Color.ORANGE}, [], True),
    # Rent cards (two-color and fully wild).
    76: RentCard(76, 1, {Color.BROWN, Color.LBLUE}, False),
    77: RentCard(77, 1, {Color.BROWN, Color.LBLUE}, False),
    78: RentCard(78, 1, {Color.RED, Color.YELLOW}, False),
    79: RentCard(79, 1, {Color.RED, Color.YELLOW}, False),
    80: RentCard(80, 1, {Color.GREEN, Color.DBLUE}, False),
    81: RentCard(81, 1, {Color.GREEN, Color.DBLUE}, False),
    82: RentCard(82, 1, {Color.RR, Color.UTIL}, False),
    83: RentCard(83, 1, {Color.RR, Color.UTIL}, False),
    84: RentCard(84, 1, {Color.PURPLE, Color.ORANGE}, False),
    85: RentCard(85, 1, {Color.PURPLE, Color.ORANGE}, False),
    86: RentCard(86, 3, {Color.ALL}, True),
    87: RentCard(87, 3, {Color.ALL}, True),
    88: RentCard(88, 3, {Color.ALL}, True),
    # Cash cards.
    89: CashCard(89, 1),
    90: CashCard(90, 1),
    91: CashCard(91, 1),
    92: CashCard(92, 1),
    93: CashCard(93, 1),
    94: CashCard(94, 1),
    95: CashCard(95, 2),
    96: CashCard(96, 2),
    97: CashCard(97, 2),
    98: CashCard(98, 2),
    99: CashCard(99, 2),
    100: CashCard(100, 3),
    101: CashCard(101, 3),
    102: CashCard(102, 3),
    103: CashCard(103, 4),
    104: CashCard(104, 4),
    105: CashCard(105, 4),
    106: CashCard(106, 5),
    107: CashCard(107, 5),
    108: CashCard(108, 10)
}
# Rent ladder for a full set of each color, indexed by set size minus one.
# NOTE(review): duplicates the rent lists embedded in the PropertyCards above —
# keep the two in sync (or derive one from the other).
property_set_rents = {
    Color.UTIL: [1, 2],
    Color.RR: [1, 2, 3, 4],
    Color.BROWN: [1, 2],
    Color.LBLUE: [1, 2, 3],
    Color.PURPLE: [1, 2, 4],
    Color.ORANGE: [1, 3, 5],
    Color.RED: [2, 3, 6],
    Color.YELLOW: [2, 4, 6],
    Color.GREEN: [2, 4, 7],
    Color.DBLUE: [3, 8]
}
package vec
import "math/cmplx"
// Mass accumulates a running sum of Values together with how many values
// have been folded in, so the mean (Center) can be recovered later.
type Mass struct {
	Sum Value // componentwise sum of the accumulated values
	N   int   // number of values accumulated
}
// Combine merges two accumulators by adding their sums and counts.
func Combine(m, n Mass) Mass {
	return Mass{
		Sum: m.Sum + n.Sum,
		N:   m.N + n.N,
	}
}
// Subtract removes accumulator n's contribution from m.
func Subtract(m, n Mass) Mass {
	return Mass{
		Sum: m.Sum - n.Sum,
		N:   m.N - n.N,
	}
}
// Center returns the mean of the accumulated values (Sum scaled by 1/N).
// When nothing has been accumulated (N <= 0) it returns NaN.
func (m Mass) Center() Value {
	if m.N <= 0 {
		return Value(cmplx.NaN())
	}
	return m.Sum.Scale(1 / float64(m.N))
}
|
use std::ffi::CStr;
/// Validates `input` against `pattern`, where `pattern` must start with the
/// `r#` prefix; the remainder is compiled as a regular expression anchored to
/// match the entire input.
///
/// Returns `false` for patterns without the `r#` prefix or for bodies that
/// fail to compile as a regex.
fn validate_pattern(pattern: &str, input: &str) -> bool {
    let pattern_start = "r#";
    if let Some(body) = pattern.strip_prefix(pattern_start) {
        // Bug fix: the previous implementation escaped `\` and `"` before
        // compiling, which turned escape sequences such as `\n` into the
        // literal two characters `\\n` and broke matching against real
        // newlines (the asserts in `main` failed). The body is already a
        // regex, so compile it verbatim.
        let anchored = format!("^{}$", body); // anchor to the whole input
        regex::Regex::new(&anchored)
            .map(|re| re.is_match(input))
            .unwrap_or(false)
    } else {
        false // Invalid pattern format, return false
    }
}
fn main() {
    // Plain literal: pattern body matches the input exactly.
    let pattern = "r#foobar";
    let input = "foobar";
    assert!(validate_pattern(pattern, input));
    // Regex escape `\n` should match a real newline in the input.
    let pattern = r#"r#foo\nbar"#;
    let input = "foo\nbar";
    assert!(validate_pattern(pattern, input));
    // Escaped backslash `\\` should match a literal backslash.
    let pattern = r#"r#foo\\bar"#;
    let input = "foo\\bar";
    assert!(validate_pattern(pattern, input));
    // Double quote is not a regex metacharacter; matches literally.
    let pattern = r#"r#foo"bar"#;
    let input = "foo\"bar";
    assert!(validate_pattern(pattern, input));
    // Valid pattern that simply does not match the input.
    let pattern = "r#invalidpattern";
    let input = "foobar";
    assert!(!validate_pattern(pattern, input));
}
package repository
import "context"
// HelpersRepo abstracts helper operations for its implementers.
type HelpersRepo interface {
	// Tes reports a boolean result for the given context.
	// NOTE(review): name looks like a typo of "Test", but renaming would
	// break implementers — confirm before changing.
	Tes(ctx context.Context) bool
}
|
<reponame>ae0000/dbdiff
package main
import (
"testing"
)
// Paths to the SQL dump fixtures compared by the tests below.
const (
	addressSQLProd = "./sql/address_prod.sql"
	addressSQLDev  = "./sql/address_dev.sql"
	prod           = "./sql/prod.sql"
	dev            = "./sql/dev.sql"
)
// TestAddress diffs the prod and dev SQL fixtures.
func TestAddress(t *testing.T) {
	diff(addressSQLProd, addressSQLDev)
	diff(prod, dev)
	// Deliberate failure so `go test` always prints the diff output above.
	// NOTE(review): looks like leftover debug scaffolding — confirm intent.
	t.Error("ha")
}
|
#!/bin/bash
# Yangwenhao 2019-12-16 20:27
# PLDA back-end scoring for speaker-verification x-vectors (Kaldi tooling):
# computes the global mean, an LDA transform, trains a PLDA model, scores the
# VoxCeleb1 test trials, and reports EER / minDCF.
model=SuResCNN10
train_cmd="Vector_Score/run.pl --mem 8G"
logdir=Log/PLDA/${model}
feat_dir=Data/checkpoint/${model}/soft/kaldi_feat
data_dir=Data/dataset/voxceleb1/kaldi_feat/voxceleb1_test
trials=$data_dir/trials
test_score=$feat_dir/scores_voxceleb1_test
# Global mean of the training x-vectors (used later for mean subtraction).
$train_cmd $logdir/ivector-mean.log \
ivector-mean scp:$feat_dir/train_xvector.scp $feat_dir/mean.vec || exit 1;
# LDA transform (200 dims) estimated on mean-subtracted training vectors.
$train_cmd $logdir/ivector-compute-lda.log \
ivector-compute-lda --total-covariance-factor=0.0 --dim=200 "ark:ivector-subtract-global-mean scp:$feat_dir/train_xvector.scp ark:- |" ark:$feat_dir/utt2spk $feat_dir/transform.mat || exit 1;
Score/utt2spk_to_spk2utt.pl $feat_dir/utt2spk > $feat_dir/spk2utt
# PLDA model trained on mean-subtracted, LDA-projected, length-normalized vectors.
$train_cmd $logdir/ivector-compute-plda.log \
ivector-compute-plda ark:$feat_dir/spk2utt "ark:ivector-subtract-global-mean scp:$feat_dir/train_xvector.scp ark:- | transform-vec $feat_dir/transform.mat ark:- ark:- | ivector-normalize-length ark:- ark:- |" $feat_dir/plda || exit 1;
# Score every trial pair; enrollment and test sides share the same pipeline.
$train_cmd $logdir/ivector-plda-scoring.log \
ivector-plda-scoring --normalize-length=true \
"ivector-copy-plda --smoothing=0.0 $feat_dir/plda - |" \
"ark:ivector-subtract-global-mean $feat_dir/mean.vec scp:$feat_dir/test_xvector.scp ark:- | transform-vec $feat_dir/transform.mat ark:- ark:- | ivector-normalize-length ark:- ark:- |" \
"ark:ivector-subtract-global-mean $feat_dir/mean.vec scp:$feat_dir/test_xvector.scp ark:- | transform-vec $feat_dir/transform.mat ark:- ark:- | ivector-normalize-length ark:- ark:- |" \
"cat '$trials' | cut -d\ --fields=1,2 |" $feat_dir/scores_voxceleb1_test || exit 1;
# Evaluation metrics: equal error rate and minimum detection cost.
eer=`compute-eer <(Score/prepare_for_eer.py $trials $test_score) 2> /dev/null`
mindcf1=`Score/compute_min_dcf.py --p-target 0.01 $test_score $trials 2> /dev/null`
mindcf2=`Score/compute_min_dcf.py --p-target 0.001 $test_score $trials 2> /dev/null`
echo "EER: $eer%"
echo "minDCF(p-target=0.01): $mindcf1"
echo "minDCF(p-target=0.001): $mindcf2"
<reponame>nimbus-cloud/cli
package service_test
import (
"cf/api"
. "cf/commands/service"
"cf/models"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
testapi "testhelpers/api"
testassert "testhelpers/assert"
testcmd "testhelpers/commands"
testconfig "testhelpers/configuration"
testreq "testhelpers/requirements"
testterm "testhelpers/terminal"
)
// callCreateService drives the create-service command with the given CLI
// args, scripted user inputs, and a fake service repository, returning the
// fake UI so callers can inspect the produced output.
func callCreateService(args []string, inputs []string, serviceRepo api.ServiceRepository) (fakeUI *testterm.FakeUI) {
	fakeUI = &testterm.FakeUI{Inputs: inputs}
	ctxt := testcmd.NewContext("create-service", args)
	config := testconfig.NewRepositoryWithDefaults()
	cmd := NewCreateService(fakeUI, config, serviceRepo)
	reqFactory := &testreq.FakeReqFactory{}
	testcmd.RunCommand(cmd, ctxt, reqFactory)
	return
}
var _ = Describe("Testing with ginkgo", func() {
	// Happy path: the offering/plan exist, so the instance is created with
	// the matching plan GUID.
	It("TestCreateService", func() {
		offering := models.ServiceOffering{}
		offering.Label = "cleardb"
		plan := models.ServicePlanFields{}
		plan.Name = "spark"
		plan.Guid = "cleardb-spark-guid"
		offering.Plans = []models.ServicePlanFields{plan}
		offering2 := models.ServiceOffering{}
		offering2.Label = "postgres"
		serviceRepo := &testapi.FakeServiceRepo{}
		serviceRepo.GetAllServiceOfferingsReturns.ServiceOfferings = []models.ServiceOffering{
			offering,
			offering2,
		}
		ui := callCreateService([]string{"cleardb", "spark", "my-cleardb-service"},
			[]string{},
			serviceRepo,
		)
		testassert.SliceContains(ui.Outputs, testassert.Lines{
			{"Creating service", "my-cleardb-service", "my-org", "my-space", "my-user"},
			{"OK"},
		})
		Expect(serviceRepo.CreateServiceInstanceName).To(Equal("my-cleardb-service"))
		Expect(serviceRepo.CreateServiceInstancePlanGuid).To(Equal("cleardb-spark-guid"))
	})
	// Duplicate-name path: the repo reports the instance already exists and
	// the command surfaces that to the user while still reporting OK.
	It("TestCreateServiceWhenServiceAlreadyExists", func() {
		offering := models.ServiceOffering{}
		offering.Label = "cleardb"
		plan := models.ServicePlanFields{}
		plan.Name = "spark"
		plan.Guid = "cleardb-spark-guid"
		offering.Plans = []models.ServicePlanFields{plan}
		offering2 := models.ServiceOffering{}
		offering2.Label = "postgres"
		serviceRepo := &testapi.FakeServiceRepo{CreateServiceAlreadyExists: true}
		serviceRepo.GetAllServiceOfferingsReturns.ServiceOfferings = []models.ServiceOffering{offering, offering2}
		ui := callCreateService([]string{"cleardb", "spark", "my-cleardb-service"},
			[]string{},
			serviceRepo,
		)
		testassert.SliceContains(ui.Outputs, testassert.Lines{
			{"Creating service", "my-cleardb-service"},
			{"OK"},
			{"my-cleardb-service", "already exists"},
		})
		Expect(serviceRepo.CreateServiceInstanceName).To(Equal("my-cleardb-service"))
		Expect(serviceRepo.CreateServiceInstancePlanGuid).To(Equal("cleardb-spark-guid"))
	})
})
|
<filename>commands/fun/gender.js
const fetch = require("node-fetch");
/**
 * Queries the genderize.io API for the likely gender of a first name.
 * The name is URI-encoded so spaces or special characters cannot corrupt
 * the query string.
 *
 * @param {string} firstName - name to look up
 * @returns {Promise<object>} parsed JSON response ({ gender, probability, ... })
 */
const runner = firstName =>
  fetch(`https://api.genderize.io/?name=${encodeURIComponent(firstName)}`).then(response =>
    response.json()
  );
// Discord command: guess the gender of a name via genderize.io.
module.exports = {
  runner,
  name: "gender",
  description: "Check gender from a name",
  args: true,
  usage: "john",
  // Replies with the API's best guess, or an "unknown" message when the
  // API returns no gender.
  async execute(message, args) {
    // First whitespace-separated word after the command word.
    // NOTE(review): `args` is ignored in favor of re-parsing message.content — confirm intent.
    const firstName = message.content.split(" ")[1];
    // slice(9) assumes a fixed-length command prefix before the name — TODO confirm.
    const fullName = message.content.slice(9);
    const result = await runner(firstName);
    const unknownMessage = `Hmmm... I can't guess if ${fullName} is a male/female`;
    const genderMessage = `${fullName} is a ${
      result.gender
    }, probability: ${result.probability * 100}%`;
    return message.channel.send(result.gender ? genderMessage : unknownMessage);
  }
};
|
def sort_list(lst):
    """Sort ``lst`` in ascending order, in place, and return it.

    Replaces a hand-written O(n^2) bubble sort with the built-in
    O(n log n) Timsort. Like bubble sort, Timsort is stable and the
    list is still mutated in place, so callers relying on either
    behavior are unaffected.
    """
    lst.sort()
    return lst
#!/bin/bash
# Cross-version compatibility harness for dolt: downloads each released
# binary listed in test_files/dolt_versions.txt and runs the bats suite in
# both directions (old binary on new repos, new binary on old repos).
set -eo pipefail
# Download and unpack a released dolt binary; echoes the bin directory.
function download_release() {
    ver=$1
    dirname=binaries/"$ver"
    mkdir "$dirname"
    basename=dolt-"$PLATFORM_TUPLE"
    filename="$basename".tar.gz
    filepath=binaries/"$ver"/"$filename"
    url="https://github.com/liquidata-inc/dolt/releases/download/$ver/$filename"
    curl -L -o "$filepath" "$url"
    # cd happens in the caller's command substitution subshell, so the
    # working directory of the main script is unaffected.
    cd "$dirname" && tar zxf "$filename"
    echo "$dirname"/"$basename"/bin
}
# Detect OS/arch and echo the release artifact suffix (e.g. linux-amd64).
get_platform_tuple() {
    OS=$(uname)
    ARCH=$(uname -m)
    if [ "$OS" != Linux -a "$OS" != Darwin ]; then
        echo "tests only support linux or macOS." 1>&2
        exit 1
    fi
    if [ "$ARCH" != x86_64 -a "$ARCH" != i386 -a "$ARCH" != i686 ]; then
        echo "tests only support x86_64 or x86." 1>&2
        exit 1
    fi
    if [ "$OS" == Linux ]; then
        PLATFORM_TUPLE=linux
    else
        PLATFORM_TUPLE=darwin
    fi
    if [ "$ARCH" == x86_64 ]; then
        PLATFORM_TUPLE="$PLATFORM_TUPLE"-amd64
    else
        PLATFORM_TUPLE="$PLATFORM_TUPLE"-386
    fi
    echo "$PLATFORM_TUPLE"
}
PLATFORM_TUPLE=`get_platform_tuple`
# Versions to test; '#'-prefixed lines in the file are comments.
function list_dolt_versions() {
    grep -v '^ *#' < test_files/dolt_versions.txt
}
function cleanup() {
    rm -rf repos binaries
}
mkdir repos binaries
trap cleanup "EXIT"
# Create a test repository under repos/<name> using the dolt on PATH.
function setup_repo() {
    dir=repos/"$1"
    ./test_files/setup_repo.sh "$dir"
}
setup_repo HEAD
# Download one released version and run the bats suite in both directions.
function test_dolt_version() {
    ver=$1
    bin=`download_release "$ver"`
    echo testing "$ver" at "$bin"
    PATH="`pwd`"/"$bin":"$PATH" setup_repo "$ver"
    # Run the bats tests with old dolt version hitting repositories from new dolt version
    PATH="`pwd`"/"$bin":"$PATH" REPO_DIR="`pwd`"/repos/HEAD bats ./test_files/bats
    # Run the bats tests with new dolt version hitting repositories from old dolt version
    REPO_DIR="`pwd`"/repos/"$ver" bats ./test_files/bats
}
list_dolt_versions | while IFS= read -r ver; do
    test_dolt_version "$ver"
done
|
import React, { PureComponent } from 'react';
import moment from 'moment';
import {
Row,
Col,
Card,
Table,
Form,
Button,
Input,
DatePicker,
Checkbox,
Divider,
Modal,
message,
Tabs,
Select,
InputNumber,
Icon,
} from 'antd';
import PageHeaderLayout from '../../../layouts/PageHeaderLayout';
import styles from '../../Dashboard/Analysis.less';
import listStyles from '../List.less';
const { Option } = Select;
const FormItem = Form.Item;
const { confirm } = Modal;
const { RangePicker } = DatePicker;
const { TabPane } = Tabs;
@Form.create()
export default class List extends PureComponent {
  // Visibility flags for the four modals rendered below.
  state = {
    modalVisible: false,      // "create category" modal
    payModalVisible: false,   // payment-template detail modal
    inputModalVisible: false, // system-template detail modal
    upModalVisible: false,    // "edit category" modal
  };
  // Validate the form and close the create/detail modals on success.
  // NOTE(review): nothing is persisted — this only shows a success message.
  handleAdd = (e) => {
    e.preventDefault();
    this.props.form.validateFields((err) => {
      if (!err) {
        message.success('添加成功');
        this.setState({
          modalVisible: false,
          payModalVisible: false,
        });
      }
    });
  }
  // Toggle the "create category" modal; resets form fields.
  handleModalVisible = (flag) => {
    this.setState({
      modalVisible: !!flag,
    });
    this.props.form.resetFields();
  };
  // Hide the payment-template detail modal; resets form fields.
  hiddenModalVisible = (flag) => {
    this.setState({
      payModalVisible: !!flag,
    });
    this.props.form.resetFields();
  };
  // Hide the system-template detail modal; resets form fields.
  hiddenSysModalVisible = (flag) => {
    this.setState({
      inputModalVisible: !!flag,
    });
    this.props.form.resetFields();
  };
  // Show the payment-template detail modal (used as an onClick handler, so
  // `flag` is the click event — !!flag is then true).
  temDetail = (flag) => {
    this.setState({
      payModalVisible: !!flag,
    });
  };
  // Show the system-template detail modal (same event-as-flag pattern).
  sysTemDetail = (flag) => {
    this.setState({
      inputModalVisible: !!flag,
    });
  };
  // Confirm-then-reject deletion (deletion is not supported yet).
  // NOTE(review): "deteteOne" looks like a typo of "deleteOne"; renaming
  // requires updating the JSX reference below as well — confirm.
  deteteOne = () => {
    confirm({
      title: '确认删除吗?',
      onOk() {
        message.error('暂无法删除');
      },
    });
  };
  // Show the "edit category" modal (event-as-flag pattern).
  updateOne = (flag) => {
    this.setState({
      upModalVisible: !!flag,
    });
  };
  // Close the "edit category" modal and show a success message.
  updateOneSu = (flag) => {
    this.setState({
      upModalVisible: !!flag,
    });
    message.success('修改成功');
  };
  render() {
    const { loading } = this.props;
    // Column definitions for the "custom categories" table.
    const columns = [{
      title: '类目编号',
      dataIndex: 'agreementNo',
      width: 200,
      fixed: 'left',
      sorter: (a, b) => a.agreementNo - b.agreementNo,
    }, {
      title: '类目名称',
      dataIndex: 'agreementName',
    }, {
      title: '创建人',
      dataIndex: 'status',
    }, {
      title: '创建时间',
      dataIndex: 'createDate',
      render: val => <span>{moment(val).format('YYYY-MM-DD HH:mm:ss')}</span>,
      sorter: (a, b) => a.createDate - b.createDate,
    }, {
      title: '包含模板',
      dataIndex: 'tem',
      render: val => <a onClick={this.temDetail}>{val}</a>,
    }, {
      title: '操作',
      key: 'operation',
      fixed: 'right',
      width: 200,
      render: () => (
        <span>
          <a onClick={this.updateOne}>修改</a>
          <Divider type="vertical" />
          <a onClick={this.deteteOne}>删除</a>
        </span>
      ),
    }];
    // Column definitions for the "system templates" table.
    const columnsVo = [{
      title: '类目编号',
      dataIndex: 'agreementNo',
      sorter: (a, b) => a.agreementNo - b.agreementNo,
    }, {
      title: '类目名称',
      dataIndex: 'agreementName',
    }, {
      title: '包含模板',
      dataIndex: 'tem',
      render: val => <a onClick={this.sysTemDetail}>{val}</a>,
    }];
    // Mock data; numbering is derived from the current timestamp.
    const no = Date.parse(new Date()).toString();
    const data1 = [
      {
        key: 1,
        no: 1,
        agreementNo: `${no}${1}`,
        agreementName: '货权转入',
        tem: '入库模板',
      },
    ];
    const data = [
      {
        key: 1,
        agreementNo: `${no}${1}`,
        agreementName: '支付',
        status: '李雷',
        gender: 'M',
        tem: '支付模板',
        createDate: new Date(),
      },
      // {
      //   key: 2,
      //   agreementNo: `${no}${2}`,
      //   agreementName: '收款',
      //   status: '李雷',
      //   gender: 'M',
      //   createDate: new Date(),
      // },
      // {
      //   key: 3,
      //   agreementNo: `${no}${3}`,
      //   agreementName: '物流',
      //   status: '李雷',
      //   gender: 'M',
      //   createDate: new Date(),
      // },
      // {
      //   key: 4,
      //   agreementNo: `${no}${4}`,
      //   agreementName: '仓储',
      //   status: '李雷',
      //   gender: 'M',
      //   createDate: new Date(),
      // },
      // {
      //   key: 5,
      //   agreementNo: `${no}${5}`,
      //   agreementName: '质检',
      //   status: '李雷',
      //   gender: 'M',
      //   createDate: new Date(),
      // },
      // {
      //   key: 6,
      //   agreementNo: `${no}${5}`,
      //   agreementName: '其他',
      //   status: '李雷',
      //   gender: 'M',
      //   createDate: new Date(),
      // },
    ];
    const { getFieldDecorator } = this.props.form;
    // Layout for form items rendered without a label (the "add detail" button).
    const formItemLayoutWithOutLabel = {
      wrapperCol: {
        xs: { span: 24, offset: 0 },
        sm: { span: 20, offset: 4 },
      },
    };
    // NOTE(review): both <Form onSubmit={this.handleSearch}> usages below
    // reference a handler that is not defined on this class — confirm.
    return (
      <PageHeaderLayout title="类目列表">
        <div className={styles.standardList}>
          <div style={{ padding: '30px', marginTop: -30 }}>
            <Row gutter={24}>
              <Col xl={24} lg={24} md={24} sm={24} xs={24}>
                <Tabs type="card">
                  <TabPane tab="自定义类目" key="1">
                    <Card
                      className={styles.salesCard}
                      loading={loading}
                      bordered={false}
                      bodyStyle={{ padding: 24 }}
                      style={{ marginTop: -24, minHeight: 509 }}
                    >
                      <div className={listStyles.tableListForm}>
                        <Form onSubmit={this.handleSearch} layout="inline">
                          <Row gutter={{ md: 24, lg: 24, xl: 48 }}>
                            <Col md={8} sm={24}>
                              <FormItem label="类目名称">
                                {getFieldDecorator('b_name')(
                                  <Input placeholder="请输入" />
                                )}
                              </FormItem >
                            </Col>
                            <Col md={8} sm={24}>
                              <FormItem label="添加者">
                                {getFieldDecorator('u_name')(
                                  <Input placeholder="请输入" />
                                )}
                              </FormItem>
                            </Col>
                            <Col md={8} sm={24}>
                              <FormItem label="添加时间">
                                {getFieldDecorator('no')(
                                  <RangePicker />
                                )}
                              </FormItem>
                            </Col>
                          </Row>
                          <div style={{ overflow: 'hidden', marginTop: '2%' }}>
                            <span style={{ float: 'left', marginBottom: 24 }}>
                              <Button icon="plus" type="primary" style={{ marginRight: '4px' }} onClick={this.handleModalVisible}>创建新的履行类目</Button>
                              <Checkbox>只显示我创建的类目</Checkbox>
                            </span>
                            <span style={{ float: 'right', marginBottom: 24 }}>
                              <Button type="primary" htmlType="submit">查询</Button>
                              <Button style={{ marginLeft: 8 }} >重置</Button>
                            </span>
                          </div>
                        </Form>
                      </div>
                      <Table
                        className={listStyles.defaultCursor}
                        dataSource={data}
                        columns={columns}
                        rowKey={record => record.key}
                        scroll={{ x: 1366 }}
                      />
                    </Card>
                  </TabPane>
                  <TabPane tab="系统模板" key="2">
                    <Card
                      className={styles.salesCard}
                      loading={loading}
                      bordered={false}
                      bodyStyle={{ padding: 24 }}
                      style={{ marginTop: -24, minHeight: 509 }}
                    >
                      <div className={listStyles.tableListForm}>
                        <Form onSubmit={this.handleSearch} layout="inline">
                          <Row gutter={{ md: 24, lg: 24, xl: 48 }}>
                            <Col md={8} sm={24}>
                              <FormItem label="类目名称">
                                {getFieldDecorator('b_name')(
                                  <Input placeholder="请输入" />
                                )}
                              </FormItem >
                            </Col>
                            <Col md={8} sm={24}>
                              <span style={{ float: 'right', marginBottom: 24 }}>
                                <Button type="primary" htmlType="submit">查询</Button>
                                <Button style={{ marginLeft: 8 }} >重置</Button>
                              </span>
                            </Col>
                          </Row>
                        </Form>
                      </div>
                      <Table
                        className={listStyles.defaultCursor}
                        dataSource={data1}
                        columns={columnsVo}
                        rowKey={record => record.key}
                        scroll={{ x: 1366 }}
                      />
                    </Card>
                  </TabPane>
                </Tabs>
              </Col>
            </Row>
          </div>
        </div>
        {/* Create-category modal */}
        <Modal
          title="新增履行计划类目"
          visible={this.state.modalVisible}
          onOk={this.handleAdd}
          onCancel={() => this.handleModalVisible()}
          style={{ width: 1200 }}
        >
          <FormItem
            labelCol={{ span: 5 }}
            wrapperCol={{ span: 15 }}
            label="履行计划类目"
          >
            {getFieldDecorator('mess', {
              rules: [
                { required: true, message: '请输入履行计划类目' },
              ],
            })(
              <Input placeholder="请输入" />
            )}
          </FormItem>
          <FormItem
            labelCol={{ span: 5 }}
            wrapperCol={{ span: 15 }}
            label="履行计划模板"
          >
            {getFieldDecorator('tem', {
            })(
              <Select style={{ width: '100%' }} mode="multiple" placeholder="请选择模板" >
                <Option value="LG">支付模板</Option>
                <Option value="green">质检模板</Option>
                <Option value="DP">入库模板</Option>
                <Option value="blue">出库模板</Option>
                <Option value="TT">物流模板</Option>
              </Select>
            )}
          </FormItem>
        </Modal>
        {/* Edit-category modal */}
        <Modal
          title="修改履行计划类目"
          visible={this.state.upModalVisible}
          onOk={() => this.updateOneSu()}
          onCancel={() => this.updateOne()}
          style={{ width: 1200 }}
        >
          <FormItem
            labelCol={{ span: 5 }}
            wrapperCol={{ span: 15 }}
            label="履行计划类目"
          >
            {getFieldDecorator('mess1', { initialValue: '支付',
              rules: [
                { required: true, message: '请输入履行计划类目' },
              ],
            })(
              <Input placeholder="请输入" />
            )}
          </FormItem>
          <FormItem
            labelCol={{ span: 5 }}
            wrapperCol={{ span: 15 }}
            label="履行计划模板"
          >
            {getFieldDecorator('tem1', { initialValue: 'LG',
            })(
              <Select style={{ width: '100%' }} mode="multiple" placeholder="请选择模板" >
                <Option value="LG">支付模板</Option>
                <Option value="green">质检模板</Option>
                <Option value="DP">入库模板</Option>
                <Option value="blue">出库模板</Option>
                <Option value="TT">物流模板</Option>
              </Select>
            )}
          </FormItem>
        </Modal>
        {/* Payment-template detail modal (read-only fields) */}
        <Modal
          title="履行计划模板详情"
          visible={this.state.payModalVisible}
          onOk={() => this.hiddenModalVisible()}
          onCancel={() => this.hiddenModalVisible()}
          style={{ width: 1200 }}
        >
          <FormItem
            labelCol={{ span: 5 }}
            wrapperCol={{ span: 15 }}
            label="首款金额"
            hasFeedback
          >
            <InputNumber style={{ width: '100%' }} disabled /><span>万元</span>
          </FormItem>
          <FormItem
            labelCol={{ span: 5 }}
            wrapperCol={{ span: 15 }}
            label="首款时间"
            hasFeedback
          >
            <DatePicker style={{ width: '100%' }} disabled />
          </FormItem>
          <FormItem
            labelCol={{ span: 5 }}
            wrapperCol={{ span: 15 }}
            label="尾款金额"
            hasFeedback
          >
            <InputNumber style={{ width: '100%' }} disabled /><span>万元</span>
          </FormItem>
          <FormItem
            labelCol={{ span: 5 }}
            wrapperCol={{ span: 15 }}
            label="尾款时间"
            hasFeedback
          >
            <DatePicker style={{ width: '100%' }} disabled />
          </FormItem>
          <FormItem
            labelCol={{ span: 5 }}
            wrapperCol={{ span: 15 }}
            label="支付笔数"
            hasFeedback
          >
            <InputNumber style={{ width: '100%' }} disabled />
          </FormItem>
          <FormItem
            labelCol={{ span: 5 }}
            wrapperCol={{ span: 15 }}
            label="第一笔金额"
            hasFeedback
          >
            <InputNumber style={{ width: '100%' }} disabled /><span>万元</span>
          </FormItem>
          <FormItem
            labelCol={{ span: 5 }}
            wrapperCol={{ span: 15 }}
            label="第一笔时间"
            hasFeedback
          >
            <DatePicker style={{ width: '100%' }} disabled />
          </FormItem>
          <FormItem {...formItemLayoutWithOutLabel}>
            <Button disabled type="dashed" style={{ width: '75%', marginLeft: '5%' }}>
              <Icon type="plus" /> 增加分次明细
            </Button>
          </FormItem>
          <FormItem
            labelCol={{ span: 5 }}
            wrapperCol={{ span: 15 }}
            label="发票开出时间"
            hasFeedback
          >
            <DatePicker style={{ width: '100%' }} disabled />
          </FormItem>
          <FormItem
            labelCol={{ span: 5 }}
            wrapperCol={{ span: 15 }}
            label="到达时间"
            hasFeedback
          >
            <DatePicker style={{ width: '100%' }} disabled />
          </FormItem>
        </Modal>
        {/* System-template detail modal (read-only fields) */}
        <Modal
          title="履行计划模板详情"
          visible={this.state.inputModalVisible}
          onOk={() => this.hiddenSysModalVisible()}
          onCancel={() => this.hiddenSysModalVisible()}
          style={{ width: 1200 }}
        >
          <FormItem
            labelCol={{ span: 5 }}
            wrapperCol={{ span: 15 }}
            label="入库数量"
            hasFeedback
          >
            <InputNumber style={{ width: '100%' }} disabled min={1} max={10000000} />
            <span>/吨</span>
          </FormItem>
          <FormItem
            labelCol={{ span: 5 }}
            wrapperCol={{ span: 15 }}
            label="单价"
            hasFeedback
          >
            <InputNumber style={{ width: '100%' }} disabled min={1} max={10000000} />
            <span>元/吨</span>
          </FormItem>
          <FormItem
            labelCol={{ span: 5 }}
            wrapperCol={{ span: 15 }}
            label="总金额"
            hasFeedback
          >
            <InputNumber style={{ width: '100%' }} disabled min={1} max={10000000} />
            <span>万元</span>
          </FormItem>
          <FormItem
            labelCol={{ span: 5 }}
            wrapperCol={{ span: 15 }}
            label="入库时间"
            hasFeedback
          >
            <DatePicker style={{ width: '100%' }} disabled />
          </FormItem>
        </Modal>
      </PageHeaderLayout>
    );
  }
}
|
#!/bin/sh
# Installs pgAdmin4 (via pipx) and optionally starts it.
# Usage: script.sh [-i|--install]   (-i installs without starting)
# Stops script execution if a command has an error
set -e
INSTALL_ONLY=0
# Loop through arguments and process them: https://pretzelhands.com/posts/command-line-flags
for arg in "$@"; do
    case $arg in
    -i|--install) INSTALL_ONLY=1 ; shift ;;
    *) break ;;
    esac
done
# Install only when the binary is not already on PATH.
if ! hash pgadmin4 2>/dev/null; then
    echo "Installing pgAdmin4"
    pipx install pgadmin4
fi
# Run
if [ $INSTALL_ONLY = 0 ] ; then
    if [ -z "$PORT" ]; then
        # `read -p` is a bashism and unreliable under #!/bin/sh;
        # print the prompt explicitly for POSIX portability.
        printf "Please provide a port for starting pgAdmin4: "
        read PORT
    fi
    # Quote $PORT so an empty/space-containing value cannot word-split.
    echo "Starting pgAdmin4 on port $PORT"
    # TODO: Currently does not use port, can only be used from within VNC
    pgadmin4
fi
|
from flask_principal import Permission, RoleNeed
def get_permission_for_role(role: str) -> Permission:
if role == 'admin':
return Permission(RoleNeed('admin'))
elif role == 'employee':
return Permission(RoleNeed('employee'))
elif role == 'employeer':
return Permission(RoleNeed('employeer'))
else:
raise ValueError("Invalid role provided") |
def sort_strings(strings):
    """Return a new list containing ``strings`` sorted in ascending order.

    The input list is not modified (the original implementation also worked
    on a copy). Replaces a hand-written O(n^2) bubble sort with the built-in
    stable O(n log n) sort, which yields identical results.
    """
    return sorted(strings)
# Demo: sort a small list of strings and print the result.
strings = ["z", "e", "t", "a"]
sorted_strings = sort_strings(strings)
print("The sorted list is: {}".format(sorted_strings))
package com.jinke.kanbox;
import android.content.Context;
import android.content.SharedPreferences;
/**
* Save Data To SharePreference Or Get Data from SharePreference
*
* @author liweilin
*
*/
public class PushSharePreference {
private Context ctx;      // context used to resolve the preference file
private String fileName;  // name of the backing SharedPreferences file

/**
 * Creates a helper bound to one SharedPreferences file.
 *
 * @param ctx      context used to open the preference file
 * @param fileName name of the SharedPreferences file
 */
public PushSharePreference(Context ctx, String fileName) {
    this.ctx = ctx;
    this.fileName = fileName;
}
/**
 * Stores an int value in the SharedPreferences file.
 *
 * @param key   preference key
 * @param value value to store
 */
public void saveIntValueToSharePreferences(String key, int value) {
    SharedPreferences sharePre = ctx.getSharedPreferences(fileName, Context.MODE_PRIVATE);
    SharedPreferences.Editor editor = sharePre.edit();
    editor.putInt(key, value);
    // apply() writes asynchronously; the boolean result of commit() was
    // ignored anyway, and apply() avoids blocking the caller on disk I/O.
    editor.apply();
}
/**
 * Stores a long value in the SharedPreferences file.
 *
 * @param key   preference key
 * @param value value to store
 */
public void saveLongValueToSharePreferences(String key, long value) {
    SharedPreferences sharePre = ctx.getSharedPreferences(fileName, Context.MODE_PRIVATE);
    SharedPreferences.Editor editor = sharePre.edit();
    editor.putLong(key, value);
    // apply() writes asynchronously; commit()'s result was ignored anyway.
    editor.apply();
}
/**
 * Stores a String value in the SharedPreferences file.
 *
 * @param key   preference key
 * @param value value to store
 */
public void saveStringValueToSharePreferences(String key, String value) {
    SharedPreferences sharePre = ctx.getSharedPreferences(fileName, Context.MODE_PRIVATE);
    SharedPreferences.Editor editor = sharePre.edit();
    editor.putString(key, value);
    // apply() writes asynchronously; commit()'s result was ignored anyway.
    editor.apply();
}
/**
* Set Boolean value into SharePreference
*
* @param fileName
* @param key
* @param value
*/
public void saveBooleanValueToSharePreferences(String key, boolean value) {
SharedPreferences sharePre = ctx.getSharedPreferences(fileName, Context.MODE_PRIVATE);
SharedPreferences.Editor editor = sharePre.edit();
editor.putBoolean(key, value);
editor.commit();
}
/**
* Remove key from SharePreference
*
* @param fileName
* @param key
*/
public void removeSharePreferences(String key) {
SharedPreferences sharePre = ctx.getSharedPreferences(fileName, Context.MODE_PRIVATE);
SharedPreferences.Editor editor = sharePre.edit();
editor.remove(key);
editor.commit();
}
/**
* 是否包含此字段
*
* @param fileName
* @param key
* @return
*/
public boolean contains(String key) {
SharedPreferences sharePre = ctx.getSharedPreferences(fileName, Context.MODE_PRIVATE);
return sharePre.contains(key);
}
/**
* Get Integer Value
*
* @param fileName
* @param key
* @return
*/
public Integer getIntValueByKey(String key) {
SharedPreferences sharePre = ctx.getSharedPreferences(fileName, Context.MODE_PRIVATE);
return sharePre.getInt(key, -1);
}
public Long getLongValueByKey(String key) {
SharedPreferences sharePre = ctx.getSharedPreferences(fileName, Context.MODE_PRIVATE);
return sharePre.getLong(key, 0);
}
/**
* Get String Value
*
* @param fileName
* @param key
* @return
*/
public String getStringValueByKey(String key) {
SharedPreferences sharePre = ctx.getSharedPreferences(fileName, Context.MODE_PRIVATE);
return sharePre.getString(key, null);
}
public Boolean getBooleanValueByKey(String key) {
return getBooleanValueByKey(key, false);
}
public Boolean getBooleanValueByKey(String key, boolean defaultValue) {
SharedPreferences sharePre = ctx.getSharedPreferences(fileName, Context.MODE_PRIVATE);
return sharePre.getBoolean(key, defaultValue);
}
public void clearSharePreferences() {
SharedPreferences sharePre = ctx.getSharedPreferences(fileName, Context.MODE_PRIVATE);
SharedPreferences.Editor editor = sharePre.edit();
editor.clear();
editor.commit();
}
}
|
/*
* Copyright 2020 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var _ = require('lodash');
var utils = require('./utils');
var database = require('./database');
/**
 * Holds the set of states a station can be in. Each state key maps to its
 * own name, so the values can be stored directly in the database.
 */
function StationManager() {
  var states = [
    'IDLE',
    'CONNECTED',
    'WAIT_FOR_CAR_TO_CONNECT',
    'DISPLAY_START_PAGE',
    'DISPLAY_PLACE_OPTIONS',
    'WAIT_FOR_CAR_TO_ARRIVE_AT_DESTINATION',
    'WAIT_FOR_CAR_DISCONNECT',
    'NO_CONNECTION',
    'WARNING',
    'ERROR'
  ];
  this.stationStatus = {};
  for (var i = 0; i < states.length; i++) {
    this.stationStatus[states[i]] = states[i];
  }
}
// Shared singleton used by the prototype methods below and exported.
var stationManager = new StationManager();
/**
 * Register a station connection: creates the DB record when the station is
 * unknown, updates the existing one otherwise.
 * Calls fn(err, null) on failure or fn(null, {success: true}) on success.
 */
StationManager.prototype.connectStation = function(data, fn) {
  var record = {
    device_id: data.device_id,
    station_id: data.station_id,
    status: stationManager.stationStatus.CONNECTED,
    ip_address: data.ip_address,
    online: true
  };
  // Shared completion handler for both the create and update paths.
  var done = function(err) {
    if (err) {
      fn(err, null);
    } else {
      fn(null, {success: true});
    }
  };
  database.findStationById(record.station_id, function(err, station) {
    if (err) {
      fn(err, null);
      return;
    }
    if (_.isEmpty(station)) {
      stationManager.createStation(record, done);
    } else {
      database.updateStationById(record.station_id, record, done);
    }
  });
}
/**
 * Mark a station offline (status NO_CONNECTION, online=false).
 * Calls fn(err, null) on failure or fn(null, {success: true}) on success.
 */
StationManager.prototype.disconnectStation = function(station, fn) {
  var update = {
    status: stationManager.stationStatus.NO_CONNECTION,
    online: false
  };
  database.updateStation(station.device_id, update, function(err) {
    if (err) {
      fn(err, null);
      return;
    }
    fn(null, {success: true});
  });
}
/**
 * Insert a brand-new station record.
 * Calls fn(err, null) on failure or fn(null, {success: true}) on success.
 */
StationManager.prototype.createStation = function(station, fn) {
  database.addStation(station, function(err) {
    if (err) {
      fn(err, null);
      return;
    }
    fn(null, {success: true});
  });
}
/**
 * Look up a station by its device id. Calls fn(err, null) on a DB error,
 * fn(null, true) when nothing matches (NOTE(review): `true` looks like a
 * "not found" sentinel — confirm callers expect it), and
 * fn(null, firstMatch) otherwise.
 */
StationManager.prototype.getStationInfo = function(device_id, fn) {
  database.findStationByDeviceId(device_id, function(err, stations) {
    if (err) {
      fn(err, null);
      return;
    }
    fn(null, _.isEmpty(stations) ? true : stations[0]);
  });
}
/**
 * Persist a new status for the station identified by device_id and pass
 * the database results straight through to the callback.
 */
StationManager.prototype.changeStatus = function(device_id, status, fn) {
  database.updateStationStatus(device_id, status, function(err, results) {
    if (err) {
      fn(err, null);
      return;
    }
    fn(null, results);
  });
}
// Export the shared singleton instance.
module.exports = stationManager;
<reponame>pjmolina/event-backend<filename>public/app/services/domain/sessionTalkService.js<gh_stars>10-100
// AngularJS service wrapping the /sessionTalks REST resource: CRUD,
// counting, query-driven listing, and CSV/XML/XLSX export via HTTP
// content negotiation. All methods return $http promises.
// NOTE(review): EntityUtilService is injected but never used here.
angular.module('myApp').service('SessionTalkService', ['$http', '$q', 'baseApi', 'QueryBuilderService', 'EntityUtilService', function ($http, $q, baseApi, QueryBuilderService, EntityUtilService) {
    var SessionTalkService = {};
    var resourceUrl = baseApi + '/sessionTalks';
    // Field metadata for the query builder; built lazily and cached.
    var fields = null;
    function buildFields() {
        if (!fields) {
            fields = [
                {name: 'sessionType', type: 'string'},
                {name: 'name', type: 'string'},
                {name: 'track', type: 'int'},
                {name: 'language', type: 'string'},
                {name: 'starts', type: 'time'},
                {name: 'ends', type: 'time'},
                {name: 'description', type: 'string'}
            ];
        }
        return fields;
    }
    // Human-readable label shown in lists/detail views for an entity.
    function getDisplayLabel(sessionTalk) {
        return sessionTalk.sessionType;
    }
    SessionTalkService.getDisplayLabel = getDisplayLabel;
    //-- Public API -----
    // Count entities matching the (optional) query options.
    SessionTalkService.getCount = function (opts) {
        opts = opts || {};
        opts.fields = opts.fields || buildFields();
        opts.count = true;
        return QueryBuilderService.buildBaucisQuery(opts).then(function(q) {
            return $http.get(resourceUrl + q);
        }, function (err) {
            return $q.reject(err);
        });
    };
    // Fetch a page of entities and decorate each with _displayLabel.
    SessionTalkService.getList = function (opts) {
        opts = opts || {};
        opts.fields = opts.fields || buildFields();
        return QueryBuilderService.buildBaucisQuery(opts).then(function(q) {
            return $http.get(resourceUrl + q).then(function(response) {
                response.data.forEach(function(element) {
                    element._displayLabel = getDisplayLabel(element);
                });
                return response;
            }, function (err) {
                return $q.reject(err);
            });
        }, function (err) {
            return $q.reject(err);
        });
    };
    // Build an unpaginated query string for the export endpoints below.
    function exportQuery(opts) {
        opts = opts || {};
        opts.paginate = false;
        opts.fields = opts.fields || buildFields();
        return QueryBuilderService.buildBaucisQuery(opts).then(function (q) {
            return q;
        }, function (err) {
            return $q.reject(err);
        });
    }
    // NOTE(review): getListAsCsv and getFileAsCsv are byte-identical;
    // both are kept because they are public API — consider consolidating.
    SessionTalkService.getListAsCsv = function (opts) {
        return exportQuery(opts).then(function (q) {
            return $http({
                method: 'GET',
                url: resourceUrl + q,
                headers: {'Accept': 'text/csv'}
            });
        }, function (err) {
            return $q.reject(err);
        });
    };
    SessionTalkService.getFileAsCsv = function (opts) {
        return exportQuery(opts).then(function (q) {
            return $http({
                method: 'GET',
                url: resourceUrl + q,
                headers: {'Accept': 'text/csv'}
            });
        }, function (err) {
            return $q.reject(err);
        });
    };
    SessionTalkService.getFileAsXml = function (opts) {
        return exportQuery(opts).then(function (q) {
            return $http({
                method: 'GET',
                url: resourceUrl + q,
                headers: {'Accept': 'text/xml'}
            });
        }, function (err) {
            return $q.reject(err);
        });
    };
    // XLSX export needs responseType 'blob' since the payload is binary.
    SessionTalkService.getFileAsXlsx = function (opts) {
        return exportQuery(opts).then(function (q) {
            return $http({
                method: 'GET',
                url: resourceUrl + q,
                headers: {'Accept': 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'},
                responseType: 'blob'
            });
        }, function (err) {
            return $q.reject(err);
        });
    };
    // Fetch an arbitrary (e.g. HATEOAS) link.
    SessionTalkService.get = function (link) {
        return $http.get(link);
    };
    // Fetch a single entity by id, decorated with _displayLabel.
    SessionTalkService.getDocument = function (id) {
        return SessionTalkService.get(resourceUrl + '/' + id ).then(function(response) {
            response.data._displayLabel = getDisplayLabel(response.data);
            return response;
        }, function (err) {
            return $q.reject(err);
        });
    };
    SessionTalkService.add = function (item) {
        return $http.post(resourceUrl, JSON.stringify(item));
    };
    SessionTalkService.update = function (item) {
        return $http.put(resourceUrl + '/' + item._id, JSON.stringify(item));
    };
    SessionTalkService.delete = function (id) {
        return $http.delete(resourceUrl + '/' + id);
    };
    SessionTalkService.deleteMany = function (ids) {
        return $http.post(resourceUrl + '/deleteByIds', JSON.stringify(ids));
    };
    // Delete every entity matching the query (pagination disabled so the
    // whole result set is targeted).
    SessionTalkService.deleteByQuery = function (opts) {
        opts = opts || {};
        opts.fields = opts.fields || buildFields();
        opts.paginate = false;
        return QueryBuilderService.buildBaucisQuery(opts).then(function (q) {
            return $http.delete(resourceUrl + q);
        }, function (err) {
            return $q.reject(err);
        });
    };
    return SessionTalkService;
}]);
|
#!/bin/sh
# Run the integration test suite, forwarding any extra py.test arguments,
# and write a JUnit XML report (results.xml) for CI consumption.
py.test "$@" test/integration --junitxml results.xml
|
<reponame>DuneRoot/BPL-python
from bpl_lib.network.Network import Network
|
<gh_stars>1-10
import inspect
import ipaddress
import json
from rich import box, print
from rich.console import Console
from rich.table import Table
from ..config.settings import configfile # noqa: E402
from ..config.settings import save_output_dir, settings
console = Console()
def display_output(output, save=None):
    """Pretty-print an API response as JSON; optionally persist it to disk.

    Exits the process with status 1 when the response code is not 200.
    """
    if output["code"] != 200:
        print(f"\n\n[red]Error Status Code: {output['code']} : {output['msg']}[/red]")
        exit(1)
    # Name of the CLI command two frames up the stack, used as the filename
    # when saving. TODO confirm the stack depth holds for all call sites.
    command_called = inspect.stack()[2][3]
    if save:
        with open(f"{save_output_dir}/{command_called}.json", "w+") as file:
            json.dump(output["msg"], file, indent=4)
    from rich import print_json
    print_json(json.dumps(output["msg"]))
def check_output(output):
    """Return ``output["msg"]``, exiting with an error banner on non-200 codes."""
    # (Dump `output` to a JSON file here when building test fixtures.)
    if output["code"] == 200:
        return output["msg"]
    print(f"\n\n[red]Error Status Code: {output['code']} : {output['msg']}[/red]")
    exit(1)
def display_account(account):
    """Show the active account plus every other profile in the config file.

    NOTE(review): the ``account`` argument is accepted but unused — the
    active account comes from ``settings.ACCOUNT``; confirm intent.
    """
    print(f"Account Name: [#ff8300]{settings.ACCOUNT}[/#ff8300]")
    print("\nOther accounts in config file:")
    for profile in configfile:
        print(f"\t[cyan]{profile}[/cyan]")
def display_gateways(gateways):
    """Render ``gateways["mcs"]`` as a rich table, one row per gateway."""
    table = Table(title="Gateways", box=box.SIMPLE)
    for column in ("Name", "SN", "IP", "MAC", "Group", "Status", "GROUP/MAC"):
        table.add_column(column)
    for gw in gateways["mcs"]:
        table.add_row(
            gw["name"],
            gw["serial"],
            gw["ip_address"],
            gw["macaddr"],
            gw["group_name"],
            gw["status"],
            # Combined group/MAC identifier, highlighted for copy-paste use.
            f'[bold cyan]{gw["group_name"]}/{gw["macaddr"]}[/bold cyan]',
        )
    console.print(table)
def display_routes(serial, routes):
    """Render a gateway's route summary and route table with rich.

    Expects ``routes`` to contain "summary" (counters) and "routes"
    (entries with "prefix", "length" and a non-empty "nexthop" list).
    """
    # print(routes["summary"])
    # print(routes["routes"])
    print("")
    route_summary = Table(title="Route Summary", show_header=False, box=box.SIMPLE)
    route_summary.add_row("Total Routes", str(routes["summary"]["total"]))
    route_summary.add_row("Default Routes", str(routes["summary"]["default"]))
    route_summary.add_row("Static Routes", str(routes["summary"]["static"]))
    route_summary.add_row("Connected Routes", str(routes["summary"]["connected"]))
    route_summary.add_row("Overlay Routes", str(routes["summary"]["overlay"]))
    console.print(route_summary)
    route_table = Table(title=f"Route Table: {serial}", box=box.SIMPLE)
    route_table.add_column(
        "",
    )
    route_table.add_column("Destination")
    route_table.add_column("D/M")
    route_table.add_column("Nexthop")
    # Connected routes are skipped entirely; Static routes render cyan,
    # anything else renders orange.
    for route in routes["routes"]:
        if route["nexthop"][0]["protocol"] != "Connected":
            if route["nexthop"][0]["protocol"] == "Static":
                rowstyle = "cyan"
            else:
                rowstyle = "#d78700"
            try:
                # Plain IP next hop: parsing succeeds and the row is added here.
                nexthopip = ipaddress.ip_address(route["nexthop"][0]["address"])
                route_table.add_row(
                    route["nexthop"][0]["protocol"],
                    route["prefix"] + "/" + str(route["length"]),
                    "[" + str(route["nexthop"][0]["admin_distance"]) + "/" + str(route["nexthop"][0]["metric"]) + "]",
                    "via " + route["nexthop"][0]["address"],
                    style=rowstyle,
                )
            except ValueError:
                # Non-IP next hop (presumably an ipsec map name); mark it
                # with the 0 sentinel so the branch below renders it.
                # nexthopip = route["nexthop"][0]["address"]
                nexthopip = 0
            if nexthopip == 0:
                # Multiple next hops: one "ipsec map" line per hop.
                if len(route["nexthop"]) > 1:
                    nexthopaddress = ""
                    for nh in route["nexthop"]:
                        nexthopaddress += "ipsec map " + nh["address"] + "\n"
                else:
                    nexthopaddress = "ipsec map " + route["nexthop"][0]["address"]
                route_table.add_row(
                    route["nexthop"][0]["protocol"],
                    route["prefix"] + "/" + str(route["length"]),
                    "[" + str(route["nexthop"][0]["admin_distance"]) + "/" + str(route["nexthop"][0]["metric"]) + "]",
                    nexthopaddress,
                    style=rowstyle,
                )
    console.print(route_table)
|
/**
 * Created by Home on 1/8/2018.
 */
// MongoDB connection settings for the tutorial database.
// NOTE(review): hard-coded to localhost — consider reading the URL from an
// environment variable for non-local deployments.
module.exports = {
    url : 'mongodb://localhost/tutorial'
};
package codecheck.github.app
// Optional per-command repository setting; `repo` is None when the command
// is not bound to a specific repository.
case class CommandSetting(repo: Option[Repo] = None) {
  // Owner of the configured repository, if any.
  def repositoryOwner = repo.map(_.owner)
}

// A repository identified by its owner and name.
case class Repo(owner: String, name: String)
|
// Fixed Swiss QR-bill header section (Swiss Payment Code).
export const Header = {
  QRType: 'SPC', // swiss payment code
  Version: '0200',
  Coding: '1'
};

/**
 * Assemble the QR-bill JSON structure from its sections. Optional sections
 * default to undefined so their keys are still present on the result.
 */
export const createJson = (CdtrInf, CcyAmt, RmtInf, UltmtDtr = undefined, UltmtCdtr = undefined, AltPmtInf = undefined) => ({
  Header,
  CdtrInf,
  CcyAmt,
  RmtInf,
  UltmtCdtr,
  UltmtDtr,
  AltPmtInf
});
/**
 * Flatten an address object into the 7-field break-separated layout.
 * Returns seven empty strings when no address is given; otherwise returns
 * the address's values in label order (missing keys become undefined).
 */
export const addressToBreakSepFormat = a => {
  const addressLabels = ['AdrTp', 'Name', 'StrNameOrAdrLine1', 'StrNameOrAdrLine2', 'PstCd', 'TmwNm', 'Ctry'];
  // The previous `!a || a === null` was redundant: null is already falsy,
  // so a single falsy check covers null and undefined alike.
  if (!a) {
    return addressLabels.map(() => '');
  }
  return addressLabels.map(k => a[k]);
}
/**
 * Serialize QR-bill JSON into the flat break-separated field array.
 * Field order: header (3), IBAN (1), creditor (7), ultimate creditor (7),
 * amount+currency (2), ultimate debtor (7), reference (2),
 * additional info (3), alternative schemes (2).
 */
export const jsonToBreakSepFormat = j => {
  const pick = (obj, keys) => keys.map(k => obj[k]);
  return [
    ...pick(j['Header'], ['QRType', 'Version', 'Coding']),
    j['CdtrInf']['IBAN'],
    ...addressToBreakSepFormat(j['CdtrInf']['Cdtr']),
    ...addressToBreakSepFormat(j['UltmtCdtr']),
    ...pick(j['CcyAmt'], ['Amt', 'Ccy']),
    ...addressToBreakSepFormat(j['UltmtDtr']),
    ...pick(j['RmtInf'], ['Tp', 'Ref']),
    ...pick(j['RmtInf']['AddInf'], ['Ustrd', 'Trailer', 'SrdBkgInfo']),
    ...pick(j['AltPmtInf'], ['AltPmt1', 'AltPmt2'])
  ];
};
/**
 * Read 7 consecutive fields starting at `idx` and build an address object.
 * Returns null when every one of the 7 fields is the empty string.
 */
export const arrayToAddress = (t, idx = 0) => {
  const keys = ['AdrTp', 'Name', 'StrNameOrAdrLine1', 'StrNameOrAdrLine2', 'PstCd', 'TmwNm', 'Ctry'];
  const address = {};
  keys.forEach((key, offset) => {
    address[key] = t[idx + offset];
  });
  const allEmpty = keys.every(key => address[key] === '');
  return allEmpty ? null : address;
}
/**
 * Parse the flat break-separated field array back into QR-bill JSON.
 * Index map (mirrors jsonToBreakSepFormat): 3 IBAN, 4-10 creditor,
 * 11-17 ultimate creditor, 18 amount, 19 currency, 20-26 ultimate debtor,
 * 27-28 reference, 29-31 additional info, 32-33 alternative schemes.
 */
export const arrayToJson = t => ({
  Header,
  CdtrInf: { IBAN: t[3], Cdtr: arrayToAddress(t, 4) },
  UltmtCdtr: arrayToAddress(t, 11),
  UltmtDtr: arrayToAddress(t, 20),
  CcyAmt: { Ccy: t[19], Amt: t[18] },
  RmtInf: {
    Tp: t[27],
    Ref: t[28],
    AddInf: { Ustrd: t[29], Trailer: t[30], SrdBkgInfo: t[31] }
  },
  AltPmtInf: { AltPmt1: t[32], AltPmt2: t[33] }
})
from hashlib import sha256
from random import choice, randrange
from uuid import uuid4
from multihash import SHA2_256
from geostore.dataset_properties import DATASET_KEY_SEPARATOR, TITLE_CHARACTERS
from geostore.types import JsonObject
from .general_generators import (
_random_string_choices,
any_description,
any_https_url,
any_name,
random_string,
)
def any_hex_multihash() -> str:
    """Random SHA-256 digest encoded as a hex multihash string."""
    hex_digest = any_sha256_hex_digest()
    return sha256_hex_digest_to_multihash(hex_digest)


def any_sha256_hex_digest() -> str:
    """Hex SHA-256 digest of a random 20-character string."""
    return sha256(random_string(20).encode()).hexdigest()


def sha256_hex_digest_to_multihash(hex_digest: str) -> str:
    """Prefix a SHA-256 hex digest with its multihash header.

    The header is the SHA2-256 function code followed by the digest
    length (32 bytes), both hex-encoded.
    """
    return f"{SHA2_256:x}{32:x}{hex_digest}"


def any_dataset_id() -> str:
    """Random UUID4 rendered as a 32-character hex string."""
    return uuid4().hex


def any_dataset_version_id() -> str:
    """Arbitrary-length string"""
    return uuid4().hex


def any_dataset_title() -> str:
    """Arbitrary-length string of valid dataset title characters"""
    return _random_string_choices(TITLE_CHARACTERS, 20)


def any_dataset_prefix() -> str:
    """Concatenation of dataset title and id"""
    return f"{any_dataset_title()}{DATASET_KEY_SEPARATOR}{any_dataset_id()}"


def any_asset_name() -> str:
    """Arbitrary-length string"""
    return random_string(20)


def any_dataset_description() -> str:
    """Arbitrary-length string"""
    return random_string(100)
def any_linz_geospatial_type() -> str:
    """Return a random LINZ geospatial type name."""
    geospatial_types = (
        "black and white image",
        "circular string",
        "color image",
        "compound curve",
        "curve polygon",
        "geometry",
        "geometry collection",
        "grayscale",
        "grid",
        "hyperspectral",
        "multicurve",
        "multilinestring",
        "multipoint",
        "multipolygon",
        "multispectral",
        "multisurface",
        "linestring",
        "point",
        "point cloud",
        "polygon",
        "polyhedral surface",
        "rgb",
        "tin",
        "triangle",
    )
    return choice(geospatial_types)
def any_linz_history() -> str:
    """Arbitrary-length string"""
    return random_string(20)


def any_linz_lifecycle() -> str:
    """Random value from the LINZ dataset lifecycle vocabulary."""
    return choice(["under development", "preview", "ongoing", "completed", "deprecated"])


def any_provider(role: str) -> JsonObject:
    """Provider object with random name/description/url and the given role."""
    return {
        "name": any_name(),
        "description": any_description(),
        "roles": [role],
        "url": any_https_url(),
    }


def any_linz_provider_custodian() -> JsonObject:
    """Provider object with the 'custodian' role."""
    return any_provider("custodian")


def any_linz_provider_manager() -> JsonObject:
    """Provider object with the 'manager' role."""
    return any_provider("manager")


def any_provider_licensor() -> JsonObject:
    """Provider object with the 'licensor' role."""
    return any_provider("licensor")


def any_provider_producer() -> JsonObject:
    """Provider object with the 'producer' role."""
    return any_provider("producer")
def any_epsg() -> int:
    """Return an arbitrary EPSG-like code in the range [0, 1_000_000)."""
    return randrange(0, 1_000_000)


def any_version_version() -> str:
    """Return an arbitrary 'X.Y.Z' version string, each part in [0, 1000)."""
    parts = [randrange(1_000) for _ in range(3)]
    return ".".join(str(part) for part in parts)
|
import React, { Component } from 'react';
import { Image } from 'semantic-ui-react';
import './Spinner.css';
/**
* Simple wrapper for a react loader component,
* so that we can style it consistenty across components
*/
export default class Spinner extends Component {
render() {
return (
<div className='Spinner'>
<Image
className='Spinner-logo'
style={{
display:'block',
marginLeft:'auto',
marginRight: 'auto',
}}
src='img/neo4j_logo_globe.png'
size='mini'
/>
{ this.props.text || 'Loading' }
</div>
)
}
} |
#!/bin/sh
# Xcode post-build hook: delegate to the CMake-generated makefile for the
# opencv_imgproc module, with Xcode's $CONFIGURATION (e.g. Debug/Release)
# selecting the per-configuration makefile suffix.
# NOTE(review): the path component "bulid" looks like a typo for "build",
# but it must match the directory name actually on disk — verify before fixing.
make -C /Users/chushoutv/Desktop/opencv-3.3.1/bulid/modules/imgproc -f /Users/chushoutv/Desktop/opencv-3.3.1/bulid/modules/imgproc/CMakeScripts/opencv_imgproc_postBuildPhase.make$CONFIGURATION all
<reponame>BIGCATDOG/communityBackend<filename>src/vess-service/main.go<gh_stars>0
package main
import (
"github.com/asim/go-micro/v3"
pb "github.com/vess-service/proto/vess"
"log"
"os"
)
const (
	// DEFAULT_HOST is the MongoDB address used when DB_HOST is not set.
	DEFAULT_HOST = "localhost:27017"
)
// main wires up the vessel microservice: it connects to MongoDB (address
// taken from the DB_HOST env var set by the container, falling back to a
// local default), registers the service handler and starts serving.
func main() {
	// Read the database address from the environment set by the container.
	dbHost := os.Getenv("DB_HOST")
	if dbHost == "" {
		dbHost = DEFAULT_HOST
	}

	session, err := CreateSession(dbHost)
	if err != nil {
		log.Fatalf("create session error: %v\n", err)
	}
	// Release the master MongoDB session when main() exits. The defer is
	// registered only after the error check: deferring Close() before
	// checking err (as before) risked a nil-pointer panic when
	// CreateSession failed.
	defer session.Close()

	repo := &VesselRepository{mgoSession: session}

	server := micro.NewService(
		micro.Name("go.micro.srv.vessel"),
		micro.Version("latest"),
	)
	server.Init()

	// Register the server-side API implementation with the server.
	pb.RegisterVesselServiceHandler(server.Server(), &service{repo})

	if err := server.Run(); err != nil {
		log.Fatalf("failed to serve: %v", err)
	}
}
|
from typing import (
    Any,
    Dict,
    Optional,
)
import logging
from urllib.parse import urljoin
import requests
logger = logging.getLogger(__name__)
class HttpClient:
    """Minimal JSON HTTP client for a Cachet-style API.

    Wraps a persistent ``requests.Session`` that sends the API token and
    JSON content-type headers on every request.
    """

    def __init__(
        self,
        base_url: str,
        api_token: str,
        timeout: Optional[float] = None,
        verify_tls: bool = True,
        user_agent: Optional[str] = None,
    ):
        """
        Args:
            base_url: API root; a trailing slash is appended if missing so
                that ``urljoin`` resolves relative paths underneath it.
            api_token: value sent in the ``X-Cachet-Token`` header.
            timeout: per-request timeout in seconds (None waits indefinitely).
            verify_tls: whether to verify server TLS certificates.
            user_agent: optional User-Agent header override.
        """
        self.base_url = base_url
        if not self.base_url.endswith("/"):
            self.base_url += "/"
        self.verify_tls = verify_tls
        self.timeout = timeout
        self.user_agent = user_agent
        self._session = requests.Session()
        self._session.headers.update(
            {
                "X-Cachet-Token": api_token,
                "Accept": "application/json",
                "Content-Type": "application/json",
            }
        )
        if user_agent:
            self._session.headers.update({"User-Agent": user_agent})

    def get(self, path, params=None) -> requests.Response:
        """Send a GET request to ``path`` with optional query parameters."""
        return self.request("GET", path, params=params)

    def post(self, path, data) -> requests.Response:
        """Send a POST request with ``data`` serialized as JSON."""
        return self.request("POST", path, data=data)

    def put(self, path, data) -> requests.Response:
        """Send a PUT request with ``data`` serialized as JSON."""
        return self.request("PUT", path, data=data)

    def delete(self, path, resource_id) -> requests.Response:
        """Send a DELETE request for ``resource_id`` under ``path``."""
        return self.request("DELETE", "{}/{}".format(path, resource_id))

    def request(
        self,
        method: str,
        path: str,
        params: Optional[Dict[str, Any]] = None,
        data: Optional[Dict[str, Any]] = None,
    ) -> requests.Response:
        """Perform an HTTP request and return the successful response.

        Raises:
            requests.HTTPError: for 4xx/5xx responses (via raise_for_status).
        """
        url = urljoin(self.base_url, path)
        response = self._session.request(
            method,
            url,
            params=params,
            json=data,
            verify=self.verify_tls,
            timeout=self.timeout,
        )
        logger.debug("%s %s", method, response.url)
        if response.ok:
            return response
        logger.debug(response.text)
        response.raise_for_status()
        # Defensive: raise_for_status() always raises for non-ok responses,
        # so this line should be unreachable.
        raise RuntimeError
|
#!/usr/bin/env bash
# CI bootstrap for kind-based BDD tests: installs the jx CLI and kind,
# downloads the jx plugins and delegates to the shared BDD ci.sh.
set -e
set -x

pwd

if [ -z "$GH_ACCESS_TOKEN" ]
then
  echo "ERROR: no GH_ACCESS_TOKEN env var defined for kind/ci.sh"
else
  echo "has valid git token for kind/ci.sh"
fi

export WORKING_DIR=/home/runner/work/jx3-versions
export JX_KUBERNETES="true"
export NO_JX_TEST="true"
export KIND_VERSION=0.9.0
# jx version comes from the pinned package manifest.
export JX_VERSION=$(grep 'version: ' packages/jx-cli.yml | awk '{ print $2}')

echo "using working dir: ${WORKING_DIR}"
echo "using jx version: ${JX_VERSION}"

# -p keeps the script re-runnable: under `set -e`, a plain mkdir would
# abort the whole run if the directory already exists.
mkdir -p "$HOME/bin"
export PATH=$PATH:$HOME/bin

# setup git credential store
export XDG_CONFIG_HOME=/home/.config
git config credential.helper store

# use a sub dir for downloading to avoid clashing with the jx dir etc
mkdir -p downloads
cd downloads

curl -L https://github.com/jenkins-x/jx-cli/releases/download/v${JX_VERSION}/jx-cli-linux-amd64.tar.gz | tar xzv
sudo mv jx /usr/local/bin

curl -L https://github.com/kubernetes-sigs/kind/releases/download/v${KIND_VERSION}/kind-linux-amd64 > kind
chmod +x kind
sudo mv kind /usr/local/bin/kind

cd ..

echo "now testing the binaries..."
jx version
kind version

# TODO replace this some day with using a container image?
# download all the plugins
export JX3_HOME=/home/.jx
export JX_HOME=/home/.jx
sudo mkdir -p $JX3_HOME
jx upgrade plugins --boot --path /usr/bin

# BDD test specific part
export BDD_NAME="bdd-kind"

# lets default env vars that don't get populated if not running in jx
export BRANCH_NAME="${BRANCH_NAME:-pr-${GITHUB_RUN_ID}-${GITHUB_RUN_NUMBER}}"
export BUILD_NUMBER="${GITHUB_RUN_NUMBER}"

# the gitops repository template to use
export GITOPS_TEMPLATE_PROJECT="jx3-gitops-repositories/jx3-kind-vault"

mkdir -p /builder/home
.lighthouse/jenkins-x/bdd/ci.sh
#!/bin/bash
# Build the site from source and stage the output into .deploy_git for
# publishing to the apache/asf-site branch.

# source build
rm -rf node_modules
npm install
npm run build

# publish to apache/asf-site
rm -rf .deploy_git
mkdir .deploy_git
# Abort if we cannot enter the staging directory: everything below
# (rm -rf, mv) is destructive and must not run from the repo root.
cd .deploy_git || exit 1
# git init
# git remote add apache git@github.com:apache/incubator-weex-site.git
# git fetch apache asf-site
# git reset apache/asf-site
mv ../docs/.vuepress/dist/* ./
echo move to deploy_git successful
rm -rf ./zh/community/biz-emas.html
echo remove biz-emas.html
node ../scripts/generate-redirect-html.js
echo generate redirect html
node ../scripts/generate-htaccess-file.js
echo generate htaccess file
# DATE=`date '+%Y-%m-%d %H:%M:%S'`
# git add -A
# git commit -m "Site updated: ${DATE}"
# git checkout apache/asf-site -b asf-site
# git merge master
# git push apache asf-site
<reponame>narahari92/loki
/*
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package loki
import (
"context"
"time"
"github.com/pkg/errors"
"github.com/sirupsen/logrus"
"github.com/narahari92/loki/pkg/audit"
"github.com/narahari92/loki/pkg/wait"
)
// ChaosMaker takes Config and executes chaos scenarios both pre-defined and randomly generated ones.
type ChaosMaker struct {
	*Config            // chaos configuration: systems, scenario providers, timeouts
	logrus.FieldLogger // structured logger used for all progress/error output
	*audit.Reporter    // accumulates per-phase results for the final audit report
}
// CreateChaos executes all the chaos scenarios and exits with error on first scenario which fails to recover and
// get into desired state or returns successfully if all systems get back into desired state from all chaos scenarios.
func (cm *ChaosMaker) CreateChaos(ctx context.Context, opts ...HookOption) error {
	// Collect the optional user hooks (pre/post ready, load and chaos).
	hook := &Hook{}
	for _, opt := range opts {
		opt(hook)
	}
	if cm.Reporter == nil {
		cm.Reporter = &audit.Reporter{}
	}
	// Phase 1: wait until every system reports ready.
	if err := cm.readyCheck(ctx, hook); err != nil {
		return err
	}
	// Phase 2: capture the desired state of every system.
	if err := cm.loadSystems(ctx, hook); err != nil {
		return err
	}
	if hook.preChaos != nil {
		cm.Info("pre chaos hook executing")
		result := audit.SuccessResult
		message := "Successfully completed pre chaos test hook"
		// Hook failures are recorded and logged but do not abort the run.
		if err := hook.preChaos(ctx); err != nil {
			result = audit.FailureResult
			message = errors.Wrap(err, "pre chaos test hook failed").Error()
			cm.WithError(err).Warn("pre chaos hook failed")
		}
		cm.Reporter.Scenarios.PreChaosTests = audit.Message{
			Result: result,
			Message: message,
		}
	}
	if hook.postChaos != nil {
		// Deferred so the post-chaos hook runs even on early error returns.
		defer func() {
			cm.Info("post chaos hook executing")
			result := audit.SuccessResult
			message := "Successfully completed post chaos test hook"
			if err := hook.postChaos(ctx); err != nil {
				result = audit.FailureResult
				message = errors.Wrap(err, "post chaos test hook failed").Error()
				cm.WithError(err).Warn("post chaos hook failed")
			}
			cm.Reporter.Scenarios.PostChaosTests = audit.Message{
				Result: result,
				Message: message,
			}
		}()
	}
	// Phase 3: for each system, keep pulling scenarios from its provider,
	// kill the targeted identifiers, then wait for the system to recover.
	for systemName, provider := range cm.scenarioProviders {
		cm.Infof("creating chaos in '%s' system", systemName)
		systemType := cm.systemNames[systemName]
		system := cm.systems[systemName]
		killerCreator, ok := availableKillers[systemType]
		if !ok {
			errorMsg := "no killer registered for system '%s' of type '%s'"
			cm.Errorf(errorMsg, systemName, systemType)
			return errors.Errorf(errorMsg, systemName, systemType)
		}
		killer, err := killerCreator(system)
		if err != nil {
			errorMsg := "failed to create killer for system '%s' of type '%s'"
			cm.WithError(err).Errorf(errorMsg, systemName, systemType)
			return errors.Wrapf(err, errorMsg, systemName, systemType)
		}
		// The provider is drained until it reports no more scenarios (ok=false).
		for {
			scenario, ok, err := provider.scenario(system)
			if err != nil {
				cm.Reporter.Miscellaneous = append(
					cm.Reporter.Miscellaneous,
					audit.Message{
						Result: audit.FailureResult,
						Message: errors.Wrap(err, "failed to generated scenario").Error(),
					},
				)
				return err
			}
			if !ok {
				break
			}
			cm.Infof("creating chaos by action:\n%s", scenario.identifiers)
			if err := killer.Kill(ctx, scenario.identifiers...); err != nil {
				errorMsg := "failed to kill identifiers for system %s of type %s"
				cm.Reporter.Scenarios.Scenarios = append(
					cm.Reporter.Scenarios.Scenarios,
					audit.Scenario{
						Identifiers: scenario.identifiers.String(),
						Message: audit.Message{
							Result: audit.FailureResult,
							Message: errors.Wrapf(err, errorMsg, systemName, systemType).Error(),
						},
					},
				)
				cm.WithError(err).Errorf(errorMsg, systemName, systemType)
				return errors.Wrapf(err, errorMsg, systemName, systemType)
			}
			// Poll the system's Validate with exponential backoff until it
			// recovers or the scenario timeout expires.
			ok, err = wait.ExecuteWithBackoff(
				ctx,
				&wait.ExponentialBackoff{
					Cap: 10 * time.Minute,
					Factor: 2.0,
					Jitter: 0.3,
				},
				system.Validate,
				scenario.timeout,
			)
			if err != nil {
				errorMsg := "failed to validate system '%s'"
				cm.Reporter.Scenarios.Scenarios = append(
					cm.Reporter.Scenarios.Scenarios,
					audit.Scenario{
						Identifiers: scenario.identifiers.String(),
						Message: audit.Message{
							Result: audit.FailureResult,
							Message: errors.Wrapf(err, errorMsg, systemName).Error(),
						},
					},
				)
				cm.WithError(err).Errorf(errorMsg, systemName)
				return errors.Wrapf(err, errorMsg, systemName)
			}
			if !ok {
				// Backoff exhausted without error: the system never recovered.
				errorMsg := "validation failed. system '%s' didn't reach desired state"
				cm.Reporter.Scenarios.Scenarios = append(
					cm.Reporter.Scenarios.Scenarios,
					audit.Scenario{
						Identifiers: scenario.identifiers.String(),
						Message: audit.Message{
							Result: audit.FailureResult,
							Message: errors.Errorf(errorMsg, systemName).Error(),
						},
					},
				)
				cm.Errorf(errorMsg, systemName)
				return errors.Errorf(errorMsg, systemName)
			}
			cm.Reporter.Scenarios.Scenarios = append(
				cm.Reporter.Scenarios.Scenarios,
				audit.Scenario{
					Identifiers: scenario.identifiers.String(),
					Message: audit.Message{
						Result: audit.SuccessResult,
						Message: "Successfully executed the scenario",
					},
				},
			)
			cm.Infof("recovered successfully by chaos by action:\n%s", scenario.identifiers)
		}
	}
	cm.Reporter.Miscellaneous = append(
		cm.Reporter.Miscellaneous,
		audit.Message{
			Result: audit.SuccessResult,
			Message: "Successfully executed all scenarios",
		},
	)
	return nil
}
// loadSystems runs the optional pre/post system-load hooks and asks every
// configured system to load its desired state, recording each outcome in
// the audit report.
func (cm *ChaosMaker) loadSystems(ctx context.Context, hook *Hook) error {
	if hook.preSystemLoad != nil {
		cm.Info("pre system load hook executing")
		result := audit.SuccessResult
		message := "Successfully completed pre system load hook"
		// Hook failures are recorded and logged but do not abort the run.
		if err := hook.preSystemLoad(ctx); err != nil {
			result = audit.FailureResult
			message = errors.Wrap(err, "pre system load hook failed").Error()
			cm.WithError(err).Warn("pre system load hook failed")
		}
		cm.Reporter.Load.PreLoad = audit.Message{
			Result: result,
			Message: message,
		}
	}
	if hook.postSystemLoad != nil {
		// Deferred so the post-load hook also runs on error returns below.
		defer func() {
			cm.Info("post system load hook executing")
			result := audit.SuccessResult
			message := "Successfully completed post system load hook"
			if err := hook.postSystemLoad(ctx); err != nil {
				result = audit.FailureResult
				message = errors.Wrap(err, "post system load hook failed").Error()
				cm.WithError(err).Warn("post system load hook failed")
			}
			// FIXME(review): this stores the POST-load hook result into
			// Load.PreLoad, overwriting the pre-load entry above — it likely
			// should target a PostLoad field; confirm against audit.Reporter
			// before changing.
			cm.Reporter.Load.PreLoad = audit.Message{
				Result: result,
				Message: message,
			}
		}()
	}
	cm.Info("system(s) are being loaded")
	for name, system := range cm.systems {
		if err := system.Load(ctx); err != nil {
			errorMsg := "system '%s' failed to load"
			// Wrapf (not Wrap) so the '%s' placeholder is actually filled
			// with the system name in the recorded audit message.
			cm.Load.Message = audit.Message{
				Result: audit.FailureResult,
				Message: errors.Wrapf(err, errorMsg, name).Error(),
			}
			cm.WithError(err).Errorf(errorMsg, name)
			return errors.Wrapf(err, errorMsg, name)
		}
	}
	cm.Reporter.Load.Message = audit.Message{
		Result: audit.SuccessResult,
		Message: "system(s) are loaded successfully",
	}
	cm.Info("system(s) are loaded")
	return nil
}
// readyCheck runs the optional pre/post ready hooks and polls all systems
// with exponential backoff until they report ready or the timeout expires.
func (cm *ChaosMaker) readyCheck(ctx context.Context, hook *Hook) error {
	if hook.preReady != nil {
		cm.Info("pre ready hook executing")
		result := audit.SuccessResult
		message := "Successfully completed pre ready hook"
		// Hook failures are recorded and logged but do not abort the run.
		if err := hook.preReady(ctx); err != nil {
			result = audit.FailureResult
			message = errors.Wrap(err, "pre ready hook failed").Error()
			cm.WithError(err).Warn("pre ready hook failed")
		}
		cm.Reporter.Ready.PreReady = audit.Message{
			Result: result,
			Message: message,
		}
	}
	if hook.postReady != nil {
		// Deferred so the post-ready hook also runs on error returns below.
		defer func() {
			cm.Info("post ready hook executing")
			result := audit.SuccessResult
			message := "Successfully completed post ready hook"
			if err := hook.postReady(ctx); err != nil {
				result = audit.FailureResult
				message = errors.Wrap(err, "post ready hook failed").Error()
				cm.WithError(err).Warn("post ready hook failed")
			}
			cm.Reporter.Ready.PostReady = audit.Message{
				Result: result,
				Message: message,
			}
		}()
	}
	cm.Info("initiating readiness check")
	// Poll readiness with exponential backoff (1s initial, capped at 10m).
	ok, err := wait.ExecuteWithBackoff(
		ctx,
		&wait.ExponentialBackoff{
			Duration: 1 * time.Second,
			Cap: 10 * time.Minute,
			Factor: 1.5,
			Jitter: 0.7,
		},
		cm.ready.Ready,
		cm.readyTimeout,
	)
	if err != nil {
		errorMsg := "system(s) failed to reach ready state"
		cm.Reporter.Ready.Message = audit.Message{
			Result: audit.FailureResult,
			Message: errors.Wrap(err, errorMsg).Error(),
		}
		cm.WithError(err).Error(errorMsg)
		return errors.Wrap(err, errorMsg)
	}
	if !ok {
		// Backoff exhausted without error: systems never became ready.
		errorMsg := "system(s) didn't reach ready state"
		cm.Reporter.Ready.Message = audit.Message{
			Result: audit.FailureResult,
			Message: errorMsg,
		}
		cm.Errorf(errorMsg)
		return errors.New(errorMsg)
	}
	cm.Reporter.Ready.Message = audit.Message{
		Result: audit.SuccessResult,
		Message: "Successfully completed ready phase",
	}
	cm.Info("system(s) are ready for chaos testing")
	return nil
}
|
import {
isPlainObject,
isFunction,
nextTick,
merge,
invariant,
getDiffProps,
} from '../src/utils';
// isPlainObject: plain object literals are accepted, primitives are not.
describe('isPlainObject', () => {
  it('true', () => {
    const plainValues = [{}, { a: 'a' }, { foo: 'foo' }, { bar: () => {} }];
    plainValues.forEach((value) => {
      expect(isPlainObject(value)).toBe(true);
    });
  });
  it('false', () => {
    ['a', 1, null, undefined].forEach((value) => {
      expect(isPlainObject(value)).toBe(false);
    });
  });
});
// isFunction: arrow and classic function expressions pass, other values fail.
describe('isFunction', () => {
  it('true', () => {
    const callables = [() => {}, function() {}];
    callables.forEach((fn) => {
      expect(isFunction(fn)).toBe(true);
    });
  });
  it('false', () => {
    ['a', 1, {}].forEach((value) => {
      expect(isFunction(value)).toBe(false);
    });
  });
});
// nextTick: defers a callback and returns a promise.
// Fixed: the original assertions ran inside un-awaited callbacks, so the test
// runner could finish before they executed (failures were silently ignored);
// the first comment also contradicted its own assertion, and
// jest.useFakeTimers() called after scheduling had no effect.
describe('nextTick', () => {
  it('with callback', async () => {
    const callback = jest.fn();
    nextTick(callback);
    // The callback is deferred, so it must not have run synchronously.
    expect(callback).not.toBeCalled();
    // After waiting one more tick the deferred callback must have fired once.
    await nextTick();
    expect(callback).toBeCalled();
    expect(callback).toHaveBeenCalledTimes(1);
  });
  it('no callback', async () => {
    let i = 0;
    const pending = nextTick().then(() => {
      // The synchronous increment below runs before the deferred resolution.
      expect(i).toBe(1);
    });
    i += 1;
    await pending;
  });
});
// merge: mutates its first argument, merging keys from the second.
describe('merge', () => {
  it('true', () => {
    const untouched = { a: '1' };
    merge(untouched, {});
    const withNewKey = { a: '1' };
    merge(withNewKey, { b: '2' });
    const withArray = { a: [] };
    merge(withArray, { a: [1] });
    expect(untouched).toEqual({ a: '1' });
    expect(withNewKey).toEqual({ a: '1', b: '2' });
    expect(withArray).toEqual({ a: [1] });
  });
  it('false', () => {
    const untouched = { a: '1' };
    merge(untouched, {});
    const withNewKey = { a: '1' };
    merge(withNewKey, { b: '2' });
    const withArray = { a: [] };
    merge(withArray, { a: [1] });
    expect(untouched).not.toEqual({ a: '2' });
    expect(withNewKey).not.toEqual({ a: '1' });
    expect(withArray).not.toEqual({ a: [] });
  });
});
// invariant: throws '[nostore]: <message>' when its condition is falsy.
describe('invariant', () => {
  const prefix = '[nostore]: ';
  it('common', () => {
    expect(() => invariant(false, '11')).toThrow(prefix + '11');
    expect(() => invariant(true, '11')).not.toThrow(prefix + '11');
  });
  it('isFunction', () => {
    const partialStore = () => {};
    let message =
      'setStore(...): takes an object of store variables to update or a ' +
      'function which returns an object of store variables.';
    // A function satisfies the typeof check, so no throw is expected.
    const check = () =>
      invariant(
        typeof partialStore === 'object' || typeof partialStore === 'function',
        message,
      );
    expect(check).not.toThrow(prefix + message);
  });
  it('isString', () => {
    const partialStore = '';
    let message =
      'setStore(...): takes an object of store variables to update or a ' +
      'function which returns an object of store variables.';
    // A string fails the typeof check, so the invariant must throw.
    const check = () =>
      invariant(
        typeof partialStore === 'object' || typeof partialStore === 'function',
        message,
      );
    expect(check).toThrow(prefix + message);
  });
});
// getDiffProps: lists keys of the second object missing from the first.
describe('getDiffProps', () => {
  it('true', () => {
    const singleNew = getDiffProps({ a: '1' }, { b: 1 });
    expect(singleNew).toEqual(['b']);
    const overlap = getDiffProps({ a: '1', b: '2' }, { b: 1, c: 2 });
    expect(overlap).toEqual(['c']);
  });
  it('false', () => {
    const noneNew = getDiffProps({ a: '1', c: '2', b: '22' }, { b: 1, c: '22' });
    expect(noneNew).not.toEqual(['b']);
  });
});
|
<filename>src/main/java/evilcraft/api/render/IMultiRenderPassBlock.java
package evilcraft.api.render;
import net.minecraft.util.IIcon;
import net.minecraft.world.IBlockAccess;
/**
 * Interface for blocks that render in multiple render passes, use together
 * with {@link ConnectableIcon} (e.g. for connected textures).
 * @author rubensworks
 * @see ConnectableIcon
 */
public interface IMultiRenderPassBlock {

    /**
     * Get the icon for the given parameters.
     * @param side The side to render for.
     * @param meta The metadata of the block that will be rendered.
     * @param renderPass The renderpass.
     * @return The icon.
     */
    public IIcon getIcon(int side, int meta, int renderPass);

    /**
     * Get the amount of required render passes.
     * @return Required render passes.
     */
    public int getRenderPasses();

    /**
     * Set the current pass to render at.
     * @param pass The new render pass.
     */
    public void setRenderPass(int pass);

    /**
     * If the block should be rendered in this pass.
     * @param pass The pass to check.
     * @return If it should be rendered.
     */
    public boolean shouldRender(int pass);

    /**
     * Set the {@link CustomRenderBlocks} instance.
     * @param renderer The {@link CustomRenderBlocks} instance.
     */
    public void setRenderBlocks(CustomRenderBlocks renderer);

    /**
     * Get the {@link CustomRenderBlocks} instance.
     * @return The {@link CustomRenderBlocks} instance.
     */
    public CustomRenderBlocks getRenderBlocks();

    /**
     * Update the tile entity (for updating the icon) at the given coordinates.
     * @param world The world.
     * @param x X coordinate.
     * @param y Y coordinate.
     * @param z Z coordinate.
     */
    public void updateTileEntity(IBlockAccess world, int x, int y, int z);

    /**
     * Define whether or not the current rendering is for an inventory block.
     * @param isInventoryBlock True when rendering the block in an inventory.
     */
    public void setInventoryBlock(boolean isInventoryBlock);

}
|
<filename>app/view1/view1.js<gh_stars>0
'use strict';

angular.module('myApp.view1', ['ngRoute'])
  .config(['$routeProvider', function ($routeProvider) {
    $routeProvider.when('/view1', {
      templateUrl: 'view1/view1.html',
      controller: 'View1Ctrl',
      activetab: 'view1'
    });
  }]).run(function ($rootScope, $route) {
    // Expose the active route so views can highlight the current tab.
    $rootScope.$route = $route;
  }).controller('View1Ctrl', function ($scope, fileListService) {
    // Metadata and content of the currently selected file, bound to the view.
    $scope.fileData = {
      'fileContent': '',
      'fileSize': 0,
      'fileName': '',
      'uploadTime': ''
    };
    // Reads the selected plain-text file and publishes its content/metadata.
    $scope.showFileContent = function () {
      const input = document.getElementById("fileInput");
      const file = input.files[0];
      if (file && file.type === 'text/plain') {
        const fr = new FileReader();
        fr.readAsText(file, "UTF-8");
        fr.onload = function (evt) {
          // Security fix: file content is untrusted input, so never assign it
          // to innerHTML (XSS); textContent renders it inertly.
          // NOTE(review): writing into the <input> element itself looks odd -
          // presumably a display element was intended; confirm the target id.
          input.textContent = evt.target.result;
          // Consistency fix: use evt.target.result throughout (fr.result is
          // the same reader, but mixing the two invited divergence).
          $scope.fileData.fileContent = evt.target.result;
          $scope.fileData.fileName = file.name;
          $scope.fileData.fileSize = file.size;
          $scope.fileData.uploadTime = new Date();
          // Callback runs outside Angular's digest, so apply manually.
          $scope.$apply();
          fileListService.updateFileList($scope.fileData);
        };
        fr.onerror = function (evt) {
          input.textContent = "error";
          // Bug fix: the error path wrote to $scope.fileContent, which the
          // success path (and the view, via fileData) never uses.
          $scope.fileData.fileContent = "error";
        };
      } else {
        alert('Only plain text files are accepted');
      }
    };
  }).directive('fileUpload', function () {
    // Attribute directive: evaluates its expression to a change handler and
    // wires it to the element's 'change' event.
    return {
      restrict: 'A',
      link: function (scope, element, attrs) {
        let onChangeHandler = scope.$eval(attrs.fileUpload);
        element.bind('change', onChangeHandler);
      }
    };
  });
#!/bin/bash -e
# Register the Qtum apt repository inside the image being built:
# copy the sources.list entry into the target rootfs, trust the signing key,
# then refresh the package lists from inside the chroot.
install -m 644 files/qtum.list "${ROOTFS_DIR}/etc/apt/sources.list.d/"
on_chroot apt-key add - < files/qtum.gpg.key
# ca-certificates is installed before `apt-get update` - presumably so the
# newly added HTTPS repository can be fetched; this assumes the base image's
# package lists are fresh enough for that install. TODO confirm.
on_chroot << EOF
apt install ca-certificates -y
apt-get update
EOF
package stroeerCore
import (
"encoding/json"
"fmt"
"github.com/eugene-fedorenko/prebid-server/config"
"github.com/mxmCherry/openrtb"
"github.com/eugene-fedorenko/prebid-server/adapters"
"github.com/eugene-fedorenko/prebid-server/errortypes"
"github.com/eugene-fedorenko/prebid-server/openrtb_ext"
"net/http"
)
// StroeerCoreBidder is the StroeerCore adapter. Url is the default bid
// endpoint, used when no impression-level endpoint override is supplied.
type StroeerCoreBidder struct {
	Url string `json:"url"`
}

// StroeerRootResponse is the top-level JSON body returned by the StroeerCore
// bid server.
type StroeerRootResponse struct {
	Bids []StroeerBidResponse `json:"bids"`
}

// StroeerBidResponse is a single bid from the StroeerCore server. BidId is
// mapped to openrtb.Bid.ImpID when the response is converted (see MakeBids).
type StroeerBidResponse struct {
	Id     string  `json:"id"`
	BidId  string  `json:"bidId"`
	Cpm    float64 `json:"cpm"`
	Width  uint64  `json:"width"`
	Height uint64  `json:"height"`
	Ad     string  `json:"ad"`
	CrId   string  `json:"crid"`
}
// MakeBids converts the StroeerCore server response into OpenRTB bids.
// Every bid is reported as a banner bid; the response currency is EUR.
func (a *StroeerCoreBidder) MakeBids(internalRequest *openrtb.BidRequest, externalRequest *adapters.RequestData, response *adapters.ResponseData) (*adapters.BidderResponse, []error) {
	if response.StatusCode != http.StatusOK {
		statusErr := &errortypes.BadServerResponse{
			Message: fmt.Sprintf("Unexpected http status code: %d.", response.StatusCode),
		}
		return nil, []error{statusErr}
	}

	var serverResponse StroeerRootResponse
	if err := json.Unmarshal(response.Body, &serverResponse); err != nil {
		return nil, []error{err}
	}

	result := adapters.NewBidderResponseWithBidsCapacity(len(serverResponse.Bids))
	result.Currency = "EUR"

	for _, serverBid := range serverResponse.Bids {
		// A fresh Bid per iteration so the pointer below stays valid.
		converted := openrtb.Bid{
			ID:    serverBid.Id,
			ImpID: serverBid.BidId,
			W:     serverBid.Width,
			H:     serverBid.Height,
			Price: serverBid.Cpm,
			AdM:   serverBid.Ad,
			CrID:  serverBid.CrId,
		}
		result.Bids = append(result.Bids, &adapters.TypedBid{
			Bid:     &converted,
			BidType: openrtb_ext.BidTypeBanner,
		})
	}

	return result, nil
}
// MakeRequests builds a single POST request to the StroeerCore endpoint from
// the incoming OpenRTB request.
//
// Note: internalRequest is mutated in place - each impression's TagID is set
// from its bidder ext's sid, and when device geo is present Geo.Type is forced
// to 1. The endpoint is taken from the first impression ext that specifies
// one; otherwise the adapter's configured URL is used. Per-impression
// unmarshal errors are collected and returned alongside the request.
func (b *StroeerCoreBidder) MakeRequests(internalRequest *openrtb.BidRequest, reqInfo *adapters.ExtraRequestInfo) ([]*adapters.RequestData, []error) {
	errors := make([]error, 0, len(internalRequest.Imp))
	endpoint := ""
	for idx := range internalRequest.Imp {
		// Index into the slice (not range value) so the TagID write sticks.
		imp := &internalRequest.Imp[idx]
		var bidderExt adapters.ExtImpBidder
		if err := json.Unmarshal(imp.Ext, &bidderExt); err != nil {
			errors = append(errors, err)
			continue
		}
		var stroeerExt openrtb_ext.ExtImpStroeercore
		if err := json.Unmarshal(bidderExt.Bidder, &stroeerExt); err != nil {
			errors = append(errors, err)
			continue
		}
		imp.TagID = stroeerExt.Sid
		// First impression-level endpoint override wins.
		if endpoint == "" {
			endpoint = stroeerExt.Endpoint
		}
	}
	if len(endpoint) == 0 {
		endpoint = b.Url
	}
	if internalRequest.Device != nil {
		if internalRequest.Device.Geo != nil {
			// NOTE(review): forced unconditionally to location type 1 -
			// confirm this is the intended geo source classification.
			internalRequest.Device.Geo.Type = openrtb.LocationType(1)
		}
	}
	reqJSON, err := json.Marshal(*internalRequest)
	if err != nil {
		errors = append(errors, err)
		return nil, errors
	}
	headers := http.Header{}
	headers.Add("Content-Type", "application/json;charset=utf-8")
	headers.Add("Accept", "application/json")
	return []*adapters.RequestData{{
		Method:  "POST",
		Uri:     endpoint,
		Body:    reqJSON,
		Headers: headers,
	}}, errors
}
// Builder builds a new instance of the StroeerCore adapter for the given
// bidder, using the endpoint from the supplied adapter config.
func Builder(bidderName openrtb_ext.BidderName, config config.Adapter) (adapters.Bidder, error) {
	return &StroeerCoreBidder{Url: config.Endpoint}, nil
}
|
package com.hapramp.steem;
import com.google.gson.annotations.Expose;
import com.google.gson.annotations.SerializedName;
import com.hapramp.models.CommunityModel;
import java.util.List;
/**
 * Gson wrapper for a JSON payload whose "communities" key holds the list of
 * community models.
 */
public class CommunityListWrapper {
    // Deserialized from the JSON key "communities".
    @Expose
    @SerializedName("communities")
    List<CommunityModel> communityModels;

    public CommunityListWrapper(List<CommunityModel> communityModels) {
        this.communityModels = communityModels;
    }

    public List<CommunityModel> getCommunityModels() {
        return communityModels;
    }

    public void setCommunityModels(List<CommunityModel> communityModels) {
        this.communityModels = communityModels;
    }
}
|
<reponame>andersonzup/orange-talents-07-template-ecommerce
package br.com.zup.mercadolivre.usuario;
import org.springframework.data.jpa.repository.JpaRepository;
import org.springframework.stereotype.Repository;
import java.util.Optional;
/**
 * Spring Data JPA repository for {@code Usuario} entities.
 * NOTE(review): the name looks like a typo for "UsuarioRepository", but
 * renaming would break existing injection points - confirm before changing.
 */
@Repository
public interface UsuarioRepositori extends JpaRepository<Usuario, Long> {
    // Looks up a user by e-mail; empty when no user has that address.
    Optional<Usuario> findByEmail(String email);
}
|
<reponame>horowitz2009/BBGun<filename>src/com/horowitz/bigbusiness/model/Contract.java
package com.horowitz.bigbusiness.model;
import com.horowitz.mickey.Pixel;
/**
 * A production contract: which product is produced, in which building,
 * when production started, and its on-screen coordinates.
 */
public class Contract {
    // Product produced under this contract.
    private Product _product;
    // Production start time - presumably epoch millis; confirm at call sites.
    private long _start;
    // Building where the product is produced.
    private Building _building;
    // Screen position associated with this contract.
    private Pixel _coordinates;

    public long getStart() {
        return _start;
    }

    public void setStart(long start) {
        _start = start;
    }

    public Building getBuilding() {
        return _building;
    }

    public void setBuilding(Building building) {
        _building = building;
    }

    public Pixel getCoordinates() {
        return _coordinates;
    }

    public void setCoordinates(Pixel coordinates) {
        _coordinates = coordinates;
    }

    public Product getProduct() {
        return _product;
    }

    public void setProduct(Product product) {
        _product = product;
    }
}
|
<filename>Select.hpp<gh_stars>1-10
/*
* Select.hpp
*
*
* Copyright (C) 2019 <NAME> <<EMAIL>>
*
*/
#ifndef SELECT_HPP
#define SELECT_HPP

// Compile-time type selection: Select<c, T, F>::Result is T when c is true,
// F otherwise (equivalent to std::conditional, without <type_traits>).
template <bool c, class T, class F>
struct Select { typedef T Result; };

// Partial specialization for the false case.
template <class T, class F>
struct Select<false, T, F> { typedef F Result; };

#endif /* SELECT_HPP */
|
package simulation;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.StringTokenizer;
/**
 *
 * @author exponential-e
 * Baekjoon problem 1952 ("Snail 2"): walk an N x M grid in a clockwise
 * inward spiral and print how many times the walking direction changes.
 *
 * @see https://www.acmicpc.net/problem/1952/
 *
 */
public class Boj1952 {
    // Clockwise order: right, down, left, up.
    private static final int[][] DIRECTIONS = {{0, 1}, {1, 0}, {0, -1}, {-1, 0}};
    private static final int ROW = 0, COL = 1;
    private static int N, M;
    // Cells already covered by the spiral walk.
    private static boolean[][] visit;

    public static void main(String[] args) throws Exception{
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        StringTokenizer st = new StringTokenizer(br.readLine());

        N = Integer.parseInt(st.nextToken());
        M = Integer.parseInt(st.nextToken());
        visit = new boolean[N][M];

        System.out.println(search());
    }

    /**
     * Walks the grid in a spiral from (0,0), turning clockwise whenever the
     * next cell would leave the grid or is already visited, and counts turns.
     * Returns count - 1: the turn forced at the final cell (which only leads
     * back into visited territory) is not a real direction change - TODO
     * confirm against the accepted edge cases (e.g. single row/column).
     */
    private static int search(){
        int row = 0, col = 0, dir = 0;
        int count = 0;

        while(true) {
            if(visit[row][col]) break;
            visit[row][col] = true;

            int nextRow = row + DIRECTIONS[dir][ROW];
            int nextCol = col + DIRECTIONS[dir][COL];

            if(isRange(nextRow, nextCol) || visit[nextRow][nextCol]) {
                dir = (dir + 1) % 4;
                nextRow = row + DIRECTIONS[dir][ROW];
                nextCol = col + DIRECTIONS[dir][COL];
                count++;
            }
            row = nextRow;
            col = nextCol;
        }
        return count - 1;
    }

    // True when (row, col) lies OUTSIDE the grid bounds.
    private static boolean isRange(int row, int col){
        return row < 0 || row >= N || col < 0 || col >= M;
    }
}
|
#!/bin/bash
# Split a single-file FLAC album image into per-track FLACs using its cue
# sheet, tag the tracks from the cue sheet, then rename them after the cue
# sheet's TITLE entries.
#
# Usage: split.sh album.flac   (expects album.cue next to the flac)
#
# Fixed: all parameter expansions are now quoted so paths and titles that
# contain spaces no longer break word splitting.
FLACFILE="$1"
CUEFILE="${FLACFILE%.*}.cue"
cuebreakpoints "$CUEFILE" | shnsplit -o flac -O always "$FLACFILE"
cuetag.sh "$CUEFILE" split-track*.flac
# Read titles from .cue-file; whitespace inside titles is temporarily encoded
# as ___ so the unquoted for-loop below still sees one entry per title.
TRACKLIST=`grep TITLE "$CUEFILE" | sed -e 's/.*TITLE.*\"\(.*\)\"/\1/g' | sed -e 's/[[:space:]]/___/g'`
COUNT=0
TRACKNUM=0
for T in $TRACKLIST
do
  let COUNT=COUNT+1
  #Skip the first title since it is (or should be) the album title
  if [ "$COUNT" -gt 1 ]; then
    let TRACKNUM=COUNT-1
    TMP=`printf "%02d" "$TRACKNUM"`
    TRACKTITLE=`echo "$T" | sed -e 's/___/\ /g'`
    mv "split-track$TMP.flac" "$TMP $TRACKTITLE.flac"
  fi
done
|
<gh_stars>1000+
package middleware
import (
"github.com/keptn/keptn/api/models"
"os"
"reflect"
"testing"
)
// TestValidateToken verifies BasicTokenValidator.ValidateToken against the
// SECRET_TOKEN environment variable: a matching token yields that token as
// the Principal; a mismatch yields an error.
func TestValidateToken(t *testing.T) {
	type args struct {
		token string
	}
	tests := []struct {
		name            string
		args            args
		want            models.Principal
		configuredToken string
		wantErr         bool
	}{
		{
			name: "token valid",
			args: args{
				token: "my-token",
			},
			configuredToken: "my-token",
			want:            models.Principal("my-token"),
			wantErr:         false,
		},
		{
			name: "token invalid",
			args: args{
				token: "my-invalid-token",
			},
			configuredToken: "my-token",
			wantErr:         true,
		},
	}
	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			// NOTE(review): sets the process-wide SECRET_TOKEN without
			// restoring it afterwards; fine while no other test reads it.
			_ = os.Setenv("SECRET_TOKEN", tt.configuredToken)
			tv := &BasicTokenValidator{}
			got, err := tv.ValidateToken(tt.args.token)
			if (err != nil) != tt.wantErr {
				t.Errorf("ValidateToken() error = %v, wantErr %v", err, tt.wantErr)
				return
			}
			// Only compare the principal when the case specifies one
			// (the error case leaves want as the zero value).
			if tt.want != "" {
				if !reflect.DeepEqual(*got, tt.want) {
					t.Errorf("ValidateToken() got = %v, want %v", got, tt.want)
				}
			}
		})
	}
}
|
#include <boost/program_options.hpp>
#include <filesystem> // C++17
#include <iostream>
#include <opencv2/opencv.hpp>
#include "cascade_model.cpp"
using namespace boost::program_options;
using namespace cv;
namespace po = boost::program_options;
namespace fs = std::filesystem;
// Entry point: detect faces in the input image with a Haar cascade and write
// one cropped JPEG per detected face (scaled by --zoom) into --output.
// Returns 1 for help, -1 on any I/O or option error, 0 on success.
int main(int argc, char **argv) {
  std::vector<std::string> argList(argv, argv + argc);
  po::variables_map vm;
  // Get options
  po::options_description desc("Options for helium");
  desc.add_options()("help", "produce help message")(
      "input", po::value<std::string>()->required(), "input file")(
      "output", po::value<std::string>()->required()->default_value("out"),
      "output folder")(
      "model", po::value<std::string>()->default_value("internal"),
      "model file ('internal' to use haarcascade_frontalface_alt2)")(
      "zoom", po::value<float>()->default_value(2),
      "zoom factor (0.5 = half size, 2 = double size)")(
      "min-size", po::value<float>()->default_value(0.05, "0.05"),
      "minimum face size (0.05 = 5% of image size, 0.5 = 50% of image "
      "size)");
  po::store(po::parse_command_line(argc, argv, desc), vm);
  // Return help message (checked before notify() so --help works even when
  // required options are missing)
  if (vm.count("help") || argc == 1) {
    std::cout << desc << std::endl;
    return 1;
  }
  // Raise errors about incorrect options
  po::notify(vm);
  // Cast variables from variable map
  const std::string inputPath = vm["input"].as<std::string>();
  const std::string modelPath = vm["model"].as<std::string>();
  const std::string outputPath = vm["output"].as<std::string>();
  const float zoomLevel = vm["zoom"].as<float>();
  const float minimumSize = vm["min-size"].as<float>();
  // Error if input path is not a file
  if (!fs::is_regular_file(inputPath)) {
    std::cerr << inputPath << " is not a file." << std::endl;
    return -1;
  }
  // Ensure the output path exists or can be created
  if (!fs::is_directory(outputPath)) {
    if (!fs::create_directory(outputPath)) {
      std::cerr << outputPath << " is not a directory and could not be created."
                << std::endl;
      return -1;
    }
  }
  // Load the input image
  cv::Mat inputImage = cv::imread(inputPath);
  // Error if image is empty
  if (inputImage.empty()) {
    std::cerr << inputPath << " couldn't be read" << std::endl;
    return -1;
  }
  // Initialize cascade classifier
  CascadeClassifier cascade;
  if (modelPath == "internal") {
    // Open in-memory model. NOTE: this local 'fs' shadows the
    // std::filesystem namespace alias within this block only.
    cv::FileStorage fs;
    fs.open(std::string(cascade_model),
            FileStorage::READ | FileStorage::MEMORY);
    // Load model into cascade classifier
    cascade.read(fs.getFirstTopLevelNode());
  } else {
    // Error if model path is not a file
    if (!fs::is_regular_file(modelPath)) {
      std::cerr << modelPath << " does not exist." << std::endl;
      return -1;
    }
    // Load model into cascade classifier
    cascade.load(modelPath);
  }
  // The minimum size for detected faces is based on the larger of the
  // image's sides multiplied by a ratio
  const int greatestImageSideLength =
      max(inputImage.size().height, inputImage.size().width);
  const int minimumFaceLength = (greatestImageSideLength * minimumSize);
  cv::Size minimumFaceSize = cv::Size(minimumFaceLength, minimumFaceLength);
  // Use the cascade model to detect faces as Rects in detectedFaces
  std::vector<Rect> detectedFaces;
  cascade.detectMultiScale(inputImage, detectedFaces, 1.1, 3, 0,
                           minimumFaceSize);
  // Loop over every detected face
  int imageId = 1;
  for (const Rect &faceArea : detectedFaces) {
    // Set the crop rectangle to the detected face before performing zoom
    // transformation
    Rect cropRectangle = faceArea;
    // Apply the zoom level, keeping the crop centered on the detected face
    cropRectangle.width = faceArea.width * zoomLevel;
    cropRectangle.height = faceArea.height * zoomLevel;
    cropRectangle.x += ((faceArea.width - cropRectangle.width) / 2);
    cropRectangle.y += ((faceArea.height - cropRectangle.height) / 2);
    // If the new crop rectangle would try to read nonexistent pixels, skip
    if ((cropRectangle.x < 0) || (cropRectangle.y < 0) ||
        ((cropRectangle.x + cropRectangle.width) > inputImage.size().width) ||
        ((cropRectangle.y + cropRectangle.height) > inputImage.size().height)) {
      std::cerr << "Face " << faceArea << " could not be saved (zoom too large)"
                << std::endl;
      continue;
    }
    // Write a cropped version of the image, named 1.jpg, 2.jpg, ...
    cv::imwrite((outputPath + "/" + std::to_string(imageId) + ".jpg"),
                inputImage(cropRectangle));
    imageId++;
  }
  return 0;
}
// styles
import { Container, Avatar } from './styles';
interface Props {
nickName: string;
isBot?: boolean;
}
export function UserRow({ nickName, isBot }: Props){
return(
<Container>
<Avatar className={isBot ? 'bot' : ''} />
<strong>{nickName}</strong>
{isBot && <span>bot</span>}
</Container>
);
} |
package com.atjl.email.dto;
import com.atjl.email.api.MailException;
import org.apache.commons.mail.Email;
import org.apache.commons.mail.MultiPartEmail;
import javax.activation.DataHandler;
import javax.mail.MessagingException;
import javax.mail.internet.MimeBodyPart;
import javax.mail.internet.MimeMultipart;
import java.net.URL;
/**
* mail
*/
public class MailDto {//implements Serializable {
private static final String charset = "UTF-8";
private static final long serialVersionUID = 1L;
public static final int SIMPLE_MAIL = 0;
public static final int MUTIL_MAIL = 1;
private int mailType = SIMPLE_MAIL;
/**
* 发件方式 - 普通发送
*/
public static final int TO = 0;
/**
* 发件方式 - 抄送
*/
public static final int CC = 1;
/**
* 发件方式 - 抄送人邮件地址
*/
private String addChaoSong = null;
/**
* 发件方式 - 密件抄送
*/
static final int BCC = 2;
/**
* 邮件内容
*/
private String mailContent = null;
/**
* 邮件相关信息 - SMTP 服务器
*/
private String mailSMTPHost = null;
private String mailSMTPHostPort = "25";
/**
* 邮件相关信息 - 邮件用户名
*/
private String mailUser = null;
/**
* 邮件相关信息 - 密码
*/
private String mailPassword = <PASSWORD>;
/**
* 邮件相关信息 - 发件人邮件地址
*/
private String mailFromAddress = null;
/**
* 邮件相关信息 - 邮件主题
*/
private String mailSubject = "";
/**
* 邮件相关信息 - 邮件发送地址
*/
private String[] mailTOAddress = null;
/**
* 邮件相关信息 - 邮件抄送地址
*/
private String[] mailCCAddress = null;
/**
* 邮件相关信息 - 邮件密件抄送地址
*/
private String[] mailBCCAddress = null;
/**
* 邮件相关信息 - 邮件正文(复合结构)
*/
private MimeMultipart mailBody = null;
private MultiPartEmail mailclass = null;
private boolean debug = false;
private boolean result = false;
/**
* 邮件相关信息 - 邮件發送方顯示名
*/
private String mailShowName = "";
public boolean isDebug() {
return debug;
}
public void setDebug(boolean debug) {
this.debug = debug;
}
public MailDto() {
mailBody = new MimeMultipart();
}
/**
* 设置 SMTP 服务器
*
* @param strSMTPHost 邮件服务器名称或 IP
* @param strUser 邮件用户名
* @param strPassword 密码
*/
public void setSMTPHost(String strSMTPHost, String port, String strUser,
String strPassword) {
this.mailSMTPHost = strSMTPHost;
this.mailUser = strUser;
this.mailPassword = <PASSWORD>;
if (port != null && !"".equals(port)) {
this.mailSMTPHostPort = port;
}
}
/**
* 设置邮件发送地址
*
* @param strFromAddress 邮件发送地址
*/
public void setFromAddress(String strFromAddress) {
this.mailFromAddress = strFromAddress;
}
/**
* 设置邮件发送地址
*
* @param strFromAddress 邮件发送地址
* @param mailShowName 邮件顯示名
*/
public void setFromAddress(String strFromAddress, String mailShowName) {
this.mailFromAddress = strFromAddress;
this.mailShowName = mailShowName;
}
/**
* 设置邮件目的地址
*
* @param strAddress 邮件目的地址列表, 不同的地址可用;号分隔
* @param iAddressType 邮件发送方式 (TO 0, CC 1, BCC 2) 常量已在本类定义
*/
public void setAddress(String strAddress, int iAddressType) {
String[] mailAddr = strAddress.split(",");
switch (iAddressType) {
case MailDto.TO:
mailTOAddress = mailAddr;
break;
case MailDto.CC:
mailCCAddress = mailAddr;
break;
case MailDto.BCC:
mailBCCAddress = mailAddr;
break;
default:
mailTOAddress = mailAddr;
break;
}
}
/**
* 设置邮件主题
*
* @param strSubject 邮件主题
*/
public void setSubject(String strSubject) {
this.mailSubject = strSubject;
}
/**
* 设置邮件文本正文
*
* @param strTextBody 邮件文本正文
* @throws MessagingException
*/
public void setTextBody(String strTextBody) throws MessagingException {
MimeBodyPart mimebodypart = new MimeBodyPart();
mimebodypart.setText(strTextBody, charset);
mailBody.addBodyPart(mimebodypart);
mailContent = strTextBody;
}
/**
* 设置邮件超文本正文
*
* @param strHtmlBody 邮件超文本正文
* @throws MessagingException
*/
public void setHtmlBody(String strHtmlBody) throws MessagingException {
MimeBodyPart mimebodypart = new MimeBodyPart();
mimebodypart.setDataHandler(new DataHandler(strHtmlBody,
"text/html;charset=UTF-8"));
mailBody.addBodyPart(mimebodypart);
mailContent = strHtmlBody;
}
/**
* 设置邮件正文外部链接 URL, 信体中将包含链接所指向的内容
*
* @param strURLAttachment 邮件正文外部链接 URL
*/
public void setURLAttachment(String strURLAttachment) {
MimeBodyPart mimebodypart = new MimeBodyPart();
try {
mimebodypart.setDataHandler(new DataHandler(new URL(strURLAttachment)));
mailBody.addBodyPart(mimebodypart);
} catch (Exception e) {
throw new MailException(e);
}
this.mailType = MUTIL_MAIL;
}
/**
* 设置邮件附件
*
* @param strFileAttachment 文件的全路径
*
public void setFileAttachment(String showName, String strFileAttachment) {
File path = new File(strFileAttachment);
if (!path.exists() || path.isDirectory()) {
throw new FileException("文件不存在!");
}
try {
MimeBodyPart mimebodypart = new MimeBodyPart();
mimebodypart.setDataHandler(new DataHandler(new FileDataSource(
strFileAttachment)));
mimebodypart.setFileName(MimeUtility.encodeText(showName, charset, charset));
mailBody.addBodyPart(mimebodypart);
} catch (Exception e) {
throw new MailException(e);
}
this.mailType = MUTIL_MAIL;
}*/
/**
* 设置邮件图片附件
*
* @param strFileAttachment 文件的全路径
*
public void setImgFileAttachment(String strFileAttachment, String fileNum) {
File path = new File(strFileAttachment);
if (!path.exists() || path.isDirectory()) {
throw new FileException("文件不存在!");
}
try {
String strFileName = path.getName();
MimeBodyPart mimebodypart = new MimeBodyPart();
mimebodypart.setDataHandler(new DataHandler(new FileDataSource(
strFileAttachment)));
mimebodypart.setFileName(MimeUtility.encodeText(strFileName));
mailBody.setSubType("related");
mimebodypart.setHeader("Content-ID", "IMG" + fileNum);
mailBody.addBodyPart(mimebodypart);
} catch (Exception e) {
throw new MailException(e);
}
this.mailType = MUTIL_MAIL;
}*/
public String getMailContent() {
return mailContent;
}
public void setMailContent(String mailContent) {
this.mailContent = mailContent;
}
public Email getMailclass() {
return mailclass;
}
public void setMailclass(MultiPartEmail mailclass) {
this.mailclass = mailclass;
}
public int getMailType() {
return mailType;
}
public void setMailType(int mailType) {
this.mailType = mailType;
}
public String getMailShowName() {
return mailShowName;
}
public void setMailShowName(String mailShowName) {
this.mailShowName = mailShowName;
}
public String getAddChaoSong() {
return addChaoSong;
}
public void setAddChaoSong(String addChaoSong) {
this.addChaoSong = addChaoSong;
}
public String getMailSMTPHost() {
return mailSMTPHost;
}
public void setMailSMTPHost(String mailSMTPHost) {
this.mailSMTPHost = mailSMTPHost;
}
public String getMailSMTPHostPort() {
return mailSMTPHostPort;
}
public void setMailSMTPHostPort(String mailSMTPHostPort) {
this.mailSMTPHostPort = mailSMTPHostPort;
}
public String getMailUser() {
return mailUser;
}
public void setMailUser(String mailUser) {
this.mailUser = mailUser;
}
public String getMailPassword() {
return mailPassword;
}
public void setMailPassword(String mailPassword) {
this.mailPassword = <PASSWORD>;
}
public String getMailFromAddress() {
return mailFromAddress;
}
public void setMailFromAddress(String mailFromAddress) {
this.mailFromAddress = mailFromAddress;
}
public String getMailSubject() {
return mailSubject;
}
public void setMailSubject(String mailSubject) {
this.mailSubject = mailSubject;
}
public String[] getMailTOAddress() {
return mailTOAddress;
}
public void setMailTOAddress(String[] mailTOAddress) {
this.mailTOAddress = mailTOAddress;
}
public String[] getMailCCAddress() {
return mailCCAddress;
}
public void setMailCCAddress(String[] mailCCAddress) {
this.mailCCAddress = mailCCAddress;
}
public String[] getMailBCCAddress() {
return mailBCCAddress;
}
public void setMailBCCAddress(String[] mailBCCAddress) {
this.mailBCCAddress = mailBCCAddress;
}
public MimeMultipart getMailBody() {
return mailBody;
}
public void setMailBody(MimeMultipart mailBody) {
this.mailBody = mailBody;
}
public boolean isResult() {
return result;
}
public void setResult(boolean result) {
this.result = result;
}
public static int getSimpleMail() {
return SIMPLE_MAIL;
}
public static int getMutilMail() {
return MUTIL_MAIL;
}
public static int getTO() {
return TO;
}
public static int getCC() {
return CC;
}
public static int getBCC() {
return BCC;
}
}
|
<reponame>anotaai/anotaai
package br.com.alinesolutions.anotaai.rest;
import javax.annotation.security.PermitAll;
import javax.ejb.EJB;
import javax.ejb.Stateless;
import javax.persistence.EntityManager;
import javax.persistence.PersistenceContext;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import br.com.alinesolutions.anotaai.infra.Constant;
import br.com.alinesolutions.anotaai.metadata.model.domain.Menu;
import br.com.alinesolutions.anotaai.service.AppService;
/**
 * REST endpoint exposing application menus. All methods are open to any
 * caller (PermitAll).
 */
@PermitAll
@Stateless
@Path("/menu")
public class MenuEndpoint {
    // JPA entity manager for the application's persistence unit.
    // NOTE(review): appears unused in this class - confirm before removing.
    @PersistenceContext(unitName = Constant.App.UNIT_NAME)
    private EntityManager em;

    @EJB
    private AppService appService;

    /**
     * GET /menu/principal - returns the items of the main (PRINCIPAL) menu
     * as JSON.
     */
    @GET()
    @Path("/principal")
    @Produces(MediaType.APPLICATION_JSON)
    public Response principal() throws Exception {
        return Response.ok(appService.getItensMenu(Menu.PRINCIPAL)).build();
    }
}
#!/bin/bash
# find all the examples with changed code
# run the tests in that directory
set -ex
# Collect the unique repo folders (relative to the root) that both contain a
# GPU executor (Dockerfile.gpu) and had files touched in $CHANGED_FILES,
# then publish them as a JSON array for the GitHub Actions job matrix.
gpu_folders=()
root=`pwd`
for changed_file in $CHANGED_FILES; do
  file_base_dir=$(dirname $changed_file)
  # Test folder changes: attribute them to the parent example folder.
  if [ $(basename $file_base_dir) = "tests" ]; then
    file_base_dir=$(dirname "$file_base_dir")
  fi
  # Changes in subfolder of test folder (e.g. unit_test/integration)
  if [ $(basename $(dirname "$file_base_dir")) = "tests" ]; then
    file_base_dir=$(dirname $(dirname "$file_base_dir"))
  fi
  # Record each qualifying folder once; skip the repo root itself.
  if [[ ! " ${gpu_folders[@]} " =~ " ${file_base_dir} " ]]; then
    if [[ $file_base_dir != "." ]]; then
      if [[ -f "$file_base_dir/Dockerfile.gpu" ]]; then
        echo "GPU executor found in " $file_base_dir
        gpu_folders+=(${file_base_dir})
      fi
    fi
  fi
  # NOTE(review): nothing in this loop changes directory, so this cd looks
  # redundant - confirm before removing.
  cd $root
done
# Store gpu_folders as a compact JSON array in the step output "matrix".
# NOTE(review): the ::set-output workflow command is deprecated on newer
# GitHub Actions runners (use $GITHUB_OUTPUT) - confirm runner version.
output=$(jq --compact-output --null-input '$ARGS.positional' --args "${gpu_folders[@]}")
echo "::set-output name=matrix::${output}"
# Build the raisimPy bindings once per supported Python version, then build
# the examples/docs/MATLAB configuration with the raisim conda env's Python.
cd build
for PY_VERSION in 3.5 3.6 3.7 3.8 3.9
do
CXX=/usr/bin/clang++-11 CC=/usr/bin/clang-11 cmake .. -DCMAKE_BUILD_TYPE=RELEASE -DRAISIM_MATLAB=OFF -DRAISIM_PY=ON -DRAISIM_EXAMPLE=OFF -DRAISIM_DOC=OFF -DPYTHON_EXECUTABLE:FILEPATH=/usr/bin/python${PY_VERSION}
make -j
done
# Reconfigure: examples + docs + MATLAB on, Python bindings off.
CXX=/usr/bin/clang++-11 CC=/usr/bin/clang-11 cmake .. -DCMAKE_BUILD_TYPE=RELEASE -DRAISIM_EXAMPLE=ON -DRAISIM_DOC=ON -DRAISIM_PY=OFF -DRAISIM_MATLAB=ON -DPYTHON_EXECUTABLE:FILEPATH=/home/jemin/anaconda3/envs/raisim/bin/python
make -j
# NOTE(review): this second make right after the one above looks redundant -
# possibly a retry for flaky parallel builds; confirm before removing.
make -j
-- Schema for the ticketing app: users, events, sheets (seats), reservations
-- and administrators. Row-count comments and the queries each table serves
-- are kept from the original for index-tuning context.
-- 5000 records
-- SELECT id, nickname FROM users WHERE id = ?
-- SELECT * FROM users WHERE login_name = ?'
-- INSERT INTO users (login_name, pass_hash, nickname) VALUES (?, SHA2(?, 256), ?)
-- SELECT id, nickname FROM users WHERE id = ?
CREATE TABLE IF NOT EXISTS users (
    id          INTEGER UNSIGNED PRIMARY KEY AUTO_INCREMENT,
    nickname    VARCHAR(128) NOT NULL,
    login_name  VARCHAR(128) NOT NULL,
    pass_hash   VARCHAR(128) NOT NULL,
    UNIQUE KEY login_name_uniq (login_name)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
-- 18 records
-- SELECT * FROM events ORDER BY id ASC
-- SELECT * FROM events WHERE id = ?
-- INSERT INTO events (title, public_fg, closed_fg, price) VALUES (?, ?, 0, ?)
-- UPDATE events SET public_fg = ?, closed_fg = ? WHERE id = ?
CREATE TABLE IF NOT EXISTS events (
    id          INTEGER UNSIGNED PRIMARY KEY AUTO_INCREMENT,
    title       VARCHAR(128) NOT NULL,
    public_fg   TINYINT(1) NOT NULL,
    closed_fg   TINYINT(1) NOT NULL,
    price       INTEGER UNSIGNED NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
-- 1000 records
-- SELECT COUNT(*) AS total_sheets FROM sheets WHERE `rank` = ?'
--- SELECT * FROM sheets WHERE id NOT IN (SELECT sheet_id FROM reservations WHERE event_id = ? AND canceled_at IS NULL FOR UPDATE) AND `rank` = ? ORDER BY RAND() LIMIT 1
-- SELECT * FROM sheets WHERE `rank` = ? AND num = ?
-- 'SELECT * FROM sheets ORDER BY `rank`, num'
CREATE TABLE IF NOT EXISTS sheets (
    id          INTEGER UNSIGNED PRIMARY KEY AUTO_INCREMENT,
    `rank`      VARCHAR(128) NOT NULL,
    num         INTEGER UNSIGNED NOT NULL,
    price       INTEGER UNSIGNED NOT NULL,
    UNIQUE KEY rank_num_uniq (`rank`, num)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
-- 194516 records
-- SELECT * FROM reservations WHERE event_id = ? AND sheet_id = ? AND canceled_at IS NULL GROUP BY event_id, sheet_id HAVING reserved_at = MIN(reserved_at)
-- SELECT r.*, s.rank AS sheet_rank, s.num AS sheet_num FROM reservations r INNER JOIN sheets s ON s.id = r.sheet_id WHERE r.user_id = ? ORDER BY IFNULL(r.canceled_at, r.reserved_at) DESC LIMIT 5
-- SELECT IFNULL(SUM(e.price + s.price), 0) AS total_price FROM reservations r INNER JOIN sheets s ON s.id = r.sheet_id INNER JOIN events e ON e.id = r.event_id WHERE r.user_id = ? AND r.canceled_at IS NULL
-- SELECT event_id FROM reservations WHERE user_id = ? GROUP BY event_id ORDER BY MAX(IFNULL(canceled_at, reserved_at)) DESC LIMIT 5
-- SELECT * FROM sheets WHERE id NOT IN (SELECT sheet_id FROM reservations WHERE event_id = ? AND canceled_at IS NULL FOR UPDATE) AND `rank` = ? ORDER BY RAND() LIMIT 1
-- SELECT * FROM reservations WHERE event_id = ? AND sheet_id = ? AND canceled_at IS NULL GROUP BY event_id HAVING reserved_at = MIN(reserved_at) FOR UPDATE
-- UPDATE reservations SET canceled_at = ? WHERE id = ?
-- SELECT r.*, s.rank AS sheet_rank, s.num AS sheet_num, s.price AS sheet_price, e.price AS event_price FROM reservations r INNER JOIN sheets s ON s.id = r.sheet_id INNER JOIN events e ON e.id = r.event_id WHERE r.event_id = ? ORDER BY reserved_at ASC FOR UPDATE
-- NOTE: the generated columns below use MariaDB's PERSISTENT keyword
-- (MySQL's equivalent is STORED); this schema targets MariaDB.
CREATE TABLE IF NOT EXISTS reservations (
    id          INTEGER UNSIGNED PRIMARY KEY AUTO_INCREMENT,
    event_id    INTEGER UNSIGNED NOT NULL,
    sheet_id    INTEGER UNSIGNED NOT NULL,
    user_id     INTEGER UNSIGNED NOT NULL,
    reserved_at DATETIME(6) NOT NULL,
    canceled_at DATETIME(6) DEFAULT NULL,
    last_updated_at DATETIME(6) AS (IFNULL(canceled_at, reserved_at)) PERSISTENT,
    not_canceled BOOLEAN AS (ISNULL(canceled_at)) PERSISTENT,
    KEY event_id_and_sheet_id_idx (event_id, sheet_id, not_canceled),
    KEY user_id_and_last_updated_at (user_id, last_updated_at),
    KEY user_id_and_event_id (user_id, event_id, last_updated_at),
    KEY user_id_and_not_canceled(user_id, not_canceled),
    KEY event_id_reserved_at(event_id, reserved_at)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
-- 105 records
-- SELECT id, nickname FROM administrators WHERE id = ?
-- SELECT * FROM administrators WHERE login_name = ?
CREATE TABLE IF NOT EXISTS administrators (
    id          INTEGER UNSIGNED PRIMARY KEY AUTO_INCREMENT,
    nickname    VARCHAR(128) NOT NULL,
    login_name  VARCHAR(128) NOT NULL,
    pass_hash   VARCHAR(128) NOT NULL,
    UNIQUE KEY login_name_uniq (login_name)
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
|
const express = require('express');
const router = express.Router();

/**
 * GET / — return every row of the `users` table as JSON.
 *
 * BUGFIX: the original declared `const query = "SELECT ..."`, shadowing the
 * database helper also named `query`, so `query(query)` invoked a string and
 * always threw a TypeError. The SQL string is now named `sql`.
 * NOTE(review): the `query` DB helper is not defined or imported in this file —
 * confirm it is provided by the surrounding application.
 */
router.get('/', async (req, res) => {
  try {
    const sql = 'SELECT * FROM users';
    const results = await query(sql);
    res.json(results);
  } catch (err) {
    console.error(err);
    // Do not leak internal error details (stack, SQL) to the client.
    res.status(500).json({ error: 'Internal server error' });
  }
});

module.exports = router;
/*
* Copyright (c) 2004-2009, University of Oslo
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* * Neither the name of the HISP project nor the names of its contributors may
* be used to endorse or promote products derived from this software without
* specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
package org.hisp.dhis.validation;
/**
* @author <NAME>
* @version $Id$
*/
public class ValidationCriteria
{
public static final int OPERATOR_LESS_THAN = -1;
public static final int OPERATOR_EQUAL_TO = 0;
public static final int OPERATOR_GREATER_THAN = 1;
// -------------------------------------------------------------------------
// Fields
// -------------------------------------------------------------------------
private int id;
private String name;
private String description;
private String property;
private int operator;
private Object value;
// -------------------------------------------------------------------------
// Constructor method
// -------------------------------------------------------------------------
public ValidationCriteria()
{
}
// -------------------------------------------------------------------------
// equals && hashCode
// -------------------------------------------------------------------------
@Override
public int hashCode()
{
final int prime = 31;
int result = 1;
result = prime * result + id;
return result;
}
@Override
public boolean equals( Object obj )
{
if ( this == obj )
return true;
if ( obj == null )
return false;
if ( getClass() != obj.getClass() )
return false;
ValidationCriteria other = (ValidationCriteria) obj;
if ( id != other.id )
return false;
return true;
}
// -------------------------------------------------------------------------
// Getters && Setters
// -------------------------------------------------------------------------
public int getId()
{
return id;
}
public String getName()
{
return name;
}
public void setName( String name )
{
this.name = name;
}
public void setId( int id )
{
this.id = id;
}
public String getDescription()
{
return description;
}
public void setDescription( String description )
{
this.description = description;
}
public String getProperty()
{
return property;
}
public void setProperty( String property )
{
this.property = property;
}
public int getOperator()
{
return operator;
}
public void setOperator( int operator )
{
this.operator = operator;
}
public Object getValue()
{
return value;
}
public void setValue( Object value )
{
this.value = value;
}
}
|
// Static site/app configuration exported as a CommonJS module.
// NOTE(review): field meanings inferred from common WeEngine-style conventions —
// confirm against the consuming application before relying on them.
module.exports = {
  name: "淘宝柠檬鱼科技", // shop display name
  uniacid: "2", // presumably the unified account id — verify
  acid: "2", // presumably the account id — verify
  multiid: "0",
  version: "1.21",
  siteroot: "https://shop486845690.taobao.com/app/index.php", // base URL of the site entry point
  design_method: "3"
};
#!/bin/sh
# Play an MPD playlist via mpc.
# With no argument: print the available playlists, sorted.
# With a playlist name: replace the queue with that playlist and toggle playback.
#
# BUGFIX: the original used the bash-only `[[ ... ]]` test under a #!/bin/sh
# shebang, which fails on POSIX shells such as dash. Replaced with POSIX `[ ]`
# and quoted the expansion.
if [ -z "$1" ]; then
    mpc lsp | sort
else
    mpc clear
    mpc load "$1"
    mpc toggle
fi
/*
* security.c
*
* Created on: Mar 17, 2017
* Created by: <NAME> <<EMAIL>>
*
* One MMT-Probe worker thread will create n security thread by calling security_worker_alloc_init.
* Number of security thread is designed by config->threads_size when passing to the function above.
*
* +--------+ +-------------+
* | worker |======> | sec. thread |
* | thread | || +-------------+
* +--------+ || +-------------+
* ====>| sec. thread |
* || +-------------+
* || +-------------+
* ====>| sec. thread |
* +-------------+
*
* Each time a worker thread receives a packet, _security_packet_handler is called to parse the packet and
* then, encapsulates the extracted data from the packet to a security message.
* The message is then sent to a buffer of MMT-Security to be able to access by the sec. thread.
* The message will be freed by MMT-Security when it does not need the message any more.
*
* If a sec. thread detects an alert, it will call _print_security_verdict to print out the alert.
*
* Depending on the parameter ignore_remain_flow, the rest of a flow can be ignored if an alert was detected on that flow.
* This will increase the verification performance.
*/
#include "security.h"
//dpi_message_set_data function to set data to message_t
#include <dpi_message_t.h>
#include <mmt_core.h>
#include <tcpip/mmt_tcpip.h>
#include <stdio.h>
#include <stdlib.h>
#include "../../lib/memory.h"
#include "../../lib/log.h"
#include "../../lib/string_builder.h"
#include "../../lib/malloc_ext.h"
#define SECURITY_DPI_PACKET_HANDLER_ID 10
#define IS_CFG_INOGRE( x ) (x->config->ignore_remain_flow )
/*
 * Compute how many bytes are still free inside a security message.
 * Accounts for one memory header per element plus one extra header.
 * @return the remaining room, or 0 when the message is already full.
 */
static inline uint32_t _get_msg_room( const message_t * msg ){
    const uint32_t occupied = (msg->elements_count + 1) * SIZE_OF_MMT_MEMORY_T + msg->_data_index;
    if( occupied >= msg->_data_length )
        return 0;
    return msg->_data_length - occupied;
}
/**
 * Get index of the encap_index-th occurrence of proto_id in a protocol hierarchy.
 * @return
 *  -1 if proto_id does not exist in the hierarchy (or the hierarchy is NULL)
 *  otherwise the index of that occurrence; if there are fewer occurrences than
 *  encap_index, the index of the last occurrence found
 */
static inline int _find_proto_index( int proto_id, int encap_index, const proto_hierarchy_t *proto_hierarchy ){
    int i, found_index = -1, occurrences = 0;
    //no session => no IP
    if( proto_hierarchy == NULL )
        return -1;
    for( i = 0; i < proto_hierarchy->len; i++ ){
        if( proto_hierarchy->proto_path[i] != proto_id )
            continue;
        found_index = i;
        occurrences ++;
        if( occurrences == encap_index )
            break;
    }
    return found_index;
}
/**
 * Special-case extraction for attributes that need custom handling before
 * falling back to the generic dpi_message_set_data path.
 *  - PROTO_TCP session payloads: copy the payload into msg only when it fits
 *    in the remaining message room (length comes from the *_LEN attribute).
 *  - PROTO_IP with a configured encapsulation index: extract from the chosen
 *    IP occurrence in the protocol hierarchy instead of the first one.
 * @return true  when the proto/att pair has been fully handled here (even if
 *               nothing was appended, e.g. data absent or too large)
 *         false when the caller should process the pair generically
 */
static inline bool _extract_data_for_specific_attribute(security_context_t *context, const ipacket_t *pkt, int dpi_data_type, message_t *msg, uint32_t proto_id, uint32_t att_id ){
    uint32_t *data_len = NULL;
    void *data = NULL;
    switch( proto_id ){
    case PROTO_TCP:
        switch( att_id ){
        // Reassembled TCP payloads: fetch the length attribute first, then
        // jump to the shared copy logic below.
        case TCP_SESSION_PAYLOAD_UP:
            data_len = get_attribute_extracted_data( pkt, PROTO_TCP, TCP_SESSION_PAYLOAD_UP_LEN );
            goto __got_data_len;
        case TCP_SESSION_PAYLOAD_DOWN:
            data_len = get_attribute_extracted_data( pkt, PROTO_TCP, TCP_SESSION_PAYLOAD_DOWN_LEN );
            goto __got_data_len;
        }
        return false;
    case PROTO_IP:
        // With the default "first occurrence" setting the generic path suffices.
        if( context->config->ip_encapsulation_index == CONF_IP_ENCAPSULATION_INDEX_FIRST )
            return false;
        //find index of the configured IP occurrence in the hierarchy
        int proto_index = _find_proto_index( PROTO_IP, context->config->ip_encapsulation_index, pkt->proto_hierarchy );
        //does not exist IP in this packet => do not need to go further
        if( proto_index == -1 )
            return true;
        data = get_attribute_extracted_data_at_index( pkt, proto_id, att_id, proto_index );
        if( data == NULL )
            return true;
        if( dpi_data_type == MMT_DATA_POINTER )
            dpi_message_set_void_data( pkt, data, msg, proto_id, att_id );
        else
            dpi_message_set_dpi_data( data, dpi_data_type, msg, proto_id, att_id );
        return true;
        break;
    default:
        return false;
    }
    //when we go to here, it means that proto_id == PROTO_TCP
    //and, att_id == TCP_SESSION_PAYLOAD_UP or TCP_SESSION_PAYLOAD_DOWN
    __got_data_len:
    if( data_len == NULL )
        return true;
    uint32_t room_size = _get_msg_room( msg );
    // Refuse (but still report "handled") when the payload would overflow the message.
    if( *data_len > room_size ){
        log_write( LOG_INFO, "Not enough room to contain %d bytes of %d.%d (avail. %d). Need to increase \"input.max_message_size\"",
                *data_len, proto_id, att_id, room_size );
        return true;
    }
    //get the whole data of a tcp flow
    // this may lead a problem of memory as a TCP flow may tranfer a huge data amount
    data = get_attribute_extracted_data( pkt, proto_id, att_id );
    if( data == NULL )
        return true;
    //append data to a security message that will be sent to MMT-Security
    set_element_data_message_t( msg, proto_id, att_id, data, MMT_SEC_MSG_DATA_TYPE_BINARY, *data_len );
    return true;
}
/**
 * This function is called by mmt-dpi for each incoming packet containing
 * registered proto/att. It copies the interesting information from the
 * ipacket into a message_t, then hands the message to mmt-security.
 * The message is freed by MMT-Security once it no longer needs it.
 * @return always 0
 */
static int _security_packet_handler( const ipacket_t *ipacket, void *args ) {
    int i;
    bool ret;
    security_context_t *context = (security_context_t *)args;
    MUST_NOT_OCCUR( context == NULL, "args parameter must not be NULL"); //this must not happen
    uint64_t session_id = 0;
    //when parameter ignore_remain_flow is active
    if( IS_CFG_INOGRE( context )){
        session_id = get_session_id_from_packet( ipacket );
        //check if we can ignore this packet
        bool can_ignore = mmt_sec_is_ignore_remain_flow( context->sec_handler, session_id );
        //the first part of the flow has been examined and we got at least one alert from that part
        // => we do not need to continue to examine the rest of the flow
        if( can_ignore ){
            return 0;
        }
    }
    /* We need to process this packet*/
    /* Convert a pcap packet to a message being understandable by mmt-security.
     * The function returns NULL if the packet contains no interested information.
     * Otherwise it creates a new memory segment to store the result message.
     * One need to use #free_message_t to free the message.
     */
    message_t *msg = create_message_t( context->proto_atts_count );
    //get a list of proto/attributes being used by mmt-security
    for( i=0; i<context->proto_atts_count; i++ ){
        // Try the special-case extraction first (TCP payloads, nth IP occurrence).
        ret = _extract_data_for_specific_attribute( context, ipacket, context->proto_atts[i]->dpi_type, msg,
                context->proto_atts[i]->proto_id, context->proto_atts[i]->att_id );
        //if this proto/att has been processed, we do not need to call dpi_message_set_data
        if( ret )
            continue;
        dpi_message_set_data( ipacket, context->proto_atts[i]->dpi_type, msg,
                context->proto_atts[i]->proto_id, context->proto_atts[i]->att_id );
    }
    //if there is no interested information
    if( unlikely( msg->elements_count == 0 )){
        free_message_t( msg );
        return 0;
    }
    //add other information to the message, such as, timestamp, packet_id, session_id
    msg->timestamp = mmt_sec_encode_timeval( &ipacket->p_hdr->ts );
    msg->counter   = ipacket->packet_id;
    //when parameter ignore_remain_flow is active,
    // we need to remember the session_id of the packet
    if( IS_CFG_INOGRE( context ))
        msg->flow_id = session_id;
    //give the message to MMT-Security (which takes ownership and frees it)
    mmt_sec_process( context->sec_handler, msg );
    return 0;
}
/**
 * Initialize the MMT-Security rule engine, excluding the given rules.
 * @param excluded_rules rules to be excluded from verification
 * @return the result of mmt_sec_init
 */
int security_open( const char *excluded_rules ){
    const char *version_info = mmt_sec_get_version_info();
    log_write( LOG_INFO, "Start MMT-Security %s", version_info );
    //exclude rules in rules_mask
    return mmt_sec_init( excluded_rules );
}
/**
 * Release global resources held by the MMT-Security library.
 * Counterpart of security_open.
 */
void security_close(){
    mmt_sec_close();
}
/**
 * Callback invoked by MMT-Security when a rule is validated: formats the
 * verdict as a CSV-ish report line and writes it to the configured output.
 * Note: this function can be called from one or many different threads,
 * ==> be careful when using static or shared variables inside it.
 *
 * FIX: removed the unused local variable `i`.
 * NOTE(review): `exec_trace` returned by
 * mmt_convert_execution_trace_to_json_string is not freed here — confirm the
 * library owns/reuses that buffer.
 */
static void _print_security_verdict(
        const rule_info_t *rule,    //rule being validated
        enum verdict_type verdict,  //DETECTED, NOT_RESPECTED
        uint64_t timestamp,         //moment (by time) the rule is validated
        uint64_t counter,           //moment (by order of packet) the rule is validated
        const mmt_array_t * trace,  //historic messages that validates the rule
        void *user_data             //#user-data being given in register_security
){
    security_context_t *context = (security_context_t *) user_data;
    //depending on the configuration of security.report-rule-description,
    // we include the description of the rule or not
    const char *description = context->config->is_report_rule_description? rule->description : "";
    const char *exec_trace  = mmt_convert_execution_trace_to_json_string( trace, rule );
    struct timeval ts;
    mmt_sec_decode_timeval(timestamp, &ts );
    char message[ MAX_LENGTH_REPORT_MESSAGE ];
    int offset = 0;
    STRING_BUILDER_WITH_SEPARATOR( offset, message, sizeof( message ), ",",
            __INT( rule->id ),
            __STR( verdict_type_string[verdict] ),
            __STR( rule->type_string ),
            __STR( description ),  //string with quotes
            __ARR( exec_trace )    //string without quotes
    );
    output_write_report( context->output,
            context->config->output_channels,
            SECURITY_REPORT_TYPE,
            &ts,
            message);
}
/**
 * Register the companion attribute some attributes depend on:
 *  - ip.opts needs ip.header_len (to compute the options length)
 *  - tcp session payloads need their *_LEN attributes (reassembly builds only)
 * @param add_att_id [out] id of the additionally registered attribute
 * @return true when a companion attribute was registered, false otherwise
 *         (also false when the registration itself failed — a warning is logged)
 */
static inline bool _register_additional_attributes_if_need( mmt_handler_t *dpi_handler, uint32_t proto_id, uint32_t att_id, uint32_t *add_att_id ){
    //we need IP_HEADER_LEN to calculate length of IP_OPTS
    if( proto_id == PROTO_IP && att_id == IP_OPTS ){
        if (!register_extraction_attribute( dpi_handler, PROTO_IP, IP_HEADER_LEN)){
            log_write( LOG_WARNING, "Cannot register protocol/attribute ip.header_len");
            return false;
        }
        *add_att_id = IP_HEADER_LEN;
        return true;
    }
#ifdef TCP_REASSEMBLY_MODULE
    //we need the length of tcp session payload
    if( proto_id == PROTO_TCP && att_id == TCP_SESSION_PAYLOAD_UP ){
        if (!register_extraction_attribute( dpi_handler, PROTO_TCP, TCP_SESSION_PAYLOAD_UP_LEN)){
            log_write( LOG_WARNING, "Cannot register protocol/attribute tcp.tcp_session_payload_up_len");
            return false;
        }
        *add_att_id = TCP_SESSION_PAYLOAD_UP_LEN;
        return true;
    }
    if( proto_id == PROTO_TCP && att_id == TCP_SESSION_PAYLOAD_DOWN ){
        if (!register_extraction_attribute( dpi_handler, PROTO_TCP, TCP_SESSION_PAYLOAD_DOWN_LEN)){
            log_write( LOG_WARNING, "Cannot register protocol/attribute tcp.tcp_session_payload_down_len");
            return false;
        }
        *add_att_id = TCP_SESSION_PAYLOAD_DOWN_LEN;
        return true;
    }
#endif
    return false;
}
/**
 * Override one libmmt_security parameter with the value from the probe config.
 * A value of 0 means "use the library default" and is ignored, as is a value
 * equal to the current setting.
 * @param param_id one of the MMT_SEC__CONFIG__* identifiers
 * @param val      new value (0 = keep default)
 *
 * FIX: the log format used %d for a uint32_t value; %u is the correct
 * conversion for unsigned int.
 */
static inline void _update_lib_security_param( int param_id, uint32_t val ){
    //if user want to use default value => do nothing
    if( val == 0 )
        return;
    uint32_t old_val = mmt_sec_get_config( param_id );
    //value does not change?
    if( val == old_val )
        return;
    //update the new value
    mmt_sec_set_config( param_id, val );
    log_write( LOG_INFO, "Overridden the security parameter '%s' by %u", mmt_sec_get_config_name( param_id ), val );
}
/**
 * Allocate and initialize a security worker bound to a DPI handler:
 * overrides libmmt_security parameters from config, registers the rules,
 * registers every protocol attribute the rules need (plus companion
 * attributes), and installs _security_packet_handler on the DPI handler.
 *
 * @param config      security section of the probe configuration
 * @param dpi_handler DPI handler to attach the packet handler to
 * @param cores_id    CPU cores for the security threads (config->threads_size
 *                    of them; if 0 threads, security uses the caller's core)
 * @param verbose     when true, log the list of registered proto.atts
 * @param output      output used by _print_security_verdict
 * @param is_enable_tcp_reassembly whether TCP reassembly is enabled in the probe
 * @return a new security context, or NULL when security is disabled in config
 *
 * FIXES:
 *  - `att_registed` was never initialized, so the strlen() in the verbose
 *    branch was undefined behavior when no attribute had been appended;
 *    the buffer now starts as an empty string.
 *  - the trailing-comma trim is guarded against an empty buffer (it used to
 *    index att_registed[-1] in that case).
 */
security_context_t* security_worker_alloc_init( const security_conf_t *config,
        mmt_handler_t *dpi_handler, const uint32_t *cores_id,
        bool verbose, output_t *output, bool is_enable_tcp_reassembly ){
    size_t threads_count = config->threads_size;
    int i;
    int att_registed_offset = 0;
    const int att_registed_length = 10000;
    char att_registed[10000];
    uint32_t add_att_id;
    if( ! config->is_enable )
        return NULL;
    //BUGFIX: start from a valid empty string (see function comment)
    att_registed[0] = '\0';
    //set default parameters for libmmt_security
    _update_lib_security_param( MMT_SEC__CONFIG__INPUT__MAX_MESSAGE_SIZE, config->lib.input_max_message_size );
    _update_lib_security_param( MMT_SEC__CONFIG__SECURITY__MAX_INSTANCES, config->lib.security_max_instances );
    _update_lib_security_param( MMT_SEC__CONFIG__SECURITY__SMP__RING_SIZE, config->lib.security_smp_ring_size );
    //init
    security_context_t *ret = mmt_alloc_and_init_zero(sizeof( security_context_t ));
    ret->dpi_handler = dpi_handler;
    ret->config = config;
    ret->output = output;
    //init mmt-sec to verify the rules
    ret->sec_handler = mmt_sec_register( threads_count, cores_id, config->rules_mask, verbose, _print_security_verdict, ret );
    if( config->ignore_remain_flow )
        mmt_sec_set_ignore_remain_flow( ret->sec_handler, true, 5000000 ); //5M flows
    rule_info_t const*const*rules_array;
    ret->rules_count = mmt_sec_get_rules_info( &rules_array );
    //register protocols and their attributes using by mmt-sec
    ret->proto_atts_count = mmt_sec_get_unique_protocol_attributes((void*) &ret->proto_atts );
    bool is_need_tcp_reassembly = false;
    for( i=0; i<ret->proto_atts_count; i++ ){
        //mmt_debug( "Registered attribute to extract: %s.%s", proto_atts[i]->proto, proto_atts[i]->att );
        if( register_extraction_attribute( dpi_handler, ret->proto_atts[i]->proto_id, ret->proto_atts[i]->att_id ) == 0){
            log_write( LOG_WARNING, "Cannot register protocol/attribute %s.%s", ret->proto_atts[i]->proto, ret->proto_atts[i]->att );
        }
        else if( verbose )
            att_registed_offset += snprintf( att_registed + att_registed_offset, MAX( att_registed_length - att_registed_offset, 0),
                    "%s.%s,", ret->proto_atts[i]->proto, ret->proto_atts[i]->att );
        //for some attribute, we need to register another attribute
        // example, we need `tcp_session_payload_up_len` when one wants to access `tcp_session_payload_up`
        if( _register_additional_attributes_if_need( dpi_handler, ret->proto_atts[i]->proto_id, ret->proto_atts[i]->att_id, &add_att_id ) ){
            if( verbose )
                att_registed_offset += snprintf( att_registed + att_registed_offset, MAX( att_registed_length - att_registed_offset, 0),
                        "%s.%s,", ret->proto_atts[i]->proto,
                        get_attribute_name_by_protocol_and_attribute_ids(ret->proto_atts[i]->proto_id, add_att_id ));
        }
#ifdef TCP_REASSEMBLY_MODULE
        if( !is_need_tcp_reassembly
                && ret->proto_atts[i]->proto_id == PROTO_TCP
                && (ret->proto_atts[i]->att_id == TCP_SESSION_PAYLOAD_UP || ret->proto_atts[i]->att_id == TCP_SESSION_PAYLOAD_DOWN ))
            is_need_tcp_reassembly = true;
#endif
    }
    if( is_need_tcp_reassembly ){
#ifdef TCP_REASSEMBLY_MODULE
        if( ! is_enable_tcp_reassembly )
            log_write( LOG_WARNING, "The rules used tcp.tcp_session_payload_up or tcp.tcp_session_payload_down will not work as 'enable-tcp-reassembly = false'" );
#else
        log_write( LOG_WARNING, "The rules used tcp.tcp_session_payload_up or tcp.tcp_session_payload_down will not work as TCP_REASSEMBLY_MODULE is not enable" );
#endif
    }
    if( verbose ){
        rule_info_t const* const* rules_array = NULL;
        //remove the last comma (BUGFIX: only when at least one attribute was appended)
        size_t att_registed_len = strlen( att_registed );
        if( att_registed_len > 0 )
            att_registed[ att_registed_len - 1 ] = '\0';
        log_write( LOG_INFO,"Registered %u proto.atts to process %zu rules: %s",
                ret->proto_atts_count,
                mmt_sec_get_rules_info( &rules_array ),
                att_registed );
    }
    //Register a packet handler, it will be called for every processed packet
    register_packet_handler( dpi_handler, SECURITY_DPI_PACKET_HANDLER_ID, _security_packet_handler, ret );
    return ret;
}
/**
 * Stop and free one security worker: unregisters it from MMT-Security,
 * detaches the packet handler from the DPI handler and frees the context.
 * @param ret the context returned by security_worker_alloc_init (may be NULL)
 * @return number of alerts generated during the worker's lifetime
 */
size_t security_worker_release( security_context_t* ret ){
    if( unlikely( ret == NULL || ret->sec_handler == NULL ) )
        return 0;
    const size_t alerts_count = mmt_sec_unregister( ret->sec_handler );
    ret->sec_handler = NULL;
    unregister_packet_handler (ret->dpi_handler, SECURITY_DPI_PACKET_HANDLER_ID );
    mmt_probe_free( ret );
    return alerts_count;
}
|
-- Count the products currently marked as active.
SELECT COUNT(*)
FROM products
WHERE status = 'active';
#!/bin/bash
set -ex

# Expects variables.env to define: REGISTRY, REGISTRY_USER, REGISTRY_PASSWORD,
# IMAGE, VERSION, and optionally PLATFORM (comma-separated platform list).
source variables.env

# FIX: use --password-stdin so the password is not visible in `ps` output or
# shell history; all expansions are now quoted to survive spaces/globs.
echo "$REGISTRY_PASSWORD" | docker login "$REGISTRY" -u "$REGISTRY_USER" --password-stdin

if [ -z "${PLATFORM}" ]; then
    # Single platform: retag the locally built image and push it.
    docker tag "$IMAGE:$VERSION" "$REGISTRY/$IMAGE:$VERSION"
    docker push "$REGISTRY/$IMAGE:$VERSION"
else
    # Multi-platform: build and push in one step with buildx.
    docker buildx build --push --platform "$PLATFORM" --pull --tag "$REGISTRY/$IMAGE:$VERSION" --build-arg version="$VERSION" ..
fi
|
<filename>kastking-salesforecast-server/src/main/java/com/kastking/warehouse/service/IWarehouseService.java
package com.kastking.warehouse.service;
import com.kastking.warehouse.domain.Warehouse;
import java.util.List;
/**
 * Warehouse service interface.
 *
 * @author Michael
 * @date 2020-02-28
 */
public interface IWarehouseService {
    /**
     * Query a warehouse by its ID.
     *
     * @param warehouseId warehouse ID
     * @return the warehouse
     */
    public Warehouse selectWarehouseById(Long warehouseId);

    /**
     * Query the list of warehouses matching the given example.
     *
     * @param warehouse warehouse filter criteria
     * @return list of matching warehouses
     */
    public List<Warehouse> selectWarehouseList(Warehouse warehouse);

    /**
     * Insert a new warehouse.
     *
     * @param warehouse warehouse to insert
     * @return number of affected rows
     */
    public int insertWarehouse(Warehouse warehouse);

    /**
     * Update an existing warehouse.
     *
     * @param warehouse warehouse to update
     * @return number of affected rows
     */
    public int updateWarehouse(Warehouse warehouse);

    /**
     * Delete warehouses in batch.
     *
     * @param ids IDs of the records to delete
     * @return number of affected rows
     */
    public int deleteWarehouseByIds(String ids);

    /**
     * Delete a single warehouse.
     *
     * @param warehouseId warehouse ID
     * @return number of affected rows
     */
    public int deleteWarehouseById(Long warehouseId);

    /**
     * Import warehouse data.
     *
     * @param warehouseList   list of warehouses to import
     * @param isUpdateSupport whether to update records that already exist
     * @param operName        name of the operating user
     * @return result message
     */
    public String importWarehouse(List<Warehouse> warehouseList, Boolean isUpdateSupport, String operName);
}
|
package com.huatuo.bean;
import java.util.ArrayList;
/**
 * Item of the timeline content list, covering flash-sale (seckill)
 * activities, activity topics, advertisements and recommended services.
 */
public class SecKillActivityListItemBean {
    public String ID = "";// flash-sale activity ID
    public String name = "";// flash-sale activity name
    public String servID = "";// service ID
    public String servName = "";// service name
    public String icon = "";// service icon
    public String storeID = "";// store ID ANS
    public String storeName = "";// name of the owning store ANS
    public String address = "";// store address ANS
    public String isLevel = "";// whether price levels are enabled N
    public int stock = 0;// stock N
    public String minPrice = "";// minimum price ANS 64 C
    public String marketPrice = "";// market price ANS 64 C
    public ArrayList<SecKillActivitydescBean> activitydesc = null;// activity description JSONArray
    public ArrayList<SecKillActivityGradeBean> servLevelList = null;// service price-level list JSONArray

    public SecKillActivityListItemBean() {
        activitydesc = new ArrayList<SecKillActivitydescBean>();
        servLevelList = new ArrayList<SecKillActivityGradeBean>();
    }
}
|
#!/usr/bin/env bash
# Prismo: a small grubby/kexec front-end that lists the configured boot
# entries and loads a selected kernel with kexec (no reboot via firmware).

NAME='Prismo'
VERSION='1.0.0'

# Single-letter CLI options and their getopts spec.
OPTION_HELP='h'
OPTION_VERSION='v'
OPTION_LOAD_KERNEL='l'
OPTIONS="$OPTION_HELP$OPTION_VERSION$OPTION_LOAD_KERNEL"
DESCRIPTION_HELP='Print this help.'
DESCRIPTION_VERSION="Print the version of $NAME."
DESCRIPTION_LOAD_KERNEL='Load a new kernel.'

# BOOT_ENTRIES is an associative array keyed by "<index>,<FIELD>" with FIELD in
# {INDEX, KERNEL, ARGS, ROOT, INITRD, TITLE}; filled by getBootEntries.
declare -A BOOT_ENTRIES
BOOT_ENTRIES_LAST_INDEX=-1
SELECTED_BOOT_ENTRY_INDEX=-1

# Print usage information and exit.
help() {
    # local PRISMO_LOCATION='Time Room'
    local PRISMO_LOCATION=`basename "$0"`
    /usr/bin/printf '%s %s\n' "$NAME" "$VERSION"
    /usr/bin/printf 'Usage: %s [OPTION]\n\n' "$PRISMO_LOCATION"
    /usr/bin/printf ' -%s\t%s\n' \
    "$OPTION_HELP" "$DESCRIPTION_HELP" \
    "$OPTION_VERSION" "$DESCRIPTION_VERSION" \
    "$OPTION_LOAD_KERNEL" "$DESCRIPTION_LOAD_KERNEL"
    exit
}

# Print the program version and exit.
version() {
    /usr/bin/printf '%s %s\n' "$NAME" "$VERSION"
    exit
}

# Abort with exit code 13 unless running as root (required for kexec).
exitIfNotRoot() {
    if [[ `id -u` != '0' ]]; then
        /usr/bin/printf '%s needs to be run as root. Exiting...\n' "$NAME"
        exit 13
    fi
}

# Parse `grubby --info=ALL` line by line into BOOT_ENTRIES.
# Non-Linux entries (reported as "non linux entry") are dropped and
# BOOT_ENTRIES_LAST_INDEX is reduced accordingly.
getBootEntries() {
    local LAST_INDEX=-1;
    local NON_LINUX_INDICES=0;
    while read -r LINE; do
        # Each grubby output line is one key=value pair; extract every field we know.
        local INDEX=`/usr/bin/printf '%s\n' "$LINE" | sed -nE 's/^index=(.+)$/\1/p'`
        local KERNEL=`/usr/bin/printf '%s\n' "$LINE" | sed -nE 's/^kernel=(.+)$/\1/p'`
        local ARGS=`/usr/bin/printf '%s\n' "$LINE" | sed -nE 's/^args="(.+)"$/\1/p'`
        local ROOT=`/usr/bin/printf '%s\n' "$LINE" | sed -nE 's/^root=(.+)$/\1/p'`
        local INITRD=`/usr/bin/printf '%s\n' "$LINE" | sed -nE 's/^initrd=(.+)$/\1/p'`
        local TITLE=`/usr/bin/printf '%s\n' "$LINE" | sed -nE 's/^title=(.+)$/\1/p'`
        local NON_LINUX_ENTRY=`/usr/bin/printf '%s\n' "$LINE" | sed -nE 's/^(non linux entry)$/\1/p'`
        if [[ -n "$INDEX" ]]; then
            # An "index=" line starts a new entry; remember its index.
            LAST_INDEX="$INDEX"
            BOOT_ENTRIES["$LAST_INDEX,INDEX"]="$INDEX"
        fi
        [[ -n "$KERNEL" ]] && BOOT_ENTRIES["$LAST_INDEX,KERNEL"]="$KERNEL"
        [[ -n "$ARGS" ]] && BOOT_ENTRIES["$LAST_INDEX,ARGS"]="$ARGS"
        [[ -n "$ROOT" ]] && BOOT_ENTRIES["$LAST_INDEX,ROOT"]="$ROOT"
        [[ -n "$INITRD" ]] && BOOT_ENTRIES["$LAST_INDEX,INITRD"]="$INITRD"
        [[ -n "$TITLE" ]] && BOOT_ENTRIES["$LAST_INDEX,TITLE"]="$TITLE"
        if [[ -n "$NON_LINUX_ENTRY" ]]; then
            let "NON_LINUX_INDICES=$NON_LINUX_INDICES + 1"
            unset BOOT_ENTRIES["$LAST_INDEX,INDEX"]
        fi
    done <<< `grubby --info=ALL`
    let "BOOT_ENTRIES_LAST_INDEX=LAST_INDEX - NON_LINUX_INDICES"
}

# Print a two-column table of boot entry indices and titles.
printBootEntries() {
    /usr/bin/printf ' %s \t %s \n' "INDEX" "KERNEL"
    local CURRENT_INDEX=0
    while [[ "$CURRENT_INDEX" -le "$BOOT_ENTRIES_LAST_INDEX" ]]; do
        /usr/bin/printf ' %s \t %s \n' "$CURRENT_INDEX" "${BOOT_ENTRIES[$CURRENT_INDEX,TITLE]}"
        let "CURRENT_INDEX=$CURRENT_INDEX + 1"
    done
}

# Prompt until the user enters a valid entry index (0..BOOT_ENTRIES_LAST_INDEX).
selectBootEntry() {
    while [[ ( ! "$SELECTED_BOOT_ENTRY_INDEX" =~ ^[0-9]+$ ) || ( "$SELECTED_BOOT_ENTRY_INDEX" -lt '0' ) || ( "$SELECTED_BOOT_ENTRY_INDEX" -gt "$BOOT_ENTRIES_LAST_INDEX" ) ]]; do
        /usr/bin/printf 'Please select a boot entry (0 - %s): ' "$BOOT_ENTRIES_LAST_INDEX"
        read SELECTED_BOOT_ENTRY_INDEX
    done
}

# Drop any kernel previously staged with `kexec -l`.
unloadPreviousKernel() {
    kexec -u
    /usr/bin/printf 'Unloaded any previously loaded kernel.\n'
}

# Stage the selected kernel/initrd with kexec, passing root= plus the saved args.
loadKernel() {
    local KERNEL="${BOOT_ENTRIES[$SELECTED_BOOT_ENTRY_INDEX,KERNEL]}"
    local INITRD="${BOOT_ENTRIES[$SELECTED_BOOT_ENTRY_INDEX,INITRD]}"
    local ARGS="${BOOT_ENTRIES[$SELECTED_BOOT_ENTRY_INDEX,ARGS]}"
    local ROOT="${BOOT_ENTRIES[$SELECTED_BOOT_ENTRY_INDEX,ROOT]}"
    local TITLE="${BOOT_ENTRIES[$SELECTED_BOOT_ENTRY_INDEX,TITLE]}"
    kexec -l "$KERNEL" --initrd="$INITRD" --append="root=$ROOT $ARGS"
    /usr/bin/printf 'Loaded %s.\n' "$TITLE"
}

# No arguments => show usage.
[[ -z "$1" ]] && help
# Option dispatch.
while getopts "$OPTIONS" OPTION; do
    case "$OPTION" in
        "$OPTION_HELP")
            help
            ;;
        "$OPTION_VERSION")
            version
            ;;
        "$OPTION_LOAD_KERNEL")
            exitIfNotRoot
            getBootEntries
            printBootEntries
            selectBootEntry
            unloadPreviousKernel
            loadKernel
            ;;
        *)
            help
            ;;
    esac
done
import numpy as np
from scipy import special, optimize, spatial
import matplotlib.pyplot as plt
from sklearn import mixture
from sklearn import preprocessing
from sklearn.decomposition import PCA
from MulticoreTSNE import MulticoreTSNE as TSNE
from umap import UMAP
def stratefied_sampling(index, label, prob, size):
    """Draw a stratified bootstrap sample of `size` entries from `index`.

    For each stratum ``i`` (``label == i``), roughly ``prob[i] * size``
    entries are drawn with replacement; any rounding shortfall is added to
    the last stratum so the counts sum exactly to ``size``.
    """
    sample = np.zeros(size).astype(index.dtype)
    label_count = (prob * size).astype(int)
    # make the per-stratum counts add up exactly to `size`
    if label_count.sum() != size:
        label_count[-1] += (size - label_count.sum())
    cursor = 0
    for label_i in range(prob.shape[0]):
        n_i = label_count[label_i]
        if n_i > 0:
            pool = index[label == label_i]
            drawn = np.random.choice(pool, size=n_i, replace=True)
            sample[cursor:cursor + n_i] = drawn
            cursor += n_i
    return sample
def generate_data(mean, cov, state, size, noise_level=0):
    """Sample `size` points from a Gaussian mixture with noisy parameters.

    The mixture weights are perturbed in logit space and re-normalized with a
    softmax; the means and covariances get additive Gaussian noise scaled by
    ``noise_level``. Points are laid out component by component.
    """
    n_components = state.shape[0]
    dim = mean.shape[1]
    # perturb mixture weights in logit space, then renormalize
    noisy_logit = np.log(state + np.finfo(float).eps) \
        + np.random.randn(*(state.shape)) * noise_level
    weights = special.softmax(noisy_logit)
    noisy_mean = mean + np.random.randn(*(mean.shape)) * noise_level
    noisy_cov = cov + np.random.randn(*(cov.shape)) * noise_level
    counts = (weights * size).astype(int)
    data = np.zeros((size, dim))
    cursor = 0
    for comp in range(n_components):
        n_comp = counts[comp]
        data[cursor:cursor + n_comp, :] = np.random.multivariate_normal(
            mean=noisy_mean[comp, ...], cov=noisy_cov[comp, ...], size=n_comp)
        cursor += n_comp
    return data
def treatment_ops(v0, treatment_operator):
    """Apply a linear "treatment" operator to state vectors in logit space.

    Logits of ``v0`` are multiplied by ``treatment_operator`` and mapped back
    through a softmax (note: softmax is taken over the whole array, matching
    scipy's default axis=None behavior).
    """
    logits = np.log(v0 + np.finfo(float).eps)
    transformed = logits @ treatment_operator
    return special.softmax(transformed)
def loss_fn(v0, v1):
    """Mean cosine distance between corresponding rows of ``v0`` and ``v1``."""
    dot = (v0 * v1).sum(axis=1)
    norm0 = np.sqrt((v0 * v0).sum(axis=1))
    norm1 = np.sqrt((v1 * v1).sum(axis=1))
    cosine_similarity = np.divide(np.divide(dot, norm0), norm1)
    return (1 - cosine_similarity).mean()
def simulation_fn(
        gmm_mean,  # mean of Gaussian mixture model, (n_components, dim)
        gmm_cov,  # covariance of Gaussian mixture model, (n_components, dim, dim)
        state_0,  # initial state in format of Categorical PMF, (n_components,)
        state_1,  # final state in format of Categorical PMF, (n_components,)
        n_components=2,
        n_tiles=100,
        n_case=100,
        noise_level=1e-4):
    """Simulate n_case paired state-0/state-1 datasets from a noisy GMM, refit
    a GaussianMixture on the pooled data, estimate per-case states, and fit a
    linear logit-space operator A with v0 @ A ~ v1.

    Returns (result_dict, metric_dict): estimated states, the fitted GMM, the
    component sort key, the optimizer result, and cosine-distance metrics
    against the ground truth.
    """
    # data: each case contributes n_tiles points per state, generated with
    # fresh parameter noise so cases are not identical
    index_dict = {}
    dim = gmm_mean.shape[1]
    data = np.zeros((n_case * n_tiles * 2, dim))  # 2 states
    front = 0
    for i in range(n_case):
        data_0 = generate_data(mean=gmm_mean, cov=gmm_cov,
                               state=state_0, size=n_tiles, noise_level=noise_level)
        data_1 = generate_data(mean=gmm_mean, cov=gmm_cov,
                               state=state_1, size=n_tiles, noise_level=noise_level)
        index_0 = np.arange(front, front + n_tiles).astype(int)
        index_1 = np.arange(front + n_tiles, front + 2 * n_tiles).astype(int)
        index_dict[i] = {'state_0': index_0, 'state_1': index_1}
        data[index_0, :] = data_0
        data[index_1, :] = data_1
        front += 2 * n_tiles
    # model: refit a GMM on all simulated points (both states pooled)
    model = mixture.GaussianMixture(n_components=n_components)
    model.fit(data)
    # sort components by first mean coordinate so they are comparable to the
    # ground-truth ordering
    sortkey = np.argsort(model.means_[:, 0])
    # estimate states: mean posterior component membership per case
    v0 = np.zeros((n_case, n_components))
    for case_ID in index_dict:
        index = index_dict[case_ID]['state_0']
        y_pred = model.predict_proba(data[index, ...])
        y_pred = y_pred[:, sortkey]
        v0[case_ID, :] = y_pred.mean(axis=0)
    v1 = np.zeros((n_case, n_components))
    for case_ID in index_dict:
        index = index_dict[case_ID]['state_1']
        y_pred = model.predict_proba(data[index, ...])
        y_pred = y_pred[:, sortkey]
        v1[case_ID, :] = y_pred.mean(axis=0)
    # estimate linear transformation
    # v0: (n_case, n_components)
    # v1: (n_case, n_components)
    # solve A: (n_components, n_components)
    # v0 @ A = v1
    def target_fn(A_flat):
        # objective: cosine distance between predicted and observed state 1
        A = A_flat.reshape((n_components, n_components))
        v1_pred = treatment_ops(v0, A)
        loss = loss_fn(v1, v1_pred)
        return loss
    minimization_result = optimize.minimize(fun=target_fn, x0=np.eye(n_components).flatten())
    # report: raw estimates plus summary metrics vs the ground truth
    result_dict = {
        'state_0_pred': v0,
        'state_1_pred': v1,
        'gmm': model,
        'sortkey': sortkey,
        'minimization_result': minimization_result,
    }
    metric_dict = {
        'state_0_cosine_distance': spatial.distance.cosine(state_0,
                                                           result_dict['state_0_pred'].mean(axis=0)),
        'state_1_cosine_distance': spatial.distance.cosine(state_1,
                                                           result_dict['state_1_pred'].mean(axis=0)),
        'minimization_loss': result_dict['minimization_result'].fun,
        'gmm_convergence': result_dict['gmm'].converged_,
        'gmm_lower_bound': result_dict['gmm'].lower_bound_,
        'gmm_mean_cosine_distance': spatial.distance.cosine(gmm_mean.flatten(),
                                                            result_dict['gmm'].means_[result_dict['sortkey'], ...].flatten()),
        'gmm_cov_cosine_distance': spatial.distance.cosine(gmm_cov.flatten(),
                                                           result_dict['gmm'].covariances_[result_dict['sortkey'], ...].flatten()),
    }
    return result_dict, metric_dict
if __name__ == '__main__':
    # load and preprocess MNIST embeddings from VAE
    z = np.load('./result/mnist_z.npy')
    y = np.load('./result/mnist_y.npy')
    z = preprocessing.scale(z)
    # ground truth: mean and covariance of the Gaussian mixture model,
    # estimated from the embeddings of digits 0 and 1 only
    true_mean = np.stack([
        z[y == 0, :].mean(axis=0),
        z[y == 1, :].mean(axis=0),
    ], axis=0)  # (n_components, dim)
    true_cov = np.stack([
        np.cov(z[y == 0, :], rowvar=False),
        np.cov(z[y == 1, :], rowvar=False),
    ], axis=0)  # (n_components, dim, dim)
    # order components by first mean coordinate so simulation results are comparable
    sortkey = np.argsort(true_mean[:, 0])  # for comparison
    true_mean = true_mean[sortkey, ...]
    true_cov = true_cov[sortkey, ...]
    # parameters to vary and monitor
    params = {
        'n_components': 2,
        'n_tiles': 100,
        'n_case': 10,
        'noise_level': 1e-4,
    }
    # run simulation over a grid of initial mixing proportions p.
    # 'balanced': state 1 is the mirror of state 0; 'biased': state 1 == state 0.
    for case in ['balanced', 'biased']:
        print('case {}'.format(case))
        for p in np.arange(0, 1.1, 0.1):
            state_0 = np.array([p, 1 - p])
            if case == 'balanced':
                state_1 = 1 - state_0
            elif case == 'biased':
                state_1 = state_0
            result_dict, metric_dict = simulation_fn(
                gmm_mean=true_mean,
                gmm_cov=true_cov,
                state_0=state_0,
                state_1=state_1,
                **params,
            )
            print('p: {:.3f}, state 0 loss: {:.3E}, state 1 loss: {:.3E}'.format(
                p, metric_dict['state_0_cosine_distance'], metric_dict['state_1_cosine_distance']))
            print(result_dict['gmm'].weights_[result_dict['sortkey']])
|
<filename>lib/code_mapper/output/png.rb
require 'ruby-graphviz'
module CodeMapper
module Output
# Renders a recorded call graph to a PNG file via GraphViz.
# `push`/`pop` mirror method entry/exit events; `done` writes the image.
class Png
  def initialize(file)
    @file = file
    @stack = []
    @graph = GraphViz.new(:G, type: :digraph, rankdir: "LR")
  end

  # Add a box node for the called method and draw an edge from its caller.
  def push(tp, normalized_class_name)
    node = @graph.add_nodes("#{normalized_class_name}.#{tp.method_id.to_s}", shape: :box)
    @graph.add_edges(@stack.last, node) unless @stack.empty?
    @stack << node
  end

  # Method returned: drop it from the caller stack (arguments unused but
  # required by the output interface).
  def pop(tp, normalized_class_name)
    @stack.pop
  end

  # Flush the accumulated graph to the PNG file.
  def done
    @graph.output(png: @file)
  end
end
end
end
|
<filename>uvicore/database/provider.py
from uvicore.support.dumper import dump, dd
from uvicore.support.module import location
from uvicore.database import Connection
from uvicore.typing import Dict, List
class Db:
    """Database Service Provider Mixin.

    Mixed into a package service provider; registers connections, models,
    tables and seeders on ``self.package.database``.
    """
    #def _add_db_definition(self, key, value):
    #     if type(value) == list:
    #         if not self.package.database[key]: self.package.database[key] = []
    #         self.package.database[key].extend(value)
    #     elif type(value) == dict:
    #         self.package.database[key].merge(value)
    #     else:
    #         self.package.database[key] = value
        #self.package.database[key] = value
        # if 'database' not in self.package:
        #     self.package.database = Dict()
        # if type(value) == list:
        #     if key not in self.package.database: self.package.database = []
        #     self.package['database'][key].extend(value)
        # else:
        #     self.package['database'][key] = value

    def connections(self, connections: Dict, default: str):
        """Register database connections and the default connection name.

        Builds each connection's backend URL plus a "metakey" identifying
        the physical database it points at, merges those values back into
        the connection SuperDict, and stores everything on the package.
        """
        #connections = []
        for name, connection in connections.items():
            # Build URL and metakey
            # Metakey cannot be the connection name. If 2 connections share the exact
            # same database (host, port, dbname) then they need to also share the same
            # metadata for foreign keys to work properly.
            if not connection.backend: connection.backend = 'sqlalchemy'
            connection.backend = connection.backend.lower()
            connection.driver = connection.driver.lower()
            url = ''
            # NOTE(review): a 'sqlalchemy' backend with a driver other than
            # sqlite/mysql/postgresql leaves metakey unassigned and would raise
            # NameError below - presumably drivers are validated upstream; confirm.
            if connection.backend == 'sqlalchemy':
                if connection.driver == 'sqlite':
                    url = 'sqlite:///' + connection.database
                    metakey = url
                elif connection.driver in ['mysql', 'postgresql']:
                    url = connection.driver
                    if connection.dialect: url += '+' + connection.dialect
                    url += (
                        '://' + connection.username
                        + ':' + connection.password
                        + '@' + connection.host
                        + ':' + str(connection.port)
                        + '/' + connection.database
                    )
                    # Same host/port/database => same metakey (shared metadata)
                    metakey = (
                        connection.host
                        + ':' + str(connection.port)
                        + '/' + connection.database
                    )
            else:
                # Any other backend type (like api), should have at minimum a url defined
                url = connection.url
                metakey = url
            # Merge new values into connection SuperDict
            if not connection.prefix: connection.prefix = ''
            connection.merge({
                'name': name,
                'metakey': metakey.lower(),
                'url': url
            })
        self.package.database.connections = connections
        self.package.database.connection_default = default

    def models(self, items: List):
        """Register model module locations, honoring the package's opt-out."""
        # Default registration
        self.package.registers.defaults({'models': True})
        # Register models only if allowed
        if self.package.registers.models:
            self.package.database.models = items

    def tables(self, items: List):
        """Register table module locations, honoring the package's opt-out."""
        # Default registration
        self.package.registers.defaults({'tables': True})
        # Register tables only if allowed
        if self.package.registers.tables:
            self.package.database.tables = items

    def seeders(self, items: List):
        """Register seeder module locations, honoring the package's opt-out."""
        # Default registration
        self.package.registers.defaults({'seeders': True})
        # Register seeders only if allowed
        if self.package.registers.seeders:
            self.package.database.seeders = items
|
#!/bin/bash
# Combine per-run results for the IWSLT de-en dataset using the
# project-local Anaconda python.
PYTHON=/home/przy/projects/rrg-mageed/MT/code/anaconda3/bin/python
dataset=dataset_IWSLT_de-en
"$PYTHON" main.py --combine_results "$dataset"
|
<reponame>M-zg/azure-cli-extensions
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
import functools
import os
import hashlib
import json
from knack import util
from . import ip_utils
from . import rsa_parser
from . import ssh_utils
def ssh_vm(cmd, resource_group_name=None, vm_name=None, ssh_ip=None, public_key_file=None, private_key_file=None):
    """CLI entry point: open an interactive AAD-certificate SSH session to a VM.

    Target is either --ip, or --resource-group plus --name (validated in
    _do_ssh_op); key paths default to ~/.ssh/id_rsa(.pub).
    """
    _do_ssh_op(cmd, resource_group_name, vm_name, ssh_ip,
               public_key_file, private_key_file, ssh_utils.start_ssh_connection)
def ssh_config(cmd, config_path, resource_group_name=None, vm_name=None, ssh_ip=None,
               public_key_file=None, private_key_file=None):
    """CLI entry point: write an OpenSSH config entry for the VM instead of connecting.

    Same target/key resolution as ssh_vm; the final step writes config_path
    via ssh_utils.write_ssh_config (pre-bound here with partial).
    """
    op_call = functools.partial(ssh_utils.write_ssh_config, config_path, resource_group_name, vm_name)
    _do_ssh_op(cmd, resource_group_name, vm_name, ssh_ip, public_key_file, private_key_file, op_call)
def _do_ssh_op(cmd, resource_group, vm_name, ssh_ip, public_key_file, private_key_file, op_call):
    """Shared pipeline: validate args, resolve target IP and key files, obtain a
    short-lived AAD-signed SSH certificate, then invoke op_call (connect or
    write-config) with (ip, username, cert_file, private_key_file)."""
    _assert_args(resource_group, vm_name, ssh_ip)
    public_key_file, private_key_file = _check_public_private_files(public_key_file, private_key_file)
    # Resolve the IP from the VM name/resource group when not given directly.
    ssh_ip = ssh_ip or ip_utils.get_ssh_ip(cmd, resource_group, vm_name)
    if not ssh_ip:
        raise util.CLIError(f"VM '{vm_name}' does not have a public IP address to SSH to")
    # AAD scope for the Linux access-check endpoint that signs the certificate.
    scopes = ["https://pas.windows.net/CheckMyAccess/Linux/user_impersonation"]
    data = _prepare_jwk_data(public_key_file)
    # Imported lazily: azure.cli.core is only needed on this code path.
    from azure.cli.core._profile import Profile
    profile = Profile(cli_ctx=cmd.cli_ctx)
    username, certificate = profile.get_msal_token(scopes, data)
    cert_file = _write_cert_file(public_key_file, certificate)
    op_call(ssh_ip, username, cert_file, private_key_file)
def _prepare_jwk_data(public_key_file):
    """Build the MSAL token-request payload embedding the public key as a JWK.

    The key id is the hex SHA-256 of modulus followed by exponent; the JWK
    is serialized into the "req_cnf" field of the returned dict.
    """
    modulus, exponent = _get_modulus_exponent(public_key_file)
    digest = hashlib.sha256()
    for part in (modulus, exponent):
        digest.update(part.encode('utf-8'))
    key_id = digest.hexdigest()
    json_jwk = json.dumps({
        "kty": "RSA",
        "n": modulus,
        "e": exponent,
        "kid": key_id
    })
    return {
        "token_type": "ssh-cert",
        "req_cnf": json_jwk,
        "key_id": key_id
    }
def _assert_args(resource_group, vm_name, ssh_ip):
if not (resource_group or vm_name or ssh_ip):
raise util.CLIError("The VM must be specified by --ip or --resource-group and --name")
if resource_group and not vm_name or vm_name and not resource_group:
raise util.CLIError("--resource-group and --name must be provided together")
if ssh_ip and (vm_name or resource_group):
raise util.CLIError("--ip cannot be used with --resource-group or --name")
def _check_public_private_files(public_key_file, private_key_file):
ssh_dir_parts = ["~", ".ssh"]
public_key_file = public_key_file or os.path.expanduser(os.path.join(*ssh_dir_parts, "id_rsa.pub"))
private_key_file = private_key_file or os.path.expanduser(os.path.join(*ssh_dir_parts, "id_rsa"))
if not os.path.isfile(public_key_file):
raise util.CLIError(f"Pulic key file {public_key_file} not found")
if not os.path.isfile(private_key_file):
raise util.CLIError(f"Private key file {private_key_file} not found")
return public_key_file, private_key_file
def _write_cert_file(public_key_file, certificate_contents):
cert_file = os.path.join(*os.path.split(public_key_file)[:-1], "id_rsa-cert.pub")
with open(cert_file, 'w') as f:
f.write(f"ssh-rsa-cert-v01@openssh.com {certificate_contents}")
return cert_file
def _get_modulus_exponent(public_key_file):
    """Read an RSA public key file and return its (modulus, exponent).

    Raises CLIError if the file is missing or the key cannot be parsed
    by the local rsa_parser helper.
    """
    if not os.path.isfile(public_key_file):
        raise util.CLIError(f"Public key file '{public_key_file}' was not found")
    with open(public_key_file, 'r') as f:
        public_key_text = f.read()
    parser = rsa_parser.RSAParser()
    try:
        parser.parse(public_key_text)
    except Exception as e:
        # Wrap any parser failure in a CLI-friendly error.
        raise util.CLIError(f"Could not parse public key. Error: {str(e)}")
    modulus = parser.modulus
    exponent = parser.exponent
    return modulus, exponent
|
<filename>library/src/main/java/com/bumptech/glide/load/data/HttpUrlFetcher.java
package com.bumptech.glide.load.data;
import android.text.TextUtils;
import com.bumptech.glide.Priority;
import com.bumptech.glide.load.model.GlideUrl;
import java.io.IOException;
import java.io.InputStream;
import java.net.HttpURLConnection;
import java.net.URISyntaxException;
import java.net.URL;
/**
 * A DataFetcher that retrieves an {@link java.io.InputStream} for a Url.
 */
public class HttpUrlFetcher implements DataFetcher<InputStream> {
    /** Maximum number of HTTP 3xx redirects followed before giving up. */
    private static final int MAXIMUM_REDIRECTS = 5;
    private static final HttpUrlConnectionFactory DEFAULT_CONNECTION_FACTORY = new DefaultHttpUrlConnectionFactory();

    private final GlideUrl glideUrl;
    private final HttpUrlConnectionFactory connectionFactory;

    // Mutable per-request state; released in cleanup().
    private HttpURLConnection urlConnection;
    private InputStream stream;
    // volatile: cancel() may be called from a different thread than loadData().
    private volatile boolean isCancelled;

    public HttpUrlFetcher(GlideUrl glideUrl) {
        this(glideUrl, DEFAULT_CONNECTION_FACTORY);
    }

    // Visible for testing.
    HttpUrlFetcher(GlideUrl glideUrl, HttpUrlConnectionFactory connectionFactory) {
        this.glideUrl = glideUrl;
        this.connectionFactory = connectionFactory;
    }

    /**
     * Fetches the stream for this fetcher's url, following up to
     * {@link #MAXIMUM_REDIRECTS} redirects. Returns null if cancelled.
     */
    @Override
    public InputStream loadData(Priority priority) throws Exception {
        return loadDataWithRedirects(glideUrl.toURL(), 0 /*redirects*/, null /*lastUrl*/);
    }

    /**
     * Opens {@code url} and returns its stream; recurses with an incremented
     * redirect count on 3xx responses. {@code lastUrl} is the previous hop,
     * used to detect a direct redirect loop.
     */
    private InputStream loadDataWithRedirects(URL url, int redirects, URL lastUrl) throws IOException {
        if (redirects >= MAXIMUM_REDIRECTS) {
            throw new IOException("Too many (> " + MAXIMUM_REDIRECTS + ") redirects!");
        } else {
            // Comparing the URLs using .equals performs additional network I/O and is generally broken.
            // See http://michaelscharf.blogspot.com/2006/11/javaneturlequals-and-hashcode-make.html.
            try {
                if (lastUrl != null && url.toURI().equals(lastUrl.toURI())) {
                    throw new IOException("In re-direct loop");
                }
            } catch (URISyntaxException e) {
                // Do nothing, this is best effort.
            }
        }
        urlConnection = connectionFactory.build(url);
        urlConnection.setConnectTimeout(2500);
        urlConnection.setReadTimeout(2500);
        urlConnection.setUseCaches(false);
        urlConnection.setDoInput(true);

        // Connect explicitly to avoid errors in decoders if connection fails.
        urlConnection.connect();
        if (isCancelled) {
            return null;
        }
        final int statusCode = urlConnection.getResponseCode();
        if (statusCode / 100 == 2) {
            // Success family: hand the body back directly.
            stream = urlConnection.getInputStream();
            return stream;
        } else if (statusCode / 100 == 3) {
            // Redirect family: resolve Location relative to the current url.
            String redirectUrlString = urlConnection.getHeaderField("Location");
            if (TextUtils.isEmpty(redirectUrlString)) {
                throw new IOException("Received empty or null redirect url");
            }
            URL redirectUrl = new URL(url, redirectUrlString);
            return loadDataWithRedirects(redirectUrl, redirects + 1, url);
        } else {
            if (statusCode == -1) {
                throw new IOException("Unable to retrieve response code from HttpUrlConnection.");
            }
            throw new IOException("Request failed " + statusCode + ": " + urlConnection.getResponseMessage());
        }
    }

    /** Closes the stream (best effort) and disconnects the connection. */
    @Override
    public void cleanup() {
        if (stream != null) {
            try {
                stream.close();
            } catch (IOException e) {
                // Ignore
            }
        }
        if (urlConnection != null) {
            urlConnection.disconnect();
        }
    }

    /** Cache key for this fetch: the string form of the url. */
    @Override
    public String getId() {
        return glideUrl.toString();
    }

    /**
     * Marks the fetch cancelled; loadData checks the flag after connect().
     */
    @Override
    public void cancel() {
        // TODO: we should consider disconnecting the url connection here, but we can't do so directly because cancel is
        // often called on the main thread.
        isCancelled = true;
    }

    /** Seam for injecting connections in tests. */
    interface HttpUrlConnectionFactory {
        HttpURLConnection build(URL url) throws IOException;
    }

    private static class DefaultHttpUrlConnectionFactory implements HttpUrlConnectionFactory {
        @Override
        public HttpURLConnection build(URL url) throws IOException {
            return (HttpURLConnection) url.openConnection();
        }
    }
}
|
def is_palindrome(input_string):
    """Return True if input_string reads the same forwards and backwards.

    The comparison is exact (case- and whitespace-sensitive); the empty
    string is considered a palindrome.
    """
    # Compare against the reversed string directly instead of the
    # if/else-returning-True/False pattern.
    return input_string == input_string[::-1]
# Test the program: 'bob' is a palindrome, so the first branch should print.
test_string = 'bob'
if is_palindrome(test_string):
    print("The string is a palindrome.")
else:
    print("The string is not a palindrome.")
# django initial stuff
import os
# Django specific settings: must be configured before any django import is used
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
from django.core.wsgi import get_wsgi_application
get_wsgi_application()
#library imports
import json
from tangerine import Tangerine
import arxiv
from django.utils.timezone import now
from datetime import timedelta
from pandas import DataFrame as df
from db.models import User
from time import sleep
# Ensure settings are read
# NOTE(review): get_wsgi_application() was already called above; this second
# call looks redundant - confirm before removing.
application = get_wsgi_application()
# Read the Slack API secret from secret.json next to this script.
path = os.path.dirname(os.path.realpath(__file__)) + "/"
with open(f'{path}secret.json', 'r') as f:
    d = json.load(f)
tangerine = Tangerine(d['slack-secret'])
def get_arxiv_news(query_string,filter_list = None):
    """Query arxiv for recently updated papers and return them as a DataFrame.

    query_string: arxiv search query (e.g. "cat:astro-ph.SR OR ...").
    filter_list: optional ";"-separated terms; papers whose summary contains
    any term are skipped.
    Only papers updated in roughly the last 1-2 days (4 on the day after a
    weekend) before today's 18:00 are kept.
    """
    result = arxiv.query(query=query_string,
                         sort_by="lastUpdatedDate",
                         sort_order="descending",
                         prune=True,
                         iterative=True,
                         max_chunk_results=50,
                         max_results=250)
    # Anchor the window at today 18:00; widen it to 4 days when today is
    # Tuesday (weekday()==1) to cover the weekend gap.
    time =now().replace(hour=18,minute=0,second=0,microsecond=0)
    lower_time = time - timedelta(days=4) if time.weekday() == 1 else time - timedelta(days=2)
    upper_time = time - timedelta(days=1)
    paper_dict = {
        'title' : [],
        'summary' : [],
        'author' : [],
        'link' : [],
        'date' : [],
        'obj' : []
    }
    # iterative=True: result is a generator factory, hence result().
    for i in result():
        paper_time = now().replace(year=i['updated_parsed'][0], month=i['updated_parsed'][1],
                                   day=i['updated_parsed'][2], hour=i['updated_parsed'][3],
                                   minute=i['updated_parsed'][4])
        if filter_list is not None:
            filtered = False
            for j in filter_list.split(";"):
                if j in i['summary']:
                    filtered = True
            if filtered:
                continue
        if lower_time < paper_time < upper_time:
            author_str = ""
            # NOTE(review): attribute access (i.authors) while the rest uses
            # item access (i['...']) - presumably the result objects support
            # both (feedparser-style); confirm.
            for j in i.authors:
                author_str += f"{j}, "
            paper_dict['title'].append(i['title'].replace('\n',''))
            paper_dict['summary'].append(i['summary'])
            paper_dict['author'].append(author_str)
            paper_dict['link'].append(i['arxiv_url'])
            paper_dict['date'].append(paper_time)
            paper_dict['obj'].append(i)
        else:
            # Results are sorted by update date descending, so once a paper
            # falls outside the window we can stop iterating.
            break
    return df.from_dict(paper_dict)
@tangerine.listen_for('add category')
def add_category(user, message):
    """Slack handler: append an arxiv category to the user's ";"-separated list."""
    try:
        u = User.objects.get(u_id=user)
    except User.DoesNotExist:
        # First interaction with this user: create their record.
        u = User(u_id=user)
        u.save()
    channel = user
    # NOTE(review): the slicing assumes the category arrives as a Slack
    # link token ("<url|text>") whose trailing ">" is stripped by [:-1] -
    # confirm against the actual message format.
    if u.categories is None:
        u.categories = message.split(" ")[-1].split("|")[-1][:-1]
    else:
        u.categories += ";" + message.split(" ")[-1].split("|")[-1][:-1]
    u.save()
    tangerine.speak(f"Set up categories: {u.categories}",channel)
@tangerine.listen_for('add filter')
def add_filter(user, message):
    """Slack handler: append a filter term to the user's ";"-separated filter list.

    Renamed from add_category: the duplicate definition shadowed the
    category handler at module level. Also fixes the append branch, which
    called str.replace() with a single argument (a TypeError at runtime)
    and then indexed the result with [-1].
    """
    try:
        u = User.objects.get(u_id=user)
    except User.DoesNotExist:
        # First interaction with this user: create their record.
        u = User(u_id=user)
        u.save()
    channel = user
    if u.filter_list is None:
        u.filter_list = message.replace("add filter ", "")
    else:
        u.filter_list += ";" + message.replace("add filter ", "")
    u.save()
    tangerine.speak(f"Set up filter: {u.filter_list}", channel)
@tangerine.listen_for('personal')
def personal_news(user, message):
    """Slack handler: DM the user recent arxiv papers from their categories,
    applying their personal filter list."""
    try:
        u = User.objects.get(u_id=user)
    except User.DoesNotExist:
        u = User(u_id=user)
        u.save()
    channel = user
    if u.categories is None:
        tangerine.speak("You need to set up categories. Type 'add category CATEGORY'. Categories must be named in"
                        "arxiv format.",channel)
        return
    # Build an OR query over the user's categories: "cat:a ORcat:b" style.
    cat_text = ""
    try:
        for i in u.categories.split(";")[:-1]:
            cat_text += "cat:" + i + " OR"
    except IndexError:
        # NOTE(review): split() never raises IndexError, so this handler
        # looks unreachable - confirm intent.
        cat_text = ""
    cat_text += "cat:" + u.categories.split(";")[-1]
    papers = get_arxiv_news(cat_text,u.filter_list)
    # Newest papers first; throttle slightly between Slack messages.
    for i in papers.sort_values(by=['date'],ascending=False).iterrows():
        i = i[1]
        text = f"*{i.title}* ({i.date.strftime('%d.%m.%Y: %H:%M')})\n"
        text += f"*Author(s): {i.author} *\n\n"
        text += f">>>{i.summary}\n\n"
        text += f"*Link: {i.link} *"
        tangerine.speak(text, channel)
        sleep(0.1)
@tangerine.listen_for('news')
def news(user, message):
    """Slack handler: post recent astro-ph papers to the shared #arxiv channel."""
    papers = get_arxiv_news("cat:astro-ph.SR OR cat:astro-ph.IM OR astro-ph.EP")
    channel = "#arxiv"
    # Newest papers first; throttle slightly between Slack messages.
    for i in papers.sort_values(by=['date'],ascending=False).iterrows():
        i = i[1]
        text = f"*{i.title}* ({i.date.strftime('%d.%m.%Y: %H:%M')})\n"
        text += f"*Author(s): {i.author} *\n\n"
        text += f">>>{i.summary}\n\n"
        text += f"*Link: {i.link} *"
        tangerine.speak(text, tangerine.get_channel_id_from_name(channel))
        sleep(0.1)

if __name__ == '__main__':
    # Start the Slack event loop; blocks forever.
    tangerine.run()
#include "NXTControl.h"
// Default constructor: talk to the NXT brick over the primary hardware
// serial port (Serial).
NXTControl::NXTControl(){
    _serial = &Serial;
}

// Construct with an explicit stream (e.g. SoftwareSerial or Serial1).
NXTControl::NXTControl(Stream &serial){
    _serial = &serial;
}
// Start the named .rxe program on the brick (STARTPROGRAM, no response).
// Telegram: [len lo][len hi][DIRECT_COMMAND][STARTPROGRAM][filename...\0].
//
// Fix: the original appended "\0" to the String (a no-op, since "\0" is an
// empty C string) and then read name.charAt(name.length()) - one past the
// end - relying on charAt's out-of-range behavior to produce the
// terminator. The terminator byte is now written explicitly.
void NXTControl::StartProgram(String name){
    byte size = name.length() + 5;      // 2 length bytes + 2 command bytes + name + '\0'
    byte commandToSend[size];
    commandToSend[0] = size-2;          // little-endian telegram length (bytes after the header)
    commandToSend[1] = 0x00;
    commandToSend[2] = DIRECT_COMMAND;
    commandToSend[3] = COMMAND_START_PROGRAM;
    for(byte i=0; i<name.length(); i++){
        commandToSend[4 + i] = name.charAt(i);
    }
    commandToSend[size - 1] = 0x00;     // explicit null terminator for the filename
    _serial->write(commandToSend, sizeof(commandToSend));
}
// Stop the currently running on-brick program (STOPPROGRAM, no response).
void NXTControl::StopProgram(){
    byte commandToSend[] = {
        0x02,                   // little-endian telegram length: 2 bytes follow
        0x00,
        DIRECT_COMMAND,
        COMMAND_STOP_PROGRAM
    };
    _serial->write(commandToSend, sizeof(commandToSend));
}
// Play a tone on the brick's speaker (PLAYTONE, no response).
// frequency and duration are sent little-endian, 16 bits each.
void NXTControl::PlayTone(unsigned int frequency, unsigned int duration){
    byte commandToSend[] =
    {
        0x06,                   // little-endian telegram length: 6 bytes follow
        0x00,
        DIRECT_COMMAND,
        COMMAND_PLAY_TONE,
        lowByte(frequency),
        highByte(frequency),
        lowByte(duration),
        highByte(duration)
    };
    _serial->write(commandToSend, sizeof(commandToSend));
}
// Set the full output (motor) state of one or more ports (SETOUTPUTSTATE,
// no response). power is clamped to [-100, 100]; tachoLimit is sent as a
// little-endian 32-bit value. For the combined pseudo-ports (OUT_AB/AC/BC)
// the same telegram is sent once per physical port; OUT_ABC uses the NXT's
// 0xFF "all motors" port code.
void NXTControl::SetOutputState(byte port, sbyte power, byte mode,
    byte regulationMode, sbyte turnRatio, byte runState,
    unsigned long tachoLimit){
    power = constrain(power, -100, 100);
    byte commandToSend[] =
    {
        0x0C,                   // little-endian telegram length: 12 bytes follow
        0x00,
        DIRECT_COMMAND,
        COMMAND_SET_OUTPUT_STATE,
        port,                   // index 4: patched below for combined ports
        power,
        mode,
        regulationMode,
        turnRatio,
        runState,
        byteRead(tachoLimit, 0),
        byteRead(tachoLimit, 1),
        byteRead(tachoLimit, 2),
        byteRead(tachoLimit, 3)
    };
    if(port > OUT_C){
        // Combined pseudo-port: fan the telegram out to each physical port.
        switch (port){
            case OUT_AB:
                commandToSend[4] = OUT_A;
                _serial->write(commandToSend, sizeof(commandToSend));
                commandToSend[4] = OUT_B;
                _serial->write(commandToSend, sizeof(commandToSend));
                break;
            case OUT_AC:
                commandToSend[4] = OUT_A;
                _serial->write(commandToSend, sizeof(commandToSend));
                commandToSend[4] = OUT_C;
                _serial->write(commandToSend, sizeof(commandToSend));
                break;
            case OUT_BC:
                commandToSend[4] = OUT_B;
                _serial->write(commandToSend, sizeof(commandToSend));
                commandToSend[4] = OUT_C;
                _serial->write(commandToSend, sizeof(commandToSend));
                break;
            case OUT_ABC:
                commandToSend[4] = 0xFF;    // NXT port code for "all motors"
                _serial->write(commandToSend, sizeof(commandToSend));
                break;
        }
    }else{
        _serial->write(commandToSend, sizeof(commandToSend));
    }
}
// Run motor(s) forward at the given power, unregulated.
void NXTControl::OnFwd(byte port, sbyte power){
    SetOutputState(port, power, MODE_MOTOR_ON, REGMODE_IDLE, 0, RUNSTATE_RUNNING, 0);
}

// Run motor(s) in reverse by negating the power.
void NXTControl::OnRev(byte port, sbyte power){
    SetOutputState(port, -power, MODE_MOTOR_ON, REGMODE_IDLE, 0, RUNSTATE_RUNNING, 0);
}

// Run motor(s) forward with the given regulation mode (speed/sync).
void NXTControl::OnFwdReg(byte port, sbyte power, byte regMode){
    SetOutputState(port, power, MODE_MOTOR_ON, regMode, 0, RUNSTATE_RUNNING, 0);
}

// Run motor(s) in reverse with the given regulation mode.
void NXTControl::OnRevReg(byte port, sbyte power, byte regMode){
    SetOutputState(port, -power, MODE_MOTOR_ON, regMode, 0, RUNSTATE_RUNNING, 0);
}

// Stop motor(s): zero power with braking enabled.
void NXTControl::Off(byte port){
    SetOutputState(port, 0, MODE_BRAKE, REGMODE_IDLE, 0, RUNSTATE_RUNNING, 0);
}
// Configure a sensor port's type and mode (SETINPUTMODE, no response).
void NXTControl::SetInputMode(byte port, byte sensorType, byte sensorMode){
    byte commandToSend[] =
    {
        0x05,                   // little-endian telegram length: 5 bytes follow
        0x00,
        DIRECT_COMMAND,
        COMMAND_SET_INPUT_MODE,
        port,
        sensorType,
        sensorMode
    };
    _serial->write(commandToSend, sizeof(commandToSend));
}
bool NXTControl::GetOutputState(byte port, OutputState ¶ms){
if(port > OUT_C) return false;
byte commandToSend[] = {0x03, 0x00, DIRECT_COMMAND_RESPONSE,
COMMAND_GET_OUTPUT_STATE,
port};
_serial->write(commandToSend, sizeof(commandToSend));
unsigned long time = millis();
while(!_serial->available()){
// expect to receive the values
if(millis() - time > 100)
break;
}
byte returnPackage[27];
_serial->readBytes(returnPackage, 27);
params.statusByte = returnPackage[4];
params.port = returnPackage[5];
params.power = returnPackage[6];
params.mode = returnPackage[7];
params.regulationMode = returnPackage[8];
params.turnRatio = returnPackage[9];
params.runState = returnPackage[10];
params.tachoLimit = returnPackage[11] |
(returnPackage[12] << 8) |
(returnPackage[13] << 16) |
(returnPackage[14] << 32);
params.tachoCount = returnPackage[15] |
(returnPackage[16] << 8) |
(returnPackage[17] << 16) |
(returnPackage[18] << 32);
params.blockTachoCount = returnPackage[19] |
(returnPackage[20] << 8) |
(returnPackage[21] << 16) |
(returnPackage[22] << 32);
params.rotationCount = returnPackage[23] |
(returnPackage[24] << 8) |
(returnPackage[25] << 16) |
(returnPackage[26] << 32);
if(params.statusByte != 0)
return false;
return true;
}
bool NXTControl::GetInputValues(byte port, InputValues ¶ms){
if(port > S4) return false;
byte commandToSend[] = {0x03, 0x00, DIRECT_COMMAND_RESPONSE,
COMMAND_GET_INPUT_VALUES,
port};
_serial->write(commandToSend, sizeof(commandToSend));
unsigned long time = millis();
while(!_serial->available()){
// expect to receive the values
if(millis() - time > 100)
break;
}
byte returnPackage[18];
_serial->readBytes(returnPackage, 18);
params.statusByte = returnPackage[4];
params.port = returnPackage[5];
params.isValid = returnPackage[6];
params.isCalibrated = returnPackage[7];
params.sensorType = returnPackage[8];
params.sensorMode = returnPackage[9];
params.rawValue = returnPackage[10] |
(returnPackage[11] << 8);
params.normalizedValue = returnPackage[12] |
(returnPackage[13] << 8);
params.scaledValue = returnPackage[14] |
(returnPackage[15] << 8);
params.calibratedValue = returnPackage[16] |
(returnPackage[17] << 8);
if(params.statusByte != 0)
return false;
return true;
}
// Reset a motor's position counter (RESETMOTORPOSITION, no response).
// isRelative selects the block-relative counter rather than the absolute one.
// Combined pseudo-ports fan out to each physical port, like SetOutputState.
// NOTE(review): the default argument appears here on the out-of-class
// definition; that only compiles if the in-class declaration does NOT also
// specify a default - confirm against the header.
void NXTControl::ResetMotorPosition(byte port, bool isRelative = true){
    byte commandToSend[] = {0x04,       // little-endian telegram length: 4 bytes follow
        0x00,
        DIRECT_COMMAND,
        COMMAND_RESET_MOTOR_POSITION,
        port,                           // index 4: patched below for combined ports
        isRelative
    };
    if(port > OUT_C){
        switch (port) {
            case OUT_AB:
                commandToSend[4] = OUT_A;
                _serial->write(commandToSend, sizeof(commandToSend));
                commandToSend[4] = OUT_B;
                _serial->write(commandToSend, sizeof(commandToSend));
                break;
            case OUT_AC:
                commandToSend[4] = OUT_A;
                _serial->write(commandToSend, sizeof(commandToSend));
                commandToSend[4] = OUT_C;
                _serial->write(commandToSend, sizeof(commandToSend));
                break;
            case OUT_BC:
                commandToSend[4] = OUT_B;
                _serial->write(commandToSend, sizeof(commandToSend));
                commandToSend[4] = OUT_C;
                _serial->write(commandToSend, sizeof(commandToSend));
                break;
            case OUT_ABC:
                commandToSend[4] = 0xFF;    // NXT port code for "all motors"
                _serial->write(commandToSend, sizeof(commandToSend));
                break;
        }
    }else{
        _serial->write(commandToSend, sizeof(commandToSend));
    }
}
// Rotate motor(s) by approximately `degrees`: reset the position counter,
// start the motor(s), then busy-poll blockTachoCount (of one representative
// motor for combined ports) until the target is reached, and brake.
// NOTE(review): the loop condition "blockTachoCount < degrees" presumably
// only terminates for positive power and positive degrees - confirm
// intended behavior for reverse rotation. The polling blocks the sketch
// for the whole movement.
void NXTControl::RotateMotor(byte port, sbyte power, int degrees){
    OutputState params;
    if(port > OUT_C){
        // Combined pseudo-port: monitor one member motor per case.
        switch (port) {
            case OUT_AB:
                ResetMotorPosition(OUT_A, true);
                delay(WAIT_TIME);
                GetOutputState(OUT_A, params);
                delay(WAIT_TIME);
                OnFwd(port, power);
                delay(WAIT_TIME);
                while(params.blockTachoCount < degrees){
                    GetOutputState(OUT_A, params);
                    delay(WAIT_TIME);
                }
                break;
            case OUT_AC:
                ResetMotorPosition(OUT_A, true);
                delay(WAIT_TIME);
                GetOutputState(OUT_A, params);
                delay(WAIT_TIME);
                OnFwd(port, power);
                delay(WAIT_TIME);
                while(params.blockTachoCount < degrees){
                    GetOutputState(OUT_A, params);
                    delay(WAIT_TIME);
                }
                break;
            case OUT_BC:
                ResetMotorPosition(OUT_B, true);
                delay(WAIT_TIME);
                GetOutputState(OUT_B, params);
                delay(WAIT_TIME);
                OnFwd(port, power);
                delay(WAIT_TIME);
                while(params.blockTachoCount < degrees){
                    GetOutputState(OUT_B, params);
                    delay(WAIT_TIME);
                }
                break;
            case OUT_ABC:
                ResetMotorPosition(OUT_A, true);
                delay(WAIT_TIME);
                GetOutputState(OUT_A, params);
                delay(WAIT_TIME);
                OnFwd(port, power);
                delay(WAIT_TIME);
                while(params.blockTachoCount < degrees){
                    GetOutputState(OUT_A, params);
                    delay(WAIT_TIME);
                }
                break;
        }
    }else{
        // Single physical port: monitor that port directly.
        ResetMotorPosition(port, true);
        delay(WAIT_TIME);
        GetOutputState(port, params);
        delay(WAIT_TIME);
        OnFwd(port, power);
        delay(WAIT_TIME);
        while(params.blockTachoCount < degrees){
            GetOutputState(port, params);
            delay(WAIT_TIME);
        }
    }
    Off(port);      // brake once the target count is reached
}
|
<reponame>ColonialDagger/PublixSubSaleNotifier<filename>run.py
import time
import requests
import config
import sys
from bs4 import BeautifulSoup
from twilio.rest import Client
def pullrequest(): # Defines pullrequest() to pull source from url
    """Fetch the sale page and return it parsed as BeautifulSoup."""
    # Bug fix: the original wrote print("...%s...") % (time.time()), which
    # applies % to print()'s return value (None) and raises TypeError on
    # Python 3. Interpolate before printing instead.
    print("\n\n\nPulling new web page at %s..." % time.time())
    pull = requests.get("http://arepublixchickentendersubsonsale.com/") # Pulls source code from web page
    soup = BeautifulSoup(pull.content,"html.parser") # Cleans source code
    print('New web page pulled succesfully!')
    return soup # Returns formatted source code
def eval():
    """Return True while the sub sale is on, False otherwise."""
    # NOTE(review): this shadows the built-in eval(); the name is kept
    # because the main loop below calls it as eval().
    page_source = str(pullrequest())
    # The page embeds an 'onsale:yes' marker while the sale is active.
    return 'onsale:yes' in page_source
def andr_notify(prompt): # Defines andr_notify() to notify via SMS with Twilio API
    """Send `prompt` as an SMS to every number in config.numbers via Twilio."""
    print("Logging into Twilio...")
    # Credentials come from config; the old hard-coded auth_token placeholder
    # ("<PASSWORD>") was never used and has been removed.
    client = Client(config.account_sid,config.auth_token)
    print("Sending messages...")
    for x in config.numbers:
        client.api.account.messages.create(
            to=x, # Outbound recievers
            from_="+17865395514", # Outbound sender
            body=prompt) # Outbound message
    # Bug fix: print("...%s!") % (...) raised TypeError on Python 3;
    # interpolate before printing.
    print("Messages sent at %s!" % time.time())
def sleep(sleep_time):
    """Count down sleep_time seconds, overwriting a single console line."""
    for elapsed in range(sleep_time + 1):
        remaining = sleep_time - elapsed
        print("Sleeping for " + str(remaining) + " more seconds...")
        sys.stdout.write("\033[F")  # move the cursor up one line to overwrite
        time.sleep(1)
# Sentinel that differs from both True and False, so the first cycle is
# always treated as a state change (and `change` is always bound before use).
prev_sale = "null"
while True: # Run forever
    onsale = eval()
    if onsale != prev_sale: # Checks for change in sale state
        change = True
    if onsale and change: # If sale has started
        prev_sale = True # Adds variable for state of previous cycle to be true
        change = False # Resets change
        print(config.promptstart)
        andr_notify(config.promptstart) # Sends SMS
    elif not onsale and change: # If sale has ended
        prev_sale = False # Adds variable for state of previous cycle to be false
        change = False # Resets change
        print(config.promptend)
        andr_notify(config.promptend) # Sends SMS
    sleep(config.sleep_time) # Sleeps before beginning new cycle
<gh_stars>10-100
// Doxygen-generated search index entry for the "lightuserdata" term:
// [search key, [display name, [target anchor, flag, namespace]]].
// Machine-generated - do not edit by hand.
var searchData=
[
  ['lightuserdata',['LightUserdata',['../namespacelua.html#ad9971b6ef33e02ba2c75d19c1d2518a1a6e4c577b4d0b098de1e834d2ccc96928',1,'lua']]]
];
|
import Cocoa
/// A palette entry pairing a display name and code with its NSColor value
/// and a mutable selection flag.
struct Color {
    let colorName: String
    let colorCode: String
    let color: NSColor
    var selected: Bool

    // Explicit memberwise initializer (mirrors the compiler-synthesized one).
    init(colorName: String, colorCode: String, color: NSColor, selected: Bool) {
        self.colorName = colorName
        self.colorCode = colorCode
        self.color = color
        self.selected = selected
    }
}
/// Maintains an ordered list of palette colors and their selection state.
class ColorPaletteView {
    var colors: [Color] = []

    /// Append a color to the end of the palette.
    func addColor(_ color: Color) {
        colors.append(color)
    }

    /// Remove the color at `index`; out-of-range indices are ignored.
    func removeColor(at index: Int) {
        guard colors.indices.contains(index) else { return }
        colors.remove(at: index)
    }

    /// Mark the color at `index` as selected; out-of-range indices are ignored.
    func selectColor(at index: Int) {
        guard colors.indices.contains(index) else { return }
        colors[index].selected = true
    }

    /// Refresh the on-screen representation of the palette.
    func updateView() {
        // Implement the logic to update the view based on the selected colors
    }
}
/**
 * Hook-style guard: rejects the request unless an authenticated user with a
 * verified email is present on the context.
 *
 * @param {object} context - request context carrying `params.user`.
 * @throws {errors.BadRequest} when the user is absent or unverified.
 *   NOTE(review): `errors` is not imported in this snippet - presumably
 *   bound at module scope (e.g. @feathersjs/errors); confirm.
 */
function checkUserEmailVerification(context) {
  const user = context.params.user;
  if (!user?.isVerified) {
    throw new errors.BadRequest("User's email is not yet verified.");
  }
}
def get_currency(list_of_countries_and_currencies, country_name):
    """Return the currency paired with country_name, or None if absent.

    Entries are sequences whose first element is the country name and
    second is the currency; the first matching entry wins.
    """
    matches = (entry[1] for entry in list_of_countries_and_currencies
               if entry[0] == country_name)
    return next(matches, None)
#!/usr/bin/env bash
set -ev

# Run 'blt' phpunit tests, excluding deploy-push tests.
# Fix: quote ${BLT_DIR} - unquoted, a path containing whitespace or glob
# characters would be word-split/expanded before reaching phpunit.
phpunit "${BLT_DIR}/tests/phpunit" --group blt --exclude-group deploy-push

set +v
|
#!/bin/bash

PROGNAME="$( basename $0 )"

# Usage
# Print the option summary to stderr and exit non-zero.
function usage() {
  cat << EOS >&2
Usage:  ${PROGNAME}

Options:
  -c, --coverage      Minimum read coverage allowed for the predicted transcripts. (default: 5)
  -l, --length        Minimum length allowed for the predicted transcripts. (default: 74)
  -h, --help          Show usage.
  -v, --version       Show version.
EOS
  exit 1
}

# Print the pipeline version string to stderr and exit non-zero.
function version() {
  cat << EOS >&2
STRT2-NextSeq-automated-pipeline_TFE-based ver2020.6.30
EOS
  exit 1
}
# Default parameters
cover_VALUE=5
len_VALUE=74

# Parameter settings
# NOTE(review): the loop iterates over the one-time expansion of "$@" but
# reads option values from "$2" and advances with shift; this stays in sync
# only while each branch shifts exactly what it consumed - confirm before
# restructuring.
PARAM=()
for opt in "$@"; do
    case "${opt}" in
        '-c' | '--coverage' )
            # Require a non-option argument after the flag.
            if [[ -z "$2" ]] || [[ "$2" =~ ^-+ ]]; then
                echo "${PROGNAME}: option requires an argument -- $( echo $1 | sed 's/^-*//' )" 1>&2
                exit 1
            fi
            cover_VALUE="$2"
            shift 2
            ;;
        '-l' | '--length' )
            # Require a non-option argument after the flag.
            if [[ -z "$2" ]] || [[ "$2" =~ ^-+ ]]; then
                echo "${PROGNAME}: option requires an argument -- $( echo $1 | sed 's/^-*//' )" 1>&2
                exit 1
            fi
            len_VALUE="$2"
            shift 2
            ;;
        '-h' | '--help' )
            usage
            ;;
        '-v' | '--version' )
            version
            ;;
        '--' | '-' )
            # End of options: collect everything that follows as positional.
            shift
            PARAM+=( "$@" )
            break
            ;;
        -* )
            echo "${PROGNAME}: illegal option -- '$( echo $1 | sed 's/^-*//' )'" 1>&2
            exit 1
            ;;
    esac
done

# This pipeline takes no positional arguments.
if [[ -n "${PARAM[@]}" ]]; then
    usage
fi
# Load the required tools (HPC environment modules)
module load bioinfo-tools
module load samtools/1.10
module load StringTie/2.1.4
module load BEDTools/2.29.2
module load subread/2.0.0
module load ruby/2.6.2

# Make temporary and output directories
mkdir byTFE_tmp
mkdir byTFE_out
mkdir byTFE_tmp/class

# Common run-name prefix, derived from the first library's BAM file name.
OUTPUT_NAME=$(basename out/Output_bam/*_1.output.bam _1.output.bam)

# Sample classification
# src/TFEclass.txt maps sample id (column 1) to class name (column 2);
# samples whose class is "NA" are skipped.
while read row; do
  column1=`echo ${row} | cut -d ' ' -f 1`
  column2=`echo ${row} | cut -d ' ' -f 2`
  if [[ $column2 != "NA" ]]; then
    mkdir -p byTFE_tmp/class/${column2}
    cp out/Output_bam/${OUTPUT_NAME}_${column1}.output.bam byTFE_tmp/class/${column2}
  else
    : # no-op: sample not assigned to any class
  fi
done < src/TFEclass.txt

classes=`find byTFE_tmp/class/* -type d`
for class in $classes;
do
CLASS_NAME=$(basename $class byTFE_tmp/class/)
# Merge all BAMs, remove duplicated, non-primary, unmapped reads, and sort
samtools merge -@ 8 - $class/*.bam | samtools view -@ 8 -b -F 256 -F 1024 -F 4 - | samtools sort -@ 8 -o $class/merged.bam
# Assembly with Stringtie
stringtie $class/merged.bam -o $class/stringtie.gtf -p 8 -m ${len_VALUE} --fr -l ${OUTPUT_NAME}.${CLASS_NAME} -c ${cover_VALUE}
# Extract 1st-exon
cat $class/stringtie.gtf | awk '{if($7=="+"||$7=="."){print $0}}'| grep 'exon_number "1"' \
| awk 'OFS = "\t" {print $1,$4-1,$5,$12,"0",$7}' | sed -e 's/"//g'| sed -e 's/;//g' > $class/firstExons-fwd.bed
cat $class/stringtie.gtf | awk 'BEGIN{OFS="\t"}{if($7=="-" && $3=="exon"){print $1,$4-1,$5,$12,"0",$7}}' \
| sed -e 's/"//g'| sed -e 's/;//g' | sort -k 4,4 -k 1,1 -k 2,2n | bedtools groupby -i stdin -g 4 -c 1,2,3,4,5,6 -o last \
| awk 'BEGIN{OFS="\t"}{print $2,$3,$4,$5,$6,$7}' > $class/firstExons-rev.bed
cat $class/firstExons-fwd.bed $class/firstExons-rev.bed | sortBed -i stdin > $class/firstExons.bed
rm $class/firstExons-fwd.bed && rm $class/firstExons-rev.bed
# Fiveprimes for peak detection
mkdir $class/bedGraph
for file in $class/*.output.bam
do
name=$(basename $file .output.bam)
Spike=$(samtools view -F 256 -F 1024 -F 4 $file |grep -e ERCC -e NIST| wc -l)
samtools view -b -F 256 -F 1024 -F 4 $file | bamToBed -i stdin\
| gawk 'BEGIN{ FS="\t"; OFS=" " }{if($6=="+"){print $1,$2,$2+1,".",0,"+"}else{print $1,$3-1,$3,".",0,"-"}}'\
| sort -k 1,1 -k 2,2n\
| uniq -c\
| gawk 'BEGIN{ FS=" "; OFS="\t" }{print $2,$3,$4,$5,$1/'$Spike',$7}'\
| pigz -c > $class/bedGraph/$name.bedGraph.gz
done
gunzip -c $class/bedGraph/*.bedGraph.gz | sort -k 1,1 -k 2,2n | mergeBed -s -c 4,5,6 -o distinct,sum,distinct -d -1 > $class/fivePrimes.bed
done
# TFE annotation
cat byTFE_tmp/class/*/firstExons.bed | sort -k 1,1 -k 2,2n |awk '{if($6=="+"){print $0}}' | grep -e ERCC -e NIST \
| mergeBed -s -c 6 -o distinct | bedtools groupby -i stdin -g 1 -c 1,2,3,4 -o first \
| awk 'BEGIN{OFS="\t"}{print $2,$3,$4,"RNA_SPIKE_"$2,0,$5}' > byTFE_tmp/${OUTPUT_NAME}_spike-firstExons_class.bed
cat byTFE_tmp/class/*/firstExons.bed | sort -k 1,1 -k 2,2n | mergeBed -s -c 6 -o distinct \
| grep -v ERCC| grep -v NIST | awk 'BEGIN{OFS="\t"}{print $1,$2,$3,"TFE"NR,0,$4}' > byTFE_tmp/${OUTPUT_NAME}_nonspike-firstExons_class.bed
cat byTFE_tmp/${OUTPUT_NAME}_spike-firstExons_class.bed byTFE_tmp/${OUTPUT_NAME}_nonspike-firstExons_class.bed > byTFE_tmp/${OUTPUT_NAME}_TFE-regions.bed
rm byTFE_tmp/${OUTPUT_NAME}_spike-firstExons_class.bed && rm byTFE_tmp/${OUTPUT_NAME}_nonspike-firstExons_class.bed
# Counting
awk '{print $4 "\t" $1 "\t" $2+1 "\t" $3 "\t" $6}' byTFE_tmp/${OUTPUT_NAME}_TFE-regions.bed > byTFE_tmp/${OUTPUT_NAME}_TFE-regions.saf
featureCounts -T 8 -s 1 --largestOverlap --ignoreDup --primary -a byTFE_tmp/${OUTPUT_NAME}_TFE-regions.saf -F SAF -o byTFE_tmp/${OUTPUT_NAME}_byTFE-counts.txt byTFE_tmp/class/*/*.output.bam
# Peaks
cat byTFE_tmp/class/*/fivePrimes.bed | sort -k 1,1 -k 2,2n | mergeBed -s -c 4,5,6 -o distinct,sum,distinct -d -1 > byTFE_tmp/${OUTPUT_NAME}_fivePrimes.bed
intersectBed -wa -wb -s -a byTFE_tmp/${OUTPUT_NAME}_TFE-regions.bed -b byTFE_tmp/${OUTPUT_NAME}_fivePrimes.bed \
| cut -f 4,7,8,9,11,12 \
| gawk 'BEGIN{ FS="\t"; OFS="\t" }{p=$6=="+"?$3:-$4;print $2,$3,$4,$1,$5,$6,p,$1}' \
| sort -k 8,8 -k 5,5gr -k 7,7g \
| uniq -f 7 \
| cut -f 1-6 \
| sort -k 1,1 -k 2,2n > byTFE_out/${OUTPUT_NAME}_peaks.bed
# Annotation of peaks
mkdir src/anno
# Exactly one annotation source file may exist under src/; dispatch to the
# matching annotation script. The spurious top-level `shift 2` calls that
# followed each ruby invocation have been removed: no option values are
# consumed here, and they only perturbed the positional parameters.
if test -f src/ens-genes.txt && test ! -f src/knowngene-names.txt && test ! -f src/refGene.txt && test ! -f src/Gencode.txt; then
  echo "Annotation with Ensembl"
  ruby bin/ensGene_annotation.rb
elif test ! -f src/ens-genes.txt && test -f src/knowngene-names.txt && test ! -f src/refGene.txt && test ! -f src/Gencode.txt; then
  echo "Annotation with UCSC KnownGenes"
  ruby bin/knownGene_annotation.rb
elif test ! -f src/ens-genes.txt && test ! -f src/knowngene-names.txt && test -f src/refGene.txt && test ! -f src/Gencode.txt; then
  echo "Annotation with NCBI RefSeq"
  ruby bin/refGene_annotation.rb
elif test ! -f src/ens-genes.txt && test ! -f src/knowngene-names.txt && test ! -f src/refGene.txt && test -f src/Gencode.txt; then
  echo "Annotation with GENCODE"
  ruby bin/GENCODE_annotation.rb
else
  # Zero or more than one annotation file present: ambiguous setup.
  echo "Something is wrong with the annotation data file."
  exit 1
fi
# Trim upstream regions to chromosome boundaries.
intersectBed -a src/anno/Coding-up.bed -b src/chrom.size.bed > src/anno/Coding-up_trimmed.bed
intersectBed -a src/anno/NC-up.bed -b src/chrom.size.bed > src/anno/NC-up_trimmed.bed
# Each stanza below classifies the peaks not yet classified by the
# previous stanza (peaks_nonClassN.bed) against the next annotation tier.
# The shared awk chain aggregates, per peak, all overlapping gene labels
# (";"-joined within a transcript, ":"-joined across transcripts) and tags
# the row with the tier name.
# Coding 5'UTR
intersectBed -s -wa -wb -a byTFE_out/${OUTPUT_NAME}_peaks.bed -b src/anno/Coding-5UTR.bed | awk -F "\t" '{print($4,$10)}' \
|awk -F "|" '{if(a[$1])a[$1]=a[$1]";"$2; else a[$1]=$2;}END{for (i in a)print i, a[i];}' OFS="\t" \
|awk -F " " '{print $1"\t"$2","$3}' \
|awk -F "\t" '{if(a[$1])a[$1]=a[$1]":"$2; else a[$1]=$2;}END{for (i in a)print i, a[i];}' OFS="\t" \
|awk -v 'OFS=\t' '{print $1,$2,"Coding_5UTR"}' | sort -k 1,1 > src/anno/peaks_class1.txt
# Coding upstream
intersectBed -s -wa -wb -a byTFE_out/${OUTPUT_NAME}_peaks.bed -b src/anno/Coding-5UTR.bed -v > src/anno/peaks_nonClass1.bed
intersectBed -s -wa -wb -a src/anno/peaks_nonClass1.bed -b src/anno/Coding-up_trimmed.bed | awk -F "\t" '{print($4,$10)}' \
|awk -F "|" '{if(a[$1])a[$1]=a[$1]";"$2; else a[$1]=$2;}END{for (i in a)print i, a[i];}' OFS="\t" \
|awk -F " " '{print $1"\t"$2","$3}' \
|awk -F "\t" '{if(a[$1])a[$1]=a[$1]":"$2; else a[$1]=$2;}END{for (i in a)print i, a[i];}' OFS="\t" \
|awk -v 'OFS=\t' '{print $1,$2,"Coding_upstream"}' | sort -k 1,1 > src/anno/peaks_class2.txt
# Coding CDS
intersectBed -s -wa -wb -a src/anno/peaks_nonClass1.bed -b src/anno/Coding-up_trimmed.bed -v > src/anno/peaks_nonClass2.bed
intersectBed -s -wa -wb -a src/anno/peaks_nonClass2.bed -b src/anno/Coding-CDS.bed | awk -F "\t" '{print($4,$10)}' \
|awk -F "|" '{if(a[$1])a[$1]=a[$1]";"$2; else a[$1]=$2;}END{for (i in a)print i, a[i];}' OFS="\t" \
|awk -F " " '{print $1"\t"$2","$3}' \
|awk -F "\t" '{if(a[$1])a[$1]=a[$1]":"$2; else a[$1]=$2;}END{for (i in a)print i, a[i];}' OFS="\t" \
|awk -v 'OFS=\t' '{print $1,$2,"Coding_CDS"}' | sort -k 1,1 > src/anno/peaks_class3.txt
# Coding 3'UTR
intersectBed -s -wa -wb -a src/anno/peaks_nonClass2.bed -b src/anno/Coding-CDS.bed -v > src/anno/peaks_nonClass3.bed
intersectBed -s -wa -wb -a src/anno/peaks_nonClass3.bed -b src/anno/Coding-3UTR.bed | awk -F "\t" '{print($4,$10)}' \
|awk -F "|" '{if(a[$1])a[$1]=a[$1]";"$2; else a[$1]=$2;}END{for (i in a)print i, a[i];}' OFS="\t" \
|awk -F " " '{print $1"\t"$2","$3}' \
|awk -F "\t" '{if(a[$1])a[$1]=a[$1]":"$2; else a[$1]=$2;}END{for (i in a)print i, a[i];}' OFS="\t" \
|awk -v 'OFS=\t' '{print $1,$2,"Coding_3UTR"}' | sort -k 1,1 > src/anno/peaks_class4.txt
# Noncoding 1st-exon
intersectBed -s -wa -wb -a src/anno/peaks_nonClass3.bed -b src/anno/Coding-3UTR.bed -v > src/anno/peaks_nonClass4.bed
intersectBed -s -wa -wb -a src/anno/peaks_nonClass4.bed -b src/anno/NC-1stexon.bed | awk -F "\t" '{print($4,$10)}' \
|awk -F "|" '{if(a[$1])a[$1]=a[$1]";"$2; else a[$1]=$2;}END{for (i in a)print i, a[i];}' OFS="\t" \
|awk -F " " '{print $1"\t"$2","$3}' \
|awk -F "\t" '{if(a[$1])a[$1]=a[$1]":"$2; else a[$1]=$2;}END{for (i in a)print i, a[i];}' OFS="\t" \
|awk -v 'OFS=\t' '{print $1,$2,"Noncoding_1st-exon"}' | sort -k 1,1 > src/anno/peaks_class5.txt
# Noncoding upstream
intersectBed -s -wa -wb -a src/anno/peaks_nonClass4.bed -b src/anno/NC-1stexon.bed -v > src/anno/peaks_nonClass5.bed
intersectBed -s -wa -wb -a src/anno/peaks_nonClass5.bed -b src/anno/NC-up_trimmed.bed | awk -F "\t" '{print($4,$10)}' \
|awk -F "|" '{if(a[$1])a[$1]=a[$1]";"$2; else a[$1]=$2;}END{for (i in a)print i, a[i];}' OFS="\t" \
|awk -F " " '{print $1"\t"$2","$3}' \
|awk -F "\t" '{if(a[$1])a[$1]=a[$1]":"$2; else a[$1]=$2;}END{for (i in a)print i, a[i];}' OFS="\t" \
|awk -v 'OFS=\t' '{print $1,$2,"Noncoding_upstream"}' | sort -k 1,1 > src/anno/peaks_class6.txt
# Noncoding other exon
intersectBed -s -wa -wb -a src/anno/peaks_nonClass5.bed -b src/anno/NC-up_trimmed.bed -v > src/anno/peaks_nonClass6.bed
intersectBed -s -wa -wb -a src/anno/peaks_nonClass6.bed -b src/anno/NC-exon.bed | awk -F "\t" '{print($4,$10)}' \
|awk -F "|" '{if(a[$1])a[$1]=a[$1]";"$2; else a[$1]=$2;}END{for (i in a)print i, a[i];}' OFS="\t" \
|awk -F " " '{print $1"\t"$2","$3}' \
|awk -F "\t" '{if(a[$1])a[$1]=a[$1]":"$2; else a[$1]=$2;}END{for (i in a)print i, a[i];}' OFS="\t" \
|awk -v 'OFS=\t' '{print $1,$2,"Noncoding_other-exon"}' | sort -k 1,1 > src/anno/peaks_class7.txt
# Intron
intersectBed -s -wa -wb -a src/anno/peaks_nonClass6.bed -b src/anno/NC-exon.bed -v > src/anno/peaks_nonClass7.bed
intersectBed -s -wa -wb -a src/anno/peaks_nonClass7.bed -b src/anno/Intron.bed | awk -F "\t" '{print($4,$10)}' \
|awk -F "|" '{if(a[$1])a[$1]=a[$1]";"$2; else a[$1]=$2;}END{for (i in a)print i, a[i];}' OFS="\t" \
|awk -F " " '{print $1"\t"$2","$3}' \
|awk -F "\t" '{if(a[$1])a[$1]=a[$1]":"$2; else a[$1]=$2;}END{for (i in a)print i, a[i];}' OFS="\t" \
|awk -v 'OFS=\t' '{print $1,$2,"Intron"}' | sort -k 1,1 > src/anno/peaks_class8.txt
# Unannotated
intersectBed -s -wa -wb -a src/anno/peaks_nonClass7.bed -b src/anno/Intron.bed -v > src/anno/peaks_nonClass8.bed
cat src/anno/peaks_nonClass8.bed | awk -v 'OFS=\t' '{print($4,$1":"$3";"$6,"Unannotated")}' > src/anno/peaks_class9.txt
# Concatenate all nine tiers into the final annotation table.
for i in {1..9}; do
  cat src/anno/peaks_class${i}.txt
done | sort -k 1,1 > byTFE_out/${OUTPUT_NAME}_annotation.txt
# Peak annotation
# Join TFE regions with their peak position, then with the annotation, and
# finally with the featureCounts matrix (tab character via printf '\011').
join -1 4 -2 4 -t "$(printf '\011')" <(sort -k 4,4 byTFE_tmp/${OUTPUT_NAME}_TFE-regions.bed) <(sort -k 4,4 byTFE_out/${OUTPUT_NAME}_peaks.bed) \
| awk 'BEGIN{OFS="\t"}{print $1,$2,$3,$4,$9,$6}' > byTFE_tmp/${OUTPUT_NAME}_TFE-region-peak.txt
join -1 1 -2 1 -t "$(printf '\011')" <(sort -k 1,1 byTFE_out/${OUTPUT_NAME}_annotation.txt) <(sort -k 1,1 byTFE_tmp/${OUTPUT_NAME}_TFE-region-peak.txt) \
> byTFE_tmp/${OUTPUT_NAME}_TFE-region-peak-anno.txt
join -1 1 -2 1 -t "$(printf '\011')" <(echo -e "Geneid""\t""Gene""\t""Annotation""\t""Chr""\t""Start""\t""End""\t""Peak""\t""Strand" \
| cat - <(sort -k 1,1 byTFE_tmp/${OUTPUT_NAME}_TFE-region-peak-anno.txt)) <(cat byTFE_tmp/${OUTPUT_NAME}_byTFE-counts.txt | sed -e '1d' \
| awk 'NR<2{print $0;next}{print $0| "sort -k 1,1"}') | cut -f-8,13- | awk 'NR<2{print $0;next}{print $0| "sort -k4,4 -k5,5n -k8,8"}' \
| sed -e "1 s/Geneid/TFE/g" | sed -e "1 s/byTFE_tmp\/class\///g" | sed -e "1 s/.output.bam//g" | sed -e "1 s/\//\|/g"\
> byTFE_out/${OUTPUT_NAME}_byTFE-counts_annotation.txt
rm byTFE_tmp/${OUTPUT_NAME}_TFE-region-peak.txt &&rm byTFE_tmp/${OUTPUT_NAME}_TFE-region-peak-anno.txt
|
<gh_stars>10-100
/*
* Copyright (C) 2022 HERE Europe B.V.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
* License-Filename: LICENSE
*/
#pragma once
#include <olp/core/generated/serializer/SerializerWrapper.h>
#include <olp/dataservice/read/model/CatalogVersion.h>
#include <rapidjson/document.h>
namespace olp {
namespace serializer {
// Serializes a CatalogVersion into `value` as a JSON object with the
// "hrn" and "version" members.
inline void to_json(const dataservice::read::model::CatalogVersion& x,
                    rapidjson::Value& value,
                    rapidjson::Document::AllocatorType& allocator) {
  value.SetObject();
  serialize("hrn", x.GetHrn(), value, allocator);
  serialize("version", x.GetVersion(), value, allocator);
}
// Serializes a list of CatalogVersion objects into `value` as
// {"dependencies": [...]}, one JSON object per element.
template <>
inline void to_json<dataservice::read::model::CatalogVersion>(
    const std::vector<dataservice::read::model::CatalogVersion>& x,
    rapidjson::Value& value, rapidjson::Document::AllocatorType& allocator) {
  value.SetObject();
  rapidjson::Value dependencies(rapidjson::kArrayType);
  for (const auto& version : x) {
    rapidjson::Value item;
    to_json(version, item, allocator);
    dependencies.PushBack(std::move(item), allocator);
  }
  value.AddMember("dependencies", std::move(dependencies), allocator);
}
} // namespace serializer
} // namespace olp
|
<reponame>skylark-integration/skylark-browserfs
define(['../core/global'], function (global) {
    'use strict';
    /**
     * Cross-environment setImmediate shim. Chooses, at module load time, the
     * fastest available zero-delay scheduling mechanism:
     * native setImmediate > window.postMessage > MessageChannel > setTimeout(0).
     * @hidden
     */
    let bfsSetImmediate;
    if (typeof (setImmediate) !== "undefined") {
        // Host already provides setImmediate (e.g. Node, IE10+); use it as-is.
        bfsSetImmediate = setImmediate;
    }
    else {
        const gScope = global;
        // Callbacks queued by the postMessage/MessageChannel implementations.
        const timeouts = [];
        const messageName = "zero-timeout-message";
        // Detects whether postMessage delivers asynchronously. Bails out in
        // web workers (importScripts present) or when postMessage is missing;
        // otherwise posts a message and checks it was NOT handled synchronously.
        const canUsePostMessage = function () {
            if (typeof gScope.importScripts !== 'undefined' || !gScope.postMessage) {
                return false;
            }
            let postMessageIsAsync = true;
            const oldOnMessage = gScope.onmessage;
            gScope.onmessage = function () {
                postMessageIsAsync = false;
            };
            gScope.postMessage('', '*');
            gScope.onmessage = oldOnMessage;
            return postMessageIsAsync;
        };
        if (canUsePostMessage()) {
            bfsSetImmediate = function (fn) {
                timeouts.push(fn);
                gScope.postMessage(messageName, "*");
            };
            // Runs the oldest queued callback when our marker message arrives,
            // ignoring unrelated messages from other windows.
            const handleMessage = function (event) {
                if (event.source === self && event.data === messageName) {
                    if (event.stopPropagation) {
                        event.stopPropagation();
                    }
                    else {
                        event.cancelBubble = true;
                    }
                    if (timeouts.length > 0) {
                        const fn = timeouts.shift();
                        return fn();
                    }
                }
            };
            if (gScope.addEventListener) {
                gScope.addEventListener('message', handleMessage, true);
            }
            else {
                // Legacy IE event registration.
                gScope.attachEvent('onmessage', handleMessage);
            }
        }
        else if (gScope.MessageChannel) {
            // WebWorker MessageChannel
            const channel = new gScope.MessageChannel();
            channel.port1.onmessage = (event) => {
                if (timeouts.length > 0) {
                    return timeouts.shift()();
                }
            };
            bfsSetImmediate = (fn) => {
                timeouts.push(fn);
                channel.port2.postMessage('');
            };
        }
        else {
            // Last resort: setTimeout(fn, 0) (browsers clamp this to >=4ms).
            bfsSetImmediate = function (fn) {
                return setTimeout(fn, 0);
            };
        }
    }
    return bfsSetImmediate;
});
<filename>experimental/pqcrypto/cc/subtle/cecpq2_hkdf_sender_kem_boringssl_test.cc
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
///////////////////////////////////////////////////////////////////////////////
#include "pqcrypto/cc/subtle/cecpq2_hkdf_sender_kem_boringssl.h"
#include "gtest/gtest.h"
#include "absl/memory/memory.h"
#include "openssl/hrss.h"
#include "openssl/sha.h"
#include "tink/subtle/common_enums.h"
#include "tink/subtle/hkdf.h"
#include "tink/subtle/random.h"
#include "tink/subtle/subtle_util.h"
#include "tink/subtle/subtle_util_boringssl.h"
#include "tink/util/secret_data.h"
#include "tink/util/status.h"
#include "tink/util/statusor.h"
#include "tink/util/test_matchers.h"
#include "tink/util/test_util.h"
#include "pqcrypto/cc/subtle/cecpq2_hkdf_recipient_kem_boringssl.h"
namespace crypto {
namespace tink {
namespace subtle {
namespace {
// Test fixture for the CECPQ2 HKDF sender KEM tests.
class Cecpq2HkdfSenderKemBoringSslTest : public ::testing::Test {};

// An HRSS key pair plus the marshaled (wire-format) public key.
struct HrssKeyPair {
  util::SecretUniquePtr<struct HRSS_private_key> hrss_private_key =
      util::MakeSecretUniquePtr<struct HRSS_private_key>();
  struct HRSS_public_key hrss_public_key;
  std::string hrss_public_key_marshaled;
};

// A full CECPQ2 key pair: an HRSS key pair and an X25519 EC key pair.
struct Cecpq2KeyPair {
  struct HrssKeyPair hrss_key_pair;
  SubtleUtilBoringSSL::EcKey x25519_key_pair;
};
// This method performs some basic common setup (HRSS and X25519 key generation,
// and marshaling HRSS public key) needed by the tests. Returns the generated
// CECPQ2 key pair, or the underlying error status on failure.
crypto::tink::util::StatusOr<struct Cecpq2KeyPair> HrssTestCommon(
    EllipticCurveType curve_type) {
  Cecpq2KeyPair cecpq2_key_pair;
  // Generating a X25519 key pair
  auto status_or_ec_test_key = SubtleUtilBoringSSL::GetNewEcKey(curve_type);
  if (!status_or_ec_test_key.ok()) return status_or_ec_test_key.status();
  cecpq2_key_pair.x25519_key_pair = status_or_ec_test_key.ValueOrDie();
  // Generating a HRSS key pair from fresh random entropy
  util::SecretData generate_hrss_key_entropy =
      crypto::tink::subtle::Random::GetRandomKeyBytes(HRSS_GENERATE_KEY_BYTES);
  HRSS_generate_key(&cecpq2_key_pair.hrss_key_pair.hrss_public_key,
                    cecpq2_key_pair.hrss_key_pair.hrss_private_key.get(),
                    generate_hrss_key_entropy.data());
  // Marshalling the HRSS public key into its wire format
  subtle::ResizeStringUninitialized(
      &(cecpq2_key_pair.hrss_key_pair.hrss_public_key_marshaled),
      HRSS_PUBLIC_KEY_BYTES);
  HRSS_marshal_public_key(
      reinterpret_cast<uint8_t*>(
          cecpq2_key_pair.hrss_key_pair.hrss_public_key_marshaled.data()),
      &(cecpq2_key_pair.hrss_key_pair.hrss_public_key));
  return cecpq2_key_pair;
}
// This test evaluates the creation of a Cecpq2HkdfSenderKemBoringSsl instance
// with an unknown curve type parameter. It should fail with an
// util::error::UNIMPLEMENTED error.
TEST_F(Cecpq2HkdfSenderKemBoringSslTest, TestUnknownCurve) {
  if (kUseOnlyFips) {
    GTEST_SKIP() << "Not supported in FIPS-only mode";
  }
  // Local renamed from the misspelled "statur_or_cecpq2_key" for
  // consistency with the other status_or_* locals in this file.
  auto status_or_cecpq2_key = HrssTestCommon(EllipticCurveType::CURVE25519);
  ASSERT_TRUE(status_or_cecpq2_key.ok());
  auto cecpq2_key_pair = std::move(status_or_cecpq2_key).ValueOrDie();
  // Creating an instance of Cecpq2HkdfSenderKemBoringSsl specifying an unknown
  // curve
  auto status_or_sender_kem = Cecpq2HkdfSenderKemBoringSsl::New(
      EllipticCurveType::UNKNOWN_CURVE, cecpq2_key_pair.x25519_key_pair.pub_x,
      cecpq2_key_pair.x25519_key_pair.pub_y,
      cecpq2_key_pair.hrss_key_pair.hrss_public_key_marshaled);
  // The instance creation above should fail with an unimplemented algorithm
  // error given the UNKNOWN_CURVE parameter
  EXPECT_EQ(util::error::UNIMPLEMENTED,
            status_or_sender_kem.status().error_code());
}
// This test evaluates the case where an unsupported curve (NIST_P256) is
// specified. This test should fail with an util::error::UNIMPLEMENTED error.
TEST_F(Cecpq2HkdfSenderKemBoringSslTest, TestUnsupportedCurve) {
  if (kUseOnlyFips) {
    GTEST_SKIP() << "Not supported in FIPS-only mode";
  }
  // Local renamed from the misspelled "statur_or_cecpq2_key" for
  // consistency with the other status_or_* locals in this file.
  auto status_or_cecpq2_key = HrssTestCommon(EllipticCurveType::CURVE25519);
  ASSERT_TRUE(status_or_cecpq2_key.ok());
  auto cecpq2_key_pair = std::move(status_or_cecpq2_key).ValueOrDie();
  // Creating an instance of Cecpq2HkdfSenderKemBoringSsl specifying an
  // unsupported curve
  auto status_or_sender_kem = Cecpq2HkdfSenderKemBoringSsl::New(
      EllipticCurveType::NIST_P256, cecpq2_key_pair.x25519_key_pair.pub_x,
      cecpq2_key_pair.x25519_key_pair.pub_y,
      cecpq2_key_pair.hrss_key_pair.hrss_public_key_marshaled);
  // This test should fail with an unimplemented algorithm error
  EXPECT_EQ(util::error::UNIMPLEMENTED,
            status_or_sender_kem.status().error_code());
}
// This test evaluates if a Sender can successfully generate a symmetric key.
TEST_F(Cecpq2HkdfSenderKemBoringSslTest, TestGenerateKey) {
  if (kUseOnlyFips) {
    GTEST_SKIP() << "Not supported in FIPS-only mode";
  }
  // Local renamed from the misspelled "statur_or_cecpq2_key" for
  // consistency with the other status_or_* locals in this file.
  auto status_or_cecpq2_key = HrssTestCommon(EllipticCurveType::CURVE25519);
  ASSERT_TRUE(status_or_cecpq2_key.ok());
  auto cecpq2_key_pair = std::move(status_or_cecpq2_key).ValueOrDie();
  // Creating an instance of Cecpq2HkdfSenderKemBoringSsl
  auto status_or_sender_kem = Cecpq2HkdfSenderKemBoringSsl::New(
      EllipticCurveType::CURVE25519, cecpq2_key_pair.x25519_key_pair.pub_x,
      cecpq2_key_pair.x25519_key_pair.pub_y,
      cecpq2_key_pair.hrss_key_pair.hrss_public_key_marshaled);
  ASSERT_TRUE(status_or_sender_kem.ok());
  auto sender_kem = std::move(status_or_sender_kem.ValueOrDie());
  // Generating a symmetric key
  uint32_t key_size_in_bytes = HRSS_KEY_BYTES;
  auto status_or_kem_key =
      sender_kem->GenerateKey(HashType::SHA256, "hkdf_salt", "hkdf_info",
                              key_size_in_bytes, EcPointFormat::COMPRESSED);
  // Asserting that the symmetric key has been successfully generated
  ASSERT_TRUE(status_or_kem_key.ok());
  auto kem_key = std::move(status_or_kem_key.ValueOrDie());
  EXPECT_FALSE(kem_key->get_kem_bytes().empty());
  EXPECT_EQ(kem_key->get_symmetric_key().size(), key_size_in_bytes);
}
// This test evaluates the whole KEM flow: from Sender to Recipient. This test
// should successfully generate an encapsulated shared secret that matches with
// a decapsulated shared secret.
TEST_F(Cecpq2HkdfSenderKemBoringSslTest, TestSenderRecipientFullFlowSuccess) {
  if (kUseOnlyFips) {
    GTEST_SKIP() << "Not supported in FIPS-only mode";
  }
  // Declaring auxiliary parameters
  EllipticCurveType curve = EllipticCurveType::CURVE25519;
  HashType hash_type = HashType::SHA256;
  EcPointFormat point_format = EcPointFormat::COMPRESSED;
  std::string salt_hex = "0b0b0b0b";
  std::string info_hex = "0b0b0b0b0b0b0b0b";
  int out_len = 32;
  // Local renamed from the misspelled "statur_or_cecpq2_key" for
  // consistency with the other status_or_* locals in this file.
  auto status_or_cecpq2_key = HrssTestCommon(EllipticCurveType::CURVE25519);
  ASSERT_TRUE(status_or_cecpq2_key.ok());
  auto cecpq2_key_pair = std::move(status_or_cecpq2_key).ValueOrDie();
  // Creating an instance of Cecpq2HkdfSenderKemBoringSsl
  auto status_or_sender_kem = Cecpq2HkdfSenderKemBoringSsl::New(
      curve, cecpq2_key_pair.x25519_key_pair.pub_x,
      cecpq2_key_pair.x25519_key_pair.pub_y,
      cecpq2_key_pair.hrss_key_pair.hrss_public_key_marshaled);
  ASSERT_TRUE(status_or_sender_kem.ok());
  auto sender_kem = std::move(status_or_sender_kem.ValueOrDie());
  // Generating sender's shared secret
  auto status_or_kem_key = sender_kem->GenerateKey(
      hash_type, test::HexDecodeOrDie(salt_hex), test::HexDecodeOrDie(info_hex),
      out_len, point_format);
  ASSERT_TRUE(status_or_kem_key.ok());
  auto kem_key = std::move(status_or_kem_key.ValueOrDie());
  // Initializing recipient's KEM data structure using recipient's private keys
  auto status_or_recipient_kem = Cecpq2HkdfRecipientKemBoringSsl::New(
      curve, cecpq2_key_pair.x25519_key_pair.priv,
      std::move(cecpq2_key_pair.hrss_key_pair.hrss_private_key));
  ASSERT_TRUE(status_or_recipient_kem.ok());
  auto recipient_kem = std::move(status_or_recipient_kem.ValueOrDie());
  // Generating recipient's shared secret
  auto status_or_shared_secret = recipient_kem->GenerateKey(
      kem_key->get_kem_bytes(), hash_type, test::HexDecodeOrDie(salt_hex),
      test::HexDecodeOrDie(info_hex), out_len, point_format);
  ASSERT_TRUE(status_or_shared_secret.ok());
  // Asserting that both shared secrets match
  EXPECT_EQ(test::HexEncode(
                util::SecretDataAsStringView(kem_key->get_symmetric_key())),
            test::HexEncode(util::SecretDataAsStringView(
                status_or_shared_secret.ValueOrDie())));
}
// Method that generates the shared secret returned by HRSS in case of
// decapsulation failure. This shared secret consists of the HMAC of the
// ciphertext using portion of the HRSS private key as the HMAC key.
//
// out_shared_key: receives the HRSS_KEY_BYTES-byte failure shared secret.
// in_priv:        recipient's HRSS private key (its opaque blob is read).
// ciphertext:     the HRSS ciphertext whose decapsulation failed.
void createFailureSharedSecret(uint8_t out_shared_key[HRSS_KEY_BYTES],
                               struct HRSS_private_key* in_priv,
                               const uint8_t* ciphertext,
                               size_t ciphertext_len) {
  // Shifting the private key by 15 positions (as in its marshaled version) then
  // by 1760 positions to reach the expected HMAC key used in BoringSSL:
  uint8_t* priv_hmac_ptr =
      reinterpret_cast<uint8_t*>(in_priv->opaque) + 15 + 1760;
  // This is HMAC, expanded inline rather than using the |HMAC| function so that
  // we can avoid dealing with possible allocation failures and so keep this
  // function infallible.
  uint8_t masked_key[SHA256_CBLOCK];
  // Inner (ipad) key: 32 key bytes XOR 0x36, padded with 0x36 to the block.
  for (size_t i = 0; i < 32; i++) {
    masked_key[i] = priv_hmac_ptr[i] ^ 0x36;
  }
  std::memset(masked_key + 32, 0x36, 32);
  SHA256_CTX hash_ctx;
  SHA256_Init(&hash_ctx);
  SHA256_Update(&hash_ctx, masked_key, SHA256_CBLOCK);
  SHA256_Update(&hash_ctx, ciphertext, ciphertext_len);
  uint8_t inner_digest[SHA256_DIGEST_LENGTH];
  SHA256_Final(inner_digest, &hash_ctx);
  // Outer (opad) key: XOR with (0x5c ^ 0x36) turns the ipad key into the
  // opad key in place.
  for (size_t i = 0; i < 32; i++) {
    masked_key[i] ^= (0x5c ^ 0x36);
  }
  // std::memset for consistency with the std::memset above (was bare memset).
  std::memset(masked_key + 32, 0x5c, 32);
  SHA256_Init(&hash_ctx);
  SHA256_Update(&hash_ctx, masked_key, sizeof(masked_key));
  SHA256_Update(&hash_ctx, inner_digest, sizeof(inner_digest));
  OPENSSL_STATIC_ASSERT(HRSS_KEY_BYTES == SHA256_DIGEST_LENGTH,
                        "HRSS shared key length incorrect");
  SHA256_Final(out_shared_key, &hash_ctx);
}
// This test evaluates the whole KEM flow: from Sender to Recipient. This test
// is essentially the same as TestSenderRecipientFullFlowSuccess with the
// difference that we alter bytes of the kem_bytes thus preventing the two
// shared secrets to match.
TEST_F(Cecpq2HkdfSenderKemBoringSslTest, TestSenderRecipientFullFlowFailure) {
  if (kUseOnlyFips) {
    GTEST_SKIP() << "Not supported in FIPS-only mode";
  }
  // Declaring auxiliary parameters
  EllipticCurveType curve = EllipticCurveType::CURVE25519;
  HashType hash_type = HashType::SHA256;
  EcPointFormat point_format = EcPointFormat::COMPRESSED;
  std::string info_hex = "0b0b0b0b0b0b0b0b";
  std::string salt_hex = "0b0b0b0b";
  int out_len = 32;
  // Local renamed from the misspelled "statur_or_cecpq2_key" for
  // consistency with the other status_or_* locals in this file.
  auto status_or_cecpq2_key = HrssTestCommon(EllipticCurveType::CURVE25519);
  ASSERT_TRUE(status_or_cecpq2_key.ok());
  auto cecpq2_key_pair = std::move(status_or_cecpq2_key).ValueOrDie();
  // Initializing sender's KEM data structure using recipient's public keys
  auto status_or_sender_kem = Cecpq2HkdfSenderKemBoringSsl::New(
      curve, cecpq2_key_pair.x25519_key_pair.pub_x,
      cecpq2_key_pair.x25519_key_pair.pub_y,
      cecpq2_key_pair.hrss_key_pair.hrss_public_key_marshaled);
  ASSERT_TRUE(status_or_sender_kem.ok());
  auto sender_kem = std::move(status_or_sender_kem.ValueOrDie());
  // storing an HRSS private key backup needed for the defective testing flow
  // (the original is moved into the recipient KEM below):
  struct HRSS_private_key recipient_hrss_priv_copy;
  std::memcpy(recipient_hrss_priv_copy.opaque,
              cecpq2_key_pair.hrss_key_pair.hrss_private_key->opaque,
              sizeof(recipient_hrss_priv_copy.opaque));
  // Generating sender's shared secret
  auto status_or_kem_key = sender_kem->GenerateKey(
      hash_type, test::HexDecodeOrDie(salt_hex), test::HexDecodeOrDie(info_hex),
      out_len, point_format);
  ASSERT_TRUE(status_or_kem_key.ok());
  auto kem_key = std::move(status_or_kem_key.ValueOrDie());
  // Initializing recipient's KEM data structure using recipient's private keys
  auto status_or_recipient_kem = Cecpq2HkdfRecipientKemBoringSsl::New(
      curve, cecpq2_key_pair.x25519_key_pair.priv,
      std::move(cecpq2_key_pair.hrss_key_pair.hrss_private_key));
  ASSERT_TRUE(status_or_recipient_kem.ok());
  auto recipient_kem = std::move(status_or_recipient_kem.ValueOrDie());
  // Here, we corrupt kem_bytes (we change all bytes to "a") so that
  // the HRSS shared secret is not successfully recovered
  std::string kem_bytes = kem_key->get_kem_bytes();
  for (int i = 0; i < HRSS_CIPHERTEXT_BYTES; i++)
    kem_bytes[X25519_PUBLIC_VALUE_LEN + i] = 'a';
  // Generating the defective recipient's shared secret
  auto status_or_shared_secret = recipient_kem->GenerateKey(
      kem_bytes, hash_type, test::HexDecodeOrDie(salt_hex),
      test::HexDecodeOrDie(info_hex), out_len, point_format);
  // Recover the X25519 shared secret (needed for the defective shared secret
  // computation)
  util::SecretData x25519_shared_secret(X25519_SHARED_KEY_LEN);
  X25519(x25519_shared_secret.data(),
         cecpq2_key_pair.x25519_key_pair.priv.data(),
         reinterpret_cast<const uint8_t*>(kem_bytes.data()));
  // Computing the shared secret returned by BoringSSL's HRSS assuming that HRSS
  // decapsulation fails
  util::SecretData hrss_out_shared_key_defective(HRSS_KEY_BYTES);
  createFailureSharedSecret(hrss_out_shared_key_defective.data(),
                            &recipient_hrss_priv_copy,
                            reinterpret_cast<const uint8_t*>(
                                kem_bytes.data() + X25519_PUBLIC_VALUE_LEN),
                            HRSS_CIPHERTEXT_BYTES);
  // Concatenate both shared secrets (correct X25519 and wrong HRSS) and
  // kem_bytes
  std::string kem_bytes_and_shared_secrets = absl::StrCat(
      kem_bytes, util::SecretDataAsStringView(x25519_shared_secret),
      util::SecretDataAsStringView(hrss_out_shared_key_defective));
  util::SecretData ikm =
      util::SecretDataFromStringView(kem_bytes_and_shared_secrets);
  // Compute symmetric key from both shared secrets, kem_bytes, hkdf_salt and
  // hkdf_info using HKDF
  auto symmetric_key_or =
      Hkdf::ComputeHkdf(hash_type, ikm, test::HexDecodeOrDie(salt_hex),
                        test::HexDecodeOrDie(info_hex), out_len);
  ASSERT_TRUE(symmetric_key_or.ok());
  util::SecretData symmetric_key = symmetric_key_or.ValueOrDie();
  // Asserting that the generated shared secret matches with the one that should
  // be produced by HRSS in case of HRSS decapsulation failure:
  EXPECT_EQ(test::HexEncode(util::SecretDataAsStringView(symmetric_key)),
            test::HexEncode(util::SecretDataAsStringView(
                status_or_shared_secret.ValueOrDie())));
}
} // namespace
} // namespace subtle
} // namespace tink
} // namespace crypto
|
#!/bin/sh
# Copyright 2020 Xilinx Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Author: Mark Harvey, Xilinx Inc
# Select the compiler arch file and target name for the requested board.
# "$1" is quoted so a missing argument produces the friendly error below
# instead of a '[' syntax error.
if [ "$1" = zcu102 ]; then
  ARCH=/opt/vitis_ai/compiler/arch/DPUCZDX8G/ZCU102/arch.json
  TARGET=zcu102
  echo "-----------------------------------------"
  echo "COMPILING MODEL FOR ZCU102.."
  echo "-----------------------------------------"
elif [ "$1" = zcu104 ]; then
  ARCH=/opt/vitis_ai/compiler/arch/DPUCZDX8G/ZCU104/arch.json
  TARGET=zcu104
  echo "-----------------------------------------"
  echo "COMPILING MODEL FOR ZCU104.."
  echo "-----------------------------------------"
elif [ "$1" = vck190 ]; then
  ARCH=/opt/vitis_ai/compiler/arch/DPUCVDX8G/VCK190/arch.json
  TARGET=vck190
  echo "-----------------------------------------"
  echo "COMPILING MODEL FOR VCK190.."
  echo "-----------------------------------------"
elif [ "$1" = u50 ]; then
  ARCH=/opt/vitis_ai/compiler/arch/DPUCAHX8H/U50/arch.json
  TARGET=u50
  echo "-----------------------------------------"
  echo "COMPILING MODEL FOR ALVEO U50.."
  echo "-----------------------------------------"
elif [ "$1" = kv260 ]; then
  ARCH=/opt/vitis_ai/compiler/arch/DPUCZDX8G/KV260/arch.json
  TARGET=kv260
  # Banner separators normalized to match the other targets.
  echo "-----------------------------------------"
  echo "COMPILING MODEL FOR KV260.."
  echo "-----------------------------------------"
else
  # kv260 added to the list of valid choices (it was missing).
  echo "Target not found. Valid choices are: zcu102, zcu104, vck190, u50, kv260 ..exiting"
  exit 1
fi
# Remaining command-line arguments: build directory and log directory.
BUILD=$2
LOG=$3
# Invokes the Vitis AI XIR compiler on the quantized model for the
# selected target architecture.
compile() {
  vai_c_xir \
    --xmodel ${BUILD}/quant_model/CNN_int.xmodel \
    --arch $ARCH \
    --net_name CNN_${TARGET} \
    --output_dir ${BUILD}/compiled_model
}
# Run the compiler, mirroring stdout+stderr into a per-target log file.
compile 2>&1 | tee ${LOG}/compile_$TARGET.log
echo "-----------------------------------------"
echo "MODEL COMPILED"
echo "-----------------------------------------"
|
#!/usr/bin/env bash
# LFS-style build script for GNU coreutils: download, unpack, build and
# install into /tools, then clean up.
shopt -s -o pipefail
set -e # Exit on error
PKG_NAME="coreutils"
PKG_VERSION="8.31"
TARBALL="${PKG_NAME}-${PKG_VERSION}.tar.xz"
SRC_DIR="${PKG_NAME}-${PKG_VERSION}"
LINK="http://ftp.gnu.org/gnu/$PKG_NAME/$TARBALL"
# Prints a short description banner for this package.
function showHelp() {
  echo -e "--------------------------------------------------------------------------------------------------------------"
  echo -e "Description: The Coreutils package contains utilities for showing and setting the basic system characteristics"
  echo -e "--------------------------------------------------------------------------------------------------------------"
  echo -e ""
}
# Downloads the source tarball from the GNU mirror.
function prepare() {
  echo -e "Downloading $TARBALL from $LINK"
  wget "$LINK" -O "$TARBALL"
}
# Extracts the downloaded source tarball into ./${SRC_DIR}.
function unpack() {
  echo -e "Unpacking $TARBALL"
  # Quoted so 'set -e' fails cleanly rather than word-splitting the name.
  tar xf "${TARBALL}"
}
function build() {
echo -e "Configuring $PKG_NAME"
./configure --prefix=/tools --enable-install-program=hostname
make "$MAKE_PARALLEL"
}
function instal() {
echo -e "Installing $PKG_NAME"
make "${MAKE_PARALLEL}" install
}
function clean() {
echo -e "Cleaning up..."
rm -rf ${SRC_DIR} ${TARBALL}
}
# Run the installation procedure
time {
showHelp
clean
prepare
unpack
pushd ${SRC_DIR}
build
instal
popd
clean
}
|
<reponame>despo/apply-for-teacher-training<gh_stars>0
require 'rails_helper'

# Feature spec: when a candidate deletes their only job entry, any
# work-history break explanation they entered must be removed as well, so
# re-adding the same job does not resurrect the old break reason.
RSpec.feature 'Entering their work history' do
  include CandidateHelper

  # Freeze "now" at 1 Nov 2019 so the gap after a job ending Aug 2019 is
  # deterministically reported as a 2-month break.
  around do |example|
    Timecop.freeze(Time.zone.local(2019, 11, 1)) do
      example.run
    end
  end

  scenario 'Candidate deleting their only job entry should also remove any breaks entered' do
    FeatureFlag.activate(:restructured_work_history)

    given_i_am_signed_in
    and_i_visit_the_site

    # Add a single job (Oct 2014 - Aug 2019) and explain the resulting break.
    when_i_click_on_work_history
    then_i_should_see_the_start_page
    then_i_choose_that_i_have_work_history_to_add
    and_i_click_add_a_first_job
    then_i_should_see_the_add_a_job_page
    and_i_add_a_job_between_october_2014_to_august_2019
    then_i_see_a_two_month_break_between_my_job_and_now
    given_i_am_on_review_work_history_page
    when_i_click_to_explain_my_break_between_august_2019_and_november_2019
    then_i_see_the_start_and_end_date_filled_in_for_my_break_between_august_2019_and_november_2019
    when_i_enter_a_reason_for_my_break_between_august_2019_and_november_2019
    then_i_see_my_reason_for_my_break_between_august_2019_and_november_2019_on_the_review_page

    # Delete the job, re-add it, and verify the old break reason is gone.
    when_i_delete_my_job_between_october_2014_and_august_2019
    and_i_confirm_i_want_to_delete_my_job_between_october_2014_and_august_2019
    then_i_should_see_the_start_page
    then_i_choose_that_i_have_work_history_to_add
    and_i_click_add_a_first_job
    then_i_should_see_the_add_a_job_page
    and_i_add_a_job_between_october_2014_to_august_2019
    then_i_see_a_two_month_break_between_my_job_and_now
    and_i_should_not_see_my_previous_break_entry
  end

  def given_i_am_signed_in
    create_and_sign_in_candidate
  end

  def and_i_visit_the_site
    visit candidate_interface_application_form_path
  end

  def when_i_click_on_work_history
    click_link t('page_titles.work_history')
  end

  def then_i_should_see_the_start_page
    expect(page).to have_current_path candidate_interface_restructured_work_history_path
  end

  def then_i_choose_that_i_have_work_history_to_add
    choose 'Yes'
    click_button 'Continue'
  end

  def and_i_click_add_a_first_job
    click_link 'Add a job'
  end

  def then_i_should_see_the_add_a_job_page
    expect(page).to have_current_path candidate_interface_new_restructured_work_history_path
  end

  # Fills in the full add-a-job form: full-time role from 10/2014 to 8/2019,
  # not currently working there, no relevant skills.
  def and_i_add_a_job_between_october_2014_to_august_2019
    scope = 'application_form.restructured_work_history'
    fill_in t('role.label', scope: scope), with: 'Microsoft Painter'
    fill_in t('employer.label', scope: scope), with: 'Department for Education'
    choose 'Full time'
    within('[data-qa="start-date"]') do
      fill_in 'Month', with: '10'
      fill_in 'Year', with: '2014'
    end
    within('[data-qa="currently-working"]') do
      choose 'No'
    end
    within('[data-qa="end-date"]') do
      fill_in 'Month', with: '8'
      fill_in 'Year', with: '2019'
    end
    within('[data-qa="relevant-skills"]') do
      choose 'No'
    end
    click_button t('save_and_continue')
  end

  # Aug 2019 -> frozen "now" (Nov 2019) is reported as a 2-month break.
  def then_i_see_a_two_month_break_between_my_job_and_now
    expect(page).to have_content('You have a break in your work history (2 months)')
  end

  def then_i_see_the_start_and_end_date_filled_in_for_my_break_between_august_2019_and_november_2019
    then_i_see_the_start_and_end_date_filled_for_adding_another_job_between_august_2019_and_november_2019
  end

  # The break form should be pre-populated with 8/2019 (start) and 11/2019 (end).
  def then_i_see_the_start_and_end_date_filled_for_adding_another_job_between_august_2019_and_november_2019
    expect(page).to have_selector("input[value='8']")
    expect(page).to have_selector("input[value='2019']")
    expect(page).to have_selector("input[value='11']")
    expect(page).to have_selector("input[value='2019']")
  end

  def given_i_am_on_review_work_history_page
    visit candidate_interface_restructured_work_history_review_path
  end

  def when_i_click_to_explain_my_break_between_august_2019_and_november_2019
    click_link 'add a reason for this break', match: :first
  end

  def when_i_enter_a_reason_for_my_break_between_august_2019_and_november_2019
    fill_in 'Enter reasons for break in work history', with: 'Painting is tiring.'
    click_button t('continue')
  end

  def then_i_see_my_reason_for_my_break_between_august_2019_and_november_2019_on_the_review_page
    expect(page).to have_content('Painting is tiring.')
  end

  def when_i_delete_my_job_between_october_2014_and_august_2019
    click_link 'Delete job Microsoft Painter for Department for Education'
  end

  def and_i_confirm_i_want_to_delete_my_job_between_october_2014_and_august_2019
    click_button 'Yes I’m sure - delete this job'
  end

  # The break reason entered earlier must not survive the job deletion.
  def and_i_should_not_see_my_previous_break_entry
    expect(page).not_to have_content('Painting is tiring.')
  end
end
|
<!DOCTYPE html>
<html>
<head>
  <title>Form</title>
</head>
<body>
  <!-- Simple contact form; posts back to the current URL. -->
  <form action="" method="POST">
    <!-- id attributes added: each label's for="..." must match an input id,
         otherwise the label/input association (click-to-focus, screen
         readers) does not work. -->
    <label for="name">Name:</label>
    <input type="text" name="name" id="name">
    <br>
    <label for="email">Email:</label>
    <input type="email" name="email" id="email">
    <br>
    <input type="submit" value="Submit">
  </form>
</body>
</html>
<?php
// Count the words in a fixed sentence and print the count.
// str_word_count("Hello world!") evaluates to 2.
$text = "Hello world!";
echo str_word_count($text);
?>
# Feed an inline script to `sh` via stdin: `-s` makes sh read commands from
# standard input and treat the remaining operands ("a", "b") as positional
# parameters $1 and $2, so this prints: a b
echo 'echo $1 $2' | sh -s a b
import sys
from os import path

# Make the parent directory importable BEFORE importing LDA from it.
# In the original order the path tweak ran after `from LDA import TCVB0`,
# so it could not help the interpreter locate the LDA package at all.
# (Deliberately placed above the import despite PEP 8, since the import
# depends on it.)
sys.path.append(path.abspath(path.join(path.dirname(__file__), "..")))

from LDA import TCVB0
def show_usage():
    """Print the command-line usage string and terminate with exit status 1."""
    usage = 'Usage: demo1.py path_to_dataset [alpha beta]'
    print(usage)
    sys.exit(1)
def main():
    """CLI entry point: parse args, load a dataset, run TCVB0 LDA, print topics."""
    # Accept 1-3 arguments: dataset path plus optional alpha/beta hyperparameters.
    if len(sys.argv) < 2 or len(sys.argv) > 4:
        show_usage()
    dataset_path = sys.argv[1]
    alpha = float(sys.argv[2]) if len(sys.argv) > 2 else 0.1  # default prior
    beta = float(sys.argv[3]) if len(sys.argv) > 3 else 0.1   # default prior
    # Read the dataset from the specified path
    # FIXME(review): `read_dataset` is not defined or imported anywhere in this
    # file — as written this raises NameError, never FileNotFoundError. Confirm
    # where it should come from (presumably the LDA package).
    try:
        dataset = read_dataset(dataset_path)
    except FileNotFoundError:
        print(f"Error: Dataset not found at {dataset_path}")
        sys.exit(1)
    # Perform topic modeling using the TCVB0 algorithm
    lda_model = TCVB0(dataset, alpha, beta)
    topics = lda_model.fit()
    # Display the results of the topic modeling
    # NOTE(review): assumes fit() returns an iterable of word lists, one per
    # topic — confirm against the TCVB0 API.
    for topic_id, words in enumerate(topics):
        print(f"Topic {topic_id + 1}: {', '.join(words)}")


if __name__ == "__main__":
    main()
import java.util.Random;
public class RandomArray {
public static int[] generateRandomArray(int size) {
Random rand = new Random();
int[] randomArray = new int[size];
for (int i = 0; i < size; i++) {
randomArray[i] = rand.nextInt(11);
}
return randomArray;
}
} |
<reponame>gogui63/GPXWriter
package fr.gautierragueneau.gpxwriter;
import android.location.Location;
import java.util.ArrayList;
import java.util.List;
/**
* Created by gautier on 11/01/2017.
*/
/**
 * Immutable description of a GPX track: name, description, author and an
 * ordered list of location points. Instances are created via {@link GPXBuilder}.
 *
 * Created by gautier on 11/01/2017.
 */
public class GPX {
    private final String name;
    private final String description;
    private final String author;
    private final List<Location> points;

    // Private: construction only through GPXBuilder.build().
    private GPX(GPXBuilder gpxBuilder) {
        this.name = gpxBuilder.name;
        this.description = gpxBuilder.description;
        this.author = gpxBuilder.author;
        this.points = gpxBuilder.points;
    }

    public String getName() {
        return name;
    }

    public String getDescription() {
        return description;
    }

    public String getAuthor() {
        return author;
    }

    public List<Location> getPoints() {
        return points;
    }

    /**
     * Writes this track asynchronously; the listener is notified on completion.
     * NOTE(review): delegates to AsyncTaskGenerateGPX (defined elsewhere) —
     * threading/callback semantics depend on that class.
     */
    public void generateGPX(IGPX gpxListner) {
        new AsyncTaskGenerateGPX(this, gpxListner).execute();
    }

    /**
     * Fluent builder for {@link GPX}; only the name is mandatory.
     */
    public static class GPXBuilder {
        private final String name;
        private String description;
        private String author;
        private List<Location> points = new ArrayList<>();

        public GPXBuilder(String name) {
            this.name = name;
        }

        public GPXBuilder description(String description) {
            this.description = description;
            return this;
        }

        public GPXBuilder author(String author) {
            this.author = author;
            return this;
        }

        // NOTE(review): despite the "add" name, this REPLACES the builder's
        // point list with the caller's list (which is then shared, not copied).
        public GPXBuilder addPoints(List<Location> points) {
            this.points = points;
            return this;
        }

        // Appends one point to the current list (which may be a caller-supplied
        // list if addPoints was used earlier).
        public GPXBuilder addPoint(Location point) {
            this.points.add(point);
            return this;
        }

        public GPX build() {
            return new GPX(this);
        }
    }
}
|
<filename>Project/SimuladorEnigma/src/simuladorenigma/Converter.java
/*
 Class implementing the methods Encripta() and Converter().
 Encripta wires together all the other machine components, in order, to encrypt the letter.
 Converter converts the number (int) to the corresponding letter (char).
*/
package simuladorenigma;
public class Converter {
int e, exit,pp1,pp2, r11,r12,r21,r22,r31,r32,a11,a12,a21,a22,a31,a32,rf1,rf2;
public int getE() {
return e;
}
public int getExit() {
return exit;
}
public int getPp1() {
return pp1;
}
public int getPp2() {
return pp2;
}
public int getR11() {
return r11;
}
public int getR12() {
return r12;
}
public int getR21() {
return r21;
}
public int getR22() {
return r22;
}
public int getR31() {
return r31;
}
public int getR32() {
return r32;
}
public int getA11() {
return a11;
}
public int getA12() {
return a12;
}
public int getA21() {
return a21;
}
public int getA22() {
return a22;
}
public int getA31() {
return a31;
}
public int getA32() {
return a32;
}
public int getRf1() {
return rf1;
}
public int getRf2() {
return rf2;
}
public int Encripta(int entrada, int r1, int r2, int r3, int a1, int a2, int a3, int cabo11, int cabo12, int cabo21, int cabo22, int cabo31, int cabo32, int cabo41, int cabo42, int cabo51, int cabo52, int cabo61, int cabo62, int cabo71, int cabo72, int cabo81, int cabo82){
Rotor rotor1 = new Rotor();
Rotor rotor2 = new Rotor();
Rotor rotor3 = new Rotor();
Refletor refletor = new Refletor();
PainelPlug plug = new PainelPlug();
int s;
this.e = entrada;
//SETA OS PLUGS DO PAINEL
plug.completaPainel(cabo11, cabo12);
plug.completaPainel(cabo21, cabo22);
plug.completaPainel(cabo31, cabo32);
plug.completaPainel(cabo41, cabo42);
plug.completaPainel(cabo51, cabo52);
plug.completaPainel(cabo61, cabo62);
plug.completaPainel(cabo71, cabo72);
plug.completaPainel(cabo81, cabo82);
//caminho da encriptação
//tambem pega dados para a simulação do caminho
s = plug.saidaPainel(entrada);
this.pp1 = s;
this.e = entrada;
this.r11 = s;
s = rotor1.codRotorAnel(s,r1,a1);
this.a11 = s;
this.r21 = s;
s = rotor2.codRotorAnel(s,r2,a2);
this.a21 = s;
this.r31 = s;
s = rotor3.codRotorAnel(s,r3,a3);
this.a31 = s;
this.rf1 = s;
s = refletor.reflete(s);
this.rf2 = s;
this.a12 = s;
s = rotor3.codAnelRotor(s, a3, r3);
this.r32 = s;
this.a22 = s;
s = rotor2.codAnelRotor(s, a2, r2);
this.r22 = s;
this.a32 = s;
s = rotor1.codAnelRotor(s, a1, r1);
this.r12 = s;
s = plug.saidaPainel(s);
this.pp2 = s;
this.exit = s;
return s;
}
public char Converter(int op){
char var = 0;
switch(op){
case 0:
break;
case 1:
var = 'A';
break;
case 2:
var = 'B';
break;
case 3:
var = 'C';
break;
case 4:
var = 'D';
break;
case 5:
var = 'E';
break;
case 6:
var = 'F';
break;
case 7:
var = 'G';
break;
case 8:
var = 'H';
break;
case 9:
var = 'I';
break;
case 10:
var = 'J';
break;
case 11:
var = 'K';
break;
case 12:
var = 'L';
break;
case 13:
var = 'M';
break;
case 14:
var = 'N';
break;
case 15:
var = 'O';
break;
case 16:
var = 'P';
break;
case 17:
var = 'Q';
break;
case 18:
var = 'R';
break;
case 19:
var = 'S';
break;
case 20:
var = 'T';
break;
case 21:
var = 'U';
break;
case 22:
var = 'V';
break;
case 23:
var = 'W';
break;
case 24:
var = 'X';
break;
case 25:
var = 'Y';
break;
case 26:
var = 'Z';
break;
}
return var;
}
} |
#!/usr/bin/env bash
# Download and unpack the svls (SystemVerilog language server) release binary
# matching the current OS.
set -e

# Normalise `uname -s` to the short OS tag used in the release file names.
os=$(uname -s | tr "[:upper:]" "[:lower:]")
case $os in
linux)
    os="lnx"
    ;;
darwin)
    os="mac"
    ;;
*)
    # Error goes to stderr; message grammar and the missing trailing
    # newline fixed (was: "%s doesn't supported by bash installer").
    printf "%s is not supported by the bash installer\n" "$os" >&2
    exit 1
    ;;
esac

version="v0.1.16"
# Archive name deduplicated into one variable instead of being rebuilt
# three times inline.
archive="svls-$version-x86_64-$os.zip"

curl -L -o "$archive" "https://github.com/dalance/svls/releases/download/$version/$archive"
unzip "$archive"
rm "$archive"
<gh_stars>0
import { NgModule } from '@angular/core';
// Routing
import { RouterModule, Routes } from '@angular/router';
// Shared
import { SharedModule } from './shared/shared.module';

// Root route lazy-loads the website's root module.
// NOTE(review): the string form of `loadChildren` is the legacy (pre-Ivy)
// syntax; newer Angular versions expect a dynamic `import()` callback —
// confirm the project's Angular version before changing it.
export const ROUTES: Routes = [
  { path: '', loadChildren: './root/website-root.module#WebsiteRootModule' }
];

// Feature module wiring the child routes and shared functionality.
// `forChild` registers routes without re-providing the router service;
// `SharedModule.forRoot()` presumably provides that module's singleton
// providers — verify against shared.module's definition.
@NgModule({
  imports: [
    RouterModule.forChild(ROUTES),
    SharedModule.forRoot()
  ]
})
export class WebsiteModule { }
|
/**
 * Returns the CouncilMember record for `address` if that address is among the
 * top-3 members by vote count, otherwise undefined.
 *
 * @param {string} address - account address to look up
 * @returns {object|undefined} the matching top-3 member record, if any
 */
function isCouncilMember(address) {
  const members = app.sdb.getAll('CouncilMember') || []
  // Copy before sorting: Array#sort mutates in place and `members` may be
  // shared state returned by app.sdb.getAll. (Also removes the original's
  // redundant `members = members = ...` double assignment.)
  const topThree = [...members].sort((a, b) => b.votes - a.votes).slice(0, 3)
  return topThree.find(i => i.address === address)
}
module.exports = {
async register(website) {
if (!website || typeof website !== 'string' || website.length > 256) return 'Invalid parameters'
const senderId = this.sender.address
const sender = this.sender
if (!sender) return 'Account not found'
if (!sender.name) return 'Account has not a name'
if (sender.role) return 'Account already have a role'
app.sdb.create('CouncilMember', {
address: senderId,
name: sender.name,
tid: this.trs.id,
publicKey: this.trs.senderPublicKey,
votes: 0,
website,
})
sender.role = app.AccountRole.COUNCIL_MEMBER
app.sdb.update('Account', { role: app.AccountRole.COUNCIL_MEMBER }, { address: senderId })
return null
},
async vote(targets) {
if (!targets || typeof targets !== 'string') return 'Invalid parameters'
const names = targets.split(',')
if (names.length > 3) return 'Up to 3 targets'
const memberNames = new Set()
for (const member of app.sdb.getAll('CouncilMember')) {
memberNames.add(member.name)
}
for (const name of names) {
if (!memberNames.has(name)) return 'Target is not council member'
}
if (!app.isCurrentBookkeeper(this.sender.address)) return 'Permission denied'
const { session, status } = modules.council.getCouncilInfo()
if (status === 1) return 'Invalid session status'
const voter = this.sender.name
const exists = await app.sdb.exists('CouncilVote', { voter, session })
if (exists) return 'Already voted'
app.sdb.create('CouncilVote', { voter, session, targets })
for (const name of names) {
app.sdb.increase('CouncilMember', { votes: 1 }, { name })
}
},
async initiatePayment(recipient, amount, currency, remarks, expirtedAt) {
if (!app.util.address.isNormalAddress(recipient)) return 'Invalid recipient address'
app.validate('amount', amount)
if (currency !== 'XAS') return 'UIA token not supported'
if (typeof remarks !== 'string' || remarks.length > 256) return 'Invalid remarks'
if (typeof expiredAt !== 'number') return 'Invalid expired time'
if (!isCouncilMember(this.sender.address)) return 'Permission denied'
const session = modules.council.getCouncilInfo().session
app.sdb.create('CouncilTransaction', {
tid: this.trs.id,
currency,
amount,
remarks,
recipient,
timestamp: this.trs.timestamp,
expirtedAt,
pending: 1,
signs: 1,
session,
})
},
async signPayment(tid) {
if (!isCouncilMember(this.sender.address)) return 'Permission denied'
const payment = await app.sdb.load('CouncilTransaction', tid)
if (!payment) return 'Payment not found'
if (payment.pending === 0) return 'Payment already finished'
const height = modules.blocks.getLastBlock().height
if (!!payment.expiredAt && height >= payment.expiredAt) return 'Payment expired'
const session = modules.council.getCouncilInfo().session
if (session !== payment.session) return 'Session expired'
const amount = Number.parseInt(payment.amount)
const COUNCIL_ADDRESS = 'GADQ2bozmxjBfYHDQx3uwtpwXmdhafUdkN'
const councilAccount = await app.sdb.load('Account', COUNCIL_ADDRESS)
if (!councilAccount) return 'Council account not found'
if (councilAccount.xas < amount) return 'Insufficient balance'
const recipientAccount = await app.sdb.load('Account', payment.recipient)
if (!recipientAccount) return 'Recipient account not found'
payment.signs += 1
app.sdb.increase('CouncilTransaction', { signs: 1 }, { tid })
if (payment.signs >= 2) {
payment.pending = 0
app.sdb.update('CouncilTransaction', { pending: 0 }, { tid })
app.sdb.increase('Account', { xas: amount }, { address: payment.recipient })
app.sdb.increase('Account', { xas: -1 * amount }, { address: COUNCIL_ADDRESS })
app.sdb.create('Transfer', {
tid: this.trs.id,
height: this.block.height,
senderId: COUNCIL_ADDRESS,
recipientId: recipientAccount.address,
recipientName: recipientAccount.name,
currency: payment.currency,
amount: payment.amount,
timestamp: this.trs.timestamp,
})
}
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.