text stringlengths 1 1.05M |
|---|
#!/bin/bash
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# build and install are separated so changes to build don't invalidate
# the whole docker cache for the image
# -e: abort on first failure; -x: echo each command for build-log debugging.
set -ex
# Install Scala 2.11.8 from the Lightbend .deb, then delete the installer
# so it doesn't bloat the docker layer.
wget http://downloads.lightbend.com/scala/2.11.8/scala-2.11.8.deb && \
dpkg -i scala-2.11.8.deb && rm scala-2.11.8.deb
# System packages needed by the documentation build toolchain.
apt-get install -y doxygen libatlas-base-dev graphviz pandoc
# Python doc-building dependencies; sphinx and CommonMark are pinned —
# presumably newer versions break the build (TODO confirm before bumping).
pip install sphinx==1.3.5 CommonMark==0.5.4 breathe mock recommonmark pypandoc beautifulsoup4
|
import CSVToArray from "./CSV/CsvToArray";
export var FinalJSON = "";
// Convert uploaded CSV form data into a pretty-printed JSON string and
// store the result in the module-level FinalJSON via setTheJSON.
function ConvertToJson(FormData) {
  var parsedRows = CSVToArray(FormData.content);
  var records = [];
  ReceiveTheCSVInArrayAndPushTheSentences(parsedRows, records);
  // Break the single-line JSON after each object for readability.
  var pretty = JSON.stringify(records).replace(/},/g, "},\r\n");
  setTheJSON(pretty);
}
// Map each data row (rows 1..N) of a parsed CSV onto an object whose keys
// come from the header row (row 0). Rows shorter or longer than the header
// are truncated to the common width. Results are written into the
// caller-supplied output array.
function ReceiveTheCSVInArrayAndPushTheSentences
(convertedToArrayFromFunction, finalConversionToJson)
{
  var header = convertedToArrayFromFunction[0];
  for (var row = 1; row < convertedToArrayFromFunction.length; row++) {
    var cells = convertedToArrayFromFunction[row];
    var record = {};
    var width = Math.min(header.length, cells.length);
    for (var col = 0; col < width; col++) {
      record[header[col]] = cells[col];
    }
    finalConversionToJson[row - 1] = record;
  }
}
// Persist the converted JSON string into the exported FinalJSON variable
// and hand it back to the caller.
function setTheJSON (conversionToJsonInString) {
  FinalJSON = conversionToJsonInString;
  return FinalJSON;
}
// Reset the exported FinalJSON back to the empty string.
export function clearTheJSON()
{
  FinalJSON = "";
  return FinalJSON;
}
export default ConvertToJson; |
import torch
import torch.nn as nn
import torch.nn.functional as F
class ResidualBlock(torch.nn.Module):
    """Residual unit: two same-channel 3x3 conv blocks plus an identity skip."""

    def __init__(self, channels):
        super().__init__()
        layers = [
            ConvBlock(channels, channels, kernel_size=3, stride=1,
                      normalize=True, relu=True),
            # No ReLU on the second block: its raw output is summed with
            # the identity branch in forward().
            ConvBlock(channels, channels, kernel_size=3, stride=1,
                      normalize=True, relu=False),
        ]
        self.block = nn.Sequential(*layers)

    def forward(self, x):
        """Return block(x) + x."""
        residual = self.block(x)
        return residual + x
class ConvBlock(torch.nn.Module):
    """Reflection-padded Conv2d with optional 2x upsampling, instance
    normalization and ReLU activation."""

    def __init__(self, in_channels, out_channels, kernel_size, stride=1,
                 upsample=False, normalize=True, relu=True):
        super().__init__()
        self.upsample = upsample
        # Reflection padding of kernel_size // 2 gives "same"-style output
        # size for odd kernels at stride 1.
        pad = kernel_size // 2
        self.block = nn.Sequential(
            nn.ReflectionPad2d(pad),
            nn.Conv2d(in_channels, out_channels, kernel_size, stride),
        )
        self.norm = nn.InstanceNorm2d(out_channels, affine=True) if normalize else None
        self.relu = relu

    def forward(self, x):
        if self.upsample:
            # F.interpolate defaults to nearest-neighbour mode.
            x = F.interpolate(x, scale_factor=2)
        out = self.block(x)
        if self.norm is not None:
            out = self.norm(out)
        return F.relu(out) if self.relu else out
class StyleResnet18(torch.nn.Module):
    """Image transformation network: strided-conv encoder, five residual
    blocks, and an upsampling decoder back to 3 channels."""

    def __init__(self):
        super().__init__()
        encoder = [
            ConvBlock(3, 32, kernel_size=9, stride=1),
            ConvBlock(32, 64, kernel_size=3, stride=2),
            ConvBlock(64, 128, kernel_size=3, stride=2),
        ]
        bottleneck = [ResidualBlock(128) for _ in range(5)]
        decoder = [
            ConvBlock(128, 64, kernel_size=3, upsample=True),
            ConvBlock(64, 32, kernel_size=3, upsample=True),
            # Last layer: no norm, no ReLU — raw image-tensor output.
            ConvBlock(32, 3, kernel_size=9, stride=1, normalize=False, relu=False),
        ]
        self.model = nn.Sequential(*encoder, *bottleneck, *decoder)

    def forward(self, x):
        return self.model(x)
class StyleResnet18Model():
    """Thin wrapper owning a StyleResnet18 on the best available device,
    with checkpoint save/load support."""

    def __init__(self, checkpoint_path=None):
        # Prefer CUDA when available, otherwise fall back to CPU.
        device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
        self._device = device
        self._model = StyleResnet18().to(device)
        if checkpoint_path is not None:
            # map_location keeps GPU-saved checkpoints loadable on CPU.
            state = torch.load(checkpoint_path, map_location=device)
            self._model.load_state_dict(state)

    def save_checkpoint(self, path: str):
        """Write the current model weights to `path`."""
        torch.save(self._model.state_dict(), path)

    @property
    def model(self):
        """The wrapped StyleResnet18 module."""
        return self._model
|
package org.felix.ml.sampling.cfg;
import org.felix.ml.sampling.exception.ConfigException;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.commons.lang.StringUtils;
import java.io.InputStream;
import java.io.Reader;
import java.util.Enumeration;
import java.util.Iterator;
/**
*
* */
/**
 * Configuration backend built on an Apache Commons
 * {@link PropertiesConfiguration}. Multi-valued properties are exposed as a
 * single comma-separated string.
 */
public class CommonConfig extends ConfigBase {

    PropertiesConfiguration propertiesConf = new PropertiesConfiguration();

    @Override
    protected String getValue(String key) {
        // A key may map to several values; join them with commas.
        return StringUtils.join(propertiesConf.getStringArray(key), ",");
    }

    @Override
    protected boolean contains(String key) {
        return propertiesConf.containsKey(key);
    }

    @Override
    protected Enumeration<Object> keys() {
        // Adapt the Iterator supplied by commons-configuration to the
        // Enumeration type the base class requires.
        final Iterator<String> keyIterator = propertiesConf.getKeys();
        return new Enumeration<Object>() {
            @Override
            public boolean hasMoreElements() {
                return keyIterator.hasNext();
            }

            @Override
            public Object nextElement() {
                return keyIterator.next();
            }
        };
    }

    @Override
    protected void doLoad(InputStream in) throws ConfigException {
        try {
            propertiesConf.load(in);
        } catch (ConfigurationException e) {
            throw new ConfigException("error load config!", e);
        }
    }

    protected void doLoad(Reader in) throws ConfigException {
        try {
            propertiesConf.load(in);
        } catch (ConfigurationException e) {
            throw new ConfigException("error load config!", e);
        }
    }

    @Override
    public String getProperty(String key) {
        return getValue(key);
    }
}
|
<reponame>codefacts/Elastic-Components
package elasta.orm.idgenerator;
import elasta.core.promise.intfs.Promise;
import io.vertx.core.json.JsonObject;
/**
* Created by sohan on 6/28/2017.
*/
/**
 * Generates an id for an entity, asynchronously.
 *
 * <p>NOTE(review): the type parameter {@code T} is not referenced by any
 * member of this interface — confirm whether it is still needed.</p>
 */
public interface ObjectIdGenerator<T> {
    /**
     * @param entity     name of the entity an id is generated for
     * @param jsonObject the object needing an id
     * @return a promise resolving to the resulting JsonObject
     */
    Promise<JsonObject> generateId(String entity, JsonObject jsonObject);
}
|
#!/bin/bash
# Start the PHP-FPM daemon in the background (-D), writing its pid file and
# using the Bitnami php-fpm configuration.
/bin/sh -c "php-fpm -D --pid /opt/bitnami/php/tmp/php-fpm.pid -y /opt/bitnami/php/etc/php-fpm.conf"
# Then start nginx via the Bitnami run script.
/bin/sh -c "/opt/bitnami/scripts/nginx/run.sh"
|
from django.db import models
from django.utils import timezone
import uuid
from django.conf import settings
class Comment(models.Model):
    """A top-level comment authored by a user."""

    # Random UUID primary key; excluded from forms (editable=False) and
    # from dumpdata/serializer output (serialize=False).
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False, serialize=False)
    text = models.TextField()
    # Set once automatically when the row is first created.
    created_at = models.DateTimeField(auto_now_add=True)
    # Author; deleting the user cascades to their comments (user.comments).
    user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, related_name='comments')
class Reply(models.Model):
    """A reply attached to a Comment."""

    # Random UUID primary key; hidden from forms and serialization.
    id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False, serialize=False)
    text = models.TextField()
    # Set once automatically when the row is first created.
    created_at = models.DateTimeField(auto_now_add=True)
    # Parent comment; deleting it deletes its replies (comment.replies).
    comment = models.ForeignKey(Comment, on_delete=models.CASCADE, related_name='replies')
    # Author; deleting the user deletes their replies (user.replies).
    user = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE, related_name='replies')
/**
 * Bitmask of SPIR-V image-operand flags; values may be OR-ed together.
 * Each *KHR member is a numeric alias of its non-suffixed counterpart.
 */
export declare enum ImageOperandsMask {
    None = 0,
    Bias = 1,
    Lod = 2,
    Grad = 4,
    ConstOffset = 8,
    Offset = 16,
    ConstOffsets = 32,
    Sample = 64,
    MinLod = 128,
    MakeTexelAvailable = 256,
    MakeTexelAvailableKHR = 256,
    MakeTexelVisible = 512,
    MakeTexelVisibleKHR = 512,
    NonPrivateTexel = 1024,
    NonPrivateTexelKHR = 1024,
    VolatileTexel = 2048,
    VolatileTexelKHR = 2048,
    SignExtend = 4096,
    ZeroExtend = 8192
}
|
const { spawn } = require('child_process')
const consolelog = require('debug')('webdriver:utils')
// True only for plain non-null objects; arrays and primitives are rejected.
function isObject (p) {
  if (p === null) return false
  if (Array.isArray(p)) return false
  return typeof p === 'object'
}
// Validate a raw webdriver response: must be a plain object carrying a
// `value` property. Returns the response unchanged on success.
function checkRes (res) {
  if (!isObject(res)) {
    throw new Error('Unexpected non-object received from webdriver')
  }
  if (typeof res.value === 'undefined') {
    throw new Error('Missing `value` from object returned by webdriver')
  }
  return res
}
/* Options: args[], stdio{}, env{}, */
// Spawn `command` as a detached-style child. Resolves with { killCommand }
// as soon as the child has a pid; rejects if the process cannot be started.
function exec (command, commandOptions) {
  var options = commandOptions || {}
  return new Promise((resolve, reject) => {
    var child
    try {
      child = spawn(command, options.args || [], {
        env: options.env || process.env,
        stdio: options.stdio || 'ignore'
      })
    } catch (e) {
      // FIX: previously execution fell through after reject() and crashed
      // with a TypeError on `child.on` (child undefined). Bail out instead.
      return reject(e)
    }
    child.on('error', (err) => {
      // The error event will be emitted immediately if ENOENT is
      // the cause of the problems
      if (err.code === 'ENOENT') {
        consolelog(`Could not run ${command}:`, 'aaa', err.code, 'ppp', err)
        reject(err)
      // This will only even happen if "The process could not be killed",
      // or "Sending a message to the child process failed."
      } else {
        consolelog('RETHROWING:', err)
        throw (err)
      }
    })
    // A new process was started: bingo! Resolve successfully.
    if (child.pid) {
      // Don't let the child keep this process's event loop alive...
      child.unref()
      // ...but do ensure the child is killed when this process exits.
      process.once('exit', killChild)
      child.once('exit', (code, signal) => {
        consolelog(`Process ${command} has exited! Code and signal:`, code, signal)
        child = null
        process.removeListener('exit', killChild)
      })
      return resolve({ killCommand })
    }
    // Send `signal` to the child (if still alive) and drop the exit hook.
    function killCommand (signal) {
      process.removeListener('exit', killChild)
      if (child) {
        consolelog(`Sending ${signal} to ${command}`)
        child.kill(signal)
        child = null
      }
    }
    // exit-hook variant: best-effort SIGTERM when this process closes.
    function killChild () {
      consolelog(`Process closed, killing ${command}`, killCommand)
      killCommand('SIGTERM')
    }
  })
}
// Promise-based delay: resolves (with undefined) after `ms` milliseconds.
function sleep (ms) {
  return new Promise((resolve) => {
    setTimeout(resolve, ms)
  })
}
exports = module.exports = { isObject, checkRes, exec, sleep }
|
export * from './AudioManager';
export * from './GameUpdateArgs';
export * from './GameState';
export * from './GameStorage';
export * from './Rotation';
export * from './RotationMap';
export * from './Session';
export * from './Tag';
|
//
// mulle_objc_version.h
// mulle-objc-runtime
//
// Created by Nat! on 10.07.16.
// Copyright (c) 2016 Nat! - <NAME>.
// Copyright (c) 2016 Codeon GmbH.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// Redistributions of source code must retain the above copyright notice, this
// list of conditions and the following disclaimer.
//
// Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// Neither the name of Mulle kybernetiK nor the names of its contributors
// may be used to endorse or promote products derived from this software
// without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//
#ifndef mulle_objc_version_h__
#define mulle_objc_version_h__
#include <assert.h>   // assert() is used by the inline version accessors below
#include <stdint.h>
//
// up the major if the compiler output is incompatible
// up the minor for added features
// up the patch for bugfixes
//
// Change the values below also to match.
//
// *** DONT FORGET TO EDIT mulle-objc-jit.inc TOO***
//
#define MULLE_OBJC_RUNTIME_VERSION ((0 << 20) | (19 << 8) | 0)
//
// these three values are read by the compiler(!)
// only use integers and no expressions
//
#define MULLE_OBJC_RUNTIME_VERSION_MAJOR 0 // max 511
#define MULLE_OBJC_RUNTIME_VERSION_MINOR 19 // max 1023
#define MULLE_OBJC_RUNTIME_VERSION_PATCH 0 // max 255
// Extract the major component (bits 20 and up) from a packed version value.
// The assert cross-checks the packed MULLE_OBJC_RUNTIME_VERSION against the
// standalone MAJOR define so the two stay in sync when either is edited.
static inline uint32_t mulle_objc_version_get_major( uint32_t version)
{
   assert( (MULLE_OBJC_RUNTIME_VERSION >> 20) == MULLE_OBJC_RUNTIME_VERSION_MAJOR);
   return( (uint32_t) (version >> 20));
}
// Extract the minor component (bits 8..17, masked to 0..1023) from a packed
// version value. The assert keeps the packed constant and the standalone
// MINOR define consistent.
static inline uint32_t mulle_objc_version_get_minor( uint32_t version)
{
   assert( ((MULLE_OBJC_RUNTIME_VERSION >> 8) & (1024 - 1)) == MULLE_OBJC_RUNTIME_VERSION_MINOR);
   return( (uint32_t) (version >> 8) & (1024 - 1));
}
// Extract the patch component (low 8 bits) from a packed version value.
// The assert verifies the PATCH define actually fits into 8 bits.
static inline uint32_t mulle_objc_version_get_patch( uint32_t version)
{
   assert( (MULLE_OBJC_RUNTIME_VERSION_PATCH & 255) == MULLE_OBJC_RUNTIME_VERSION_PATCH);
   return( (uint32_t) (version & 255));
}
#endif
|
<gh_stars>0
import {GQLRequest} from '../node_modules/prendus-shared/services/graphql-service';
// Post an LTI grade passback for the current assignment via GraphQL.
// Any GraphQL error is rethrown by the error callback.
export async function LTIPassback(userToken: string, ltiSessionIdJWT: string) {
    const data = await GQLRequest(`
        mutation($ltiSessionIdJWT: String!) {
            assignmentLTIGrade(ltiSessionIdJWT: $ltiSessionIdJWT) {
                success
            }
        }
    `, {ltiSessionIdJWT}, userToken, (error: any) => {
        throw error
    });
    // FIX: the mutation result was awaited but never returned, so callers
    // could not inspect `assignmentLTIGrade.success`. Return it.
    return data;
}
|
#!/bin/bash
# Toggle the local Maven setup between the SP and BI environments by
# rewriting the maven hostname alias line in the Windows hosts file and
# copying the matching settings.xml into place.
#
# FIX: `set echo on` is csh syntax — in bash it merely assigns the
# positional parameters $1=echo $2=on. Use `set -x` to trace commands.
set -x

SP_MAVEN="192.168.4.201 mvn.csdn.net maven.csdn.net"
BI_MAVEN="192.168.6.145 mvn.csdn.net maven.csdn.net"
HOST_FILE="/C/Windows/System32/drivers/etc/hosts"
M2_HOME="/c/Users/zhengwx/.m2/"
SP_SETTINGS="settings_sp.xml"
BI_SETTINGS="settings_bi.xml"
DST_SETTINGS="settings.xml"

# If the BI host line is absent we are currently on SP, so switch to BI;
# otherwise switch back to SP.
grepResult=`grep "$BI_MAVEN" "$HOST_FILE"`
if test -z "$grepResult"; then
    echo "DataService MAVEN is being used"
    # here, we need to use " instead of ' so the variables expand
    sed -i "s/$SP_MAVEN/$BI_MAVEN/g" $HOST_FILE
    cp -f $M2_HOME$BI_SETTINGS $M2_HOME$DST_SETTINGS
    echo "Done: Change to BI MAVEN"
else
    echo "BI MAVEN is being used"
    # here, we need to use " instead of ' so the variables expand
    sed -i "s/$BI_MAVEN/$SP_MAVEN/g" $HOST_FILE
    cp -f $M2_HOME$SP_SETTINGS $M2_HOME$DST_SETTINGS
    echo "Done: Change to SP MAVEN"
fi
|
// Vuex root getters: convenience accessors over module state.
const getters = {
  // menus registered by the routes module
  menus (state) { return state.routes.menus },
  // current user's display name
  name (state) { return state.user.user_name },
  // whether the user profile has already been fetched
  hasGetInfo (state) { return state.user.hasGetInfo }
}
export default getters
|
# Config for Powerlevel10k with lean prompt style. Type `p10k configure` to generate
# your own config based on it.
#
# Tip: Looking for a nice color? Here's a one-liner to print colormap.
#
# for i in {0..255}; do print -Pn "%${i}F${(l:3::0:)i}%f " ${${(M)$((i%8)):#7}:+$'\n'}; done
# Temporarily change options.
'builtin' 'local' '-a' 'p10k_config_opts'
[[ ! -o 'aliases' ]] || p10k_config_opts+=('aliases')
[[ ! -o 'sh_glob' ]] || p10k_config_opts+=('sh_glob')
[[ ! -o 'no_brace_expand' ]] || p10k_config_opts+=('no_brace_expand')
'builtin' 'setopt' 'no_aliases' 'no_sh_glob' 'brace_expand'
() {
emulate -L zsh
setopt no_unset extended_glob
# Unset all configuration options. This allows you to apply configuration changes without
# restarting zsh. Edit ~/.p10k.zsh and type `source ~/.p10k.zsh`.
unset -m 'POWERLEVEL9K_*'
autoload -Uz is-at-least && is-at-least 5.1 || return
zmodload zsh/langinfo
if [[ ${langinfo[CODESET]:-} != (utf|UTF)(-|)8 ]]; then
local LC_ALL=${${(@M)$(locale -a):#*.(utf|UTF)(-|)8}[1]:-en_US.UTF-8}
fi
# The list of segments shown on the left. Fill it with the most important segments.
typeset -g POWERLEVEL9K_LEFT_PROMPT_ELEMENTS=(
# =========================[ Line #1 ]=========================
# os_icon # os identifier
dir # current directory
vcs # git status
# =========================[ Line #2 ]=========================
newline
prompt_char # prompt symbol
)
# The list of segments shown on the right. Fill it with less important segments.
# Right prompt on the last prompt line (where you are typing your commands) gets
# automatically hidden when the input line reaches it. Right prompt above the
# last prompt line gets hidden if it would overlap with left prompt.
typeset -g POWERLEVEL9K_RIGHT_PROMPT_ELEMENTS=(
# =========================[ Line #1 ]=========================
status # exit code of the last command
command_execution_time # duration of the last command
background_jobs # presence of background jobs
direnv # direnv status (https://direnv.net/)
virtualenv # python virtual environment (https://docs.python.org/3/library/venv.html)
anaconda # conda environment (https://conda.io/)
pyenv # python environment (https://github.com/pyenv/pyenv)
goenv # go environment (https://github.com/syndbg/goenv)
nodenv # node.js version from nodenv (https://github.com/nodenv/nodenv)
nvm # node.js version from nvm (https://github.com/nvm-sh/nvm)
nodeenv # node.js environment (https://github.com/ekalinin/nodeenv)
# node_version # node.js version
# go_version # go version (https://golang.org)
# rust_version # rustc version (https://www.rust-lang.org)
# dotnet_version # .NET version (https://dotnet.microsoft.com)
rbenv # ruby version from rbenv (https://github.com/rbenv/rbenv)
rvm # ruby version from rvm (https://rvm.io)
fvm # flutter version management (https://github.com/leoafarias/fvm)
luaenv # lua version from luaenv (https://github.com/cehoffman/luaenv)
jenv # java version from jenv (https://github.com/jenv/jenv)
plenv # perl version from plenv (https://github.com/tokuhirom/plenv)
kubecontext # current kubernetes context (https://kubernetes.io/)
terraform # terraform workspace (https://www.terraform.io)
aws # aws profile (https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-profiles.html)
aws_eb_env # aws elastic beanstalk environment (https://aws.amazon.com/elasticbeanstalk/)
azure # azure account name (https://docs.microsoft.com/en-us/cli/azure)
gcloud # google cloud cli account and project (https://cloud.google.com/)
google_app_cred # google application credentials (https://cloud.google.com/docs/authentication/production)
context # user@hostname
nordvpn # nordvpn connection status, linux only (https://nordvpn.com/)
ranger # ranger shell (https://github.com/ranger/ranger)
nnn # nnn shell (https://github.com/jarun/nnn)
vim_shell # vim shell indicator (:sh)
midnight_commander # midnight commander shell (https://midnight-commander.org/)
nix_shell # nix shell (https://nixos.org/nixos/nix-pills/developing-with-nix-shell.html)
# vpn_ip # virtual private network indicator
# load # CPU load
# disk_usage # disk usage
# ram # free RAM
# swap # used swap
todo # todo items (https://github.com/todotxt/todo.txt-cli)
timewarrior # timewarrior tracking status (https://timewarrior.net/)
# time # current time
# =========================[ Line #2 ]=========================
newline
# public_ip # public IP address
# proxy # system-wide http/https/ftp proxy
# battery # internal battery
# example # example user-defined segment (see prompt_example function below)
)
# Basic style options that define the overall look of your prompt. You probably don't want to
# change them.
typeset -g POWERLEVEL9K_BACKGROUND= # transparent background
typeset -g POWERLEVEL9K_{LEFT,RIGHT}_{LEFT,RIGHT}_WHITESPACE= # no surrounding whitespace
typeset -g POWERLEVEL9K_{LEFT,RIGHT}_SUBSEGMENT_SEPARATOR=' ' # separate segments with a space
typeset -g POWERLEVEL9K_{LEFT,RIGHT}_SEGMENT_SEPARATOR= # no end-of-line symbol
# To enable default icons for all segments, don't define POWERLEVEL9K_VISUAL_IDENTIFIER_EXPANSION
# or set it to '${P9K_VISUAL_IDENTIFIER}'.
#
# To remove spaces from all default icons, set POWERLEVEL9K_VISUAL_IDENTIFIER_EXPANSION
# to '${P9K_VISUAL_IDENTIFIER// }'. You'll know that you need this option if you see extra
# spaces after icons.
#
# To enable default icons for one segment (e.g., dir), set
# POWERLEVEL9K_DIR_VISUAL_IDENTIFIER_EXPANSION='${P9K_VISUAL_IDENTIFIER}'.
#
# To assign a specific icon to one segment (e.g., dir), set
# POWERLEVEL9K_DIR_VISUAL_IDENTIFIER_EXPANSION='⭐'.
#
# To assign a specific icon to a segment in a given state (e.g., dir in state NOT_WRITABLE),
# set POWERLEVEL9K_DIR_NOT_WRITABLE_VISUAL_IDENTIFIER_EXPANSION='⭐'.
#
# Note: You can use $'\u2B50' instead of '⭐'. It's especially convenient when specifying
# icons that your text editor cannot render. Don't forget to put $ and use single quotes when
# defining icons via Unicode codepoints.
#
# Note: Many default icons cannot be displayed with system fonts. You'll need to install a
# capable font to use them. See POWERLEVEL9K_MODE below.
typeset -g POWERLEVEL9K_VISUAL_IDENTIFIER_EXPANSION='${P9K_VISUAL_IDENTIFIER}'
# This option makes a difference only when default icons are enabled for all or some prompt
# segments (see POWERLEVEL9K_VISUAL_IDENTIFIER_EXPANSION above). LOCK_ICON can be printed as
# $'\uE0A2', $'\uE138' or $'\uF023' depending on POWERLEVEL9K_MODE. The correct value of this
# parameter depends on the provider of the font your terminal is using.
#
# Font Provider | POWERLEVEL9K_MODE
# ---------------------------------+-------------------
# Powerline | powerline
# Font Awesome | awesome-fontconfig
# Adobe Source Code Pro | awesome-fontconfig
# Source Code Pro | awesome-fontconfig
# Awesome-Terminal Fonts (regular) | awesome-fontconfig
# Awesome-Terminal Fonts (patched) | awesome-patched
# Nerd Fonts | nerdfont-complete
# Other | compatible
#
# If this looks overwhelming, either stick with a preinstalled system font and set
# POWERLEVEL9K_MODE=compatible, or install the recommended Powerlevel10k font from
# https://github.com/romkatv/powerlevel10k/#recommended-meslo-nerd-font-patched-for-powerlevel10k
# and set POWERLEVEL9K_MODE=nerdfont-complete.
typeset -g POWERLEVEL9K_MODE=nerdfont-complete
# When set to true, icons appear before content on both sides of the prompt. When set
# to false, icons go after content. If empty or not set, icons go before content in the left
# prompt and after content in the right prompt.
#
# You can also override it for a specific segment:
#
# POWERLEVEL9K_STATUS_ICON_BEFORE_CONTENT=false
#
# Or for a specific segment in specific state:
#
# POWERLEVEL9K_DIR_NOT_WRITABLE_ICON_BEFORE_CONTENT=false
typeset -g POWERLEVEL9K_ICON_BEFORE_CONTENT=true
# Add an empty line before each prompt.
typeset -g POWERLEVEL9K_PROMPT_ADD_NEWLINE=true
# Connect left prompt lines with these symbols.
typeset -g POWERLEVEL9K_MULTILINE_FIRST_PROMPT_PREFIX=
typeset -g POWERLEVEL9K_MULTILINE_NEWLINE_PROMPT_PREFIX=
typeset -g POWERLEVEL9K_MULTILINE_LAST_PROMPT_PREFIX=
# Connect right prompt lines with these symbols.
typeset -g POWERLEVEL9K_MULTILINE_FIRST_PROMPT_SUFFIX=
typeset -g POWERLEVEL9K_MULTILINE_NEWLINE_PROMPT_SUFFIX=
typeset -g POWERLEVEL9K_MULTILINE_LAST_PROMPT_SUFFIX=
# The left end of left prompt.
typeset -g POWERLEVEL9K_LEFT_PROMPT_FIRST_SEGMENT_START_SYMBOL=
# The right end of right prompt.
typeset -g POWERLEVEL9K_RIGHT_PROMPT_LAST_SEGMENT_END_SYMBOL=
# Ruler, a.k.a. the horizontal line before each prompt. If you set it to true, you'll
# probably want to set POWERLEVEL9K_PROMPT_ADD_NEWLINE=false above and
# POWERLEVEL9K_MULTILINE_FIRST_PROMPT_GAP_CHAR=' ' below.
typeset -g POWERLEVEL9K_SHOW_RULER=false
typeset -g POWERLEVEL9K_RULER_CHAR='─' # reasonable alternative: '·'
typeset -g POWERLEVEL9K_RULER_FOREGROUND=240
# Filler between left and right prompt on the first prompt line. You can set it to '·' or '─'
# to make it easier to see the alignment between left and right prompt and to separate prompt
# from command output. It serves the same purpose as ruler (see above) without increasing
# the number of prompt lines. You'll probably want to set POWERLEVEL9K_SHOW_RULER=false
# if using this. You might also like POWERLEVEL9K_PROMPT_ADD_NEWLINE=false for more compact
# prompt.
typeset -g POWERLEVEL9K_MULTILINE_FIRST_PROMPT_GAP_CHAR=' '
if [[ $POWERLEVEL9K_MULTILINE_FIRST_PROMPT_GAP_CHAR != ' ' ]]; then
# The color of the filler.
typeset -g POWERLEVEL9K_MULTILINE_FIRST_PROMPT_GAP_FOREGROUND=240
# Add a space between the end of left prompt and the filler.
typeset -g POWERLEVEL9K_LEFT_PROMPT_LAST_SEGMENT_END_SYMBOL=' '
# Add a space between the filler and the start of right prompt.
typeset -g POWERLEVEL9K_RIGHT_PROMPT_FIRST_SEGMENT_START_SYMBOL=' '
# Start filler from the edge of the screen if there are no left segments on the first line.
typeset -g POWERLEVEL9K_EMPTY_LINE_LEFT_PROMPT_FIRST_SEGMENT_END_SYMBOL='%{%}'
# End filler on the edge of the screen if there are no right segments on the first line.
typeset -g POWERLEVEL9K_EMPTY_LINE_RIGHT_PROMPT_FIRST_SEGMENT_START_SYMBOL='%{%}'
fi
#################################[ os_icon: os identifier ]##################################
# OS identifier color.
typeset -g POWERLEVEL9K_OS_ICON_FOREGROUND=
# Make the icon bold.
typeset -g POWERLEVEL9K_OS_ICON_CONTENT_EXPANSION='%B${P9K_CONTENT}'
################################[ prompt_char: prompt symbol ]################################
# Green prompt symbol if the last command succeeded.
typeset -g POWERLEVEL9K_PROMPT_CHAR_OK_{VIINS,VICMD,VIVIS,VIOWR}_FOREGROUND=76
# Red prompt symbol if the last command failed.
typeset -g POWERLEVEL9K_PROMPT_CHAR_ERROR_{VIINS,VICMD,VIVIS,VIOWR}_FOREGROUND=196
# Default prompt symbol.
typeset -g POWERLEVEL9K_PROMPT_CHAR_{OK,ERROR}_VIINS_CONTENT_EXPANSION='❯'
# Prompt symbol in command vi mode.
typeset -g POWERLEVEL9K_PROMPT_CHAR_{OK,ERROR}_VICMD_CONTENT_EXPANSION='❮'
# Prompt symbol in visual vi mode.
typeset -g POWERLEVEL9K_PROMPT_CHAR_{OK,ERROR}_VIVIS_CONTENT_EXPANSION='Ⅴ'
# Prompt symbol in overwrite vi mode.
typeset -g POWERLEVEL9K_PROMPT_CHAR_{OK,ERROR}_VIOWR_CONTENT_EXPANSION='▶'
typeset -g POWERLEVEL9K_PROMPT_CHAR_OVERWRITE_STATE=true
# No line terminator if prompt_char is the last segment.
typeset -g POWERLEVEL9K_PROMPT_CHAR_LEFT_PROMPT_LAST_SEGMENT_END_SYMBOL=''
# No line introducer if prompt_char is the first segment.
typeset -g POWERLEVEL9K_PROMPT_CHAR_LEFT_PROMPT_FIRST_SEGMENT_START_SYMBOL=
##################################[ dir: current directory ]##################################
# Default current directory color.
typeset -g POWERLEVEL9K_DIR_FOREGROUND=31
# If directory is too long, shorten some of its segments to the shortest possible unique
# prefix. The shortened directory can be tab-completed to the original.
typeset -g POWERLEVEL9K_SHORTEN_STRATEGY=truncate_to_unique
# Replace removed segment suffixes with this symbol.
typeset -g POWERLEVEL9K_SHORTEN_DELIMITER=
# Color of the shortened directory segments.
typeset -g POWERLEVEL9K_DIR_SHORTENED_FOREGROUND=103
# Color of the anchor directory segments. Anchor segments are never shortened. The first
# segment is always an anchor.
typeset -g POWERLEVEL9K_DIR_ANCHOR_FOREGROUND=39
# Display anchor directory segments in bold.
typeset -g POWERLEVEL9K_DIR_ANCHOR_BOLD=true
# Don't shorten directories that contain any of these files. They are anchors.
local anchor_files=(
.bzr
.citc
.git
.hg
.node-version
.python-version
.ruby-version
.shorten_folder_marker
.svn
.terraform
CVS
Cargo.toml
composer.json
go.mod
package.json
)
typeset -g POWERLEVEL9K_SHORTEN_FOLDER_MARKER="(${(j:|:)anchor_files})"
# Don't shorten this many last directory segments. They are anchors.
typeset -g POWERLEVEL9K_SHORTEN_DIR_LENGTH=1
# Shorten directory if it's longer than this even if there is space for it. The value can
# be either absolute (e.g., '80') or a percentage of terminal width (e.g, '50%'). If empty,
# directory will be shortened only when prompt doesn't fit or when other parameters demand it
# (see POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS and POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS_PCT below).
# If set to `0`, directory will always be shortened to its minimum length.
typeset -g POWERLEVEL9K_DIR_MAX_LENGTH=80
# When `dir` segment is on the last prompt line, try to shorten it enough to leave at least this
# many columns for typing commands.
typeset -g POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS=40
# When `dir` segment is on the last prompt line, try to shorten it enough to leave at least
# COLUMNS * POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS_PCT * 0.01 columns for typing commands.
typeset -g POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS_PCT=50
# If set to true, embed a hyperlink into the directory. Useful for quickly
# opening a directory in the file manager simply by clicking the link.
# Can also be handy when the directory is shortened, as it allows you to see
# the full directory that was used in previous commands.
typeset -g POWERLEVEL9K_DIR_HYPERLINK=false
# Enable special styling for non-writable directories.
typeset -g POWERLEVEL9K_DIR_SHOW_WRITABLE=true
# Show this icon when the current directory is not writable. POWERLEVEL9K_DIR_SHOW_WRITABLE
# above must be set to true for this parameter to have effect.
# typeset -g POWERLEVEL9K_DIR_NOT_WRITABLE_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Custom prefix.
# typeset -g POWERLEVEL9K_DIR_PREFIX='%fin '
# POWERLEVEL9K_DIR_CLASSES allows you to specify custom icons for different directories.
# It must be an array with 3 * N elements. Each triplet consists of:
#
# 1. A pattern against which the current directory is matched. Matching is done with
# extended_glob option enabled.
# 2. Directory class for the purpose of styling.
# 3. Icon.
#
# Triplets are tried in order. The first triplet whose pattern matches $PWD wins. If there
# are no matches, the directory will have no icon.
#
# Example:
#
# typeset -g POWERLEVEL9K_DIR_CLASSES=(
# '~/work(|/*)' WORK '(╯°□°)╯︵ ┻━┻'
# '~(|/*)' HOME '⌂'
# '*' DEFAULT '')
#
# With these settings, the current directory in the prompt may look like this:
#
# (╯°□°)╯︵ ┻━┻ ~/work/projects/important/urgent
#
# Or like this:
#
# ⌂ ~/best/powerlevel10k
#
# You can also set different colors for directories of different classes. Remember to override
# FOREGROUND, SHORTENED_FOREGROUND and ANCHOR_FOREGROUND for every directory class that you wish
# to have its own color.
#
# typeset -g POWERLEVEL9K_DIR_WORK_FOREGROUND=31
# typeset -g POWERLEVEL9K_DIR_WORK_SHORTENED_FOREGROUND=103
# typeset -g POWERLEVEL9K_DIR_WORK_ANCHOR_FOREGROUND=39
#
# typeset -g POWERLEVEL9K_DIR_CLASSES=()
#####################################[ vcs: git status ]######################################
# Branch icon. Set this parameter to '\uF126 ' for the popular Powerline branch icon.
typeset -g POWERLEVEL9K_VCS_BRANCH_ICON=
POWERLEVEL9K_VCS_BRANCH_ICON=${(g::)POWERLEVEL9K_VCS_BRANCH_ICON}
# Untracked files icon. It's really a question mark, your font isn't broken.
# Change the value of this parameter to show a different icon.
typeset -g POWERLEVEL9K_VCS_UNTRACKED_ICON='?'
POWERLEVEL9K_VCS_UNTRACKED_ICON=${(g::)POWERLEVEL9K_VCS_UNTRACKED_ICON}
# Formatter for Git status.
#
# Example output: master ⇣42⇡42 *42 merge ~42 +42 !42 ?42.
#
# You can edit the function to customize how Git status looks.
#
# VCS_STATUS_* parameters are set by gitstatus plugin. See reference:
# https://github.com/romkatv/gitstatus/blob/master/gitstatus.plugin.zsh.
function my_git_formatter() {
  # Build a formatted Git status string and publish it in $my_git_format, which the
  # POWERLEVEL9K_VCS_*CONTENT_EXPANSION parameters below read. $1 is 1 when gitstatus
  # data is current (colorful styling) and 0 when it is loading/stale (grey styling).
  emulate -L zsh
  if [[ -n $P9K_CONTENT ]]; then
    # If P9K_CONTENT is not empty, use it. It's either "loading" or from vcs_info (not from
    # gitstatus plugin). VCS_STATUS_* parameters are not available in this case.
    typeset -g my_git_format=$P9K_CONTENT
    return
  fi
  if (( $1 )); then
    # Styling for up-to-date Git status.
    local meta='%f' # default foreground
    local clean='%76F' # green foreground
    local modified='%178F' # yellow foreground
    local untracked='%39F' # blue foreground
    local conflicted='%196F' # red foreground
  else
    # Styling for incomplete and stale Git status: everything grey.
    local meta='%244F' # grey foreground
    local clean='%244F' # grey foreground
    local modified='%244F' # grey foreground
    local untracked='%244F' # grey foreground
    local conflicted='%244F' # grey foreground
  fi
  local res
  local where # branch or tag
  if [[ -n $VCS_STATUS_LOCAL_BRANCH ]]; then
    res+="${clean}${POWERLEVEL9K_VCS_BRANCH_ICON}"
    where=${(V)VCS_STATUS_LOCAL_BRANCH}
  elif [[ -n $VCS_STATUS_TAG ]]; then
    # Detached HEAD on a tag: show the tag name prefixed with '#'.
    res+="${meta}#"
    where=${(V)VCS_STATUS_TAG}
  fi
  # If local branch name or tag is at most 32 characters long, show it in full.
  # Otherwise show the first 12 … the last 12.
  (( $#where > 32 )) && where[13,-13]="…"
  res+="${clean}${where//\%/%%}" # escape %
  # Display the current Git commit if there is no branch or tag.
  # Tip: To always display the current Git commit, remove `[[ -z $where ]] &&` from the next line.
  [[ -z $where ]] && res+="${meta}@${clean}${VCS_STATUS_COMMIT[1,8]}"
  # Show tracking branch name if it differs from local branch.
  if [[ -n ${VCS_STATUS_REMOTE_BRANCH:#$VCS_STATUS_LOCAL_BRANCH} ]]; then
    res+="${meta}:${clean}${(V)VCS_STATUS_REMOTE_BRANCH//\%/%%}" # escape %
  fi
  # ⇣42 if behind the remote.
  (( VCS_STATUS_COMMITS_BEHIND )) && res+=" ${clean}⇣${VCS_STATUS_COMMITS_BEHIND}"
  # ⇡42 if ahead of the remote; no leading space if also behind the remote: ⇣42⇡42.
  (( VCS_STATUS_COMMITS_AHEAD && !VCS_STATUS_COMMITS_BEHIND )) && res+=" "
  (( VCS_STATUS_COMMITS_AHEAD )) && res+="${clean}⇡${VCS_STATUS_COMMITS_AHEAD}"
  # *42 if have stashes.
  (( VCS_STATUS_STASHES )) && res+=" ${clean}*${VCS_STATUS_STASHES}"
  # 'merge' if the repo is in an unusual state.
  [[ -n $VCS_STATUS_ACTION ]] && res+=" ${conflicted}${VCS_STATUS_ACTION}"
  # ~42 if have merge conflicts.
  (( VCS_STATUS_NUM_CONFLICTED )) && res+=" ${conflicted}~${VCS_STATUS_NUM_CONFLICTED}"
  # +42 if have staged changes.
  (( VCS_STATUS_NUM_STAGED )) && res+=" ${modified}+${VCS_STATUS_NUM_STAGED}"
  # !42 if have unstaged changes.
  (( VCS_STATUS_NUM_UNSTAGED )) && res+=" ${modified}!${VCS_STATUS_NUM_UNSTAGED}"
  # ?42 if have untracked files. It's really a question mark, your font isn't broken.
  # See POWERLEVEL9K_VCS_UNTRACKED_ICON above if you want to use a different icon.
  # Remove the next line if you don't want to see untracked files at all.
  (( VCS_STATUS_NUM_UNTRACKED )) && res+=" ${untracked}${POWERLEVEL9K_VCS_UNTRACKED_ICON}${VCS_STATUS_NUM_UNTRACKED}"
  typeset -g my_git_format=$res
}
functions -M my_git_formatter 2>/dev/null
# Disable the default Git status formatting.
typeset -g POWERLEVEL9K_VCS_DISABLE_GITSTATUS_FORMATTING=true
# Install our own Git status formatter.
typeset -g POWERLEVEL9K_VCS_CONTENT_EXPANSION='${$((my_git_formatter(1)))+${my_git_format}}'
typeset -g POWERLEVEL9K_VCS_LOADING_CONTENT_EXPANSION='${$((my_git_formatter(0)))+${my_git_format}}'
# Enable counters for staged, unstaged, etc.
typeset -g POWERLEVEL9K_VCS_{STAGED,UNSTAGED,UNTRACKED,CONFLICTED,COMMITS_AHEAD,COMMITS_BEHIND}_MAX_NUM=-1
# Icon color.
typeset -g POWERLEVEL9K_VCS_VISUAL_IDENTIFIER_COLOR=76
typeset -g POWERLEVEL9K_VCS_LOADING_VISUAL_IDENTIFIER_COLOR=244
# Custom icon.
# typeset -g POWERLEVEL9K_VCS_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Custom prefix.
# typeset -g POWERLEVEL9K_VCS_PREFIX='%fon '
# Show status of repositories of these types. You can add svn and/or hg if you are
# using them. If you do, your prompt may become slow even when your current directory
# isn't in an svn or hg repository.
typeset -g POWERLEVEL9K_VCS_BACKENDS=(git)
# These settings are used for repositories other than Git or when gitstatusd fails and
# Powerlevel10k has to fall back to using vcs_info.
typeset -g POWERLEVEL9K_VCS_CLEAN_FOREGROUND=76
typeset -g POWERLEVEL9K_VCS_UNTRACKED_FOREGROUND=76
typeset -g POWERLEVEL9K_VCS_MODIFIED_FOREGROUND=178
##########################[ status: exit code of the last command ]###########################
# Enable OK_PIPE, ERROR_PIPE and ERROR_SIGNAL status states to allow us to enable, disable and
# style them independently from the regular OK and ERROR state.
typeset -g POWERLEVEL9K_STATUS_EXTENDED_STATES=true
# Status on success. No content, just an icon. No need to show it if prompt_char is enabled as
# it will signify success by turning green.
typeset -g POWERLEVEL9K_STATUS_OK=false
typeset -g POWERLEVEL9K_STATUS_OK_FOREGROUND=70
typeset -g POWERLEVEL9K_STATUS_OK_VISUAL_IDENTIFIER_EXPANSION='✔'
# Status when some part of a pipe command fails but the overall exit status is zero. It may look
# like this: 1|0.
typeset -g POWERLEVEL9K_STATUS_OK_PIPE=true
typeset -g POWERLEVEL9K_STATUS_OK_PIPE_FOREGROUND=70
typeset -g POWERLEVEL9K_STATUS_OK_PIPE_VISUAL_IDENTIFIER_EXPANSION='✔'
# Status when it's just an error code (e.g., '1'). No need to show it if prompt_char is enabled as
# it will signify error by turning red.
typeset -g POWERLEVEL9K_STATUS_ERROR=false
typeset -g POWERLEVEL9K_STATUS_ERROR_FOREGROUND=160
typeset -g POWERLEVEL9K_STATUS_ERROR_VISUAL_IDENTIFIER_EXPANSION='✘'
# Status when the last command was terminated by a signal.
typeset -g POWERLEVEL9K_STATUS_ERROR_SIGNAL=true
typeset -g POWERLEVEL9K_STATUS_ERROR_SIGNAL_FOREGROUND=160
# Use terse signal names: "INT" instead of "SIGINT(2)".
typeset -g POWERLEVEL9K_STATUS_VERBOSE_SIGNAME=false
typeset -g POWERLEVEL9K_STATUS_ERROR_SIGNAL_VISUAL_IDENTIFIER_EXPANSION='✘'
# Status when some part of a pipe command fails and the overall exit status is also non-zero.
# It may look like this: 1|0.
typeset -g POWERLEVEL9K_STATUS_ERROR_PIPE=true
typeset -g POWERLEVEL9K_STATUS_ERROR_PIPE_FOREGROUND=160
typeset -g POWERLEVEL9K_STATUS_ERROR_PIPE_VISUAL_IDENTIFIER_EXPANSION='✘'
###################[ command_execution_time: duration of the last command ]###################
# Show duration of the last command if takes longer than this many seconds.
typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_THRESHOLD=3
# Show this many fractional digits. Zero means round to seconds.
typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_PRECISION=0
# Execution time color.
typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_FOREGROUND=101
# Duration format: 1d 2h 3m 4s.
typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_FORMAT='d h m s'
# Custom icon.
# typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Custom prefix.
# typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_PREFIX='%ftook '
#######################[ background_jobs: presence of background jobs ]#######################
# Don't show the number of background jobs.
typeset -g POWERLEVEL9K_BACKGROUND_JOBS_VERBOSE=false
# Background jobs color.
typeset -g POWERLEVEL9K_BACKGROUND_JOBS_FOREGROUND=70
# Custom icon.
# typeset -g POWERLEVEL9K_BACKGROUND_JOBS_VISUAL_IDENTIFIER_EXPANSION='⭐'
#######################[ direnv: direnv status (https://direnv.net/) ]########################
# Direnv color.
typeset -g POWERLEVEL9K_DIRENV_FOREGROUND=178
# Custom icon.
# typeset -g POWERLEVEL9K_DIRENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ nordvpn: nordvpn connection status, linux only (https://nordvpn.com/) ]###########
# NordVPN connection indicator color.
typeset -g POWERLEVEL9K_NORDVPN_FOREGROUND=39
# Hide NordVPN connection indicator when not connected.
typeset -g POWERLEVEL9K_NORDVPN_{DISCONNECTED,CONNECTING,DISCONNECTING}_CONTENT_EXPANSION=
typeset -g POWERLEVEL9K_NORDVPN_{DISCONNECTED,CONNECTING,DISCONNECTING}_VISUAL_IDENTIFIER_EXPANSION=
# Custom icon.
# typeset -g POWERLEVEL9K_NORDVPN_VISUAL_IDENTIFIER_EXPANSION='⭐'
#################[ ranger: ranger shell (https://github.com/ranger/ranger) ]##################
# Ranger shell color.
typeset -g POWERLEVEL9K_RANGER_FOREGROUND=178
# Custom icon.
# typeset -g POWERLEVEL9K_RANGER_VISUAL_IDENTIFIER_EXPANSION='⭐'
######################[ nnn: nnn shell (https://github.com/jarun/nnn) ]#######################
# Nnn shell color.
typeset -g POWERLEVEL9K_NNN_FOREGROUND=72
# Custom icon.
# typeset -g POWERLEVEL9K_NNN_VISUAL_IDENTIFIER_EXPANSION='⭐'
###########################[ vim_shell: vim shell indicator (:sh) ]###########################
# Vim shell indicator color.
typeset -g POWERLEVEL9K_VIM_SHELL_FOREGROUND=34
# Custom icon.
# typeset -g POWERLEVEL9K_VIM_SHELL_VISUAL_IDENTIFIER_EXPANSION='⭐'
######[ midnight_commander: midnight commander shell (https://midnight-commander.org/) ]######
# Midnight Commander shell color.
typeset -g POWERLEVEL9K_MIDNIGHT_COMMANDER_FOREGROUND=178
# Custom icon.
# typeset -g POWERLEVEL9K_MIDNIGHT_COMMANDER_VISUAL_IDENTIFIER_EXPANSION='⭐'
#[ nix_shell: nix shell (https://nixos.org/nixos/nix-pills/developing-with-nix-shell.html) ]##
# Nix shell color.
typeset -g POWERLEVEL9K_NIX_SHELL_FOREGROUND=74
# Tip: If you want to see just the icon without "pure" and "impure", uncomment the next line.
# typeset -g POWERLEVEL9K_NIX_SHELL_CONTENT_EXPANSION=
# Custom icon.
# typeset -g POWERLEVEL9K_NIX_SHELL_VISUAL_IDENTIFIER_EXPANSION='⭐'
##################################[ disk_usage: disk usage ]##################################
# Colors for different levels of disk usage.
typeset -g POWERLEVEL9K_DISK_USAGE_NORMAL_FOREGROUND=35
typeset -g POWERLEVEL9K_DISK_USAGE_WARNING_FOREGROUND=220
typeset -g POWERLEVEL9K_DISK_USAGE_CRITICAL_FOREGROUND=160
# Thresholds for different levels of disk usage (percentage points).
typeset -g POWERLEVEL9K_DISK_USAGE_WARNING_LEVEL=90
typeset -g POWERLEVEL9K_DISK_USAGE_CRITICAL_LEVEL=95
# If set to true, hide disk usage when below $POWERLEVEL9K_DISK_USAGE_WARNING_LEVEL percent.
typeset -g POWERLEVEL9K_DISK_USAGE_ONLY_WARNING=false
# Custom icon.
# typeset -g POWERLEVEL9K_DISK_USAGE_VISUAL_IDENTIFIER_EXPANSION='⭐'
######################################[ ram: free RAM ]#######################################
# RAM color.
typeset -g POWERLEVEL9K_RAM_FOREGROUND=66
# Custom icon.
# typeset -g POWERLEVEL9K_RAM_VISUAL_IDENTIFIER_EXPANSION='⭐'
#####################################[ swap: used swap ]######################################
# Swap color.
typeset -g POWERLEVEL9K_SWAP_FOREGROUND=96
# Custom icon.
# typeset -g POWERLEVEL9K_SWAP_VISUAL_IDENTIFIER_EXPANSION='⭐'
######################################[ load: CPU load ]######################################
# Show average CPU load over this many last minutes. Valid values are 1, 5 and 15.
typeset -g POWERLEVEL9K_LOAD_WHICH=5
# Load color when load is under 50%.
typeset -g POWERLEVEL9K_LOAD_NORMAL_FOREGROUND=66
# Load color when load is between 50% and 70%.
typeset -g POWERLEVEL9K_LOAD_WARNING_FOREGROUND=178
# Load color when load is over 70%.
typeset -g POWERLEVEL9K_LOAD_CRITICAL_FOREGROUND=166
# Custom icon.
# typeset -g POWERLEVEL9K_LOAD_VISUAL_IDENTIFIER_EXPANSION='⭐'
################[ todo: todo items (https://github.com/todotxt/todo.txt-cli) ]################
# Todo color.
typeset -g POWERLEVEL9K_TODO_FOREGROUND=110
# Hide todo when the total number of tasks is zero.
typeset -g POWERLEVEL9K_TODO_HIDE_ZERO_TOTAL=true
# Hide todo when the number of tasks after filtering is zero.
typeset -g POWERLEVEL9K_TODO_HIDE_ZERO_FILTERED=false
# Todo format. The following parameters are available within the expansion.
#
# - P9K_TODO_TOTAL_TASK_COUNT The total number of tasks.
# - P9K_TODO_FILTERED_TASK_COUNT The number of tasks after filtering.
#
# These variables correspond to the last line of the output of `todo.sh -p ls`:
#
# TODO: 24 of 42 tasks shown
#
# Here 24 is P9K_TODO_FILTERED_TASK_COUNT and 42 is P9K_TODO_TOTAL_TASK_COUNT.
#
# typeset -g POWERLEVEL9K_TODO_CONTENT_EXPANSION='$P9K_TODO_FILTERED_TASK_COUNT'
# Custom icon.
# typeset -g POWERLEVEL9K_TODO_VISUAL_IDENTIFIER_EXPANSION='⭐'
###########[ timewarrior: timewarrior tracking status (https://timewarrior.net/) ]############
# Timewarrior color.
typeset -g POWERLEVEL9K_TIMEWARRIOR_FOREGROUND=110
# If the tracked task is longer than 24 characters, truncate and append "…".
# Tip: To always display tasks without truncation, delete the following parameter.
# Tip: To hide task names and display just the icon when time tracking is enabled, set the
# value of the following parameter to "".
typeset -g POWERLEVEL9K_TIMEWARRIOR_CONTENT_EXPANSION='${P9K_CONTENT:0:24}${${P9K_CONTENT:24}:+…}'
# Custom icon.
# typeset -g POWERLEVEL9K_TIMEWARRIOR_VISUAL_IDENTIFIER_EXPANSION='⭐'
##################################[ context: user@hostname ]##################################
# Context color when running with privileges.
typeset -g POWERLEVEL9K_CONTEXT_ROOT_FOREGROUND=178
# Context color in SSH without privileges.
typeset -g POWERLEVEL9K_CONTEXT_{REMOTE,REMOTE_SUDO}_FOREGROUND=180
# Default context color (no privileges, no SSH).
typeset -g POWERLEVEL9K_CONTEXT_FOREGROUND=180
# Context format when running with privileges: bold user@hostname.
typeset -g POWERLEVEL9K_CONTEXT_ROOT_TEMPLATE='%B%n@%m'
# Context format when in SSH without privileges: user@hostname.
typeset -g POWERLEVEL9K_CONTEXT_{REMOTE,REMOTE_SUDO}_TEMPLATE='%n@%m'
# Default context format (no privileges, no SSH): user@hostname.
typeset -g POWERLEVEL9K_CONTEXT_TEMPLATE='%n@%m'
# Don't show context unless running with privileges or in SSH.
# Tip: Remove the next line to always show context.
typeset -g POWERLEVEL9K_CONTEXT_{DEFAULT,SUDO}_{CONTENT,VISUAL_IDENTIFIER}_EXPANSION=
# Custom icon.
# typeset -g POWERLEVEL9K_CONTEXT_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Custom prefix.
# typeset -g POWERLEVEL9K_CONTEXT_PREFIX='%fwith '
###[ virtualenv: python virtual environment (https://docs.python.org/3/library/venv.html) ]###
# Python virtual environment color.
typeset -g POWERLEVEL9K_VIRTUALENV_FOREGROUND=37
# Don't show Python version next to the virtual environment name.
typeset -g POWERLEVEL9K_VIRTUALENV_SHOW_PYTHON_VERSION=false
# Separate environment name from Python version only with a space.
typeset -g POWERLEVEL9K_VIRTUALENV_{LEFT,RIGHT}_DELIMITER=
# Custom icon.
# typeset -g POWERLEVEL9K_VIRTUALENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
#####################[ anaconda: conda environment (https://conda.io/) ]######################
# Anaconda environment color.
typeset -g POWERLEVEL9K_ANACONDA_FOREGROUND=37
# Don't show Python version next to the anaconda environment name.
typeset -g POWERLEVEL9K_ANACONDA_SHOW_PYTHON_VERSION=false
# Separate environment name from Python version only with a space.
typeset -g POWERLEVEL9K_ANACONDA_{LEFT,RIGHT}_DELIMITER=
# Custom icon.
# typeset -g POWERLEVEL9K_ANACONDA_VISUAL_IDENTIFIER_EXPANSION='⭐'
################[ pyenv: python environment (https://github.com/pyenv/pyenv) ]################
# Pyenv color.
typeset -g POWERLEVEL9K_PYENV_FOREGROUND=37
# Hide python version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_PYENV_SOURCES=(shell local global)
# If set to false, hide python version if it's the same as global:
# $(pyenv version-name) == $(pyenv global).
typeset -g POWERLEVEL9K_PYENV_PROMPT_ALWAYS_SHOW=false
# Custom icon.
# typeset -g POWERLEVEL9K_PYENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
################[ goenv: go environment (https://github.com/syndbg/goenv) ]################
# Goenv color.
typeset -g POWERLEVEL9K_GOENV_FOREGROUND=37
# Hide go version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_GOENV_SOURCES=(shell local global)
# If set to false, hide go version if it's the same as global:
# $(goenv version-name) == $(goenv global).
typeset -g POWERLEVEL9K_GOENV_PROMPT_ALWAYS_SHOW=false
# Custom icon.
# typeset -g POWERLEVEL9K_GOENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ nodenv: node.js version from nodenv (https://github.com/nodenv/nodenv) ]##########
# Nodenv color.
typeset -g POWERLEVEL9K_NODENV_FOREGROUND=70
# Don't show node version if it's the same as global: $(nodenv version-name) == $(nodenv global).
typeset -g POWERLEVEL9K_NODENV_PROMPT_ALWAYS_SHOW=false
# Custom icon.
# typeset -g POWERLEVEL9K_NODENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
##############[ nvm: node.js version from nvm (https://github.com/nvm-sh/nvm) ]###############
# Nvm color.
typeset -g POWERLEVEL9K_NVM_FOREGROUND=70
# Custom icon.
# typeset -g POWERLEVEL9K_NVM_VISUAL_IDENTIFIER_EXPANSION='⭐'
############[ nodeenv: node.js environment (https://github.com/ekalinin/nodeenv) ]############
# Nodeenv color.
typeset -g POWERLEVEL9K_NODEENV_FOREGROUND=70
# Don't show Node version next to the environment name.
typeset -g POWERLEVEL9K_NODEENV_SHOW_NODE_VERSION=false
# Separate environment name from Node version only with a space.
typeset -g POWERLEVEL9K_NODEENV_{LEFT,RIGHT}_DELIMITER=
# Custom icon.
# typeset -g POWERLEVEL9K_NODEENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
##############################[ node_version: node.js version ]###############################
# Node version color.
typeset -g POWERLEVEL9K_NODE_VERSION_FOREGROUND=70
# Show node version only when in a directory tree containing package.json.
typeset -g POWERLEVEL9K_NODE_VERSION_PROJECT_ONLY=true
# Custom icon.
# typeset -g POWERLEVEL9K_NODE_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
#######################[ go_version: go version (https://golang.org) ]########################
# Go version color.
typeset -g POWERLEVEL9K_GO_VERSION_FOREGROUND=37
# Show go version only when in a go project subdirectory.
typeset -g POWERLEVEL9K_GO_VERSION_PROJECT_ONLY=true
# Custom icon.
# typeset -g POWERLEVEL9K_GO_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
#################[ rust_version: rustc version (https://www.rust-lang.org) ]##################
# Rust version color.
typeset -g POWERLEVEL9K_RUST_VERSION_FOREGROUND=37
# Show rust version only when in a rust project subdirectory.
typeset -g POWERLEVEL9K_RUST_VERSION_PROJECT_ONLY=true
# Custom icon.
# typeset -g POWERLEVEL9K_RUST_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
###############[ dotnet_version: .NET version (https://dotnet.microsoft.com) ]################
# .NET version color.
typeset -g POWERLEVEL9K_DOTNET_VERSION_FOREGROUND=134
# Show .NET version only when in a .NET project subdirectory.
typeset -g POWERLEVEL9K_DOTNET_VERSION_PROJECT_ONLY=true
# Custom icon.
# typeset -g POWERLEVEL9K_DOTNET_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐'
#############[ rbenv: ruby version from rbenv (https://github.com/rbenv/rbenv) ]##############
# Rbenv color.
typeset -g POWERLEVEL9K_RBENV_FOREGROUND=168
# Hide ruby version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_RBENV_SOURCES=(shell local global)
# If set to false, hide ruby version if it's the same as global:
# $(rbenv version-name) == $(rbenv global).
typeset -g POWERLEVEL9K_RBENV_PROMPT_ALWAYS_SHOW=false
# Custom icon.
# typeset -g POWERLEVEL9K_RBENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
#######################[ rvm: ruby version from rvm (https://rvm.io) ]########################
# Rvm color.
typeset -g POWERLEVEL9K_RVM_FOREGROUND=168
# Don't show @gemset at the end.
typeset -g POWERLEVEL9K_RVM_SHOW_GEMSET=false
# Don't show ruby- at the front.
typeset -g POWERLEVEL9K_RVM_SHOW_PREFIX=false
# Custom icon.
# typeset -g POWERLEVEL9K_RVM_VISUAL_IDENTIFIER_EXPANSION='⭐'
###########[ fvm: flutter version management (https://github.com/leoafarias/fvm) ]############
# Fvm color.
typeset -g POWERLEVEL9K_FVM_FOREGROUND=38
# Custom icon.
# typeset -g POWERLEVEL9K_FVM_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ luaenv: lua version from luaenv (https://github.com/cehoffman/luaenv) ]###########
# Lua color.
typeset -g POWERLEVEL9K_LUAENV_FOREGROUND=32
# Hide lua version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_LUAENV_SOURCES=(shell local global)
# If set to false, hide lua version if it's the same as global:
# $(luaenv version-name) == $(luaenv global).
typeset -g POWERLEVEL9K_LUAENV_PROMPT_ALWAYS_SHOW=false
# Custom icon.
# typeset -g POWERLEVEL9K_LUAENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
###############[ jenv: java version from jenv (https://github.com/jenv/jenv) ]################
# Java color.
typeset -g POWERLEVEL9K_JENV_FOREGROUND=32
# Hide java version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_JENV_SOURCES=(shell local global)
# If set to false, hide java version if it's the same as global:
# $(jenv version-name) == $(jenv global).
typeset -g POWERLEVEL9K_JENV_PROMPT_ALWAYS_SHOW=false
# Custom icon.
# typeset -g POWERLEVEL9K_JENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
###########[ plenv: perl version from plenv (https://github.com/tokuhirom/plenv) ]############
# Perl color.
typeset -g POWERLEVEL9K_PLENV_FOREGROUND=67
# Hide perl version if it doesn't come from one of these sources.
typeset -g POWERLEVEL9K_PLENV_SOURCES=(shell local global)
# If set to false, hide perl version if it's the same as global:
# $(plenv version-name) == $(plenv global).
typeset -g POWERLEVEL9K_PLENV_PROMPT_ALWAYS_SHOW=false
# Custom icon.
# typeset -g POWERLEVEL9K_PLENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
#############[ kubecontext: current kubernetes context (https://kubernetes.io/) ]#############
# Show kubecontext only when the command you are typing invokes one of these tools.
# Tip: Remove the next line to always show kubecontext.
typeset -g POWERLEVEL9K_KUBECONTEXT_SHOW_ON_COMMAND='kubectl|helm|kubens|kubectx|oc'
# Kubernetes context classes for the purpose of using different colors, icons and expansions with
# different contexts.
#
# POWERLEVEL9K_KUBECONTEXT_CLASSES is an array with even number of elements. The first element
# in each pair defines a pattern against which the current kubernetes context gets matched.
# More specifically, it's P9K_CONTENT prior to the application of context expansion (see below)
# that gets matched. If you unset all POWERLEVEL9K_KUBECONTEXT_*CONTENT_EXPANSION parameters,
# you'll see this value in your prompt. The second element of each pair in
# POWERLEVEL9K_KUBECONTEXT_CLASSES defines the context class. Patterns are tried in order. The
# first match wins.
#
# For example, given these settings:
#
# typeset -g POWERLEVEL9K_KUBECONTEXT_CLASSES=(
# '*prod*' PROD
# '*test*' TEST
# '*' DEFAULT)
#
# If your current kubernetes context is "deathray-testing/default", its class is TEST
# because "deathray-testing/default" doesn't match the pattern '*prod*' but does match '*test*'.
#
# You can define different colors, icons and content expansions for different classes:
#
# typeset -g POWERLEVEL9K_KUBECONTEXT_TEST_FOREGROUND=28
# typeset -g POWERLEVEL9K_KUBECONTEXT_TEST_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_KUBECONTEXT_TEST_CONTENT_EXPANSION='> ${P9K_CONTENT} <'
typeset -g POWERLEVEL9K_KUBECONTEXT_CLASSES=(
# '*prod*' PROD # These values are examples that are unlikely
# '*test*' TEST # to match your needs. Customize them as needed.
'*' DEFAULT)
typeset -g POWERLEVEL9K_KUBECONTEXT_DEFAULT_FOREGROUND=134
# typeset -g POWERLEVEL9K_KUBECONTEXT_DEFAULT_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Use POWERLEVEL9K_KUBECONTEXT_CONTENT_EXPANSION to specify the content displayed by kubecontext
# segment. Parameter expansions are very flexible and fast, too. See reference:
# http://zsh.sourceforge.net/Doc/Release/Expansion.html#Parameter-Expansion.
#
# Within the expansion the following parameters are always available:
#
# - P9K_CONTENT The content that would've been displayed if there was no content
# expansion defined.
# - P9K_KUBECONTEXT_NAME The current context's name. Corresponds to column NAME in the
# output of `kubectl config get-contexts`.
# - P9K_KUBECONTEXT_CLUSTER The current context's cluster. Corresponds to column CLUSTER in the
# output of `kubectl config get-contexts`.
# - P9K_KUBECONTEXT_NAMESPACE The current context's namespace. Corresponds to column NAMESPACE
# in the output of `kubectl config get-contexts`. If there is no
# namespace, the parameter is set to "default".
# - P9K_KUBECONTEXT_USER The current context's user. Corresponds to column AUTHINFO in the
# output of `kubectl config get-contexts`.
#
# If the context points to Google Kubernetes Engine (GKE) or Elastic Kubernetes Service (EKS),
# the following extra parameters are available:
#
# - P9K_KUBECONTEXT_CLOUD_NAME Either "gke" or "eks".
# - P9K_KUBECONTEXT_CLOUD_ACCOUNT Account/project ID.
# - P9K_KUBECONTEXT_CLOUD_ZONE Availability zone.
# - P9K_KUBECONTEXT_CLOUD_CLUSTER Cluster.
#
# P9K_KUBECONTEXT_CLOUD_* parameters are derived from P9K_KUBECONTEXT_CLUSTER. For example,
# if P9K_KUBECONTEXT_CLUSTER is "gke_my-account_us-east1-a_my-cluster-01":
#
# - P9K_KUBECONTEXT_CLOUD_NAME=gke
# - P9K_KUBECONTEXT_CLOUD_ACCOUNT=my-account
# - P9K_KUBECONTEXT_CLOUD_ZONE=us-east1-a
# - P9K_KUBECONTEXT_CLOUD_CLUSTER=my-cluster-01
#
# If P9K_KUBECONTEXT_CLUSTER is "arn:aws:eks:us-east-1:123456789012:cluster/my-cluster-01":
#
# - P9K_KUBECONTEXT_CLOUD_NAME=eks
# - P9K_KUBECONTEXT_CLOUD_ACCOUNT=123456789012
# - P9K_KUBECONTEXT_CLOUD_ZONE=us-east-1
# - P9K_KUBECONTEXT_CLOUD_CLUSTER=my-cluster-01
typeset -g POWERLEVEL9K_KUBECONTEXT_DEFAULT_CONTENT_EXPANSION=
# Show P9K_KUBECONTEXT_CLOUD_CLUSTER if it's not empty and fall back to P9K_KUBECONTEXT_NAME.
POWERLEVEL9K_KUBECONTEXT_DEFAULT_CONTENT_EXPANSION+='${P9K_KUBECONTEXT_CLOUD_CLUSTER:-${P9K_KUBECONTEXT_NAME}}'
# Append the current context's namespace if it's not "default".
POWERLEVEL9K_KUBECONTEXT_DEFAULT_CONTENT_EXPANSION+='${${:-/$P9K_KUBECONTEXT_NAMESPACE}:#/default}'
# Custom prefix.
# typeset -g POWERLEVEL9K_KUBECONTEXT_PREFIX='%fat '
################[ terraform: terraform workspace (https://www.terraform.io) ]#################
# Terraform color.
typeset -g POWERLEVEL9K_TERRAFORM_FOREGROUND=38
# Custom icon.
# typeset -g POWERLEVEL9K_TERRAFORM_VISUAL_IDENTIFIER_EXPANSION='⭐'
#[ aws: aws profile (https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-profiles.html) ]#
# Show aws only when the command you are typing invokes one of these tools.
# Tip: Remove the next line to always show aws.
typeset -g POWERLEVEL9K_AWS_SHOW_ON_COMMAND='aws|awless|terraform|pulumi'
# POWERLEVEL9K_AWS_CLASSES is an array with even number of elements. The first element
# in each pair defines a pattern against which the current AWS profile gets matched.
# More specifically, it's P9K_CONTENT prior to the application of context expansion (see below)
# that gets matched. If you unset all POWERLEVEL9K_AWS_*CONTENT_EXPANSION parameters,
# you'll see this value in your prompt. The second element of each pair in
# POWERLEVEL9K_AWS_CLASSES defines the context class. Patterns are tried in order. The
# first match wins.
#
# For example, given these settings:
#
# typeset -g POWERLEVEL9K_AWS_CLASSES=(
# '*prod*' PROD
# '*test*' TEST
# '*' DEFAULT)
#
# If your current AWS profile is "company_test", its class is TEST
# because "company_test" doesn't match the pattern '*prod*' but does match '*test*'.
#
# You can define different colors, icons and content expansions for different classes:
#
# typeset -g POWERLEVEL9K_AWS_TEST_FOREGROUND=28
# typeset -g POWERLEVEL9K_AWS_TEST_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_AWS_TEST_CONTENT_EXPANSION='> ${P9K_CONTENT} <'
typeset -g POWERLEVEL9K_AWS_CLASSES=(
# '*prod*' PROD # These values are examples that are unlikely
# '*test*' TEST # to match your needs. Customize them as needed.
'*' DEFAULT)
typeset -g POWERLEVEL9K_AWS_DEFAULT_FOREGROUND=208
# typeset -g POWERLEVEL9K_AWS_DEFAULT_VISUAL_IDENTIFIER_EXPANSION='⭐'
#[ aws_eb_env: aws elastic beanstalk environment (https://aws.amazon.com/elasticbeanstalk/) ]#
# AWS Elastic Beanstalk environment color.
typeset -g POWERLEVEL9K_AWS_EB_ENV_FOREGROUND=70
# Custom icon.
# typeset -g POWERLEVEL9K_AWS_EB_ENV_VISUAL_IDENTIFIER_EXPANSION='⭐'
##########[ azure: azure account name (https://docs.microsoft.com/en-us/cli/azure) ]##########
# Show azure only when the command you are typing invokes one of these tools.
# Tip: Remove the next line to always show azure.
typeset -g POWERLEVEL9K_AZURE_SHOW_ON_COMMAND='az|terraform|pulumi'
# Azure account name color.
typeset -g POWERLEVEL9K_AZURE_FOREGROUND=32
# Custom icon.
# typeset -g POWERLEVEL9K_AZURE_VISUAL_IDENTIFIER_EXPANSION='⭐'
###########[ gcloud: google cloud account and project (https://cloud.google.com/) ]###########
# Show gcloud only when the command you are typing invokes one of these tools.
# Tip: Remove the next line to always show gcloud.
typeset -g POWERLEVEL9K_GCLOUD_SHOW_ON_COMMAND='gcloud|gcs'
# Google cloud color.
typeset -g POWERLEVEL9K_GCLOUD_FOREGROUND=32
# Google cloud format. Change the value of POWERLEVEL9K_GCLOUD_CONTENT_EXPANSION if the default
# is too verbose or not informative enough.
#
# P9K_GCLOUD_ACCOUNT: the output of `gcloud config get-value account`
# P9K_GCLOUD_PROJECT: the output of `gcloud config get-value project`
# ${VARIABLE//\%/%%}: ${VARIABLE} with all occurrences of '%' replaced with '%%'.
#
typeset -g POWERLEVEL9K_GCLOUD_CONTENT_EXPANSION='${P9K_GCLOUD_PROJECT//\%/%%}'
# Custom icon.
# typeset -g POWERLEVEL9K_GCLOUD_VISUAL_IDENTIFIER_EXPANSION='⭐'
#[ google_app_cred: google application credentials (https://cloud.google.com/docs/authentication/production) ]#
# Show google_app_cred only when the command you are typing invokes one of these tools.
# Tip: Remove the next line to always show google_app_cred.
typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_SHOW_ON_COMMAND='terraform|pulumi'
# Google application credentials classes for the purpose of using different colors, icons and
# expansions with different credentials.
#
# POWERLEVEL9K_GOOGLE_APP_CRED_CLASSES is an array with even number of elements. The first
# element in each pair defines a pattern against which the current Google application credentials get
# matched. More specifically, it's P9K_CONTENT prior to the application of context expansion
# (see below) that gets matched. If you unset all POWERLEVEL9K_GOOGLE_APP_CRED_*CONTENT_EXPANSION
# parameters, you'll see this value in your prompt. The second element of each pair in
# POWERLEVEL9K_GOOGLE_APP_CRED_CLASSES defines the context class. Patterns are tried in order.
# The first match wins.
#
# For example, given these settings:
#
# typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_CLASSES=(
# '*:*prod*:*' PROD
# '*:*test*:*' TEST
# '*' DEFAULT)
#
# If your current Google application credentials is "service_account deathray-testing x@y.com",
# its class is TEST because it doesn't match the pattern '*:*prod*:*' but does match '*:*test*:*'.
#
# You can define different colors, icons and content expansions for different classes:
#
# typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_TEST_FOREGROUND=28
# typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_TEST_VISUAL_IDENTIFIER_EXPANSION='⭐'
# typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_TEST_CONTENT_EXPANSION='$P9K_GOOGLE_APP_CRED_PROJECT_ID'
typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_CLASSES=(
# '*:*prod*:*' PROD # These values are examples that are unlikely
# '*:*test*:*' TEST # to match your needs. Customize them as needed.
'*' DEFAULT)
typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_DEFAULT_FOREGROUND=32
# typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_DEFAULT_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Use POWERLEVEL9K_GOOGLE_APP_CRED_CONTENT_EXPANSION to specify the content displayed by
# google_app_cred segment. Parameter expansions are very flexible and fast, too. See reference:
# http://zsh.sourceforge.net/Doc/Release/Expansion.html#Parameter-Expansion.
#
# You can use the following parameters in the expansion. Each of them corresponds to one of the
# fields in the JSON file pointed to by GOOGLE_APPLICATION_CREDENTIALS.
#
# Parameter | JSON key file field
# ---------------------------------+---------------
# P9K_GOOGLE_APP_CRED_TYPE | type
# P9K_GOOGLE_APP_CRED_PROJECT_ID | project_id
# P9K_GOOGLE_APP_CRED_CLIENT_EMAIL | client_email
#
# Note: ${VARIABLE//\%/%%} expands to ${VARIABLE} with all occurrences of '%' replaced by '%%'.
typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_DEFAULT_CONTENT_EXPANSION='${P9K_GOOGLE_APP_CRED_PROJECT_ID//\%/%%}'
###############################[ public_ip: public IP address ]###############################
# Public IP color.
typeset -g POWERLEVEL9K_PUBLIC_IP_FOREGROUND=94
# Custom icon.
# typeset -g POWERLEVEL9K_PUBLIC_IP_VISUAL_IDENTIFIER_EXPANSION='⭐'
########################[ vpn_ip: virtual private network indicator ]#########################
# VPN IP color.
typeset -g POWERLEVEL9K_VPN_IP_FOREGROUND=81
# When on VPN, show just an icon without the IP address.
# Tip: To display the private IP address when on VPN, remove the next line.
typeset -g POWERLEVEL9K_VPN_IP_CONTENT_EXPANSION=
# Regular expression for the VPN network interface. Run ifconfig while on VPN to see the
# name of the interface.
typeset -g POWERLEVEL9K_VPN_IP_INTERFACE='(wg|(.*tun))[0-9]*'
# Custom icon.
# typeset -g POWERLEVEL9K_VPN_IP_VISUAL_IDENTIFIER_EXPANSION='⭐'
#########################[ proxy: system-wide http/https/ftp proxy ]##########################
# Proxy color.
typeset -g POWERLEVEL9K_PROXY_FOREGROUND=68
# Custom icon.
# typeset -g POWERLEVEL9K_PROXY_VISUAL_IDENTIFIER_EXPANSION='⭐'
################################[ battery: internal battery ]#################################
# Show battery in red when it's below this level and not connected to power supply.
typeset -g POWERLEVEL9K_BATTERY_LOW_THRESHOLD=20
typeset -g POWERLEVEL9K_BATTERY_LOW_FOREGROUND=160
# Show battery in green when it's charging or fully charged.
typeset -g POWERLEVEL9K_BATTERY_{CHARGING,CHARGED}_FOREGROUND=70
# Show battery in yellow when it's discharging.
typeset -g POWERLEVEL9K_BATTERY_DISCONNECTED_FOREGROUND=178
# Battery pictograms going from low to high level of charge.
typeset -g POWERLEVEL9K_BATTERY_STAGES=('%K{232}▁' '%K{232}▂' '%K{232}▃' '%K{232}▄' '%K{232}▅' '%K{232}▆' '%K{232}▇' '%K{232}█')
# Don't show the remaining time to charge/discharge.
typeset -g POWERLEVEL9K_BATTERY_VERBOSE=false
####################################[ time: current time ]####################################
# Current time color.
typeset -g POWERLEVEL9K_TIME_FOREGROUND=66
# Format for the current time: 09:51:02. See `man 3 strftime`.
typeset -g POWERLEVEL9K_TIME_FORMAT='%D{%H:%M:%S}'
# If set to true, time will update when you hit enter. This way prompts for the past
# commands will contain the start times of their commands as opposed to the default
# behavior where they contain the end times of their preceding commands.
typeset -g POWERLEVEL9K_TIME_UPDATE_ON_COMMAND=false
# Custom icon.
# typeset -g POWERLEVEL9K_TIME_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Custom prefix.
# typeset -g POWERLEVEL9K_TIME_PREFIX='%fat '
# Example of a user-defined prompt segment. Function prompt_example will be called on every
# prompt if `example` prompt segment is added to POWERLEVEL9K_LEFT_PROMPT_ELEMENTS or
# POWERLEVEL9K_RIGHT_PROMPT_ELEMENTS. It displays an icon and orange text greeting the user.
#
# Type `p10k help segment` for documentation and a more sophisticated example.
function prompt_example() {
p10k segment -f 208 -i '⭐' -t 'hello, %n'
}
# User-defined prompt segments may optionally provide an instant_prompt_* function. Its job
# is to generate the prompt segment for display in instant prompt. See
# https://github.com/romkatv/powerlevel10k/blob/master/README.md#instant-prompt.
#
# Powerlevel10k will call instant_prompt_* at the same time as the regular prompt_* function
# and will record all `p10k segment` calls it makes. When displaying instant prompt, Powerlevel10k
# will replay these calls without actually calling instant_prompt_*. It is imperative that
# instant_prompt_* always makes the same `p10k segment` calls regardless of environment. If this
# rule is not observed, the content of instant prompt will be incorrect.
#
# Usually, you should either not define instant_prompt_* or simply call prompt_* from it. If
# instant_prompt_* is not defined for a segment, the segment won't be shown in instant prompt.
function instant_prompt_example() {
# Since prompt_example always makes the same `p10k segment` calls, we can call it from
# instant_prompt_example. This will give us the same `example` prompt segment in the instant
# and regular prompts.
prompt_example
}
# User-defined prompt segments can be customized the same way as built-in segments.
# typeset -g POWERLEVEL9K_EXAMPLE_FOREGROUND=208
# typeset -g POWERLEVEL9K_EXAMPLE_VISUAL_IDENTIFIER_EXPANSION='⭐'
# Transient prompt works similarly to the builtin transient_rprompt option. It trims down prompt
# when accepting a command line. Supported values:
#
# - off: Don't change prompt when accepting a command line.
# - always: Trim down prompt when accepting a command line.
# - same-dir: Trim down prompt when accepting a command line unless this is the first command
# typed after changing current working directory.
typeset -g POWERLEVEL9K_TRANSIENT_PROMPT=off
# Instant prompt mode.
#
# - off: Disable instant prompt. Choose this if you've tried instant prompt and found
# it incompatible with your zsh configuration files.
# - quiet: Enable instant prompt and don't print warnings when detecting console output
# during zsh initialization. Choose this if you've read and understood
# https://github.com/romkatv/powerlevel10k/blob/master/README.md#instant-prompt.
# - verbose: Enable instant prompt and print a warning when detecting console output during
# zsh initialization. Choose this if you've never tried instant prompt, haven't
# seen the warning, or if you are unsure what this all means.
typeset -g POWERLEVEL9K_INSTANT_PROMPT=verbose
# Hot reload allows you to change POWERLEVEL9K options after Powerlevel10k has been initialized.
# For example, you can type POWERLEVEL9K_BACKGROUND=red and see your prompt turn red. Hot reload
# can slow down prompt by 1-2 milliseconds, so it's better to keep it turned off unless you
# really need it.
typeset -g POWERLEVEL9K_DISABLE_HOT_RELOAD=true
# If p10k is already loaded, reload configuration.
# This works even with POWERLEVEL9K_DISABLE_HOT_RELOAD=true.
(( ! $+functions[p10k] )) || p10k reload
}
# Restore the shell options snapshotted into p10k_config_opts (presumably at
# the top of this config file, outside this view — confirm), then drop the
# temporary array. 'builtin' quoting ensures aliases/functions can't shadow it.
(( ${#p10k_config_opts} )) && setopt ${p10k_config_opts[@]}
'builtin' 'unset' 'p10k_config_opts'
|
#!/usr/bin/env bash
# Tag every Go module in this repository: for each go.mod found, create a git
# tag of the form "<module-dir>/<version>" (just "<version>" at the repo root).
version="$1"
if [ -z "$version" ]; then
  echo "version is empty!"
  exit 1
fi
# Fixes: IFS=/read -r so paths with backslashes survive intact, and "$t" is
# quoted to prevent word splitting/globbing of the computed tag name.
find . -name "go.mod" | sed 's~\./~~' | sed "s/go.mod/${version}/" | while IFS= read -r t; do
  git tag "$t"
done
|
<reponame>jorgedemetrio/social-midia-manager
/**
*
*/
package com.br.alldreams.socialmidia.conf;
import javax.persistence.EntityManagerFactory;
import javax.sql.DataSource;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.jdbc.datasource.DriverManagerDataSource;
import org.springframework.orm.jpa.JpaTransactionManager;
import org.springframework.orm.jpa.JpaVendorAdapter;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.orm.jpa.vendor.HibernateJpaVendorAdapter;
import com.br.alldreams.socialmidia.conf.beans.DatabaseDataConfBeans;
import lombok.extern.slf4j.Slf4j;
/**
 * Spring configuration wiring the MySQL {@link DataSource} and the JPA
 * infrastructure (entity manager factory, vendor adapter, transaction manager).
 *
 * @author <NAME>
 * @since 19 de abr de 2019 18:10:33
 * @version 1.0
 */
@Slf4j
@Configuration
public class Database {

    /** Externalized connection settings (host/URL, user, password). */
    @Autowired
    private DatabaseDataConfBeans config;

    /**
     * Builds the JDBC {@link DataSource} from the configured credentials.
     * NOTE(review): {@code config.getHost()} is used as the full JDBC URL —
     * confirm the property actually holds e.g. {@code jdbc:mysql://host/db}.
     */
    @Bean
    public DataSource dataSource() {
        DriverManagerDataSource database = null;
        try {
            database = new DriverManagerDataSource();
            database.setDriverClassName("com.mysql.cj.jdbc.Driver");
            database.setUrl(config.getHost());
            database.setUsername(config.getUser());
            database.setPassword(config.getPass());
        } catch (Exception e) {
            log.error(e.getMessage(), e);
            throw e;
        }
        return database;
    }

    /**
     * Entity manager factory scanning the {@code com.br.alldreams} package
     * for JPA entities.
     */
    @Bean
    public LocalContainerEntityManagerFactoryBean entityManagerFactory(DataSource dataSource, JpaVendorAdapter jpaVendorAdapter) {
        LocalContainerEntityManagerFactoryBean bean = new LocalContainerEntityManagerFactoryBean();
        bean.setDataSource(dataSource);
        bean.setJpaVendorAdapter(jpaVendorAdapter);
        bean.setPackagesToScan("com.br.alldreams");
        return bean;
    }

    /**
     * Hibernate vendor adapter targeting MySQL with DDL generation and SQL
     * logging enabled. (A duplicate {@code setGenerateDdl(true)} call was
     * removed — it had no additional effect.)
     */
    @Bean
    public JpaVendorAdapter jpaVendorAdapter() {
        HibernateJpaVendorAdapter bean = new HibernateJpaVendorAdapter();
        bean.setDatabase(org.springframework.orm.jpa.vendor.Database.MYSQL);
        bean.setGenerateDdl(true);
        bean.setShowSql(true);
        return bean;
    }

    /** Standard JPA transaction manager bound to the entity manager factory. */
    @Bean
    public JpaTransactionManager transactionManager(EntityManagerFactory emf) {
        return new JpaTransactionManager(emf);
    }
}
|
from distutils.core import setup

# NOTE(review): distutils is deprecated and removed in Python 3.12; consider
# migrating to setuptools when the build toolchain allows.

# Read the long description with an explicit encoding and without leaking the
# file handle (the original used a bare open() that was never closed).
with open("README.md", encoding="utf-8") as readme:
    long_description = readme.read()

setup(
    name="irt-data",
    version="1.2",
    packages=[
        "irt",
        "irt.data",
        "irt.graph",
        "irt.text",
        "irt.common",
    ],
    license="MIT",
    author="<NAME>",
    author_email="<EMAIL>",
    description="Inductive Reasoning with Text - Benchmarks",
    long_description=long_description,
    long_description_content_type="text/markdown",
    install_requires=[
        "pyyaml==5.*",
        "networkx==2.*",
        "gitpython==3.*",
        "tqdm",
        "jupyter",
        "tabulate",
        "matplotlib",
    ],
)
|
#!/bin/bash
set -e
# Push the contents of urls.txt into redis as a work queue.
# Entries that were already enqueued are skipped.
. ./lib/redis-helper.sh
. ./lib/url-helper.sh
namespace="vscovid-crawler"
for url in `cat urls.txt`; do
  # Normalize the URL: drop the first 9 characters and the trailing character,
  # then append a literal "l".
  # NOTE(review): the appended "l" looks suspicious — confirm the urls.txt line
  # format really loses a trailing "l" when the last character is stripped.
  url=${url:9:-1}l
  echo path $url
  domain=`get_domain_by_url $url`
  echo domain $domain
  # Look up the replacement host for this domain in the CSVs under ./data
  # (third comma-separated column).
  host=`grep $domain --include="*.csv" ./data/*|cut -d',' -f 3`
  echo host $host
  # Swap the domain for the resolved host inside the URL.
  url=${url/$domain/$host}
  echo url $url
  md5=`get_md5_by_url $url`
  echo $md5
  # Only enqueue if the URL's hash is not already present in redis.
  is_exists=`redis_exists_md5 $namespace $md5`
  if [ $is_exists = "0" ]; then
    redis-cli SET "$namespace:queue-$md5" $url
  fi
done
|
import React from 'react';
import { Link } from 'react-router-dom';
import logo from './logo.png';
import './style.scss';
const Header = () => (
<header>
<img className="logo" src={logo} alt={logo} />
<nav>
<ul>
<li>
<Link to="/">Drag & Drop</Link>
</li>
<li>
<Link to="/usedraggable">UseDraggable</Link>
</li>
</ul>
</nav>
</header>
);
export default Header;
|
// Inline style for the filter element's outer container: thin black border.
var containerStyle = {
  border: 'solid',
  borderWidth: 0.5,
  borderColor: '#000'
};

var style = { container: containerStyle };
/**
 * Legacy React.createClass component showing a count for one filter category,
 * with a "Delete All" button and an "Autodelete" checkbox that clicks the
 * button automatically whenever the count is positive.
 */
var FilterElement = React.createClass({
  displayName: 'FilterElement',
  getInitialState: function () {
    return {
      autodelete: false,
      counts: 0
    };
  },
  getDefaultProps: function () {
    return {
      counts: 0
    };
  },
  componentDidMount: function () {
    this.setState({ counts: this.props.counts });
  },
  // NOTE(review): componentWillReceiveProps is deprecated in modern React but
  // is the supported hook for the createClass API used here.
  componentWillReceiveProps: function (nextProps) {
    var self = this;
    // Run autodelete only after the new count has been committed to state.
    self.setState({ counts: nextProps.counts }, function () {
      self.runAutoDelete();
    });
  },
  // Simulate a click on the "Delete All" button when autodelete is enabled
  // and there is something to delete.
  runAutoDelete: function () {
    if (this.state.autodelete) {
      console.log('running autodelete');
      if (this.state.counts > 0) {
        this.refs.deleteAllButton.click();
      }
    }
  },
  // Renders the "Delete All" button; the command slug rides along in a
  // data attribute so handleCommand can identify which filter fired.
  deleteAllButton: function () {
    return React.createElement(
      'button',
      {
        onClick: this.props.handleCommand,
        'data-command': this.props.cmdSlug,
        className: 'btn btn-default',
        ref: 'deleteAllButton'
      },
      'Delete All'
    );
  },
  handleCheckboxChange: function (e) {
    console.log(e.target.dataset);
    var self = this;
    // Bug fix: `state` was assigned without a declaration, creating an
    // implicit global shared by every FilterElement instance.
    var state = {};
    state[self.refs.deleteAllButton.dataset.command] = e.target.checked;
    self.props.setAutodeleteState(state);
    // Inform the backend that autodelete was toggled for this command.
    self.props.sendCommand('toggleAutodelete', {
      command: self.refs.deleteAllButton.dataset.command,
      enabled: e.target.checked
    });
  },
  // Hide the manual "Delete All" button while autodelete is checked.
  deleteButtonStyles: function () {
    if (this.refs.autodelete == undefined) {
      return {};
    }
    if (this.refs.autodelete.checked) {
      console.log(this.refs.autodelete);
      return { display: 'none' };
    } else {
      return {};
    }
  },
  render: function () {
    return React.createElement(
      'div',
      { style: style.container },
      React.createElement(
        'div',
        { className: 'row' },
        React.createElement(
          'div',
          { className: 'text-center' },
          React.createElement(
            'h4',
            null,
            this.props.title
          )
        )
      ),
      React.createElement(
        'div',
        { className: 'row' },
        React.createElement(
          'div',
          { className: 'text-center' },
          React.createElement(
            'h3',
            null,
            this.props.counts
          )
        )
      ),
      React.createElement(
        'div',
        { className: 'row' },
        React.createElement(
          'div',
          { className: 'text-center' },
          React.createElement(
            'label',
            null,
            'Autodelete',
            React.createElement('input', { ref: 'autodelete', type: 'checkbox', onChange: this.handleCheckboxChange })
          )
        )
      ),
      React.createElement(
        'div',
        { className: 'text-center', style: this.deleteButtonStyles() },
        this.deleteAllButton()
      )
    );
  }
});
module.exports = FilterElement;
<reponame>chrishumboldt/rocket-utility
/**
* @author <NAME>
*/
import { RocketIs } from '../is/is.utility';
/**
 * Convert the entire string to lowercase.
 *
 * @param data - The string to modify.
 */
function lowercaseAll(data: string = ''): string {
  const text = data.toString();
  return text.toLowerCase();
}
/**
 * Lowercase only the first letter of the string.
 *
 * @param data - The string to modify.
 */
function lowercaseFirst(data: string = ''): string {
  const text = data.toString();
  const head = text.slice(0, 1).toLowerCase();
  return head + text.slice(1);
}
/**
 * Lowercase only the last letter of the string.
 *
 * @param data - The string to modify.
 */
function lowercaseLast(data: string = ''): string {
  const text = data.toString();
  const cut = text.length - 1;
  return text.substring(0, cut) + text.substring(cut).toLowerCase();
}
/**
 * Drop the first character of a string.
 *
 * @param data - The string to modify.
 */
function removeFirst(data: string = ''): string {
  return data.toString().slice(1);
}
/**
 * Drop both the first and last characters of a string.
 *
 * @param data - The string to modify.
 */
function removeFirstAndLast(data: string = ''): string {
  const text = data.toString();
  const end = text.length - 1;
  // substring clamps/swaps out-of-range arguments, so a one-character
  // input comes back unchanged (matching the original behavior).
  return text.substring(1, end);
}
/**
 * Drop the last character of a string.
 *
 * @param data - The string to modify.
 */
function removeLast(data: string = ''): string {
  const text = data.toString();
  return text.slice(0, -1);
}
/**
 * Strip every space character (ASCII 0x20 only) from a string.
 *
 * @param data - The string to modify.
 */
function removeSpaces(data: string = ''): string {
  return data.toString().split(' ').join('');
}
/**
 * Uppercase the whole string.
 *
 * Consistency fix: every sibling helper in this module coerces its input via
 * toString() before transforming it; this one previously used a RocketIs
 * string guard and silently returned non-string input unchanged. It now
 * coerces like the rest (e.g. lowercaseAll).
 *
 * @param data - The string to modify.
 */
function uppercaseAll(data: string = ''): string {
  return data.toString().toUpperCase();
}
/**
 * Uppercase only the first letter of the string.
 *
 * @param data - The string to modify.
 */
function uppercaseFirst(data: string = ''): string {
  const text = data.toString();
  const head = text.slice(0, 1).toUpperCase();
  return head + text.slice(1);
}
/**
 * Uppercase only the last letter of the string.
 *
 * @param data - The string to modify.
 */
function uppercaseLast(data: string = ''): string {
  const text = data.toString();
  const cut = text.length - 1;
  return text.substring(0, cut) + text.substring(cut).toUpperCase();
}
// Public facade grouping the string helpers above by operation:
// RocketString.remove.*, RocketString.lowercase.*, RocketString.uppercase.*.
export const RocketString = {
   remove: {
      first: removeFirst,
      firstAndLast: removeFirstAndLast,
      last: removeLast,
      spaces: removeSpaces
   },
   lowercase: {
      all: lowercaseAll,
      first: lowercaseFirst,
      last: lowercaseLast
   },
   uppercase: {
      all: uppercaseAll,
      first: uppercaseFirst,
      last: uppercaseLast
   }
};
|
<filename>app/components/directives/general/box-directive.js<gh_stars>0
// <box-directive> element: renders the shared box template with optional
// content, icon, background, link target and button, falling back to
// defaults for any binding the caller omitted.
myApp.compileProvider.directive('boxDirective', function() {
  return {
    restrict: 'E',
    templateUrl: function() {
      return "/app/components/directives/general/views/box-template.html";
    },
    scope: {
      content: "=?",
      icon: "=?",
      background: "=?",
      link: "=?",
      button: "=?"
    },
    controller: function( $scope, $state, $stateParams ) {
      // Navigate to the state bound via the `link` attribute.
      $scope.changeState = function() {
        $state.go($scope.link, { reload: true });
      };
      // Apply defaults only when the binding was not supplied (the original
      // re-assigned supplied values to themselves, which was a no-op).
      if (!$scope.content) {
        $scope.content = "This is Empty";
      }
      if (!$scope.icon) {
        $scope.icon = "notification.png";
      }
      if (!$scope.background) {
        $scope.background = "bg-white bg-font-white";
      }
      // NOTE(review): `$scope.url` is never declared in the scope map above,
      // so this branch mirrors the original's dead `else if ($scope.url)`
      // path — confirm whether a `url` binding was intended.
      if (!$scope.link && !$scope.url) {
        $scope.link = "dashboard";
      }
      $scope.showbutton = !!$scope.button;
    }
  };
});
#!/bin/bash
#
# * build directory is /usr/src/CMake
#
# * install directory is /usr
#
# * after installation, archive, source and build directories are removed
#
set -ex
WRAPPER=""
while [ $# -gt 0 ]; do
  case "$1" in
    -32)
      WRAPPER="linux32"
      ;;
    *)
      echo "Usage: Usage: ${0##*/} [-32]"
      exit 1
      ;;
  esac
  shift
done
if ! command -v git &> /dev/null; then
  echo >&2 'error: "git" not found!'
  exit 1
fi
if [[ "${CMAKE_VERSION}" == "" ]]; then
  echo >&2 'error: CMAKE_VERSION env. variable must be set to a non-empty value'
  exit 1
fi
cd /usr/src
git clone git://cmake.org/cmake.git CMake
(cd CMake && git checkout v$CMAKE_VERSION)
mkdir /usr/src/CMake-build
cd /usr/src/CMake-build
# Stage 1: bootstrap a minimal cmake without SSL.
${WRAPPER} /usr/src/CMake/bootstrap \
  --parallel=$(grep -c processor /proc/cpuinfo)
${WRAPPER} make -j$(grep -c processor /proc/cpuinfo)
# Stage 2: use the bootstrapped cmake to build the real, OpenSSL-enabled one.
mkdir /usr/src/CMake-ssl-build
cd /usr/src/CMake-ssl-build
${WRAPPER} /usr/src/CMake-build/bin/cmake \
  -DCMAKE_BUILD_TYPE:STRING=Release \
  -DBUILD_TESTING:BOOL=ON \
  -DCMAKE_INSTALL_PREFIX:PATH=/usr/src/cmake-$CMAKE_VERSION \
  -DCMAKE_USE_OPENSSL:BOOL=ON \
  -DOPENSSL_ROOT_DIR:PATH=/usr/local/ssl \
  ../CMake
${WRAPPER} make -j$(grep -c processor /proc/cpuinfo) install
cd /usr/src/cmake-$CMAKE_VERSION
rm -rf doc man
find . -type f -exec install -D "{}" "/usr/{}" \;
# Test
ctest -R CMake.FileDownload
# Write test script.
# Bug fix: the heredoc delimiter is now quoted ('EOF') so ${url} and
# ${error_msg} reach the CMake script literally. Previously the shell expanded
# both to empty strings at write time (neither is a shell variable), which
# gutted the FATAL_ERROR message.
cat <<'EOF' > cmake-test-https-download.cmake
file(
  DOWNLOAD https://raw.githubusercontent.com/Kitware/CMake/master/README.rst /tmp/README.rst
  STATUS status
  )
list(GET status 0 error_code)
list(GET status 1 error_msg)
if(error_code)
  message(FATAL_ERROR "error: Failed to download ${url} - ${error_msg}")
else()
  message(STATUS "CMake: HTTPS download works")
endif()
file(REMOVE /tmp/README.rst)
EOF
# Execute test script
cmake -P cmake-test-https-download.cmake
rm -rf /usr/src/CMake*
|
import ast
import pathlib
import pprint
from flake8_rst_docstrings import Plugin
def parse_python_file(file_path):
    """Summarize the top-level structure of a Python source file.

    Walks the AST and records every function definition, class definition,
    and ``import`` statement found anywhere in the file.

    Args:
        file_path: Path (``str`` or ``pathlib.Path``) to the file to parse.

    Returns:
        list[dict]: One dict per matching node with keys ``node_type``
        (the AST class name), ``name`` (empty string for ``Import`` nodes,
        which have no ``name`` attribute), and ``lineno``.

    Raises:
        SyntaxError: If the file is not valid Python.
    """
    file_content = pathlib.Path(file_path).read_text()
    tree = ast.parse(file_content)
    # Dead code removed: the original also ran flake8_rst_docstrings'
    # Plugin over the tree and discarded the results entirely.
    parsed_nodes = []
    for node in ast.walk(tree):
        if isinstance(node, (ast.FunctionDef, ast.ClassDef, ast.Import)):
            parsed_nodes.append({
                "node_type": type(node).__name__,
                "name": getattr(node, "name", ""),
                "lineno": node.lineno,
            })
    return parsed_nodes
# Example usage
# NOTE(review): this runs at import time and expects an "example.py" in the
# current working directory — consider guarding it with
# `if __name__ == "__main__":` (kept as-is to preserve module behavior).
file_path = "example.py"
parsed_structure = parse_python_file(file_path)
pprint.pprint(parsed_structure)
<reponame>kikkia/Vinny-Redux<gh_stars>10-100
package com.bot.commands.alias;
import com.bot.commands.ModerationCommand;
import com.bot.db.AliasDAO;
import com.bot.db.GuildDAO;
import com.bot.models.Alias;
import com.bot.models.InternalGuild;
import com.bot.utils.AliasUtils;
import com.bot.utils.ConstantStrings;
import com.jagrosh.jdautilities.command.CommandEvent;
import com.jagrosh.jdautilities.commons.waiter.EventWaiter;
import datadog.trace.api.Trace;
import net.dv8tion.jda.api.events.message.MessageReceivedEvent;
import java.sql.SQLException;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;
/**
 * Moderation command that interactively creates a guild-wide alias in two
 * steps: step one collects and validates the alias trigger, step two collects
 * the command it expands to, then the alias is persisted and the cached guild
 * is refreshed.
 */
public class AddGuildAliasCommand extends ModerationCommand {

    // Waits for the invoking user's follow-up messages during setup.
    private EventWaiter waiter;
    private AliasDAO aliasDAO;
    private GuildDAO guildDAO;

    public AddGuildAliasCommand(EventWaiter waiter) {
        this.name = "addgalias";
        this.aliases = new String[]{"addguildalias"};
        this.help = "Adds an alias that will apply everywhere on the guild";
        this.waiter = waiter;
        this.aliasDAO = AliasDAO.getInstance();
        this.guildDAO = GuildDAO.getInstance();
    }

    // Step by step walkthrough for making an alias
    @Override
    @Trace(operationName = "executeCommand", resourceName = "AddGuildAlias")
    protected void executeCommand(CommandEvent commandEvent) {
        commandEvent.reply(ConstantStrings.GUILD_ALIAS_SETUP_HELLO);
        // Only react to a *new* message from the same author in the same channel.
        waiter.waitForEvent(MessageReceivedEvent.class,
                e -> e.getAuthor().equals(commandEvent.getAuthor())
                        && e.getChannel().equals(commandEvent.getChannel())
                        && !e.getMessage().equals(commandEvent.getMessage()),
                new StepOneConsumer(commandEvent),
                // if the user takes more than a minute, time out
                1, TimeUnit.MINUTES, () -> commandEvent.reply(ConstantStrings.EVENT_WAITER_TIMEOUT));
    }

    /** Step one: validate the proposed trigger and ask for the target command. */
    class StepOneConsumer implements Consumer<MessageReceivedEvent> {
        private String alias;
        private CommandEvent commandEvent;

        public StepOneConsumer(CommandEvent commandEvent) {
            this.commandEvent = commandEvent;
        }

        @Override
        public void accept(MessageReceivedEvent event) {
            // Logic for accepting the alias
            // No other alias for the guild with the same trigger exists.
            InternalGuild guild = guildDAO.getGuildById(event.getMessage().getGuild().getId());
            alias = event.getMessage().getContentRaw();
            if (alias.length() > 250) {
                commandEvent.replyWarning("Aliases can be no longer than 250 characters. Please try again.");
                return;
            }
            Alias existing = guild.getAliasList().get(alias);
            if (existing != null) {
                commandEvent.replyWarning("There already exists an alias for this trigger in this guild. Please use another trigger");
                return;
            }
            // There is no existing alias, continue on to the next step
            commandEvent.replySuccess(ConstantStrings.ALIAS_STEP_ONE_COMPLETE_PART_1 +
                    alias + ConstantStrings.ALIAS_STEP_ONE_COMPLETE_PART_2);
            // NOTE(review): unlike step one's predicate, this one only matches
            // on author (not channel) — confirm that is intentional.
            waiter.waitForEvent(MessageReceivedEvent.class,
                    e -> e.getAuthor().equals(event.getAuthor()),
                    new StepTwoConsumer(commandEvent, alias),
                    1,
                    TimeUnit.MINUTES, () -> commandEvent.replyWarning(ConstantStrings.EVENT_WAITER_TIMEOUT));
        }
    }

    /** Step two: validate the target command, persist the alias, refresh the cache. */
    class StepTwoConsumer implements Consumer<MessageReceivedEvent> {
        private String alias;
        private String command;
        private CommandEvent commandEvent;

        public StepTwoConsumer(CommandEvent commandEvent, String alias) {
            this.commandEvent = commandEvent;
            this.alias = alias;
        }

        @Override
        public void accept(MessageReceivedEvent event) {
            // TODO: Possible more input validation
            boolean isValid = AliasUtils.confirmValidCommandName(event.getMessage().getContentRaw().split(" ")[0]);
            // Stored command is prefixed with the bot's "~" invocation prefix.
            command = "~" + event.getMessage().getContentRaw();
            if (!isValid) {
                commandEvent.replyWarning("That does not seem like it would trigger any commands. Please try again.");
            } else if (event.getMessage().getContentRaw().length() > 500) {
                commandEvent.replyWarning("Commands can not be longer than 500 characters. Please try again.");
            } else {
                Alias toPut = new Alias(alias, command, commandEvent.getGuild().getId(), commandEvent.getAuthor().getId());
                try {
                    aliasDAO.addGuildAlias(toPut);
                    // Update cached guild
                    InternalGuild guild = guildDAO.getGuildById(event.getGuild().getId());
                    guild.getAliasList().put(toPut.getAlias(), toPut);
                    guildDAO.updateGuildInCache(guild);
                } catch (SQLException e) {
                    commandEvent.replyError("Something went wrong writing the alias to the db.");
                    logger.severe("Error encountered trying to write guild alias", e);
                }
                commandEvent.replySuccess(ConstantStrings.ALIAS_SUCCESSFULLY_ADDED);
            }
        }
    }
}
|
'''
Created on Oct 14, 2014
@author: stefan
'''
from parser import stpcommands
from ciphers.cipher import AbstractCipher
from parser.stpcommands import getStringLeftRotate as rotl
class KeccakCipher(AbstractCipher):
    """
    This class provides a model for the Keccak hash function by
    <NAME>, <NAME>, <NAME> and <NAME>.
    For more information on Keccak see http://keccak.noekeon.org/
    """

    name = "keccak"

    # Rho-step rotation offsets, indexed as RO[x][y].
    RO = [[0, 36, 3, 41, 18],
          [1, 44, 10, 45, 2],
          [62, 6, 43, 15, 61],
          [28, 55, 25, 21, 56],
          [27, 20, 39, 8, 14]]

    # Iota-step round constants as STP hex literals.
    # NOTE(review): these are 16-bit values, not the full 64-bit Keccak round
    # constants — confirm they are the intended constants for the modeled
    # word size.
    RC = ["0hex0001", "0hex8082", "0hex808A", "0hex8000", "0hex808B",
          "0hex0001", "0hex8081", "0hex8009"]

    def getFormatString(self):
        # Column order of the 5x5 state lanes in the solver output.
        return ['s00', 's10', 's20', 's30', 's40',
                's01', 's11', 's21', 's31', 's41',
                's02', 's12', 's22', 's32', 's42',
                's03', 's13', 's23', 's33', 's43',
                's04', 's14', 's24', 's34', 's44']

    def createSTP(self, stp_filename, parameters):
        """
        Creates an STP file to find a preimage for Keccak.

        parameters must provide "wordsize", "rounds" and "fixedVariables";
        "rate" and "capacity" are optional and must sum to 25 * wordsize.
        """
        wordsize = parameters["wordsize"]
        rounds = parameters["rounds"]
        # Default rate and capacity
        capacity = 160
        rate = (wordsize * 25) - capacity
        if "rate" in parameters:
            rate = parameters["rate"]
        if "capacity" in parameters:
            capacity = parameters["capacity"]
        # The sponge state is exactly 25 lanes of `wordsize` bits.
        assert (rate + capacity) == wordsize * 25
        with open(stp_filename, 'w') as stp_file:
            # NOTE(review): the header format string has three placeholders
            # but four arguments — `rounds` is silently ignored here.
            stp_file.write("% Input File for STP\n% Keccak w={} rate={} "
                           "capacity={}\n\n\n".format(wordsize, rate, capacity,
                                                      rounds))
            # Setup variables
            # 5x5 lanes of wordsize; s = state, a/b = per-round intermediates,
            # c/d = per-column theta parity values.
            s = ["s{}{}{}".format(x, y, i) for i in range(rounds + 1)
                 for y in range(5) for x in range(5)]
            a = ["a{}{}{}".format(x, y, i) for i in range(rounds + 1)
                 for y in range(5) for x in range(5)]
            b = ["b{}{}{}".format(x, y, i) for i in range(rounds + 1)
                 for y in range(5) for x in range(5)]
            c = ["c{}{}".format(x, i) for i in range(rounds + 1) for x in range(5)]
            d = ["d{}{}".format(x, i) for i in range(rounds + 1) for x in range(5)]
            stpcommands.setupVariables(stp_file, s, wordsize)
            stpcommands.setupVariables(stp_file, a, wordsize)
            stpcommands.setupVariables(stp_file, b, wordsize)
            stpcommands.setupVariables(stp_file, c, wordsize)
            stpcommands.setupVariables(stp_file, d, wordsize)
            # Fix variables for capacity, only works if rate/capacity
            # is multiple of wordsize
            for i in range(rate // wordsize, (rate + capacity) // wordsize):
                stpcommands.assertVariableValue(stp_file, s[i], "0hex{}".format(
                    "0" * (wordsize // 4)))
            for rnd in range(rounds):
                self.setupKeccakRound(stp_file, rnd, s, a, b, c, d, wordsize)
            # Pin any user-supplied variable assignments (e.g. the target hash).
            for key, value in parameters["fixedVariables"].items():
                stpcommands.assertVariableValue(stp_file, key, value)
            stpcommands.setupQuery(stp_file)
        return

    def setupKeccakRound(self, stp_file, rnd, s, a, b, c, d, wordsize):
        """
        Model for one round of Keccak.

        Emits STP ASSERTs for the theta, rho, pi, chi and iota steps mapping
        state s[.., rnd] to s[.., rnd + 1]. Lanes are flat-indexed as
        x + 5*y + 25*round.
        """
        command = ""
        # Theta, part 1: compute Parity for each column
        for i in range(5):
            command += "ASSERT({} = BVXOR({}, BVXOR({}, BVXOR({}, BVXOR({}, {})))));\n".format(
                c[i + 5*rnd], s[i + 5*0 + 25*rnd], s[i + 5*1 + 25*rnd],
                s[i + 5*2 + 25*rnd], s[i + 5*3 + 25*rnd], s[i + 5*4 + 25*rnd])
        # Theta, part 2: compute intermediate values d from neighboring parities
        for i in range(5):
            command += "ASSERT({} = BVXOR({}, {}));\n".format(
                d[i + 5*rnd], c[(i - 1) % 5 + 5*rnd],
                rotl(c[(i + 1) % 5 + 5*rnd], 1, wordsize))
        # Rho and Pi: rotate each lane and permute its position
        for x in range(5):
            for y in range(5):
                # x + 5*y + 25*rnd -> y + 5*((2*x + 3*y) % 5) + 25*rnd
                new_b_index = y + 5*((2*x + 3*y) % 5) + 25*rnd
                tmp_xor = "BVXOR({}, {})".format(s[x + 5*y + 25*rnd], d[x + 5*rnd])
                command += "ASSERT({} = {});\n".format(
                    b[new_b_index], rotl(tmp_xor, self.RO[x][y], wordsize))
        # Chi: the only nonlinear step
        for x in range(5):
            for y in range(5):
                chiTmp = "BVXOR({}, ~{} & {})".format(b[(x + 0) % 5 + 5*y + 25*rnd],
                                                      b[(x + 1) % 5 + 5*y + 25*rnd],
                                                      b[(x + 2) % 5 + 5*y + 25*rnd])
                command += "ASSERT({} = {});\n".format(a[x + 5*y + 25*rnd], chiTmp)
        # Iota: add rnd constant to lane (0, 0) only
        for x in range(5):
            for y in range(5):
                if x == 0 and y == 0:
                    command += "ASSERT({} = BVXOR({}, {}));\n".format(
                        s[25*(rnd + 1)], a[25*rnd], self.RC[rnd])
                else:
                    command += "ASSERT({} = {});\n".format(
                        s[x + 5*y + 25*(rnd + 1)], a[x + 5*y + 25*rnd])
        stp_file.write(command)
        return
|
import { Component, OnInit } from '@angular/core';
@Component({
  selector: 'app-credentials',
  templateUrl: './credentials.component.html',
  styleUrls: ['./credentials.component.scss']
})
export class CredentialsComponent implements OnInit {

  constructor() { }

  ngOnInit() {
  }

  // Hard-coded sample W3C Verifiable Credentials rendered by the template.
  // NOTE(review): both entries are identical placeholders — presumably to be
  // replaced by data fetched from a wallet/ledger; confirm before release.
  messages = [
    {
      image: 'assets/img/certificate-icon.jpg', // Icons should be fetched of blockchain network such as IPFS
      credential:
      {
        "@context": "https://www.w3.org/2018/credentials/v1",
        "id": "link:discipl:ephemeral:394799234772934879324...",
        "type": ["VerifiableCredential", "Uittreksel-GBA-woonplaats"],
        "issuer": "did:discipl:nlx:x509:2349837EF9032783278CD93434...",
        "issuanceDate": "2017-01-01T19:23:24Z",
        "credentialSubject": {
          "BSN": "123456789",
          "CityLivingIn": "The Hague"
        }
      }
    },
    {
      image: 'assets/img/certificate-icon.jpg', // Icons should be fetched of blockchain network such as IPFS
      credential: {
        "@context": "https://www.w3.org/2018/credentials/v1",
        "id": "link:discipl:ephemeral:394799234772934879324...",
        "type": ["VerifiableCredential", "Uittreksel-GBA-woonplaats"],
        "issuer": "did:discipl:nlx:x509:2349837EF9032783278CD93434...",
        "issuanceDate": "2017-01-01T19:23:24Z",
        "credentialSubject": {
          "BSN": "123456789",
          "CityLivingIn": "The Hague"
        }
      }
    },
  ]
}
|
package cn.stylefeng.guns.onlineaccess.modular.mapper;
import cn.stylefeng.guns.onlineaccess.modular.entity.DataTypeDirector;
import com.baomidou.mybatisplus.core.mapper.BaseMapper;
import org.apache.ibatis.annotations.Mapper;
/**
 * MyBatis-Plus mapper for {@link DataTypeDirector}; inherits the generic CRUD
 * operations from {@link BaseMapper} and adds no custom queries.
 */
@Mapper
public interface DataTypeDirectorMapper extends BaseMapper<DataTypeDirector> {
}
|
package AulaSeis;
import java.util.Scanner;
/**
 * Console exercise: reads the passing average and absence limit, then keeps
 * asking for grades until the student passes, reporting how many failed
 * attempts it took.
 */
public class BoletimPersistente {

    // Minimum average required to pass.
    private Double MediaAprovacao;
    // Maximum number of absences allowed.
    private int NumeroFaltas;

    /** Reads the pass criteria from stdin and starts the attempt loop at zero. */
    public void run() {
        Scanner readLine = new Scanner(System.in);
        System.out.println("Qual a média de aprovação?");
        this.MediaAprovacao = Double.parseDouble(readLine.nextLine());
        System.out.println("Qual o máximo de faltas?");
        this.NumeroFaltas = Integer.parseInt(readLine.nextLine());
        this.cadastrarNotasFrequencia(0);
    }

    /**
     * Reads two grades and the absence count; recurses with an incremented
     * attempt counter until the student passes.
     *
     * @param n number of failed attempts so far
     */
    private void cadastrarNotasFrequencia( int n ) {
        Scanner readLine = new Scanner(System.in);
        System.out.println("Qual a nota um?");
        Double NotaUm = Double.parseDouble(readLine.nextLine());
        System.out.println("Qual a nota dois?");
        Double NotaDois = Double.parseDouble(readLine.nextLine());
        System.out.println("Qual a quantidade de falas?");
        int Faltas = Integer.parseInt(readLine.nextLine());
        Double media = ((NotaUm+NotaDois)/2);
        if( media > this.MediaAprovacao && Faltas < this.NumeroFaltas)
        {
            System.out.println("Parabéns pela persistência, você tentou "+n+" vezes até passar em estrutura de dados!");
        } else {
            System.out.println("Tente novamentes");
            // Bug fix: the original passed n++ (post-increment), so the value
            // forwarded to the recursive call was always the unchanged n and
            // the attempt counter stayed at 0 forever. Pass n + 1 instead.
            this.cadastrarNotasFrequencia(n + 1);
        }
    }
}
|
import {colors} from '@material-ui/core';
import {
AddCircleOutline as AddCircleOutlineIcon,
RemoveCircleOutline as RemoveCircleOutlineIcon,
Edit as EditIcon
} from '@material-ui/icons';
// Build the three "lines added / deleted / changed" summary cards from the
// stats percentages, pairing each with its icon and accent color.
export const lines = (stats) => {
  const rows = [
    ['Lines Added', stats.addPercentage, AddCircleOutlineIcon, colors.indigo[500]],
    ['Lines Deleted', stats.delPercentage, RemoveCircleOutlineIcon, colors.red[600]],
    ['Lines Changed', stats.changePercentage, EditIcon, colors.orange[600]]
  ];
  return rows.map(([title, value, icon, color]) => ({ title, value, icon, color }));
};
// Placeholder pie-chart stats shown before any real data is loaded: one
// "No data" slice and all percentages zeroed.
export const defaultPieStats = {
  labels:["No data to show"],
  numbers:[0],
  total: 0,
  addPercentage: 0,
  delPercentage: 0,
  changePercentage: 0
}
// Shape `stats` into the Chart.js doughnut/pie data object, using the same
// three accent colors as the summary cards.
export const pieData = (stats) => {
  const palette = [
    colors.indigo[500],
    colors.red[600],
    colors.orange[600]
  ];
  const dataset = {
    data: stats.numbers,
    backgroundColor: palette,
    borderWidth: 8,
    borderColor: colors.common.white,
    hoverBorderColor: colors.common.white
  };
  return { datasets: [dataset], labels: stats.labels };
};
// Shape `stats` into a two-dataset Chart.js bar config: metrics on axis A,
// elapsed time (seconds) on axis B, styled from the active theme.
export const filesData = (stats, theme) => {
  const metricsDataset = {
    fill: false,
    backgroundColor: theme.palette.primary.main,
    borderColor: theme.palette.secondary.main,
    data: stats.numbers,
    yAxisID: 'A',
    label: 'Metrics',
    maxBarThickness: 10,
    barThickness: 12,
    barPercentage: 0.5,
    categoryPercentage: 0.5
  };
  const timeDataset = {
    fill: false,
    backgroundColor: theme.palette.secondary.main,
    borderColor: theme.palette.primary.main,
    data: stats.time,
    yAxisID: 'B',
    label: 'Time(s)',
    maxBarThickness: 10,
    barThickness: 12,
    barPercentage: 0.3,
    categoryPercentage: 0.5
  };
  return { datasets: [metricsDataset, timeDataset], labels: stats.labels };
};
// Shape `stats` into a single-series Chart.js line config ("Daily Metrics"),
// styled from the active theme.
export const lineData = (stats, theme) => {
  const dataset = {
    fill: false,
    backgroundColor: theme.palette.primary.main,
    borderColor: theme.palette.secondary.main,
    data: stats.numbers,
    label: 'Daily Metrics'
  };
  return { datasets: [dataset], labels: stats.labels };
};
// Chart.js options for the doughnut chart: no legend, 80% cutout, and
// theme-driven tooltip colors.
export const pieOptions = (theme) => {
  const tooltips = {
    backgroundColor: theme.palette.background.default,
    bodyFontColor: theme.palette.text.secondary,
    borderColor: theme.palette.divider,
    borderWidth: 1,
    enabled: true,
    footerFontColor: theme.palette.text.secondary,
    intersect: false,
    mode: 'index',
    titleFontColor: theme.palette.text.primary
  };
  return {
    animation: {
      animateRotate: true
    },
    cutoutPercentage: 80,
    layout: { padding: 0 },
    legend: {
      display: false
    },
    maintainAspectRatio: false,
    responsive: true,
    tooltips: tooltips
  };
};
// Chart.js options for the two-axis files bar chart: metrics on right axis
// 'A', time on left axis 'B', both starting at zero, with theme-driven grid
// lines and tooltips.
export const filesOptions = (theme) => {
  return{
    animation: {
      linear :true
    },
    cornerRadius: 20,
    layout: { padding: 0 },
    legend: { display: false },
    maintainAspectRatio: false,
    responsive: true,
    scales: {
      xAxes: [
        {
          ticks: {
            fontColor: theme.palette.text.secondary
          },
          gridLines: {
            display: false,
            drawBorder: false
          }
        }
      ],
      yAxes: [
        {
          // Axis 'A' (right): metrics dataset.
          position:'right',
          id: 'A',
          ticks: {
            fontColor: theme.palette.text.secondary,
            beginAtZero: true,
            min: 0
          },
          gridLines: {
            borderDash: [2],
            borderDashOffset: [2],
            color: theme.palette.divider,
            drawBorder: false,
            zeroLineBorderDash: [2],
            zeroLineBorderDashOffset: [2],
            zeroLineColor: theme.palette.divider
          }
        },
        {
          // Axis 'B' (left): elapsed-time dataset.
          position:'left',
          id: 'B',
          ticks: {
            fontColor: theme.palette.text.secondary,
            beginAtZero: true,
            min: 0
          },
          gridLines: {
            borderDash: [2],
            borderDashOffset: [2],
            color: theme.palette.divider,
            drawBorder: false,
            zeroLineBorderDash: [2],
            zeroLineBorderDashOffset: [2],
            zeroLineColor: theme.palette.divider
          }
        }
      ]
    },
    tooltips: {
      backgroundColor: theme.palette.background.default,
      bodyFontColor: theme.palette.text.secondary,
      borderColor: theme.palette.divider,
      borderWidth: 1,
      enabled: true,
      footerFontColor: theme.palette.text.secondary,
      intersect: false,
      mode: 'index',
      titleFontColor: theme.palette.text.primary
    }
  };
};
// Chart.js options for the daily-metrics line chart: single zero-based y
// axis, hidden legend, theme-driven grid lines and tooltips.
export const lineOptions = (theme) => {
  return {
    animation: {
      linear :true
    },
    cornerRadius: 20,
    layout: { padding: 0 },
    legend: { display: false },
    maintainAspectRatio: false,
    responsive: true,
    scales: {
      xAxes: [
        {
          barThickness: 12,
          maxBarThickness: 10,
          barPercentage: 0.5,
          categoryPercentage: 0.5,
          ticks: {
            fontColor: theme.palette.text.secondary
          },
          gridLines: {
            display: false,
            drawBorder: false
          }
        }
      ],
      yAxes: [
        {
          ticks: {
            fontColor: theme.palette.text.secondary,
            beginAtZero: true,
            min: 0
          },
          gridLines: {
            borderDash: [2],
            borderDashOffset: [2],
            color: theme.palette.divider,
            drawBorder: false,
            zeroLineBorderDash: [2],
            zeroLineBorderDashOffset: [2],
            zeroLineColor: theme.palette.divider
          }
        }
      ]
    },
    tooltips: {
      backgroundColor: theme.palette.background.default,
      bodyFontColor: theme.palette.text.secondary,
      borderColor: theme.palette.divider,
      borderWidth: 1,
      enabled: true,
      footerFontColor: theme.palette.text.secondary,
      intersect: false,
      mode: 'index',
      titleFontColor: theme.palette.text.primary
    }
  };
};
|
/*
* Licensed to the OpenAirInterface (OAI) Software Alliance under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The OpenAirInterface Software Alliance licenses this file to You under
* the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*-------------------------------------------------------------------------------
* For more information about the OpenAirInterface (OAI) Software Alliance:
* <EMAIL>
*/
#include <stdlib.h>
#include <stdio.h>
#include <stdint.h>
#include <unistd.h>
#include <string.h>
#include "config.h"
#include "test_utils.h"
#include "test_fd.h"
#include "auc.h"
/*
 * Helper: derive KASME from the given authentication vectors and compare
 * the result against the expected value; reports a unit-test failure via
 * fail() on mismatch.
 *
 * sn_id     - serving network identity (3 bytes, e.g. "024830" for 20834)
 * sqn       - sequence number (6 bytes)
 * ak        - anonymity key (6 bytes)
 * ck        - cipher key
 * ik        - integrity key
 * kasme_exp - expected 32-byte KASME
 */
static
void
do_derive_kasme (
  uint8_t * sn_id,
  uint8_t * sqn,
  uint8_t * ak,
  uint8_t * ck,
  uint8_t * ik,
  uint8_t * kasme_exp)
{
  uint8_t kasme[32];

  derive_kasme (ck, ik, sn_id, sqn, ak, kasme);
  /* compare_buffer returns 0 on equality */
  if (compare_buffer (kasme, 32, kasme_exp, 32) != 0) {
    fail ("Fail: derive_kasme\n");
  }
}
/*
 * Test entry point: exercises KASME derivation over combinations of
 * serving-network ids and sequence numbers against known expected values.
 *
 * NOTE(review): several CK arguments contain a "<KEY>" placeholder -- the
 * original key material was redacted, so these vectors cannot be
 * re-derived or verified from this file alone; restore the keys before
 * running the test.
 */
void
doit (
  void)
{
  /*
   * 20834 -> 024830 SNid
   */
  do_derive_kasme (H ("024830"), H ("FD8EEF40DF7D"), H ("AA689C648370"), H ("B<KEY>"), H ("F769BCD751044604127672711C6D3441"), H ("238E457E0F758BADBCA8D34BB2612C10" "428D426757CB5553B2B184FA64BFC549"));
  do_derive_kasme (H ("02F843"), H ("FD8EEF40DF7D"), H ("AA689C648370"), H ("B<KEY>"), H ("F769BCD751044604127672711C6D3441"), H ("BD7A0903A7D0F68767EE2F5C90CB7D7D" "835998D940AFDBF73173E63567C5B894"));
  do_derive_kasme (H ("21F354"), H ("FD8EEF40DF7D"), H ("AA689C648370"), H ("B40BA9A3C58B2A05BB<KEY>"), H ("F769BCD751044604127672711C6D3441"), H ("546A79BC6D1613A72A4D631EE0351D66" "036B2A0C44A3831BE6D365E24F023013"));
  do_derive_kasme (H ("024830"), H ("FF9BB4D0B607"), H ("AA689C648370"), H ("B<KEY>"), H ("F769BCD751044604127672711C6D3441"), H ("564CB4D2007E4F293B67D9B29392A64A" "DD4C776B133D895AF6499AA6882AAB62"));
  do_derive_kasme (H ("02F843"), H ("FF9BB4D0B607"), H ("AA689C648370"), H ("B<KEY>"), H ("F769BCD751044604127672711C6D3441"), H ("34865EB0DC9A6D788A905C0514529BF5" "88485DA817FFBE92E9A9B4D033B8CC6F"));
  do_derive_kasme (H ("21F354"), H ("FF9BB4D0B607"), H ("AA689C648370"), H ("B<KEY>"), H ("F769BCD751044604127672711C6D3441"), H ("9EA141DA4B24CDEBC8F5FB3F61A05112" "16681F121199B23EBCFACC75B358BE43"));
}
|
// Simple credit-card account model (declaration only; definitions elsewhere).
class CreditCard {
private:
    int cardNumber;      // NOTE(review): int cannot hold a real 16-digit card number -- confirm intended range
    int expirationDate;  // expiration encoded as an int (encoding not visible here -- e.g. MMYY? confirm)
    float balance;       // current outstanding balance
    float creditLimit;   // maximum balance allowed on the card
public:
    CreditCard(int cardNumber, int expirationDate);  // Construct with card number and expiration
    bool charge(float amount);            // Charge the CreditCard (bool presumably signals success -- confirm)
    bool pay(float amount);               // Pay amount to the CreditCard
    float getBalance();                   // Return balance
    float getCreditLimit();               // Return credit limit
    void setCreditLimit(float newLimit);  // Set Credit Limit
};
<filename>C2CRIBuildDir/projects/C2C-RI/src/RICenterServices/src/org/enterprisepower/net/portforward/Listener.java
/*
* Copyright 2002-2007 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.enterprisepower.net.portforward;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import java.net.*;
import java.io.*;
import java.util.ArrayList;
import java.util.HashMap;
import javax.net.ServerSocketFactory;
import javax.net.SocketFactory;
import javax.net.ssl.SSLServerSocketFactory;
import javax.net.ssl.SSLSocketFactory;
/**
*
* @author <NAME>
*
*/
/**
 * Port-forwarding listener: accepts client connections on a local address and
 * relays each one to the configured target address, optionally over SSL. The
 * accept loop runs on its own thread (Runnable) until an accept fails or the
 * server socket is closed via {@link #close()}.
 */
public class Listener implements Runnable {
    // private static Log log = LogFactory.getLog(Listener.class);
    private static Log log = LogFactory.getLog("net.sf.jameleon");

    private ServerSocket serverSocket;
    private InetSocketAddress from, to;       // local listen address / forward target
    private Throwable exception;              // last fatal accept-loop error, if any
    private Cleaner cleaner = new Cleaner();  // cleans up finished connection processors
    private String testCase;
    private String connectionName;
    private boolean enableSSL;                // accept (and connect onward) over SSL when true
    private boolean serverConnection;         // true when this listener represents the server side
    // Observers notified of each internal->external socket address mapping.
    private ArrayList<SocketAssignmentListener> addressListeners = new ArrayList<>();

    /** @return the exception that terminated the accept loop, or null if none. */
    public Throwable getException() {
        return exception;
    }

    /** Register an address-mapping observer (duplicates are ignored). */
    public void registerAddressListener(SocketAssignmentListener theListener){
        if(!addressListeners.contains(theListener)){
            addressListeners.add(theListener);
        }
    }

    /** Remove a previously registered address-mapping observer. */
    public void unRegisterAddressListener(SocketAssignmentListener theListener){
        if(addressListeners.contains(theListener)){
            addressListeners.remove(theListener);
        }
    }

    /** Push one internal->external address mapping to every registered observer. */
    private void notifyListeners(String internalAddress, String externalAddress){
        for (SocketAssignmentListener thisListener : addressListeners){
            if (thisListener != null){
                thisListener.addInternalAddressMapping(internalAddress, externalAddress);
            }
        }
    }

    /**
     * Bind a plain (non-SSL) listener on {@code from}, forwarding to {@code to}.
     *
     * @throws IOException if the server socket cannot be bound
     */
    public Listener(InetSocketAddress from, InetSocketAddress to, String testCase, String connectionName)
            throws IOException {
        this.testCase = testCase;
        this.connectionName = connectionName;
        this.from = from;
        this.to = to;
        serverSocket = new ServerSocket();
        serverSocket.setReuseAddress(true);
        serverSocket.bind(from);
        String hostname = from.getHostName();
        if (hostname == null) {
            hostname = "*";
        }
        log.info("Ready to accept client connection on " + hostname + ":"
                + from.getPort());
    }

    /**
     * Bind a listener on {@code from}, optionally SSL, forwarding to {@code to}.
     *
     * @throws IOException if the server socket cannot be created or bound
     */
    public Listener(InetSocketAddress from, InetSocketAddress to, String testCase, String connectionName, boolean enableSSL, boolean serverConnection)
            throws IOException {
        this.testCase = testCase;
        this.connectionName = connectionName;
        this.serverConnection = serverConnection;
        this.from = from;
        this.to = to;
        this.enableSSL = enableSSL;
        if (enableSSL) {
            ServerSocketFactory ssocketFactory = SSLServerSocketFactory.getDefault();
            serverSocket = ssocketFactory.createServerSocket();
        } else {
            serverSocket = new ServerSocket();
        }
        serverSocket.setReuseAddress(true);
        serverSocket.bind(from);
        String hostname = from.getHostName();
        if (hostname == null) {
            hostname = "*";
        }
        log.info("Ready to accept client connection on " + hostname + ":"
                + from.getPort());
    }

    /**
     * Bind a listener on a system-chosen local port (the local host address),
     * optionally SSL, forwarding to {@code to}. The chosen address becomes
     * {@code from}.
     *
     * @throws IOException if the server socket cannot be created or bound
     */
    public Listener(InetSocketAddress to, String testCase, String connectionName, boolean enableSSL, boolean serverConnection)
            throws IOException {
        this.testCase = testCase;
        this.connectionName = connectionName;
        this.serverConnection = serverConnection;
        this.to = to;
        this.enableSSL = enableSSL;
        if (enableSSL) {
            ServerSocketFactory ssocketFactory = SSLServerSocketFactory.getDefault();
            serverSocket = ssocketFactory.createServerSocket();
        } else {
            serverSocket = new ServerSocket();
        }
        serverSocket.setReuseAddress(true);
        InetSocketAddress theResult = new InetSocketAddress(InetAddress.getLocalHost(),0);
        System.out.println("Listener:: Trying to bind to Address ->"+theResult.toString());
        serverSocket.bind(theResult); // Let the system pick the address
        // if (serverIsRemote){
        this.from = new InetSocketAddress(serverSocket.getInetAddress(), serverSocket.getLocalPort());
        // } else {
        // }
        String hostname = from.getHostName();
        if (hostname == null) {
            hostname = "*";
        }
        log.info("Ready to accept client connection on " + hostname + ":"
                + from.getPort());
    }

    /**
     *
     * @return The internal address to use for the server address
     */
    public InetSocketAddress getInternalServerAddress() {
        InetSocketAddress returnAddress = new InetSocketAddress(serverSocket.getInetAddress(), serverSocket.getLocalPort());
        return returnAddress;
    }

    /**
     * Accept loop: for each client connection, open a socket to the target,
     * publish the address mappings, and hand both sockets to a Processor.
     * Exits (storing the cause in {@link #exception}) on the first accept/
     * connect failure -- note this also fires when close() shuts the socket.
     */
    public void run() {
        Socket source = null;
        new Thread(cleaner).start();
        while (true) {
            try {
                TargetConnector connector = new TargetConnector(to);
                source = serverSocket.accept();
                log.trace("accepted client connection");
                Socket target = connector.openSocket(enableSSL);
                if (serverConnection){
                    notifyListeners(target.getLocalAddress().getHostAddress()+":"+target.getLocalPort(),
                            source.getLocalAddress().getHostAddress()+":"+source.getLocalPort());
                    notifyListeners(target.getInetAddress().getHostAddress()+":"+target.getPort(),
                            source.getInetAddress().getHostAddress()+":"+source.getPort());
                    new Processor(source, target, from, cleaner, serverConnection, testCase, connectionName).process();
                } else {
                    notifyListeners(source.getLocalAddress().getHostAddress()+":"+source.getLocalPort(),
                            target.getLocalAddress().getHostAddress()+":"+target.getLocalPort());
                    notifyListeners(source.getInetAddress().getHostAddress()+":"+source.getPort(),
                            target.getInetAddress().getHostAddress()+":"+target.getPort());
                    new Processor(source, target, new InetSocketAddress(target.getLocalAddress(), target.getLocalPort()), cleaner, serverConnection, testCase, connectionName).process();
                }
            } catch (IOException e) {
                // Fixed: the message previously read "...port 8080because of :..."
                // (missing spaces around "because of").
                String msg = connectionName + " Listener: Failed to accept a client connection on port "
                        + from.getPort() + " because of: " + e.getMessage();
                log.error(msg, e);
                exception = e;
                return;
            }
        }
    }

    /** Close the server socket, which also terminates the accept loop. */
    public void close() {
        if (!serverSocket.isClosed()) {
            try {
                serverSocket.close();
            } catch (IOException e) {
                log.error("Listener.close: " + e.getMessage(), e);
            }
        }
    }
}
|
// Shared mutable state for the current player, filled in by Player.oninit
// from the /api/register response.
var PlayerState = {
    playerIdx: 0,  // index assigned by the server
    color: 0,      // player colour value from the server
    position: {    // current board position
        x: 0,
        y: 0
    }
}
// Mithril component: registers the player on init and renders a card with
// index, position and colour read from PlayerState.
var Player = {
    oninit: function (vnode) {
        m.request({
            method: "GET",
            // NOTE(review): player name and id are hard-coded -- presumably
            // placeholder values; confirm before shipping.
            url: "/api/register/Erwan/1",
            extract: function (xhr) { return xhr.responseText }
        })
        .then(function (result) {
            // Response is a comma-separated string: idx,x,y,color.
            // NOTE(review): split() yields strings, so the numeric-looking
            // fields stay strings -- confirm consumers don't need numbers.
            [PlayerState.playerIdx, PlayerState.position.x, PlayerState.position.y, PlayerState.color] = result.split(",");
        })
    },
    view: function (vnode) {
        return m("player.player-card.pure-g", [
            m(".player-index.pure-u-1-3", [
                m(".label", "Player Index"),
                m(".value", PlayerState.playerIdx)
            ]),
            m(".player-position.pure-u-1-3", [
                m(".label", "Position"),
                m(".pure-g", [
                    m(".x-card.pure-u-1-2", [
                        m(".label", "x: "),
                        m(".value", PlayerState.position.x)
                    ]),
                    m(".y-card.pure-u-1-2", [
                        m(".label", "y: "),
                        m(".value", PlayerState.position.y)
                    ]),
                ]),
            ]),
            m(".player-color.pure-u-1-3", [
                m(".label", "Color"),
                m(".value", PlayerState.color)
            ])
        ]);
    }
}
|
<gh_stars>0
import { useEffect, useState } from "react";
// React hook: reports whether the component is currently mounted.
// Starts at `initState`, flips to true right after mount, and back to
// false when the cleanup runs on unmount.
export const useDidMount = (initState = false) => {
  const [isMounted, setIsMounted] = useState(initState);

  useEffect(() => {
    setIsMounted(true);
    return () => {
      setIsMounted(false);
    };
  }, []);

  return isMounted;
};
|
#!/bin/bash
# Bash script with AZ CLI to automate the creation of an
# Azure Data Factory account.
# Chris Joakim, Microsoft, October 2021
source ./azconfig.sh
arg_count=$#
processed=0
mkdir -p tmp
# Create the resource group and the Azure Data Factory account.
# NOTE(review): the original echoed $adf_name while the az command used
# $adf1_name; aligned on $adf1_name here to match both the az command and
# the tmp/adf1_acct_create.json output file -- confirm against azconfig.sh.
# All variable expansions are quoted to survive values containing spaces.
create() {
    processed=1

    echo 'creating adf rg: '"$adf_rg"
    az group create \
        --location "$adf_region" \
        --name "$adf_rg" \
        --subscription "$subscription" \
        > tmp/adf_rg_create.json

    echo 'creating adf acct: '"$adf1_name"
    az datafactory factory create \
        --location "$adf_region" \
        --name "$adf1_name" \
        --resource-group "$adf_rg" \
        > tmp/adf1_acct_create.json
}
# Print the supported invocation(s) of this script.
display_usage() {
    printf '%s\n' 'Usage:' './adf1.sh create'
}
# ========== "main" logic below ==========
# Dispatch each CLI argument to its handler; show usage if nothing ran.
# "$@" and test operands are quoted to avoid word splitting on odd input.
if [ "$arg_count" -gt 0 ]
then
    for arg in "$@"
    do
        if [ "$arg" == "create" ]; then create; fi
    done
fi

if [ "$processed" -eq 0 ]; then display_usage; fi
echo 'done'
|
package net
import (
"bytes"
"fmt"
"net"
"strconv"
"time"
"github.com/pkg/errors"
)
const (
	// OutOfBandHeader is the four 0xFF bytes that prefix a connectionless
	// (out-of-band) packet -- presumably the Quake-style UDP query protocol,
	// given the getinfo/getstatus commands below.
	OutOfBandHeader = "\xff\xff\xff\xff"
	// GetInfoCommand requests the short server-info block.
	GetInfoCommand = "getinfo"
	// GetStatusCommand requests the full configuration plus the player list.
	GetStatusCommand = "getstatus"
)
// SendCommand sends a single out-of-band UDP query (OutOfBandHeader + cmd)
// to addr and returns the raw response datagram. The whole exchange is
// bounded by a 5-second deadline and the response is capped at 1 MiB.
func SendCommand(addr, cmd string) ([]byte, error) {
	raddr, err := net.ResolveUDPAddr("udp4", addr)
	if err != nil {
		return nil, err
	}
	conn, err := net.ListenPacket("udp4", "0.0.0.0:0")
	if err != nil {
		return nil, err
	}
	defer conn.Close()

	buffer := make([]byte, 1024*1024)
	if err := conn.SetDeadline(time.Now().Add(5 * time.Second)); err != nil {
		return nil, err
	}
	// The byte count of a successful datagram write is not useful (UDP sends
	// are all-or-nothing), so discard it explicitly instead of letting the
	// ReadFrom result below silently clobber it, as the original code did.
	if _, err := conn.WriteTo([]byte(fmt.Sprintf("%s%s", OutOfBandHeader, cmd)), raddr); err != nil {
		return nil, err
	}
	n, _, err := conn.ReadFrom(buffer)
	if err != nil {
		return nil, err
	}
	return buffer[:n], nil
}
// parseMap decodes a backslash-delimited key/value block of the form
// `\key1\value1\key2\value2`. A leading header line (everything up to and
// including the first newline) is discarded first; a trailing value-less
// key is ignored.
func parseMap(data []byte) map[string]string {
	// Drop the response header line, if any.
	if nl := bytes.IndexByte(data, '\n'); nl >= 0 {
		data = data[nl+1:]
	}
	data = bytes.TrimPrefix(data, []byte("\\"))
	data = bytes.TrimSuffix(data, []byte("\n"))

	fields := bytes.Split(data, []byte("\\"))
	result := make(map[string]string, len(fields)/2)
	for i := 0; i+1 < len(fields); i += 2 {
		result[string(fields[i])] = string(fields[i+1])
	}
	return result
}
// Player is one entry of a getstatus player listing.
type Player struct {
	Name  string // player name, stored with surrounding quotes removed
	Ping  int
	Score int
}

// parsePlayers decodes the newline-separated player block of a getstatus
// response. Each line has the form `<score> <ping> "<name>"`; lines that do
// not split into exactly three fields are skipped, while a field that fails
// to parse aborts the whole call with an error.
func parsePlayers(data []byte) ([]Player, error) {
	result := make([]Player, 0)
	for _, line := range bytes.Split(data, []byte("\n")) {
		fields := bytes.SplitN(line, []byte(" "), 3)
		if len(fields) != 3 {
			// Not a player line (e.g. an empty trailing line) -- ignore it.
			continue
		}
		name, err := strconv.Unquote(string(fields[2]))
		if err != nil {
			return nil, err
		}
		ping, err := strconv.Atoi(string(fields[1]))
		if err != nil {
			return nil, err
		}
		score, err := strconv.Atoi(string(fields[0]))
		if err != nil {
			return nil, err
		}
		result = append(result, Player{Name: name, Ping: ping, Score: score})
	}
	return result, nil
}
// GetInfo queries addr with the "getinfo" command and returns the decoded
// key/value info block.
func GetInfo(addr string) (map[string]string, error) {
	resp, err := SendCommand(addr, GetInfoCommand)
	if err != nil {
		return nil, err
	}
	return parseMap(resp), nil
}
// StatusResponse is the decoded result of a "getstatus" query: the server's
// configuration variables and the currently connected players.
type StatusResponse struct {
	Configuration map[string]string
	Players       []Player
}
// GetStatus queries addr with the "getstatus" command and returns the
// server's configuration map plus (when present) its player list.
func GetStatus(addr string) (*StatusResponse, error) {
	resp, err := SendCommand(addr, GetStatusCommand)
	if err != nil {
		return nil, err
	}
	data := bytes.TrimSuffix(resp, []byte("\n"))
	// parts: [header, config-block, optional player-block]
	parts := bytes.SplitN(data, []byte("\n"), 3)
	switch len(parts) {
	case 2:
		// No player block: configuration only, empty player slice.
		status := &StatusResponse{
			Configuration: parseMap(parts[1]),
			Players:       make([]Player, 0),
		}
		return status, nil
	case 3:
		status := &StatusResponse{
			Configuration: parseMap(parts[1]),
		}
		// NOTE(review): a parsePlayers failure is silently ignored here,
		// leaving Players nil -- confirm this best-effort behaviour is
		// intended (GetInfo/SendCommand propagate their errors).
		status.Players, _ = parsePlayers(parts[2])
		return status, nil
	default:
		return nil, errors.Errorf("cannot parse response: %q", resp)
	}
}
|
/* eslint arrow-body-style: ["error", "as-needed"] */
import { call, fork, put, takeEvery, all } from 'redux-saga/effects';
import { log } from '@navikt/digisyfo-npm';
import { hentApiUrl, post } from '../../../gateway-api';
import {
ETTERSEND_SOKNAD_ARBG_FORESPURT,
ettersenderSoknadTilArbeidsgiver, ettersendSoknadTilArbeidsgiverFeilet, soknadEttersendtTilArbeidsgiver,
soknadAlleredeEttersendtTilArbeidsgiver,
} from './ettersendingArbeidsgiver';
import {
ETTERSEND_SOKNAD_NAV_FORESPURT,
ettersenderSoknadTilNav, ettersendSoknadTilNavFeilet, soknadEttersendtTilNav, soknadAlleredeEttersendtTilNav,
} from './ettersendingNav';
// Saga: resend the given soknad to NAV. Dispatches the in-progress action,
// POSTs to the backend, then reports success. An HTTP 409 means the soknad
// was already resent; any other failure dispatches the generic error action.
export function* ettersendSoknadNav(action) {
    const { soknadId } = action;
    try {
        yield put(ettersenderSoknadTilNav(soknadId));
        yield call(post, `${hentApiUrl()}/soknader/${soknadId}/ettersendTilNav`);
        yield put(soknadEttersendtTilNav());
    } catch (e) {
        log(e);
        if (e.toString().endsWith('409')) {
            yield put(soknadAlleredeEttersendtTilNav());
        } else {
            yield put(ettersendSoknadTilNavFeilet());
        }
    }
}
// Saga: resend the given soknad to the employer (arbeidsgiver). Mirrors
// ettersendSoknadNav: HTTP 409 maps to "already resent", anything else to
// the generic failure action.
export function* ettersendSoknadArbeidsgiver(action) {
    const { soknadId } = action;
    try {
        yield put(ettersenderSoknadTilArbeidsgiver(soknadId));
        yield call(post, `${hentApiUrl()}/soknader/${soknadId}/ettersendTilArbeidsgiver`);
        yield put(soknadEttersendtTilArbeidsgiver());
    } catch (e) {
        log(e);
        if (e.toString().endsWith('409')) {
            yield put(soknadAlleredeEttersendtTilArbeidsgiver());
        } else {
            yield put(ettersendSoknadTilArbeidsgiverFeilet());
        }
    }
}
// Watcher: run ettersendSoknadNav for every ETTERSEND_SOKNAD_NAV_FORESPURT action.
function* watchEttersendSoknadNav() {
    yield takeEvery(ETTERSEND_SOKNAD_NAV_FORESPURT, ettersendSoknadNav);
}

// Watcher: run ettersendSoknadArbeidsgiver for every ETTERSEND_SOKNAD_ARBG_FORESPURT action.
function* watchEttersendSoknadArbeidsgiver() {
    yield takeEvery(ETTERSEND_SOKNAD_ARBG_FORESPURT, ettersendSoknadArbeidsgiver);
}

// Root saga for the "ettersending" feature: runs both watchers concurrently.
export default function* ettersendingSagas() {
    yield all([
        fork(watchEttersendSoknadNav),
        fork(watchEttersendSoknadArbeidsgiver),
    ]);
}
|
/**
* Copyright 2019 <NAME> (www.algorithmist.net)
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import {
AfterViewInit,
Component,
ElementRef,
Inject,
Input,
HostListener,
OnDestroy,
OnInit,
ViewChild
} from '@angular/core';
import { getCurrentOffset } from './libs/map-libs';
import { MapIconOptions } from './data/map-icon-options';
import { EventHandler } from './interfaces/event-handler';
import { INIT_COORDS } from '../tokens';
import * as esri from 'esri-leaflet';
import * as L from 'leaflet';
/**
* Leaflet Map Component
*
* @author <NAME> (www.algorithmist.net)
*
* @version 1.0
*/
@Component({
  selector: 'app-map',
  templateUrl: './map.component.html',
  styleUrls: ['./map.component.scss'],
})
export class MapComponent implements OnInit, AfterViewInit, OnDestroy {
  public mcText: string;                       // mouse coords text (innerHTML)

  @Input()
  public markers: {lat: number, long: number}[];  // Markers to overlay on Map

  public currentWidth: number;                 // current map width based on window width
  public currentHeight: number;                // current map height based on window height

  protected baseLayer: any;                    // Map Base layer
  protected map: any;                          // Map reference (currently leaflet)
  protected mapLoaded = false;                 // True if the map has been loaded

  // The primary Map
  @ViewChild('primaryMap', {static: true}) protected mapDivRef: ElementRef;
  protected mapDiv: HTMLDivElement;

  // Leaflet Map Event Handlers (used for removal on destroy)
  protected onClickHandler: EventHandler;
  protected onMouseMoveHandler: EventHandler;

  constructor( @Inject(INIT_COORDS) protected _initCoords: {lat: number, long: number} )
  {
    this.baseLayer = null;

    // Leaflet Map Event Handlers (bound as arrows so they can be detached in ngOnDestroy)
    this.onClickHandler     = (evt: any) => this.__onMapClick(evt);
    this.onMouseMoveHandler = (evt: any) => this.__onMapMouseMove(evt);

    // Initial mouse-coords text
    this.mcText = '';

    // some simple default values
    this.currentWidth  = 600;
    this.currentHeight = 200;
  }

  public ngOnInit(): void
  {
    // Reference to DIV containing map is used in Leaflet initialization
    this.mapDiv = this.mapDivRef.nativeElement;

    this.__initializeMap();
    this.__renderMap();
    this.__showMarkers();
  }

  public ngAfterViewInit(): void
  {
    this.map.invalidateSize();
    this.__initMapHandlers();
  }

  public ngOnDestroy(): void
  {
    // Detach the handlers registered in __initMapHandlers to avoid leaks
    this.map.off('click'    , this.onClickHandler    );
    this.map.off('mousemove', this.onMouseMoveHandler);
  }

  /**
   * Basic map initialization (idempotent -- subsequent calls are no-ops)
   */
  protected __initializeMap(): void
  {
    if (this.mapLoaded) {
      return;
    }

    this.mapLoaded = true;
    this.__updateMapSize();
  }

  /**
   * Render the map (establish center and base layer)
   */
  protected __renderMap(): void
  {
    // Create Leaflet Map in fixed DIV - zoom level is hardcoded for simplicity
    this.map = L.map(this.mapDiv, {
      zoomControl: false,
      zoomAnimation: false,
      trackResize: true,
      boxZoom: true,
    }).setView([this._initCoords.lat, this._initCoords.long], 10);

    this.baseLayer = esri.basemapLayer('Gray');
    this.map.addLayer(this.baseLayer);
  }

  /**
   * Show markers if they are defined
   */
  protected __showMarkers(): void
  {
    if (this.markers !== undefined && this.markers != null && this.markers.length > 0)
    {
      // Add markers
      const icon = L.icon({
        iconUrl: MapIconOptions.mapIcon,
        iconSize: MapIconOptions.iconSize,
        iconAnchor: MapIconOptions.iconAnchor,
        shadowUrl: MapIconOptions.mapShadowIcon,
        shadowSize: MapIconOptions.shadowSize,
        shadowAnchor: MapIconOptions.shadowAnchor,
      });

      const n: number = this.markers.length;
      let i: number;
      // Fixed: L.Marker is the instance type; L.marker (lowercase) is the
      // factory function, not a type.
      let m: L.Marker;
      let x: number;
      let y: number;

      for (i = 0; i < n; ++i) {
        x = this.markers[i].lat;
        y = this.markers[i].long;

        if (x !== undefined && !isNaN(x) && y !== undefined && !isNaN(y))
        {
          // okay to add the icon
          m = L.marker([x, y], {icon: icon}).addTo(this.map);
        }
        else
        {
          // implement your own error handling
          console.log('MARKER ERROR, Marker number: ', (i+1), 'x: ', x, ' y: ', y);
        }
      }
    }
  }

  @HostListener('window:resize', ['$event'])
  protected __onResize(event: any): void
  {
    this.__updateMapSize();
    this.map.invalidateSize();
  }

  /**
   * Update the current width/height occupied by the map
   */
  protected __updateMapSize(): void
  {
    // update width/height settings as you see fit
    this.currentWidth  = window.innerWidth || document.documentElement.clientWidth || document.body.clientWidth;
    this.currentHeight = (window.innerHeight || document.documentElement.clientHeight || document.body.clientHeight) - 200;
  }

  /**
   * Initialize Leaflet Map handlers
   */
  protected __initMapHandlers(): void
  {
    this.map.on('mousemove', this.onMouseMoveHandler);
    this.map.on('click'    , this.onClickHandler    );
  }

  /**
   * Execute on Leaflet Map click
   */
  protected __onMapClick(evt: any): void {
    const target: any = evt.originalEvent.target;
    console.log('Map click on: ', target);
  }

  /**
   * Execute on mouse move over Leaflet map
   *
   * @param evt Leaflet-supplied information regarding current mouse point, mainly geo coords.
   */
  protected __onMapMouseMove(evt: any): void
  {
    const offset: {x: number, y: number} = getCurrentOffset(this.map);

    // uncomment to study offset
    // console.log('offset computation:', offset);

    // Lat and Long are embedded in the event object
    const lat: string  = evt.latlng.lat.toFixed(3);
    const long: string = evt.latlng.lng.toFixed(3);

    this.mcText = `Latitude: ${lat} &nbsp; Longitude: ${long}`;
  }
}
|
from typing import List
from .helpers import Reconstruct, QuantizationError
class DataProcessor:
    """Wrap a compressed integer sequence and expose decoding utilities."""

    def __init__(self, compressed_data: List[int]):
        self.compressed_data = compressed_data

    def reconstruct_data(self) -> List[float]:
        """Decode the compressed data and return the values as floats."""
        decoded = Reconstruct().process(self.compressed_data)
        return [float(value) for value in decoded]

    def calculate_error(self) -> float:
        """Return the total quantization error of the compressed data."""
        return float(QuantizationError().calculate(self.compressed_data))
# Demonstration of usage
# NOTE(review): these statements execute at import time (module-level side
# effects); consider guarding with `if __name__ == "__main__":` if this
# module is ever imported rather than run directly.
compressed_data = [10, 20, 30, 40]
processor = DataProcessor(compressed_data)
reconstructed = processor.reconstruct_data()
error = processor.calculate_error()
print("Reconstructed Data:", reconstructed)
print("Quantization Error:", error)
// On DOM ready, register the Handlebars-style "guides" page template.
// The template iterates the available guides (#forEachGuide) and renders an
// MDL card per guide with an optional summary and a "Learn" link.
$(function() {
registerTemplate('guides',`
<h1 id="page-heading">Guides</h1>
<div class='mdl-grid'>
{{#forEachGuide}}
<div class='mdl-cell mdl-cell--4-col'>
<div class='guide-card mdl-card mdl-shadow--2dp'>
<div class='mdl-card__title mdl-card--expand'>
<h2 class='mdl-card__title-text'>{{name}}</h2>
</div>
<div class='mdl-card__supporting-text'>
{{#if summary}}
{{summary}}
{{/if}}
</div>
<div class='mdl-card__actions mdl-card--border'>
<a class='mdl-button mdl-button--colored mdl-js-button mdl-js-ripple-effect' page='{{name}}'>
Learn
</a>
</div>
</div>
</div>
{{/forEachGuide}}
</div>
`);
});
#include <math.h>
#define MAX_VOLUME 100
typedef struct {
float x;
float y;
float z;
} vec;
float calculate_distance(const vec *loc) {
// Assuming listener's location is at (0, 0, 0)
return sqrt(loc->x * loc->x + loc->y * loc->y + loc->z * loc->z);
}
/*
 * Recompute channel volume from the sound's 3-D position: linear falloff of
 * one volume unit per distance unit from the listener at the origin, clamped
 * at 0. NOTE(review): `chan` is currently unused because the actual channel
 * update below is still a stub.
 */
void sound_updatechanvol(int chan, const vec *loc) {
    float distance = calculate_distance(loc);
    /* float result truncated toward zero on conversion to int */
    int volume = MAX_VOLUME - distance;
    if (volume < 0) {
        volume = 0;
    }
    // Update the volume of the specified channel
    // Code to update the volume of the specified channel goes here
}
#!/bin/bash
# builds intermediate interp tables (geo and taxonomy). NOTE: make sure the epsg-hsql jar is the right version for the latest release of occurrence-hive!

# Timestamped, colourised logger (cyan timestamp, highlighted message).
log () {
 echo $(tput setaf 6)$(date '+%Y-%m-%d %H:%M:%S ')$(tput setaf 14)$1$(tput sgr0)
}

# Previously the jar was resolved from the repository's maven-metadata; now a
# locally built jar is required instead (see check below).
#SONAR_REDIRECT_URL=http://repository.gbif.org/repository/gbif/org/gbif/occurrence/occurrence-hive
#VERSION=$(xmllint --xpath "string(//release)" <(curl -s "${SONAR_REDIRECT_URL}/maven-metadata.xml"))
#FILENAME=occurrence-hive-${VERSION}-jar-with-dependencies.jar
#SONAR_DOWNLOAD_URL=${SONAR_REDIRECT_URL}/${VERSION}/${FILENAME}
#curl -SsLo /tmp/occurrence-hive.jar "${SONAR_REDIRECT_URL}/${VERSION}/${FILENAME}"

# The udf-kvs build must be supplied manually; abort early if it is missing.
if [[ ! -e /home/hdfs/occurrence-hive-udf-kvs.jar ]]; then
 echo "Build the udf-kvs branch of occurrence-hive, copy to /home/hdfs/occurrence-hive-udf-kvs.jar"
 exit 1
fi

# Last known working version for species matching
curl -SsLo /home/hdfs/occurrence-hive-0.129.jar https://repository.gbif.org/repository/gbif/org/gbif/occurrence/occurrence-hive/0.129/occurrence-hive-0.129-jar-with-dependencies.jar
curl -SsLo /tmp/gt-epsg-hsql.jar 'http://download.osgeo.org/webdav/geotools/org/geotools/gt-epsg-hsql/20.5/gt-epsg-hsql-20.5.jar'

log 'Building intermediate interp geo tables'
hive --hiveconf occjar=/home/hdfs/occurrence-hive-udf-kvs.jar \
 --hiveconf props=hive/normalize/occurrence-processor.properties \
 --hiveconf epsgjar=/tmp/gt-epsg-hsql.jar \
 --hiveconf api=http://api.gbif.org/v1/ \
 --hiveconf cacheTable=geocode_gadm_kv --hiveconf cacheBuckets=50 --hiveconf cacheZk=c5zk1.gbif.org \
 -f hive/normalize/interp_geo.q

log 'Building intermediate interp taxonomy tables'
hive --hiveconf occjar=/home/hdfs/occurrence-hive-0.129.jar \
 --hiveconf props=hive/normalize/occurrence-processor.properties \
 --hiveconf api=http://api.gbif.org/v1/ \
 -f hive/normalize/interp_taxonomy.q

log 'Building country→gbifRegion map table'
# Build a TSV of "ISO2<TAB>gbifRegion" from the enumeration API, plus two
# manual rows for legacy codes present in old snapshots.
(
 curl -Ssg https://api.gbif.org/v1/enumeration/country | jq -r '.[] | "\(.iso2)\t\(.gbifRegion)"'
 # Snapshots 2010-04-01 and 2010-07-26 contain UK values.
 echo -e "UK\tEUROPE"
 echo -e "XK\tEUROPE"
# NOTE(review): 'sort | sort -k2' -- the first sort looks redundant (the
# second re-sorts by field 2); confirm whether it was meant e.g. to dedupe
# with -u before removing it.
) | sort | sort -k2 > /tmp/analytics_regions.tsv
hive --hiveconf regionTable=/tmp/analytics_regions.tsv \
 -f hive/normalize/create_region_table.q
# Grant a permission to a role via the Datadog Roles API (v2).
# Replace <ROLE_UUID>, <PERMISSION_UUID> and both key placeholders first.
# Fixed: the "id" value must be a quoted JSON string -- a bare UUID (or the
# bare <PERMISSION_UUID> placeholder) makes the payload invalid JSON.
curl -X POST \
 https://app.datadoghq.com/api/v2/roles/<ROLE_UUID>/permissions \
 -H "Content-Type: application/json" \
 -H "DD-API-KEY: <YOUR_DATADOG_API_KEY>" \
 -H "DD-APPLICATION-KEY: <YOUR_DATADOG_APPLICATION_KEY>" \
 -d '{
 "data":
 {
 "type": "permissions",
 "id": "<PERMISSION_UUID>"
 }
 }'
##########################################################################################
#
# Copy original mixer_paths_qrd.xml and run script to add stereo effects into it.
#
#
##########################################################################################

# Copy files and run scripts
# $MODPATH is quoted throughout: module paths may contain characters that
# would otherwise undergo word splitting.
mkdir -p "$MODPATH/system/vendor/etc"
ui_print "Patching Mixer"
cp -f /system/vendor/etc/mixer_paths_qrd.xml "$MODPATH/system/vendor/etc/mixer_paths_qrd.xml"
. "$MODPATH/.aml.sh"
ui_print "Installation Successful!"
ui_print "By acervenky@XDA"

# Default permissions
set_perm_recursive "$MODPATH" 0 0 0755 0644
/**
* @file : smartptr.h
* @brief : Smart pointers header file in CUDA C++14,
* @author : <NAME> <<EMAIL>>
* @date : 20171007
* @ref :
*
* If you find this code useful, feel free to donate directly and easily at this direct PayPal link:
*
* https://www.paypal.com/cgi-bin/webscr?cmd=_donations&business=ernestsaveschristmas%2bpaypal%40gmail%2ecom&lc=US&item_name=ernestyalumni¤cy_code=USD&bn=PP%2dDonationsBF%3abtn_donateCC_LG%2egif%3aNonHosted
*
* which won't go through a 3rd. party such as indiegogo, kickstarter, patreon.
* Otherwise, I receive emails and messages on how all my (free) material on
* physics, math, and engineering have helped students with their studies,
* and I know what it's like to not have money as a student, but love physics
* (or math, sciences, etc.), so I am committed to keeping all my material
* open-source and free, whether or not
* sufficiently crowdfunded, under the open-source MIT license:
* feel free to copy, edit, paste, make your own versions, share, use as you wish.
* Just don't be an asshole and not give credit where credit is due.
* Peace out, never give up! -EY
*
* */
/*
* COMPILATION TIP
* nvcc -std=c++14 -dc smartptr.cu -o smartptr.o
*
* */
#ifndef __SMARTPTR_H__
#define __SMARTPTR_H__
#include <memory> // std::shared_ptr, std::unique_ptr
#include <vector> // std::vector
/*
* *** custom deleters ***
* */
// field K = float; RR = real numbers, float
auto deleterRR_lambda=[&](float* ptr){ cudaFree(ptr); };
/* custom deleter as a struct */
/* custom deleter as a struct: releases device memory with cudaFree; usable
 * as the deleter type of std::unique_ptr (unlike a lambda, it can be named
 * in the smart-pointer's template argument list). */
struct deleterRR_struct
{
    void operator()(float* ptr) const
    {
        cudaFree(ptr);
    }
};
/*
* *** END of custom deleters ***
* */
/**
* *** @name function (factories)
* @note function factory : Lx \in \mathbb{Z}^+ |-> (\mapsto) u \in \mathbb{R}^{Lx}
* */
//std::unique_ptr<float[], decltype(deleterRR_lambda)> make_uniq_u(const int); // undefined reference
// Factory: unique_ptr owning a device float array of the given length
// (presumably cudaMalloc'd -- the deleter calls cudaFree; confirm in the .cu).
std::unique_ptr<float[], deleterRR_struct> make_uniq_u(const int); // undefined reference
// Factory: same device array, but ownership shared via shared_ptr.
std::shared_ptr<float> make_sh_u(const int);
/*
* *** END of function (factories) ***
* */
/*
* *** classes with smart pointers as member functions
* */
// RRmodule; RR = real numbers, float
// RRmodule; RR = real numbers, float
// Owns a device float array of length Lx through a unique_ptr whose custom
// deleter releases the buffer with cudaFree.
class RRModule
{
    private:
        int Lx; // remember you can use long
        // member custom deleter as struct; auto lambda not allowed here
        struct deleterRR {
            void operator()(float* ptr) const
            {
                cudaFree(ptr);
            }
        };
        // member: owning pointer to the device array (deleter calls cudaFree,
        // so the buffer is presumably cudaMalloc'd by the constructor)
        std::unique_ptr<float[], deleterRR> X;
    public:
        // Constructor: takes the array length Lx
        RRModule(const int);
        // member functions
        // host-vector <-> device-array transfer -- TODO confirm direction of each
        void load_from_hvec(std::vector<float>& );
        void load_from_d_X(std::vector<float>& );
        // destructor
        ~RRModule();
};
// shared_ptr variant of RRModule: same device array, but ownership can be
// shared with callers via get().
class RRModule_sh
{
    private:
        int Lx; // remember you can use long
        // member custom deleter as struct; auto lambda not allowed here
        struct deleterRR {
            void operator()(float* ptr) const
            {
                cudaFree(ptr);
            }
        };
        // member: shared owning pointer to the device array
        std::shared_ptr<float> X;
    public:
        // Constructor: takes the array length Lx
        RRModule_sh(const int);
        // member functions
        // host-vector <-> device-array transfer -- TODO confirm direction of each
        void load_from_hvec(std::vector<float>& );
        void load_from_d_X(std::vector<float>& );
        // adopt a buffer previously owned by a unique_ptr with the same deleter
        void load_from_uniq(std::unique_ptr<float[],deleterRR> &);
        // share ownership of the device array with the caller
        std::shared_ptr<float> get();
        // destructor
        ~RRModule_sh();
};
#endif // __SMARTPTR_H__
|
// Barrel file: re-export the default spec and all public interfaces.
export { default as spec } from './spec/index';
export * from './interfaces/index';
|
/*
 * Copyright 2020 Kansaneläkelaitos
 *
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not
 * use this file except in compliance with the License. You may obtain a copy
 * of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
 * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
 * License for the specific language governing permissions and limitations under
 * the License.
 */
package fi.kela.kanta.util;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.reflect.Field;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Date;
import java.util.List;
import org.apache.commons.lang.reflect.FieldUtils;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
public class GenericToString {
@OmitFromToString
private SimpleDateFormat SDF = new SimpleDateFormat("dd.MM.yyyy HH:mm.ss");
private static final String comma = ", ";
private static final String equals = "=";
private static final String open_parameters = " [";
private static final String close_parameters = "]";
private static final String null_date = "null (date)";
private static final String na_date = "N/A (date)";
private static final String null_non_primitive = "null (object)";
private static final String na_non_primitive = "N/A (object)";
private static final String null_array = "null (array)";
private static final String na_array = "N/A (array)";
private static final String na_access = "N/A (restricted)";
private static final String hidden_value = "xxxxx";
private static final Logger LOGGER = LogManager.getLogger(GenericToString.class);
@Override
public String toString() {
return this.toString(this);
}
@Override
public int hashCode() {
return this.toString().hashCode();
}
@Override
public boolean equals(Object o) {
// TODO Auto-generated method stub
return super.equals(o);
}
/**
 * Generic toString method producing a simple, tidy key=value listing of the
 * given object's attributes.
 *
 * @param obj
 *            Object whose attributes should be printed.
 * @return Listing of the given object's attributes and their values.
 */
public String toString(Object obj) {
    StringBuilder sb = new StringBuilder();
    sb.append(obj.getClass().getSimpleName());
    sb.append(GenericToString.open_parameters);
    boolean fieldsAdded = false;
    for (Field field : GenericToString.getAllFields(obj.getClass())) {
        // Skip static fields, synthetic outer-class references ("this$...")
        // and "_persistence..." bookkeeping fields.
        if ( field != null && !java.lang.reflect.Modifier.isStatic(field.getModifiers())
                && !(field.getName().startsWith("this$") || field.getName().startsWith("_persistence")) ) {
            fieldsAdded = true;
            sb.append(field.getName()).append(GenericToString.equals);
            try {
                if ( field.getType().isPrimitive() || field.getType().isAssignableFrom(String.class) ) {
                    // Annotated fields are masked instead of printed.
                    if ( field.getAnnotation(OmitFromToString.class) != null ) {
                        sb.append(hidden_value);
                    }
                    else {
                        sb.append(FieldUtils.readField(obj, field.getName(), true));
                    }
                }
                else if ( field.getType().isArray() ) {
                    if ( !field.isAccessible() ) {
                        field.setAccessible(true);
                    }
                    // TODO: could be extended to pretty-print the array contents
                    Object fieldValue = field.get(obj);
                    if ( fieldValue != null ) {
                        sb.append(GenericToString.na_array);
                    }
                    else {
                        sb.append(GenericToString.null_array);
                    }
                }
                else {
                    addObjectInfo(sb, field, obj);
                }
            }
            catch (IllegalAccessException e) {
                LOGGER.error(e);
                sb.append(GenericToString.na_access);
            }
            sb.append(GenericToString.comma);
        }
    }
    if ( fieldsAdded ) {
        // remove last comma and space
        // NOTE(review): assumes the `comma` separator is exactly 2 chars — confirm.
        sb.delete(sb.length() - 2, sb.length());
    }
    sb.append(GenericToString.close_parameters);
    return sb.toString();
}
/**
 * Collect every declared field of {@code type} and all of its superclasses,
 * walking up the class hierarchy.
 *
 * @param type class whose declared (including inherited) fields are wanted
 * @return all declared fields, subclass fields first
 */
public static List<Field> getAllFields(Class<?> type) {
    List<Field> collected = new ArrayList<Field>();
    Class<?> current = type;
    while (current != null) {
        collected.addAll(Arrays.asList(current.getDeclaredFields()));
        current = current.getSuperclass();
    }
    return collected;
}
/**
 * Appends a description of a non-primitive, non-array field to the builder:
 * dates are formatted, other objects are only reported as present or null.
 *
 * @param sb             builder receiving the output
 * @param field          field being described
 * @param originalObject object the field value is read from
 */
private void addObjectInfo(StringBuilder sb, Field field, Object originalObject)
        throws IllegalArgumentException, IllegalAccessException {
    if ( field != null ) {
        if ( !field.isAccessible() ) {
            field.setAccessible(true);
        }
        if ( field.getType().isAssignableFrom(Date.class) ) {
            try {
                Object date = field.get(originalObject);
                if ( date != null ) {
                    // SDF is the shared date formatter declared elsewhere in this class.
                    sb.append(SDF.format(date));
                }
                else {
                    sb.append(GenericToString.null_date);
                }
                // Return accessibility?
            }
            catch (Exception e) {
                LOGGER.warn(e);
                sb.append(GenericToString.na_date);
            }
        }
        else {
            // TODO: could be extended to print e.g. the child object's details
            Object fieldValue = field.get(originalObject);
            if ( fieldValue != null ) {
                sb.append(GenericToString.na_non_primitive);
            }
            else {
                sb.append(GenericToString.null_non_primitive);
            }
        }
    }
}
/**
 * Annotation to omit a field from the generic toString output — e.g. for
 * information that should not be displayed. The field's value is printed
 * as 'xxxxx' instead.
 */
@Retention(RetentionPolicy.RUNTIME)
@Target(ElementType.FIELD)
public @interface OmitFromToString {
}
}
|
/**
 * Count the number of distinct category IDs in the given list.
 *
 * @param array $categoryIds list of category IDs (may contain duplicates)
 * @return int number of unique IDs; 0 for an empty list
 */
function countUniqueCategories($categoryIds) {
    // No categories at all means zero unique categories.
    if (empty($categoryIds)) {
        return 0;
    }
    // Deduplicate first, then count what remains.
    $uniqueIds = array_unique($categoryIds);
    return count($uniqueIds);
}
// Generated by script, don't edit it please.
import createSvgIcon from '../../createSvgIcon';
import CalendarCheckOSvg from '@rsuite/icon-font/lib/legacy/CalendarCheckO';
const CalendarCheckO = createSvgIcon({
as: CalendarCheckOSvg,
ariaLabel: 'calendar check o',
category: 'legacy',
displayName: 'CalendarCheckO'
});
export default CalendarCheckO;
|
package com.plus3.privilege.dao.mapper;
import com.plus3.privilege.dao.entity.PermissionOfRole;
import java.util.List;
/**
 * MyBatis mapper for the permission_of_role table. Most methods are MyBatis
 * Generator output; {@link #selectAll()} was added by hand.
 */
public interface PermissionOfRoleMapper {
    /**
     * This method was generated by MyBatis Generator.
     * This method corresponds to the database table permission_of_role
     *
     * @mbggenerated Mon Dec 18 14:27:29 CST 2017
     */
    int deleteByPrimaryKey(Integer id);
    /**
     * This method was generated by MyBatis Generator.
     * This method corresponds to the database table permission_of_role
     *
     * @mbggenerated Mon Dec 18 14:27:29 CST 2017
     */
    int insert(PermissionOfRole record);
    /**
     * This method was generated by MyBatis Generator.
     * This method corresponds to the database table permission_of_role
     *
     * @mbggenerated Mon Dec 18 14:27:29 CST 2017
     */
    int insertSelective(PermissionOfRole record);
    /**
     * This method was generated by MyBatis Generator.
     * This method corresponds to the database table permission_of_role
     *
     * @mbggenerated Mon Dec 18 14:27:29 CST 2017
     */
    PermissionOfRole selectByPrimaryKey(Integer id);
    /**
     * This method was generated by MyBatis Generator.
     * This method corresponds to the database table permission_of_role
     *
     * @mbggenerated Mon Dec 18 14:27:29 CST 2017
     */
    int updateByPrimaryKeySelective(PermissionOfRole record);
    /**
     * This method was generated by MyBatis Generator.
     * This method corresponds to the database table permission_of_role
     *
     * @mbggenerated Mon Dec 18 14:27:29 CST 2017
     */
    int updateByPrimaryKey(PermissionOfRole record);
    /**
     * Returns every row of permission_of_role. (Hand-written addition.)
     */
    List<PermissionOfRole> selectAll();
}
use std::path::Path;
/// Report whether the configuration file at `file_path` exists on disk.
fn check_config_file(file_path: &str) {
    let config_path = Path::new(file_path);
    // `Path::display()` renders any path safely; the original
    // `to_str().unwrap()` would panic on non-UTF-8 paths.
    if config_path.exists() {
        println!("{} already exists", config_path.display());
    } else {
        println!("{} does not exist", config_path.display());
    }
}
fn main() {
    // Check for the default settings file and report its presence.
    check_config_file("config/settings.conf");
}
def perimeter(edges):
    """Return the perimeter: the sum of all edge lengths in *edges*."""
    return sum(edges)
import React, { Component, PropTypes } from 'react';
import classNames from 'classnames';
import except from 'except';
import rowStyles from './Row.scss'
// Layout row: renders children in a div carrying the `ms-row` class.
class Row extends Component {
    render() {
        // Forward every prop except `className`, which is merged with `ms-row`.
        const other = except(this.props, ['className']);
        let rowClassName = classNames(this.props.className, 'ms-row');
        return(
            <div className={rowClassName} {...other}>{this.props.children}</div>
        );
    }
}
// No prop validation yet — populate when Row grows a real contract.
Row.propTypes = {
};
// No defaults needed; all props pass straight through.
Row.defaultProps = {
};
export default Row;
|
## Schema
-- Pet-store schema: buyers and the pets they purchased.
CREATE DATABASE pets_db;
USE pets_db;
-- People who buy pets.
CREATE TABLE buyers(
  id int NOT NULL AUTO_INCREMENT,
  buyer_name varchar(255) NOT NULL,
  PRIMARY KEY (id)
);
-- Pets for sale; each row references the buyer who purchased it.
CREATE TABLE pets(
  id int NOT NULL AUTO_INCREMENT,
  animal_breed varchar(255) NOT NULL,
  animal_name varchar(255) NOT NULL,
  price int NOT NULL,
  buyer_id int NOT NULL,
  PRIMARY KEY (id),
  FOREIGN KEY (buyer_id) REFERENCES buyers(id)
);
|
<reponame>neelsomani/literature-server
import React, { Component } from 'react';
// Banner that announces the most recent claim and auto-hides after 15 s.
export default class ClaimDisplay extends Component {
    constructor(props) {
        super(props);
        // `show` toggles banner visibility; `lastHalfSuit` remembers which
        // claim is currently displayed so new claims can be detected.
        this.state = {
            show: true,
            lastHalfSuit: this.props.halfSuit
        };
    }

    componentDidUpdate() {
        // On a new claim: show it and schedule an auto-hide, cancelling the
        // previous claim's hide timer. The inequality guard prevents an
        // infinite setState/update loop.
        if (this.props.halfSuit !== this.state.lastHalfSuit) {
            const lastTimeout =
                setTimeout(() => this.setState({ show: false }), 15 * 1000);
            // NOTE(review): state.lastTimeout is never seeded in the
            // constructor; clearTimeout(undefined) is a harmless no-op though.
            clearTimeout(this.state.lastTimeout);
            this.setState({
                show: true,
                lastHalfSuit: this.props.halfSuit,
                lastTimeout
            });
        }
    }

    render() {
        const success = (this.props.success && 'Success') || 'Failure';
        // Render the claim text only once a claim exists and while visible.
        return <div className='ClaimDisplay'>
            {(this.props.halfSuit !== undefined)
                && this.state.show
                && (
                    <div className='ClaimText'>
                        <a href='#!'
                            className='ShowFullClaimLink'
                            onClick={() => this.props.showFullClaim()}>{success}</a>:
                        {' '}{this.props.playerNames[this.props.claimBy.toString()]} claims
                        {' ' + this.props.halfSuit.half + ' ' + this.props.halfSuit.suit}
                    </div>
                )}
        </div>
    }
}
|
<gh_stars>0
var md5 = require('MD5'),
User = require('./userModel'),
loginUser = require('./loginUser');
// Creates a new user
module.exports = function createUser (socket, data) {
// Hash the password
data.password = <PASSWORD>(data.password);
// Create a new user in MongoDB
var user = new User(data);
// Save the MongoDB Model
user.save().then(function (data) {
return loginUser(socket, data);
});
};
|
def isPalindrome(inputString):
    """Return True if *inputString* reads the same forwards and backwards.

    Empty and single-character strings are palindromes by definition.
    """
    return inputString == inputString[::-1]
def generate_send_to_spark_command(variable, var_name):
    """Build the ``%%send_to_spark`` magic command for *variable*.

    Strings are sent with type ``str``; every other value is assumed to be a
    pandas object and sent with type ``pandas``.
    """
    kind = "str" if isinstance(variable, str) else "pandas"
    return f"%%send_to_spark -i {var_name} -t {kind} -n {var_name}"
import React from "react";
import API from "../../utils/API";
import "./style.css";
// Card showing one Google Books result with a Save or Delete action button.
function BookCard(props){
    // Save or delete the book, then disable and relabel the button so the
    // same card cannot be submitted twice.
    // NOTE(review): mutating the DOM via getElementById bypasses React state —
    // consider lifting the disabled/label values into component state.
    const handleCardButton = (event) => {
        if(props.type === "save"){
            API.saveBook(props).then(res => {
                document.getElementById(props.googleId).disabled = true;
                document.getElementById(props.googleId).textContent = "Saved";
            });
        }else if(props.type === "delete"){
            API.deleteBook(props._id).then(res => {
                document.getElementById(props.googleId).disabled = true;
                document.getElementById(props.googleId).textContent = "Deleted";
            });
        }
    };

    return(
        <div className="col s12 m7">
            <div className="header-container">
                <h2 className="header">{props.title}</h2>
                {/* Pre-disabled on the save view for already-saved books. */}
                <button
                    className="btn save-btn"
                    id={props.googleId}
                    onClick={(event)=> handleCardButton(event)}
                    disabled={props.saved === true && props.type === "save" ? true : false}
                >
                    {props.type === "save" ? "Save" : "Delete"}
                </button>
            </div>
            <div className="card horizontal">
                <div className="card-image">
                    <img src={props.image} alt="book cover"></img>
                </div>
                {/* <span className="activator">
                    <i className="material-icons right">more_vert</i>
                </span> */}
                <div className="card-stacked">
                    <div className="card-content">
                        <span className="card-title grey-text text-darken-4">{props.title}</span>
                        <ul>
                            {props.authors.map((a,i) => {
                                return(
                                    <li key={i}>{a}</li>
                                );
                            })}
                        </ul>
                    </div>
                    <div className="card-content">
                        <p>{props.description}</p>
                    </div>
                    <div className="card-action">
                        <a href={props.link} target="_blank" rel="noopener noreferrer">View at Google Books</a>
                    </div>
                </div>
            </div>
        </div>
    );
};
export default BookCard;
<gh_stars>0
// Re-export the queue utility as this package's public entry point.
module.exports = require('./utils/queue');
|
<gh_stars>0
// True when n contains a zero followed by a non-zero digit, i.e. the number's
// "roundness" (count of trailing zeros) can be increased.
function increaseNumberRoundness(n) {
  return /0[1-9]/.test(n);
}
////////////////////////////////////////
// Scan the digits from least to most significant: once a non-zero digit has
// been seen, any later zero is an interior zero, so roundness can increase.
function increaseNumberRoundness(n) {
  const parts = n
    .toString()
    .split("")
    .reverse();
  let state = false;
  for (let part of parts) {
    if (part !== "0") state = true;
    else if (state && part === "0") {
      return true;
    }
  }
  return false;
}
////////////////////////////////////////
// Splitting on "0" yields more than one non-empty piece exactly when at least
// one zero sits between non-zero digits.
function increaseNumberRoundness(n) {
  return (
    String(n)
      .split("0")
      .filter(el => el != "").length > 1
  );
}
////////////////////////////////////////
// Same idea as the variant above, but returns a truthy count (pieces - 1)
// instead of a strict boolean.
function increaseNumberRoundness(n) {
  return ("" + n).split("0").filter(x => x != "").length - 1;
}
|
// Redux action type identifiers.
export const SET_CARDS = 'SET_CARDS';
export const DELETE_CARD = 'DELETE_CARD';
export const SET_CURRENT_USER = 'SET_CURRENT_USER';
export const SET_GROUPS = 'SET_GROUPS';
|
#!/bin/bash
# Run the gretl test suite for this project and report success or failure.
set -e

DIR=$(dirname "$(realpath "$0")") # locate folder where this sh-script is located in
SCRIPT="./tests/run_tests.inp"
PROJECT="parallel_specs"

cd "$DIR"
echo "Switched to ${DIR}"

# BUG FIX: with `set -e`, a failing gretlcli aborted the script before the
# old `if [ $? -eq 0 ]` check, making the failure branch unreachable.
# Branching on the command itself works correctly under `set -e`.
if gretlcli -b -e -q "${SCRIPT}"
then
    echo "Success: All tests passed for '${PROJECT}'."
    exit 0
else
    echo "Failure: Tests not passed for '${PROJECT}'." >&2
    exit 1
fi
|
<reponame>saltstack/rend<gh_stars>1-10
# Package version stamp.
version = '4.1'
|
<filename>back-end/hub-core/src/test/java/io/apicurio/hub/core/editing/KafkaEditingSessionTest.java
/*
* Copyright 2020 Red Hat
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.apicurio.hub.core.editing;
import io.apicurio.hub.core.editing.events.EventAction;
import io.apicurio.hub.core.editing.kafka.JsonSerde;
import io.apicurio.hub.core.editing.ops.JoinLeaveOperation;
import io.apicurio.hub.core.editing.ops.OperationFactory;
import org.junit.Assert;
import org.junit.Test;
import java.util.Collections;
import java.util.List;
import java.util.UUID;
/**
 * Round-trip (de)serialization tests for {@code JsonSerde}'s handling of the
 * {@code EventAction} payloads exchanged by the Kafka editing session.
 *
 * @author <NAME>
 */
public class KafkaEditingSessionTest {

    JsonSerde serde = new JsonSerde();

    @Test
    public void testSerde() {
        // Exercise every EventAction variant through the serde round trip.
        String uuid = UUID.randomUUID().toString();
        testSerde(EventAction.close(uuid));
        testSerde(EventAction.rollup(uuid));
        JoinLeaveOperation join = OperationFactory.join("alesj", "foobar");
        testSerde(EventAction.sendToOthers(join, uuid));
        testSerde(EventAction.sendToList(uuid));
        testSerde(EventAction.sendToExecute(Collections.singletonList(join), uuid));
    }

    // Serialize, deserialize, and verify the copy matches the original field
    // by field, including the optional JoinLeaveOperation list.
    private void testSerde(EventAction original) {
        byte[] bytes = serde.serialize(null, original);
        EventAction copy = serde.deserialize(null, bytes);
        Assert.assertEquals(original.getType(), copy.getType());
        Assert.assertEquals(original.getId(), copy.getId());
        Assert.assertArrayEquals(original.getOp(), copy.getOp());
        List<JoinLeaveOperation> originalOps = original.getOps();
        List<JoinLeaveOperation> copyOps = copy.getOps();
        if (originalOps != null) {
            Assert.assertNotNull(copyOps);
            Assert.assertEquals(originalOps.size(), copyOps.size());
            for (int i = 0; i < originalOps.size(); i++) {
                JoinLeaveOperation oJLO = originalOps.get(i);
                JoinLeaveOperation cJLO = copyOps.get(i);
                Assert.assertEquals(oJLO.getId(), cJLO.getId());
                Assert.assertEquals(oJLO.getUser(), cJLO.getUser());
            }
        }
    }
}
|
package com.prt2121.bees;
import android.support.v4.app.Fragment;
import android.os.Bundle;
import android.support.v7.widget.LinearLayoutManager;
import android.support.v7.widget.RecyclerView;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import java.util.ArrayList;
import java.util.List;
// Fragment displaying the list of tickets in a vertically scrolling RecyclerView.
public class TicketsFragment extends Fragment {

    public TicketsFragment() {
        // Required empty public constructor for the Fragment framework.
    }

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
            Bundle savedInstanceState) {
        View view = inflater.inflate(R.layout.fragment_tickets, container, false);
        RecyclerView ticketsRecyclerView = (RecyclerView) view.findViewById(R.id.ticketsRecyclerView);
        LinearLayoutManager layoutManager = new LinearLayoutManager(getActivity());
        ticketsRecyclerView.setLayoutManager(layoutManager);
        // TODO: replace this hard-coded placeholder data with real tickets.
        List<String> fakeDataSet = new ArrayList<>();
        fakeDataSet.add("test 1");
        fakeDataSet.add("test 2");
        fakeDataSet.add("test 3");
        TicketAdapter mAdapter = new TicketAdapter(fakeDataSet);
        ticketsRecyclerView.setAdapter(mAdapter);
        return view;
    }
}
|
#!/bin/bash
# Launch one benchmark run per GPU-count setting, in parallel, then wait.
# Modernized: $(...) instead of backticks, and quoted expansions.
count=$(nvidia-smi --query-gpu=name --format=csv,noheader | wc -l)
echo 'start'
# Start from the full GPU count and work down to a single GPU.
for (( c=count; c>=1; c-- ))
do
    python3 benchmark_models.py -g "$c" &
done
wait
echo 'end'
|
#!/bin/bash
# Repeatedly invoke the deployed Lambda, with an inline payload selected by
# the optional event-type argument, or with the bundled event.json otherwise.
set -eo pipefail

# Resolve the deployed function's physical name from the CloudFormation stack.
FUNCTION=$(aws cloudformation describe-stack-resource --stack-name java-basic --logical-resource-id function --query 'StackResourceDetail.PhysicalResourceId' --output text)

echo "dollar 1 is $1"
echo "function is $FUNCTION"

# BUG FIX: the echo lines above used to sit between `if [ $1 ]` and `then`,
# making the condition the exit status of the last echo (always true) and the
# `[ $1 ]` result dead code. Test the argument directly, quoted.
if [ -n "$1" ]
then
  case $1 in
    string)
      PAYLOAD='"MYSTRING"'
      ;;
    int | integer)
      PAYLOAD=12345
      ;;
    list)
      PAYLOAD='[24,25,26]'
      ;;
    divide)
      PAYLOAD='[235241,17]'
      ;;
    *)
      echo -n "Unknown event type"
      ;;
  esac
fi

# Invoke every 2 seconds and print the response.
while true; do
  if [ -n "$PAYLOAD" ]
  then
    aws lambda invoke --cli-binary-format raw-in-base64-out --function-name "$FUNCTION" --payload "$PAYLOAD" out.json
  else
    aws lambda invoke --cli-binary-format raw-in-base64-out --function-name "$FUNCTION" --payload file://event.json out.json
  fi
  cat out.json
  echo ""
  sleep 2
done
|
import chalk from 'chalk'
import * as path from 'path'
import * as typescript from 'typescript'
import webpack from 'webpack'
import { formatWebpackMessages } from '../lib/formatWebpackMessages'
import { paths } from '../lib/paths'
import { IS_CI, RuntimeOptions } from '../util/env'
import { diffFileSize, getBundleSize } from './util/fileSizeReporter'
import { getWebpackConfig } from './util/getWebpackConfig'
const prodBundlePath = path.join(paths.appBuild, paths.prodBundle)
// Measured before compiling so the size delta can be reported afterwards.
const sizeBeforeBuild = getBundleSize(prodBundlePath)

// Run a production webpack compile. Resolves with the stats plus any
// warnings; rejects on compile errors, and on warnings in CI unless the
// --bypass-ci-warnings option was given.
const build = (): Promise<{ stats: webpack.Stats; warnings: string[] }> => {
  console.log(chalk.cyan('Creating an optimized production build...'))
  console.log(chalk.green('Using TypeScript v' + typescript.version))
  console.log()

  const compiler: webpack.Compiler = webpack(getWebpackConfig('production'))
  return new Promise((resolve, reject) => {
    compiler.run((err, stats) => {
      if (err) {
        return reject(err)
      }
      const messages = formatWebpackMessages(stats!.toJson())
      if (messages.errors.length) {
        // Keep only the first error to reduce noise, unless collapsing
        // was explicitly disabled.
        if (messages.errors.length > 1 && !RuntimeOptions.noCollapse) {
          messages.errors.length = 1
        }
        return reject(new Error(messages.errors.join('\n\n')))
      }
      if (IS_CI && messages.warnings.length) {
        if (RuntimeOptions.bypassCiWarnings) {
          console.log(
            chalk.yellow(
              '\nBypassing warnings as CI errors due to --bypass-ci-warnings option.\n'
            )
          )
        } else {
          console.log(
            chalk.yellow(
              '\nTreating warnings as errors because process.env.CI = true.\n' +
                'Most CI servers set it automatically.\n'
            )
          )
          return reject(new Error(messages.warnings.join('\n\n')))
        }
      }
      return resolve({
        stats: stats!,
        warnings: messages.warnings,
      })
    })
  })
}
// Kick off the build: print warnings on success, report the bundle size
// delta, and exit non-zero on any failure.
build()
  .then(
    ({ warnings }) => {
      if (warnings.length) {
        console.log(chalk.yellow('Compiled with warnings.\n'))
        console.log(warnings.join('\n\n'))
        console.log(
          '\nSearch for the ' +
            chalk.cyan('keywords') +
            ' to learn more about each warning.'
        )
        console.log(
          'To ignore, add ' +
            chalk.yellow('// eslint-disable-next-line') +
            ' to the line before.\n'
        )
      }
    },
    (err) => {
      console.log(chalk.red('Failed to compile.\n'))
      console.log(err.message)
      console.log()
      process.exit(1)
    }
  )
  .then(() => {
    // Compare against the size captured before the build started.
    const sizeAfterBuild = getBundleSize(prodBundlePath)
    console.log()
    console.log(chalk.greenBright('Successfully built bundle.prod.js!'))
    console.log(
      'Bundle size: ' + diffFileSize(sizeBeforeBuild!, sizeAfterBuild!)
    )
    console.log()
  })
  .catch((err) => {
    if (err && err.message) {
      console.log(err.message)
    }
    process.exit(1)
  })
|
<filename>lib/chord.js
'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _createClass = function () { function defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, descriptor.key, descriptor); } } return function (Constructor, protoProps, staticProps) { if (protoProps) defineProperties(Constructor.prototype, protoProps); if (staticProps) defineProperties(Constructor, staticProps); return Constructor; }; }();
var _base = require('./base');
var _base2 = _interopRequireDefault(_base);
var _constants = require('./constants');
var _constants2 = _interopRequireDefault(_constants);
var _immutable = require('immutable');
var _immutable2 = _interopRequireDefault(_immutable);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } }
function _possibleConstructorReturn(self, call) { if (!self) { throw new ReferenceError("this hasn't been initialised - super() hasn't been called"); } return call && (typeof call === "object" || typeof call === "function") ? call : self; }
function _inherits(subClass, superClass) { if (typeof superClass !== "function" && superClass !== null) { throw new TypeError("Super expression must either be null or a function, not " + typeof superClass); } subClass.prototype = Object.create(superClass && superClass.prototype, { constructor: { value: subClass, enumerable: false, writable: true, configurable: true } }); if (superClass) Object.setPrototypeOf ? Object.setPrototypeOf(subClass, superClass) : subClass.__proto__ = superClass; }
var Chord = function (_Base) {
_inherits(Chord, _Base);
function Chord(fns) {
_classCallCheck(this, Chord);
var _this = _possibleConstructorReturn(this, (Chord.__proto__ || Object.getPrototypeOf(Chord)).call(this));
_this.__fns = Array.prototype.slice.call(fns);
_this.__doneFns = new _immutable2.default.Set();
_this.__currentFn = -1;
_this.__changeEvent = 'chord-change';
return _this;
}
_createClass(Chord, [{
key: '__doStep',
value: function __doStep(step) {
var _this2 = this;
this.__emit(_constants2.default.StepStatus.STARTED, null, step);
var stepNum = this.__currentFn;
if (step instanceof _base2.default) {
step.on(_constants2.default.States.FINISHED, function () {
_this2.finishStep(step, stepNum);
});
step.on(_constants2.default.States.FAILED, function () {
var error = step.getState('error');
if (error == null) {
error = step.getState('errors', 'step ' + step.name + ' failed');
}
_this2.finishStep(step, stepNum, error);
});
// Going to the next step is safer than calling start
// because the Flow could have already started.
step.gotoNextStep();
} else {
try {
step(this, function () {
return _this2.finishStep(step, stepNum);
}, function (e) {
return _this2.finishStep(step, stepNum, e);
});
} catch (e) {
this.finishStep(step, stepNum, e);
}
}
this.gotoNextStep();
}
}, {
key: '__handleError',
value: function __handleError(step, error) {
var errors = this.getState('errors', new _immutable2.default.List());
errors = errors.push(error);
this.setState({ errors: errors });
this.failStep(step);
}
}, {
key: '__gotoNextStep',
value: function __gotoNextStep() {
if (this.hasNextStep()) {
this.__currentFn = this.__currentFn + 1;
}
}
}, {
key: 'addStep',
value: function addStep(step) {
this.__fns.push(step);
}
}, {
key: 'failStep',
value: function failStep(step) {
this.__emit(_constants2.default.StepStatus.FAILED, null, step);
if (this.isFinished() && this.getState('errors').size === this.__fns.length) {
this.__readyState = this.__readyState.add(_constants2.default.States.FAILED);
this.__emit(_constants2.default.States.FAILED, _constants2.default.States.FAILED, step);
}
}
}, {
key: 'finishStep',
value: function finishStep(step, stepNum, opt_error) {
var _this3 = this;
if (this.__fns[stepNum] !== step) {
throw 'Got incorrect step num \'' + stepNum + '\' for step';
}
this.__doneFns = this.__doneFns.add(stepNum);
if (opt_error != null) {
this.__handleError(step, opt_error);
} else {
this.__emit(_constants2.default.StepStatus.FINISHED, null, step);
}
var isFinished = this.__fns.every(function (_, i) {
return _this3.__doneFns.has(i);
});
if (isFinished) {
this.finish();
}
}
}, {
key: 'getCurrentFn',
value: function getCurrentFn() {
if (this.__currentFn > -1 && this.__currentFn < this.__fns.length) {
return this.__fns[this.__currentFn];
}
return null;
}
}, {
key: 'gotoNextStep',
value: function gotoNextStep() {
if (!this.isStarted()) {
this.start();
return;
}
if (this.hasNextStep()) {
this.__gotoNextStep();
// The naming here really sucks.
// With a Chord there is no concept of a "current function" because
// functions could be async. This needs a better name.
var step = this.getCurrentFn();
this.__doStep(step);
}
}
}, {
key: 'hasNextStep',
value: function hasNextStep() {
return !this.isFinished() && !this.isLastStep() && this.__currentFn < this.__fns.length;
}
}, {
key: 'isFinished',
value: function isFinished() {
var _this4 = this;
var isFinished = this.__fns.every(function (_, i) {
return _this4.__doneFns.has(i);
});
return isFinished;
}
}, {
key: 'isLastStep',
value: function isLastStep() {
return this.__currentFn === this.__fns.length - 1;
}
}]);
return Chord;
}(_base2.default);
exports.default = Chord; |
class MedicalCodeManager:
    """Holds ICD-9-CM / ICD-10 code lists and an ICD-9 -> comorbidity mapping."""

    def __init__(self):
        # BUG FIX: the original used literal `[...]` / `{...}` placeholders.
        # `[...]` builds a list containing Ellipsis, and `{...}` builds a *set*
        # (not a dict), so `map_comorbidity` crashed with AttributeError on
        # `.get()`. Use proper empty containers; populate with real data.
        self.icd9cm = []  # list of ICD-9-CM codes (TODO: load real data)
        self.comorbidity_mappers = {}  # maps ICD-9-CM code -> comorbidity
        self.icd10 = []  # list of ICD-10 codes (TODO: load real data)

    def get_icd9cm_codes(self):
        """Return the list of ICD-9-CM codes."""
        return self.icd9cm

    def get_icd10_codes(self):
        """Return the list of ICD-10 codes."""
        return self.icd10

    def map_comorbidity(self, icd9_code):
        """Return the comorbidity mapped to *icd9_code*, or a fallback message."""
        return self.comorbidity_mappers.get(icd9_code, "Comorbidity mapping not available")
<filename>snail/src/main/java/com/acgist/snail/context/RecycleContext.java
package com.acgist.snail.context;
import java.util.function.Function;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import com.acgist.snail.IContext;
import com.acgist.snail.context.SystemContext.SystemType;
import com.acgist.snail.context.recycle.Recycle;
import com.acgist.snail.context.recycle.windows.WindowsRecycle;
import com.acgist.snail.utils.StringUtils;
/**
 * <p>Recycle-bin context: creates platform-specific recycle-bin handlers and
 * deletes files through them.</p>
 *
 * @author acgist
 */
public final class RecycleContext implements IContext {

    private static final Logger LOGGER = LoggerFactory.getLogger(RecycleContext.class);

    /**
     * <p>Recycle-bin factory: builds a platform-specific {@code Recycle} for a path.</p>
     */
    private static final Function<String, Recycle> BUILDER;

    static {
        // Select an implementation for the local OS; only Windows is supported,
        // otherwise the factory stays null and recycling is disabled.
        final SystemType systemType = SystemType.local();
        LOGGER.debug("初始化回收站:{}", systemType);
        if(systemType == SystemType.WINDOWS) {
            BUILDER = WindowsRecycle::new;
        } else {
            LOGGER.warn("不支持回收站:{}", systemType);
            BUILDER = null;
        }
    }

    /**
     * <p>Utility class: instantiation is forbidden.</p>
     */
    private RecycleContext() {
    }

    /**
     * <p>Create a recycle-bin handle for a file path.</p>
     *
     * @param path file path
     *
     * @return recycle-bin handle, or {@code null} when unsupported on this OS
     */
    public static final Recycle newInstance(String path) {
        if(BUILDER == null) {
            return null;
        }
        return BUILDER.apply(path);
    }

    /**
     * <p>Delete a file via the recycle bin.</p>
     *
     * @param filePath file path
     *
     * @return whether the deletion succeeded
     */
    public static final boolean recycle(final String filePath) {
        if(StringUtils.isEmpty(filePath)) {
            LOGGER.warn("删除文件路径错误:{}", filePath);
            return false;
        }
        final var recycle = RecycleContext.newInstance(filePath);
        if(recycle == null) {
            // Recycle bin not supported on this platform.
            return false;
        }
        return recycle.delete();
    }
}
|
# Update and reinstall dependencies for each environment checkout.
# Refactored from three copy-pasted blocks into one loop.
cd ../
for site in dev.juiceyourskills.com test.juiceyourskills.com www.juiceyourskills.com
do
    cd "$site"
    git pull
    npm install
    cd ../
done
# Leave the shell in the dev checkout, as the original script did.
cd dev.juiceyourskills.com
|
<reponame>huangbin082/Bin<filename>Algorithm/src/main/java/com/leetcode/Solution_451.java
package com.leetcode;
import java.util.*;
// LeetCode 451 — Sort Characters By Frequency.
public class Solution_451 {

    /**
     * Returns s rebuilt so that more frequent characters appear first:
     * counts each character, then drains a max-heap ordered by count.
     */
    public String frequencySort(String s) {
        Map<Character, Integer> map = new HashMap<>();
        for (int i = 0; i < s.length(); i++) {
            map.put(s.charAt(i), map.getOrDefault(s.charAt(i), 0) + 1);
        }
        // Max-heap: the entry with the highest count is polled first.
        PriorityQueue<Num> queue = new PriorityQueue<>(Comparator.comparingInt(n -> -1 * n.count));
        for (Map.Entry<Character, Integer> entry : map.entrySet()) {
            queue.add(new Num(entry.getKey(), entry.getValue()));
        }
        StringBuilder sb = new StringBuilder();
        while (!queue.isEmpty()){
            Num num = queue.poll();
            for (int i = 0; i < num.count; i++) {
                sb.append(num.character);
            }
        }
        return sb.toString();
    }

    // Simple (character, count) pair used as the priority-queue element.
    public class Num {
        Character character;
        Integer count;

        public Num(Character character, Integer count) {
            this.character = character;
            this.count = count;
        }
    }
}
|
#!/bin/bash
# Train a ciyi model with allennlp.
# Usage: <script> PROJECT CONFIG_NAME [MODEL_NAME]
WORK_DIR=$(readlink -f .)
DATA_DIR=${WORK_DIR}/data
PROJECT=$1
CONFIG_NAME=$2
MODEL_NAME=$3
PROJECT_DIR=${WORK_DIR}/experiments/$PROJECT
OUTPUT_DIR=${DATA_DIR}/output/$PROJECT
ANNOTATION_DIR=${DATA_DIR}/annotations/$PROJECT
# Default to bert-base-uncased when no model name was given.
if [ -z "$MODEL_NAME" ]; then
  MODEL_NAME=bert-base-uncased
fi
# The config jsonnet reads ANNOTATION_DIR and MODEL_NAME from the environment.
ANNOTATION_DIR="$ANNOTATION_DIR" MODEL_NAME="$MODEL_NAME" allennlp train \
  "$PROJECT_DIR"/"${CONFIG_NAME}".jsonnet \
  -s "$OUTPUT_DIR"/"${CONFIG_NAME}" \
  --include-package ciyi
#ANNOTATION_DIR="$ANNOTATION_DIR" MODEL_NAME="$MODEL_NAME" allennlp evaluate \
#  "$OUTPUT_DIR"/bert_layer-"${layer}"/bilm/model.tar.gz \
#  "$ANNOTATION_DIR"/test.jsonl \
#  --output-file "$OUTPUT_DIR"/bert_layer-"${layer}"/bilm/test_results.json \
#  --include-package ciyi
#
#ANNOTATION_DIR="$ANNOTATION_DIR" MODEL_NAME="$MODEL_NAME" allennlp predict \
#  "$OUTPUT_DIR"/bert_layer-"${layer}"/bilm/model.tar.gz \
#  "$ANNOTATION_DIR"/test.jsonl \
#  --output-file "$OUTPUT_DIR"/bert_layer-"${layer}"/bilm/test.predictions \
#  --include-package ciyi --predictor span_classifier
|
<reponame>EIDSS/EIDSS-Legacy
package com.bv.eidss;
import java.util.List;
import com.bv.eidss.model.GisBaseReference;
import android.app.Activity;
import android.view.View;
import android.view.ViewGroup;
import android.widget.BaseAdapter;
import android.widget.TextView;
// ListView adapter rendering GisBaseReference lookup items by name.
public class GisBaseReferenceAdapter extends BaseAdapter {

    private List<GisBaseReference> items;
    private Activity context;

    public GisBaseReferenceAdapter(Activity context, List<GisBaseReference> items)
    {
        super();
        this.context = context;
        this.items = items;
    }

    @Override
    public int getCount() {
        return items.size();
    }

    @Override
    public Object getItem(int position) {
        return items.get(position);
    }

    @Override
    public long getItemId(int position) {
        // Positions double as stable-enough IDs for this static list.
        return position;
    }

    @Override
    public View getView(int position, View convertView, ViewGroup parent) {
        GisBaseReference item = (GisBaseReference) items.get(position);
        View view;
        if (convertView != null) {
            // Reuse the recycled row instead of inflating a new one.
            view = convertView;
        } else {
            // FIX: inflate with the parent (attachToRoot=false) so the row's
            // layout params are resolved; inflating with null drops them.
            view = context.getLayoutInflater().inflate(R.layout.lookup_layout, parent, false);
        }
        TextView tv = (TextView) view.findViewById(R.id.LookupText);
        tv.setText(item.name);
        return view;
    }
}
|
# Function to evaluate expression in postfix notation
def postfix_evaluator(expr):
    """Evaluate a postfix (RPN) expression given as a sequence of tokens.

    Operand tokens (including negatives such as '-3' and multi-digit numbers)
    are pushed on a stack; an operator pops two operands, applies itself, and
    pushes the result. Returns the final value truncated to int.

    Fixes over the original:
    - no ``eval()`` on input (arbitrary code execution risk) — uses an
      operator lookup table instead;
    - float results (e.g. from '/') no longer crash the final ``int(...)``
      conversion (``int("3.0")`` raised ValueError);
    - a bare '-' token is now treated as the subtraction operator, not pushed
      as an operand.
    """
    import operator

    # Token -> function table; '/' uses truediv to match eval()'s semantics.
    ops = {
        '+': operator.add,
        '-': operator.sub,
        '*': operator.mul,
        '/': operator.truediv,
    }
    stack = []
    for token in expr:
        if token not in ops:
            # Operand: keep it numeric on the stack.
            stack.append(float(token) if '.' in token else int(token))
        else:
            right = stack.pop()
            left = stack.pop()
            stack.append(ops[token](left, right))
    return int(stack.pop())
class FeatureToggle:
    """Simple feature-flag lookup over a name -> state mapping."""

    # Sentinel state marking a feature as switched off.
    # BUG FIX: the original compared against an undefined global ``DISABLED``,
    # which raised NameError for every configured feature.
    # NOTE(review): "disabled" is the assumed sentinel value — confirm against
    # how the toggles mapping is populated.
    DISABLED = "disabled"

    def __init__(self, toggles):
        # toggles: mapping of feature name -> state value.
        self.toggles = toggles

    def is_enabled(self, feature_name):
        """Return True if *feature_name* is configured and not disabled.

        Unknown features are treated as disabled, matching the original's
        ``return False`` fallback.
        """
        return self.toggles.get(feature_name, self.DISABLED) != self.DISABLED
#!/bin/bash
# Link MALA and the MALA data repo by writing a data_repo_path.py module into
# this folder and copying it to the test and examples folders.
echo "Linking MALA and MALA data repo."

# Get the paths we need for setup.
script_path="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
# Two levels up from this script is the MALA base directory.
# (Direct parameter expansion; the original's `$( echo ... )` was redundant.)
mala_base_path=${script_path%/*}
mala_base_path=${mala_base_path%/*}
examples_path=$mala_base_path/examples
test_path=$mala_base_path/test
pythonfile=data_repo_path.py

# Ask the user for the path to the repo if none was given on the command line.
if [ -n "$1" ]
then
    data_repo_path=$1
else
    echo "Please input the full path to the MALA data repo."
    # -r keeps backslashes in the typed path literal.
    read -r data_repo_path
fi

# Append a / if we have to.
lastcharacter="${data_repo_path: -1}"
if [ "$lastcharacter" != "/" ]
then
    data_repo_path=$data_repo_path/
fi

# Append a / if we have to.
lastcharacter="${mala_base_path: -1}"
if [ "$lastcharacter" != "/" ]
then
    mala_base_path=$mala_base_path/
fi

# Write the python file (recreated from scratch on every run).
rm -f "${script_path}/${pythonfile}"
touch "${script_path}/${pythonfile}"
echo "data_repo_path = \"${data_repo_path}\"" >> "${script_path}/${pythonfile}"

# Copy the file to the test and examples folders.
cp "${script_path}/${pythonfile}" "${test_path}"
cp "${script_path}/${pythonfile}" "${examples_path}"
echo "Linking done!"
|
/**
* ## popularPhotos.js
*
* # Display Popular Feed Photos as a grid
*/
'use strict'
/**
* ## Imports
*
*/
import React, {PropTypes} from 'react'
import {
StyleSheet,
View,
Dimensions,
Image
} from 'react-native'
import _ from 'lodash'
/**
* ## Styles
*/
const styles = StyleSheet.create({
  container: {
    marginTop: -1
  },
  // Grey box shown while the photo feed has not arrived yet.
  placeholder: {
    backgroundColor: '#DDDDDD'
  },
  row: {
    flexDirection: 'row'
  },
  item: {
    marginRight:2,
    marginTop:2
  }
})
const windowWidth = Dimensions.get('window').width
// Grid of popular-feed photos, three square images per row.
const popularPhotosGrid = React.createClass({
  // NOTE(review): the `photos` and `props` entries look unused here — confirm
  // they can be removed from propTypes.
  propTypes: {
    popularPhotos: PropTypes.array,
    photos: PropTypes.array,
    props: PropTypes.object,
  },
  /**
   * ### Render
   *
   * @return {Component} Display the popular feed photos grid
   */
  render() {
    const { popularPhotos } = this.props
    const size = {width: windowWidth, height: windowWidth}
    // Full-width placeholder rendered until the first photo arrives.
    const placeholder = [0].map(
      (photo, i) => {
        return (
          <View key={i} style={[styles.placeholder, size]}>
          </View>
        )
      }
    )
    const photosRecieved = Boolean(popularPhotos[0])
    const IMAGES_PER_ROW = 3
    // Each cell is a square sized so IMAGES_PER_ROW fit the screen width.
    const calculatedSize = function(){
      const size = windowWidth / IMAGES_PER_ROW
      return {width: size, height: size}
    }
    const renderRow = function(images) {
      return images.map((uri,i) =>{
        return(
          <Image key={i} style={[styles.item, calculatedSize()]} source={{uri: uri}} />
        )
      })
    }
    // Chunk the flat photo list into rows of IMAGES_PER_ROW images each.
    const renderImagesInGroupsOf = function(count) {
      return _.chunk(popularPhotos, IMAGES_PER_ROW).map((imagesForRow, i) => {
        return (
          <View style={styles.row} key={i}>
            {renderRow(imagesForRow)}
          </View>
        )
      })
    }
    let images = renderImagesInGroupsOf(3)
    return (
      <View style={styles.container}>
        {photosRecieved ? images : placeholder}
      </View>
    )
  }
})
module.exports = popularPhotosGrid
|
<reponame>mothguib/pytrol<gh_stars>0
# -*- coding: utf-8 -*-
from pytrol.model.action.Action import Action
from pytrol.model.action.Actions import Actions
class SendingMessageAction(Action):
    """Action representing an agent sending a message."""

    def __init__(self, _message: str, _agt_id: int):
        r"""
        Args:
            _message (str): content of the message being sent
            _agt_id (int): identifier of the agent involved in the exchange
                (presumably the recipient — confirm against callers)
        """
        Action.__init__(self, "sending_message", Actions.Sending_message)
        self.message = _message
        self.agt_id = _agt_id
|
use std::error::Error;
struct Context;

impl Context {
    /// Look up the stored password for `name`.
    ///
    /// Demo implementation: hardcoded passwords for known names, an error
    /// for everyone else.
    fn find_exact(&self, name: &str) -> Result<String, Box<dyn Error>> {
        match name {
            "Alice" => Ok("password123".to_string()),
            "Bob" => Ok("securepass".to_string()),
            _ => Err("Password not found".into()),
        }
    }
}
/// Print every password resolvable from `names`, one per line, unless
/// `tree` output is requested. Names without a stored password are
/// silently skipped.
fn manage_passwords(names: Vec<&str>, ctx: &Context, tree: bool) -> Result<(), Box<dyn Error>> {
    // Resolve each name, dropping lookups that fail.
    let mut found = Vec::new();
    for candidate in &names {
        if let Ok(secret) = ctx.find_exact(candidate) {
            found.push(secret);
        }
    }
    if !tree {
        for secret in &found {
            println!("{}", secret);
        }
    }
    Ok(())
}
fn main() {
let names = vec!["Alice", "Bob", "Eve"];
let ctx = Context;
let tree = false;
manage_passwords(names, &ctx, tree).unwrap();
} |
# Ruby 1.x is no longer a supported runtime,
# but its regex features are still recognized.
#
# Aliases for the latest patch version are provided as 'ruby/n.n',
# e.g. 'ruby/1.9' refers to Ruby v1.9.3.
# Load every version definition file under versions/, in alphabetical order.
Dir.glob(File.expand_path('../versions/*.rb', __FILE__)).sort.each do |version_file|
  require version_file
end
|
# Print the rows of `docker volume list` whose columns satisfy a
# caller-supplied Perl predicate expression (default "0": match nothing).
# Inside the predicate, $driver and $name hold the row's two columns.
# The header line is echoed unchanged; an unexpected header aborts.
function docker_volume_list_each_that_matches() { # [docker_volume_match_predicate_expression]
local docker_volume_match_predicate_expression="${1:-0}"
# NOTE: the Perl program below is a single-quoted shell string; the
# predicate arrives as its first argument and is eval()ed per data row.
docker volume list | perl -e '
my $match_predicate_expression = shift(@ARGV);
while( <> ) {
if ($. == 1) {
if (m{DRIVER\s+VOLUME NAME\b}) {
print ;
}
else {
die "Looks like the format of a docker volume list has changed; aborting." ;
}
}
else {
my ($driver, $name, @rest) = split ;
my $match_p = eval $match_predicate_expression ;
print if ($match_p) ;
}
}
' "${docker_volume_match_predicate_expression:?}";
}
|
# Copyright (c) 2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# What to do
sign=false
verify=false
build=false
# Fixed: the --setup option and the later "[[ $setup = true ]]" check use
# "$setup", but this flag was initialised as "setupenv" and never read.
setup=false
# Systems to build
linux=true
windows=true
osx=true
# aarch64 builds are opt-in via -o (usage: default is lwx); initialise the
# flag explicitly instead of relying on an unset variable.
aarch64=false
# Other Basic variables
SIGNER=
VERSION=
commit=false
url=https://github.com/iox/iox
proc=2
mem=2000
lxc=true
osslTarUrl=http://downloads.sourceforge.net/project/osslsigncode/osslsigncode/osslsigncode-1.7.1.tar.gz
osslPatchUrl=https://bitcoincore.org/cfields/osslsigncode-Backports-to-1.7.1.patch
scriptName=$(basename -- "$0")
signProg="gpg --detach-sign"
commitFiles=true
# Help Message
read -d '' usage <<- EOF
Usage: $scriptName [-c|u|v|b|s|B|o|h|j|m|] signer version
Run this script from the directory containing the iox, gitian-builder, gitian.sigs, and iox-detached-sigs.
Arguments:
signer          GPG signer to sign each build assert file
version		Version number, commit, or branch to build. If building a commit or branch, the -c option must be specified
Options:
-c|--commit	Indicate that the version argument is for a commit or branch
-u|--url	Specify the URL of the repository. Default is https://github.com/iox/iox
-v|--verify 	Verify the gitian build
-b|--build	Do a gitian build
-s|--sign	Make signed binaries for Windows and Mac OSX
-B|--buildsign	Build both signed and unsigned binaries
-o|--os		Specify which Operating Systems the build is for. Default is lwx. l for linux, w for windows, x for osx, a for aarch64
-j		Number of processes to use. Default 2
-m		Memory to allocate in MiB. Default 2000
--kvm           Use KVM instead of LXC
--setup         Setup the gitian building environment. Uses KVM. If you want to use lxc, use the --lxc option. Only works on Debian-based systems (Ubuntu, Debian)
--detach-sign   Create the assert file for detached signing. Will not commit anything.
--no-commit     Do not commit anything to git
-h|--help	Print this help message
EOF
# Get options and arguments
while :; do
case $1 in
# Verify
-v|--verify)
verify=true
;;
# Build
-b|--build)
build=true
;;
# Sign binaries
-s|--sign)
sign=true
;;
# Build then Sign
-B|--buildsign)
sign=true
build=true
;;
# PGP Signer
-S|--signer)
if [ -n "$2" ]
then
SIGNER=$2
shift
else
echo 'Error: "--signer" requires a non-empty argument.'
exit 1
fi
;;
# Operating Systems
-o|--os)
if [ -n "$2" ]
then
linux=false
windows=false
osx=false
aarch64=false
if [[ "$2" = *"l"* ]]
then
linux=true
fi
if [[ "$2" = *"w"* ]]
then
windows=true
fi
if [[ "$2" = *"x"* ]]
then
osx=true
fi
if [[ "$2" = *"a"* ]]
then
aarch64=true
fi
shift
else
echo 'Error: "--os" requires an argument containing an l (for linux), w (for windows), x (for Mac OSX), or a (for aarch64)\n'
exit 1
fi
;;
# Help message
-h|--help)
echo "$usage"
exit 0
;;
# Commit or branch
-c|--commit)
commit=true
;;
# Number of Processes
-j)
if [ -n "$2" ]
then
proc=$2
shift
else
echo 'Error: "-j" requires an argument'
exit 1
fi
;;
# Memory to allocate
-m)
if [ -n "$2" ]
then
mem=$2
shift
else
echo 'Error: "-m" requires an argument'
exit 1
fi
;;
# URL
-u)
if [ -n "$2" ]
then
url=$2
shift
else
echo 'Error: "-u" requires an argument'
exit 1
fi
;;
# kvm
--kvm)
lxc=false
;;
# Detach sign
--detach-sign)
signProg="true"
commitFiles=false
;;
# Commit files
--no-commit)
commitFiles=false
;;
# Setup
--setup)
setup=true
;;
*) # Default case: If no more options then break out of the loop.
break
esac
shift
done
# Set up LXC
if [[ $lxc = true ]]
then
    export USE_LXC=1
    export LXC_BRIDGE=lxcbr0
    # Bring up the bridge interface gitian's LXC guests attach to.
    sudo ifconfig lxcbr0 up 10.0.2.2
fi
# Check for OSX SDK
if [[ ! -e "gitian-builder/inputs/MacOSX10.11.sdk.tar.gz" && $osx == true ]]
then
    echo "Cannot build for OSX, SDK does not exist. Will build for other OSes"
    osx=false
fi
# Get signer
# NOTE: positional signer/version override anything set via -S earlier.
if [[ -n "$1" ]]
then
    SIGNER=$1
    shift
fi
# Get version
if [[ -n "$1" ]]
then
    VERSION=$1
    COMMIT=$VERSION
    shift
fi
# Check that a signer is specified
if [[ $SIGNER == "" ]]
then
    echo "$scriptName: Missing signer."
    echo "Try $scriptName --help for more information"
    exit 1
fi
# Check that a version is specified
if [[ $VERSION == "" ]]
then
    echo "$scriptName: Missing version."
    echo "Try $scriptName --help for more information"
    exit 1
fi
# Add a "v" if no -c
# Tags are "v<version>"; raw commits/branches are used as given.
if [[ $commit = false ]]
then
    COMMIT="v${VERSION}"
fi
echo ${COMMIT}
# Setup build environment
# Clones the sigs/builder repos and creates the gitian base VM
# (LXC or KVM depending on USE_LXC).
if [[ $setup = true ]]
then
    sudo apt-get install ruby apache2 git apt-cacher-ng python-vm-builder qemu-kvm qemu-utils
    git clone https://github.com/iox/gitian.sigs.git
    git clone https://github.com/iox/iox-detached-sigs.git
    git clone https://github.com/devrandom/gitian-builder.git
    pushd ./gitian-builder
    if [[ -n "$USE_LXC" ]]
    then
        sudo apt-get install lxc
        bin/make-base-vm --suite trusty --arch amd64 --lxc
    else
        bin/make-base-vm --suite trusty --arch amd64
    fi
    popd
fi
# Set up build
# Check out the requested tag/commit in the source tree.
pushd ./iox
git fetch
git checkout ${COMMIT}
popd
# Build
# Runs the gitian build for each enabled OS, moves artifacts into
# ./iox-binaries/${VERSION}, and optionally commits the unsigned sigs.
if [[ $build = true ]]
then
    # Make output folder
    mkdir -p ./iox-binaries/${VERSION}
    # Build Dependencies
    echo ""
    echo "Building Dependencies"
    echo ""
    pushd ./gitian-builder
    mkdir -p inputs
    wget -N -P inputs $osslPatchUrl
    wget -N -P inputs $osslTarUrl
    make -C ../iox/depends download SOURCES_PATH=`pwd`/cache/common
    # Linux
    if [[ $linux = true ]]
    then
        echo ""
        echo "Compiling ${VERSION} Linux"
        echo ""
        ./bin/gbuild -j ${proc} -m ${mem} --commit iox=${COMMIT} --url iox=${url} ../iox/contrib/gitian-descriptors/gitian-linux.yml
        ./bin/gsign -p $signProg --signer $SIGNER --release ${VERSION}-linux --destination ../gitian.sigs/ ../iox/contrib/gitian-descriptors/gitian-linux.yml
        mv build/out/iox-*.tar.gz build/out/src/iox-*.tar.gz ../iox-binaries/${VERSION}
    fi
    # Windows
    if [[ $windows = true ]]
    then
        echo ""
        echo "Compiling ${VERSION} Windows"
        echo ""
        ./bin/gbuild -j ${proc} -m ${mem} --commit iox=${COMMIT} --url iox=${url} ../iox/contrib/gitian-descriptors/gitian-win.yml
        ./bin/gsign -p $signProg --signer $SIGNER --release ${VERSION}-win-unsigned --destination ../gitian.sigs/ ../iox/contrib/gitian-descriptors/gitian-win.yml
        mv build/out/iox-*-win-unsigned.tar.gz inputs/iox-win-unsigned.tar.gz
        mv build/out/iox-*.zip build/out/iox-*.exe ../iox-binaries/${VERSION}
    fi
    # Mac OSX
    if [[ $osx = true ]]
    then
        echo ""
        echo "Compiling ${VERSION} Mac OSX"
        echo ""
        ./bin/gbuild -j ${proc} -m ${mem} --commit iox=${COMMIT} --url iox=${url} ../iox/contrib/gitian-descriptors/gitian-osx.yml
        ./bin/gsign -p $signProg --signer $SIGNER --release ${VERSION}-osx-unsigned --destination ../gitian.sigs/ ../iox/contrib/gitian-descriptors/gitian-osx.yml
        mv build/out/iox-*-osx-unsigned.tar.gz inputs/iox-osx-unsigned.tar.gz
        mv build/out/iox-*.tar.gz build/out/iox-*.dmg ../iox-binaries/${VERSION}
    fi
    # AArch64
    if [[ $aarch64 = true ]]
    then
        echo ""
        echo "Compiling ${VERSION} AArch64"
        echo ""
        ./bin/gbuild -j ${proc} -m ${mem} --commit iox=${COMMIT} --url iox=${url} ../iox/contrib/gitian-descriptors/gitian-aarch64.yml
        ./bin/gsign -p $signProg --signer $SIGNER --release ${VERSION}-aarch64 --destination ../gitian.sigs/ ../iox/contrib/gitian-descriptors/gitian-aarch64.yml
        mv build/out/iox-*.tar.gz build/out/src/iox-*.tar.gz ../iox-binaries/${VERSION}
    # Fixed: this "fi" was missing, leaving both this if-block and the
    # enclosing build if-block unterminated — a bash syntax error at EOF.
    fi
    popd
    if [[ $commitFiles = true ]]
    then
        # Commit to gitian.sigs repo
        echo ""
        echo "Committing ${VERSION} Unsigned Sigs"
        echo ""
        pushd gitian.sigs
        git add ${VERSION}-linux/${SIGNER}
        git add ${VERSION}-aarch64/${SIGNER}
        git add ${VERSION}-win-unsigned/${SIGNER}
        git add ${VERSION}-osx-unsigned/${SIGNER}
        git commit -a -m "Add ${VERSION} unsigned sigs for ${SIGNER}"
        popd
    fi
fi
# Verify the build
# Checks each release's gitian signatures against gitian.sigs.
if [[ $verify = true ]]
then
    # Linux
    pushd ./gitian-builder
    echo ""
    echo "Verifying v${VERSION} Linux"
    echo ""
    ./bin/gverify -v -d ../gitian.sigs/ -r ${VERSION}-linux ../iox/contrib/gitian-descriptors/gitian-linux.yml
    # Windows
    echo ""
    echo "Verifying v${VERSION} Windows"
    echo ""
    ./bin/gverify -v -d ../gitian.sigs/ -r ${VERSION}-win-unsigned ../iox/contrib/gitian-descriptors/gitian-win.yml
    # Mac OSX
    echo ""
    echo "Verifying v${VERSION} Mac OSX"
    echo ""
    ./bin/gverify -v -d ../gitian.sigs/ -r ${VERSION}-osx-unsigned ../iox/contrib/gitian-descriptors/gitian-osx.yml
    # AArch64
    echo ""
    echo "Verifying v${VERSION} AArch64"
    echo ""
    ./bin/gverify -v -d ../gitian.sigs/ -r ${VERSION}-aarch64 ../iox/contrib/gitian-descriptors/gitian-aarch64.yml
    # Signed Windows
    echo ""
    echo "Verifying v${VERSION} Signed Windows"
    echo ""
    # Fixed: this step verified the osx-signed release with the osx signer
    # descriptor (copy-paste from the block below) instead of Windows.
    ./bin/gverify -v -d ../gitian.sigs/ -r ${VERSION}-win-signed ../iox/contrib/gitian-descriptors/gitian-win-signer.yml
    # Signed Mac OSX
    echo ""
    echo "Verifying v${VERSION} Signed Mac OSX"
    echo ""
    ./bin/gverify -v -d ../gitian.sigs/ -r ${VERSION}-osx-signed ../iox/contrib/gitian-descriptors/gitian-osx-signer.yml
    popd
fi
# Sign binaries
# Attaches detached signatures (from the signature repo at ${COMMIT}) to the
# unsigned Windows/OSX builds and optionally commits the resulting sigs.
if [[ $sign = true ]]
then
    pushd ./gitian-builder
    # Sign Windows
    if [[ $windows = true ]]
    then
        echo ""
        echo "Signing ${VERSION} Windows"
        echo ""
        ./bin/gbuild -i --commit signature=${COMMIT} ../iox/contrib/gitian-descriptors/gitian-win-signer.yml
        ./bin/gsign -p $signProg --signer $SIGNER --release ${VERSION}-win-signed --destination ../gitian.sigs/ ../iox/contrib/gitian-descriptors/gitian-win-signer.yml
        mv build/out/iox-*win64-setup.exe ../iox-binaries/${VERSION}
        mv build/out/iox-*win32-setup.exe ../iox-binaries/${VERSION}
    fi
    # Sign Mac OSX
    if [[ $osx = true ]]
    then
        echo ""
        echo "Signing ${VERSION} Mac OSX"
        echo ""
        ./bin/gbuild -i --commit signature=${COMMIT} ../iox/contrib/gitian-descriptors/gitian-osx-signer.yml
        ./bin/gsign -p $signProg --signer $SIGNER --release ${VERSION}-osx-signed --destination ../gitian.sigs/ ../iox/contrib/gitian-descriptors/gitian-osx-signer.yml
        mv build/out/iox-osx-signed.dmg ../iox-binaries/${VERSION}/iox-${VERSION}-osx.dmg
    fi
    popd
    if [[ $commitFiles = true ]]
    then
        # Commit Sigs
        pushd gitian.sigs
        echo ""
        echo "Committing ${VERSION} Signed Sigs"
        echo ""
        git add ${VERSION}-win-signed/${SIGNER}
        git add ${VERSION}-osx-signed/${SIGNER}
        git commit -a -m "Add ${VERSION} signed binary sigs for ${SIGNER}"
        popd
    fi
fi
|
require 'test_helper'
class PeriodsControllerTest < ActionController::TestCase
  setup do
    @period = periods(:one)
  end

  # NOT LOGGED IN

  test "index should get signin if not logged in" do
    get :index
    assert_redirected_to "/sign_in"
  end

  test "new should get signin if not logged in" do
    get :new
    assert_redirected_to "/sign_in"
  end

  test "create should get signin if not logged in and not create" do
    assert_no_difference('Period.count') do
      post :create, period: { end_time: @period.end_time, name: @period.name, start_time: @period.start_time, timetable_id: @period.timetable_id }
    end
    # Fixed: this test claimed the action redirects to sign-in but never
    # asserted it, matching the other unauthenticated tests.
    assert_redirected_to "/sign_in"
  end

  test "show should get signin if not logged in" do
    get :show, id: @period
    assert_redirected_to "/sign_in"
  end

  test "edit should get signin if not logged in" do
    get :edit, id: @period
    assert_redirected_to "/sign_in"
  end

  test "should not update period" do
    patch :update, id: @period, period: { end_time: @period.end_time, name: 1, start_time: @period.start_time, timetable_id: @period.timetable_id }
    assert_redirected_to "/sign_in"
    updated_period = Period.find(@period.id)
    assert updated_period.name.eql?(@period.name), updated_period.name #i.e. the save should not have worked
  end

  test "should not destroy period if not logged in" do
    # Fixed: assert_no_difference takes no count argument; the stray -1 was
    # being silently treated as the failure message.
    assert_no_difference('Period.count') do
      delete :destroy, id: Period.first.id
    end
    assert_redirected_to "/sign_in"
  end

  # LOGGED IN

  test "should get index" do
    user = User.first
    sign_in_as(user)
    user.selected_timetable_id = Timetable.first.id
    get :index
    assert_response :success
    assert_not_nil assigns(:periods)
  end

  test "should get new" do
    sign_in_as(User.first)
    get :new
    assert_response :success
  end

  test "should create period" do
    user = User.first
    timetable = Timetable.first
    user.selected_timetable_id = timetable.id
    user.save!
    sign_in_as(user)
    assert_difference('Period.count') do
      post :create, period: { end_time: @period.end_time, name: @period.name, start_time: @period.start_time, timetable_id: @period.timetable_id }
    end
    assert_redirected_to settings_path
  end

  test "should show period" do
    sign_in_as(User.first)
    get :show, id: @period
    assert_response :success
  end

  test "should get edit" do
    sign_in_as(User.first)
    get :edit, id: @period
    assert_response :success
  end

  test "should update period" do
    sign_in_as(User.first)
    patch :update, id: @period, period: { end_time: @period.end_time, name: @period.name, start_time: @period.start_time, timetable_id: @period.timetable_id }
    assert_redirected_to settings_path
  end

  test "should destroy period" do
    sign_in_as(User.first)
    assert_difference('Period.count', -1) do
      delete :destroy, id: @period
    end
    assert_redirected_to settings_path
  end
end
|
package com.example.tour;
import android.app.Activity;
import android.os.AsyncTask;
import android.os.Bundle;
/**
 * Activity for user details — presumably; layout inflation is commented out
 * and only a background-task stub runs, so confirm intended behavior.
 */
public class user_details extends Activity {
    @Override
    protected void onCreate(Bundle savedInstanceState) {
        // TODO Auto-generated method stub
        super.onCreate(savedInstanceState);
        //setContentView(R.layout.user_details);
        // Fire-and-forget background task; both callbacks below are stubs.
        MyTask myTask = new MyTask();
        myTask.execute();
    }

    /** Placeholder AsyncTask: does no work and returns null. */
    private class MyTask extends AsyncTask<String, Void, String>{
        @Override
        protected String doInBackground(String... arg0) {
            // TODO Auto-generated method stub
            return null;
        }

        @Override
        protected void onPostExecute(String result) {
            // TODO Auto-generated method stub
            super.onPostExecute(result);
        }
    }
}
|
// Responsive breakpoints (px) and the column count used at each breakpoint.
export const GridBreakPoints = { lg: 1200, md: 996, sm: 768, xs: 480, xxs: 0 }
export const GridColumns = { lg: 100, md: 80, sm: 50, xs: 25, xxs: 10 }
export const WidgetTitleHeight = 34 // 34px

/**
 * Pixel width of a widget spanning `gridColumns` grid columns at the given
 * window width. Returns a whole-number string (via toFixed), matching the
 * original contract.
 *
 * Fixed (consistency): previously duplicated the breakpoint/column tables
 * as inline literals; now derived from GridBreakPoints/GridColumns so the
 * tables above are the single source of truth.
 */
export const getWidgetWidth = (windowWidth, gridColumns) => {
  // First breakpoint whose minimum width the window strictly exceeds.
  const breakpoint =
    ['lg', 'md', 'sm', 'xs'].find(bp => windowWidth > GridBreakPoints[bp]) || 'xxs'
  return (windowWidth * (gridColumns / GridColumns[breakpoint])).toFixed(0)
}

/**
 * Pixel height of a widget spanning `gridHeight` percent of the window
 * height, minus the title bar when one is shown.
 *
 * Fixed: previously returned a number when showTitle was true (string minus
 * number coerces) but a *string* when false; now always returns a number.
 */
export const getWidgetHeight = (windowHeight, gridHeight, showTitle = true) => {
  const height = Number((windowHeight * (gridHeight / 100)).toFixed(0))
  return showTitle ? height - WidgetTitleHeight : height
}
|
## DL params
export BATCHSIZE=2
export EVALBATCHSIZE=80
# Allow callers to override the epoch count from the environment.
export NUMEPOCHS=${NUMEPOCHS:-15}
export EXTRA_PARAMS='--val-epochs 10 15 --lr-decay-epochs 60 75 --lr-warmup-epoch=26 --lr=0.004375 --weight-decay=4e-5 --bn-group=8 --gradient-predivide-factor=32 --input-batch-multiplier=20'
## System run parms
export DGXNNODES=8
# Derive the system name from this config file's own name (config_<name>.sh).
export DGXSYSTEM=$(basename $(readlink -f ${BASH_SOURCE[0]}) | sed 's/^config_//' | sed 's/\.sh$//' )
WALLTIME_MINUTES=15
# Total walltime budget scales with the number of experiments (NEXP).
export WALLTIME=$((${NEXP} * ${WALLTIME_MINUTES}))
## System config params
export DGXNGPU=8
export DGXSOCKETCORES=64
export DGXNSOCKET=2
export DGXHT=2 	# HT is on is 2, HT off is 1
|
package com.github.guitsilva.battleship.view.frames;
import com.github.guitsilva.battleship.view.Console;
public class ShipsFrame extends Frame implements Renderable {

    /** Draws the ship-distribution menu (auto/manual choice). */
    public void render() {
        Console.clear();
        this.renderHeader();
        Console.print("Ships distribution on the grid:", 100, true, ' ');
        Console.print("");
        Console.print("(A) Auto", 100, true, ' ');
        Console.print("(M) Manual", 100, true, ' ');
        // Trailing blank lines pad the frame to its full height.
        for (int blank = 0; blank < 5; blank++) {
            Console.print("");
        }
    }

    /** Reads options until the user enters A or M (case-insensitive). */
    public String promptOption() {
        String choice = prompt.read().toUpperCase();
        while (!choice.equals("A") && !choice.equals("M")) {
            System.out.println("Invalid option. Try again!");
            choice = prompt.read().toUpperCase();
        }
        return choice;
    }
}
|
/*
* This file is generated by jOOQ.
*/
package com.yg.gqlwfdl.dataaccess.db.tables;
import com.yg.gqlwfdl.dataaccess.db.Indexes;
import com.yg.gqlwfdl.dataaccess.db.Keys;
import com.yg.gqlwfdl.dataaccess.db.Public;
import com.yg.gqlwfdl.dataaccess.db.tables.records.PricingDetailsRecord;
import java.util.Arrays;
import java.util.List;
import javax.annotation.Generated;
import org.jooq.Field;
import org.jooq.ForeignKey;
import org.jooq.Identity;
import org.jooq.Index;
import org.jooq.Name;
import org.jooq.Record;
import org.jooq.Schema;
import org.jooq.Table;
import org.jooq.TableField;
import org.jooq.UniqueKey;
import org.jooq.impl.DSL;
import org.jooq.impl.TableImpl;
/**
* This class is generated by jOOQ.
*/
// NOTE(review): jOOQ-generated code — do not hand-edit; regenerate from the
// database schema instead, or changes will be lost.
@Generated(
    value = {
        "http://www.jooq.org",
        "jOOQ version:3.11.2"
    },
    comments = "This class is generated by jOOQ"
)
@SuppressWarnings({ "all", "unchecked", "rawtypes" })
public class PricingDetails extends TableImpl<PricingDetailsRecord> {

    private static final long serialVersionUID = 1928327842;

    /**
     * The reference instance of <code>public.pricing_details</code>
     */
    public static final PricingDetails PRICING_DETAILS = new PricingDetails();

    /**
     * The class holding records for this type
     */
    @Override
    public Class<PricingDetailsRecord> getRecordType() {
        return PricingDetailsRecord.class;
    }

    /**
     * The column <code>public.pricing_details.id</code>.
     * Identity column backed by the pricing_details_id_seq sequence.
     */
    public final TableField<PricingDetailsRecord, Long> ID = createField("id", org.jooq.impl.SQLDataType.BIGINT.nullable(false).defaultValue(org.jooq.impl.DSL.field("nextval('pricing_details_id_seq'::regclass)", org.jooq.impl.SQLDataType.BIGINT)), this, "");

    /**
     * The column <code>public.pricing_details.description</code>.
     */
    public final TableField<PricingDetailsRecord, String> DESCRIPTION = createField("description", org.jooq.impl.SQLDataType.CLOB.nullable(false), this, "");

    /**
     * The column <code>public.pricing_details.vat_rate</code>.
     */
    public final TableField<PricingDetailsRecord, Long> VAT_RATE = createField("vat_rate", org.jooq.impl.SQLDataType.BIGINT.nullable(false), this, "");

    /**
     * The column <code>public.pricing_details.discount_rate</code>.
     */
    public final TableField<PricingDetailsRecord, Long> DISCOUNT_RATE = createField("discount_rate", org.jooq.impl.SQLDataType.BIGINT.nullable(false), this, "");

    /**
     * The column <code>public.pricing_details.preferred_payment_method</code>.
     */
    public final TableField<PricingDetailsRecord, Long> PREFERRED_PAYMENT_METHOD = createField("preferred_payment_method", org.jooq.impl.SQLDataType.BIGINT.nullable(false), this, "");

    /**
     * Create a <code>public.pricing_details</code> table reference
     */
    public PricingDetails() {
        this(DSL.name("pricing_details"), null);
    }

    /**
     * Create an aliased <code>public.pricing_details</code> table reference
     */
    public PricingDetails(String alias) {
        this(DSL.name(alias), PRICING_DETAILS);
    }

    /**
     * Create an aliased <code>public.pricing_details</code> table reference
     */
    public PricingDetails(Name alias) {
        this(alias, PRICING_DETAILS);
    }

    private PricingDetails(Name alias, Table<PricingDetailsRecord> aliased) {
        this(alias, aliased, null);
    }

    private PricingDetails(Name alias, Table<PricingDetailsRecord> aliased, Field<?>[] parameters) {
        super(alias, null, aliased, parameters, DSL.comment(""));
    }

    public <O extends Record> PricingDetails(Table<O> child, ForeignKey<O, PricingDetailsRecord> key) {
        super(child, key, PRICING_DETAILS);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Schema getSchema() {
        return Public.PUBLIC;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public List<Index> getIndexes() {
        return Arrays.<Index>asList(Indexes.PRICING_DETAILS_PKEY);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public Identity<PricingDetailsRecord, Long> getIdentity() {
        return Keys.IDENTITY_PRICING_DETAILS;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public UniqueKey<PricingDetailsRecord> getPrimaryKey() {
        return Keys.PRICING_DETAILS_PKEY;
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public List<UniqueKey<PricingDetailsRecord>> getKeys() {
        return Arrays.<UniqueKey<PricingDetailsRecord>>asList(Keys.PRICING_DETAILS_PKEY);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public PricingDetails as(String alias) {
        return new PricingDetails(DSL.name(alias), this);
    }

    /**
     * {@inheritDoc}
     */
    @Override
    public PricingDetails as(Name alias) {
        return new PricingDetails(alias, this);
    }

    /**
     * Rename this table
     */
    @Override
    public PricingDetails rename(String name) {
        return new PricingDetails(DSL.name(name), null);
    }

    /**
     * Rename this table
     */
    @Override
    public PricingDetails rename(Name name) {
        return new PricingDetails(name, null);
    }
}
|
import React from 'react';
import { storiesOf } from '@storybook/react';
import ContentTitle from './contentTitle';
storiesOf('Components/ContentTitle', module).add('default', () => {
return <ContentTitle>you are swapping</ContentTitle>;
});
|
package de.hswhameln.typetogether.networking.util;
import java.util.ArrayList;
import java.util.List;
import java.util.stream.Collectors;
import de.hswhameln.typetogether.networking.types.Identifier;
/**
 * Digit-list arithmetic over positions encoded as lists of integers
 * (used with {@link Identifier} digit sequences). NOTE(review): add() and
 * subtractGreaterThan() work digit-wise with no carry/borrow — presumably
 * CRDT-style position arithmetic rather than true decimal math; confirm
 * against the callers before relying on numeric semantics.
 */
public class Decimal {

    /** Project each identifier onto its digit, preserving order. */
    public static List<Integer> fromIdentifierList(List<Identifier> identifiers) {
        return identifiers.stream().map(Identifier::getDigit).collect(Collectors.toList());
    }

    /** Return a new list: {@code identifier} followed by all of {@code position}. */
    public static List<Identifier> cons(Identifier identifier, List<Identifier> position) {
        List<Identifier> returnIn = new ArrayList<>();
        returnIn.add(identifier);
        returnIn.addAll(position);
        return returnIn;
    }

    /** Return a copy of {@code position} without its first element. */
    public static <T> List<T> rest(List<T> position) {
        List<T> returnIn = new ArrayList<>();
        for (int i = 1; i < position.size(); i++) {
            returnIn.add(position.get(i));
        }
        return returnIn;
    }

    /**
     * Increment {@code n1} by a unit derived from the leading non-positive
     * digits of {@code delta}. Does not modify either argument.
     */
    public static List<Integer> increment(List<Integer> n1, List<Integer> delta) {
        // Index of the last scanned non-positive digit of delta
        // (0 when delta's first digit is already positive).
        int firstNonzeroDigit = 0;
        for (int i = 0; i < delta.size() && delta.get(i) <= 0; i++) {
            firstNonzeroDigit = i;
        }
        // Fixed: List.subList returns a *view*; appending to it previously
        // wrote through into the caller's delta list. Copy the prefix instead.
        List<Integer> inc = new ArrayList<>(delta.subList(0, firstNonzeroDigit));
        inc.add(0);
        inc.add(1);
        List<Integer> v1 = Decimal.add(n1, inc);
        // If the result ends in zero, apply the increment once more.
        return v1.get(v1.size() - 1) == 0 ? Decimal.add(v1, inc) : v1;
    }

    /**
     * Re-attach user ids to the digits in {@code n}: reuse the id from
     * {@code before}/{@code after} where the digit matches positionally,
     * otherwise (and always for the last digit) use {@code userId}.
     */
    public static List<Identifier> toIdentifierList(List<Integer> n, List<Identifier> before, List<Identifier> after, int userId) {
        List<Identifier> returnIn = new ArrayList<>();
        for (int i = 0; i < n.size(); i++) {
            if (i == n.size() - 1) {
                returnIn.add(new Identifier(n.get(i), userId));
            } else if (i < before.size() && n.get(i) == before.get(i).getDigit()) {
                returnIn.add(new Identifier(n.get(i), before.get(i).getUserId()));
            } else if (i < after.size() && n.get(i) == after.get(i).getDigit()) {
                returnIn.add(new Identifier(n.get(i), after.get(i).getUserId()));
            } else {
                returnIn.add(new Identifier(n.get(i), userId));
            }
        }
        return returnIn;
    }

    /** Digit-wise sum (no carry); missing digits are taken from the longer list. */
    public static List<Integer> add(List<Integer> n1, List<Integer> n2) {
        List<Integer> returnIn = new ArrayList<>();
        for (int i = 0; i < Math.max(n1.size(), n2.size()); i++) {
            if (i >= n1.size()) {
                returnIn.add(n2.get(i));
            } else if (i >= n2.size()) {
                returnIn.add(n1.get(i));
            } else {
                returnIn.add(n1.get(i) + n2.get(i));
            }
        }
        return returnIn;
    }

    /**
     * Digit-wise absolute difference (no borrow); trailing digits of the
     * longer list are appended unchanged.
     */
    public static List<Integer> subtractGreaterThan(List<Integer> n1, List<Integer> n2) {
        List<Integer> returnIn = new ArrayList<>();
        int j = 0;
        for (int i = 0; i < Math.min(n1.size(), n2.size()); i++) {
            if (n1.get(i).equals(n2.get(i))) {
                returnIn.add(0);
            } else if (n1.get(i) > n2.get(i)) {
                returnIn.add(n1.get(i) - n2.get(i));
            } else {
                returnIn.add(n2.get(i) - n1.get(i));
            }
            j = i;
        }
        j++;
        if (n1.size() > j) {
            for (int i = j; i < n1.size(); i++) {
                returnIn.add(n1.get(i));
            }
        } else if (n2.size() > j) {
            for (int i = j; i < n2.size(); i++) {
                returnIn.add(n2.get(i));
            }
        }
        return returnIn;
    }
}
|
<reponame>joeosburn/parcel
// @flow strict-local
import type {
Blob,
FilePath,
BundleResult,
Bundle as BundleType,
BundleGraph as BundleGraphType,
NamedBundle as NamedBundleType,
Async,
} from '@parcel/types';
import type SourceMap from '@parcel/source-map';
import type WorkerFarm from '@parcel/workers';
import type {Bundle as InternalBundle, ParcelOptions, ReportFn} from './types';
import type ParcelConfig from './ParcelConfig';
import type InternalBundleGraph from './BundleGraph';
import type {FileSystem, FileOptions} from '@parcel/fs';
import {
md5FromObject,
md5FromString,
blobToStream,
TapStream,
} from '@parcel/utils';
import {PluginLogger} from '@parcel/logger';
import {init as initSourcemaps} from '@parcel/source-map';
import ThrowableDiagnostic, {errorToDiagnostic} from '@parcel/diagnostic';
import {Readable, Transform} from 'stream';
import nullthrows from 'nullthrows';
import path from 'path';
import url from 'url';
import crypto from 'crypto';
import {NamedBundle, bundleToInternalBundle} from './public/Bundle';
import BundleGraph, {
bundleGraphToInternalBundleGraph,
} from './public/BundleGraph';
import PluginOptions from './public/PluginOptions';
import {PARCEL_VERSION, HASH_REF_PREFIX, HASH_REF_REGEX} from './constants';
// Construction options for PackagerRunner (mirrors the constructor params).
type Opts = {|
  config: ParcelConfig,
  configRef?: number,
  farm?: WorkerFarm,
  options: ParcelOptions,
  optionsRef?: number,
  report: ReportFn,
|};

// Result metadata for a packaged bundle: its content hash, the hash
// references embedded in it, and elapsed packaging time (ms — see
// processBundle's Date.now() usage).
type BundleInfo = {|
  +hash: string,
  +hashReferences: Array<string>,
  +time: number,
|};

// Cache keys for a bundle's content, source map, and info entries.
type CacheKeyMap = {|
  content: string,
  map: string,
  info: string,
|};

// Length of a hash reference minus one — presumably the window needed to
// detect a reference spanning a chunk boundary; confirm at the usage site.
const BOUNDARY_LENGTH = HASH_REF_PREFIX.length + 32 - 1;
export default class PackagerRunner {
config: ParcelConfig;
configRef: ?number;
options: ParcelOptions;
optionsRef: ?number;
farm: ?WorkerFarm;
pluginOptions: PluginOptions;
distDir: FilePath;
distExists: Set<FilePath>;
report: ReportFn;
getBundleInfoFromWorker: ({|
bundle: InternalBundle,
bundleGraphReference: number,
configRef: number,
cacheKeys: CacheKeyMap,
optionsRef: number,
|}) => Promise<BundleInfo>;
constructor({config, configRef, farm, options, optionsRef, report}: Opts) {
this.config = config;
this.configRef = configRef;
this.options = options;
this.optionsRef = optionsRef;
this.pluginOptions = new PluginOptions(this.options);
this.farm = farm;
this.report = report;
this.getBundleInfoFromWorker = farm
? farm.createHandle('runPackage')
: () => {
throw new Error(
'Cannot call PackagerRunner.writeBundleFromWorker() in a worker',
);
};
}
async writeBundles(bundleGraph: InternalBundleGraph) {
let farm = nullthrows(this.farm);
let {ref, dispose} = await farm.createSharedReference(bundleGraph);
let bundleInfoMap = {};
let writeEarlyPromises = {};
let hashRefToNameHash = new Map();
// skip inline bundles, they will be processed via the parent bundle
let bundles = bundleGraph.getBundles().filter(bundle => !bundle.isInline);
await Promise.all(
bundles.map(async bundle => {
let info = await this.processBundle(bundle, bundleGraph, ref);
bundleInfoMap[bundle.id] = info;
if (!info.hashReferences.length) {
hashRefToNameHash.set(bundle.hashReference, info.hash.slice(-8));
writeEarlyPromises[bundle.id] = this.writeToDist({
bundle,
info,
hashRefToNameHash,
bundleGraph,
});
}
}),
);
assignComplexNameHashes(hashRefToNameHash, bundles, bundleInfoMap);
await Promise.all(
bundles.map(
bundle =>
writeEarlyPromises[bundle.id] ??
this.writeToDist({
bundle,
info: bundleInfoMap[bundle.id],
hashRefToNameHash,
bundleGraph,
}),
),
);
await dispose();
}
async processBundle(
bundle: InternalBundle,
bundleGraph: InternalBundleGraph,
bundleGraphReference: number,
): Promise<{|
...BundleInfo,
cacheKeys: CacheKeyMap,
|}> {
let start = Date.now();
let cacheKey = await this.getCacheKey(bundle, bundleGraph);
let cacheKeys = {
content: getContentKey(cacheKey),
map: getMapKey(cacheKey),
info: getInfoKey(cacheKey),
};
let {hash, hashReferences} =
(await this.getBundleInfoFromCache(cacheKeys.info)) ??
(await this.getBundleInfoFromWorker({
bundle,
bundleGraphReference,
cacheKeys,
optionsRef: nullthrows(this.optionsRef),
configRef: nullthrows(this.configRef),
}));
return {time: Date.now() - start, hash, hashReferences, cacheKeys};
}
getBundleInfoFromCache(infoKey: string) {
if (this.options.disableCache) {
return;
}
return this.options.cache.get(infoKey);
}
async getBundleInfo(
bundle: InternalBundle,
bundleGraph: InternalBundleGraph,
cacheKeys: CacheKeyMap,
) {
let {contents, map} = await this.getBundleResult(bundle, bundleGraph);
return this.writeToCache(cacheKeys, contents, map);
}
async getBundleResult(
bundle: InternalBundle,
bundleGraph: InternalBundleGraph,
): Promise<{|
contents: Blob,
map: ?string,
|}> {
await initSourcemaps;
let packaged = await this.package(bundle, bundleGraph);
let res = await this.optimize(
bundle,
bundleGraph,
packaged.contents,
packaged.map,
);
let map = res.map ? await this.generateSourceMap(bundle, res.map) : null;
return {
contents: res.contents,
map,
};
}
getSourceMapReference(bundle: NamedBundle, map: ?SourceMap): Async<?string> {
if (map && this.options.sourceMaps) {
if (
bundle.isInline ||
(bundle.target.sourceMap && bundle.target.sourceMap.inline)
) {
return this.generateSourceMap(bundleToInternalBundle(bundle), map);
} else {
return path.basename(bundle.filePath) + '.map';
}
} else {
return null;
}
}
async package(
internalBundle: InternalBundle,
bundleGraph: InternalBundleGraph,
): Promise<BundleResult> {
let bundle = new NamedBundle(internalBundle, bundleGraph, this.options);
this.report({
type: 'buildProgress',
phase: 'packaging',
bundle,
});
let packager = await this.config.getPackager(bundle.filePath);
try {
return await packager.plugin.package({
bundle,
bundleGraph: new BundleGraph<NamedBundleType>(
bundleGraph,
(bundle, bundleGraph, options) =>
new NamedBundle(bundle, bundleGraph, options),
this.options,
),
getSourceMapReference: map => {
return this.getSourceMapReference(bundle, map);
},
options: this.pluginOptions,
logger: new PluginLogger({origin: packager.name}),
getInlineBundleContents: async (
bundle: BundleType,
bundleGraph: BundleGraphType<NamedBundleType>,
) => {
if (!bundle.isInline) {
throw new Error(
'Bundle is not inline and unable to retrieve contents',
);
}
let res = await this.getBundleResult(
bundleToInternalBundle(bundle),
// $FlowFixMe
bundleGraphToInternalBundleGraph(bundleGraph),
);
return {contents: res.contents};
},
});
} catch (e) {
throw new ThrowableDiagnostic({
diagnostic: errorToDiagnostic(e, packager.name),
});
}
}
async optimize(
internalBundle: InternalBundle,
bundleGraph: InternalBundleGraph,
contents: Blob,
map?: ?SourceMap,
): Promise<BundleResult> {
let bundle = new NamedBundle(internalBundle, bundleGraph, this.options);
let optimizers = await this.config.getOptimizers(
bundle.filePath,
internalBundle.pipeline,
);
if (!optimizers.length) {
return {contents, map};
}
this.report({
type: 'buildProgress',
phase: 'optimizing',
bundle,
});
let optimized = {contents, map};
for (let optimizer of optimizers) {
try {
optimized = await optimizer.plugin.optimize({
bundle,
contents: optimized.contents,
map: optimized.map,
getSourceMapReference: map => {
return this.getSourceMapReference(bundle, map);
},
options: this.pluginOptions,
logger: new PluginLogger({origin: optimizer.name}),
});
} catch (e) {
throw new ThrowableDiagnostic({
diagnostic: errorToDiagnostic(e, optimizer.name),
});
}
}
return optimized;
}
generateSourceMap(bundle: InternalBundle, map: SourceMap): Promise<string> {
// sourceRoot should be a relative path between outDir and rootDir for node.js targets
let filePath = nullthrows(bundle.filePath);
let sourceRoot: string = path.relative(
path.dirname(filePath),
this.options.projectRoot,
);
let inlineSources = false;
if (bundle.target) {
if (
bundle.target.sourceMap &&
bundle.target.sourceMap.sourceRoot !== undefined
) {
sourceRoot = bundle.target.sourceMap.sourceRoot;
} else if (
bundle.target.env.context === 'browser' &&
this.options.mode !== 'production'
) {
sourceRoot = '/__parcel_source_root';
}
if (
bundle.target.sourceMap &&
bundle.target.sourceMap.inlineSources !== undefined
) {
inlineSources = bundle.target.sourceMap.inlineSources;
} else if (bundle.target.env.context !== 'node') {
// inlining should only happen in production for browser targets by default
inlineSources = this.options.mode === 'production';
}
}
let mapFilename = filePath + '.map';
let isInlineMap =
bundle.isInline ||
(bundle.target.sourceMap && bundle.target.sourceMap.inline);
// $FlowFixMe format is never object so it's always a string...
return map.stringify({
file: path.basename(mapFilename),
fs: this.options.inputFS,
rootDir: this.options.projectRoot,
sourceRoot: !inlineSources
? url.format(url.parse(sourceRoot + '/'))
: undefined,
inlineSources,
format: isInlineMap ? 'inline' : 'string',
});
}
async getCacheKey(
bundle: InternalBundle,
bundleGraph: InternalBundleGraph,
): Promise<string> {
let filePath = nullthrows(bundle.filePath);
// TODO: include packagers and optimizers used in inline bundles as well
let {version: packager} = await this.config.getPackager(filePath);
let optimizers = (
await this.config.getOptimizers(filePath)
).map(({name, version}) => [name, version]);
// TODO: add third party configs to the cache key
let {sourceMaps} = this.options;
return md5FromObject({
parcelVersion: PARCEL_VERSION,
packager,
optimizers,
opts: {sourceMaps},
hash: bundleGraph.getHash(bundle),
});
}
async readFromCache(
cacheKey: string,
): Promise<?{|
contents: Readable,
map: ?Readable,
|}> {
let contentKey = getContentKey(cacheKey);
let mapKey = getMapKey(cacheKey);
let contentExists = await this.options.cache.blobExists(contentKey);
if (!contentExists) {
return null;
}
let mapExists = await this.options.cache.blobExists(mapKey);
return {
contents: this.options.cache.getStream(contentKey),
map: mapExists ? this.options.cache.getStream(mapKey) : null,
};
}
async writeToDist({
  bundle,
  bundleGraph,
  info,
  hashRefToNameHash,
}: {|
  bundle: InternalBundle,
  bundleGraph: InternalBundleGraph,
  info: {|...BundleInfo, cacheKeys: CacheKeyMap|},
  hashRefToNameHash: Map<string, string>,
|}) {
  // Copy a packaged bundle (and, when configured, its source map) from
  // the cache to the dist directory, substituting content-hash
  // placeholders in both the file name and the streamed contents.
  let {inputFS, outputFS} = this.options;
  let filePath = nullthrows(bundle.filePath);
  let thisHashReference = bundle.hashReference;
  // If the bundle's own hash placeholder appears in its file name,
  // swap it for the final name hash and update the bundle record so
  // later consumers see the real path/name.
  if (filePath.includes(thisHashReference)) {
    let thisNameHash = nullthrows(hashRefToNameHash.get(thisHashReference));
    filePath = filePath.replace(thisHashReference, thisNameHash);
    bundle.filePath = filePath;
    bundle.name = nullthrows(bundle.name).replace(
      thisHashReference,
      thisNameHash,
    );
  }
  let dir = path.dirname(filePath);
  await outputFS.mkdirp(dir); // ? Got rid of dist exists, is this an expensive operation
  // Use the file mode from the entry asset as the file mode for the bundle.
  // Don't do this for browser builds, as the executable bit in particular is unnecessary.
  let publicBundle = new NamedBundle(bundle, bundleGraph, this.options);
  let writeOptions = publicBundle.env.isBrowser()
    ? undefined
    : {
        mode: (
          await inputFS.stat(nullthrows(publicBundle.getMainEntry()).filePath)
        ).mode,
      };
  let cacheKeys = info.cacheKeys;
  let contentStream = this.options.cache.getStream(cacheKeys.content);
  // Stream the cached contents to disk, rewriting hash references on
  // the fly; writeFileStream resolves with the bytes written.
  let size = await writeFileStream(
    outputFS,
    filePath,
    contentStream,
    info.hashReferences,
    hashRefToNameHash,
    writeOptions,
  );
  bundle.stats = {
    size,
    time: info.time,
  };
  let mapKey = cacheKeys.map;
  // Only write a .map file when the target wants an external (not
  // inline) source map and one was actually cached.
  if (
    (typeof bundle.target.sourceMap === 'object'
      ? !bundle.target.sourceMap.inline
      : bundle.target.sourceMap) &&
    (await this.options.cache.blobExists(mapKey))
  ) {
    let mapStream = this.options.cache.getStream(mapKey);
    await writeFileStream(
      outputFS,
      filePath + '.map',
      mapStream,
      info.hashReferences,
      hashRefToNameHash,
    );
  }
}
async writeToCache(cacheKeys: CacheKeyMap, contents: Blob, map: ?Blob) {
  // Stream the bundle contents into the cache while computing, in a
  // single pass: the total size, an md5 content hash, and every hash
  // reference present in the output.
  let size = 0;
  let hash = crypto.createHash('md5');
  // Tail of the previous chunk, carried over so a hash reference split
  // across a chunk boundary is still matched.
  let boundaryStr = '';
  let hashReferences = [];
  await this.options.cache.setStream(
    cacheKeys.content,
    blobToStream(contents).pipe(
      new TapStream(buf => {
        // Scan the carried tail + current chunk for hash references.
        let str = boundaryStr + buf.toString();
        hashReferences = hashReferences.concat(
          str.match(HASH_REF_REGEX) ?? [],
        );
        // Size and hash are computed over the raw chunk bytes only.
        size += buf.length;
        hash.update(buf);
        boundaryStr = str.slice(str.length - BOUNDARY_LENGTH);
      }),
    ),
  );
  if (map != null) {
    await this.options.cache.setStream(cacheKeys.map, blobToStream(map));
  }
  // Persist the metadata next to the blobs and hand it back to callers.
  let info = {size, hash: hash.digest('hex'), hashReferences};
  await this.options.cache.set(cacheKeys.info, info);
  return info;
}
}
// Write `stream` to `filePath` via `fs`, substituting hash references
// with their final name hashes on the way through. Resolves with the
// number of bytes written once the destination stream has closed.
function writeFileStream(
  fs: FileSystem,
  filePath: FilePath,
  stream: Readable,
  hashReferences: Array<string>,
  hashRefToNameHash: Map<string, string>,
  options: ?FileOptions,
): Promise<number> {
  return new Promise((resolve, reject) => {
    // Only insert the replacement transform when there is actually
    // something to replace.
    let initialStream = hashReferences.length
      ? stream.pipe(replaceStream(hashRefToNameHash))
      : stream;
    let fsStream = fs.createWriteStream(filePath, options);
    // 'finish' fires when all data has been handed to the stream, but
    // the file may not be flushed/closed yet — also wait for 'close'
    // before the returned promise settles with the byte count.
    let fsStreamClosed = new Promise(resolve => {
      fsStream.on('close', () => resolve());
    });
    let bytesWritten = 0;
    initialStream
      .pipe(
        new TapStream(buf => {
          // Count bytes as they flow past without consuming them.
          bytesWritten += buf.length;
        }),
      )
      .pipe(fsStream)
      .on('finish', () => resolve(fsStreamClosed.then(() => bytesWritten)))
      .on('error', reject);
  });
}
// Transform stream that rewrites hash references to their final name
// hashes. A tail of BOUNDARY_LENGTH characters is held back between
// chunks so a reference split across a chunk boundary is still matched
// on the next pass; the tail is flushed at end of stream.
function replaceStream(hashRefToNameHash) {
  let boundaryStr = '';
  return new Transform({
    transform(chunk, encoding, cb) {
      // Prepend the held-back tail from the previous chunk.
      let str = boundaryStr + chunk.toString();
      let replaced = str.replace(HASH_REF_REGEX, match => {
        // Fall back to the raw reference if no name hash was assigned.
        return hashRefToNameHash.get(match) || match;
      });
      // Hold back the last BOUNDARY_LENGTH chars; emit the rest.
      boundaryStr = replaced.slice(replaced.length - BOUNDARY_LENGTH);
      let strUpToBoundary = replaced.slice(
        0,
        replaced.length - BOUNDARY_LENGTH,
      );
      cb(null, strUpToBoundary);
    },
    flush(cb) {
      // Emit whatever tail is still held back at end of stream.
      cb(null, boundaryStr);
    },
  });
}
function getContentKey(cacheKey: string) {
return md5FromString(`${cacheKey}:content`);
}
function getMapKey(cacheKey: string) {
return md5FromString(`${cacheKey}:map`);
}
function getInfoKey(cacheKey: string) {
return md5FromString(`${cacheKey}:info`);
}
// Assign an 8-character name hash to every bundle that doesn't have one
// yet, derived from the content hashes of all bundles it (transitively)
// references — so a change anywhere in the referenced group changes the
// resulting name hash.
function assignComplexNameHashes(hashRefToNameHash, bundles, bundleInfoMap) {
  for (let bundle of bundles) {
    // Skip bundles whose name hash was already assigned.
    if (hashRefToNameHash.get(bundle.hashReference) != null) {
      continue;
    }

    let combined = [...getBundlesIncludedInHash(bundle.id, bundleInfoMap)]
      .map(bundleId => bundleInfoMap[bundleId].hash)
      .join(':');
    hashRefToNameHash.set(
      bundle.hashReference,
      md5FromString(combined).slice(-8),
    );
  }
}
// Recursive preorder walk over hash references: records this bundle,
// then every bundle it references, transitively, de-duplicated by the
// shared `included` set. Insertion order (preorder) is preserved since
// callers iterate the Set to build a combined hash.
function getBundlesIncludedInHash(
  bundleId,
  bundleInfoMap,
  included = new Set(),
) {
  included.add(bundleId);

  for (let ref of bundleInfoMap[bundleId].hashReferences) {
    let referencedId = getIdFromHashRef(ref);
    if (included.has(referencedId)) {
      continue;
    }
    getBundlesIncludedInHash(referencedId, bundleInfoMap, included);
  }

  return included;
}
function getIdFromHashRef(hashRef: string) {
return hashRef.slice(HASH_REF_PREFIX.length);
}
|
#!/bin/sh
# CocoaPods "Copy Pods Resources" build phase: compiles/copies pod
# resource files into the built product.
set -e
mkdir -p "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
# Manifest of plain resources to rsync in one batch at the end.
RESOURCES_TO_COPY=${PODS_ROOT}/resources-to-copy-${TARGETNAME}.txt
> "$RESOURCES_TO_COPY"
# Asset catalogs are collected separately and compiled with actool.
XCASSET_FILES=()
# Map the Xcode device-family setting to ibtool/actool --target-device args.
case "${TARGETED_DEVICE_FAMILY}" in
1,2)
TARGET_DEVICE_ARGS="--target-device ipad --target-device iphone"
;;
1)
TARGET_DEVICE_ARGS="--target-device iphone"
;;
2)
TARGET_DEVICE_ARGS="--target-device ipad"
;;
3)
TARGET_DEVICE_ARGS="--target-device tv"
;;
4)
TARGET_DEVICE_ARGS="--target-device watch"
;;
*)
# Anything else (or unset) is treated as a macOS target.
TARGET_DEVICE_ARGS="--target-device mac"
;;
esac
# install_resource <path>: stage one pod resource for the app bundle.
# Interface files and data models are compiled, frameworks are rsynced,
# asset catalogs are queued for actool, everything else is queued for a
# plain batch copy via $RESOURCES_TO_COPY.
install_resource()
{
# Absolute paths are used as-is; relative paths resolve under PODS_ROOT.
if [[ "$1" = /* ]] ; then
RESOURCE_PATH="$1"
else
RESOURCE_PATH="${PODS_ROOT}/$1"
fi
# Fail the build early with a helpful message for a stale Podfile.lock.
if [[ ! -e "$RESOURCE_PATH" ]] ; then
cat << EOM
error: Resource "$RESOURCE_PATH" not found. Run 'pod install' to update the copy resources script.
EOM
exit 1
fi
# Dispatch on the resource's file extension.
case $RESOURCE_PATH in
*.storyboard)
echo "ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .storyboard`.storyboardc $RESOURCE_PATH --sdk ${SDKROOT} ${TARGET_DEVICE_ARGS}"
ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .storyboard`.storyboardc" "$RESOURCE_PATH" --sdk "${SDKROOT}" ${TARGET_DEVICE_ARGS}
;;
*.xib)
echo "ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile ${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .xib`.nib $RESOURCE_PATH --sdk ${SDKROOT} ${TARGET_DEVICE_ARGS}"
ibtool --reference-external-strings-file --errors --warnings --notices --minimum-deployment-target ${!DEPLOYMENT_TARGET_SETTING_NAME} --output-format human-readable-text --compile "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename \"$RESOURCE_PATH\" .xib`.nib" "$RESOURCE_PATH" --sdk "${SDKROOT}" ${TARGET_DEVICE_ARGS}
;;
*.framework)
echo "mkdir -p ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
echo "rsync -av $RESOURCE_PATH ${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
rsync -av "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
;;
*.xcdatamodel)
echo "xcrun momc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH"`.mom\""
xcrun momc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodel`.mom"
;;
*.xcdatamodeld)
echo "xcrun momc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodeld`.momd\""
xcrun momc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcdatamodeld`.momd"
;;
*.xcmappingmodel)
echo "xcrun mapc \"$RESOURCE_PATH\" \"${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcmappingmodel`.cdm\""
xcrun mapc "$RESOURCE_PATH" "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}/`basename "$RESOURCE_PATH" .xcmappingmodel`.cdm"
;;
*.xcassets)
# Asset catalogs are compiled together later; just queue them.
ABSOLUTE_XCASSET_FILE="$RESOURCE_PATH"
XCASSET_FILES+=("$ABSOLUTE_XCASSET_FILE")
;;
*)
# Plain resources: queue for the batch rsync at the end of the script.
echo "$RESOURCE_PATH"
echo "$RESOURCE_PATH" >> "$RESOURCES_TO_COPY"
;;
esac
}
# Stage resources for the active build configuration. (CocoaPods
# generates one branch per configuration; the lists here happen to match.)
if [[ "$CONFIGURATION" == "Debug" ]]; then
install_resource "DDSocial/DDSocial/MI/MiSDK/MiPassport.bundle"
install_resource "DDSocial/DDSocial/Sina/libWeiboSDK/WeiboSDK.bundle"
install_resource "FBSDKCoreKit/FacebookSDKStrings.bundle"
install_resource "GTMOAuth2/Source/Touch/GTMOAuth2ViewTouch.xib"
install_resource "GoogleSignIn/Resources/GoogleSignIn.bundle"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
install_resource "DDSocial/DDSocial/MI/MiSDK/MiPassport.bundle"
install_resource "DDSocial/DDSocial/Sina/libWeiboSDK/WeiboSDK.bundle"
install_resource "FBSDKCoreKit/FacebookSDKStrings.bundle"
install_resource "GTMOAuth2/Source/Touch/GTMOAuth2ViewTouch.xib"
install_resource "GoogleSignIn/Resources/GoogleSignIn.bundle"
fi
# Batch-copy all queued plain resources into the product...
mkdir -p "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${TARGET_BUILD_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
# ...and into the install location as well for archive/install builds.
if [[ "${ACTION}" == "install" ]] && [[ "${SKIP_INSTALL}" == "NO" ]]; then
mkdir -p "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
rsync -avr --copy-links --no-relative --exclude '*/.svn/*' --files-from="$RESOURCES_TO_COPY" / "${INSTALL_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
fi
rm -f "$RESOURCES_TO_COPY"
# Compile all collected asset catalogs in a single actool invocation
# (only when actool exists and at least one catalog was queued).
if [[ -n "${WRAPPER_EXTENSION}" ]] && [ "`xcrun --find actool`" ] && [ -n "$XCASSET_FILES" ]
then
# Find all other xcassets (this unfortunately includes those of path pods and other targets).
OTHER_XCASSETS=$(find "$PWD" -iname "*.xcassets" -type d)
while read line; do
if [[ $line != "${PODS_ROOT}*" ]]; then
XCASSET_FILES+=("$line")
fi
done <<<"$OTHER_XCASSETS"
printf "%s\0" "${XCASSET_FILES[@]}" | xargs -0 xcrun actool --output-format human-readable-text --notices --warnings --platform "${PLATFORM_NAME}" --minimum-deployment-target "${!DEPLOYMENT_TARGET_SETTING_NAME}" ${TARGET_DEVICE_ARGS} --compress-pngs --compile "${BUILT_PRODUCTS_DIR}/${UNLOCALIZED_RESOURCES_FOLDER_PATH}"
fi
|
def find_max(root):
    """Return the maximum value stored in a BST rooted at ``root``.

    In a binary search tree the maximum lives at the right-most node,
    so follow ``right`` pointers until none remain.

    Returns ``None`` for an empty tree (the original fell off the end
    of the function implicitly; this makes that behavior explicit).
    """
    # Idiom fix: compare against None with `is` / `is not`, not `!=`.
    if root is None:
        return None
    if root.right is not None:
        return find_max(root.right)
    return root.data
# Driver Code
if __name__ == "__main__":
    # Build a sample BST (``Node`` is defined elsewhere in this file):
    #         20
    #        /  \
    #       8    22
    #      / \
    #     4   12
    #        /  \
    #       10   14
    root = Node(20)
    root.left = Node(8)
    root.right = Node(22)
    root.left.left = Node(4)
    root.left.right = Node(12)
    root.left.right.left = Node(10)
    root.left.right.right = Node(14)
    # The right-most node (22) is the maximum.
    print("Maximum element in BST:", find_max(root))
package com.tranzzo.android.sdk;
import android.content.Context;
import androidx.annotation.NonNull;
import java.util.Map;
/**
 * Provider of key/value telemetry data derived from an Android
 * {@link Context}.
 */
interface TelemetryProvider {
    /**
     * Collect telemetry entries as a string-to-string map.
     *
     * @param context Android context used to look up the data; never null
     * @return non-null map of telemetry key/value pairs
     */
    @NonNull
    Map<String, String> collect(@NonNull Context context);
}
|
<filename>src/reader/templatetags/shortcuts.py
from django.core.serializers import serialize
from django.db.models.query import QuerySet
from django import template
import json
register = template.Library()
@register.filter(is_safe=True)
def jsonify(obj):
    """Template filter: serialize *obj* to a JSON string.

    QuerySets are serialized with Django's JSON serializer (which
    understands model instances); any other value falls back to
    ``json.dumps``.
    """
    if isinstance(obj, QuerySet):
        return serialize('json', obj)
    # BUGFIX: the original called json.dumps(object) — serializing the
    # *builtin* ``object`` type instead of the filter argument ``obj``,
    # which raises TypeError for every non-QuerySet value.
    return json.dumps(obj)
@register.filter(is_safe=True)
def unslugify(txt):
    """Template filter: turn a slug into a title-cased phrase.

    Both ``_`` and ``-`` become spaces, then each word is capitalized,
    e.g. ``"my-page_title"`` -> ``"My Page Title"``.
    """
    # One translation table pass replaces the two chained .replace() calls.
    spaced = str(txt).translate(str.maketrans('_-', '  '))
    return spaced.title()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.