text stringlengths 1 1.05M |
|---|
<gh_stars>0
const aws = require('aws-sdk');
const bcrypt = require('bcrypt');
const dynamodb = new aws.DynamoDB.DocumentClient();
const jwt = require('jsonwebtoken');
const ms = require('ms');
const uuid = require('uuid').v4;
const SALT_ROUNDS = 10; // bcrypt cost factor for hashing refresh tokens
const REFRESH_TOKENS_TABLE = process.env.REFRESH_TOKENS_TABLE_NAME;
const USERS_TABLE = process.env.USERS_TABLE_NAME;
const JWT_EXPIRATION = process.env.JWT_EXPIRATION; // zeit/ms duration string, e.g. '15m'
const JWT_REFRESH_EXPIRATION = process.env.JWT_REFRESH_EXPIRATION; // zeit/ms duration string, e.g. '30d'
const UNKNOWN_DEVICE_ID = 'unknown'; // fallback when callers omit deviceId

let self = {
  /**
   * Extracts and verifies the JWT bearer token from the request headers.
   *
   * @param {Object} args
   * @param {Object} args.requestHeaders - HTTP headers; expects
   *   `Authorization: Bearer <token>`.
   * @returns {Promise<Object>} resolves with the decoded JWT payload,
   *   rejects when the header is missing or verification fails.
   */
  authenticateToken: ({ requestHeaders }) => {
    return new Promise((resolve, reject) => {
      const authHeader = requestHeaders['Authorization'];
      // Header format is "Bearer <token>"; take the part after the space.
      const token = authHeader && authHeader.split(' ')[1];
      // `== null` intentionally matches both null and undefined.
      if (token == null) {
        return reject(new Error("Authentication token required"));
      }
      jwt.verify(token, process.env.JWT_SECRET, (err, user) => {
        if (err) {
          console.error(err);
          return reject(new Error("Invalid/expired authentication token"));
        }
        resolve(user);
      });
    });
  },

  /**
   * Exchanges a stored refresh token for a fresh auth/refresh token pair.
   *
   * Looks up the bcrypt-hashed token for (userId, deviceId), compares it to
   * the presented token, reloads the user record, and delegates to
   * generateTokens. All failure modes reject with a generic message so a
   * caller cannot distinguish "no such token" from "wrong token".
   *
   * @param {Object} args
   * @param {string} args.userId
   * @param {string} args.refreshToken - plaintext token presented by client
   * @param {string} [args.deviceId] - defaults to UNKNOWN_DEVICE_ID
   * @returns {Promise<Object>} resolves with the generateTokens payload.
   */
  refreshToken: ({ userId, refreshToken, deviceId }) => {
    return new Promise((resolve, reject) => {
      deviceId = deviceId || UNKNOWN_DEVICE_ID;
      const onFoundRefreshToken = (result) => {
        if (result.Item && result.Item.hashedRefreshToken) {
          // `matches` (was `result`) renamed to stop shadowing the DynamoDB result.
          bcrypt.compare(refreshToken, result.Item.hashedRefreshToken, function (err, matches) {
            if (!err && matches) {
              loadUser();
            } else {
              return reject(new Error("Refresh token invalid"));
            }
          });
        } else {
          // NOTE(review): this logs the raw refresh token; consider redacting.
          console.error(`refresh token ${refreshToken} for user ${userId} not found`);
          return reject(new Error("Refresh token invalid"));
        }
      };
      const loadUser = () => {
        dynamodb.get({
          TableName: USERS_TABLE,
          Key: { userId }
        }).promise()
          .then((result) => {
            self.generateTokens({ ...result.Item, deviceId })
              .then(resolve)
              .catch(reject);
          })
          .catch(err => {
            console.error(`error looking up user`, err);
            return reject(new Error("Unable to authenticate user"));
          });
      };
      // look up existing refresh token
      dynamodb.get({
        TableName: REFRESH_TOKENS_TABLE,
        Key: { userId, deviceId }
      }).promise()
        .then(onFoundRefreshToken)
        .catch(err => {
          console.error(`error looking up refresh token`, err);
          return reject(new Error("Refresh token invalid"));
        });
    });
  },

  /**
   * Issues a signed JWT plus a new random refresh token for the user.
   *
   * Only the bcrypt hash of the refresh token is persisted (one item per
   * user+device). If hashing or storage fails, the user still receives a
   * working auth token; the refresh token is then reported with a '0h'
   * expiration (hash failure) or may simply not be stored (put failure).
   *
   * @param {Object} args - user claims plus deviceId
   * @returns {Promise<Object>} resolves with tokens and their expirations.
   */
  generateTokens: ({ userId, email, name, projects, deviceId }) => {
    return new Promise((resolve, reject) => {
      deviceId = deviceId || UNKNOWN_DEVICE_ID;
      // JWT_EXPIRATION in zeit/ms
      jwt.sign(
        { userId, email, name, projects, deviceId },
        process.env.JWT_SECRET,
        { expiresIn: JWT_EXPIRATION },
        (err, authToken) => {
          if (err) {
            console.error(`error signing tokens`, err);
            return reject(new Error("Error signing tokens"));
          }
          // generate refresh token
          const refreshToken = uuid();
          bcrypt.hash(refreshToken, SALT_ROUNDS, function (err, hashedRefreshToken) {
            if (err) {
              console.error(`error hashing refresh token ${refreshToken}`, err);
              // even if an error occurs, the user can continue without a refresh token
              return resolve({
                userId,
                email,
                name,
                projects,
                authToken,
                authTokenExpiration: JWT_EXPIRATION,
                refreshToken,
                refreshTokenExpiration: '0h',
                deviceId,
              });
            }
            // store bcrypt-hashed refresh token
            dynamodb.put({
              TableName: REFRESH_TOKENS_TABLE,
              Item: {
                userId,
                deviceId,
                hashedRefreshToken,
                // BUG FIX: ms() returns MILLISECONDS but this attribute is an
                // epoch-SECONDS timestamp (Date.now()/1000). Previously the raw
                // millisecond value was added, pushing the expiration centuries
                // into the future; convert to seconds before adding.
                "expiration": Math.floor(Date.now() / 1000) + Math.floor(ms(JWT_REFRESH_EXPIRATION) / 1000)
              }
            }).promise()
              .catch(err => {
                console.error(`failed to store refresh token ${refreshToken} for user ${userId}`, err);
                // even if an error occurs, the user can continue without a refresh token
                // so we cascade to then()
              })
              .then(() => {
                resolve({
                  userId,
                  email,
                  name,
                  projects,
                  authToken,
                  authTokenExpiration: JWT_EXPIRATION,
                  refreshToken,
                  refreshTokenExpiration: JWT_REFRESH_EXPIRATION,
                  deviceId,
                });
              });
          });
        }
      );
    });
  },
};
module.exports = self;
<reponame>Boatdude55/staging-website
/**
 * UMD sub — exports a greeting string built from the UMD sub-library value.
 */
import { subLib } from "./lib/umd-sub-lib.js";

// "Hello" plus a space, concatenated with the library export (same result as
// the original 'Hello' + " " + subLib, with the constant prefix pre-joined).
export const Sub = "Hello " + subLib;
|
#!/bin/bash
# Build sdist/wheel and upload them to TestPyPI.
# -e: abort immediately if the build fails so we never upload stale artifacts.
set -e

# -f: do not fail when the directories do not exist yet (first run / clean checkout).
rm -rf dist
rm -rf build

python setup.py sdist bdist_wheel
twine upload --repository-url https://test.pypi.org/legacy/ dist/*
|
async def test_reauth_invalid_auth(opp):
    """Reauth with invalid credentials should re-show the confirm form with an error."""
    # Set up the initial integration for a smart lock
    # NOTE(review): `entry` is currently unused; kept for parity with sibling tests.
    entry = await setup_nuki_integration(opp)
    # Initiate the reauthentication flow with invalid authentication data
    result = await opp.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_REAUTH}, data={"invalid_auth_data": "example"}
    )
    # Verify the behavior when invalid authentication data is used:
    # the flow stays on the reauth confirmation form and surfaces the error.
    assert result["type"] == "form"
    assert result["step_id"] == "reauth_confirm"
    assert result["errors"] == {"base": "invalid_auth_data"}
    # Additional testing steps can be added here to further verify the behavior
<filename>website/canvas/management/commands/profile_redis.py<gh_stars>10-100
import collections
import operator
import re
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from canvas.redis_models import CanvasRedis
class Command(BaseCommand):
    # Profiles redis memory usage: samples random keys (or walks all keys when
    # sample_size is "*"), sizes each via DEBUG OBJECT, and prints per-key-group
    # totals plus an extrapolated whole-keyspace estimate. Python 2 syntax.
    args = 'sample_size'
    help = "Profile redis based on random key sampling. Override host with localhost for local testing."

    def handle(self, sample='10000', host='ip-10-203-46-218.ec2.internal', *args, **options):
        # Two clients: main data db and cache db on the same (slave) host.
        slave_redis = CanvasRedis(host=host, port=settings.REDIS_PORT, db=settings.REDIS_DB_MAIN)
        slave_cache = CanvasRedis(host=host, port=settings.REDIS_PORT, db=settings.REDIS_DB_CACHE)
        # "*" means summarize every key instead of sampling a fixed count.
        if sample != "*":
            sample = int(sample)

        def human(size):
            # Multiply size * 3 to roughly account for the difference in RDB vs in-memory size.
            return "%.1f MB" % (size * 3 / 1000000.0)

        for client in (slave_redis, slave_cache):
            dbsize = client.dbsize()
            if sample == "*":
                print "Summarizing total memory usage for db %s" % client.connection.db
                key_names = client.keys("*")
            else:
                groups = collections.defaultdict(lambda: 0)
                # NOTE(review): `sizes` is never used below; candidate for removal.
                sizes = []
                # Scale factor to extrapolate the sampled total to the full keyspace.
                scalar = 1.0 * dbsize / sample
                print "Sampling %s random keys (of %s) from db %s" % (sample, dbsize, client.connection.db)
                pipeline = client.pipeline()
                for i in range(sample):
                    pipeline.randomkey()
                key_names = pipeline.execute()
            # Size keys in pipelined chunks to bound round-trip and pipeline size.
            chunksize = 10000
            cursor = 0
            key_sizes = []
            while cursor < len(key_names):
                pipeline = client.pipeline()
                for result in key_names[cursor:cursor+chunksize]:
                    pipeline.execute_command("DEBUG", "OBJECT", result)
                debug_chunk = pipeline.execute()
                for i, result in enumerate(debug_chunk):
                    # DEBUG OBJECT output is space-separated; prefixing 'type:'
                    # makes the leading token parse as a k:v pair like the rest.
                    debug_dict = dict([kv.split(':') for kv in ('type:' + result).split()])
                    key = key_names[cursor + i]
                    # Approximate footprint: serialized value length plus key length.
                    keysize = int(debug_dict['serializedlength']) + len(key)
                    key_sizes.append(keysize)
                cursor += chunksize
            if sample == "*":
                # Full-keyspace mode: print the grand total for this db and move on.
                print human(sum(key_sizes))
                continue
            # TODO: msg_backlogs look big, figure out how to group these (probably show biggest 25 keys too)
            for key, keysize in zip(key_names, key_sizes):
                # Collapse the middle segments between ':' or '.' separators into '#'
                # so keys that differ only by id aggregate into one group.
                keygroup = re.sub("(?<=[:\.]).+(?=[:\.])", "#", key)
                groups[keygroup] += keysize
            print "== TOP 10 RESULTS =="
            for k in sorted(groups, key=lambda k: -groups[k])[:10]:
                size = groups[k]
                print k, human(size * scalar)
            avg = 1.0 * sum(key_sizes) / len(key_sizes)
            print "Average key size: %s (%s estimated total)" % (avg, human(avg * dbsize))
            print ""
|
import * as React from "react";
import { connect } from "react-redux";
import Navigation from "../components/Navigation";
interface IWithNavProps {
  currentPath: string;
}

/**
 * Layout wrapper: renders the navigation bar and a heading showing the
 * current path, followed by whatever children it receives.
 */
const withNav: React.FunctionComponent<IWithNavProps> = ({ currentPath, children }) => (
  <React.Fragment>
    <Navigation></Navigation>
    <h6>
      Current Path : <strong>{currentPath}</strong>
    </h6>
    {children}
  </React.Fragment>
);

// Pull the current path out of the Home slice of the redux store.
const mapStateToProps = state => ({
  currentPath: state.Home.currentPath
});

export default connect(mapStateToProps, null)(withNav);
|
import React from 'react';
import { Link } from 'react-router';
import {Col, Container, Row} from "react-bootstrap";
import doingGoodHero from "../../../assests/images/home/DoingGood_logo_HERO.png";
import addPhotoIcon from "../../../assests/images/dashboard/add photo icon.png";
/**
 * Organization dashboard header.
 *
 * Renders: a top row (logo link, "Search Postings" button, dashboard and
 * logout links), a welcome banner with the organization's name, and a
 * blue title bar.
 *
 * Props:
 *   organization       - object providing `organizationName`
 *   gotoSearchPostings - click handler for the "Search Postings" button
 */
const Header = (props) => {
  const { organization, gotoSearchPostings } = props;
  return (
    <React.Fragment>
      {/* Top navigation row */}
      <Container>
        <Row>
          <Col><Link to="/landingpage"><img src={doingGoodHero} width="auto" height="70px" className="imgcenter"/></Link></Col>
          <Col><button className="btn btnPostOrange" onClick={gotoSearchPostings}><span style={{'fontFamily':'Gotham-Book','fontSize':'16','color':'white'}}>Search Postings</span></button>
          </Col>
          <Col><Link to="/organizationDashboard" className="current"><span className="textcenter">My Dashboard</span></Link></Col>
          <Col><Link to= "/login" className="current"><span className="textcenter logoutText">Logout</span></Link></Col>
        </Row>
      </Container>
      {/* Welcome banner with the organization name */}
      <Row className="bgWelcomeMessage">
        <Col md={1}>
        </Col>
        <Col md={9}>
          <h3>Welcome, {organization.organizationName}</h3>
        </Col>
        <Col>
          <img src={doingGoodHero} width="auto" height="70px" className="imgcenter"/>
        </Col>
      </Row>
      {/* Title bar */}
      <div className="show-grid blueBar">
        <label>DoingGood Organization Dashboard</label>
      </div>
    </React.Fragment>
  );
};
export default Header;
|
/**
 * Converts an Excel-style column title to its 1-based column number
 * (e.g. "A" -> 1, "Z" -> 26, "AA" -> 27).
 * @param {string} s - column title of uppercase letters A-Z
 * @return {number} the column index
 */
var titleToNumber = function (s) {
  // Horner's method over base 26, where 'A' (char code 65) maps to 1.
  return [...s].reduce((total, ch) => total * 26 + (ch.charCodeAt(0) - 64), 0);
};
export default titleToNumber;
#include "mrpg.hpp"
// Entry point: reads parameters and dataset, echoes the configuration,
// builds the MRPG structure, and writes result and graph outputs.
// All helpers and globals (dataset, degree, core_no, ...) come from mrpg.hpp.
int main() {
    // show the current time
    get_current_time();
    // file input
    input_parameter();
    // data input
    input_data();
    // Echo the configuration loaded by the input_* calls above.
    std::cout << " --------------------\n";
    std::cout << " dataset id: " << dataset_id << "\n";
    std::cout << " dataset cardinality: " << dataset.size() << "\n";
    std::cout << " dataset dimensionality: " << dimensionality << "\n";
    std::cout << " degree: " << degree << "\n";
    std::cout << " #threads: " << core_no << "\n";
    std::cout << " --------------------\n\n";
    // MRPG construction
    build_mrpg();
    // result output
    output_result();
    // graph output
    output_graph();
    return 0;
}
package io.smallrye.reactive.streams.stages;
import io.reactivex.Flowable;
import io.reactivex.schedulers.Schedulers;
import org.eclipse.microprofile.reactive.streams.operators.ReactiveStreams;
import org.eclipse.microprofile.reactive.streams.operators.spi.Stage;
import org.junit.After;
import org.junit.Test;
import org.reactivestreams.Publisher;
import org.reactivestreams.Subscriber;
import java.util.List;
import java.util.Objects;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicReference;
import static org.assertj.core.api.Assertions.assertThat;
/**
* Checks the behavior of the {@link FlatMapCompletionStageFactory}.
*
* @author <a href="http://escoffier.me"><NAME></a>
*/
public class FlatMapCompletionStageFactoryTest extends StageTestBase {

    private final FlatMapCompletionStageFactory factory = new FlatMapCompletionStageFactory();

    // Single-threaded executor used to complete the CompletionStages off the caller thread.
    private final ExecutorService executor = Executors.newSingleThreadExecutor();

    @After
    public void cleanup() {
        // Release the executor thread after each test.
        executor.shutdown();
    }

    /**
     * Happy path: filter a stream, then apply two asynchronous
     * flatMapCompletionStage transformations and collect the results.
     */
    @Test
    public void create() throws ExecutionException, InterruptedException {
        Flowable<Integer> flowable = Flowable.fromArray(1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
                .subscribeOn(Schedulers.computation());
        List<String> list = ReactiveStreams.fromPublisher(flowable)
                .filter(i -> i < 4)
                .flatMapCompletionStage(this::square)
                .flatMapCompletionStage(this::asString)
                .toList()
                .run().toCompletableFuture().get();
        assertThat(list).containsExactly("1", "4", "9");
    }

    // Completes asynchronously (on the executor) with i * i.
    private CompletionStage<Integer> square(int i) {
        CompletableFuture<Integer> cf = new CompletableFuture<>();
        executor.submit(() -> cf.complete(i * i));
        return cf;
    }

    // Completes asynchronously (on the executor) with the decimal string form of i.
    private CompletionStage<String> asString(int i) {
        CompletableFuture<String> cf = new CompletableFuture<>();
        executor.submit(() -> cf.complete(Objects.toString(i)));
        return cf;
    }

    @Test(expected = NullPointerException.class)
    public void createWithoutStage() {
        factory.create(null, null);
    }

    @Test(expected = NullPointerException.class)
    public void createWithoutFunction() {
        factory.create(null, () -> null);
    }

    /** A mapper returning a null CompletionStage must trigger an NPE on emission. */
    @Test(expected = NullPointerException.class)
    public void testInjectingANullCompletionStage() {
        AtomicReference<Subscriber<? super String>> reference = new AtomicReference<>();
        Publisher<String> publisher = reference::set;
        ReactiveStreams.fromPublisher(publisher)
                .flatMapCompletionStage(s -> (CompletionStage<String>) null)
                .toList()
                .run()
                .toCompletableFuture();
        reference.get().onNext("a");
    }

    /** A null element pushed by the publisher must trigger an NPE. */
    @Test(expected = NullPointerException.class)
    public void testInjectingANullItem() {
        AtomicReference<Subscriber<? super String>> reference = new AtomicReference<>();
        Publisher<String> publisher = reference::set;
        ReactiveStreams.fromPublisher(publisher)
                .flatMapCompletionStage(s -> (CompletionStage<String>) null)
                .toList()
                .run()
                .toCompletableFuture();
        reference.get().onNext(null);
    }
}
# Load the zsh modules used below: zsh/datetime provides $EPOCHSECONDS,
# and zsh/stat provides the zstat builtin (loaded as a builtin only, -F b:zstat).
zmodload zsh/datetime
zmodload -F zsh/stat b:zstat
# webpage: https://github.com/supercrabtree/k
k () {
# ----------------------------------------------------------------------------
# Setup
# ----------------------------------------------------------------------------
# Stop stat failing when a directory contains either no files or no hidden files
# Track if we _accidentally_ create a new global variable
setopt local_options null_glob typeset_silent no_auto_pushd
# Process options and get files/directories
typeset -a o_all o_almost_all o_human o_si o_directory o_no_directory o_no_vcs o_help
zparseopts -E -D \
a=o_all -all=o_all \
A=o_almost_all -almost-all=o_almost_all \
d=o_directory -directory=o_directory \
h=o_human -human=o_human \
-si=o_si \
n=o_no_directory -no-directory=o_no_directory \
-no-vcs=o_no_vcs \
-help=o_help
# Print Help if bad usage, or they asked for it
if [[ $? != 0 || "$o_help" != "" ]]
then
print -u2 "Usage: k [options] DIR"
print -u2 "Options:"
print -u2 "\t-a --all list entries starting with ."
print -u2 "\t-A --almost-all list all except . and .."
print -u2 "\t-d --directory list only directories"
print -u2 "\t-n --no-directory do not list directories"
print -u2 "\t-h --human show filesizes in human-readable format"
print -u2 "\t --si with -h, use powers of 1000 not 1024"
print -u2 "\t --no-vcs do not get VCS status (much faster)"
print -u2 "\t --help show this help"
return 1
fi
# Check for conflicts
if [[ "$o_directory" != "" && "$o_no_directory" != "" ]]; then
print -u2 "$o_directory and $o_no_directory cannot be used together"
return 1
fi
# Check which numfmt available (if any), warn user if not available
typeset -i numfmt_available=0
typeset -i gnumfmt_available=0
if [[ "$o_human" != "" ]]; then
if [[ $+commands[numfmt] == 1 ]]; then
numfmt_available=1
elif [[ $+commands[gnumfmt] == 1 ]]; then
gnumfmt_available=1
else
print -u2 "'numfmt' or 'gnumfmt' command not found, human readable output will not work."
print -u2 "\tFalling back to normal file size output"
# Set o_human to off
o_human=""
fi
fi
# Create numfmt local function
numfmt_local () {
if [[ "$o_si" != "" ]]; then
if (( $numfmt_available )); then
numfmt --to=si $1
elif (( $gnumfmt_available )); then
gnumfmt --to=si $1
fi
else
if (( $numfmt_available )); then
numfmt --to=iec $1
elif (( $gnumfmt_available )); then
gnumfmt --to=iec $1
fi
fi
}
# Set if we're in a repo or not
typeset -i INSIDE_WORK_TREE=0
if [[ $(command git rev-parse --is-inside-work-tree 2>/dev/null) == true ]]; then
INSIDE_WORK_TREE=1
fi
# Setup array of directories to print
typeset -a base_dirs
if [[ "$@" == "" ]]; then
base_dirs=.
else
base_dirs=($@)
fi
# Colors
# ----------------------------------------------------------------------------
# default colors
K_COLOR_DI="0;34" # di:directory
K_COLOR_LN="0;35" # ln:symlink
K_COLOR_SO="0;32" # so:socket
K_COLOR_PI="0;33" # pi:pipe
K_COLOR_EX="0;31" # ex:executable
K_COLOR_BD="34;46" # bd:block special
K_COLOR_CD="34;43" # cd:character special
K_COLOR_SU="30;41" # su:executable with setuid bit set
K_COLOR_SG="30;46" # sg:executable with setgid bit set
K_COLOR_TW="30;42" # tw:directory writable to others, with sticky bit
K_COLOR_OW="30;43" # ow:directory writable to others, without sticky bit
# read colors if osx and $LSCOLORS is defined
if [[ $(uname) == 'Darwin' && -n $LSCOLORS ]]; then
# Translate OSX/BSD's LSCOLORS so we can use the same here
K_COLOR_DI=$(_k_bsd_to_ansi $LSCOLORS[1] $LSCOLORS[2])
K_COLOR_LN=$(_k_bsd_to_ansi $LSCOLORS[3] $LSCOLORS[4])
K_COLOR_SO=$(_k_bsd_to_ansi $LSCOLORS[5] $LSCOLORS[6])
K_COLOR_PI=$(_k_bsd_to_ansi $LSCOLORS[7] $LSCOLORS[8])
K_COLOR_EX=$(_k_bsd_to_ansi $LSCOLORS[9] $LSCOLORS[10])
K_COLOR_BD=$(_k_bsd_to_ansi $LSCOLORS[11] $LSCOLORS[12])
K_COLOR_CD=$(_k_bsd_to_ansi $LSCOLORS[13] $LSCOLORS[14])
K_COLOR_SU=$(_k_bsd_to_ansi $LSCOLORS[15] $LSCOLORS[16])
K_COLOR_SG=$(_k_bsd_to_ansi $LSCOLORS[17] $LSCOLORS[18])
K_COLOR_TW=$(_k_bsd_to_ansi $LSCOLORS[17] $LSCOLORS[18])
K_COLOR_OW=$(_k_bsd_to_ansi $LSCOLORS[19] $LSCOLORS[20])
fi
# read colors if linux and $LS_COLORS is defined
# if [[ $(uname) == 'Linux' && -n $LS_COLORS ]]; then
# fi
# ----------------------------------------------------------------------------
# Loop over passed directories and files to display
# ----------------------------------------------------------------------------
for base_dir in $base_dirs
do
# ----------------------------------------------------------------------------
# Display name if multiple paths were passed
# ----------------------------------------------------------------------------
if [[ "$#base_dirs" > 1 ]]; then
# Only add a newline if its not the first iteration
if [[ "$base_dir" != "${base_dirs[1]}" ]]; then
print
fi
print -r "${base_dir}:"
fi
# ----------------------------------------------------------------------------
# Vars
# ----------------------------------------------------------------------------
typeset -a MAX_LEN A RESULTS STAT_RESULTS
typeset TOTAL_BLOCKS
# Get now
typeset K_EPOCH="${EPOCHSECONDS:?}"
typeset -i TOTAL_BLOCKS=0
MAX_LEN=(0 0 0 0 0 0)
# Array to hold results from `stat` call
RESULTS=()
# only set once per directory so must be out of the main loop
typeset -i IS_GIT_REPO=0
typeset GIT_TOPLEVEL
typeset -i LARGE_FILE_COLOR=196
typeset -a SIZELIMITS_TO_COLOR
SIZELIMITS_TO_COLOR=(
1024 46 # <= 1kb
2048 82 # <= 2kb
3072 118 # <= 3kb
5120 154 # <= 5kb
10240 190 # <= 10kb
20480 226 # <= 20kb
40960 220 # <= 40kb
102400 214 # <= 100kb
262144 208 # <= 0.25mb || 256kb
524288 202 # <= 0.5mb || 512kb
)
typeset -i ANCIENT_TIME_COLOR=236 # > more than 2 years old
typeset -a FILEAGES_TO_COLOR
FILEAGES_TO_COLOR=(
0 196 # < in the future, #spooky
60 255 # < less than a min old
3600 252 # < less than an hour old
86400 250 # < less than 1 day old
604800 244 # < less than 1 week old
2419200 244 # < less than 28 days (4 weeks) old
15724800 242 # < less than 26 weeks (6 months) old
31449600 240 # < less than 1 year old
62899200 238 # < less than 2 years old
)
# ----------------------------------------------------------------------------
# Build up list of files/directories to show
# ----------------------------------------------------------------------------
typeset -a show_list
show_list=()
# Check if it even exists
if [[ ! -e $base_dir ]]; then
print -u2 "k: cannot access $base_dir: No such file or directory"
# If its just a file, skip the directory handling
elif [[ -f $base_dir ]]; then
show_list=($base_dir)
#Directory, add its contents
else
# Break total blocks of the front of the stat call, then push the rest to results
if [[ "$o_all" != "" && "$o_almost_all" == "" && "$o_no_directory" == "" ]]; then
show_list+=($base_dir/.)
show_list+=($base_dir/..)
fi
if [[ "$o_all" != "" || "$o_almost_all" != "" ]]; then
if [[ "$o_directory" != "" ]]; then
show_list+=($base_dir/*(D/))
elif [[ "$o_no_directory" != "" ]]; then
#Use (^/) instead of (.) so sockets and symlinks get displayed
show_list+=($base_dir/*(D^/))
else
show_list+=($base_dir/*(D))
fi
else
if [[ "$o_directory" != "" ]]; then
show_list+=($base_dir/*(/))
elif [[ "$o_no_directory" != "" ]]; then
#Use (^/) instead of (.) so sockets and symlinks get displayed
show_list+=($base_dir/*(^/))
else
show_list+=($base_dir/*)
fi
fi
fi
# ----------------------------------------------------------------------------
# Stat call to get directory listing
# ----------------------------------------------------------------------------
typeset -i i=1 j=1 k=1
typeset -a STATS_PARAMS_LIST
typeset fn statvar h
typeset -A sv
STATS_PARAMS_LIST=()
for fn in $show_list
do
statvar="stats_$i"
typeset -A $statvar
zstat -H $statvar -Lsn -F "%s^%d^%b^%H:%M^%Y" -- "$fn" # use lstat, render mode/uid/gid to strings
STATS_PARAMS_LIST+=($statvar)
i+=1
done
# On each result calculate padding by getting max length on each array member
for statvar in "${STATS_PARAMS_LIST[@]}"
do
sv=("${(@Pkv)statvar}")
if [[ ${#sv[mode]} -gt $MAX_LEN[1] ]]; then MAX_LEN[1]=${#sv[mode]} ; fi
if [[ ${#sv[nlink]} -gt $MAX_LEN[2] ]]; then MAX_LEN[2]=${#sv[nlink]} ; fi
if [[ ${#sv[uid]} -gt $MAX_LEN[3] ]]; then MAX_LEN[3]=${#sv[uid]} ; fi
if [[ ${#sv[gid]} -gt $MAX_LEN[4] ]]; then MAX_LEN[4]=${#sv[gid]} ; fi
if [[ "$o_human" != "" ]]; then
h=$(numfmt_local ${sv[size]})
if (( ${#h} > $MAX_LEN[5] )); then MAX_LEN[5]=${#h}; fi
else
if [[ ${#sv[size]} -gt $MAX_LEN[5] ]]; then MAX_LEN[5]=${#sv[size]}; fi
fi
TOTAL_BLOCKS+=$sv[blocks]
done
# Print total block before listing
echo "total $TOTAL_BLOCKS"
# ----------------------------------------------------------------------------
# Loop through each line of stat, pad where appropriate and do git dirty checking
# ----------------------------------------------------------------------------
typeset REPOMARKER
typeset PERMISSIONS HARDLINKCOUNT OWNER GROUP FILESIZE FILESIZE_OUT DATE NAME SYMLINK_TARGET
typeset FILETYPE PER1 PER2 PER3 PERMISSIONS_OUTPUT STATUS
typeset TIME_DIFF TIME_COLOR DATE_OUTPUT
typeset -i IS_DIRECTORY IS_SYMLINK IS_SOCKET IS_PIPE IS_EXECUTABLE IS_BLOCK_SPECIAL IS_CHARACTER_SPECIAL HAS_UID_BIT HAS_GID_BIT HAS_STICKY_BIT IS_WRITABLE_BY_OTHERS
typeset -i COLOR
k=1
for statvar in "${STATS_PARAMS_LIST[@]}"
do
sv=("${(@Pkv)statvar}")
# We check if the result is a git repo later, so set a blank marker indication the result is not a git repo
REPOMARKER=" "
IS_DIRECTORY=0
IS_SYMLINK=0
IS_SOCKET=0
IS_PIPE=0
IS_EXECUTABLE=0
IS_BLOCK_SPECIAL=0
IS_CHARACTER_SPECIAL=0
HAS_UID_BIT=0
HAS_GID_BIT=0
HAS_STICKY_BIT=0
PERMISSIONS="${sv[mode]}"
HARDLINKCOUNT="${sv[nlink]}"
OWNER="${sv[uid]}"
GROUP="${sv[gid]}"
FILESIZE="${sv[size]}"
DATE=(${(s:^:)sv[mtime]}) # Split date on ^
NAME="${sv[name]}"
SYMLINK_TARGET="${sv[link]}"
# Check for file types
if [[ -d "$NAME" ]]; then IS_DIRECTORY=1; fi
if [[ -L "$NAME" ]]; then IS_SYMLINK=1; fi
if [[ -S "$NAME" ]]; then IS_SOCKET=1; fi
if [[ -p "$NAME" ]]; then IS_PIPE=1; fi
if [[ -x "$NAME" ]]; then IS_EXECUTABLE=1; fi
if [[ -b "$NAME" ]]; then IS_BLOCK_SPECIAL=1; fi
if [[ -c "$NAME" ]]; then IS_CHARACTER_SPECIAL=1; fi
if [[ -u "$NAME" ]]; then HAS_UID_BIT=1; fi
if [[ -g "$NAME" ]]; then HAS_GID_BIT=1; fi
if [[ -k "$NAME" ]]; then HAS_STICKY_BIT=1; fi
if [[ $PERMISSIONS[9] == 'w' ]]; then IS_WRITABLE_BY_OTHERS=1; fi
# IS_GIT_REPO is a 1 if $NAME is a file/directory in a git repo, OR if $NAME is a git-repo itself
# GIT_TOPLEVEL is set to the directory containing the .git folder of a git-repo
# is this a git repo
if [[ "$o_no_vcs" != "" ]]; then
IS_GIT_REPO=0
GIT_TOPLEVEL=''
else
if (( IS_DIRECTORY ));
then builtin cd -q $NAME 2>/dev/null || builtin cd -q - >/dev/null && IS_GIT_REPO=0 #Say no if we don't have permissions there
else builtin cd -q $NAME:a:h 2>/dev/null || builtin cd -q - >/dev/null && IS_GIT_REPO=0
fi
if [[ $(command git rev-parse --is-inside-work-tree 2>/dev/null) == true ]]; then
IS_GIT_REPO=1
GIT_TOPLEVEL=$(command git rev-parse --show-toplevel)
else
IS_GIT_REPO=0
fi
builtin cd -q - >/dev/null
fi
# Get human readable output if necessary
if [[ "$o_human" != "" ]]; then
# I hate making this call twice, but its either that, or do a bunch
# of calculations much earlier.
FILESIZE_OUT=$(numfmt_local $FILESIZE)
else
FILESIZE_OUT=$FILESIZE
fi
# Pad so all the lines align - firstline gets padded the other way
PERMISSIONS="${(r:MAX_LEN[1]:)PERMISSIONS}"
HARDLINKCOUNT="${(l:MAX_LEN[2]:)HARDLINKCOUNT}"
OWNER="${(l:MAX_LEN[3]:)OWNER}"
GROUP="${(l:MAX_LEN[4]:)GROUP}"
FILESIZE_OUT="${(l:MAX_LEN[5]:)FILESIZE_OUT}"
# --------------------------------------------------------------------------
# Colour the permissions - TODO
# --------------------------------------------------------------------------
# Colour the first character based on filetype
FILETYPE="${PERMISSIONS[1]}"
# Permissions Owner
PER1="${PERMISSIONS[2,4]}"
# Permissions Group
PER2="${PERMISSIONS[5,7]}"
# Permissions User
PER3="${PERMISSIONS[8,10]}"
PERMISSIONS_OUTPUT="$FILETYPE$PER1$PER2$PER3"
# --------------------------------------------------------------------------
# Colour the symlinks
# --------------------------------------------------------------------------
# --------------------------------------------------------------------------
# Colour Owner and Group
# --------------------------------------------------------------------------
OWNER=$'\e[38;5;241m'"$OWNER"$'\e[0m'
GROUP=$'\e[38;5;241m'"$GROUP"$'\e[0m'
# --------------------------------------------------------------------------
# Colour file weights
# --------------------------------------------------------------------------
COLOR=LARGE_FILE_COLOR
for i j in ${SIZELIMITS_TO_COLOR[@]}
do
(( FILESIZE <= i )) || continue
COLOR=$j
break
done
FILESIZE_OUT=$'\e[38;5;'"${COLOR}m$FILESIZE_OUT"$'\e[0m'
# --------------------------------------------------------------------------
# Colour the date and time based on age, then format for output
# --------------------------------------------------------------------------
# Setup colours based on time difference
TIME_DIFF=$(( K_EPOCH - DATE[1] ))
TIME_COLOR=$ANCIENT_TIME_COLOR
for i j in ${FILEAGES_TO_COLOR[@]}
do
(( TIME_DIFF < i )) || continue
TIME_COLOR=$j
break
done
# Format date to show year if more than 6 months since last modified
if (( TIME_DIFF < 15724800 )); then
DATE_OUTPUT="${DATE[2]} ${(r:5:: :)${DATE[3][0,5]}} ${DATE[4]}"
else
DATE_OUTPUT="${DATE[2]} ${(r:6:: :)${DATE[3][0,5]}} ${DATE[5]}" # extra space; 4 digit year instead of 5 digit HH:MM
fi;
DATE_OUTPUT[1]="${DATE_OUTPUT[1]//0/ }" # If day of month begins with zero, replace zero with space
# Apply colour to formated date
DATE_OUTPUT=$'\e[38;5;'"${TIME_COLOR}m${DATE_OUTPUT}"$'\e[0m'
# --------------------------------------------------------------------------
# Colour the repomarker
# --------------------------------------------------------------------------
if [[ "$o_no_vcs" != "" ]]; then
REPOMARKER=""
elif (( IS_GIT_REPO != 0)); then
# If we're not in a repo, still check each directory if it's a repo, and
# then mark appropriately
if (( INSIDE_WORK_TREE == 0 )); then
if (( IS_DIRECTORY )); then
if command git --git-dir="$GIT_TOPLEVEL/.git" --work-tree="${NAME}" diff --stat --quiet --ignore-submodules HEAD &>/dev/null # if dirty
then REPOMARKER=$'\e[38;5;46m|\e[0m' # Show a green vertical bar for clean
else REPOMARKER=$'\e[0;31m+\e[0m' # Show a red vertical bar if dirty
fi
fi
else
if (( IS_DIRECTORY )); then
# If the directory isn't ignored or clean, we'll just say it's dirty
if command git check-ignore --quiet ${NAME} 2>/dev/null; then STATUS='!!'
elif command git diff --stat --quiet --ignore-submodules ${NAME} 2> /dev/null; then STATUS='';
else STATUS=' M'
fi
else
# File
STATUS=$(command git status --porcelain --ignored --untracked-files=normal $GIT_TOPLEVEL/${${${NAME:a}##$GIT_TOPLEVEL}#*/})
fi
STATUS=${STATUS[1,2]}
if [[ $STATUS == ' M' ]]; then REPOMARKER=$'\e[0;31m+\e[0m'; # Tracked & Dirty
elif [[ $STATUS == 'M ' ]]; then REPOMARKER=$'\e[38;5;082m+\e[0m'; # Tracked & Dirty & Added
elif [[ $STATUS == '??' ]]; then REPOMARKER=$'\e[38;5;214m+\e[0m'; # Untracked
elif [[ $STATUS == '!!' ]]; then REPOMARKER=$'\e[38;5;238m|\e[0m'; # Ignored
elif [[ $STATUS == 'A ' ]]; then REPOMARKER=$'\e[38;5;082m+\e[0m'; # Added
else REPOMARKER=$'\e[38;5;082m|\e[0m'; # Good
fi
fi
fi
# --------------------------------------------------------------------------
# Colour the filename
# --------------------------------------------------------------------------
# Unfortunately, the choices for quoting which escape ANSI color sequences are q & qqqq; none of q- qq qqq work.
# But we don't want to quote '.'; so instead we escape the escape manually and use q-
NAME="${${NAME##*/}//$'\e'/\\e}" # also propagate changes to SYMLINK_TARGET below
if [[ $IS_DIRECTORY == 1 ]]; then
if [[ $IS_WRITABLE_BY_OTHERS == 1 ]]; then
if [[ $HAS_STICKY_BIT == 1 ]]; then
NAME=$'\e['"$K_COLOR_TW"'m'"$NAME"$'\e[0m';
fi
NAME=$'\e['"$K_COLOR_OW"'m'"$NAME"$'\e[0m';
fi
NAME=$'\e['"$K_COLOR_DI"'m'"$NAME"$'\e[0m';
elif [[ $IS_SYMLINK == 1 ]]; then NAME=$'\e['"$K_COLOR_LN"'m'"$NAME"$'\e[0m';
elif [[ $IS_SOCKET == 1 ]]; then NAME=$'\e['"$K_COLOR_SO"'m'"$NAME"$'\e[0m';
elif [[ $IS_PIPE == 1 ]]; then NAME=$'\e['"$K_COLOR_PI"'m'"$NAME"$'\e[0m';
elif [[ $HAS_UID_BIT == 1 ]]; then NAME=$'\e['"$K_COLOR_SU"'m'"$NAME"$'\e[0m';
elif [[ $HAS_GID_BIT == 1 ]]; then NAME=$'\e['"$K_COLOR_SG"'m'"$NAME"$'\e[0m';
elif [[ $IS_EXECUTABLE == 1 ]]; then NAME=$'\e['"$K_COLOR_EX"'m'"$NAME"$'\e[0m';
elif [[ $IS_BLOCK_SPECIAL == 1 ]]; then NAME=$'\e['"$K_COLOR_BD"'m'"$NAME"$'\e[0m';
elif [[ $IS_CHARACTER_SPECIAL == 1 ]]; then NAME=$'\e['"$K_COLOR_CD"'m'"$NAME"$'\e[0m';
fi
# --------------------------------------------------------------------------
# Format symlink target
# --------------------------------------------------------------------------
if [[ $SYMLINK_TARGET != "" ]]; then SYMLINK_TARGET="-> ${SYMLINK_TARGET//$'\e'/\\e}"; fi
# --------------------------------------------------------------------------
# Display final result
# --------------------------------------------------------------------------
print -r -- "$PERMISSIONS_OUTPUT $HARDLINKCOUNT $OWNER $GROUP $FILESIZE_OUT $DATE_OUTPUT $REPOMARKER $NAME $SYMLINK_TARGET"
k=$((k+1)) # Bump loop index
done
done
}
# Translate one BSD $LSCOLORS letter pair (foreground, background) into an
# ANSI "fg;bg" colour code string, printed to stdout with no trailing newline.
# Letters a-h map to ANSI colours 30-37 (fg) / 40-47 (bg); 'x' maps to 0.
_k_bsd_to_ansi() {
  local foreground=$1 background=$2 foreground_ansi background_ansi
  case $foreground in
    a) foreground_ansi=30;;
    b) foreground_ansi=31;;
    c) foreground_ansi=32;;
    d) foreground_ansi=33;;
    e) foreground_ansi=34;;
    f) foreground_ansi=35;;
    g) foreground_ansi=36;;
    h) foreground_ansi=37;;
    x) foreground_ansi=0;;
  esac
  case $background in
    a) background_ansi=40;;
    b) background_ansi=41;;
    c) background_ansi=42;;
    d) background_ansi=43;;
    e) background_ansi=44;;
    f) background_ansi=45;;
    g) background_ansi=46;;
    h) background_ansi=47;;
    x) background_ansi=0;;
  esac
  printf "%s;%s" $foreground_ansi $background_ansi
}
# http://upload.wikimedia.org/wikipedia/en/1/15/Xterm_256color_chart.svg
# vim: set ts=2 sw=2 ft=zsh et :
|
#!/usr/bin/env bash
# Fetch dependencies, build the shared package and the test binary, then run it.
# -e: abort on the first failure — without it, a failed `cd` would let the
# remaining `go build` / `./test` run in the wrong directory.
set -e

go get github.com/Pallinder/go-randomdata
go get github.com/gomodule/redigo/redis
go get github.com/spf13/viper
go get github.com/streadway/amqp
go get github.com/mhorr/mavencode-assignment/shared

cd ../shared
go build
#go get github.com/mhorr/mavencode-assignment/shared
cd ../test
go build
./test 1000
|
from datetime import datetime
from config import db, ma
from marshmallow import fields
class Person(db.Model):
    """A person record that owns a collection of notes."""
    __tablename__ = "person"
    person_id = db.Column(db.Integer, primary_key=True)
    lname = db.Column(db.String(32))  # last name
    fname = db.Column(db.String(32))  # first name
    # Set on insert and refreshed on every update.
    timestamp = db.Column(
        db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow
    )
    # Child notes, newest first; notes are deleted along with their person
    # (or when orphaned), and each note belongs to exactly one person.
    notes = db.relationship(
        "Note",
        backref="person",
        cascade="all, delete, delete-orphan",
        single_parent=True,
        order_by="desc(Note.timestamp)",
    )
class Note(db.Model):
    """A timestamped text note attached to a single person."""
    __tablename__ = "note"
    note_id = db.Column(db.Integer, primary_key=True)
    person_id = db.Column(db.Integer, db.ForeignKey("person.person_id"))
    content = db.Column(db.String, nullable=False)
    # Set on insert and refreshed on every update.
    timestamp = db.Column(
        db.DateTime, default=datetime.utcnow, onupdate=datetime.utcnow
    )
class PersonSchema(ma.ModelSchema):
    """Serialization schema for Person, including its nested notes."""
    def __init__(self, **kwargs):
        # NOTE(review): strict=True is a marshmallow 2.x option — confirm the
        # installed marshmallow version before upgrading.
        super().__init__(strict=True, **kwargs)

    class Meta:
        model = Person
        sqla_session = db.session

    # Notes are rendered via PersonNoteSchema to avoid schema recursion.
    notes = fields.Nested("PersonNoteSchema", default=[], many=True)
class PersonNoteSchema(ma.ModelSchema):
    """
    This class exists to get around a recursion issue
    (a flat view of Note that does not nest its person back-reference).
    """
    def __init__(self, **kwargs):
        # NOTE(review): strict=True is a marshmallow 2.x option — confirm version.
        super().__init__(strict=True, **kwargs)

    note_id = fields.Int()
    person_id = fields.Int()
    content = fields.Str()
    timestamp = fields.Str()
class NoteSchema(ma.ModelSchema):
    """Marshmallow schema for Note rows, including the owning person."""
    def __init__(self, **kwargs):
        # strict=True: raise on validation errors rather than collect them.
        super().__init__(strict=True, **kwargs)
    class Meta:
        model = Note
        sqla_session = db.session
    # Uses NotePersonSchema (not PersonSchema) to avoid infinite recursion
    # through the note <-> person back-reference.
    person = fields.Nested("NotePersonSchema", default=None)
class NotePersonSchema(ma.ModelSchema):
    """
    Flat view of a Person used when nested inside NoteSchema.

    This class exists to get around a recursion issue: nesting the full
    PersonSchema would re-nest the notes, which re-nest the person, forever.
    """
    def __init__(self, **kwargs):
        # strict=True: raise on validation errors rather than collect them.
        super().__init__(strict=True, **kwargs)
    person_id = fields.Int()
    lname = fields.Str()
    fname = fields.Str()
    timestamp = fields.Str()
|
/*
* create_varray1.sql
* Chapter 6, Oracle10g PL/SQL Programming
* by <NAME>, <NAME> and <NAME>
*
* This defines a varray with a three element constructor of null elements.
*/
SET ECHO ON
SET SERVEROUTPUT ON SIZE 1000000
DECLARE
  -- Define a varray of integer with 3 rows.
  TYPE integer_varray IS VARRAY(3) OF INTEGER;
  -- Declare and initialize a varray that allows nulls.
  -- The constructor must supply all three elements; here they are NULL.
  varray_integer INTEGER_VARRAY :=
    integer_varray(NULL,NULL,NULL);
BEGIN
  -- Print title.
  dbms_output.put_line('Varray initialized as nulls.');
  dbms_output.put_line('----------------------------');
  -- Loop through the three records. Varrays are 1-indexed; a NULL
  -- element prints as an empty string between the brackets.
  FOR i IN 1..3 LOOP
    -- Print the contents.
    dbms_output.put ('Integer Varray ['||i||'] ');
    dbms_output.put_line('['||varray_integer(i)||']');
  END LOOP;
  -- Assign values to subscripted members of the varray.
  varray_integer(1) := 11;
  varray_integer(2) := 12;
  varray_integer(3) := 13;
  -- Print title.
  dbms_output.put (CHR(10)); -- Visual line break.
  dbms_output.put_line('Varray initialized as values.');
  dbms_output.put_line('-----------------------------');
  -- Loop through the three records to print the varray contents.
  FOR i IN 1..3 LOOP
    dbms_output.put_line('Integer Varray ['||i||'] '
      || '['||varray_integer(i)||']');
  END LOOP;
END;
/
|
/*
* Swiss QR Bill Generator
* Copyright (c) 2018 <NAME>
* Licensed under MIT License
* https://opensource.org/licenses/MIT
*/
import { InputFormatter } from './input-formatter';
import { Injectable } from '@angular/core';
/** Formatter for IBAN account number */
@Injectable()
export class IBANFormatter implements InputFormatter<string> {
  /**
   * Strips all whitespace from a user-visible IBAN.
   * @param formattedValue IBAN possibly grouped with spaces
   * @returns the compact IBAN, or null for empty/missing input
   */
  rawValue(formattedValue: string): string {
    if (!formattedValue) {
      return null;
    }
    return formattedValue.replace(/\s/g, '');
  }

  /**
   * Formats an IBAN into groups of four characters separated by single
   * spaces (e.g. "CH9300762..." -> "CH93 0076 2..."); the final group may
   * be shorter and no trailing space is emitted.
   * @param rawValue IBAN; any existing whitespace is ignored
   * @returns the grouped IBAN, or null for empty/missing input
   */
  formattedValue(rawValue: string): string {
    if (!rawValue) {
      return null;
    }
    const compact = rawValue.replace(/\s/g, '');
    // Original code computed a clamped end index but never used it for the
    // slice; substring() already clamps past-the-end indices, so collect
    // the 4-char groups and join them instead.
    const groups: string[] = [];
    for (let p = 0; p < compact.length; p += 4) {
      groups.push(compact.substring(p, p + 4));
    }
    return groups.join(' ');
  }
}
|
// Code generated by entc, DO NOT EDIT.
package ent
import (
"context"
"fmt"
"time"
"github.com/blushft/strana/modules/sink/loader/store/ent/predicate"
"github.com/blushft/strana/modules/sink/loader/store/ent/rawevent"
"github.com/facebook/ent/dialect/sql"
"github.com/facebook/ent/dialect/sql/sqlgraph"
"github.com/facebook/ent/schema/field"
"github.com/google/uuid"
)
// RawEventUpdate is the builder for updating RawEvent entities.
type RawEventUpdate struct {
config
hooks []Hook
mutation *RawEventMutation
predicates []predicate.RawEvent
}
// Where adds a new predicate for the builder.
func (reu *RawEventUpdate) Where(ps ...predicate.RawEvent) *RawEventUpdate {
reu.predicates = append(reu.predicates, ps...)
return reu
}
// SetTrackingID sets the tracking_id field.
func (reu *RawEventUpdate) SetTrackingID(u uuid.UUID) *RawEventUpdate {
reu.mutation.SetTrackingID(u)
return reu
}
// SetUserID sets the user_id field.
func (reu *RawEventUpdate) SetUserID(s string) *RawEventUpdate {
reu.mutation.SetUserID(s)
return reu
}
// SetAnonymous sets the anonymous field.
func (reu *RawEventUpdate) SetAnonymous(b bool) *RawEventUpdate {
reu.mutation.SetAnonymous(b)
return reu
}
// SetGroupID sets the group_id field.
func (reu *RawEventUpdate) SetGroupID(s string) *RawEventUpdate {
reu.mutation.SetGroupID(s)
return reu
}
// SetNillableGroupID sets the group_id field if the given value is not nil.
func (reu *RawEventUpdate) SetNillableGroupID(s *string) *RawEventUpdate {
if s != nil {
reu.SetGroupID(*s)
}
return reu
}
// ClearGroupID clears the value of group_id.
func (reu *RawEventUpdate) ClearGroupID() *RawEventUpdate {
reu.mutation.ClearGroupID()
return reu
}
// SetSessionID sets the session_id field.
func (reu *RawEventUpdate) SetSessionID(s string) *RawEventUpdate {
reu.mutation.SetSessionID(s)
return reu
}
// SetNillableSessionID sets the session_id field if the given value is not nil.
func (reu *RawEventUpdate) SetNillableSessionID(s *string) *RawEventUpdate {
if s != nil {
reu.SetSessionID(*s)
}
return reu
}
// ClearSessionID clears the value of session_id.
func (reu *RawEventUpdate) ClearSessionID() *RawEventUpdate {
reu.mutation.ClearSessionID()
return reu
}
// SetDeviceID sets the device_id field.
func (reu *RawEventUpdate) SetDeviceID(s string) *RawEventUpdate {
reu.mutation.SetDeviceID(s)
return reu
}
// SetNillableDeviceID sets the device_id field if the given value is not nil.
func (reu *RawEventUpdate) SetNillableDeviceID(s *string) *RawEventUpdate {
if s != nil {
reu.SetDeviceID(*s)
}
return reu
}
// ClearDeviceID clears the value of device_id.
func (reu *RawEventUpdate) ClearDeviceID() *RawEventUpdate {
reu.mutation.ClearDeviceID()
return reu
}
// SetEvent sets the event field.
func (reu *RawEventUpdate) SetEvent(s string) *RawEventUpdate {
reu.mutation.SetEvent(s)
return reu
}
// SetNonInteractive sets the non_interactive field.
func (reu *RawEventUpdate) SetNonInteractive(b bool) *RawEventUpdate {
reu.mutation.SetNonInteractive(b)
return reu
}
// SetChannel sets the channel field.
func (reu *RawEventUpdate) SetChannel(s string) *RawEventUpdate {
reu.mutation.SetChannel(s)
return reu
}
// SetNillableChannel sets the channel field if the given value is not nil.
func (reu *RawEventUpdate) SetNillableChannel(s *string) *RawEventUpdate {
if s != nil {
reu.SetChannel(*s)
}
return reu
}
// ClearChannel clears the value of channel.
func (reu *RawEventUpdate) ClearChannel() *RawEventUpdate {
reu.mutation.ClearChannel()
return reu
}
// SetPlatform sets the platform field.
func (reu *RawEventUpdate) SetPlatform(s string) *RawEventUpdate {
reu.mutation.SetPlatform(s)
return reu
}
// SetNillablePlatform sets the platform field if the given value is not nil.
func (reu *RawEventUpdate) SetNillablePlatform(s *string) *RawEventUpdate {
if s != nil {
reu.SetPlatform(*s)
}
return reu
}
// ClearPlatform clears the value of platform.
func (reu *RawEventUpdate) ClearPlatform() *RawEventUpdate {
reu.mutation.ClearPlatform()
return reu
}
// SetTimestamp sets the timestamp field.
func (reu *RawEventUpdate) SetTimestamp(t time.Time) *RawEventUpdate {
reu.mutation.SetTimestamp(t)
return reu
}
// SetContext sets the context field.
func (reu *RawEventUpdate) SetContext(m map[string]interface{}) *RawEventUpdate {
reu.mutation.SetContext(m)
return reu
}
// Mutation returns the RawEventMutation object of the builder.
func (reu *RawEventUpdate) Mutation() *RawEventMutation {
return reu.mutation
}
// Save executes the query and returns the number of rows/vertices matched by this operation.
func (reu *RawEventUpdate) Save(ctx context.Context) (int, error) {
var (
err error
affected int
)
if len(reu.hooks) == 0 {
affected, err = reu.sqlSave(ctx)
} else {
var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
mutation, ok := m.(*RawEventMutation)
if !ok {
return nil, fmt.Errorf("unexpected mutation type %T", m)
}
reu.mutation = mutation
affected, err = reu.sqlSave(ctx)
mutation.done = true
return affected, err
})
for i := len(reu.hooks) - 1; i >= 0; i-- {
mut = reu.hooks[i](mut)
}
if _, err := mut.Mutate(ctx, reu.mutation); err != nil {
return 0, err
}
}
return affected, err
}
// SaveX is like Save, but panics if an error occurs.
func (reu *RawEventUpdate) SaveX(ctx context.Context) int {
affected, err := reu.Save(ctx)
if err != nil {
panic(err)
}
return affected
}
// Exec executes the query.
func (reu *RawEventUpdate) Exec(ctx context.Context) error {
_, err := reu.Save(ctx)
return err
}
// ExecX is like Exec, but panics if an error occurs.
func (reu *RawEventUpdate) ExecX(ctx context.Context) {
if err := reu.Exec(ctx); err != nil {
panic(err)
}
}
func (reu *RawEventUpdate) sqlSave(ctx context.Context) (n int, err error) {
_spec := &sqlgraph.UpdateSpec{
Node: &sqlgraph.NodeSpec{
Table: rawevent.Table,
Columns: rawevent.Columns,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: rawevent.FieldID,
},
},
}
if ps := reu.predicates; len(ps) > 0 {
_spec.Predicate = func(selector *sql.Selector) {
for i := range ps {
ps[i](selector)
}
}
}
if value, ok := reu.mutation.TrackingID(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Value: value,
Column: rawevent.FieldTrackingID,
})
}
if value, ok := reu.mutation.UserID(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeString,
Value: value,
Column: rawevent.FieldUserID,
})
}
if value, ok := reu.mutation.Anonymous(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeBool,
Value: value,
Column: rawevent.FieldAnonymous,
})
}
if value, ok := reu.mutation.GroupID(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeString,
Value: value,
Column: rawevent.FieldGroupID,
})
}
if reu.mutation.GroupIDCleared() {
_spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{
Type: field.TypeString,
Column: rawevent.FieldGroupID,
})
}
if value, ok := reu.mutation.SessionID(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeString,
Value: value,
Column: rawevent.FieldSessionID,
})
}
if reu.mutation.SessionIDCleared() {
_spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{
Type: field.TypeString,
Column: rawevent.FieldSessionID,
})
}
if value, ok := reu.mutation.DeviceID(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeString,
Value: value,
Column: rawevent.FieldDeviceID,
})
}
if reu.mutation.DeviceIDCleared() {
_spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{
Type: field.TypeString,
Column: rawevent.FieldDeviceID,
})
}
if value, ok := reu.mutation.Event(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeString,
Value: value,
Column: rawevent.FieldEvent,
})
}
if value, ok := reu.mutation.NonInteractive(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeBool,
Value: value,
Column: rawevent.FieldNonInteractive,
})
}
if value, ok := reu.mutation.Channel(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeString,
Value: value,
Column: rawevent.FieldChannel,
})
}
if reu.mutation.ChannelCleared() {
_spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{
Type: field.TypeString,
Column: rawevent.FieldChannel,
})
}
if value, ok := reu.mutation.Platform(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeString,
Value: value,
Column: rawevent.FieldPlatform,
})
}
if reu.mutation.PlatformCleared() {
_spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{
Type: field.TypeString,
Column: rawevent.FieldPlatform,
})
}
if value, ok := reu.mutation.Timestamp(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeTime,
Value: value,
Column: rawevent.FieldTimestamp,
})
}
if value, ok := reu.mutation.Context(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeJSON,
Value: value,
Column: rawevent.FieldContext,
})
}
if n, err = sqlgraph.UpdateNodes(ctx, reu.driver, _spec); err != nil {
if _, ok := err.(*sqlgraph.NotFoundError); ok {
err = &NotFoundError{rawevent.Label}
} else if cerr, ok := isSQLConstraintError(err); ok {
err = cerr
}
return 0, err
}
return n, nil
}
// RawEventUpdateOne is the builder for updating a single RawEvent entity.
type RawEventUpdateOne struct {
config
hooks []Hook
mutation *RawEventMutation
}
// SetTrackingID sets the tracking_id field.
func (reuo *RawEventUpdateOne) SetTrackingID(u uuid.UUID) *RawEventUpdateOne {
reuo.mutation.SetTrackingID(u)
return reuo
}
// SetUserID sets the user_id field.
func (reuo *RawEventUpdateOne) SetUserID(s string) *RawEventUpdateOne {
reuo.mutation.SetUserID(s)
return reuo
}
// SetAnonymous sets the anonymous field.
func (reuo *RawEventUpdateOne) SetAnonymous(b bool) *RawEventUpdateOne {
reuo.mutation.SetAnonymous(b)
return reuo
}
// SetGroupID sets the group_id field.
func (reuo *RawEventUpdateOne) SetGroupID(s string) *RawEventUpdateOne {
reuo.mutation.SetGroupID(s)
return reuo
}
// SetNillableGroupID sets the group_id field if the given value is not nil.
func (reuo *RawEventUpdateOne) SetNillableGroupID(s *string) *RawEventUpdateOne {
if s != nil {
reuo.SetGroupID(*s)
}
return reuo
}
// ClearGroupID clears the value of group_id.
func (reuo *RawEventUpdateOne) ClearGroupID() *RawEventUpdateOne {
reuo.mutation.ClearGroupID()
return reuo
}
// SetSessionID sets the session_id field.
func (reuo *RawEventUpdateOne) SetSessionID(s string) *RawEventUpdateOne {
reuo.mutation.SetSessionID(s)
return reuo
}
// SetNillableSessionID sets the session_id field if the given value is not nil.
func (reuo *RawEventUpdateOne) SetNillableSessionID(s *string) *RawEventUpdateOne {
if s != nil {
reuo.SetSessionID(*s)
}
return reuo
}
// ClearSessionID clears the value of session_id.
func (reuo *RawEventUpdateOne) ClearSessionID() *RawEventUpdateOne {
reuo.mutation.ClearSessionID()
return reuo
}
// SetDeviceID sets the device_id field.
func (reuo *RawEventUpdateOne) SetDeviceID(s string) *RawEventUpdateOne {
reuo.mutation.SetDeviceID(s)
return reuo
}
// SetNillableDeviceID sets the device_id field if the given value is not nil.
func (reuo *RawEventUpdateOne) SetNillableDeviceID(s *string) *RawEventUpdateOne {
if s != nil {
reuo.SetDeviceID(*s)
}
return reuo
}
// ClearDeviceID clears the value of device_id.
func (reuo *RawEventUpdateOne) ClearDeviceID() *RawEventUpdateOne {
reuo.mutation.ClearDeviceID()
return reuo
}
// SetEvent sets the event field.
func (reuo *RawEventUpdateOne) SetEvent(s string) *RawEventUpdateOne {
reuo.mutation.SetEvent(s)
return reuo
}
// SetNonInteractive sets the non_interactive field.
func (reuo *RawEventUpdateOne) SetNonInteractive(b bool) *RawEventUpdateOne {
reuo.mutation.SetNonInteractive(b)
return reuo
}
// SetChannel sets the channel field.
func (reuo *RawEventUpdateOne) SetChannel(s string) *RawEventUpdateOne {
reuo.mutation.SetChannel(s)
return reuo
}
// SetNillableChannel sets the channel field if the given value is not nil.
func (reuo *RawEventUpdateOne) SetNillableChannel(s *string) *RawEventUpdateOne {
if s != nil {
reuo.SetChannel(*s)
}
return reuo
}
// ClearChannel clears the value of channel.
func (reuo *RawEventUpdateOne) ClearChannel() *RawEventUpdateOne {
reuo.mutation.ClearChannel()
return reuo
}
// SetPlatform sets the platform field.
func (reuo *RawEventUpdateOne) SetPlatform(s string) *RawEventUpdateOne {
reuo.mutation.SetPlatform(s)
return reuo
}
// SetNillablePlatform sets the platform field if the given value is not nil.
func (reuo *RawEventUpdateOne) SetNillablePlatform(s *string) *RawEventUpdateOne {
if s != nil {
reuo.SetPlatform(*s)
}
return reuo
}
// ClearPlatform clears the value of platform.
func (reuo *RawEventUpdateOne) ClearPlatform() *RawEventUpdateOne {
reuo.mutation.ClearPlatform()
return reuo
}
// SetTimestamp sets the timestamp field.
func (reuo *RawEventUpdateOne) SetTimestamp(t time.Time) *RawEventUpdateOne {
reuo.mutation.SetTimestamp(t)
return reuo
}
// SetContext sets the context field.
func (reuo *RawEventUpdateOne) SetContext(m map[string]interface{}) *RawEventUpdateOne {
reuo.mutation.SetContext(m)
return reuo
}
// Mutation returns the RawEventMutation object of the builder.
func (reuo *RawEventUpdateOne) Mutation() *RawEventMutation {
return reuo.mutation
}
// Save executes the query and returns the updated entity.
func (reuo *RawEventUpdateOne) Save(ctx context.Context) (*RawEvent, error) {
var (
err error
node *RawEvent
)
if len(reuo.hooks) == 0 {
node, err = reuo.sqlSave(ctx)
} else {
var mut Mutator = MutateFunc(func(ctx context.Context, m Mutation) (Value, error) {
mutation, ok := m.(*RawEventMutation)
if !ok {
return nil, fmt.Errorf("unexpected mutation type %T", m)
}
reuo.mutation = mutation
node, err = reuo.sqlSave(ctx)
mutation.done = true
return node, err
})
for i := len(reuo.hooks) - 1; i >= 0; i-- {
mut = reuo.hooks[i](mut)
}
if _, err := mut.Mutate(ctx, reuo.mutation); err != nil {
return nil, err
}
}
return node, err
}
// SaveX is like Save, but panics if an error occurs.
func (reuo *RawEventUpdateOne) SaveX(ctx context.Context) *RawEvent {
re, err := reuo.Save(ctx)
if err != nil {
panic(err)
}
return re
}
// Exec executes the query on the entity.
func (reuo *RawEventUpdateOne) Exec(ctx context.Context) error {
_, err := reuo.Save(ctx)
return err
}
// ExecX is like Exec, but panics if an error occurs.
func (reuo *RawEventUpdateOne) ExecX(ctx context.Context) {
if err := reuo.Exec(ctx); err != nil {
panic(err)
}
}
func (reuo *RawEventUpdateOne) sqlSave(ctx context.Context) (re *RawEvent, err error) {
_spec := &sqlgraph.UpdateSpec{
Node: &sqlgraph.NodeSpec{
Table: rawevent.Table,
Columns: rawevent.Columns,
ID: &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Column: rawevent.FieldID,
},
},
}
id, ok := reuo.mutation.ID()
if !ok {
return nil, &ValidationError{Name: "ID", err: fmt.Errorf("missing RawEvent.ID for update")}
}
_spec.Node.ID.Value = id
if value, ok := reuo.mutation.TrackingID(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeUUID,
Value: value,
Column: rawevent.FieldTrackingID,
})
}
if value, ok := reuo.mutation.UserID(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeString,
Value: value,
Column: rawevent.FieldUserID,
})
}
if value, ok := reuo.mutation.Anonymous(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeBool,
Value: value,
Column: rawevent.FieldAnonymous,
})
}
if value, ok := reuo.mutation.GroupID(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeString,
Value: value,
Column: rawevent.FieldGroupID,
})
}
if reuo.mutation.GroupIDCleared() {
_spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{
Type: field.TypeString,
Column: rawevent.FieldGroupID,
})
}
if value, ok := reuo.mutation.SessionID(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeString,
Value: value,
Column: rawevent.FieldSessionID,
})
}
if reuo.mutation.SessionIDCleared() {
_spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{
Type: field.TypeString,
Column: rawevent.FieldSessionID,
})
}
if value, ok := reuo.mutation.DeviceID(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeString,
Value: value,
Column: rawevent.FieldDeviceID,
})
}
if reuo.mutation.DeviceIDCleared() {
_spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{
Type: field.TypeString,
Column: rawevent.FieldDeviceID,
})
}
if value, ok := reuo.mutation.Event(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeString,
Value: value,
Column: rawevent.FieldEvent,
})
}
if value, ok := reuo.mutation.NonInteractive(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeBool,
Value: value,
Column: rawevent.FieldNonInteractive,
})
}
if value, ok := reuo.mutation.Channel(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeString,
Value: value,
Column: rawevent.FieldChannel,
})
}
if reuo.mutation.ChannelCleared() {
_spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{
Type: field.TypeString,
Column: rawevent.FieldChannel,
})
}
if value, ok := reuo.mutation.Platform(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeString,
Value: value,
Column: rawevent.FieldPlatform,
})
}
if reuo.mutation.PlatformCleared() {
_spec.Fields.Clear = append(_spec.Fields.Clear, &sqlgraph.FieldSpec{
Type: field.TypeString,
Column: rawevent.FieldPlatform,
})
}
if value, ok := reuo.mutation.Timestamp(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeTime,
Value: value,
Column: rawevent.FieldTimestamp,
})
}
if value, ok := reuo.mutation.Context(); ok {
_spec.Fields.Set = append(_spec.Fields.Set, &sqlgraph.FieldSpec{
Type: field.TypeJSON,
Value: value,
Column: rawevent.FieldContext,
})
}
re = &RawEvent{config: reuo.config}
_spec.Assign = re.assignValues
_spec.ScanValues = re.scanValues()
if err = sqlgraph.UpdateNode(ctx, reuo.driver, _spec); err != nil {
if _, ok := err.(*sqlgraph.NotFoundError); ok {
err = &NotFoundError{rawevent.Label}
} else if cerr, ok := isSQLConstraintError(err); ok {
err = cerr
}
return nil, err
}
return re, nil
}
|
package breadth_first_search;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.Queue;
import java.util.StringTokenizer;
/**
*
* @author exponential-e
* 백준 17394번: 핑거 스냅
*
* @see https://www.acmicpc.net/problem/17394/
*
*/
/**
 * BOJ 17394 ("Finger Snap"): starting from N, repeatedly apply one of
 * N/3, N/2, N-1, N+1 (integer division) and report the minimum number of
 * operations needed to reach a prime in the range [A, B], or -1 if no
 * prime in the range is reachable.
 */
public class Boj17394 {
    // Exclusive upper bound on reachable values; also the sieve size.
    private static final int INF = 1_000_001;
    private static final String NEW_LINE = "\n";
    // The four "snap" operations: divide by 3, divide by 2, minus 1, plus 1.
    private static final int[] FORMULA = {3, 2, -1, 1};
    // prime[i] == true iff i is prime; filled once by eratosthenesSieve().
    private static boolean[] prime = new boolean[INF];
    public static void main(String[] args) throws Exception{
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        StringBuilder sb = new StringBuilder();
        int T = Integer.parseInt(br.readLine());
        eratosthenesSieve();
        while(T-- > 0) {
            StringTokenizer st = new StringTokenizer(br.readLine());
            int N = Integer.parseInt(st.nextToken());
            int A = Integer.parseInt(st.nextToken());
            int B = Integer.parseInt(st.nextToken());
            sb.append(search(N, A, B)).append(NEW_LINE);
        }
        // Buffer all answers and print once to keep I/O fast.
        System.out.println(sb.toString());
    }
    private static void eratosthenesSieve() { // find prime
        Arrays.fill(prime, true);
        prime[0] = prime[1] = false;
        // Only factors up to 1000 are needed since 1000^2 >= INF - 1.
        for(int i = 2; i < 1_001; i++) {
            if(!prime[i]) continue;
            for(int j = i + i; j < INF; j += i) {
                prime[j] = false;
            }
        }
    }
    private static int search(int n, int a, int b) {
        // Already standing on a prime endpoint: zero snaps needed.
        if((n == a && prime[a]) || (n == b && prime[b])) return 0;
        // visit[x] = 1 + (snaps to reach x); seeded at 1 so the final
        // answer is min - 1. INF marks "not yet reached".
        int[] visit = new int[INF];
        Arrays.fill(visit, INF);
        Queue<Integer> q = new LinkedList<>();
        q.offer(n);
        visit[n] = 1;
        while(!q.isEmpty()) {
            int current = q.poll();
            for(final int F: FORMULA) { // snapping
                int next = F == 1 || F == -1 ? current + F: current / F;
                if(next < 0 || next >= INF) continue;
                // Relax and re-enqueue only on strict improvement.
                if(visit[next] > visit[current] + 1) {
                    visit[next] = visit[current] + 1;
                    q.offer(next);
                }
            }
        }
        int min = INF;
        // NOTE(review): visit[] values are INF or >= 1, so the
        // `visit[i] != 0` guard is always true; presumably defensive.
        for(int i = a; i <= b; i++) {
            if(visit[i] != 0 && prime[i]) { // find minimum
                if(visit[i] < min) min = visit[i];
            }
        }
        // min - 1 undoes the seed of 1; INF means no reachable prime.
        return min == INF ? -1: min - 1;
    }
}
|
<reponame>mcervantes71/Tic_Tac_Toe
# frozen_string_literal: true
require './lib/board'
# Specs for Board's win/draw/validation helpers.
#
# NOTE(review): the `let(:square)` helpers below are declared but never
# passed to `Board.new`, so every example exercises a freshly constructed
# (presumably empty) board -- confirm whether the lets were meant to be
# injected into the Board under test.
describe Board do
  describe '#check_horizontal' do
    # Board with placeholder coordinate labels (no marks placed).
    let(:square) { [%w[0], %w[0 11 12 13], %w[0 21 22 23], %w[0 31 32 33]] }
    it 'Check horizontal case 1 values.' do
      board = Board.new
      expect(board.check_horizontal).to eql(false)
    end
  end
  describe '#check_horizontal' do
    # Fully marked board with no three-in-a-row.
    let(:square) { [%w[0], %w[0 O X O], %w[0 X O X], %w[0 O X O]] }
    it 'Check horizontal case 2 values.' do
      board = Board.new
      expect(board.check_horizontal).to eql(false)
    end
  end
  describe '#check_vertical' do
    let(:square) { [%w[0], %w[0 11 12 13], %w[0 21 22 23], %w[0 31 32 33]] }
    it 'Check vertical case 1 values.' do
      board = Board.new
      expect(board.check_vertical).to eql(false)
    end
  end
  describe '#check_vertical' do
    let(:square) { [%w[0], %w[0 O X O], %w[0 X O X], %w[0 O X O]] }
    it 'Check vertical case 2 values.' do
      board = Board.new
      expect(board.check_vertical).to eql(false)
    end
  end
  describe '#check_diagonal1' do
    let(:square) { [%w[0], %w[0 11 12 13], %w[0 21 22 23], %w[0 31 32 33]] }
    it 'Check first diagonal case 1 values.' do
      board = Board.new
      expect(board.check_diagonal1).to eql(false)
    end
  end
  describe '#check_diagonal1' do
    let(:square) { [%w[0], %w[0 O X 13], %w[0 X O 23], %w[0 31 X 33]] }
    it 'Check first diagonal case 2 values.' do
      board = Board.new
      expect(board.check_diagonal1).to eql(false)
    end
  end
  describe '#check_diagonal2' do
    let(:square) { [%w[0], %w[0 11 12 13], %w[0 21 22 23], %w[0 31 32 33]] }
    it 'Check second diagonal case 1 values.' do
      board = Board.new
      expect(board.check_diagonal2).to eql(false)
    end
  end
  describe '#check_diagonal2' do
    let(:square) { [%w[0], %w[0 O X 13], %w[0 X O 23], %w[0 31 X 33]] }
    it 'Check second diagonal case 2 values.' do
      board = Board.new
      expect(board.check_diagonal2).to eql(false)
    end
  end
  describe '#check_draw' do
    # 5 moves played: game still in progress.
    it 'Check for draw game case 1' do
      board = Board.new
      expect(board.check_draw(5)).to eql(false)
    end
  end
  describe '#check_draw' do
    # 9 moves played: board full, game is a draw.
    it 'Check for draw game case 2' do
      board = Board.new
      expect(board.check_draw(9)).to eql(true)
    end
  end
  describe '#check_valid_choice' do
    it 'Check valid choice case 1' do
      board = Board.new
      expect(board.valid_choice('11')).to eql(true)
    end
  end
  describe '#check_valid_choice' do
    # Non-numeric input is rejected.
    it 'Check valid choice case 2' do
      board = Board.new
      expect(board.valid_choice('aa')).to eql(false)
    end
  end
  describe '#check_set_choice' do
    let(:square) { [%w[0], %w[0 11 12 13], %w[0 21 22 23], %w[0 31 32 33]] }
    it 'Check set choice case 1' do
      board = Board.new
      expect(board.set_choice('11', 'X')).to eql(true)
    end
  end
  describe '#check_set_choice' do
    let(:square) { [%w[0], %w[0 11 X 13], %w[0 X O 23], %w[0 31 X 33]] }
    it 'Check set choice case 2' do
      board = Board.new
      expect(board.set_choice('11', 'O')).to eql(true)
    end
  end
end
|
package cn.springmvc.model;
/**
 * Plain bean pairing a goods number with the shop that carries it.
 */
public class TagNum {
    // Goods number identifying the product.
    String goodsNo;
    // Identifier of the shop.
    String shopId;

    /** Creates an empty TagNum; fields start as null. */
    public TagNum() {
        super();
    }

    /**
     * Creates a TagNum with both fields populated.
     *
     * @param goodsNo goods number identifying the product
     * @param shopId  identifier of the shop
     */
    public TagNum(String goodsNo, String shopId) {
        super();
        this.goodsNo = goodsNo;
        this.shopId = shopId;
    }

    public String getGoodsNo() {
        return this.goodsNo;
    }

    public void setGoodsNo(String goodsNo) {
        this.goodsNo = goodsNo;
    }

    public String getShopId() {
        return this.shopId;
    }

    public void setShopId(String shopId) {
        this.shopId = shopId;
    }

    /** Debug representation, e.g. {@code TagNum [goodsNo=g, shopId=s]}. */
    @Override
    public String toString() {
        StringBuilder text = new StringBuilder("TagNum [goodsNo=");
        text.append(goodsNo).append(", shopId=").append(shopId).append(']');
        return text.toString();
    }
}
|
echo a#
echo #a
echo a#b
echo ##
echo \##
echo \#\#
echo \#aa
echo # aa#
echo #a
echo +#
echo ?#
echo #?
|
<reponame>shopkeep/fracker
package fracker
import (
"fmt"
"io"
"path/filepath"
"strings"
)
// A Fracker drills into etcd directories and outputs the values to a given outlet.
type Fracker interface {
// Frack() looks up each of the given keys in etcd and walks the tree of nodes returned. Each leaf
// value is converted into a environment variable declaration and written to the given io.Writer.
Frack(io.Writer, []string) error
}
// New() creates a new Fracker.
func New(client Client) Fracker {
return &fracker{client}
}
type fracker struct {
client Client
}
// Frack reads the configuration values stored under each of the given etcd
// keys and writes them to out as NAME=value lines, one per leaf node.
//
// Environment-variable names are derived from the leaf's path relative to
// the requested key: the key prefix and any leading "/" are stripped, the
// remainder is upper-cased, and "/" and "-" become "_". When a requested
// key is itself a leaf, its basename is used. Later keys overwrite earlier
// ones on name collisions. Returns the first client lookup error; the
// original doc comment claimed this panics and writes to stdout — it does
// neither.
func (self *fracker) Frack(out io.Writer, keys []string) error {
	env := make(map[string]string)
	// Build the replacer once instead of once per leaf node.
	replacer := strings.NewReplacer("/", "_", "-", "_")
	for _, key := range keys {
		key = filepath.Clean(key)
		node, err := self.client.Get(key)
		if err != nil {
			return err
		}
		node.Each(func(k, v string) {
			name := strings.TrimPrefix(k, key)
			if name == "" {
				// The requested key is itself a leaf; fall back to its basename.
				name = filepath.Base(k)
			}
			name = strings.TrimPrefix(name, "/")
			name = strings.ToUpper(name)
			name = replacer.Replace(name)
			env[name] = v
		})
	}
	for name, val := range env {
		fmt.Fprintf(out, "%s=%s\n", name, val)
	}
	return nil
}
|
from PIL import Image
from mandelbrot_03 import MandelbrotSet
if __name__ == "__main__":
    # Render a 512x512 grayscale image of the Mandelbrot set and display it.
    mandelbrot_set = MandelbrotSet(max_iterations=20, escape_radius=1000)
    width, height = 512, 512
    # Size of one pixel in complex-plane units (512 * 0.0075 ~ 3.84 units
    # across), so the view spans roughly [-1.92, 1.92] on each axis.
    scale = 0.0075
    GRAYSCALE = "L"  # PIL mode "L": 8-bit single-channel image
    image = Image.new(mode=GRAYSCALE, size=(width, height))
    for y in range(height):
        for x in range(width):
            # Map pixel coordinates to the complex plane, centered at 0+0j;
            # the y axis is flipped so the imaginary axis points up.
            re = scale * (x - width / 2)
            im = scale * (height / 2 - y)
            c = complex(re, im)
            # Stable points (inside the set) map to 0 (black); points that
            # escape quickly map to brighter values.
            instability = 1 - mandelbrot_set.stability(c, smooth=True)
            image.putpixel((x, y), int(instability * 255))
    image.show()
|
// formatter.go
// ~~~~~~~~~
// This module implements the Formatter interface.
// :authors: <NAME>.
// :copyright: (c) 2015 by Fanout, Inc.
// :license: MIT, see LICENSE for more details.
package pubcontrol
// The Formatter interface is used for all publishing formats that are
// wrapped in the Item struct. Examples of format implementations
// include JsonObjectFormat and HttpStreamFormat.
type Formatter interface {
	// The name of the format which should return a string. Examples
	// include 'json-object' and 'http-response'
	Name() string
	// The export method which should return a format-specific hash
	// containing the required format-specific data.
	Export() interface{}
}
|
#!/usr/bin/env bash
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Runs every library's quickstart program inside the installed-artifacts
# docker image to verify that a packaged build works end to end.
#
# Requires a readable service-account key at
# ${KOKORO_GFILE_DIR:-/dev/shm}/kokoro-run-key.json; returns 0 without
# doing anything when the key is absent (e.g. builds without credentials).
# Returns 1 when any library's quickstart fails, after trying all of them.
run_all_installed_quickstart_programs() {
  CONFIG_DIRECTORY="${KOKORO_GFILE_DIR:-/dev/shm}"
  readonly CONFIG_DIRECTORY
  if [[ ! -r "${CONFIG_DIRECTORY}/kokoro-run-key.json" ]]; then
    # No credentials available: skip quietly rather than fail.
    return 0
  fi
  source "${PROJECT_ROOT}/ci/colors.sh"
  source "${PROJECT_ROOT}/ci/etc/integration-tests-config.sh"
  source "${PROJECT_ROOT}/ci/etc/quickstart-config.sh"
  local run_args=(
    # Remove the container after running
    "--rm"
    # Set the environment variables for the test program.
    "--env" "GOOGLE_APPLICATION_CREDENTIALS=/c/kokoro-run-key.json"
    "--env" "GOOGLE_CLOUD_PROJECT=${GOOGLE_CLOUD_PROJECT}"
    # Mount the config directory as a volume in `/c`
    "--volume" "${CONFIG_DIRECTORY}:/c"
  )
  local errors=""
  for library in $(quickstart_libraries); do
    echo
    log_yellow "Running ${library}'s quickstart program for ${DISTRO}"
    # Collect the per-library arguments, one per line, into an array.
    local args=()
    while IFS="" read -r line; do
      args+=("${line}")
    done < <(quickstart_arguments "${library}")
    # Keep going on failure so one broken library doesn't hide the others.
    if ! docker run "${run_args[@]}" "${INSTALL_RUN_IMAGE}" \
      "/i/${library}/quickstart" "${args[@]}"; then
      log_red "Error running the quickstart program"
      errors="${errors} ${library}"
    fi
  done
  echo
  if [[ -z "${errors}" ]]; then
    log_green "All quickstart builds were successful"
    return 0
  fi
  # ${errors:1} drops the leading space accumulated above.
  log_red "Build failed for ${errors:1}"
  return 1
}
run_all_installed_quickstart_programs
|
#!/usr/bin/env bash
# (c) L.Spiegelberg 2020, (c) 2021 Tuplex contributors
# Builds the tuplex/ubuntu1804 benchmark docker image; pass -u/--upload to
# also push it to Docker Hub. (The duplicated shebang/header from an
# earlier copy-paste has been collapsed into this one.)
set -euo pipefail

UPLOAD=""
while :; do
  # ${1:-} so the loop terminates cleanly under `set -u` when no
  # arguments remain.
  case "${1:-}" in
    -u|--upload) UPLOAD="SET"
    ;;
    *) break
  esac
  shift
done

# Copy from scripts to the current dir because docker doesn't understand
# files outside the build context.
cp ../../ubuntu1804/install_reqs.sh .

# Build the benchmark docker image.
docker build -t tuplex/ubuntu1804 . || exit 1

# Push the image when requested.
if [[ "${UPLOAD}" == 'SET' ]]; then
  docker login
  docker push tuplex/ubuntu1804
fi
|
package com.linkedin.datahub.graphql.types.common.mappers;
import com.linkedin.datahub.graphql.generated.DataPlatformInstance;
import com.linkedin.datahub.graphql.generated.EntityType;
import com.linkedin.datahub.graphql.types.mappers.ModelMapper;
import javax.annotation.Nonnull;
/**
 * Maps the Pegasus {@code com.linkedin.common.DataPlatformInstance} aspect
 * to its GraphQL counterpart.
 */
public class DataPlatformInstanceAspectMapper implements ModelMapper<com.linkedin.common.DataPlatformInstance, DataPlatformInstance> {
    /** Shared stateless instance; the mapper keeps no per-call state. */
    public static final DataPlatformInstanceAspectMapper INSTANCE = new DataPlatformInstanceAspectMapper();
    /**
     * Convenience entry point that delegates to the shared {@link #INSTANCE}.
     *
     * @param dataPlatformInstance the Pegasus aspect to convert
     * @return the GraphQL representation
     */
    public static DataPlatformInstance map(@Nonnull final com.linkedin.common.DataPlatformInstance dataPlatformInstance) {
        return INSTANCE.apply(dataPlatformInstance);
    }
    /**
     * Converts the aspect; type and urn are populated only when the input
     * carries an instance urn, otherwise the result is left empty.
     */
    @Override
    public DataPlatformInstance apply(@Nonnull final com.linkedin.common.DataPlatformInstance input) {
        final DataPlatformInstance result = new DataPlatformInstance();
        if (input.hasInstance()) {
            result.setType(EntityType.DATA_PLATFORM_INSTANCE);
            result.setUrn(input.getInstance().toString());
        }
        return result;
    }
}
|
<gh_stars>0
var tcp = require('../../tcp');
var instance_skel = require('../../instance_skel');
var actions = require('./actions');
var feedback = require('./feedback');
var presets = require('./presets');
var variables = require('./variables');
var internal_api = require('../companion-module-bmd-videohub/internalAPI');
var debug;
var log;
/**
 * Companion instance class for the Blackmagic MultiView 4.
*
* @extends instance_skel
* @version 1.3.0
* @since 1.0.0
* @author <NAME> <<EMAIL>>
* @author <NAME> <<EMAIL>>
*/
class instance extends instance_skel {
	/**
	 * Create an instance of a multiview 4 module.
	 *
	 * @param {EventEmitter} system - the brains of the operation
	 * @param {string} id - the instance ID
	 * @param {Object} config - saved user configuration parameters
	 * @since 1.0.0
	 */
	constructor(system, id, config) {
		super(system, id, config);

		// Parser state for the line-based Videohub protocol (see init_tcp):
		// `command` holds the current block header, `stash` its body lines.
		this.stash = [];
		this.command = null;
		this.deviceName = '';

		// Mix in the action/feedback/preset/variable definitions plus the
		// internal API helpers borrowed from the videohub module.
		Object.assign(this, {
			...actions,
			...feedback,
			...presets,
			...variables,
			...internal_api
		});

		this.inputs = {};
		this.outputs = {};
		this.inputCount = 4;
		this.outputCount = 4;
		this.monitoringCount = 0;
		this.serialCount = 0;

		this.CHOICES_INPUTS = [];
		this.CHOICES_OUTPUTS = [];

		this.CHOICES_DISPLAYMODE = [
			{ id: 'true', label: 'SOLO', preset: 'SOLO' },
			{ id: 'false', label: '2x2', preset: '2x2' }
		];

		this.CHOICES_OUTPUTFORMAT = [
			{ id: '50i', label: '50i', preset: '50i' },
			{ id: '50p', label: '50p', preset: '50p' },
			{ id: '60i', label: '60i', preset: '60i' },
			{ id: '60p', label: '60p', preset: '60p' }
		];

		this.CHOICES_TRUEFALSE = [
			{ id: 'true', label: 'True', preset: 'On' },
			{ id: 'false', label: 'False', preset: 'Off' }
		];

		// NOTE(review): property name keeps the historical "SETTIGS" typo;
		// sibling modules (presets/feedback) may reference it by this exact
		// name, so do not rename without checking them.
		this.PRESETS_SETTIGS = [
			{ action: 'mode', feedback: 'solo_enabled', label: 'Display Mode ', choices: this.CHOICES_DISPLAYMODE },
			{ action: 'set_format', feedback: 'output_format', label: 'Output Format: ', choices: this.CHOICES_OUTPUTFORMAT },
			{ action: 'set_border', feedback: 'display_border', label: 'Borders ', choices: this.CHOICES_TRUEFALSE },
			{ action: 'set_labels', feedback: 'display_labels', label: 'Labels ', choices: this.CHOICES_TRUEFALSE },
			{ action: 'set_meters', feedback: 'display_meters', label: 'Audio Meters ', choices: this.CHOICES_TRUEFALSE },
			{ action: 'set_tally', feedback: 'display_tally', label: 'Tally ', choices: this.CHOICES_TRUEFALSE },
			{ action: 'set_widescreen_sd', feedback: 'widescreen_sd', label: 'Widescreen SD ', choices: this.CHOICES_TRUEFALSE }
		];

		this.actions(); // export actions
	}

	/**
	 * Setup the actions.
	 *
	 * @param {EventEmitter} system - the brains of the operation
	 * @access public
	 * @since 1.0.0
	 */
	actions(system) {
		this.setupChoices();
		this.setActions(this.getActions());
	}

	/**
	 * Executes the provided action by translating it into a protocol command
	 * string and sending it over the TCP socket (if connected).
	 *
	 * @param {Object} action - the action to be executed
	 * @access public
	 * @since 1.0.0
	 */
	action(action) {
		var cmd;
		var opt = action.options;

		switch (action.action) {
			case 'mode':
				cmd = 'CONFIGURATION:\n'+'Solo enabled: '+ opt.mode +'\n\n';
				break;
			case 'audio':
				// NOTE(review): audio routing appears to use output index
				// outputCount+1 and solo uses outputCount — confirm against
				// the device protocol documentation.
				cmd ='VIDEO OUTPUT ROUTING:\n'+(this.outputCount+1)+' '+opt.inp +'\n\n';
				break;
			case 'solo':
				cmd ='VIDEO OUTPUT ROUTING:\n'+this.outputCount+' '+opt.inp +'\n\n';
				break;
			case 'label':
				cmd ='INPUT LABELS:\n'+ opt.inp +' '+ opt.label +'\n\n';
				break;
			case 'set_format':
				cmd = "CONFIGURATION:\n"+"Output format: "+opt.setting+"\n\n";
				break;
			case 'set_border':
				cmd = "CONFIGURATION:\n"+"Display border: "+opt.setting+"\n\n";
				break;
			case 'set_labels':
				cmd = "CONFIGURATION:\n"+"Display labels: "+opt.setting+"\n\n";
				break;
			case 'set_meters':
				cmd = "CONFIGURATION:\n"+"Display audio meters: "+opt.setting+"\n\n";
				break;
			case 'set_tally':
				cmd = "CONFIGURATION:\n"+"Display SDI tally: "+opt.setting+"\n\n";
				break;
			case 'set_widescreen_sd':
				cmd = "CONFIGURATION:\n"+"Widescreen SD enable: "+opt.setting+"\n\n";
				break;
		}

		if (cmd !== undefined) {
			if (this.socket !== undefined && this.socket.connected) {
				this.socket.send(cmd);
			}
			else {
				this.debug('Socket not connected :(');
			}
		}
	}

	/**
	 * Creates the configuration fields for web config.
	 *
	 * @returns {Array} the config fields
	 * @access public
	 * @since 1.0.0
	 */
	config_fields() {
		return [
			{
				type: 'text',
				id: 'info',
				width: 12,
				label: 'Information',
				value: 'This module will connect to any Blackmagic Design MultiView 4 Device.'
			},
			{
				type: 'textinput',
				id: 'host',
				label: 'MultiView IP',
				width: 6,
				regex: this.REGEX_IP
			}
		]
	}

	/**
	 * Clean up the instance before it is destroyed.
	 *
	 * @access public
	 * @since 1.0.0
	 */
	destroy() {
		if (this.socket !== undefined) {
			this.socket.destroy();
		}

		this.debug("destroy", this.id);
	}

	/**
	 * INTERNAL: returns the device config object, lazily initialized with
	 * defaults on first access.
	 *
	 * @returns {Object} the device config object
	 * @access protected
	 * @since 1.3.0
	 */
	getConfig() {
		if (this.configuration === undefined) {
			this.configuration = {
				layout: '4x4',
				outputFormat: '60i',
				soloEnabled: 'false',
				widescreenSD: 'true',
				displayBorder: 'true',
				displayLabels: 'true',
				displayMeters: 'true',
				displayTally: 'true'
			};
		}

		return this.configuration;
	}

	/**
	 * INTERNAL: returns the desired output object, lazily creating a
	 * placeholder entry when the output has not been seen yet.
	 *
	 * @param {number} id - the output to fetch
	 * @returns {Object} the desired output object
	 * @access protected
	 * @since 1.3.0
	 */
	getOutput(id) {
		if (this.outputs[id] === undefined) {
			this.outputs[id] = {
				label: (id+1) + ': View ' + (id+1),
				name: 'View ' + (id+1),
				route: id,
				status: 'BNC',
				lock: 'U'
			};
		}

		return this.outputs[id];
	}

	/**
	 * Main initialization function called once the module
	 * is OK to start doing things.
	 *
	 * @access public
	 * @since 1.0.0
	 */
	init() {
		debug = this.debug;
		log = this.log;

		this.initVariables();
		this.initFeedbacks();
		this.initPresets();

		this.init_tcp();
	}

	/**
	 * INTERNAL: use setup data to initalize the tcp socket object.
	 *
	 * @access protected
	 * @since 1.0.0
	 */
	init_tcp() {
		var receivebuffer = '';

		if (this.socket !== undefined) {
			this.socket.destroy();
			delete this.socket;
		}

		if (this.config.port === undefined) {
			this.config.port = 9990;
		}

		if (this.config.host) {
			this.socket = new tcp(this.config.host, this.config.port);

			this.socket.on('status_change', (status, message) => {
				this.status(status, message);
			});

			this.socket.on('error', (err) => {
				this.debug("Network error", err);
				this.log('error',"Network error: " + err.message);
			});

			this.socket.on('connect', () => {
				this.debug("Connected");
			});

			// separate buffered stream into lines with responses
			this.socket.on('data', (chunk) => {
				var i = 0, line = '', offset = 0;
				receivebuffer += chunk;

				while ( (i = receivebuffer.indexOf('\n', offset)) !== -1) {
					line = receivebuffer.substr(offset, i - offset);
					offset = i + 1;
					this.socket.emit('receiveline', line.toString());
				}

				// keep the trailing partial line for the next chunk
				receivebuffer = receivebuffer.substr(offset);
			});

			// Protocol framing: a "HEADER:" line starts a block, non-empty
			// lines are the block body, an empty line terminates the block.
			this.socket.on('receiveline', (line) => {
				if (this.command === null && line.match(/:/) ) {
					this.command = line;
				}
				else if (this.command !== null && line.length > 0) {
					this.stash.push(line.trim());
				}
				else if (line.length === 0 && this.command !== null) {
					var cmd = this.command.trim().split(/:/)[0];

					this.processVideohubInformation(cmd, this.stash);

					this.stash = [];
					this.command = null;
				}
				else {
					this.debug("weird response from videohub", line, line.length);
				}
			});
		}
	}

	/**
	 * INTERNAL: Routes incoming data to the appropriate function for processing.
	 *
	 * @param {string} key - the command/data type being passed
	 * @param {Object} data - the collected data
	 * @access protected
	 * @since 1.3.0
	 */
	processVideohubInformation(key,data) {
		if (key.match(/(INPUT|OUTPUT) LABELS/)) {
			this.updateLabels(key,data);
			this.actions();
			this.initFeedbacks();
			this.initPresets();
		}
		else if (key.match(/VIDEO OUTPUT ROUTING/)) {
			this.updateRouting(key,data);

			this.checkFeedbacks('input_bg');
			this.checkFeedbacks('solo_source');
			this.checkFeedbacks('audio_source');
		}
		else if (key.match(/VIDEO OUTPUT LOCKS/)) {
			this.updateLocks(key,data);
		}
		else if (key.match(/(VIDEO INPUT|VIDEO OUTPUT) STATUS/)) {
			this.updateStatus(key,data);
			this.actions();
			this.initFeedbacks();
			this.initPresets();
		}
		else if (key == 'MULTIVIEW DEVICE') {
			this.updateDevice(key,data);
			this.actions();
			this.initVariables();
			this.initFeedbacks();
			this.initPresets();
		}
		else if (key == 'CONFIGURATION') {
			this.updateDeviceConfig(key,data);
		}
		else {
			// TODO: find out more about the video hub from stuff that comes in here
		}
	}

	/**
	 * INTERNAL: use model data to define the choices for the dropdowns.
	 *
	 * @access protected
	 * @since 1.3.0
	 */
	setupChoices() {
		this.CHOICES_INPUTS = [];
		this.CHOICES_OUTPUTS = [];

		if (this.inputCount > 0) {
			for(var key = 0; key < this.inputCount; key++) {
				if (this.getInput(key).status != 'None') {
					this.CHOICES_INPUTS.push( { id: key, label: this.getInput(key).label } );
				}
			}
		}

		if (this.outputCount > 0) {
			for(var key = 0; key < this.outputCount; key++) {
				if (this.getOutput(key).status != 'None') {
					this.CHOICES_OUTPUTS.push( { id: key, label: this.getOutput(key).label } );
				}
			}
		}
	}

	/**
	 * Process an updated configuration array; reconnects only when the host
	 * changed or no socket exists yet.
	 *
	 * @param {Object} config - the new configuration
	 * @access public
	 * @since 1.0.0
	 */
	updateConfig(config) {
		var resetConnection = false;

		if (this.config.host != config.host)
		{
			resetConnection = true;
		}

		this.config = config;

		this.actions();
		this.initFeedbacks();
		this.initVariables();

		if (resetConnection === true || this.socket === undefined) {
			this.init_tcp();
		}
	}

	/**
	 * INTERNAL: Updates device data from the Videohub
	 *
	 * @param {string} labeltype - the command/data type being passed
	 * @param {Object} object - the collected data ("Attribute: value" lines)
	 * @access protected
	 * @since 1.3.0
	 */
	updateDevice(labeltype, object) {

		for (var key in object) {
			var parsethis = object[key];
			var a = parsethis.split(/: /);
			var attribute = a.shift();
			var value = a.join(" ");

			switch (attribute) {
				case 'Model name':
					this.deviceName = value;
					this.log('info', 'Connected to a ' + this.deviceName);
					break;
			}
		}

		this.saveConfig();
	}

	/**
	 * INTERNAL: Updates device configuration data from the Videohub and
	 * triggers the matching feedback re-evaluation for each changed setting.
	 *
	 * @param {string} labeltype - the command/data type being passed
	 * @param {Object} object - the collected data ("Attribute: value" lines)
	 * @access protected
	 * @since 1.3.0
	 */
	updateDeviceConfig(labeltype, object) {

		for (var key in object) {
			var parsethis = object[key];
			var a = parsethis.split(/: /);
			var attribute = a.shift();
			var value = a.join(" ");

			switch (attribute) {
				case 'Layout':
					this.getConfig().layout = value;
					this.checkFeedbacks('layout');
					break;
				case 'Output format':
					this.getConfig().outputFormat = value;
					this.checkFeedbacks('output_format');
					break;
				case 'Solo enabled':
					this.getConfig().soloEnabled = value;
					this.checkFeedbacks('solo_enabled');
					break;
				case 'Widescreen SD enable':
					this.getConfig().widescreenSD = value;
					this.checkFeedbacks('widescreen_sd');
					break;
				case 'Display border':
					this.getConfig().displayBorder = value;
					this.checkFeedbacks('display_border');
					break;
				case 'Display labels':
					this.getConfig().displayLabels = value;
					this.checkFeedbacks('display_labels');
					break;
				case 'Display audio meters':
					this.getConfig().displayMeters = value;
					this.checkFeedbacks('display_meters');
					break;
				case 'Display SDI tally':
					this.getConfig().displayTally = value;
					this.checkFeedbacks('display_tally');
					break;
			}
		}
	}
}
// Export the instance class in CommonJS style.
module.exports = instance;
|
# Migration: add an optional mother's date-of-birth column to the families table.
class AddColumnMotherDobIntoFamilies < ActiveRecord::Migration
  def change
    # add_column is automatically reversible, so `change` suffices here.
    add_column :families, :mother_dob, :date
  end
end
|
import re
def extract_company_name(logo):
    """Return the first pipe-delimited token in *logo*, stripped of whitespace.

    Keeps the original contract: when *logo* contains no ``|...|`` section,
    the literal string ``"Company name not found"`` is returned.
    """
    match = re.search(r'\|(.+?)\|', logo)
    if match is None:
        return "Company name not found"
    return match.group(1).strip()
<reponame>okoye/COAP
import unittest
from coapy.constants import *
class TestConstants(unittest.TestCase):
    """Sanity-check the protocol constants exposed by coapy.constants."""

    def test(self):
        # Values fixed by the CoAP draft this library targets.
        self.assertEqual(61616, COAP_PORT)
        self.assertEqual(1, RESPONSE_TIMEOUT)
        self.assertEqual(5, MAX_RETRANSMIT)
# Allow running this module directly as a test script.
if __name__ == '__main__':
    unittest.main()
|
package middleware
import (
"net/http"
"strings"
"time"
"github.com/fighthorse/redisAdmin/component/log"
"github.com/gin-gonic/gin"
)
// AccessLogging is gin middleware that records one structured access-log
// entry per request after the handler chain has completed.
func AccessLogging(c *gin.Context) {
	defer func(begin time.Time) {
		// Elapsed wall-clock time in fractional seconds.
		requestTime := float64(time.Since(begin)) / float64(time.Second)
		r := c.Request
		url := r.RequestURI
		// URLs matched by FilterUrl are excluded from logging.
		if FilterUrl(url) {
			return
		}
		accesslog := log.Fields{
			"host":                 strings.Split(r.Host, ":")[0],
			"clientip":             strings.Split(r.RemoteAddr, ":")[0],
			"request_method":       r.Method,
			"request_url":          url,
			"status":               c.Writer.Status(),
			"http_user_agent":      r.UserAgent(),
			"request_time":         requestTime,
			"http_x_forwarded_for": GetIP(r),
		}
		log.AccessLog(r.Context(), accesslog)
	}(time.Now())
	c.Next()
}
// GetIP 获取连接ip
func GetIP(r *http.Request) string {
// 先从HTTP_X_CLUSTER_CLIENT_IP获取
ip := r.Header.Get("HTTP_X_CLUSTER_CLIENT_IP")
if ip == "" {
ip = r.Header.Get("HTTP_CLIENT_IP")
if ip == "" {
ip = r.Header.Get("HTTP_X_FORWARDED_FOR")
if ip == "" {
ip = r.Header.Get("X-FORWARDED-FOR")
if ip == "" {
ip = strings.Split(r.RemoteAddr, ":")[0]
}
}
}
}
return strings.Split(ip, ",")[0]
}
|
# Deploy only the Datastore index definitions to the given App Engine project.
gcloud app deploy index.yaml --version 1 --project health-tracker-1366
|
#!/usr/bin/env bash
#SBATCH --job-name=bdd_source_and_dets18k_domain_im_roi_cst
#SBATCH -o gypsum/logs/%j_bdd_source_and_dets18k_domain_im_roi_cst.txt
#SBATCH -e gypsum/errs/%j_bdd_source_and_dets18k_domain_im_roi_cst.txt
#SBATCH -p 1080ti-long
#SBATCH --gres=gpu:1
#SBATCH --mem=100000
##SBATCH --cpus-per-task=4
##SBATCH --mem-per-cpu=4096
# SLURM batch job: train on bdd_peds+DETS18k with the domain im/roi
# consistency config on a single GPU, resuming from the given checkpoint.
python tools/train_net_step.py \
    --dataset bdd_peds+DETS18k \
    --cfg configs/baselines/bdd_domain_im_roi_cst.yaml \
    --set NUM_GPUS 1 TRAIN.SNAPSHOT_ITERS 5000 \
    --iter_size 2 \
    --use_tfboard \
    --load_ckpt /mnt/nfs/scratch1/pchakrabarty/bdd_recs/ped_models/bdd_peds.pth \
|
# Publish the site: push source, rebuild with Gatsby, then sync the build to S3.
git push origin master
gatsby clean
gatsby build
# Remove the previous deployment before copying the fresh build
# (shell scripts are excluded from the upload).
aws s3 rm s3://www.billsimmonsnba.com --recursive
aws s3 cp ./public/. s3://www.billsimmonsnba.com --recursive --exclude "*.sh"
|
def custom_max(*args):
    """Return the largest positional argument.

    Raises:
        ValueError: if called with no arguments (mirrors built-in ``max()``).
    """
    if len(args) == 0:
        raise ValueError("custom_max() arg is an empty sequence")
    largest, *rest = args
    for candidate in rest:
        if candidate > largest:
            largest = candidate
    return largest
# Test cases
print(custom_max(3, 5, 2, 8, 1))  # Output: 8
print(custom_max(-10, -5, -8))  # Output: -5
# Bug fix: calling with no arguments raises ValueError; the original bare
# call crashed the script here. Catch it so the demo runs to completion.
try:
    print(custom_max())
except ValueError as err:
    print(err)  # Output: custom_max() arg is an empty sequence
import asyncio
class SessionBackend:
    """Minimal async session-store interface with placeholder implementations."""

    async def exists(self, session_id: str) -> bool:
        # Placeholder: a real backend would query a data store or cache.
        return False

    async def generate_id(self) -> str:
        # Placeholder: a real backend would use a secure random generator.
        return "generated_session_id"
# Test the SessionBackend class
async def test_cookie_exists(cookie: SessionBackend) -> None:
    # A fresh backend never reports an existing session.
    assert await cookie.exists("session_id") is False

async def test_cookie_generate_id(cookie: SessionBackend) -> None:
    # Generated IDs are plain strings.
    new_id = await cookie.generate_id()
    assert isinstance(new_id, str)
# Run the tests
async def main():
    # Exercise both backend methods against a single instance.
    cookie = SessionBackend()
    await test_cookie_exists(cookie)
    await test_cookie_generate_id(cookie)

asyncio.run(main())
# Print the square root of each number in the list.
values = [1, 4, 9, 16]
for value in values:
    print(value ** 0.5)
#!/bin/bash
# Export a timestamp-based version string (UTC) of the form YYMMDD-HHMM,
# along with its individual date/time components.
export YEAR="$(date -u +%y)"
export MONTH="$(date -u +%m)"
export DAY="$(date -u +%d)"
export HOUR="$(date -u +%H)"
export MINUTE="$(date -u +%M)"
export VERSION="${YEAR}${MONTH}${DAY}-${HOUR}${MINUTE}"
|
import * as t from "io-ts";
import { optionalOrNullable } from "../../../../util/io-ts";
import { rpcAddress } from "../base-types";
// Codec for an RPC log address filter: either a single address or a list.
export const rpcLogAddress = t.union([rpcAddress, t.array(rpcAddress)]);
export type RpcLogAddress = t.TypeOf<typeof rpcLogAddress>;

// Same codec, but also accepting undefined/null values.
export const optionalRpcLogAddress = optionalOrNullable(rpcLogAddress);
export type OptionalRpcLogAddress = t.TypeOf<typeof optionalRpcLogAddress>;
|
import random
def random_string(length, char_set):
    """Return a string of *length* characters drawn uniformly (with
    replacement) from *char_set*."""
    return "".join(random.choice(char_set) for _ in range(length))
# Demo: a random 6-letter lowercase string, e.g. "acehkl" (varies per run).
print(random_string(6, 'abcdefghijklmnopqrstuvwxyz'))
(function(currentScriptPath) {
  'use strict';

  angular.module('ionicThreads', [])
    .directive('ionComment', ionComment)
    .directive('ionThread', ionThread);

  // Default template locations are derived from where this script was loaded.
  var threadTemplate = currentScriptPath.replace('js/ionic.threads.js', 'templates/thread.html'),
      commentTemplate = currentScriptPath.replace('js/ionic.threads.js', 'templates/comment.html'),
      // Shared reply callback, installed by the enclosing ion-thread directive.
      replyToComment = false;

  /**
   * Directive rendering a single comment (and, via its template, children).
   */
  function ionComment() {
    return {
      restrict: 'EA', // element & attribute
      scope: {
        comment: '='
      },
      templateUrl: commentTemplate,
      link: function(scope, element, attrs) {
        // Fix: the two handlers were originally joined by a comma operator;
        // they are independent statements.
        scope.toggleComment = function(comment) {
          comment.showChildren = !comment.showChildren;
        };
        scope.replyToComment = function(comment) {
          replyToComment && replyToComment({comment: comment});
        };
      }
    };
  }

  /**
   * Directive rendering a thread of comments; exposes template and reply
   * callback configuration to the ion-comment children.
   */
  function ionThread() {
    return {
      restrict: 'EA', // element & attribute
      scope: {
        commentTemplate: '@',
        replyToComment: '&',
        template: '=',
        comments: '='
      },
      templateUrl: function(elem, attrs) {
        // Allow callers to override either template via attributes.
        commentTemplate = attrs.commentTemplate || commentTemplate;
        return attrs.template || threadTemplate;
      },
      link: function(scope, element, attrs) {
        replyToComment = scope.replyToComment || replyToComment;
      }
    };
  }
})(
  (function () {
    // Resolve the URL of the currently executing <script> tag.
    var scripts = document.getElementsByTagName("script");
    var currentScriptPath = scripts[scripts.length - 1].src;
    return currentScriptPath;
  })()
);
|
// Copyright 2017-2021 @polkadot/api-contract authors & contributors
// SPDX-License-Identifier: Apache-2.0
import fs from 'fs';
import path from 'path';
import { decorateMethod } from '@polkadot/api/promise';
import contractFlipper from '../../test/contracts/ink/flipper.contract.json';
import abiFlipper from '../../test/contracts/ink/flipper.json';
import { Code } from './Code';
import { mockApi } from './mock';
// Raw WASM blob used by the individual ABI/WASM constructor test below.
const wasmFlipper = fs.readFileSync(path.join(__dirname, '../../test/contracts/ink/flipper.wasm'));

describe('Code', (): void => {
  it('can construct with an individual ABI/WASM combo', (): void => {
    expect(
      () => new Code(mockApi, abiFlipper, wasmFlipper, decorateMethod)
    ).not.toThrow();
  });

  it('can construct with an .contract ABI', (): void => {
    // A bundled .contract file embeds its own WASM, so none is passed here.
    expect(
      () => new Code(mockApi, contractFlipper, null, decorateMethod)
    ).not.toThrow();
  });
});
|
#!/usr/bin/env bash
# Interactively import a CSV file ($1) into a MongoDB collection.
CSVFILE=$1

echo ""
echo "Importing: $CSVFILE"
# Show the header row with the "," separators between quoted fields collapsed.
FIELDS=`head -1 $1 | sed -e 's/\",\"/,/g'`
echo "Fields: $FIELDS"
echo ""
echo "Database ?"
read DB
echo "Collection?"
read COLLECTION
echo "Importing to $DB.$COLLECTION ? (y|n)"
read CONSENT
if [ "$CONSENT" != "y" ]; then
    echo ""
    echo "Import interrupted."
    echo ""
    # Bug fix: abort here — previously the script fell through and ran the
    # import even after the user declined.
    exit 1
fi
mongoimport -d $DB -c $COLLECTION --type=CSV --file=$CSVFILE --headerline
|
package com.threathunter.bordercollie.slot.compute.cache.wrapper.array;
import com.threathunter.bordercollie.slot.compute.cache.CacheType;
import com.threathunter.bordercollie.slot.compute.cache.StorageType;
import com.threathunter.bordercollie.slot.compute.cache.storage.ByteArrayCacheStore;
import com.threathunter.bordercollie.slot.compute.cache.wrapper.CacheWrapperMeta;
import com.threathunter.bordercollie.slot.util.HashType;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.stubbing.Answer;
import org.powermock.api.mockito.PowerMockito;
import org.powermock.core.classloader.annotations.PrepareForTest;
import org.powermock.modules.junit4.PowerMockRunner;
import java.util.HashMap;
import java.util.Map;
/**
*
*/
@RunWith(PowerMockRunner.class)
@PrepareForTest(ByteArrayCacheStore.class)
public class SecondaryLongArrayCacheWrapperTest {
    private static final String KEY_1 = "key1";
    private static final String KEY_2 = "key2";
    private static final String SUB_KEY_1 = "subkey1";
    private static final String SUB_KEY_2 = "subkey2";

    // Mocked backing store; all reads/writes go through the `data` map below.
    private ByteArrayCacheStore cacheStore = PowerMockito.mock(ByteArrayCacheStore.class);
    private Map<String, byte[]> data = new HashMap<>();

    /**
     * Wires the mock so getCache returns the map entry for each key and
     * allocate creates a fresh 1768-byte backing array per key.
     */
    @Before
    public void setUp() {
        PowerMockito.doAnswer((Answer<byte[]>) mockArg -> data.get(KEY_1)).when(cacheStore).getCache(KEY_1);
        PowerMockito.doAnswer((Answer<byte[]>) mockArg -> data.get(KEY_2)).when(cacheStore).getCache(KEY_2);
        PowerMockito.doAnswer((Answer<byte[]>) mockArg -> {
            data.put(KEY_1, new byte[1768]);
            return data.get(KEY_1);
        }).when(cacheStore).allocate(KEY_1);
        PowerMockito.doAnswer((Answer<byte[]>) mockArg -> {
            data.put(KEY_2, new byte[1768]);
            return data.get(KEY_2);
        }).when(cacheStore).allocate(KEY_2);
    }

    /** SUM semantics: addData accumulates values per (key, subkey). */
    @Test
    public void testSecondarySumLong() {
        SecondaryLongArrayCacheWrapper.SecondarySumLongArrayCacheWrapper wrapper = new SecondaryLongArrayCacheWrapper.SecondarySumLongArrayCacheWrapper(getCacheWrapperMeta(CacheType.SECONDARY_SUM_LONG));
        wrapper.updateStoreInfo(cacheStore, 0);
        Assert.assertEquals(120, wrapper.addData(120l, KEY_1, SUB_KEY_1).longValue());
        Assert.assertEquals(30l, wrapper.addData(-90l, KEY_1, SUB_KEY_1).longValue());
        Assert.assertEquals(30l, wrapper.getData(KEY_1, SUB_KEY_1).longValue());
        Assert.assertNull(wrapper.getData(KEY_1, SUB_KEY_2));
        Assert.assertNull(wrapper.getData(KEY_2, SUB_KEY_2));
        Assert.assertEquals(-987, wrapper.addData(-987l, KEY_1, SUB_KEY_2).longValue());
        // Fill up the per-key capacity; once exhausted, addData returns null.
        // NOTE(review): the observed cutoff is i <= 17 here (offset 0) vs
        // i < 20 in the other tests — presumably tied to the store offset;
        // confirm against the wrapper's capacity accounting.
        for (int i = 0; i < 30; i++) {
            long value = i % 2 == 0 ? i : i * -1;
            String subKey = String.format("%s_%d", SUB_KEY_2, i);
            Long first = wrapper.addData(value, KEY_1, subKey);
            Long second = wrapper.addData(value * -1, KEY_1, subKey);
            if (i <= 17) {
                Assert.assertEquals(value, first.longValue());
                Assert.assertEquals(0l, second.longValue());
            } else {
                Assert.assertNull(first);
                Assert.assertNull(second);
            }
        }
    }

    /** FIRST semantics: only the first value per (key, subkey) is kept. */
    @Test
    public void testSecondaryFirstLong() {
        SecondaryLongArrayCacheWrapper.SecondaryFirstLongArrayCacheWrapper wrapper = new SecondaryLongArrayCacheWrapper.SecondaryFirstLongArrayCacheWrapper(getCacheWrapperMeta(CacheType.SECONDARY_FIRST_LONG));
        wrapper.updateStoreInfo(cacheStore, 0);
        Assert.assertEquals(120, wrapper.addData(120l, KEY_1, SUB_KEY_1).longValue());
        Assert.assertNull(wrapper.addData(-90l, KEY_1, SUB_KEY_1));
        Assert.assertEquals(120, wrapper.getData(KEY_1, SUB_KEY_1).longValue());
        Assert.assertNull(wrapper.getData(KEY_1, SUB_KEY_2));
        Assert.assertNull(wrapper.getData(KEY_2, SUB_KEY_2));
        Assert.assertEquals(-987, wrapper.addData(-987l, KEY_1, SUB_KEY_2).longValue());
        for (int i = 0; i < 30; i++) {
            long value = i % 2 == 0 ? i : i * -1;
            String subKey = String.format("%s_%d", SUB_KEY_2, i);
            Long first = wrapper.addData(value, KEY_1, subKey);
            Long second = wrapper.addData(value * -1, KEY_1, subKey);
            if (i <= 17) {
                Assert.assertEquals(value, first.longValue());
                Assert.assertNull(second);
            } else {
                Assert.assertNull(first);
                Assert.assertNull(second);
            }
        }
    }

    /** LAST semantics: the most recent value per (key, subkey) wins. */
    @Test
    public void testSecondaryLastLong() {
        SecondaryLongArrayCacheWrapper.SecondaryLastLongArrayCacheWrapper wrapper = new SecondaryLongArrayCacheWrapper.SecondaryLastLongArrayCacheWrapper(getCacheWrapperMeta(CacheType.SECONDARY_LAST_LONG));
        wrapper.updateStoreInfo(cacheStore, 9);
        Assert.assertEquals(1900, wrapper.addData(1900l, KEY_1, SUB_KEY_1).longValue());
        Assert.assertEquals(-78787, wrapper.addData(-78787l, KEY_1, SUB_KEY_1).longValue());
        Assert.assertEquals(0, wrapper.addData(0l, KEY_1, SUB_KEY_1).longValue());
        Assert.assertNull(wrapper.getData(KEY_1, SUB_KEY_2));
        Assert.assertNull(wrapper.getData(KEY_2, SUB_KEY_2));
        for (int i = 0; i < 30; i++) {
            long value = i % 2 == 0 ? i : i * -1;
            String subKey = String.format("%s_%d", SUB_KEY_2, i);
            Long first = wrapper.addData(value, KEY_2, subKey);
            Long second = wrapper.addData(value * -1, KEY_2, subKey);
            if (i < 20) {
                Assert.assertEquals(value, first.longValue());
                Assert.assertEquals(value * -1, second.longValue());
            } else {
                Assert.assertNull(first);
                Assert.assertNull(second);
            }
        }
    }

    /** MAX semantics: addData keeps the maximum value per (key, subkey). */
    @Test
    public void testSecondaryMaxLong() {
        SecondaryLongArrayCacheWrapper.SecondaryMaxLongArrayCacheWrapper wrapper = new SecondaryLongArrayCacheWrapper.SecondaryMaxLongArrayCacheWrapper(getCacheWrapperMeta(CacheType.SECONDARY_MAX_LONG));
        wrapper.updateStoreInfo(cacheStore, 4);
        Assert.assertEquals(90, wrapper.addData(90l, KEY_1, SUB_KEY_1).longValue());
        Assert.assertEquals(90, wrapper.addData(-2l, KEY_1, SUB_KEY_1).longValue());
        Assert.assertEquals(90, wrapper.addData(0l, KEY_1, SUB_KEY_1).longValue());
        Assert.assertEquals(10000, wrapper.addData(10000l, KEY_1, SUB_KEY_1).longValue());
        Assert.assertNull(wrapper.getData(KEY_1, SUB_KEY_2));
        Assert.assertNull(wrapper.getData(KEY_2, SUB_KEY_2));
        for (int i = 0; i < 30; i++) {
            long value = i % 2 == 0 ? i : i * -1;
            String subKey = String.format("%s_%d", SUB_KEY_2, i);
            Long result = wrapper.addData(value, KEY_2, subKey);
            if (i < 20) {
                Assert.assertEquals(value, result.longValue());
            } else {
                Assert.assertNull(result);
            }
        }
    }

    /** MIN semantics: addData keeps the minimum value per (key, subkey). */
    @Test
    public void testSecondaryMinLong() {
        SecondaryLongArrayCacheWrapper.SecondaryMinLongArrayCacheWrapper wrapper = new SecondaryLongArrayCacheWrapper.SecondaryMinLongArrayCacheWrapper(getCacheWrapperMeta(CacheType.SECONDARY_MIN_LONG));
        wrapper.updateStoreInfo(cacheStore, 0);
        Assert.assertEquals(190, wrapper.addData(190l, KEY_1, SUB_KEY_1).longValue());
        Assert.assertEquals(0, wrapper.addData(0l, KEY_1, SUB_KEY_1).longValue());
        Assert.assertEquals(0, wrapper.addData(10l, KEY_1, SUB_KEY_1).longValue());
        Assert.assertEquals(-10, wrapper.addData(-10l, KEY_1, SUB_KEY_1).longValue());
        Assert.assertEquals(-10, wrapper.getData(KEY_1, SUB_KEY_1).longValue());
        Assert.assertNull(wrapper.getData(KEY_1, SUB_KEY_2));
        Assert.assertNull(wrapper.getData(KEY_2, SUB_KEY_2));
        for (int i = 0; i < 30; i++) {
            long value = i % 2 == 0 ? i : i * -1;
            String subKey = String.format("%s_%d", SUB_KEY_2, i);
            Long result = wrapper.addData(value, KEY_2, subKey);
            if (i < 20) {
                Assert.assertEquals(value, result.longValue());
            } else {
                Assert.assertNull(result);
            }
        }
    }

    // Builds the wrapper metadata shared by all tests; only the cache type varies.
    private CacheWrapperMeta getCacheWrapperMeta(CacheType type) {
        CacheWrapperMeta meta = new CacheWrapperMeta();
        meta.setSecondaryKeyHashType(HashType.NORMAL);
        meta.setIndexCount(2);
        meta.setStorageType(StorageType.BYTES_ARRAY);
        meta.setValueHashType(HashType.NORMAL);
        meta.setCacheType(type);
        return meta;
    }
}
|
<filename>client/node_modules/@material-ui/styles/esm/styled/styled.js<gh_stars>0
import _extends from "@babel/runtime/helpers/esm/extends";
import _objectWithoutProperties from "@babel/runtime/helpers/esm/objectWithoutProperties";
import React from 'react';
import clsx from 'clsx';
import PropTypes from 'prop-types';
import { chainPropTypes, getDisplayName } from '@material-ui/utils';
import hoistNonReactStatics from 'hoist-non-react-statics';
import makeStyles from '../makeStyles';
// Return a shallow copy of `input` without the own enumerable keys listed
// in `fields`.
function omit(input, fields) {
  var output = {};

  for (var prop in input) {
    if (Object.prototype.hasOwnProperty.call(input, prop) && fields.indexOf(prop) === -1) {
      output[prop] = input[prop];
    }
  }

  return output;
} // styled-components's API removes the mapping between components and styles.
// Using components as a low-level styling construct can be simpler.
// Using components as a low-level styling construct can be simpler.
// styled(Component)(style, options) factory mirroring the styled-components
// API on top of makeStyles. Transpiled vendored code — behavior unchanged.
function styled(Component) {
  var componentCreator = function componentCreator(style) {
    var options = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {};

    var name = options.name,
        stylesOptions = _objectWithoutProperties(options, ["name"]);

    if (process.env.NODE_ENV !== 'production' && Component === undefined) {
      throw new Error(['You are calling styled(Component)(style) with an undefined component.', 'You may have forgotten to import it.'].join('\n'));
    }

    var classNamePrefix = name;

    if (process.env.NODE_ENV !== 'production') {
      if (!name) {
        // Provide a better DX outside production.
        var displayName = getDisplayName(Component);

        if (displayName !== undefined) {
          classNamePrefix = displayName;
        }
      }
    }

    // Normalize the style argument into a makeStyles object keyed by `root`;
    // function styles receive the theme merged with the component props.
    var stylesOrCreator = typeof style === 'function' ? function (theme) {
      return {
        root: function root(props) {
          return style(_extends({
            theme: theme
          }, props));
        }
      };
    } : {
      root: style
    };

    // NOTE(review): `var style` shadows the parameter from here on — `style`
    // now refers to the useStyles hook returned by makeStyles.
    var style = makeStyles(stylesOrCreator, _extends({
      Component: Component,
      name: name || Component.displayName,
      classNamePrefix: classNamePrefix
    }, stylesOptions));

    var filterProps;
    var propTypes = {};

    if (style.filterProps) {
      filterProps = style.filterProps;
      delete style.filterProps;
    }
    /* eslint-disable react/forbid-foreign-prop-types */

    if (style.propTypes) {
      propTypes = style.propTypes;
      delete style.propTypes;
    }
    /* eslint-enable react/forbid-foreign-prop-types */

    var StyledComponent = React.forwardRef(function StyledComponent(props, ref) {
      var children = props.children,
          classNameProp = props.className,
          clone = props.clone,
          ComponentProp = props.component,
          other = _objectWithoutProperties(props, ["children", "className", "clone", "component"]);

      var classes = style(props);
      var className = clsx(classes.root, classNameProp);
      var spread = other;

      if (filterProps) {
        spread = omit(spread, filterProps);
      }

      // clone: re-style the existing child element instead of wrapping it.
      if (clone) {
        return React.cloneElement(children, _extends({
          className: clsx(children.props.className, className)
        }, spread));
      }

      // Render-prop form: hand the computed className to the child function.
      if (typeof children === 'function') {
        return children(_extends({
          className: className
        }, spread));
      }

      var FinalComponent = ComponentProp || Component;
      return React.createElement(FinalComponent, _extends({
        ref: ref,
        className: className
      }, spread), children);
    });
    process.env.NODE_ENV !== "production" ? StyledComponent.propTypes = _extends({
      /**
       * A render function or node.
       */
      children: PropTypes.oneOfType([PropTypes.node, PropTypes.func]),

      /**
       * @ignore
       */
      className: PropTypes.string,

      /**
       * If `true`, the component will recycle it's children DOM element.
       * It's using `React.cloneElement` internally.
       *
       * This prop will be deprecated and removed in v5
       */
      clone: chainPropTypes(PropTypes.bool, function (props) {
        if (props.clone && props.component) {
          return new Error('You can not use the clone and component prop at the same time.');
        }

        return null;
      }),

      /**
       * The component used for the root node.
       * Either a string to use a DOM element or a component.
       */
      component: PropTypes.elementType
    }, propTypes) : void 0;

    if (process.env.NODE_ENV !== 'production') {
      StyledComponent.displayName = "Styled(".concat(classNamePrefix, ")");
    }

    hoistNonReactStatics(StyledComponent, Component);
    return StyledComponent;
  };

  return componentCreator;
}

export default styled;
<reponame>mballoni/toggle-boot
package br.com.mballoni.autoconfigure.beans;
import br.com.mballoni.toggleboot.Toggle;
import br.com.mballoni.toggleboot.Fetcher;
import java.util.Collections;
import java.util.List;
/**
 * No-op {@link Fetcher} implementation: always reports an empty toggle list.
 * Used as the fallback bean when no real toggle source is configured.
 */
public class NoOpFetcher implements Fetcher {
    @Override
    public List<Toggle> fetchAll() {
        return Collections.emptyList();
    }
}
|
#!/bin/bash
# Run the kitcar_rosbag sanity check on the given bag file ($1),
# preceded by two blank lines for readability.
check_bag()
{
	echo "";
	echo "";
	rosrun kitcar_rosbag check.py "$1";
}
# Append the current branch and latest commit of $REPO_WITH_INFO (when it is
# checked out under $KITCAR_REPO_PATH) to rosbag_info.txt.
write_repo_info()
{
	echo $REPO_WITH_INFO
	if [ -d "$KITCAR_REPO_PATH/$REPO_WITH_INFO" ]; then
		echo "$REPO_WITH_INFO repository branch:">>rosbag_info.txt;
		(cd $KITCAR_REPO_PATH/$REPO_WITH_INFO && git symbolic-ref HEAD --short) >>rosbag_info.txt;
		echo "">>rosbag_info.txt;
		(cd $KITCAR_REPO_PATH/$REPO_WITH_INFO && git rev-list --format=medium --max-count=1 HEAD) >>rosbag_info.txt;
		echo "">>rosbag_info.txt;
	fi;
}
# Create rosbag_info.txt containing the bag name ($1), the recording time
# ($now), the car name, and the state of the relevant kitcar repositories.
write_rosbag_info_and_diff()
{
	echo "rosbag_name=$1" >rosbag_info.txt;
	echo "time=$now">>rosbag_info.txt;
	echo "CAR_NAME=$CAR_NAME" >>rosbag_info.txt;
	echo "">>rosbag_info.txt;
	REPO_WITH_INFO=kitcar-ros
	write_repo_info
	REPO_WITH_INFO=kitcar-gazebo-simulation
	write_repo_info
}
write_camera_car_specs()
{
	#extract alternate yaml-file for camera and car_specs, which can be used for parameter tuning
	# The awk program splits the dump on top-level "key:" records and keeps
	# only the car_specs and camera sections.
	awk 'BEGIN{RS="(^|\n)[a-z_]+:"; print_next_line=0; ORS="";}
	print_next_line {print; print_next_line=0;}
	RT~/car_specs|camera/ {print RT; print_next_line=1;}
	END{print "\n";}' "params_$1.yaml" > "camera_car_specs_$1.yaml"
}
# Fall back to $HOME when the kitcar repository root is not configured.
if [ -z "$KITCAR_REPO_PATH" ]
then
    KITCAR_REPO_PATH=$HOME;
fi

if [ $# -eq 0 ] || [ "$1" = '-h' ] || [ "$1" = '--help' ]
then
    # No arguments or explicit help: print usage plus rosbag's own option help.
    tput setaf 2; echo 'Usage: sh record.sh -o PREFIX TOPIC1 [TOPIC2 TOPIC3 ...] [ROSBAG_RECORD_OPTIONS]'
    echo "or"
    echo 'Usage: sh record.sh -O NAME TOPIC1 [TOPIC2 TOPIC3 ...] [ROSBAG_RECORD_OPTIONS]'
    echo "or"
    echo 'Usage: sh record.sh TOPIC1 [TOPIC2 TOPIC3 ...] [ROSBAG_RECORD_OPTIONS]'; tput sgr0
    echo ""
    echo "Create folder, dump parameters and record rosbag in this directory."
    echo ""
    echo 'ROSBAG_RECORD_OPTIONS:'
    rosbag record -h | sed '1,3 d'
    exit 0
elif [ "$1" = '-o' ]
then
    # -o PREFIX: record into a newly created "<PREFIX>_<timestamp>" directory.
    rosbag_dir=$2
    # Last non-empty path component becomes the bag file name prefix.
    rosbag_prefix=$(echo "$rosbag_dir" | awk -F/ '{ if($NF!=""){ folder=$NF} else if(NF>1){ folder=$(NF-1)}; print folder; }')
    # Expand a leading "~" to $HOME (quoted strings are not tilde-expanded).
    rosbag_dir=$(echo "$rosbag_dir" | awk -v h="$HOME" -F/ 'BEGIN {OFS="/"}{ if($1=="~"){$1=h}; print $0; }')
    rosbag_prefix="$rosbag_prefix""_"
    now=$(date +"%Y-%m-%d-%H-%M-%S")
    mkdir "$rosbag_dir""_""$now" &&
    (
        cd "$rosbag_dir""_""$now" && {
            # NOTE(review): the recorded name below has no "_" between dir and
            # timestamp, unlike the directory created above — confirm intended.
            write_rosbag_info_and_diff "$rosbag_dir$now"
            rosparam dump "params_$rosbag_prefix$now.yaml";
            write_camera_car_specs "$rosbag_prefix$now"
            shift;  # drop -o
            shift;  # drop PREFIX
            # shellcheck disable=SC2064
            trap "{ check_bag $rosbag_prefix$now.bag; }" INT
            rosbag record -O "$rosbag_prefix$now" "$@";}
    )
elif [ "$1" = '-O' ]
then
    # -O NAME: record into the directory NAME exactly as given.
    # Fixed: "$1" is now quoted — the unquoted test broke for values with
    # whitespace or glob characters.
    rosbag_dir=$2
    rosbagname=$(echo "$rosbag_dir" | awk -F/ '{ if($NF!=""){ folder=$NF} else if(NF>1){ folder=$(NF-1)}; print folder; }')
    rosbag_dir=$(echo "$rosbag_dir" | awk -v h="$HOME" -F/ 'BEGIN {OFS="/"}{ if($1=="~"){$1=h}; print $0; }')
    now=$(date +"%Y-%m-%d-%H-%M-%S")
    mkdir "$rosbag_dir" &&
    (
        cd "$rosbag_dir" && {
            write_rosbag_info_and_diff "$rosbag_dir"
            rosparam dump "params_$rosbagname.yaml";
            write_camera_car_specs "$rosbagname"
            shift;  # drop -O
            shift;  # drop NAME
            # shellcheck disable=SC2064
            trap "{ check_bag $rosbagname.bag ; }" INT
            rosbag record -O "$rosbagname" "$@";}
    )
else
    # Plain topic list: reject misplaced -o/-O, then record into "<timestamp>/".
    for var in "$@"
    do
        if [ "$var" = '-o' ] || [ "$var" = '-O' ]
        then
            tput bold; tput setaf 1; echo "The name or prefix option has to be the first argument."
            echo ""
            tput setaf 2; echo 'Usage: sh record.sh -o PREFIX TOPIC1 [TOPIC2 TOPIC3 ...] [ROSBAG_RECORD_OPTIONS]'
            echo "or"
            echo 'Usage: sh record.sh -O NAME TOPIC1 [TOPIC2 TOPIC3 ...] [ROSBAG_RECORD_OPTIONS]'
            echo "or"
            echo 'Usage: sh record.sh TOPIC1 [TOPIC2 TOPIC3 ...] [ROSBAG_RECORD_OPTIONS]'; tput sgr0
            echo ""
            echo "Create folder, dump parameters and record rosbag in this directory."
            echo ""
            echo 'ROSBAG_RECORD_OPTIONS:'
            rosbag record -h | sed '1,3 d'
            exit 1
        fi
    done
    now=$(date +"%Y-%m-%d-%H-%M-%S")
    mkdir "$now" &&
    (
        cd "$now" && {
            # Fixed: removed the redundant 'echo "rosbag_name=$now" >rosbag_info.txt'
            # that preceded this call — write_rosbag_info_and_diff truncates the
            # file itself, so that write was dead.
            write_rosbag_info_and_diff "$now"
            rosparam dump "params_$now.yaml";
            write_camera_car_specs "$now"
            # shellcheck disable=SC2064
            trap "{ check_bag $now.bag; }" INT
            rosbag record -O "$now" "$@";}
    )
fi
|
<reponame>ksmit799/POTCO-PS
# File: I (Python 2.4)
from pandac.PandaModules import *
from direct.interval.IntervalGlobal import *
from EffectController import EffectController
from PooledEffect import PooledEffect
import random
class InjuredEffect(PooledEffect, EffectController):
    """Pulsing, spinning 'stun ring' effect displayed over an injured avatar.

    Pooled effect: when the track finishes the instance is checked back into
    the pool (via checkInEffect) instead of being destroyed.
    """

    def __init__(self):
        PooledEffect.__init__(self)
        EffectController.__init__(self)
        # Intermediate node so the ring model can be rotated independently
        # of this effect's own transform.
        self.dummy = self.attachNewNode(ModelNode('dummyNode'))
        self.effectModel = loader.loadModel('models/effects/stunRing')
        # Additive color blending so overlapping rings brighten rather than occlude.
        self.effectModel.node().setAttrib(ColorBlendAttrib.make(ColorBlendAttrib.MAdd, ColorBlendAttrib.OIncomingAlpha, ColorBlendAttrib.OOne))
        self.effectModel.setTwoSided(1)
        self.effectModel.setScale(1)
        self.effectModel.reparentTo(self.dummy)
        self.setDepthWrite(0)
        self.setLightOff()
        self.duration = 1.5     # base duration (seconds) used to time the sub-intervals
        self.effectScale = 1.0  # overall scale multiplier applied in createTrack

    def createTrack(self):
        """Build self.track: fade in, looping pulse/rotation, fade out, clean up."""
        # Ring is stretched 1.3x vertically relative to the uniform effect scale.
        self.effectModel.setScale(Vec3(1 * self.effectScale, 1 * self.effectScale, 1.3 * self.effectScale))
        textureStage = self.effectModel.findTextureStage('*')
        if textureStage:
            self.effectModel.setTexOffset(textureStage, 0.0, 1.0)
        # Start fully transparent; fadeIn brings the alpha up.
        self.setColorScale(1.0, 1.0, 1.0, 0.0)
        fadeIn = LerpColorScaleInterval(self, 0.5, Vec4(1, 1, 1, 0.40000000000000002), startColorScale = Vec4(0, 0, 0, 0))
        fadeOut = LerpColorScaleInterval(self, 0.5, Vec4(0, 0, 0, 0), startColorScale = Vec4(1, 1, 1, 0.40000000000000002))
        # Alpha "breathing" on the model itself, looped while the effect plays.
        pulseFadeOut = LerpColorScaleInterval(self.effectModel, self.duration / 2.25, Vec4(1, 1, 1, 0.17999999999999999), startColorScale = Vec4(1, 1, 1, 0.59999999999999998))
        pulseFadeIn = LerpColorScaleInterval(self.effectModel, self.duration / 1.75, Vec4(1, 1, 1, 0.59999999999999998), startColorScale = Vec4(1, 1, 1, 0.14999999999999999))
        fade = Sequence(pulseFadeIn, pulseFadeOut)
        # Wobble the ring back and forth while the dummy spins a full turn.
        rotateOne = LerpHprInterval(self.effectModel, self.duration / 3.0, Vec3(0, -25, 25), startHpr = Vec3(0, 25, -25), blendType = 'easeOut')
        rotateTwo = LerpHprInterval(self.effectModel, self.duration / 3.0, Vec3(0, 25, -25), startHpr = Vec3(0, -25, 25), blendType = 'easeOut')
        rotate = Sequence(rotateOne, rotateTwo)
        rotateH = LerpHprInterval(self.dummy, self.duration / 3.0, Vec3(0, 0, 0), startHpr = Vec3(360, 0, 0))
        self.startEffect = Sequence(Func(rotate.loop), Func(rotateH.loop), Func(fade.loop), fadeIn)
        self.endEffect = Sequence(fadeOut, Func(rotate.finish), Func(rotateH.finish), Func(fade.finish), Func(self.cleanUpEffect))
        self.track = Sequence(self.startEffect, Wait(3.0 * self.duration), self.endEffect)

    def setNewUVs(self, offset, ts):
        # Scroll the texture vertically by the given offset on texture stage ts.
        self.effectModel.setTexOffset(ts, 0.0, -offset)

    def cleanUpEffect(self):
        # Return this pooled instance to the pool after controller cleanup.
        EffectController.cleanUpEffect(self)
        self.checkInEffect(self)

    def destroy(self):
        EffectController.destroy(self)
        PooledEffect.destroy(self)
|
#!/bin/bash
#
# This script is meant to go fetch the most recent versions of various libraries that
# ManuvrOS has been written against. None of this is strictly required for a basic build,
# but most real-world applications will want at least one of them.

# Manuvr
# NOTE(review): unlike mbedtls below, an existing lib/ManuvrOS checkout is
# not removed first, so this clone fails if the directory already exists —
# confirm whether that is intended.
#rm -rf lib/ManuvrOS
git clone https://github.com/Manuvr/ManuvrOS lib/ManuvrOS

# mbedTLS: remove any previous checkout, then clone a fresh copy.
rm -rf lib/mbedtls
git clone https://github.com/ARMmbed/mbedtls.git lib/mbedtls
|
#!/bin/bash -ex
# Save or restore the local docker image layers to/from the Travis cache.
# Usage: $0 {save|load}. Expects DOCKER_CACHE_FILE to point at the gzip'd
# cache archive.
case "$1" in
save)
    # Save built images to Travis cache directory.
    # Only cache canonical builds: skip pull requests and non-master branches.
    if [[ "${TRAVIS_PULL_REQUEST}" == "false" ]] && [[ "${TRAVIS_BRANCH}" == "master" ]]; then
        mkdir -p $(dirname "${DOCKER_CACHE_FILE}")
        # Save every layer of local/standup_dev that has a real id
        # ('<missing>' rows have no locally addressable layer).
        docker save $(docker history -q local/standup_dev | grep -v '<missing>') | \
        gzip > "${DOCKER_CACHE_FILE}"
    fi
    ;;
load)
    # Restore previously cached layers, if a cache archive exists.
    if [[ -f "${DOCKER_CACHE_FILE}" ]]; then
        gunzip -c "${DOCKER_CACHE_FILE}" | docker load
    fi
    ;;
*)
    echo "Unknown action $1"
esac
|
#!/bin/sh
# Run GeoNet depth-estimation inference on the KITTI Eigen test split using
# the checkpoint under /media/data/models/, writing predictions to outputs/.
python3 geonet_main.py --mode=test_depth --dataset_dir=/media/data/kitti_eigen/ --init_ckpt_file=/media/data/models/model --batch_size=4 --depth_test_split=eigen --output_dir=outputs/
|
<filename>src/main/java/au/org/noojee/irrigation/servlets/PgBootstrapListener.java
package au.org.noojee.irrigation.servlets;
import org.jsoup.nodes.Element;
import com.vaadin.server.BootstrapFragmentResponse;
import com.vaadin.server.BootstrapListener;
import com.vaadin.server.BootstrapPageResponse;
final class PgBootstrapListener implements BootstrapListener
{
private static final long serialVersionUID = 1L;
@Override
public void modifyBootstrapFragment(
BootstrapFragmentResponse response)
{
// NOOP
}
@Override
public void modifyBootstrapPage(BootstrapPageResponse response)
{
Element head = response.getDocument()
.head();
/** Add tags to make this a PWA app **/
/** Give our app a title */
head.prependElement("title").appendText("Pi-Gation");
/** Icon for the home screen */
head.prependElement("link")
.attr("src", "/irrigation/VAADIN/themes/mytheme/images/pi-gation-192x192.png");
/** Set the theme colour **/
head.prependElement("meta")
.attr("name", "theme-color")
.attr("content", "#00b4f0");
/** link to the manifest for the pwa **/
head.prependElement("link")
.attr("rel", "manifest")
.attr("href", "VAADIN/manifest.json");
/** Add the ProgressiveApp.js to the bottom of the page */
Element body = response.getDocument().body();
body.appendElement("script")
.attr("type", "text/javascript")
.attr("src", "./VAADIN/js/ProgressiveApp.js");
}
} |
###### Parameters ######
RUN_FILE=./benchmark_eq
RES_FILE=results/equality-plain/
RUNS=1
########################

# Run one benchmark configuration twice: first verbose-only, then again with
# the additional -p flag; both write results under $RES_FILE.
run_both()
{
    $RUN_FILE "$@" -v -w $RES_FILE
    $RUN_FILE "$@" -p -v -w $RES_FILE
}

# Folklore-protocol sweep at depth $1 for bit lengths 2^$2 .. 2^$3.
fl_sweep()
{
    local log_d=$1 lo=$2 hi=$3
    for (( log_l=lo; log_l<=hi; log_l++ )); do
        run_both fl-plain -l $((2 ** log_l)) -d $log_d
    done
}

# Constant-weight sweep at depth $1 for bit lengths 2^$2 .. 2^$3 with
# weight k = 2^(log_l - $4).
cw_sweep()
{
    local log_d=$1 lo=$2 hi=$3 diff=$4
    for (( log_l=lo; log_l<=hi; log_l++ )); do
        run_both cw-plain -l $((2 ** log_l)) -d $log_d -k $((2 ** (log_l - diff)))
    done
}

mkdir -p $RES_FILE

for ((repeat=1; repeat<=RUNS; repeat++)); do
    ####### Folklore Plain #######
    echo "folklore plain"
    fl_sweep 13 3 4
    fl_sweep 14 3 9

    ####### Constant-weight Plain #######
    echo "cw plain k=ell"
    cw_sweep 13 3 4 0
    cw_sweep 14 3 9 0

    echo "cw plain k=ell/2"
    cw_sweep 13 3 5 1
    cw_sweep 14 3 9 1

    echo "cw plain k=ell/4"
    cw_sweep 12 3 3 2
    cw_sweep 13 3 6 2
    cw_sweep 14 3 9 2

    echo "cw plain k=ell/8"
    cw_sweep 12 3 4 3
    cw_sweep 13 3 7 3
    cw_sweep 14 3 9 3
done
const mathjax = require('mathjax-full/js/mathjax').mathjax
const register = require('mathjax-full/js/handlers/html').RegisterHTMLHandler
const fromDom = require('hast-util-from-dom')
const toText = require('hast-util-to-text')
const createAdaptor = require('./adaptor')

module.exports = renderer

// Single DOM adaptor shared by every renderer created in this process.
const adaptor = createAdaptor()

/* To do next major: Keep resultant HTML handler from `register(adaptor)` to
allow registering the AssistiveMmlHandler as in this demo:
https://github.com/mathjax/MathJax-demos-node/tree/master/direct */
/* To do next major: If registering AssistiveMmlHandler is supported through
configuration, move HTML handler registration to beginning of transformer and
unregister at the end of transformer with
`mathjax.handlers.unregister(handler)`. That is to prevent memory leak in
`mathjax.handlers` whenever a new instance of the plugin is used. */
register(adaptor)
/**
 * Create a math renderer bound to the given MathJax input/output jax.
 * Returns `render(node, options)`, which replaces a hast node's children
 * with the converted math, and `styleSheet()`, which returns the output
 * jax's CSS as a hast <style> element.
 */
function renderer(input, output) {
  // One MathJax document drives every conversion performed by this renderer.
  const doc = mathjax.document('', {InputJax: input, OutputJax: output})

  const render = (node, options) => {
    const mathDom = doc.convert(toText(node), options)
    node.children = [fromDom(mathDom)]
  }

  const styleSheet = () => {
    const css = adaptor.textContent(output.styleSheet(doc))
    return {
      type: 'element',
      tagName: 'style',
      properties: {},
      children: [{type: 'text', value: css}]
    }
  }

  return {render, styleSheet}
}
|
<filename>src/icon/IconInstagram.tsx
import React from 'react';
export interface IconInstagramProps extends React.SVGAttributes<SVGElement> {
  color?: string;
  size?: string | number;
  className?: string;
  style?: React.CSSProperties;
}

/**
 * Instagram glyph from the Feather icon set rendered as an inline SVG.
 * `color` maps to the stroke and `size` to width/height; all other SVG
 * attributes are forwarded to the root <svg> element.
 */
// Fixed: React.SFC is a deprecated alias — use React.FC instead.
export const IconInstagram: React.FC<IconInstagramProps> = (
  props: IconInstagramProps
): React.ReactElement => {
  const { color, size, style, ...restProps } = props;
  return (
    <svg
      xmlns="http://www.w3.org/2000/svg"
      width={size}
      height={size}
      viewBox="0 0 24 24"
      fill="none"
      stroke={color}
      className="feather feather-instagram"
      strokeWidth="2"
      strokeLinecap="round"
      strokeLinejoin="round"
      style={{ verticalAlign: 'middle', ...style }}
      {...restProps}
    >
      <rect x="2" y="2" width="20" height="20" rx="5" ry="5" />
      <path d="M16 11.37A4 4 0 1 1 12.63 8 4 4 0 0 1 16 11.37z" />
      <line x1="17.5" y1="6.5" x2="17.5" y2="6.5" />
    </svg>
  );
};

IconInstagram.defaultProps = {
  color: 'currentColor',
  size: '1em',
};

export default IconInstagram;
|
<filename>app/src/main/java/com/young/myresultfix/HotFixUtil.java
package com.young.myresultfix;
import android.content.Context;
import android.util.Log;
import java.io.File;
import java.lang.reflect.Array;
import java.lang.reflect.Field;
import dalvik.system.DexClassLoader;
import dalvik.system.PathClassLoader;
/**
 * Dex-injection hot fix helper: loads patch files (.dex/.apk/.jar/.zip) from
 * the app's external "patch" directory and prepends their dex elements to the
 * application class loader's path list, so patched classes shadow originals.
 */
public class HotFixUtil {
    private static final String TAG = "young";
    private static final String NAME_BASE_DEX_CLASS_LOADER = "dalvik.system.BaseDexClassLoader";
    private static final String FIELD_DEX_ELEMENTS = "dexElements";
    private static final String FIELD_PATH_LIST = "pathList";
    private static final String DEX_SUFFIX = ".dex";
    private static final String APK_SUFFIX = ".apk";
    private static final String JAR_SUFFIX = ".jar";
    private static final String ZIP_SUFFIX = ".zip";
    // Directory under getExternalFilesDir() that holds downloaded patches.
    private static final String DEX_DIR = "patch";
    // App-private directory DexClassLoader uses for optimized dex output.
    private static final String OPTIMIZE_DEX_DIR = "optidex";

    /**
     * Entry point: merges any patch files found on external storage into the
     * current class loader. No-op when context is null or the patch directory
     * is missing or empty.
     *
     * @param context used to locate the patch dir and the app class loader
     */
    public void doHotFix(Context context) {
        if (context == null) {
            return;
        }
        File dexDir = context.getExternalFilesDir(DEX_DIR);
        if (!dexDir.exists()) {
            Log.e("young", "热更新目录不存在,无法热更新");
            return;
        }
        File opdexFile = context.getDir(OPTIMIZE_DEX_DIR, Context.MODE_PRIVATE);
        if (!opdexFile.exists()) {
            opdexFile.mkdir();
        }
        File[] files = dexDir.listFiles();
        if (files == null || files.length == 0) {
            return;
        }
        String dexPath = getPatchDexPath(files);
        String opdexPath = opdexFile.getAbsolutePath();
        PathClassLoader classLoader = (PathClassLoader) context.getClassLoader();
        // Load the patches with their own DexClassLoader, then splice its
        // dexElements in front of the app loader's elements.
        DexClassLoader dexClassLoader = new DexClassLoader(dexPath, opdexPath, null, classLoader);
        Object pathElements = getDexElements(classLoader);
        Object dexElements = getDexElements(dexClassLoader);
        Object combineObject = combineElementArray(pathElements, dexElements);
        setDexElements(classLoader, combineObject);
    }

    /**
     * Joins the absolute paths of all patchable files (.dex/.zip/.apk/.jar)
     * with the platform path separator.
     *
     * Fixed: the original decided whether to insert a separator from the
     * array index (skipping index 0 and the last index), which glued the
     * last matching path onto the previous one and misplaced separators
     * whenever non-patch files were mixed in. A separator is now inserted
     * whenever the builder already contains a path.
     */
    private String getPatchDexPath(File[] files) {
        StringBuilder builder = new StringBuilder();
        for (File file : files) {
            String name = file.getName();
            if (name.endsWith(DEX_SUFFIX) || name.endsWith(ZIP_SUFFIX) || name.endsWith(APK_SUFFIX) || name.endsWith(JAR_SUFFIX)) {
                if (builder.length() > 0) {
                    builder.append(File.pathSeparator);
                }
                builder.append(file.getAbsolutePath());
            }
        }
        return builder.toString();
    }

    /**
     * Reflectively reads BaseDexClassLoader.pathList.dexElements from the
     * given loader; returns null (after logging) on any reflection failure.
     */
    private Object getDexElements(ClassLoader classLoader) {
        try {
            Class<?> baseDexClassLoaderClazz = Class.forName(NAME_BASE_DEX_CLASS_LOADER);
            Field pathListField = baseDexClassLoaderClazz.getDeclaredField(FIELD_PATH_LIST);
            pathListField.setAccessible(true);
            Object dexPathList = pathListField.get(classLoader);
            Field dexElementsField = dexPathList.getClass().getDeclaredField(FIELD_DEX_ELEMENTS);
            dexElementsField.setAccessible(true);
            Object dexElements = dexElementsField.get(dexPathList);
            return dexElements;
        } catch (ClassNotFoundException e) {
            e.printStackTrace();
        } catch (NoSuchFieldException e) {
            e.printStackTrace();
        } catch (IllegalAccessException e) {
            e.printStackTrace();
        }
        return null;
    }

    /**
     * Concatenates two dexElements arrays into a new array of the same
     * component type, with the patch elements FIRST — the class loader
     * searches elements in order, so patched classes win.
     */
    private Object combineElementArray(Object pathElements, Object dexElements) {
        Class<?> componentType = pathElements.getClass().getComponentType();
        int i = Array.getLength(pathElements);
        int j = Array.getLength(dexElements);
        int k = i + j;
        Object result = Array.newInstance(componentType, k);
        System.arraycopy(dexElements, 0, result, 0, j);
        System.arraycopy(pathElements, 0, result, j, i);
        return result;
    }

    /**
     * Reflectively writes the combined element array back into
     * BaseDexClassLoader.pathList.dexElements of the given loader.
     */
    private void setDexElements(ClassLoader classLoader, Object value) {
        try {
            Class<?> baseDexClassLoaderClazz = Class.forName(NAME_BASE_DEX_CLASS_LOADER);
            Field pathListField = baseDexClassLoaderClazz.getDeclaredField(FIELD_PATH_LIST);
            pathListField.setAccessible(true);
            Object pathList = pathListField.get(classLoader);
            Field dexElementsField = pathList.getClass().getDeclaredField(FIELD_DEX_ELEMENTS);
            dexElementsField.setAccessible(true);
            dexElementsField.set(pathList, value);
        } catch (ClassNotFoundException | NoSuchFieldException | IllegalAccessException e) {
            e.printStackTrace();
        }
    }
}
|
# fzf-marker configuration (config-dir override left disabled by default).
#export FZF_MARKER_CONF_DIR=~/.config/marker
# ANSI escape used for the command portion of each marker entry.
export FZF_MARKER_COMMAND_COLOR='\x1b[38;5;249m'
# ANSI escape used for the trailing comment/description portion.
export FZF_MARKER_COMMENT_COLOR='\x1b[38;5;32m'
# Key binding that opens the marker picker (Ctrl-@).
export FZF_MARKER_MAIN_KEY='\C-@'
# Key binding for jumping to the next placeholder (Ctrl-v).
export FZF_MARKER_PLACEHOLDER_KEY='\C-v'
|
-- Ten most expensive products priced above 200, highest price first.
SELECT product_name, price
FROM products_table
WHERE price > 200
ORDER BY price DESC
LIMIT 10;
def log_matching_keys(dict1, dict2):
    """Print one line for every key present in both mappings, showing the
    value each mapping holds for it."""
    for key in set(dict1) & set(dict2):
        print("Match: Match key {} found => {} : {}".format(key, dict1[key], dict2[key]))
<html>
<body>
  <!-- Minimal form: submits the single "input" text field back to the
       current URL (empty action) via the default GET method. -->
  <form action="">
    <input type="text" name="input"/>
    <input type="submit" value="Submit"/>
  </form>
</body>
</html>
#!/usr/bin/env bash
# Environment for running Webots R2021b headless in CI.
export WEBOTS_VERSION=2021b
# Render off-screen (no display attached).
export WEBOTS_OFFSCREEN=1
export CI=1
# Suppress interactive prompts from apt/dpkg during provisioning.
export DEBIAN_FRONTEND=noninteractive
# Needed so Qt WebEngine can run inside sandboxed/containerized CI.
export QTWEBENGINE_DISABLE_SANDBOX=1
|
#!/bin/bash

# Copyright 2014 Johns Hopkins University (author: Daniel Povey)
#            2017 Luminar Technologies, Inc. (author: Daniel Galvez)
#            2017 Ewald Enzinger
# Apache 2.0

# Adapted from egs/mini_librispeech/s5/local/download_and_untar.sh (commit 1cd6d2ac3a935009fdc4184cb8a72ddad98fe7d9)
#
# Downloads <url> into <data-base>/<filename> (unless already downloaded and
# extracted) and un-tars it, touching <filename>.complete as a marker.

remove_archive=false

if [ "$1" == --remove-archive ]; then
  remove_archive=true
  shift
fi

if [ $# -ne 3 ]; then
  echo "Usage: $0 [--remove-archive] <data-base> <url> <filename>"
  echo "e.g.: $0 /export/data/ https://common-voice-data-download.s3.amazonaws.com/cv_corpus_v1.tar.gz cv_corpus_v1.tar.gz"
  echo "With --remove-archive it will remove the archive after successfully un-tarring it."
  exit 1
fi

data=$1
url=$2
filename=$3
filepath="$data/$filename"
workspace=$PWD

if [ ! -d "$data" ]; then
  echo "$0: no such directory $data"
  exit 1;
fi

if [ -z "$url" ]; then
  echo "$0: empty URL."
  exit 1;
fi

if [ -f "$data/$filename.complete" ]; then
  echo "$0: data was already successfully extracted, nothing to do."
  exit 0;
fi

# Validate an existing archive against an expected byte size, but only when
# the caller provides one via the $filesize environment variable.
# Fixed: the original compared against $filesize unconditionally; since it is
# never set in this script, the numeric test errored out and the archive was
# always deleted and re-downloaded.
if [ -f "$filepath" ]; then
  if [ -n "$filesize" ]; then
    size=$(/bin/ls -l "$filepath" | awk '{print $5}')
    if [ "$filesize" -eq "$size" ]; then
      echo "$filepath exists and appears to be complete."
    else
      echo "$0: removing existing file $filepath because its size in bytes ($size)"
      echo "does not equal the size of the archives ($filesize)."
      rm "$filepath"
    fi
  else
    echo "$filepath exists and appears to be complete."
  fi
fi

if [ ! -f "$filepath" ]; then
  if ! which wget >/dev/null; then
    echo "$0: wget is not installed."
    exit 1;
  fi
  echo "$0: downloading data from $url. This may take some time, please be patient."
  cd "$data"
  if ! wget --no-check-certificate "$url"; then
    echo "$0: error executing wget $url"
    exit 1;
  fi
  cd "$workspace"
fi

cd "$data"

if ! tar -xf "$filename"; then
  echo "$0: error un-tarring archive $filepath"
  exit 1;
fi

cd "$workspace"
touch "$data/$filename.complete"

echo "$0: Successfully downloaded and un-tarred $filepath"

if $remove_archive; then
  echo "$0: removing $filepath file since --remove-archive option was supplied."
  rm "$filepath"
fi
|
#!/bin/bash
# Generate number images for shogi boards: blank canvases are created first,
# digits/characters are drawn on them, rotated variants are added for piece
# images, and everything is concatenated into the final sheet.

# Create working directories.
if [ ! -d images ]; then
    mkdir -v images
fi
if [ ! -d tmp ]; then
    mkdir -v tmp
fi

# Validate the requested action ($1).
# (Fixed: the two original case arms assigned ACTION=$1 identically and are
# merged into one.)
case $1 in
normal|box|row1|column1|row2|column2|55-row1|55-column1|55-row2|55-column2)
    ACTION=$1
    ;;
*)
    echo '[Error] input $1, action'
    echo ' normal : 枠のない数字の画像を出力する'
    echo ' box : 枠に囲まれた数字の画像を出力する'
    echo ' row1 : 将棋盤の筋(1から9)に使用するための画像を出力する'
    echo ' column1 : 将棋盤の段(一から九)に使用するための画像を出力する'
    echo ' row2 : 将棋盤の筋(1から9)に使用するための画像を出力する'
    echo ' column2 : 将棋盤の段(aからi)に使用するための画像を出力する'
    echo ' 55-row1 : 55将棋の筋(1から5)に使用するための画像を出力する'
    echo ' 55-column1 : 55将棋の段(一から五)に使用するための画像を出力する'
    echo ' 55-row2 : 55将棋の筋(1から5)に使用するための画像を出力する'
    echo ' 55-column2 : 55将棋の段(aからe)に使用するための画像を出力する'
    exit 1
    ;;
esac

# Create a blank canvas sized for the chosen action.
case $ACTION in
normal|box)
    # Blank 48x48 image.
    python create_blank.py 48 48
    ;;
row1|row2|55-row1|55-row2)
    # Blank 97x19 image (file/rank strip along the board edge).
    python create_blank.py 97 19
    ;;
column1|column2|55-column1|55-column2)
    # Blank 22x106 image.
    python create_blank.py 22 106
    ;;
esac

# Draw the numbers/characters onto the blank images.
# (Fixed: the row1|column1 and row2|column2 arms — and likewise the 55-*
# arms — ran identical loops and are merged.)
case $ACTION in
normal)
    for i in `seq 1 18`; do
        python draw_number.py $ACTION $i
    done
    ;;
box)
    for i in `seq 1 18`; do
        python draw_box_number.py $i
    done
    ;;
row1|column1|row2|column2)
    for i in `seq 1 9`; do
        python draw_number.py $ACTION $i
    done
    ;;
55-row1|55-column1|55-row2|55-column2)
    for i in `seq 1 5`; do
        python draw_number.py $ACTION $i
    done
    ;;
esac

# Create 180-degree rotated variants (used for the opponent's pieces).
case $ACTION in
normal|box)
    for i in `seq 1 18`; do
        python rotate_number.py $i
    done
    ;;
esac

# Concatenate all generated images into the final output.
python concat_numbers.py $ACTION
|
<filename>src/admin/controller/blog.js
/**
* Created by AaronLeong on 08/12/2016.
*/
'use strict';
import Base from './base.js';
/**
 * Admin blog controller: renders the settings page, persists setting
 * updates, and serves the blog-post editor form. All data access goes
 * through thinkjs models; the global settings document has a fixed id.
 */
export default class extends Base {
  /**
   * index action
   * @return {Promise} []
   */
  async indexAction() {
    //auto render template file index_index.html
    // Load the single global settings document by its fixed id.
    let sys_setting = await this.model('setting').find({_id: '5849709981be5002840e1dca'})
    console.log(sys_setting)
    this.assign('sys_setting', sys_setting)
    return this.display();
  }

  /**
   * save action: persist the posted settings onto the fixed settings
   * document, then reply with a success envelope.
   * @return {Promise} []
   */
  async saveAction() {
    let sys_setting = this.post()
    //auto render template file index_index.html
    let uid = await this.model('setting').where({_id: "5849709981be5002840e1dca"}).update(sys_setting);
    console.log(uid)
    // Re-read the document (debug logging only).
    let id = await this.model('setting').find({_id: '5849709981be5002840e1dca'})
    console.log(id)
    return this.success();
  }

  /**
   * post action: on GET, render the post editor pre-filled with defaults;
   * the POST branch is not implemented yet.
   * @return {Promise} []
   */
  async postAction() {
    if(this.isPost()){
      // NOTE(review): POST handling is intentionally empty — submitting the
      // editor form currently does nothing; confirm whether this is pending work.
    }else{
      this.assign('post', {title:'标题',abstract:'',content:'',keywords:'',author:'',from:'',allowcomment:1,totop:1,torecom:1,topicrecom:1})
      this.assign('cates', [{id:0,catename:'科研'}])
      this.assign('tagsList', [{id:0,tagname:'科研'}])
      this.assign('tagselectedId', 0)
      // this.assign('sys_setting', sys_setting)
      return this.display();
    }
    // let sys_setting = this.post()
    // //auto render template file index_index.html
    // let uid = await this.model('setting').where({_id: "5849709981be5002840e1dca"}).update(sys_setting);
    // console.log(uid)
    // let id = await this.model('setting').find({_id: '5849709981be5002840e1dca'})
    // console.log(id)
    // return this.success();
  }
}
function countFilesChangedPerPullRequest(pullRequests: FetchThirdPartyPullRequestPullRequest[]): Record<string, number> {
const filesChangedPerPullRequest: Record<string, number> = {};
// Simulated logic to count files changed for each pull request
for (const pr of pullRequests) {
// Replace this logic with actual implementation based on the provided data structure
const filesChanged = Math.floor(Math.random() * 50); // Simulated files changed count
filesChangedPerPullRequest[pr.id] = filesChanged;
}
return filesChangedPerPullRequest;
} |
#!/bin/bash
# Create aiida_config.yaml configuration file for activate-aiida,
# then initialize the AiiDA database (daemon is not started).
# Fixed: legacy backtick command substitution replaced with $(...).

# Parse first/last name from GIT_AUTHOR_NAME
# See https://stackoverflow.com/a/17841619/1069467
function join_by { local d=$1; shift; local f=$1; shift; printf %s "$f" "${@/#/$d}"; }

# Word-split the author name: first word is the first name, the rest is
# joined back together as the last name.
user=($GIT_AUTHOR_NAME)
first_name=${user[0]}
last_name=$(join_by ' ' "${user[@]:1}")

project_dir=$(pwd)

cat > aiida_config.yaml <<EOF
store_path: "${project_dir}/repo"
su_db_username: aiidauser
# su_db_password: # not yet supported
db_engine: postgresql_psycopg2
db_backend: django
db_host: localhost
db_port: 5432
db_name: aiida
db_username: aiidauser
db_password: verdi
profile: "default"
email: "$EMAIL"
first_name: "$first_name"
last_name: "$last_name"
institution: Renkulab
non_interactive: true
EOF

# Add AIIDA_PATH environment variable
export AIIDA_PATH="${project_dir}/repo"

# todo: Enable AiiDA line magic
#ipython_startup_dir=$HOME/.ipython/profile_default/startup/
#mkdir -p $ipython_startup_dir

# create database, don't start daemon
reentry scan
source aiida-activate aiida_config.yaml -c
|
//
// UIView+qbLayer.h
// allrichstore
//
// Created by 任强宾 on 16/12/16.
// Copyright © 2016年 allrich88. All rights reserved.
//

#import <UIKit/UIKit.h>

// Selects which side(s)/corner(s) of the view are affected.
typedef NS_ENUM(NSInteger, CornerSide) {
    CornerSideTop = 0,
    CornerSideLeft = 1,
    CornerSideBottom = 2,
    CornerSideRight = 3,
    CornerSideTopLeft = 4,
    CornerSideTopRight = 5,
    CornerSideBottomLeft = 6,
    CornerSideBottomRight = 7,
    CornerSideAll = 8
};

// Category for rounding selected corners/sides of a UIView and drawing a
// border around them via the view's layer.
@interface UIView (qbLayer)

// side:  which side/corner to round (see CornerSide)
// size:  corner radii used for the rounding path
// color/width: border stroke appearance
// NOTE(review): semantics inferred from the signature; confirm against the
// category implementation (.m file is not in view here).
- (void)roundSide:(CornerSide)side
             size:(CGSize)size
      borderColor:(UIColor *)color
      borderWidth:(CGFloat)width;

@end
|
<reponame>rasa/idbenchmark<filename>bolt_test.go
package idbenchmark_test
import (
"errors"
"log"
"testing"
"github.com/boltdb/bolt"
)
// boltConnect opens (or creates) bolt.db with 0600 permissions and ensures
// the benchmark bucket exists. Returns the open handle, or nil and the
// first error encountered (which is also logged).
func boltConnect() (db *bolt.DB, err error) {
	bucketName := []byte(idbenchmarkKey)
	db, err = bolt.Open("bolt.db", 0600, nil)
	if err != nil {
		log.Println(err)
		return nil, err
	}
	// Create the bucket up front so benchmark iterations only draw sequence ids.
	err = db.Update(func(tx *bolt.Tx) error {
		_, err := tx.CreateBucketIfNotExists(bucketName)
		if err != nil {
			log.Printf("create bolt bucket error: %s", err)
		}
		return err
	})
	if err != nil {
		return nil, err
	}
	return db, nil
}
// runBolt performs b.N sequential writes: each iteration opens one
// read-write transaction and draws the next sequence id from the benchmark
// bucket. It logs and stops early on the first failed update.
func runBolt(b *testing.B, db *bolt.DB) {
	bucketName := []byte(idbenchmarkKey)
	for n := 0; n < b.N; n++ {
		err := db.Update(func(tx *bolt.Tx) error {
			// Retrieve the users bucket.
			// This should be created when the DB is first opened.
			b := tx.Bucket(bucketName)
			// Generate ID for the user.
			// This returns an error only if the Tx is closed or not writeable.
			// That can't happen in an Update() call so I ignore the error check.
			id, _ := b.NextSequence()
			if id == 0 {
				log.Printf("id=0")
				return errors.New("id=0")
			}
			return nil
		})
		if err != nil {
			log.Printf("Sequence bolt error: %v", err)
			break
		}
	}
}
// BenchmarkBolt measures sequential NextSequence updates on a single
// connection.
// Fixed: a failed connect previously returned silently, letting the
// benchmark "pass" without doing any work; it now fails loudly.
func BenchmarkBolt(b *testing.B) {
	db, err := boltConnect()
	if err != nil {
		b.Fatalf("bolt connect failed: %v", err)
	}
	defer db.Close()
	b.ResetTimer()
	runBolt(b, db)
	b.StopTimer()
}
// BenchmarkBoltParallel measures NextSequence updates issued from parallel
// goroutines.
// Fixed: (1) a failed connect was silently swallowed — now reported via
// b.Fatalf; (2) the old body called runBolt (which itself loops b.N times)
// once per pb.Next(), performing O(b.N^2) updates and skewing the result.
// Each pb.Next() iteration now performs exactly one update.
func BenchmarkBoltParallel(b *testing.B) {
	db, err := boltConnect()
	if err != nil {
		b.Fatalf("bolt connect failed: %v", err)
	}
	defer db.Close()
	bucketName := []byte(idbenchmarkKey)
	b.ResetTimer()
	b.RunParallel(func(pb *testing.PB) {
		for pb.Next() {
			err := db.Update(func(tx *bolt.Tx) error {
				// One sequence draw per benchmark iteration.
				id, _ := tx.Bucket(bucketName).NextSequence()
				if id == 0 {
					return errors.New("id=0")
				}
				return nil
			})
			if err != nil {
				b.Errorf("Sequence bolt error: %v", err)
				return
			}
		}
	})
	b.StopTimer()
}
|
# -*- encoding : utf-8 -*-
class GistsController < ApplicationController
before_action :login_required, only: [:mine, :mine_page, :fork]
before_action :deny_anonymous_if_disallowed, only: [:new, :create, :edit, :update]
def deny_anonymous_if_disallowed
anonymous_allowed || login_required
end
respond_to :html
def index
@gists = Gist.recent.page(1).per(10)
@gist_list_title = 'Public Gists'
end
def search
if params[:search_query].present?
@search_query = params[:search_query]
@gists = Gist.search(@search_query, current_user.try(:id), 1)
@gist_list_title = 'Search Result'
else
@gists = Gist.recent.page(1).per(10)
end
render action: 'index'
end
def show
@gist = find_visible_gist_by_id(params[:id], current_user)
if @gist
@gist_history = @gist.gist_histories.first
else
render_404
end
end
def show_history
@gist_history = GistHistory.where(id: params[:gist_history_id]).first
return render_404 if @gist_history.nil? || @gist_history.gist.nil?
@gist = @gist_history.gist
if is_path_param_gist_id_valid? || is_not_visible_gist?
render_404
else
render action: 'show'
end
end
def show_single_raw_file
@gist = find_visible_gist_by_id(params[:id], current_user)
if @gist && @gist.latest_history.gist_files.size == 1
render text: @gist.latest_history.gist_files.first.body, :content_type => Mime::TEXT
else
render_404
end
end
def show_raw_file
@gist = find_visible_gist_by_id(params[:id], current_user)
return render_404 unless @gist
@gist_file = GistFile.where(id: params[:gist_file_id]).first
return render_404 if @gist_file.nil? || @gist.id != @gist_file.gist_history.gist_id
render text: @gist_file.body, :content_type => Mime::TEXT
end
def new
@gist = Gist.new
@gist_history = GistHistory.new
end
def edit
@gist = find_visible_gist_by_id(params[:id], current_user)
if @gist
@gist_history = @gist.gist_histories.first
else
redirect_to root_path
end
end
def create
@gist = Gist.new(
title: params[:gist][:title],
user_id: current_user.try(:id),
is_public: (current_user.nil? || params[:is_public] || false)
)
save_gist_and_redirect(GistPersistence.new(flash), 'new')
end
def update
@gist = find_visible_gist_by_id(params[:id], current_user)
return render_404 unless @gist
if @gist.user_id.present? && @gist.user_id != current_user.try(:id)
return redirect_to gists_path
end
@gist.title = params[:gist][:title]
@gist.updated_at = Time.now
save_gist_and_redirect(GistPersistence.new(flash), 'edit')
end
def fork
gist_to_fork = Gist.where(id: params[:gist_id]).first
return render_404 unless gist_to_fork
already_forked = Gist.find_already_forked(gist_to_fork.id, current_user.id)
return redirect_to already_forked if already_forked.present?
begin
created_gist = GistForkCreation.new.save!(gist_to_fork, current_user)
redirect_to created_gist, notice: 'Successfully forked.'
rescue Exception => e
debug_log_back_trace(e)
redirect_to gist_to_fork, notice: 'Failed to fork.'
end
end
def destroy
gist = find_visible_gist_by_id(params[:id], current_user)
return render_404 unless gist
if gist.user_id.present? && gist.user_id != current_user.try(:id)
redirect_to root_path, notice: 'Not found.'
else
gist.destroy
redirect_to root_path, notice: 'Successfully deleted.'
end
end
def add_gist_files_input
respond_as_js {}
end
def mine
@gists = Gist.find_my_recent_gists(current_user.id).page(1).per(10)
@gist_list_title = 'My Gists'
end
# ajax paginator
def page
paginator_respond_as_js {
if params[:search_query].present?
@search_query = params[:search_query]
@gists = Gist.search(@search_query, current_user.try(:id), @page)
else
@gists = Gist.recent.page(@page).per(10)
end
}
end
# ajax paginator
def mine_page
paginator_respond_as_js {
@gists = Gist.find_my_recent_gists(current_user.id).page(@page).per(10)
}
end
# ajax paginator
def user_page
paginator_respond_as_js {
return render text: '', status: :not_found unless @user
@gists = Gist.where(user_id: @user.id).recent.page(@page).per(10)
}
end
# ajax paginator
def user_fav_page
paginator_respond_as_js {
return render text: '', status: :not_found unless @user
@favorites = Favorite.where(user_id: @user.id).recent.page(@page).per(10)
}
end
private
def save_gist_and_redirect(gist_saver, failure_view_name)
begin
gist_files = params[:gist_file_names].zip(params[:gist_file_bodies])
gist_saver.save!(@gist, gist_files, current_user)
redirect_to @gist, notice: 'Successfully created.'
rescue Exception => e
debug_log_back_trace(e)
render action: failure_view_name
end
end
def set_paginator_params_to_fields
@page = params[:page]
@user = User.where(id: params[:user_id]).first
end
def respond_as_js
respond_to { |format| format.js { yield }}
end
def paginator_respond_as_js
respond_as_js {
set_paginator_params_to_fields
yield
}
end
def find_visible_gist_by_id(id, current_user)
Gist.where(id: id).first ||
Gist.find_my_gist_even_if_private(id, current_user.try(:id))
end
# NOTE(review): despite the name, this returns true when the path id does
# NOT match @gist.id — i.e. truthy means the param looks stale/invalid.
# Confirm caller expectations before renaming or inverting.
def is_path_param_gist_id_valid?
  @gist.id != params[:id].to_i
end
# Private gists should be disclosed only to their owner: a gist is hidden
# when it is non-public and belongs to someone other than the current user.
def is_not_visible_gist?
  !(@gist.is_public || @gist.user_id == current_user.try(:id))
end
end
|
<gh_stars>1-10
package search;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.StringTokenizer;
/**
 *
 * @author exponential-e
 * Baekjoon 18221: "Professor, I want to get a job" (백준 18221번)
 *
 * @see https://www.acmicpc.net/problem/18221
 *
 */
public class Boj18221 {
    // Minimum required SQUARED distance between target and trap
    // (distance must be >= 5, hence 5 * 5 = 25).
    private static final int INTERVAL = 25;
    // Position of cell value 2 (the student); (-1,-1) until read from input.
    private static Point target = new Point(-1, -1);
    // Position of cell value 5 (the professor).
    private static Point trap = new Point(-1, -1);
    // Positions of every cell value 1 (the helper students).
    private static ArrayList<Point> helper = new ArrayList<>();

    // Simple row/column coordinate pair.
    private static class Point{
        int row;
        int col;
        public Point(int row, int col) {
            this.row = row;
            this.col = col;
        }
    }

    // Reads an N x N grid from stdin, records the special cells into the
    // static fields above, then prints result(map) (1 or 0).
    public static void main(String[] args) throws Exception{
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        int N = Integer.parseInt(br.readLine());
        int[][] map = new int[N][N];

        for(int i = 0; i < N; i++) {
            StringTokenizer st = new StringTokenizer(br.readLine());
            for(int j = 0; j < N; j++) {
                map[i][j] = Integer.parseInt(st.nextToken());
                if(map[i][j] == 1) helper.add(new Point(i, j));
                if(map[i][j] == 2) target = new Point(i, j);
                if(map[i][j] == 5) trap = new Point(i, j);
            }
        }
        System.out.println(result(map));
    }

    // Returns 1 when target and trap are at least distance 5 apart AND at
    // least 3 helpers (count / 3 > 0) lie inside the axis-aligned bounding
    // rectangle spanned by the two positions; otherwise 0.
    private static int result(int[][] arr) {
        if(!distance(target, trap)) return 0;
        int count = 0;
        Point min = new Point(Math.min(target.row, trap.row), Math.min(target.col, trap.col));
        Point max = new Point(Math.max(target.row, trap.row), Math.max(target.col, trap.col));

        // Count helpers inside the bounding rectangle (inclusive edges).
        for(Point p: helper) {
            if(p.row < min.row || p.col < min.col || p.row > max.row || p.col > max.col) continue;
            count++;
        }
        return count / 3 > 0 ? 1: 0;
    }

    // True when the squared Euclidean distance between the two points is at
    // least INTERVAL (i.e. real distance >= 5).
    private static boolean distance(Point p1, Point p2) {
        return Math.pow(p1.row - p2.row, 2) + Math.pow(p1.col - p2.col, 2) >= INTERVAL;
    }
}
|
#!/bin/bash
# Locate required system binaries once up front.
USERADD=`which useradd`
GROUPADD=`which groupadd`
USERMOD=`which usermod`
WGET=`which wget`
# Pick the package manager: apt-get on Debian-family, yum otherwise.
[ -f /etc/debian_version ] && INSTALL=apt-get || INSTALL=yum
# SSH key installation targets for the nagios user.
AUTH_FILE=authorized_keys
NAG_SSH_DIR=/home/nagios/.ssh
NAG_KEY_FILE=$NAG_SSH_DIR/$AUTH_FILE
# Plugin tarball and download location (legacy URL kept commented for reference).
PLUGINS="nagios-plugins-emergya.tar.gz"
#NAG_PLUGINS_URL="http://nagios.emergya.com/recursos/$PLUGINS"
NAG_PLUGINS_URL="https://www.fs.mfernandez.es/$PLUGINS"
# Public key appended to the nagios user's authorized_keys.
KEY="ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAQEA0i+3YoBL3jbyc1Tu6eC/H+Ac2BJ499xkD7W3+ubY6AH1jVHh3JzPPpr3E0YOBw1ogd0csWSAdEjRbGDXGG2CiNvTSa8sXZxQvZI1LsHu9EAGIcVYQR8nhT2Bjnas7zaC44/4+FhMn9B8n+xLH2u7dsae0rlpU/fHnw5flt/1j17Siadya6UMjgNeHqc+0YrRVZ5b1WeczNIoFW3bS35T+6cR3hnHKK6+KE/i6vrerGqQHu3mvMJ7L9ftikK0Vic8m1HmmLXjZW6ZcxQ1mU/Ob2cmKijEZQxH3qLzLn03lO7H8e5NKqQWdOh8E15Lx/Iq62Tu30OfgYzKgAAgwVUsyw== nagios@centosNagios"
# Ensure ~nagios/.ssh/authorized_keys exists and contains $KEY.
# Creates the directory/file on first run; appends the key only when missing.
function check_authorized_keys () {
    if [ ! -d "$NAG_SSH_DIR" ]; then
        mkdir -p "$NAG_SSH_DIR"
        touch "$NAG_KEY_FILE"
        chmod 644 "$NAG_KEY_FILE"
        echo "$KEY" >> "$NAG_KEY_FILE"
    else
        if [ ! -f "$NAG_KEY_FILE" ]; then
            touch "$NAG_KEY_FILE"
            chmod 644 "$NAG_KEY_FILE"
        fi
        # -q: don't echo the matched line; -F: the key is a literal string,
        # not a regex (it contains '+' and '/').
        grep -qF "$KEY" "$NAG_KEY_FILE"
        # Append only on exit status 1 ("not found"); status 2 means a read
        # error and appending would be wrong then.
        [ $? -eq 1 ] && echo "$KEY" >> "$NAG_KEY_FILE"
    fi
}
# Create the nagios system user if it does not exist, then install the key.
function nagios_user () {
    # BUG FIX: the original stored "true"/"false" in IS_NAG and tested
    # `[ ! $IS_NAG ]`, which is false for ANY non-empty string, so the
    # user was never created. Test getent's exit status directly instead.
    if ! getent passwd nagios > /dev/null; then
        $USERADD -m nagios
    fi
    check_authorized_keys
}
# Download the Emergya Nagios plugins tarball, unpack it into the nagios
# home directory, fix ownership, and clean up the download.
function get_plugins () {
    # Install wget when `which wget` found nothing. Quote the variable so
    # the test is well-formed rather than relying on `[ -z ]` edge behavior.
    [ -z "$WGET" ] && $INSTALL install -y wget
    wget --no-check-certificate "$NAG_PLUGINS_URL" -P /tmp
    tar xzf "/tmp/$PLUGINS" -C /home/nagios
    chown -R nagios:nagios /home/nagios
    rm -f "/tmp/$PLUGINS"
}
# Entry point (Inicio): create the user/key, then fetch the plugins.
nagios_user
get_plugins
# Fixed typo in the original log message ("Sucess").
logger "[INFO] Success..."
|
#!/usr/bin/env bash
#Napisz funkcje, ktora dla otrzymanej liczby N, wypisze N pierwszych liczb pierwszych.
#(EN: "write a function that, for a given N, prints the first N primes" —
# note that liczbyPierwszeV1 actually prints the primes SMALLER than N,
# not the first N primes.)
# czyPierwsza N -> exit status 1 when N is prime, 0 when composite.
# (Inverted from the shell convention on purpose: callers use
# `if ! czyPierwsza $i`.) Trial division over 6k±1 candidates.
czyPierwsza() {
    if [[ $1 -eq 2 ]] || [[ $1 -eq 3 ]]; then
        return 1
    fi
    if [[ $(($1 % 2)) -eq 0 ]] || [[ $(($1 % 3)) -eq 0 ]]; then
        return 0
    fi
    temp=5; w=2
    while [[ $((temp * temp)) -le $1 ]]; do
        # BUG FIX: the original tested $((temp % temp)), which is always 0,
        # so every candidate >= 25 was reported composite. Divide the
        # argument by the trial divisor instead.
        if [[ $(($1 % temp)) -eq 0 ]]; then
            return 0
        fi
        temp=$((temp + w))
        w=$((6 - w))
    done
    return 1
}
# Print all primes smaller than $1 on a single space-separated line.
liczbyPierwszeV1() {
    for (( i=2; i<$1; i++ )); do
        # czyPierwsza exits non-zero for primes, so `!` selects them.
        if ! czyPierwsza $i; then
            echo -n "$i "
        fi
    done
    echo ""
}
# Demo entry point: print the primes below 25.
main() {
    liczbyPierwszeV1 25
}
main "$@"
|
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
#! /bin/bash
# NOTE(review): this shebang sits BELOW the license header, so it is inert
# when the file is executed directly; it should be the very first line.

# Fill these placeholders in before running.
export MODEL=<MODEL_DIR_WITH_PRETRAINED_CHECKPOINT>
export LOGDIR=<YOUR_LOG_DIR>

mkdir -p $LOGDIR

# main script
# BUG FIX: the original ended with a trailing backslash after
# `net.weights=$MODEL`, a dangling line continuation that would swallow
# whatever line follows (or leave the command unterminated at EOF).
python ddp_main.py \
    net.is_train=True \
    net.backbone=sparseconv \
    data.dataset=sunrgbd \
    data.num_workers=8 \
    data.batch_size=64 \
    data.no_height=True \
    data.voxelization=True \
    data.voxel_size=0.025 \
    optimizer.learning_rate=0.001 \
    misc.log_dir=$LOGDIR \
    net.weights=$MODEL
|
# Helper for the tensorrt-opencv-python3 image: build, push, or run.
if [ "$1" == 'build' ]; then
    echo "[info] Build a docker image from the Dockerfile"
    docker build -t aminehy/tensorrt-opencv-python3:v1.0 .
elif [ "$1" == "push" ]; then
    echo "[info] Push the docker image to docker hub"
    docker push aminehy/tensorrt-opencv-python3:v1.0
elif [ "$1" == "run" ]; then
    echo "[info] Run the docker container"
    # SECURITY: `xhost +` disables X11 access control for ALL hosts;
    # prefer `xhost +local:docker` or an xauth cookie.
    xhost +
    # Quote $(pwd) so paths with spaces mount correctly.
    docker run -it --rm -v "$(pwd)":/workspace --runtime=nvidia -w /workspace -v /tmp/.X11-unix:/tmp/.X11-unix -e DISPLAY=unix$DISPLAY aminehy/tensorrt-opencv-python3:v1.0
else
    # Robustness: tell the caller what the valid subcommands are.
    echo "[usage] $0 {build|push|run}"
fi
|
import ReactGA from 'react-ga';
// Thin wrapper around react-ga so callers never import ReactGA directly.
const googleAnalyticsAction = {
  // Initialize GA with the given tracking key (cross-site cookie flags set)
  // and immediately record a pageview for the current URL.
  initGoogleAnalytics: async (key) => {
    ReactGA.initialize(key, {
      gaOptions: { cookieFlags: 'SameSite=None;Secure' },
    });
    ReactGA.pageview(window.location.pathname + window.location.search);
  },
};

export { googleAnalyticsAction };
|
#!/usr/bin/env node
/**
* Implements a utility to load firmware into Control Solutions devices using
* a MODBUS interface.
*
* Supports loading via serial port currently.
*
* In order to run this example you need
* npm install chalk minimist serialport
*
*
*/
// get application path
var path = require('path');
// misc utilities
var util = require('util');
// console text formatting
var chalk = require('chalk');
// command-line options will be available in the args variable
var args = require('minimist')(process.argv.slice(2));
// Module which manages the serial port
var SerialPort = require('serialport');
// Load the object that handles communication to the device
var Modbus = require('@csllc/cs-modbus');
// the instance of the modbus master
var master;
const Bootloader = require('..');
// Name this utility reports in its help text (file name without ".js").
const APP_NAME = path.basename(__filename, '.js');

// Default MODBUS master configuration. The serial connection object is
// filled in later via masterConfig.transport.connection.serialPort.
let masterConfig = {
  "transport": {
    "type": "rtu",
    // end-of-frame timeout — presumably milliseconds of line idle;
    // confirm against the @csllc/cs-modbus docs.
    "eofTimeout": 40,
    "connection": {
      "type": "serial",
    }
  },
  // One outstanding request at a time, no retries, 500 per-request timeout.
  "suppressTransactionErrors": true,
  "retryOnException": false,
  "maxConcurrentRequests": 1,
  "defaultUnit": 1,
  "defaultMaxRetries": 0,
  "defaultTimeout": 500
};
// handy function to update a single line showing progress
/**
 * Overwrite the current console line with a whole-number percentage
 * (e.g. "42%").
 *
 * Robustness fix: clearLine/cursorTo exist only on TTY streams; when
 * stdout is piped or redirected the original threw a TypeError. Fall back
 * to a plain line write in that case.
 *
 * @param {number} progress - percent complete, 0-100
 */
function printProgress(progress){
  const text = progress.toFixed(0) + '%';
  if (process.stdout.isTTY) {
    process.stdout.clearLine();
    process.stdout.cursorTo(0);
    process.stdout.write(text);
  } else {
    process.stdout.write(text + '\n');
  }
}
// If -h option, print help and exit without touching any hardware.
if( args.h ) {
  console.info( '\r------------------');
  console.info( 'Bootloader Utility\r');
  console.info( '\rCommand format:\r');
  console.info( APP_NAME +
    ' [-h -v] filename port baudrate\r');
  console.info( chalk.underline( '\rOptions\r'));
  console.info( ' -h This help output\r');
  console.info( ' -v Verbose output (for debugging)\r');
  console.info( chalk.underline( '\rResult\r'));
  console.info( 'Return value is 0 if successful\r');
  console.info( chalk.underline( 'Examples\r'));
  console.info( APP_NAME + ' upgrade.hex COM1 115200 (load file at 115200 baud)\r');
  process.exit(0);
}

// Parse the arguments: positional [filename, port, baud] with defaults.
let filename = args._[0] || 'Unknown.hex';
let portName = args._[1] || 'COM1';
let baud = args._[2] || 115200;

// Serial port options. NOTE(review): when supplied on the command line,
// baud is a string from minimist; SerialPort appears to accept either —
// confirm.
let options = {
  baudRate: baud,
  autoOpen: false,
};

// Check for the list ports option (-l): print detected ports and exit.
if( args.l ) {
  // Retrieve a list of all ports detected on the system
  SerialPort.list()
  .then( function( ports ) {
    if( ports ) {
      // ports is now an array of port descriptions.
      ports.forEach(function(port) {
        // print each port description
        console.log(port.path +
          ' : ' + port.pnpId + ' : ' + port.manufacturer );
      });
    }
    process.exit(0);
  })
  .catch( function (err ){
    console.error( err );
  });
}
else {
  let port;

  // Open the serial port we are going to use
  port = new SerialPort(
    portName,
    options);

  // Make serial port instance available for the modbus master.
  // createMaster() must run before port.open so the 'connected' handler
  // is attached before the transport sees the open event.
  masterConfig.transport.connection.serialPort = port;
  createMaster();

  // port errors
  port.on('error', function( err ) {
    console.error( chalk.underline.bold( err.message ));
  });

  // Open the port
  // the 'open' event is triggered when complete
  if( args.v ) {
    console.log( 'Opening ' + portName );
  }

  port.open(function(err) {
    if( err ) {
      console.log(err);
      process.exit(1);
    }
  });
}
/**
 * Create the MODBUS master from masterConfig and, once it reports
 * 'connected', drive the bootloader: load `filename` into the device and
 * exit the process with 0 on success or 1 on failure. With -v, hooks
 * verbose logging onto the connection and every transaction.
 */
function createMaster( ) {

  // Create the MODBUS master
  master = Modbus.createMaster( masterConfig );

  // Attach event handler for the port opening
  master.once( 'connected', function() {

    // Start communicating with the bootloader
    const bl = new Bootloader( master );

    // Set up the bootloader config accordingly
    // (DefaultPic18Target: device-specific memory layout; space 0 is the
    // memory space written — presumably program flash, confirm in the
    // bootloader package docs.)
    let target = new bl.BootloaderTarget.DefaultPic18Target();
    let config = {
      target: target,
      space: 0
    };

    // If verbose, catch events from the bootloader and display them
    if( args.v ) {
      // catch status message from bootloader for display
      bl.on('status', function( status ) {
        console.log( status );
      });

      // Catch progress counter
      bl.on('progress', function( percent ){
        printProgress( percent );
      });
    }

    // start trying to load the file
    bl.start( filename, config )
    .then( function() {
      if( args.v ) {
        console.log( chalk.green('Success!'));
      }
      process.exit( 0 );
    })
    .catch( function( err ) {
      if( args.v ) {
        console.error( err.message );
      }
      process.exit( 1 );
    });
  });

  // Hook events for logging if verbose mode
  if( args.v ) {
    var connection = master.getConnection();

    connection.on('open', function(){
      console.log( '[connection#open  ]');
    });

    connection.on('close', function(){
      console.log('[connection#close]');
    });

    connection.on('error', function(err){
      console.log('Error: ', '[connection#error] ' + err.message);
    });

    connection.on('write', function(data){
      console.log('[TX] ', util.inspect( data ) );
    });

    connection.on('data', function(data){
      console.log('[RX] ', util.inspect(data ));
    });

    var transport = master.getTransport();

    // catch event when a transaction starts. Hook the events for logging
    transport.on('request', function(transaction)
    {
      transaction.once('timeout', function()
      {
        console.log('[timeout]');
      });

      transaction.once('error', function(err)
      {
        console.log('[error] %s', err.message);
      });

      transaction.once('response', function(response)
      {
        if (response.isException())
        {
          console.log('[response] ', response.toString());
        }
        else
        {
          console.log(response.toString());
        }
      });

      transaction.once('complete', function(err, response)
      {
        if (err)
        {
          console.log('[complete] ', err.message);
        }
        else
        {
          console.log('[complete] %s', response);
        }
      });

      transaction.once('cancel', function()
      {
        console.log('[cancel]');
      });

      console.log( transaction.getRequest().toString());
    });
  }
}
|
<gh_stars>1-10
package com.vc.medium
object L861 {

  /**
   * LeetCode 861 "Score After Flipping Matrix".
   *
   * Greedy strategy on a 0/1 matrix (mutates `arr` in place):
   *  1. flip any row whose leading bit is 0, so every row's most
   *     significant bit becomes 1;
   *  2. flip any column where zeros outnumber ones;
   *  3. sum the rows interpreted as binary numbers.
   */
  def matrixScore(arr: Array[Array[Int]]): Int = {
    val rows = arr.length
    val cols = arr(0).length

    // XOR-flip helpers for a single row / column.
    def flipRow(r: Int): Unit =
      for (c <- 0 until cols) arr(r)(c) = arr(r)(c) ^ 1

    def flipCol(c: Int): Unit =
      for (r <- 0 until rows) arr(r)(c) = arr(r)(c) ^ 1

    // Step 1: make every row start with its most significant bit set.
    for (r <- 0 until rows if arr(r)(0) == 0) flipRow(r)

    // Step 2: per column, flip when ones are in the minority.
    for (c <- 0 until cols) {
      var ones = 0
      for (r <- 0 until rows) ones += arr(r)(c)
      if (ones * 2 < rows) flipCol(c)
    }

    // Step 3: accumulate each cell weighted by its binary place value.
    var score = 0
    for (r <- 0 until rows; c <- 0 until cols)
      score += arr(r)(c) * (1 << (cols - c - 1))
    score
  }
}
|
#!/usr/bin/env bash
# Tags: distributed
#
# Exercises `optimize_skip_unused_shards` against a Distributed table whose
# second shard is unavailable: queries whose PREWHERE/WHERE conditions prove
# only shard 0 is needed must succeed; the rest must fail with
# "All connection tries failed".
#
# FIX: several one-line pipelines below originally contained `2>&1 \ | grep`
# — a backslash-escaped space (an artifact of lost newlines) that passed a
# stray space argument to clickhouse-client. Restored plain pipelines.

CURDIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)
# shellcheck source=../shell_config.sh
. "$CURDIR"/../shell_config.sh

${CLICKHOUSE_CLIENT} --query "DROP TABLE IF EXISTS distributed_00754;"
${CLICKHOUSE_CLIENT} --query "DROP TABLE IF EXISTS mergetree_00754;"

${CLICKHOUSE_CLIENT} --query "
CREATE TABLE mergetree_00754 (a Int64, b Int64, c String) ENGINE = MergeTree ORDER BY (a, b);
"

${CLICKHOUSE_CLIENT} --query "
CREATE TABLE distributed_00754 AS mergetree_00754
ENGINE = Distributed(test_unavailable_shard, ${CLICKHOUSE_DATABASE}, mergetree_00754, jumpConsistentHash(a+b, 2));
"

${CLICKHOUSE_CLIENT} --query "INSERT INTO mergetree_00754 VALUES (0, 0, 'Hello');"
${CLICKHOUSE_CLIENT} --query "INSERT INTO mergetree_00754 VALUES (1, 0, 'World');"
${CLICKHOUSE_CLIENT} --query "INSERT INTO mergetree_00754 VALUES (0, 1, 'Hello');"
${CLICKHOUSE_CLIENT} --query "INSERT INTO mergetree_00754 VALUES (1, 1, 'World');"

# Should fail because the second shard is unavailable
${CLICKHOUSE_CLIENT} --query "SELECT count(*) FROM distributed_00754;" 2>&1 \
| grep -F -q "All connection tries failed" && echo 'OK' || echo 'FAIL'

# Should fail without setting `optimize_skip_unused_shards` = 1
${CLICKHOUSE_CLIENT} --query "SELECT count(*) FROM distributed_00754 PREWHERE a = 0 AND b = 0;" 2>&1 \
| grep -F -q "All connection tries failed" && echo 'OK' || echo 'FAIL'

# Should pass now
${CLICKHOUSE_CLIENT} -n --query="
SET optimize_skip_unused_shards = 1;
SELECT count(*) FROM distributed_00754 PREWHERE a = 0 AND b = 0;
"

# Should still fail because of matching unavailable shard
${CLICKHOUSE_CLIENT} -n --query="
SET optimize_skip_unused_shards = 1;
SELECT count(*) FROM distributed_00754 PREWHERE a = 2 AND b = 2;
" 2>&1 | grep -F -q "All connection tries failed" && echo 'OK' || echo 'FAIL'

# Try more complex expressions for constant folding - all should pass.

${CLICKHOUSE_CLIENT} -n --query="
SET optimize_skip_unused_shards = 1;
SELECT count(*) FROM distributed_00754 PREWHERE a = 1 AND a = 0 WHERE b = 0;
"

${CLICKHOUSE_CLIENT} -n --query="
SET optimize_skip_unused_shards = 1;
SELECT count(*) FROM distributed_00754 PREWHERE a = 1 WHERE b = 1 AND length(c) = 5;
"

${CLICKHOUSE_CLIENT} -n --query="
SET optimize_skip_unused_shards = 1;
SELECT count(*) FROM distributed_00754 PREWHERE a IN (0, 1) AND b IN (0, 1) WHERE c LIKE '%l%';
"

${CLICKHOUSE_CLIENT} -n --query="
SET optimize_skip_unused_shards = 1;
SELECT count(*) FROM distributed_00754 PREWHERE a IN (0, 1) WHERE b IN (0, 1) AND c LIKE '%l%';
"

${CLICKHOUSE_CLIENT} -n --query="
SET optimize_skip_unused_shards = 1;
SELECT count(*) FROM distributed_00754 PREWHERE a = 0 AND b = 0 OR a = 1 AND b = 1 WHERE c LIKE '%l%';
"

${CLICKHOUSE_CLIENT} -n --query="
SET optimize_skip_unused_shards = 1;
SELECT count(*) FROM distributed_00754 PREWHERE (a = 0 OR a = 1) WHERE (b = 0 OR b = 1);
"

# These should fail.

${CLICKHOUSE_CLIENT} -n --query="
SET optimize_skip_unused_shards = 1;
SELECT count(*) FROM distributed_00754 PREWHERE a = 0 AND b <= 1;
" 2>&1 | grep -F -q "All connection tries failed" && echo 'OK' || echo 'FAIL'

${CLICKHOUSE_CLIENT} -n --query="
SET optimize_skip_unused_shards = 1;
SELECT count(*) FROM distributed_00754 PREWHERE a = 0 WHERE c LIKE '%l%';
" 2>&1 | grep -F -q "All connection tries failed" && echo 'OK' || echo 'FAIL'

${CLICKHOUSE_CLIENT} -n --query="
SET optimize_skip_unused_shards = 1;
SELECT count(*) FROM distributed_00754 PREWHERE a = 0 OR a = 1 AND b = 0;
" 2>&1 | grep -F -q "All connection tries failed" && echo 'OK' || echo 'FAIL'

${CLICKHOUSE_CLIENT} -n --query="
SET optimize_skip_unused_shards = 1;
SELECT count(*) FROM distributed_00754 PREWHERE a = 0 AND b = 0 OR a = 2 AND b = 2;
" 2>&1 | grep -F -q "All connection tries failed" && echo 'OK' || echo 'FAIL'

${CLICKHOUSE_CLIENT} -n --query="
SET optimize_skip_unused_shards = 1;
SELECT count(*) FROM distributed_00754 PREWHERE a = 0 AND b = 0 OR c LIKE '%l%';
" 2>&1 | grep -F -q "All connection tries failed" && echo 'OK' || echo 'FAIL'

$CLICKHOUSE_CLIENT -q "DROP TABLE distributed_00754"
$CLICKHOUSE_CLIENT -q "DROP TABLE mergetree_00754"
|
package com.freekite.android.yard.adbcontest2;
import android.content.Context;
import android.support.test.InstrumentationRegistry;
import android.support.test.runner.AndroidJUnit4;
import com.ddchen.bridge.messchunkpc.Messchunkpc;
import com.ddchen.bridge.messchunkpc.Messchunkpc.Caller;
import com.ddchen.bridge.messchunkpc.Messchunkpc.HandleCallResult;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.junit.Test;
import org.junit.runner.RunWith;
import java.util.concurrent.CountDownLatch;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
/**
* Instrumentation test, which will execute on an Android device.
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
@RunWith(AndroidJUnit4.class)
public class ExampleInstrumentedTest {
    /**
     * Sanity check: the instrumentation's target context must report the
     * expected application package name.
     */
    @Test
    public void useAppContext() throws Exception {
        // Context of the app under test.
        Context appContext = InstrumentationRegistry.getTargetContext();

        assertEquals("com.freekite.android.yard.adbcontest2", appContext.getPackageName());
    }
}
|
<reponame>hector23rp/alba-node<filename>src/entity/artist.ts<gh_stars>0
import { Entity, Column, ObjectID, ObjectIdColumn } from "typeorm";
/**
 * TypeORM entity describing an artist, keyed by a MongoDB ObjectID
 * (@ObjectIdColumn implies a MongoDB-backed connection).
 */
@Entity()
export class Artist{
    // Primary document id, assigned by the database.
    @ObjectIdColumn()
    id: ObjectID;

    @Column()
    name: string;

    @Column()
    surname: string;

    // Optional biographical fields; may be absent for living or
    // little-documented artists.
    @Column()
    yearBorn?: number;

    @Column()
    yearDied?: number;

    @Column()
    country?: string;
}
#!/bin/bash
#set -x
#
# Copyright (c) 2013-2014, Microsoft Mobile
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the {organization} nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Shared helpers; presumably defines command_exists and the $YUI /
# $YUIVERSION variables used below — TODO confirm.
source tools/utils.sh

# Abort early when required tools are missing (command_exists is expected
# to return 1 for "not found").
command_exists wget
IS_WGET=$?
if [ $IS_WGET -eq 1 ]; then
  echo "Couldn't find wget, aborting."
  exit 1;
fi

command_exists unzip
IS_UNZIP=$?
if [ $IS_UNZIP -eq 1 ]; then
  echo "Couldn't find unzip, aborting."
  exit 1;
fi

echo "Fetching 3rdparties..."
git submodule update --init --recursive

# jquery-dragsort is not a git submodule; download the release zip once
# and unpack it (-n: never overwrite existing files).
if [ ! -e dragsort-0.5.2.zip ]; then
  wget -O dragsort-0.5.2.zip "http://download-codeplex.sec.s-msft.com/Download/Release?ProjectName=dragsort&DownloadId=887234&FileTime=130517762092170000&Build=21018"
fi
unzip -n -d 3rdparty/jquery-dragsort dragsort-0.5.2.zip

# Get YUI compressor for minifying JS and CSS files
if [ ! -f ./tools/$YUI ]; then
  wget --no-check-certificate -O ./tools/$YUI https://github.com/yui/yuicompressor/releases/download/v$YUIVERSION/$YUI
fi

# Web-root initialization needs root privileges.
sudo ./tools/init_wwwroot.sh
echo "Done!"
|
package com.estafet.boostcd.feature.api.openshift;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
@Configuration
@ComponentScan(basePackages = "com.estafet.boostcd.openshift")
public class OpenshiftConfiguration {
}
|
import { h, Component } from 'preact'
import style from './style.scss'
const AnimatedProfileBanner = (props) => {
return (
<div class={style.banner + ' ' + props.class}>
<svg version='1.1' baseProfile='basic' id='Layer_1' x='0px' y='0px' viewBox='0 0 426 198' xmlSpace='preserve'>
<line fill='none' stroke='#DFC17A' stroke-width='3' stroke-miterlimit='10' x1='1.5' y1='99.5' x2='100' y2='1'/>
<line fill='none' stroke='#DFC17A' stroke-width='3' stroke-miterlimit='10' x1='98.5' y1='196.5' x2='197' y2='98'/>
<line fill='none' stroke='#DFC17A' stroke-width='3' stroke-miterlimit='10' x1='1.5' y1='98' x2='100' y2='196.5'/>
<line fill='none' stroke='#DFC17A' stroke-width='3' stroke-miterlimit='10' x1='98.5' y1='1' x2='197.3' y2='99.8'/>
<line fill='none' stroke='#DFC17A' stroke-width='3' stroke-miterlimit='10' x1='30.5' y1='30.5' x2='30.5' y2='168.5'/>
<line fill='none' stroke='#DFC17A' stroke-width='3' stroke-miterlimit='10' x1='168.5' y1='167.5' x2='29.5' y2='167.5'/>
<line fill='none' stroke='#DFC17A' stroke-width='3' stroke-miterlimit='10' x1='167.5' y1='30.5' x2='167.5' y2='168.5'/>
<line fill='none' stroke='#DFC17A' stroke-width='3' stroke-miterlimit='10' x1='168' y1='31.5' x2='29.5' y2='31.5'/>
</svg>
</div>
)
}
export default AnimatedProfileBanner
|
#!/bin/bash
# Report the current size of the AUR build directory.
size ()
{
    echo -n "Size of AUR directory: "
    du -s -h "$AURDIR" | cut -f1
}

# Prompt for confirmation, then remove the given paths and re-report size.
#   $1   - human-readable label used in the prompts
#   $2   - flag(s) passed to rm (-rf for directories, -f for files)
#   rest - paths/globs to remove (globs expand at the call site, exactly
#          as the original inline rm commands did)
confirm_rm ()
{
    local label="$1"
    local flags="$2"
    shift 2
    echo -n "Remove $label? [y/n] "
    read X
    if [[ "$X" = "y" ]]; then
        echo -n " Removing $label... "
        rm $flags "$@"
        echo "[DONE]"
    fi
    size
}

AURDIR="/home/robert/SOFTWARE/AUR/OWN"

size
confirm_rm "src directories" -rf "$AURDIR/"*"/src"
confirm_rm "pkg directories" -rf "$AURDIR/"*"/pkg"
confirm_rm "src archives" -f "$AURDIR/"*"/"*".src.tar.gz"
confirm_rm "pkg archives" -f "$AURDIR/"*"/"*".pkg.tar.xz"
|
#!/usr/bin/env bash
# Author: Zhang Huangbin (zhb _at_ iredmail.org)
#---------------------------------------------------------------------
# This file is part of iRedMail, which is an open source mail server
# solution for Red Hat(R) Enterprise Linux, CentOS, Debian and Ubuntu.
#
# iRedMail is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# iRedMail is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with iRedMail. If not, see <http://www.gnu.org/licenses/>.
#---------------------------------------------------------------------
# -------------------------------------------------------
# Dovecot & dovecot-sieve.
# -------------------------------------------------------
dovecot_config()
{
ECHO_INFO "Configure Dovecot (POP3/IMAP/Managesieve/LMTP/LDA)."
backup_file ${DOVECOT_CONF}
# CentOS 7: Dovecot-2.2.36+
# CentOS 8: Dovecot-2.3.8+
# Debian 9: Dovecot-2.2.27
# Debian 10: Dovecot-2.3.4+
# Ubuntu 18.04: Dovecot-2.2.33
# Ubuntu 20.04: Dovecot-2.3.7+
# FreeBSD: Dovecot-2.3.0+
# OpenBSD 6.7: Dovecot-2.3.10+
ECHO_DEBUG "Copy sample Dovecot config file to ${DOVECOT_CONF}."
if [ X"${DOVECOT_VERSION}" == X'2.2' ]; then
cp ${SAMPLE_DIR}/dovecot/dovecot22.conf ${DOVECOT_CONF}
else
cp ${SAMPLE_DIR}/dovecot/dovecot23.conf ${DOVECOT_CONF}
fi
chmod 0664 ${DOVECOT_CONF}
ECHO_DEBUG "Configure dovecot: ${DOVECOT_CONF}."
perl -pi -e 's#PH_DOVECOT_CONF_INCLUDE_DIR#$ENV{DOVECOT_CONF_INCLUDE_DIR}#g' ${DOVECOT_CONF}
perl -pi -e 's#PH_SSL_DH1024_PARAM_FILE#$ENV{SSL_DH1024_PARAM_FILE}#g' ${DOVECOT_CONF}
# Listen address
if [ X"${IREDMAIL_HAS_IPV6}" == X'NO' ]; then
perl -pi -e 's#^(listen.*=).*#${1} \*#g' ${DOVECOT_CONF}
fi
# Service listen addresses and ports.
perl -pi -e 's#PH_LOCAL_ADDRESS#$ENV{LOCAL_ADDRESS}#g' ${DOVECOT_CONF}
#
# Listen on localhost
#
# Managesieve
perl -pi -e 's#PH_MANAGESIEVE_BIND_HOST#$ENV{MANAGESIEVE_BIND_HOST}#g' ${DOVECOT_CONF}
perl -pi -e 's#PH_MANAGESIEVE_BIND_PORT#$ENV{MANAGESIEVE_BIND_PORT}#g' ${DOVECOT_CONF}
# LMTP
perl -pi -e 's#PH_LMTP_BIND_ADDRESS#$ENV{LMTP_BIND_ADDRESS}#g' ${DOVECOT_CONF}
perl -pi -e 's#PH_LMTP_BIND_PORT#$ENV{LMTP_BIND_PORT}#' ${DOVECOT_CONF}
# Base directory.
perl -pi -e 's#PH_BASE_DIR#$ENV{DOVECOT_BASE_DIR}#' ${DOVECOT_CONF}
# base_dir is required on OpenBSD
[ X"${DISTRO}" == X'OPENBSD' ] && \
perl -pi -e 's/^#(base_dir.*)/${1}/' ${DOVECOT_CONF}
# Public mailbox directory
perl -pi -e 's#PH_PUBLIC_MAILBOX_DIR#$ENV{PUBLIC_MAILBOX_DIR}#' ${DOVECOT_CONF}
# Mailbox index directory
if [ -n "${MAILBOX_INDEX_DIR}" ]; then
perl -pi -e 's#^(mail_location.*:INDEX=)%Lh/Maildir/#${1}$ENV{MAILBOX_INDEX_DIR}/%Ld/%Ln/#' ${DOVECOT_CONF}
# Per-user seen flags. Maildir indexes are not shared. INDEXPVT requires v2.2+.
perl -pi -e 's#(location.*:INDEX=)(.*/Shared/.*)#${1}$ENV{MAILBOX_INDEX_DIR}/%Ld/%Ln/Shared/%%Ld/%%Ln:INDEXPVT=$ENV{MAILBOX_INDEX_DIR}/%Ld/%Ln/Shared/%%Ld/%%Ln#g' ${DOVECOT_CONF}
fi
# Provided services.
export DOVECOT_PROTOCOLS
perl -pi -e 's#PH_PROTOCOLS#$ENV{DOVECOT_PROTOCOLS}#' ${DOVECOT_CONF}
# Set correct uid/gid.
perl -pi -e 's#PH_MAIL_UID#$ENV{SYS_USER_VMAIL_UID}#' ${DOVECOT_CONF}
perl -pi -e 's#PH_MAIL_GID#$ENV{SYS_GROUP_VMAIL_GID}#' ${DOVECOT_CONF}
perl -pi -e 's#PH_FIRST_VALID_UID#$ENV{SYS_USER_VMAIL_UID}#' ${DOVECOT_CONF}
perl -pi -e 's#PH_LAST_VALID_UID#$ENV{SYS_USER_VMAIL_UID}#' ${DOVECOT_CONF}
# syslog and log file.
perl -pi -e 's#PH_IREDMAIL_SYSLOG_FACILITY#$ENV{IREDMAIL_SYSLOG_FACILITY}#' ${DOVECOT_CONF}
perl -pi -e 's#PH_LOG_PATH#$ENV{DOVECOT_LOG_FILE}#' ${DOVECOT_CONF}
# Authentication related settings.
# Append this domain name if client gives empty realm.
export FIRST_DOMAIN
perl -pi -e 's#PH_AUTH_DEFAULT_REALM#$ENV{FIRST_DOMAIN}#' ${DOVECOT_CONF}
# service auth {}
perl -pi -e 's#PH_DOVECOT_AUTH_USER#$ENV{SYS_USER_POSTFIX}#' ${DOVECOT_CONF}
perl -pi -e 's#PH_DOVECOT_AUTH_GROUP#$ENV{SYS_GROUP_POSTFIX}#' ${DOVECOT_CONF}
perl -pi -e 's#PH_AUTH_MASTER_USER#$ENV{SYS_USER_VMAIL}#' ${DOVECOT_CONF}
perl -pi -e 's#PH_AUTH_MASTER_GROUP#$ENV{SYS_GROUP_VMAIL}#' ${DOVECOT_CONF}
# service stats {}
perl -pi -e 's#PH_DOVECOT_SERVICE_STATS_PORT#$ENV{DOVECOT_SERVICE_STATS_PORT}#' ${DOVECOT_CONF}
perl -pi -e 's#PH_DOVECOT_SERVICE_STATS_USER#$ENV{DOVECOT_SERVICE_STATS_USER}#' ${DOVECOT_CONF}
perl -pi -e 's#PH_DOVECOT_SERVICE_STATS_GROUP#$ENV{DOVECOT_SERVICE_STATS_GROUP}#' ${DOVECOT_CONF}
# Virtual mail accounts.
# Reference: http://wiki2.dovecot.org/AuthDatabase/LDAP
if [ X"${BACKEND}" == X'OPENLDAP' ]; then
perl -pi -e 's#PH_USERDB_ARGS#$ENV{DOVECOT_LDAP_CONF}#' ${DOVECOT_CONF}
perl -pi -e 's#PH_USERDB_DRIVER#ldap#' ${DOVECOT_CONF}
perl -pi -e 's#PH_PASSDB_ARGS#$ENV{DOVECOT_LDAP_CONF}#' ${DOVECOT_CONF}
perl -pi -e 's#PH_PASSDB_DRIVER#ldap#' ${DOVECOT_CONF}
elif [ X"${BACKEND}" == X'MYSQL' ]; then
# MySQL.
perl -pi -e 's#PH_USERDB_ARGS#$ENV{DOVECOT_MYSQL_CONF}#' ${DOVECOT_CONF}
perl -pi -e 's#PH_USERDB_DRIVER#sql#' ${DOVECOT_CONF}
perl -pi -e 's#PH_PASSDB_ARGS#$ENV{DOVECOT_MYSQL_CONF}#' ${DOVECOT_CONF}
perl -pi -e 's#PH_PASSDB_DRIVER#sql#' ${DOVECOT_CONF}
elif [ X"${BACKEND}" == X'PGSQL' ]; then
# PostgreSQL.
perl -pi -e 's#PH_USERDB_ARGS#$ENV{DOVECOT_PGSQL_CONF}#' ${DOVECOT_CONF}
perl -pi -e 's#PH_USERDB_DRIVER#sql#' ${DOVECOT_CONF}
perl -pi -e 's#PH_PASSDB_ARGS#$ENV{DOVECOT_PGSQL_CONF}#' ${DOVECOT_CONF}
perl -pi -e 's#PH_PASSDB_DRIVER#sql#' ${DOVECOT_CONF}
fi
# Master user.
perl -pi -e 's#PH_DOVECOT_MASTER_USER_PASSWORD_FILE#$ENV{DOVECOT_MASTER_USER_PASSWORD_FILE}#' ${DOVECOT_CONF}
touch ${DOVECOT_MASTER_USER_PASSWORD_FILE}
chown ${SYS_USER_DOVECOT}:${SYS_GROUP_DOVECOT} ${DOVECOT_MASTER_USER_PASSWORD_FILE}
chmod 0500 ${DOVECOT_MASTER_USER_PASSWORD_FILE}
perl -pi -e 's#PH_DOVECOT_AUTH_MASTER_PATH#$ENV{DOVECOT_AUTH_MASTER_PATH}#' ${DOVECOT_CONF}
# Quota.
perl -pi -e 's#PH_QUOTA_TYPE#$ENV{DOVECOT_QUOTA_TYPE}#' ${DOVECOT_CONF}
perl -pi -e 's#PH_QUOTA_WARNING_SCRIPT#$ENV{DOVECOT_QUOTA_WARNING_SCRIPT}#' ${DOVECOT_CONF}
perl -pi -e 's#PH_QUOTA_WARNING_USER#$ENV{SYS_USER_VMAIL}#' ${DOVECOT_CONF}
perl -pi -e 's#PH_QUOTA_WARNING_GROUP#$ENV{SYS_GROUP_VMAIL}#' ${DOVECOT_CONF}
perl -pi -e 's#PH_DOVECOT_SERVICE_QUOTA_STATUS_BIND_ADDRESS#$ENV{DOVECOT_SERVICE_QUOTA_STATUS_BIND_ADDRESS}#' ${DOVECOT_CONF}
perl -pi -e 's#PH_DOVECOT_SERVICE_QUOTA_STATUS_PORT#$ENV{DOVECOT_SERVICE_QUOTA_STATUS_PORT}#' ${DOVECOT_CONF}
# Quota dict.
perl -pi -e 's#PH_SERVICE_DICT_USER#$ENV{SYS_USER_VMAIL}#' ${DOVECOT_CONF}
perl -pi -e 's#PH_SERVICE_DICT_GROUP#$ENV{SYS_GROUP_VMAIL}#' ${DOVECOT_CONF}
perl -pi -e 's#PH_DOVECOT_SQL_DBN#$ENV{DOVECOT_SQL_DBN}#' ${DOVECOT_CONF}
perl -pi -e 's#PH_DOVECOT_REALTIME_QUOTA_CONF#$ENV{DOVECOT_REALTIME_QUOTA_CONF}#' ${DOVECOT_CONF}
# Sieve.
perl -pi -e 's#PH_SIEVE_DIR#$ENV{SIEVE_DIR}#' ${DOVECOT_CONF}
perl -pi -e 's#PH_DOVECOT_SIEVE_LOG_FILE#$ENV{DOVECOT_SIEVE_LOG_FILE}#' ${DOVECOT_CONF}
perl -pi -e 's#PH_SIEVE_RULE_FILENAME#$ENV{SIEVE_RULE_FILENAME}#' ${DOVECOT_CONF}
perl -pi -e 's#PH_GLOBAL_SIEVE_FILE#$ENV{DOVECOT_GLOBAL_SIEVE_FILE}#' ${DOVECOT_CONF}
# LMTP
perl -pi -e 's#PH_DOVECOT_LMTP_LOG_FILE#$ENV{DOVECOT_LMTP_LOG_FILE}#' ${DOVECOT_CONF}
# SSL.
perl -pi -e 's#PH_SSL_CERT#$ENV{SSL_CERT_FILE}#' ${DOVECOT_CONF}
perl -pi -e 's#PH_SSL_KEY#$ENV{SSL_KEY_FILE}#' ${DOVECOT_CONF}
perl -pi -e 's#PH_SSL_CIPHERS#$ENV{SSL_CIPHERS}#' ${DOVECOT_CONF}
# Distros ships newer openssl which doesn't support SSLv2 anymore.
if [ X"${DISTRO}" == X'UBUNTU' -a X"${DISTRO_CODENAME}" == X'bionic' ] \
|| [ X"${DISTRO}" == X'RHEL' -a X"${DISTRO_VERSION}" == X'8' ]; then
perl -pi -e 's#^(ssl_protocols).*#ssl_protocols = !SSLv3#' ${DOVECOT_CONF}
fi
# PHP on CentOS 7 doesn't support TLSv1.
if [ X"${DISTRO}" == X'RHEL' -a X"${DISTRO_VERSION}" == X'7' ]; then
perl -pi -e 's#^(ssl_protocols.*) !TLSv1( .*)#${1}${2}#' ${DOVECOT_CONF}
fi
perl -pi -e 's#PH_POSTFIX_CHROOT_DIR#$ENV{POSTFIX_CHROOT_DIR}#' ${DOVECOT_CONF}
# Generate dovecot quota warning script.
mkdir -p $(dirname ${DOVECOT_QUOTA_WARNING_SCRIPT}) >> ${INSTALL_LOG} 2>&1
backup_file ${DOVECOT_QUOTA_WARNING_SCRIPT}
rm -f ${DOVECOT_QUOTA_WARNING_SCRIPT} >> ${INSTALL_LOG} 2>&1
cp -f ${SAMPLE_DIR}/dovecot/dovecot2-quota-warning.sh ${DOVECOT_QUOTA_WARNING_SCRIPT}
if [ X"${DOVECOT_QUOTA_TYPE}" == X'maildir' ]; then
perl -pi -e 's#(.*)(-o.*plugin.*)#${1}#' ${DOVECOT_QUOTA_WARNING_SCRIPT}
fi
# on OpenBSD: get FQDN with command 'hostname', not 'hostname -f'.
if [ X"${DISTRO}" == X'OPENBSD' ]; then
perl -pi -e 's#hostname -f#hostname#g' ${DOVECOT_QUOTA_WARNING_SCRIPT}
fi
export DOVECOT_DELIVER_BIN HOSTNAME
perl -pi -e 's#PH_DOVECOT_DELIVER_BIN#$ENV{DOVECOT_DELIVER_BIN}#' ${DOVECOT_QUOTA_WARNING_SCRIPT}
perl -pi -e 's#PH_HOSTNAME#$ENV{HOSTNAME}#' ${DOVECOT_QUOTA_WARNING_SCRIPT}
chown root ${DOVECOT_QUOTA_WARNING_SCRIPT}
chmod 0755 ${DOVECOT_QUOTA_WARNING_SCRIPT}
# Use '/usr/local/bin/bash' as shabang line, otherwise quota warning will fail.
if [ X"${DISTRO}" == X'FREEBSD' ]; then
perl -pi -e 's#(.*)/usr/bin/env bash.*#${1}/usr/local/bin/bash#' ${DOVECOT_QUOTA_WARNING_SCRIPT}
fi
if [ X"${BACKEND}" == X'OPENLDAP' ]; then
backup_file ${DOVECOT_LDAP_CONF}
cp -f ${SAMPLE_DIR}/dovecot/dovecot-ldap.conf ${DOVECOT_LDAP_CONF}
perl -pi -e 's/^#(iterate_.*)/${1}/' ${DOVECOT_LDAP_CONF}
perl -pi -e 's#PH_LDAP_SERVER_HOST#$ENV{LDAP_SERVER_HOST}#' ${DOVECOT_LDAP_CONF}
perl -pi -e 's#PH_LDAP_SERVER_PORT#$ENV{LDAP_SERVER_PORT}#' ${DOVECOT_LDAP_CONF}
perl -pi -e 's#PH_LDAP_BIND_VERSION#$ENV{LDAP_BIND_VERSION}#' ${DOVECOT_LDAP_CONF}
perl -pi -e 's#PH_LDAP_BINDDN#$ENV{LDAP_BINDDN}#' ${DOVECOT_LDAP_CONF}
perl -pi -e 's#PH_LDAP_BINDPW#$ENV{LDAP_BINDPW}#' ${DOVECOT_LDAP_CONF}
perl -pi -e 's#PH_LDAP_BASEDN#$ENV{LDAP_BASEDN}#' ${DOVECOT_LDAP_CONF}
# Set file permission.
chmod 0500 ${DOVECOT_LDAP_CONF}
elif [ X"${BACKEND}" == X'MYSQL' ]; then
backup_file ${DOVECOT_MYSQL_CONF}
cp -f ${SAMPLE_DIR}/dovecot/dovecot-sql.conf ${DOVECOT_MYSQL_CONF}
perl -pi -e 's/^#(iterate_.*)/${1}/' ${DOVECOT_MYSQL_CONF}
perl -pi -e 's#(.*mailbox.)(enable.*Lc)(=1)#${1}`${2}`${3}#' ${DOVECOT_MYSQL_CONF}
perl -pi -e 's#PH_SQL_SERVER_ADDRESS#$ENV{SQL_SERVER_ADDRESS}#' ${DOVECOT_MYSQL_CONF}
perl -pi -e 's#PH_SQL_SERVER_PORT#$ENV{SQL_SERVER_PORT}#' ${DOVECOT_MYSQL_CONF}
perl -pi -e 's#PH_SQL_DRIVER#mysql#' ${DOVECOT_MYSQL_CONF}
perl -pi -e 's#PH_VMAIL_DB_NAME#$ENV{VMAIL_DB_NAME}#' ${DOVECOT_MYSQL_CONF}
perl -pi -e 's#PH_VMAIL_DB_BIND_USER#$ENV{VMAIL_DB_BIND_USER}#' ${DOVECOT_MYSQL_CONF}
perl -pi -e 's#PH_VMAIL_DB_BIND_PASSWD#$ENV{VMAIL_DB_BIND_PASSWD}#' ${DOVECOT_MYSQL_CONF}
# Set file permission.
chmod 0550 ${DOVECOT_MYSQL_CONF}
elif [ X"${BACKEND}" == X'PGSQL' ]; then
backup_file ${DOVECOT_PGSQL_CONF}
cp -f ${SAMPLE_DIR}/dovecot/dovecot-sql.conf ${DOVECOT_PGSQL_CONF}
perl -pi -e 's#(.*mailbox.)(enable.*Lc)(=1)#${1}"${2}"${3}#' ${DOVECOT_PGSQL_CONF}
perl -pi -e 's/^#(iterate_.*)/${1}/' ${DOVECOT_PGSQL_CONF}
perl -pi -e 's#PH_SQL_DRIVER#pgsql#' ${DOVECOT_PGSQL_CONF}
perl -pi -e 's#PH_SQL_SERVER_ADDRESS#$ENV{SQL_SERVER_ADDRESS}#' ${DOVECOT_PGSQL_CONF}
perl -pi -e 's#PH_SQL_SERVER_PORT#$ENV{SQL_SERVER_PORT}#' ${DOVECOT_PGSQL_CONF}
perl -pi -e 's#PH_VMAIL_DB_NAME#$ENV{VMAIL_DB_NAME}#' ${DOVECOT_PGSQL_CONF}
perl -pi -e 's#PH_VMAIL_DB_BIND_USER#$ENV{VMAIL_DB_BIND_USER}#' ${DOVECOT_PGSQL_CONF}
perl -pi -e 's#PH_VMAIL_DB_BIND_PASSWD#$ENV{VMAIL_DB_BIND_PASSWD}#' ${DOVECOT_PGSQL_CONF}
# Set file permission.
chmod 0550 ${DOVECOT_PGSQL_CONF}
fi
backup_file ${DOVECOT_LAST_LOGIN_CONF} ${DOVECOT_SHARE_FOLDER_CONF}
# Track last login.
cp ${SAMPLE_DIR}/dovecot/dovecot-last-login.conf ${DOVECOT_LAST_LOGIN_CONF}
cp ${SAMPLE_DIR}/dovecot/dovecot-used-quota.conf ${DOVECOT_REALTIME_QUOTA_CONF}
cp ${SAMPLE_DIR}/dovecot/dovecot-share-folder.conf ${DOVECOT_SHARE_FOLDER_CONF}
chown ${SYS_USER_DOVECOT}:${SYS_GROUP_DOVECOT} \
${DOVECOT_LAST_LOGIN_CONF} \
${DOVECOT_REALTIME_QUOTA_CONF} \
${DOVECOT_SHARE_FOLDER_CONF}
chmod 0500 ${DOVECOT_LAST_LOGIN_CONF} \
${DOVECOT_REALTIME_QUOTA_CONF} \
${DOVECOT_SHARE_FOLDER_CONF}
perl -pi -e 's#PH_DOVECOT_SHARE_FOLDER_CONF#$ENV{DOVECOT_SHARE_FOLDER_CONF}#' ${DOVECOT_CONF}
perl -pi -e 's#PH_DOVECOT_LAST_LOGIN_CONF#$ENV{DOVECOT_LAST_LOGIN_CONF}#' ${DOVECOT_CONF}
# Replace place holders in sample config file
perl -pi -e 's#PH_SQL_SERVER_ADDRESS#$ENV{SQL_SERVER_ADDRESS}#' ${DOVECOT_REALTIME_QUOTA_CONF} \
${DOVECOT_REALTIME_QUOTA_CONF} \
${DOVECOT_LAST_LOGIN_CONF}
perl -pi -e 's#PH_SQL_SERVER_PORT#$ENV{SQL_SERVER_PORT}#' ${DOVECOT_REALTIME_QUOTA_CONF} \
${DOVECOT_REALTIME_QUOTA_CONF} \
${DOVECOT_LAST_LOGIN_CONF}
# Realtime quota and last login
if [ X"${BACKEND}" == X'OPENLDAP' ]; then
export realtime_quota_db_name="${IREDADMIN_DB_NAME}"
export realtime_quota_db_user="${IREDADMIN_DB_USER}"
export realtime_quota_db_passwd="${IREDADMIN_DB_PASSWD}"
elif [ X"${BACKEND}" == X'MYSQL' ]; then
export realtime_quota_db_name="${VMAIL_DB_NAME}"
export realtime_quota_db_user="${VMAIL_DB_ADMIN_USER}"
export realtime_quota_db_passwd="${VMAIL_DB_ADMIN_PASSWD}"
elif [ X"${BACKEND}" == X'PGSQL' ]; then
export realtime_quota_db_name="${VMAIL_DB_NAME}"
export realtime_quota_db_user="${VMAIL_DB_BIND_USER}"
export realtime_quota_db_passwd="${VMAIL_DB_BIND_PASSWD}"
fi
# realtime quota and last login
perl -pi -e 's#PH_REALTIME_QUOTA_DB_NAME#$ENV{realtime_quota_db_name}#' ${DOVECOT_REALTIME_QUOTA_CONF} ${DOVECOT_LAST_LOGIN_CONF}
perl -pi -e 's#PH_REALTIME_QUOTA_DB_USER#$ENV{realtime_quota_db_user}#' ${DOVECOT_REALTIME_QUOTA_CONF} ${DOVECOT_LAST_LOGIN_CONF}
perl -pi -e 's#PH_REALTIME_QUOTA_DB_PASSWORD#$ENV{realtime_quota_db_passwd}#' ${DOVECOT_REALTIME_QUOTA_CONF} ${DOVECOT_LAST_LOGIN_CONF}
perl -pi -e 's#PH_DOVECOT_REALTIME_QUOTA_TABLE#$ENV{DOVECOT_REALTIME_QUOTA_TABLE}#' ${DOVECOT_REALTIME_QUOTA_CONF}
if [ X"${BACKEND}" == X'OPENLDAP' ]; then
export share_folder_db_name="${IREDADMIN_DB_NAME}"
export share_folder_db_user="${IREDADMIN_DB_USER}"
export share_folder_db_passwd="${IREDADMIN_DB_PASSWD}"
elif [ X"${BACKEND}" == X'MYSQL' -o X"${BACKEND}" == X'PGSQL' ]; then
export share_folder_db_name="${VMAIL_DB_NAME}"
export share_folder_db_user="${VMAIL_DB_ADMIN_USER}"
export share_folder_db_passwd="${VMAIL_DB_ADMIN_PASSWD}"
fi
# Replace place holders in sample config file
perl -pi -e 's#PH_SQL_SERVER_ADDRESS#$ENV{SQL_SERVER_ADDRESS}#' ${DOVECOT_SHARE_FOLDER_CONF}
perl -pi -e 's#PH_SQL_SERVER_PORT#$ENV{SQL_SERVER_PORT}#' ${DOVECOT_SHARE_FOLDER_CONF}
perl -pi -e 's#PH_DOVECOT_SHARE_FOLDER_DB_NAME#$ENV{share_folder_db_name}#' ${DOVECOT_SHARE_FOLDER_CONF}
perl -pi -e 's#PH_DOVECOT_SHARE_FOLDER_DB_USER#$ENV{share_folder_db_user}#' ${DOVECOT_SHARE_FOLDER_CONF}
perl -pi -e 's#PH_DOVECOT_SHARE_FOLDER_DB_PASSWORD#$ENV{share_folder_db_passwd}#' ${DOVECOT_SHARE_FOLDER_CONF}
perl -pi -e 's#PH_DOVECOT_SHARE_FOLDER_DB_TABLE#$ENV{DOVECOT_SHARE_FOLDER_DB_TABLE}#' ${DOVECOT_SHARE_FOLDER_CONF}
perl -pi -e 's#PH_DOVECOT_SHARE_FOLDER_ANYONE_DB_TABLE#$ENV{DOVECOT_SHARE_FOLDER_ANYONE_DB_TABLE}#' ${DOVECOT_SHARE_FOLDER_CONF}
ECHO_DEBUG "Copy global sieve filter rule file: ${DOVECOT_GLOBAL_SIEVE_FILE}."
cp -f ${SAMPLE_DIR}/dovecot/dovecot.sieve ${DOVECOT_GLOBAL_SIEVE_FILE}
chown ${SYS_USER_VMAIL}:${SYS_GROUP_VMAIL} ${DOVECOT_GLOBAL_SIEVE_FILE}
chmod 0500 ${DOVECOT_GLOBAL_SIEVE_FILE}
ECHO_DEBUG "Enable dovecot SASL support in postfix: ${POSTFIX_FILE_MAIN_CF}."
cat ${SAMPLE_DIR}/postfix/main.cf.dovecot >> ${POSTFIX_FILE_MAIN_CF}
perl -pi -e 's#PH_DOVECOT_SASL_AUTH_SOCKET#$ENV{DOVECOT_SASL_AUTH_SOCKET}#g' ${POSTFIX_FILE_MAIN_CF}
perl -pi -e 's#PH_DOVECOT_SASL_AUTH_PATH#$ENV{_dovecot_sasl_auth_path}#g' ${POSTFIX_FILE_MAIN_CF}
perl -pi -e 's#PH_DOVECOT_DELIVER_BIN#$ENV{DOVECOT_DELIVER_BIN}#g' ${POSTFIX_FILE_MAIN_CF}
perl -pi -e 's#PH_TRANSPORT#$ENV{TRANSPORT}#g' ${POSTFIX_FILE_MAIN_CF}
ECHO_DEBUG "Create directory for Dovecot plugin: Expire."
dovecot_expire_dict_dir="$(dirname ${DOVECOT_EXPIRE_DICT_BDB})"
mkdir -p ${dovecot_expire_dict_dir} && \
chown -R ${SYS_USER_DOVECOT}:${SYS_GROUP_DOVECOT} ${dovecot_expire_dict_dir} && \
chmod -R 0750 ${dovecot_expire_dict_dir}
# Prevent read-only files forced by systemd.
if [ X"${DISTRO}" == X'RHEL' ]; then
mkdir -p /etc/systemd/system/dovecot.service.d >> ${INSTALL_LOG} 2>&1
cp -f ${SAMPLE_DIR}/dovecot/systemd/override.conf /etc/systemd/system/dovecot.service.d >> ${INSTALL_LOG} 2>&1
fi
# Unfortunately, PostgreSQL doesn't support tracking last login.
if [ X"${BACKEND}" == X'PGSQL' ]; then
perl -pi -e 's/.*# Track user last login//' ${DOVECOT_CONF}
perl -pi -e 's#.*last_login_.*##' ${DOVECOT_CONF}
perl -pi -e 's#(.*mail_plugins = .*)last_login(.*)#${1}${2}#' ${DOVECOT_CONF}
fi
cat >> ${TIP_FILE} <<EOF
Dovecot:
* Configuration files:
- ${DOVECOT_CONF}
- ${DOVECOT_LDAP_CONF} (For OpenLDAP backend)
- ${DOVECOT_MYSQL_CONF} (For MySQL backend)
- ${DOVECOT_PGSQL_CONF} (For PostgreSQL backend)
- ${DOVECOT_REALTIME_QUOTA_CONF} (For real-time quota usage)
- ${DOVECOT_SHARE_FOLDER_CONF} (For IMAP sharing folder)
* Syslog config file:
- ${SYSLOG_CONF_DIR}/1-iredmail-dovecot.conf (present if rsyslog >= 8.x)
* RC script: ${DIR_RC_SCRIPTS}/${DOVECOT_RC_SCRIPT_NAME}
* Log files:
- ${DOVECOT_LOG_FILE}
- ${DOVECOT_SIEVE_LOG_FILE}
- ${DOVECOT_LMTP_LOG_FILE}
- ${DOVECOT_SYSLOG_FILE_LDA} (present if rsyslog >= 8.x)
- ${DOVECOT_SYSLOG_FILE_IMAP} (present if rsyslog >= 8.x)
- ${DOVECOT_SYSLOG_FILE_POP3} (present if rsyslog >= 8.x)
- ${DOVECOT_SYSLOG_FILE_SIEVE} (present if rsyslog >= 8.x)
* See also:
- ${DOVECOT_GLOBAL_SIEVE_FILE}
- Logrotate config file: ${DOVECOT_LOGROTATE_FILE}
EOF
echo 'export status_dovecot_config="DONE"' >> ${STATUS_FILE}
}
dovecot_log() {
    ECHO_DEBUG "Configure Dovecot logging."

    # Main log directory and log file, owned by the syslog daemon account
    # so the syslog service can write to them.
    mkdir -p ${DOVECOT_LOG_DIR} >> ${INSTALL_LOG} 2>&1
    chown ${SYS_USER_SYSLOG}:${SYS_GROUP_SYSLOG} ${DOVECOT_LOG_DIR}

    touch ${DOVECOT_LOG_FILE}
    chown ${SYS_USER_SYSLOG}:${SYS_GROUP_SYSLOG} ${DOVECOT_LOG_FILE}
    chmod 0640 ${DOVECOT_LOG_FILE}

    ECHO_DEBUG "Generate modular syslog and log rotate config files for dovecot log files."
    if [ X"${KERNEL_NAME}" == X'LINUX' ]; then
        # Use rsyslog.
        # Copy rsyslog config file used to filter Dovecot log
        cp ${SAMPLE_DIR}/rsyslog.d/1-iredmail-dovecot.conf ${SYSLOG_CONF_DIR}

        # Replace PH_* placeholders in the copied rsyslog config with the
        # exported environment values.
        perl -pi -e 's#PH_IREDMAIL_SYSLOG_FACILITY#$ENV{IREDMAIL_SYSLOG_FACILITY}#g' ${SYSLOG_CONF_DIR}/1-iredmail-dovecot.conf
        perl -pi -e 's#PH_DOVECOT_LOG_FILE#$ENV{DOVECOT_LOG_FILE}#g' ${SYSLOG_CONF_DIR}/1-iredmail-dovecot.conf
        perl -pi -e 's#PH_DOVECOT_SYSLOG_FILE_LDA#$ENV{DOVECOT_SYSLOG_FILE_LDA}#g' ${SYSLOG_CONF_DIR}/1-iredmail-dovecot.conf
        perl -pi -e 's#PH_DOVECOT_SYSLOG_FILE_IMAP#$ENV{DOVECOT_SYSLOG_FILE_IMAP}#g' ${SYSLOG_CONF_DIR}/1-iredmail-dovecot.conf
        perl -pi -e 's#PH_DOVECOT_SYSLOG_FILE_POP3#$ENV{DOVECOT_SYSLOG_FILE_POP3}#g' ${SYSLOG_CONF_DIR}/1-iredmail-dovecot.conf
        perl -pi -e 's#PH_DOVECOT_SYSLOG_FILE_SIEVE#$ENV{DOVECOT_SYSLOG_FILE_SIEVE}#g' ${SYSLOG_CONF_DIR}/1-iredmail-dovecot.conf

        # Although no need to create log files manually, but fail2ban will skip
        # the log file which doesn't exist while fail2ban starts up, so we
        # create it manually to avoid this.
        for f in ${DOVECOT_LOG_FILE} \
            ${DOVECOT_SYSLOG_FILE_LDA} \
            ${DOVECOT_SYSLOG_FILE_IMAP} \
            ${DOVECOT_SYSLOG_FILE_POP3} \
            ${DOVECOT_SYSLOG_FILE_SIEVE}; do
            ECHO_DEBUG "Create dovecot log file: ${f}."
            touch ${f}
            chown ${SYS_USER_SYSLOG}:${SYS_GROUP_SYSLOG} ${f}
            chmod 0640 ${f}
        done

        # Install the logrotate policy for the Dovecot log directory.
        cp -f ${SAMPLE_DIR}/logrotate/dovecot ${DOVECOT_LOGROTATE_FILE}
        chmod 0644 ${DOVECOT_LOGROTATE_FILE}
        perl -pi -e 's#PH_DOVECOT_LOG_DIR#$ENV{DOVECOT_LOG_DIR}#g' ${DOVECOT_LOGROTATE_FILE}
        perl -pi -e 's#PH_SYSLOG_POSTROTATE_CMD#$ENV{SYSLOG_POSTROTATE_CMD}#g' ${DOVECOT_LOGROTATE_FILE}
    elif [ X"${KERNEL_NAME}" == X'FREEBSD' ]; then
        #
        # modular syslog config file
        #
        cp -f ${SAMPLE_DIR}/freebsd/syslog.d/dovecot.conf ${SYSLOG_CONF_DIR} >> ${INSTALL_LOG} 2>&1
        perl -pi -e 's#PH_IREDMAIL_SYSLOG_FACILITY#$ENV{IREDMAIL_SYSLOG_FACILITY}#g' ${SYSLOG_CONF_DIR}/dovecot.conf
        perl -pi -e 's#PH_DOVECOT_LOG_FILE#$ENV{DOVECOT_LOG_FILE}#g' ${SYSLOG_CONF_DIR}/dovecot.conf

        #
        # modular log rotate config file (newsyslog on FreeBSD)
        #
        cp -f ${SAMPLE_DIR}/freebsd/newsyslog.conf.d/dovecot ${DOVECOT_LOGROTATE_FILE}
        perl -pi -e 's#PH_DOVECOT_MASTER_PID#$ENV{DOVECOT_MASTER_PID}#g' ${DOVECOT_LOGROTATE_FILE}
        perl -pi -e 's#PH_DOVECOT_LOG_FILE#$ENV{DOVECOT_LOG_FILE}#g' ${DOVECOT_LOGROTATE_FILE}
        perl -pi -e 's#PH_DOVECOT_SIEVE_LOG_FILE#$ENV{DOVECOT_SIEVE_LOG_FILE}#g' ${DOVECOT_LOGROTATE_FILE}
        perl -pi -e 's#PH_DOVECOT_LMTP_LOG_FILE#$ENV{DOVECOT_LMTP_LOG_FILE}#g' ${DOVECOT_LOGROTATE_FILE}
        perl -pi -e 's#PH_SYS_USER_SYSLOG#$ENV{SYS_USER_SYSLOG}#g' ${DOVECOT_LOGROTATE_FILE}
        perl -pi -e 's#PH_SYS_GROUP_SYSLOG#$ENV{SYS_GROUP_SYSLOG}#g' ${DOVECOT_LOGROTATE_FILE}
    elif [ X"${KERNEL_NAME}" == X'OPENBSD' ]; then
        #
        # modular syslog config file; entries are appended to the main
        # syslog.conf only if not present yet (idempotent on re-run).
        #
        if ! grep "${DOVECOT_LOG_FILE}" ${SYSLOG_CONF} &>/dev/null; then
            # '!!' means abort further evaluation after first match
            echo '!!dovecot' >> ${SYSLOG_CONF}
            echo "${IREDMAIL_SYSLOG_FACILITY}.* ${DOVECOT_LOG_FILE}" >> ${SYSLOG_CONF}
        fi

        if ! grep "${DOVECOT_LOG_FILE}" /etc/newsyslog.conf &>/dev/null; then
            # Define command used to reopen log service after rotated
            cat >> /etc/newsyslog.conf <<EOF
${DOVECOT_LOG_FILE} ${SYS_USER_ROOT}:${SYS_GROUP_ROOT} 600 7 * 24 Z "${DOVECOT_DOVEADM_BIN} log reopen"
EOF
        fi

        if ! grep "${DOVECOT_SIEVE_LOG_FILE}" /etc/newsyslog.conf &>/dev/null; then
            # Define command used to reopen log service after rotated
            cat >> /etc/newsyslog.conf <<EOF
${DOVECOT_SIEVE_LOG_FILE} ${SYS_USER_ROOT}:${SYS_GROUP_ROOT} 600 7 * 24 Z "${DOVECOT_DOVEADM_BIN} log reopen"
EOF
        fi

        if ! grep "${DOVECOT_LMTP_LOG_FILE}" /etc/newsyslog.conf &>/dev/null; then
            # Define command used to reopen log service after rotated
            cat >> /etc/newsyslog.conf <<EOF
${DOVECOT_LMTP_LOG_FILE} ${SYS_USER_VMAIL}:${SYS_GROUP_VMAIL} 600 7 * 24 Z "${DOVECOT_DOVEADM_BIN} log reopen"
EOF
        fi
    fi

    # Mark this step as completed for the installer's resume logic.
    echo 'export status_dovecot_log="DONE"' >> ${STATUS_FILE}
}
dovecot_initialize_db_for_ldap() {
    # Create MySQL database ${IREDADMIN_DB_USER} and additional tables:
    #   - used_quota: used to store realtime quota.
    #   - share_folder: used to store share folder settings.
    # Only applies to the OpenLDAP backend; SQL backends store these
    # tables in the vmail database instead.
    if [ X"${BACKEND}" == X'OPENLDAP' ]; then
        # If iRedAdmin is not used, create database and import table here.
        ${MYSQL_CLIENT_ROOT} >> ${INSTALL_LOG} 2>&1 <<EOF
-- Create databases.
CREATE DATABASE IF NOT EXISTS ${IREDADMIN_DB_NAME} DEFAULT CHARACTER SET utf8 COLLATE utf8_general_ci;

-- Import SQL template.
USE ${IREDADMIN_DB_NAME};

-- used_quota, share_folder, last_login
SOURCE ${SAMPLE_DIR}/dovecot/sql/used_quota.mysql;
SOURCE ${SAMPLE_DIR}/dovecot/sql/imap_share_folder.mysql;
SOURCE ${SAMPLE_DIR}/dovecot/sql/last_login.mysql;

GRANT ALL ON ${IREDADMIN_DB_NAME}.* TO "${IREDADMIN_DB_USER}"@"${MYSQL_GRANT_HOST}" IDENTIFIED BY "${IREDADMIN_DB_PASSWD}";
GRANT ALL ON ${IREDADMIN_DB_NAME}.* TO "${IREDADMIN_DB_USER}"@"${HOSTNAME}" IDENTIFIED BY "${IREDADMIN_DB_PASSWD}";

FLUSH PRIVILEGES;
EOF
    fi

    # Mark this step as completed for the installer's resume logic.
    echo 'export status_dovecot_initialize_db_for_ldap="DONE"' >> ${STATUS_FILE}
}
dovecot_setup()
{
    # Entry point: run each Dovecot setup step; each step records its own
    # status_* flag in ${STATUS_FILE} when done.
    check_status_before_run dovecot_config
    check_status_before_run dovecot_log
    check_status_before_run dovecot_initialize_db_for_ldap

    if [ X"${DISTRO}" == X'FREEBSD' ]; then
        # Remove the temporary sample config files copied in `functions/backend.sh`.
        rm -rf /usr/local/etc/dovecot/conf.d &>/dev/null

        # It seems there's a bug in Dovecot port, it will try to invoke '/usr/lib/sendmail'
        # to send vacation response which should be '/usr/sbin/mailwrapper'.
        [ ! -e /usr/lib/sendmail ] && ln -s /usr/sbin/mailwrapper /usr/lib/sendmail >> ${INSTALL_LOG} 2>&1

        # Start service when system start up.
        service_control enable 'dovecot_enable' 'YES'
    elif [ X"${DISTRO}" == X'OPENBSD' ]; then
        # By default, the _dovecot user, and so the Dovecot processes run in
        # the login(1) class of "daemon". On a busy server, it may be advisable
        # to put the _dovecot user and processes in their own login(1) class
        # with tuned resources, such as more open file descriptors etc.
        if [ -f /etc/login.conf ]; then
            if ! grep '^dovecot:' /etc/login.conf &>/dev/null; then
                cat >> /etc/login.conf <<EOF
dovecot:\\
:openfiles-cur=1024:\\
:openfiles-max=4096:\\
:tc=daemon:
EOF
            fi

            # Rebuild the login.conf.db file if necessary
            [ -f /etc/login.conf.db ] && cap_mkdb /etc/login.conf
        fi
    fi

    # Mark this step as completed for the installer's resume logic.
    echo 'export status_dovecot_setup="DONE"' >> ${STATUS_FILE}
}
|
from docutils.parsers.rst import directives, Directive
from sphinx import addnodes
from docutils.parsers.rst import states
class Sourcelink(Directive):
    """Render an inline ``source(...)`` reference for a file.

    Usage::

        .. sourcelink:: path/to/file
           :command: optional command text

    The single required argument is the file name.  The options plus the
    file name are rendered, sorted by key, as ``key: value`` pairs inside
    an inline ``source(...)`` text node.
    """

    has_content = False
    required_arguments = 1
    optional_arguments = 0
    final_argument_whitespace = True
    option_spec = {'command': directives.unchanged}

    def run(self):
        # Exactly one argument (the file name) is required.
        if len(self.arguments) != 1:
            raise self.error(
                'Error in "%s" directive: need exactly one argument'
                % (self.name,))

        tdict = self.options.copy()
        tdict['file'] = self.arguments[0]
        # 'alt' is only meaningful for images; drop it if present.
        # (Py3 fix: dict.has_key() no longer exists.)
        tdict.pop('alt', None)

        # Render as a deterministic, key-sorted "k: v" list
        # (Py3 fix: dict.keys() is a view and has no .sort()).
        tstr = ', '.join('%s: %s' % (k, tdict[k]) for k in sorted(tdict))

        inodes, messages = self.state.inline_text('source(%s)' % tstr,
                                                  self.lineno)
        return inodes + messages
def setup(app):
    # Sphinx extension entry point: register the directive under the
    # name 'sourcelink'.
    app.add_directive('sourcelink',Sourcelink)
|
"use strict";
Object.defineProperty(exports, "__esModule", {
value: true
});
exports.deserializeValues = exports.serializeValues = void 0;
var _isNil2 = _interopRequireDefault(require("lodash/isNil"));
var _immutable = require("immutable");
var _registry = require("./registry");
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
/**
* Methods for serializing/deserializing entry field values. Most widgets don't
* require this for their values, and those that do can typically serialize/
* deserialize on every change from within the widget. The serialization
* handlers here are for widgets whose values require heavy serialization that
* would hurt performance if run for every change.
* An example of this is the markdown widget, whose value is stored as a
* markdown string. Instead of stringifying on every change of that field, a
* deserialization method is registered from the widget's control module that
* converts the stored markdown string to an AST, and that AST serves as the
* widget model during editing.
*
* Serialization handlers should be registered for each widget that requires
* them, and the registration method is exposed through the registry. Any
* registered deserialization handlers run on entry load, and serialization
* handlers run on persist.
*/
const runSerializer = (values, fields, method) => {
/**
* Reduce the list of fields to a map where keys are field names and values
* are field values, serializing the values of fields whose widgets have
* registered serializers. If the field is a list or object, call recursively
* for nested fields.
*/
return fields.reduce((acc, field) => {
const fieldName = field.get('name');
const value = values.get(fieldName);
const serializer = (0, _registry.getWidgetValueSerializer)(field.get('widget'));
const nestedFields = field.get('fields'); // Call recursively for fields within lists
if (nestedFields && _immutable.List.isList(value)) {
return acc.set(fieldName, value.map(val => runSerializer(val, nestedFields, method)));
} // Call recursively for fields within objects
if (nestedFields && _immutable.Map.isMap(value)) {
return acc.set(fieldName, runSerializer(value, nestedFields, method));
} // Run serialization method on value if not null or undefined
if (serializer && !(0, _isNil2.default)(value)) {
return acc.set(fieldName, serializer[method](value));
} // If no serializer is registered for the field's widget, use the field as is
if (!(0, _isNil2.default)(value)) {
return acc.set(fieldName, value);
}
return acc;
}, (0, _immutable.Map)());
};
/**
 * Apply every registered widget `serialize` handler to the given entry
 * field values (used when persisting an entry).
 */
const serializeValues = (values, fields) => runSerializer(values, fields, 'serialize');
exports.serializeValues = serializeValues;
/**
 * Apply every registered widget `deserialize` handler to the given entry
 * field values (used when loading an entry).
 */
const deserializeValues = (values, fields) => runSerializer(values, fields, 'deserialize');
exports.deserializeValues = deserializeValues;
//# sourceMappingURL=serializeEntryValues.js.map |
<filename>src/js/redux/slices/spells/special.js
import { createSlice, createEntityAdapter } from '@reduxjs/toolkit';

// Normalized storage for "special" spells, keyed by spell id and kept
// sorted by name.
const specialsAdapter = createEntityAdapter({
  selectId: spell => spell.id,
  sortComparer: (a, b) => a.name.localeCompare(b.name)
});

const initialState = specialsAdapter.getInitialState();

const specialSpellsSlice = createSlice({
  name: 'specials',
  initialState,
  reducers: {
    // Bulk-insert spells via the adapter's addMany reducer.
    addSpecials: specialsAdapter.addMany
  }
});

export default specialSpellsSlice.reducer;
export const { addSpecials } = specialSpellsSlice.actions;
|
<filename>dist/better-timeinput-polyfill.js
/**
* @file src/better-timeinput-polyfill.js
* @version 1.1.1 2013-12-27T18:46:50
* @overview input[type=time] polyfill for better-dom
* @copyright <NAME> 2013
* @license MIT
* @see https://github.com/chemerisuk/better-timeinput-polyfill
*/
(function(DOM, COMPONENT_CLASS) {
    "use strict";

    if ("orientation" in window) return; // skip mobile/tablet browsers

    // polyfill timeinput for desktop browsers
    var htmlEl = DOM.find("html"),
        // Parse "HH:MM" into [hours, minutes] as numbers; any other
        // shape yields an empty array.
        timeparts = function(str) {
            str = str.split(":");

            if (str.length === 2) {
                str[0] = parseFloat(str[0]);
                str[1] = parseFloat(str[1]);
            } else {
                str = [];
            }

            return str;
        },
        // Left-pad a number to two digits, e.g. 5 -> "05".
        zeropad = function(value) { return ("00" + value).slice(-2) },
        // Pick `pos` for the 12-hour en-US locale, `neg` otherwise.
        ampm = function(pos, neg) { return htmlEl.get("lang") === "en-US" ? pos : neg },
        // Build the ISO "HH:MM" value; PM adds 12 to the hour.
        formatISOTime = function(hours, minutes, ampm) {
            return zeropad(ampm === "PM" ? hours + 12 : hours) + ":" + zeropad(minutes);
        };

    DOM.extend("input[type=time]", {
        constructor: function() {
            // A hidden input carries the ISO value under the original name;
            // the visible input is downgraded to a plain text field.
            var timeinput = DOM.create("input[type=hidden name=${name}]", {name: this.get("name")}),
                ampmspan = DOM.create("span.${c}-meridian>(select>option>{AM}^option>{PM})+span>{AM}", {c: COMPONENT_CLASS}),
                ampmselect = ampmspan.child(0);

            this
                // drop native implementation and clear name attribute
                .set({type: "text", maxlength: 5, name: null})
                .addClass(COMPONENT_CLASS)
                .on("change", this.onChange.bind(this, timeinput, ampmselect))
                .on("keydown", this.onKeydown, ["which", "shiftKey"])
                .after(ampmspan, timeinput);

            ampmselect.on("change", this.onMeridianChange.bind(this, timeinput, ampmselect));

            // update value correctly on form reset
            this.parent("form").on("reset", this.onFormReset.bind(this, timeinput, ampmselect));
            // patch set method to update visible input as well
            timeinput.set = this.onValueChanged.bind(this, timeinput.set, timeinput, ampmselect);
            // update hidden input value and refresh all visible controls
            timeinput.set(this.get()).data("defaultValue", timeinput.get());
            // update default values to be formatted
            this.set("defaultValue", this.get());
            ampmselect.next().data("defaultValue", ampmselect.get());

            if (this.matches(":focus")) timeinput.fire("focus");
        },
        // Wrapper around the hidden input's original `set`; after delegating,
        // it syncs the AM/PM controls and the visible text field.
        onValueChanged: function(setter, timeinput, ampmselect) {
            var parts, hours, minutes;

            setter.apply(timeinput, Array.prototype.slice.call(arguments, 3));

            // NOTE(review): 3 bound args + 1 caller arg = 4, i.e. this branch
            // runs only for single-argument set(value) calls — confirm.
            if (arguments.length === 4) {
                parts = timeparts(timeinput.get());
                hours = parts[0];
                minutes = parts[1];

                // select appropriate AM/PM
                ampmselect.child((hours -= 12) > 0 ? 1 : Math.min(hours += 12, 0)).set("selected", true);
                // update displayed AM/PM
                ampmselect.next().set(ampmselect.get());
                // update visible input value, need to add zero padding to minutes
                this.set(hours < ampm(13, 24) && minutes < 60 ? hours + ":" + zeropad(minutes) : "");
            }

            return timeinput;
        },
        // Accept shift+186 (':'), shift+190, and key codes below 58
        // (digits and control keys); everything else is rejected.
        onKeydown: function(which, shiftKey) {
            return (which === 186 && shiftKey) || (which === 190 && shiftKey) || which < 58;
        },
        onChange: function(timeinput, ampmselect) {
            var parts = timeparts(this.get()),
                hours = parts[0],
                minutes = parts[1],
                value = "";

            if (hours < ampm(13, 24) && minutes < 60) {
                // refresh hidden input with new value
                value = formatISOTime(hours, minutes, ampmselect.get());
            } else if (parts.length === 2) {
                // restore previous valid value
                value = timeinput.get();
            }

            timeinput.set(value);
        },
        onMeridianChange: function(timeinput, ampmselect) {
            // update displayed AM/PM
            ampmselect.next().set(ampmselect.get());
            // adjust time in hidden input
            timeinput.set(function(el) {
                var parts = timeparts(el.get()),
                    hours = parts[0],
                    minutes = parts[1];

                // Switching to AM removes the PM offset before reformatting.
                if (ampmselect.get() === "AM") hours -= 12;

                return formatISOTime(hours, minutes, ampmselect.get());
            });
        },
        // Restore both the hidden value and the meridian label captured
        // at construction time.
        onFormReset: function(timeinput, ampmselect) {
            timeinput.set(timeinput.data("defaultValue"));
            ampmselect.next().set(ampmselect.data("defaultValue"));
        }
    });
}(window.DOM, "better-timeinput"));
|
<filename>lib/br_nfe/product/response/build/nfe_status_servico.rb
module BrNfe
  module Product
    module Response
      module Build
        class NfeStatusServico < Base
          # XPath prefix shared by every field of the
          # nfeStatusServicoNF2 SOAP response.
          RESPONSE_PATH = '//ret:nfeStatusServicoNF2Result/nf:retConsStatServ'.freeze

          # Defines which class is instantiated to hold the return
          # values of this operation.
          #
          # <b>Type: </b> _Class_
          #
          def response_class
            BrNfe::Product::Response::NfeStatusServico
          end

          # Operation-specific attributes extracted from the response
          # XML body (DRYed: the nine near-identical xpath calls now go
          # through a single private helper).
          #
          # <b>Type: </b> _Hash_
          #
          def specific_attributes
            {
              environment:              field_text('tpAmb'),
              app_version:              field_text('verAplic'),
              processed_at:             field_text('dhRecbto'),
              processing_status_code:   field_text('cStat'),
              processing_status_motive: field_text('xMotivo'),
              uf:                       field_text('cUF'),
              average_time:             field_text('tMed').to_i,
              observation:              field_text('xObs'),
              return_prevision:         field_text('dhRetorno'),
            }
          end

          private

          # Reads the text content of a single +nf:+ element under the
          # status-service response node.
          def field_text(element)
            body_xml.xpath("#{RESPONSE_PATH}/nf:#{element}", nf: nf_xmlns, ret: url_xmlns_retorno).text
          end
        end
      end
    end
  end
end
def delete_element(arr, idx):
    """Remove the element at position ``idx`` from ``arr`` in place.

    Returns the same (mutated) list so calls can be chained.
    Raises IndexError if ``idx`` is out of range, like ``list.pop``.
    """
    del arr[idx]
    return arr
package com.twitter.finatra.http.internal.routing
import com.twitter.finagle.Service
import com.twitter.finagle.http.{Method, Request, Response, Status}
import com.twitter.finagle.http.Method._
import com.twitter.finatra.http.AnyMethod
import com.twitter.finatra.http.response.SimpleResponse
import com.twitter.inject.Logging
import com.twitter.util.Future
private[http] class RoutingService(
  routes: Seq[Route])
  extends Service[Request, Response]
  with Logging {

  /* Pre-built routing tables, one per supported HTTP method. */
  private val routesByMethod: Map[Method, Routes] = Map(
    Get -> Routes.createForMethod(routes, Get),
    Post -> Routes.createForMethod(routes, Post),
    Put -> Routes.createForMethod(routes, Put),
    Delete -> Routes.createForMethod(routes, Delete),
    Options -> Routes.createForMethod(routes, Options),
    Patch -> Routes.createForMethod(routes, Patch),
    Head -> Routes.createForMethod(routes, Head),
    Trace -> Routes.createForMethod(routes, Trace))

  /* Fallback table for routes registered under AnyMethod. */
  private val any = Routes.createForMethod(routes, AnyMethod)

  private val routesStr = routes.map(_.summary).mkString(", ")

  /* Public */

  /**
   * Dispatch on the request's HTTP method: an unsupported method yields
   * 400; a supported method with no matching route falls back to the
   * AnyMethod routes, and finally to 404.
   */
  override def apply(request: Request): Future[Response] = {
    val handled: Option[Future[Response]] =
      routesByMethod
        .get(request.method)
        .fold(badRequest(request.method))(_.handle(request))

    handled.getOrElse {
      any.handle(request)
        .getOrElse(notFound(request))
    }
  }

  /* Private */

  /** 404 response when no route (including AnyMethod) matches. */
  private def notFound(request: Request): Future[Response] = {
    debug(request + " not found in registered routes: " + routesStr)
    Future.value(
      SimpleResponse(
        Status.NotFound))
  }

  /** 400 response for HTTP methods this service does not dispatch on. */
  private def badRequest(method: Method): Option[Future[Response]] = {
    Some(Future.value(
      SimpleResponse(
        Status.BadRequest,
        method.toString + " is not a valid HTTP method")))
  }
}
echo "Downloading hardware image tutorial data..."
url=http://i.stanford.edu/hazy/share/fonduer/hardware_image_tutorial_data.tar.gz
data_tar=hardware_image_tutorial_data

# Prefer curl, fall back to wget; abort early instead of silently
# falling through to a confusing tar failure when neither exists.
if type curl &>/dev/null; then
    curl -RLO $url
elif type wget &>/dev/null; then
    wget -N -nc $url
else
    echo "Error: neither curl nor wget is available." >&2
    exit 1
fi

# -p: don't fail when the directory already exists (e.g. on a re-run).
mkdir -p data

echo "Unpacking hardware image tutorial data..."
tar -zxvf $data_tar.tar.gz -C data

echo "Deleting tar file..."
rm $data_tar.tar.gz

echo "Done!"
|
<filename>code/Twitter.py<gh_stars>1-10
# -*- coding: utf-8 -*-
# @Time : 2020/5/28 17:46
# @Author : Sanzzi
from textblob import TextBlob
# 16: 3.22——4.06
path = '../dataset/Twitter_dailies/'
def Twitter_emotion():
    """Score the 2020-04-06 top-1000 trigrams with TextBlob polarity.

    Reads one n-gram per line from the CSV under ``path``, writes each
    non-neutral polarity score to a sibling ``.txt`` file, followed by
    the average of those scores.
    """
    words = []
    with open(path + "2020-04-06/2020-04-06_top1000trigrams.csv", encoding='utf8') as file:  # bigrams / trigrams
        for line in file:
            words.append(line.split()[0])
    del words[0]  # drop the header row

    with open(path + '2020-04-06/2020-04-06_top1000trigrams.txt', 'w', encoding='utf-8') as f:
        # Renamed from `sum`/`len`: shadowing those builtins broke any
        # later use of len()/sum() in this scope.
        total = 0.0
        count = 0
        for word in words:
            polarity = TextBlob(word).sentiment.polarity
            # Keep only non-neutral scores, matching the original filter.
            if str(polarity) != '0.0':
                total += polarity
                count += 1
                f.write(str(polarity) + '\n')
        # Guard: the original divided unconditionally and raised
        # ZeroDivisionError when every word scored 0.0.
        if count:
            f.write('average:' + str(total / count) + '\n')
package Algorithms.Sorting
/**
* Created by MikBac on 03.09.2020
*/
object HeapSort {

  /**
   * Sift the element at index `j` down until the subtree rooted there
   * satisfies the max-heap property within the first `arrayLength` slots.
   * Mutates `array` in place.
   */
  def getHeap(array: Array[Int], arrayLength: Int, j: Int): Unit = {
    val left = 2 * j + 1
    val right = 2 * j + 2
    var top = j

    if (left < arrayLength && array(left) > array(top)) top = left
    if (right < arrayLength && array(right) > array(top)) top = right

    if (top != j) {
      val tmp = array(j)
      array(j) = array(top)
      array(top) = tmp
      getHeap(array, arrayLength, top)
    }
  }

  /** In-place heap sort: heapify bottom-up, then repeatedly move the
    * current maximum to the end of the shrinking heap. */
  def sort(array: Array[Int]): Unit = {
    val n = array.length

    // Build the max-heap from the last parent down to the root.
    for (parent <- (n / 2 - 1) to 0 by -1)
      getHeap(array, n, parent)

    // Extract the maximum one at a time and restore the heap.
    for (end <- (n - 1) until 0 by -1) {
      val tmp = array(0)
      array(0) = array(end)
      array(end) = tmp
      getHeap(array, end, 0)
    }
  }

  def main(args: Array[String]): Unit = {
    val array = Array(3, 2, 4, 3, 2, 4, 5, 23, 23, 5, 232, 239, 2, 100, 11, 6, 3, 4, 2)
    sort(array)
    array.foreach(numb => print(s"$numb "))
  }
}
|
#!/bin/bash
# Discover and run every Python unit test under ./tests, verbosely.
python -m unittest discover -s ./tests -p "*.py" -v
|
<filename>src/bookshelf/utils.ts
/**
* The main purpose of this module is to provide utility functions
* that follows the restrictions of the Bookshelf/Mapper/Serializer APIs
* with the goal of simplifying the logic of the main 'map' method.
*/
'use strict';
import { assign, clone, cloneDeep, differenceWith, includes, intersection,
escapeRegExp, forOwn, has, keys, mapValues, merge, omit, reduce } from 'lodash';
import { SerialOpts } from 'jsonapi-serializer';
import { LinkOpts } from '../links';
import { RelationOpts } from '../relations';
import { topLinks, dataLinks, relationshipLinks, includedLinks } from './links';
import { BookOpts, Data, Model, isModel, isCollection } from './extras';
/**
* Main structure used through most utility and recursive functions
*/
export interface Information {
  bookOpts: BookOpts;  // serializer-wide options (e.g. enableLinks, relation filters)
  linkOpts: LinkOpts;  // options used when generating JSON API links
}
/**
* Start the data processing with top level information,
* then handle resources recursively in processSample
*/
export function processData(info: Information, data: Data): SerialOpts {
  // Build the recursive serializer template from a representative sample.
  const template: SerialOpts = processSample(info, sample(data));

  // Only attach link generators when links are enabled.
  if (info.bookOpts.enableLinks) {
    template.dataLinks = dataLinks(info.linkOpts);
    template.topLevelLinks = topLinks(info.linkOpts);
  }

  return template;
}
/**
* Recursively adds data-related properties to the
* template to be sent to the serializer
*/
function processSample(info: Information, sample: Sample): SerialOpts {
  let { bookOpts, linkOpts }: Information = info;
  let { enableLinks }: BookOpts = bookOpts;

  let template: SerialOpts = {
    // Add list of valid attributes
    attributes: getAttrsList(sample, bookOpts)
  };

  // Nested relations (recursive) template generation
  forOwn(sample.relations, (relSample: Sample, relName: string): void => {
    if (!relationAllowed(bookOpts, relName)) { return; }

    // Each relation gets its own link options with the relation name as type
    let relLinkOpts: LinkOpts = assign(clone(linkOpts), {type: relName});
    let relTemplate: SerialOpts = processSample({bookOpts, linkOpts: relLinkOpts}, relSample);
    relTemplate.ref = 'id'; // Add reference in nested resources

    // Related links
    if (enableLinks) {
      relTemplate.relationshipLinks = relationshipLinks(linkOpts, relName);
      relTemplate.includedLinks = includedLinks(relLinkOpts);
    }

    // Include links as compound document
    if (!includeAllowed(bookOpts, relName)) {
      relTemplate.included = false;
    }

    template[relName] = relTemplate;
    // NOTE(review): relation names are also appended to `attributes` —
    // presumably required by jsonapi-serializer to pick them up; verify.
    (template.attributes as string[]).push(relName);
  });

  return template;
}
/**
 * Representation of a sample: a model whose relations hold only
 * single models, never collections — collections are collapsed
 * into one representative model by sample()/mergeSample().
 */
interface Sample extends Model {
  relations: {
    [relationName: string]: Sample
  };
}
/**
 * Collapse any data (model, collection, or nothing) into a single
 * representative Sample usable for template generation.
 */
function sample(data: Data): Sample {
  if (isModel(data)) {
    // The cast is deliberate: relations/attributes are rebuilt just below
    const sampled: Sample = omit<Sample, Model>(clone(data), ['relations', 'attributes']);
    sampled.attributes = cloneDeep(data.attributes);
    sampled.relations = mapValues(data.relations, sample);
    return sampled;
  }

  if (isCollection(data)) {
    // Fold every model of the collection into one representative sample
    const first: Model = data.head();
    const rest: Model[] = data.tail();
    return reduce(rest, mergeSample, sample(first));
  }

  return {} as Sample;
}
/**
* Merge two models into a representation of both
*/
function mergeSample(main: Sample, toMerge: Model): Sample {
const sampled: Sample = sample(toMerge);
main.attributes = merge(main.attributes, sampled.attributes);
main.relations = merge(main.relations, sampled.relations);
return main;
}
/**
 * Retrieve the model's attribute names, dropping any that match an
 * omit pattern. By default only the id attribute is omitted; plain
 * string patterns are treated as exact (anchored) matches.
 */
function getAttrsList(data: Model, bookOpts: BookOpts): string[] {
  const { omitAttrs = [data.idAttribute] }: BookOpts = bookOpts;

  const matchesPattern = (attr: string, pattern: RegExp | string): boolean => {
    const reg: RegExp = typeof pattern === 'string'
      ? RegExp(`^${escapeRegExp(pattern)}$`)
      : pattern;
    return reg.test(attr);
  };

  // Keep only attributes that match none of the user-supplied patterns
  return differenceWith(keys(data.attributes), omitAttrs, matchesPattern);
}
/**
 * Based on Bookshelf options, determine whether a relation should be
 * serialized at all: a boolean enables/disables every relation,
 * otherwise an optional `fields` whitelist is consulted.
 */
function relationAllowed(bookOpts: BookOpts, relName: string): boolean {
  const { relations }: BookOpts = bookOpts;

  if (typeof relations === 'boolean') {
    return relations;
  }

  const { fields }: RelationOpts = relations;
  return !fields || includes(fields, relName);
}
/**
 * Based on Bookshelf options, determine whether a relation may be
 * embedded as a compound document: a boolean applies to all, the
 * `included` flag/whitelist decides otherwise, further restricted
 * to serialized relations when `fields` is also set.
 */
function includeAllowed(bookOpts: BookOpts, relName: string): boolean {
  const { relations }: BookOpts = bookOpts;

  if (typeof relations === 'boolean') {
    return relations;
  }

  const { fields, included }: RelationOpts = relations;
  if (typeof included === 'boolean') {
    return included;
  }

  // `included` is an array of relation names allowed to be embedded;
  // intersect with `fields` so only serialized relations qualify
  const allowed: string[] = fields ? intersection(fields, included) : included;
  return includes(allowed, relName);
}
/**
 * Convert a bookshelf model or collection to plain JSON, adding the
 * id attribute when missing; inputs that are neither yield null.
 */
export function toJSON(data: Data): any {
  if (isModel(data)) {
    // Serialize the model without relations, then recurse into them
    const json: any = data.toJSON({shallow: true});
    if (!has(json, 'id')) {
      json.id = data.id;
    }
    forOwn(data.relations, (relData: Data, relName: string): void => {
      json[relName] = toJSON(relData);
    });
    return json;
  }

  if (isCollection(data)) {
    // Recursive toJSON on each model of the collection
    return data.map(toJSON);
  }

  return null;
}
|
#include <iostream>
#include <vector>
#include <algorithm>
using namespace std;
/**
 * Sort the vector in place, ascending, using insertion sort.
 * Stable, O(n^2) worst case, O(n) on already-sorted input.
 * Uses size_t indices: the original `int i < arr.size()` loop compared
 * a signed int against an unsigned size, a classic warning/overflow trap.
 */
void insertionSort(vector<int> &arr)
{
    for (size_t i = 1; i < arr.size(); i++)
    {
        int temp = arr[i];
        size_t j = i;
        // Shift every element greater than temp one slot to the right
        while (j > 0 && arr[j - 1] > temp) {
            arr[j] = arr[j - 1];
            j--;
        }
        arr[j] = temp;
    }
}
/** Demo: insertion-sort a fixed vector and print it space-separated. */
int main()
{
    vector<int> nums = {3, 6, 4, 2, 1, 0, 7};
    insertionSort(nums);
    for (size_t i = 0; i < nums.size(); ++i)
        cout << nums[i] << " ";
    return 0;
}
<gh_stars>1-10
package main
// SortByLikes sorter for playlist songs - by number of likes
// (most-liked first). Adapts a Playlist to the sort.Interface
// contract together with the Len/Swap/Less methods below.
type SortByLikes Playlist
// Len reports how many songs the playlist holds.
func (s SortByLikes) Len() int {
	return len(s.songs)
}
// Swap exchanges the songs at positions a and b.
func (s SortByLikes) Swap(a, b int) {
	s.songs[a], s.songs[b] = s.songs[b], s.songs[a]
}
// Less orders songs by like count, highest first (descending sort).
func (s SortByLikes) Less(a, b int) bool {
	return s.songs[a].likes > s.songs[b].likes
}
|
<reponame>kutyepov/CN-Guided-Practice<gh_stars>0
const axios = require('axios');
const http = require('http');
const url = require('url');
/**
 * Fetch the GitHub username for an OAuth access token and pass
 * `{ username, access_token }` to the callback; on any failure the
 * callback receives the error (or a message string) as first argument.
 */
const getUsername = (access_token, function_to_execute) => {
  const config = {
    headers: {
      'Authorization': `token ${access_token}`,
    },
  };
  axios
    .get('https://api.github.com/user', config)
    .then((response) => {
      if (!('login' in response.data)) {
        console.error('Missing username in response');
        function_to_execute('Missing username', null);
        return;
      }
      function_to_execute(null, {
        username: response.data.login,
        access_token: access_token,
      });
    })
    .catch((error) => {
      console.error('Error getting username:', error);
      function_to_execute(error, null);
    });
};
/**
 * Exchange a GitHub OAuth `code` for an access token, then resolve the
 * token to a username via getUsername; errors and missing-token
 * responses are forwarded to the callback as its first argument.
 */
const getOAuthToken = (code, function_to_execute) => {
  const body = {
    client_id: 'bd6a342322ba0ce2601a',
    client_secret: '<KEY>',
    code: code,
  };
  const config = {
    headers: {
      'Accept': 'application/json',
    },
  };
  axios
    .post('https://github.com/login/oauth/access_token', body, config)
    .then((response) => {
      if ('access_token' in response.data) {
        getUsername(response.data['access_token'], function_to_execute);
      } else {
        console.error('Missing OAuth token in response');
        function_to_execute('Missing OAuth token', null);
      }
    })
    .catch((error) => {
      console.error('Error getting OAuth token:', error);
      function_to_execute(error, null);
    });
};
exports.authAnd = (function_to_execute) => {
const server = http.createServer((request, response) => {
request.on('error', (error) => {
server.close();
console.error('Error getting callback', error);
function_to_execute(error, null);
}).on('data', (chunk) => {}).on('end', () => {
response.on('error', (error) => {
server.close();
console.error('Error getting callback', error);
function_to_execute(error, null);
});
response.writeHead(200, {
'Content-Type': 'text/html'
})
const parsedUrl = url.parse(request.url, true);
if (parsedUrl.pathname === '/authorizationcallback' &&
'code' in parsedUrl.query) {
response.end('Success! Close this tab and return to your terminal.');
server.close();
getOAuthToken(parsedUrl.query['code'], function_to_execute);
} else {
response.end('Failed. Revisit your terminal and try again.');
server.close();
}
});
}).listen(4343);
console.log('Open the following URL in a brower:');
console.log(
'https://github.com/login/oauth/authorize?client_id=bd6a342322ba0ce2601a&scope=public_repo');
console.log('Waiting for callback...');
} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.