text stringlengths 1 1.05M |
|---|
import unittest
from datetime import datetime
from importlib import resources
from hill.data.apt.manage.controls import Controls
from hill.tests.base_test_case import BaseTestCase
class AptTestCase(BaseTestCase):
    """Integration tests for parsing Debian APT control files
    (Release and Packages indexes) with hill's ``Controls`` parser."""

    def test_release_detached(self):
        """Parse a detached Debian 11 Release file and verify every decoded field."""
        package = "hill.tests.resources"
        resource_release = "debian-11-Release.txt"
        content_release = resources.read_text(package, resource_release)
        # TODO
        # resource_gpg = "debian-11-Release.gpg"
        # content_gpg = resources.read_text(package, resource_gpg)
        controls = Controls()
        control = controls.read_content(content_release)
        self.assertIsNotNone(control)
        # A Release file consists of exactly one control paragraph.
        self.assertEqual(1, len(control.paragraphs))
        f = self._display_dataclass(control.paragraphs[0])
        self.assertEqual(len(f), 13)
        self.maxDiff = None
        # Expected values for the Debian 11.0 "bullseye" Release file fixture.
        expected = {
            "origin": "Debian",
            "label": "Debian",
            "suite": "stable",
            "version": "11.0",
            "codename": "bullseye",
            "created_date": datetime(2021, 8, 14, 7, 57, 38),
            "acquire_by_hash": True,
            "architectures": [
                "all",
                "amd64",
                "arm64",
                "armel",
                "armhf",
                "i386",
                "mips64el",
                "mipsel",
                "ppc64el",
                "s390x",
            ],
            "components": ["main", "contrib", "non-free"],
            "description": "Debian 11.0 Released 14 August 2021",
            "ignored_fields": 2,
            "hash_files_md5": 582,
            "hash_files_sha256": 582,
        }
        self.assertDictEqual(f, expected)

    @unittest.skip("not implemented")
    def test_release_combined(self):
        """Placeholder: parse an InRelease (inline-signed) Release file."""
        package = "hill.tests.resources"
        resource = "debian-11-InRelease.txt"
        content = resources.read_text(package, resource)
        raise NotImplementedError()

    @unittest.skip("takes a long time")
    def test_package(self):
        """Parse a full Packages.xz index and spot-check the first two paragraphs."""
        package = "hill.tests.resources"
        resource = "debian-11-main-binary-all-Packages.xz"
        controls = Controls()
        self.maxDiff = None
        with resources.path(package, resource) as path:
            control = controls.read_file(path)
        # Fail fast if any paragraph contained fields the parser did not recognize.
        for index, para in enumerate(control.paragraphs):
            if len(para.ignored_fields) > 0:
                raise ValueError(para.ignored_fields)
        f = self._display_dataclass(control.paragraphs[0])
        self.assertEqual(len(f), 17)
        self.maxDiff = None
        expected = {
            "package": "0ad-data",
            "version": "0.0.23.1-1.1",
            "installed_size": 2044173,
            "maintainers": 1,
            "architectures": ["all"],
            "description": "Real-time strategy game of ancient warfare (data files)",
            "homepage_url": "http://play0ad.com/",
            "description_md5": "26581e685027d5ae84824362a4ba59ee",
            "section": "games",
            "priority": "optional",
            "filename": "pool/main/0/0ad-data/0ad-data_0.0.23.1-1.1_all.deb",
            "compressed_size": 701833824,
            "hash_md5": "b2b6e5510898abf0eee79da48995f92f",
            "hash_sha256": "afb3f0ddaceb36dc2d716d83d7fee4ada419511a948e4a06fa44bbc1b486e2c0",
            "tags": "role::app-data",
            "pre_depends": "dpkg (>= 1.15.6~)",
            "suggests": "0ad",
        }
        self.assertDictEqual(f, expected)
        # Second paragraph has more fields (depends/replaces/breaks etc.).
        f = self._display_dataclass(control.paragraphs[1])
        self.assertEqual(len(f), 21)
        self.maxDiff = None
        expected = {
            "package": "0ad-data-common",
            "source": "0ad-data",
            "version": "0.0.23.1-1.1",
            "installed_size": 2423,
            "maintainers": 1,
            "architectures": ["all"],
            "description": "Real-time strategy game of ancient warfare (common data files)",
            "homepage_url": "http://play0ad.com/",
            "description_md5": "8d014b839c4c4e9b6f82c7512d7e3496",
            "section": "games",
            "priority": "optional",
            "filename": "pool/main/0/0ad-data/0ad-data-common_0.0.23.1-1.1_all.deb",
            "compressed_size": 777612,
            "hash_md5": "49ad6a3a16eb34ea455bd3146a486aa0",
            "hash_sha256": "9bceebe75ab7bca79606aae24fd203681b10d1107b456a1a28f35c996d32199a",
            "replaces": "0ad-data (<< 0.0.12-1~)",
            "depends": "fonts-dejavu-core | ttf-dejavu-core, fonts-freefont-ttf | ttf-freefont, fonts-texgyre | tex-gyre",
            "pre_depends": "dpkg (>= 1.15.6~)",
            "suggests": "0ad",
            "breaks": "0ad-data (<< 0.0.12-1~)",
            "tags": "game::strategy, role::app-data, role::program, use::gameplaying",
        }
        self.assertDictEqual(f, expected)
|
package com.grasea.grandroid.mvp;
/**
* Created by Rovers on 2016/5/7.
*/
/**
 * Minimal MVP presenter base class that holds a reference to its contract
 * (the view interface) of generic type {@code C}.
 */
public class GrandroidPresenter<C> {

    /** The attached contract; null until {@link #setContract} is called. */
    protected C contract;

    /** Attaches the contract (view) this presenter drives. */
    public void setContract(C contract) {
        this.contract = contract;
    }

    /** Returns the attached contract, or null if none was set. */
    public C getContract() {
        // The field is already declared as C; the original (C) cast was redundant.
        return contract;
    }
}
|
/*
* callPP.sql
* Chapter 9, Oracle10g PL/SQL Programming
* by <NAME>, <NAME> and <NAME>
*
* This script demonstrates call to a persistent package.
*/
-- Enable DBMS_OUTPUT so PUT_LINE results are displayed in SQL*Plus.
set serveroutput on
DECLARE
  -- Table of ISBNs populated by PersistPkg.ReadBooks.
  v_BookTable PersistPkg.t_BookTable;
  -- Maximum number of rows to fetch (taken from the package state).
  v_NumRows NUMBER := PersistPkg.v_MaxRows;
  v_Title books.title%TYPE;
BEGIN
  -- ReadBooks fills v_BookTable; v_NumRows comes back as the count actually read.
  PersistPkg.ReadBooks(v_BookTable, v_NumRows);
  DBMS_OUTPUT.PUT_LINE(' Fetched ' || v_NumRows || ' rows:');
  -- Look up and print the title for each fetched ISBN.
  FOR v_Count IN 1..v_NumRows LOOP
    SELECT title
      INTO v_Title
      FROM books
      WHERE isbn = v_BookTable(v_Count);
    DBMS_OUTPUT.PUT_LINE(v_Title);
  END LOOP;
END;
/
|
#!/bin/sh
# Docker entrypoint: generates nginx listen/channel/pagespeed configuration
# from environment variables, then starts nginx (or execs the given command).
set -e
# set -x

# The upstream PWA URL is mandatory.
[ -z "$UPSTREAM_PWA" ] && echo "UPSTREAM_PWA is not set" && exit 1

# Serve TLS on 443 when the upstream itself is https, otherwise plain port 80.
if echo "$UPSTREAM_PWA" | grep -Eq '^https'
then
cat >/etc/nginx/conf.d/listen.conf <<EOF
listen 443 ssl;
ssl_certificate server.crt;
ssl_certificate_key server.key;
ssl_protocols TLSv1 TLSv1.1 TLSv1.2;
ssl_ciphers HIGH:!aNULL:!MD5;
# https://ma.ttias.be/force-redirect-http-https-custom-port-nginx/
error_page 497 https://\$http_host\$request_uri;
EOF
else
echo "listen 80;" >/etc/nginx/conf.d/listen.conf
fi

# Drop the stock nginx default site so only generated configs apply.
[ -f "/etc/nginx/conf.d/default.conf" ] && rm /etc/nginx/conf.d/default.conf

# Iterate over the PWA_<i>_* variable groups; each group yields one
# channel<i>.conf rendered from channel.conf.tmpl.
i=1
while true
do
  # Indirect expansion: read PWA_<i>_SUBDOMAIN / _TOPLEVELDOMAIN / _DOMAIN.
  eval "export SUBDOMAIN=\$PWA_${i}_SUBDOMAIN"
  eval "export TOPLEVELDOMAIN=\$PWA_${i}_TOPLEVELDOMAIN"
  eval "export DOMAIN=\$PWA_${i}_DOMAIN"
  # Precedence: DOMAIN > SUBDOMAIN (becomes "<sub>\..+") > TOPLEVELDOMAIN (".+\.<tld>").
  if [ ! -z "$DOMAIN" ]
  then
    [ ! -z "$SUBDOMAIN" ] && echo "ignoring PWA_${i}_SUBDOMAIN as PWA_${i}_DOMAIN is set"
    [ ! -z "$TOPLEVELDOMAIN" ] && echo "ignoring PWA_${i}_TOPLEVELDOMAIN as PWA_${i}_DOMAIN is set"
  else
    if [ ! -z "$SUBDOMAIN" ]
    then
      [ ! -z "$TOPLEVELDOMAIN" ] && echo "ignoring PWA_${i}_TOPLEVELDOMAIN as PWA_${i}_SUBDOMAIN is set"
      export DOMAIN="$SUBDOMAIN\..+"
    else
      [ ! -z "$TOPLEVELDOMAIN" ] && export DOMAIN=".+\.$TOPLEVELDOMAIN"
    fi
  fi
  # First channel falls back to a match-everything domain; later ones end the loop.
  [ -z "$DOMAIN" ] && [ "$i" = "1" ] && export DOMAIN=".+"
  [ -z "$DOMAIN" ] && break
  eval "export CHANNEL=\${PWA_${i}_CHANNEL:-'default'}"
  eval "export APPLICATION=\${PWA_${i}_APPLICATION:-'default'}"
  # NOTE(review): this exports LANG, clobbering the process locale variable —
  # presumably intentional (template variable name); confirm.
  eval "export LANG=\${PWA_${i}_LANG:-'default'}"
  eval "export FEATURES=\${PWA_${i}_FEATURES:-'default'}"
  eval "export THEME=\${PWA_${i}_THEME:-'default'}"
  echo "$i DOMAIN=$DOMAIN CHANNEL=$CHANNEL APPLICATION=$APPLICATION LANG=$LANG FEATURES=$FEATURES THEME=$THEME"
  # Only the listed variables are substituted; everything else in the template
  # (e.g. nginx's own $vars) is left untouched.
  envsubst '$UPSTREAM_PWA,$DOMAIN,$CHANNEL,$APPLICATION,$LANG,$FEATURES,$THEME' </etc/nginx/conf.d/channel.conf.tmpl >/etc/nginx/conf.d/channel$i.conf
  i=$((i+1))
done

# Generate Pagespeed config based on environment variables
# NPSC_SOME_OPTION=value  ->  "pagespeed SomeOption value;" (prefix file holds
# the camel-cased directive name, suffix file the raw value).
env | grep NPSC_ | sed -e 's/^NPSC_//g' -e "s/\([A-Z_]*\)=/\L\1=/g" -e "s/_\([a-zA-Z]\)/\u\1/g" -e "s/^\([a-zA-Z]\)/\u\1/g" -e 's/=.*$//' -e 's/\=/ /' -e 's/^/\pagespeed /' > /tmp/pagespeed-prefix.txt
env | grep NPSC_ | sed -e 's/^[^=]*=//' -e 's/$/;/' > /tmp/pagespeed-suffix.txt
paste -d" " /tmp/pagespeed-prefix.txt /tmp/pagespeed-suffix.txt >> /etc/nginx/pagespeed.conf

# With DEBUG set, dump every generated config for inspection.
[ ! -z "$DEBUG" ] && find /etc/nginx -name '*.conf' -print -exec cat '{}' \;

# Default command: run nginx in the foreground; otherwise exec what was given.
if [ -z "$*" ]
then
  /usr/local/nginx/sbin/nginx -c /etc/nginx/nginx.conf -g "daemon off;"
else
  exec "$@"
fi
|
<!DOCTYPE html>
<html>
<head>
  <title>Responsive Website Design</title>
  <!-- Scale the layout to the device width; required for the media query below. -->
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  <style>
    /* Styles go here */
    body {
      margin: 0;
      font-family: sans-serif;
    }
    /* Centered content column, capped at 960px wide. */
    .container {
      max-width: 960px;
      margin: 0 auto;
      padding: 0 10px;
    }
    /* Larger devices: widen the horizontal padding from 10px to 20px. */
    @media only screen and (min-width: 768px) {
      /* More Styles go here */
      .container {
        padding: 0 20px;
      }
    }
  </style>
</head>
<body>
  <div class="container">
    <!-- Content here -->
    <h1>Hello World!</h1>
  </div>
</body>
</html>
package ch.epfl.yinyang
package transformers
import ch.epfl.yinyang._
import ch.epfl.yinyang.transformers._
import scala.reflect.macros.blackbox.Context
import language.experimental.macros
import scala.collection.mutable
/**
* Ascribes terms with their types from the original block. Terms that are ascribed are:
* - applications
* - idents
* - lambda parameters
*/
trait AscriptionTransformation extends MacroModule with TransformationUtils with DataDefs {
  import c.universe._

  /** Configuration flag: whether term ascription should run at all. */
  def ascribeTerms: Boolean

  /** Entry point: ascribes the tree when enabled, otherwise returns it unchanged. */
  object AscriptionTransformer extends (Tree => Tree) {
    def apply(tree: Tree) =
      if (ascribeTerms) {
        val t = new AscriptionTransformer().transform(tree)
        log("ascription transformed: " + t, 2)
        t
      } else
        tree
  }

  private final class AscriptionTransformer extends Transformer {
    // Current logging depth ("ident" is used in the sense of "indent").
    var ident = 0
    // True while transforming the callee/arguments of an enclosing Apply, so
    // only the outermost application of a call chain gets a Typed wrapper.
    var externalApplyFound = false

    override def transform(tree: Tree): Tree = {
      log(" " * ident + " ==> " + tree, 3)
      ident += 1
      val result = tree match {
        // Ascribe non-empty val initializers with the declared type.
        case vd @ ValDef(m, n, t, rhs) if rhs != EmptyTree =>
          copy(vd)(ValDef(m, n, t, Typed(transform(rhs), TypeTree(t.tpe))))
        // Ascribe method bodies with the declared return type.
        case dd @ DefDef(m, n, tp, p, rt, rhs) =>
          copy(dd)(DefDef(m, n, tp, p, rt, Typed(transform(rhs), TypeTree(rt.tpe))))
        // Case bodies are transformed, but patterns/guards are left untouched.
        case CaseDef(pat: Tree, guard: Tree, body: Tree) =>
          CaseDef(pat, guard, transform(body))
        case ap @ Apply(fun, args) =>
          val ascrArgs = args map {
            x => // TODO cleanup. This can be done easier.
              val auniverse = c.universe.asInstanceOf[scala.reflect.internal.Types]
              // NOTE(review): this logs tree.tpe, but the branch below tests
              // x.tpe — looks like the log was meant to use x.tpe; confirm.
              log(s"isConstantType(x.tpe) = " +
                auniverse.isConstantType(tree.tpe.asInstanceOf[auniverse.Type]), 3)
              // Constant (literal singleton) types are erased so the ascription
              // does not pin the argument to the literal's singleton type.
              Typed(transform(x), TypeTree(
                if (x.tpe != null &&
                  auniverse.isConstantType(x.tpe.asInstanceOf[auniverse.Type]))
                  x.tpe.erasure
                else
                  x.tpe))
          }
          if (externalApplyFound) {
            Apply(transform(fun), ascrArgs)
          } else {
            // Outermost application of a chain: wrap it with its result type.
            externalApplyFound = true
            val baseTree = Apply(transform(fun), ascrArgs)
            externalApplyFound = false
            Typed(baseTree, TypeTree(ap.tpe))
          }
        case _ =>
          super.transform(tree)
      }
      ident -= 1
      log(" " * ident + " <== " + result, 3)
      result
    }
  }
}
|
#!/bin/bash
# Attach to (or create) a tmux-based "pyde" development session.
# Usage: <script> <session-name>
# BUG FIX: the original first line was "!/bin/bash" (missing '#'), which is
# not a shebang and would be executed as a command / history expansion.

export EDITOR=ne
export TMUX_SESSION=$1

# Reuse an existing session with this name when one is already running.
if tmux has-session -t "$TMUX_SESSION" 2>/dev/null
then
    echo "Attaching to existing session $TMUX_SESSION"
    tmux attach-session -t "$TMUX_SESSION"
else
    # bash --init-file <(echo "ls; pwd")
    echo "Creating new pyde session $TMUX_SESSION"
    # Pane layout: ranger (file browser) on top, ptipython and a plain shell below.
    tmux -f /usr/local/cluster-prep/resources/tmux.conf new-session -d -s "$TMUX_SESSION" "bash --init-file <(echo ranger)"
    tmux split-window -v 'bash --init-file <(echo ptipython)'
    tmux split-window -h -p 35 'bash'
    tmux -2 attach-session -d
fi
// Find every document where "field_name" is present — note that $exists only
// checks field presence, so documents with field_name: null also match.
db.collection.find({"field_name": {$exists: true}})
import re

# One regex per character class the password must contain at least once:
# uppercase, lowercase, digit, special character.
_REQUIRED_PATTERNS = (r"[A-Z]", r"[a-z]", r"\d", r"[!@#$%^&*]")


def validatePassword(password):
    """Return True when *password* is at least 8 characters long and contains
    an uppercase letter, a lowercase letter, a digit and a special character
    from !@#$%^&*; otherwise return False."""
    if len(password) < 8:
        return False
    return all(re.search(pattern, password) for pattern in _REQUIRED_PATTERNS)


# Test cases
print(validatePassword("P@ssw0rd"))  # Output: True
print(validatePassword("weakpwd"))   # Output: False
#!/bin/bash
# Author: Kun Huang <academicgareth@gmail.com>
# fetch data from review.openstack.org
|
#!/bin/bash
# --------------------------------------------------------------------------
# Generic start-script for qem-system-arm/qemu-system-aarch64
# for the raspi-family of boards.
#
# Author: Bernhard Bablok
# License: GPL3
#
# Website: https://github.com/bablokb/pi-qemu-helper
#
# --------------------------------------------------------------------------
# --- some constants -----------------------------------------------------

# Extra kernel command-line flags appended for every board.
EXTRA_ARGS="dwc_otg.fiq_fsm_enable=0"

# Per-board lookup tables: kernel image, device-tree blob and qemu binary.
declare -A kernel dtb qemu
kernel['raspi0']="kernel.img"
kernel['raspi2b']="kernel7.img"
kernel['raspi3b']="kernel8.img"
dtb['raspi0']="bcm2708-rpi-zero.dtb"
dtb['raspi2b']="bcm2709-rpi-2-b.dtb"
dtb['raspi3b']="bcm2710-rpi-3-b.dtb"
qemu['raspi0']="qemu-system-arm"
qemu['raspi2b']="qemu-system-arm"
qemu['raspi3b']="qemu-system-aarch64"

# set defaults --------------------------------------------------------------

# Initialize option variables before parsing the command line.
setDefaults() {
  screen_size=1024x768
  verbose=0
}

# --- help -----------------------------------------------------------------

# Print usage and exit with status 3.
usage() {
  local pgm=`basename $0`
  echo -e "\n$pgm: emulate Raspberry Pi board with QEmu\n\
\nusage: `basename $0` [options] image [qemu-options]\n\
Possible options:\n\n\
-b board board to emulate (raspi0, raspi2b, raspi3b)
-S size screen-size (default: $screen_size)
-v verbose operation
-h show this help
"
  exit 3
}

# --- parse arguments and set variables ------------------------------------

# Sets: board, screen_size, verbose, osimg (first positional) and
# qemu_extra (remaining positionals, passed through to qemu).
parseArguments() {
  while getopts ":b:S:vh" opt; do
    case $opt in
      b) board="$OPTARG";;
      S) screen_size="$OPTARG";;
      v) verbose=1;;
      h) usage;;
      ?) echo "error: illegal option: $OPTARG"
         usage;;
    esac
  done
  shift $((OPTIND-1))
  osimg="$1"
  shift
  qemu_extra=("$@")
}
# --- check arguments ------------------------------------------------------

# Validates osimg/board, normalizes board aliases and derives the file paths
# (img_file, kernel_file, cmdline_file, dtb_file) plus screen_args.
checkArguments() {
  if [ -z "$osimg" ]; then
    # BUG FIX: this branch reported "no board specified!" for a missing image.
    echo "error: no image specified!" >&2
    usage
  fi
  # Split the image argument into its directory and basename (expansions
  # quoted so paths with spaces survive).
  local workdir="$(dirname "$osimg")"
  local osimg="$(basename "$osimg")"
  if [ -z "$board" ]; then
    echo "error: no board specified!" >&2
    usage
  fi
  # Accept the short aliases raspi2/raspi3 for the canonical board names.
  [ "$board" = "raspi2" ] && board="raspi2b"
  [ "$board" = "raspi3" ] && board="raspi3b"
  if [ -z "${kernel[$board]}" ]; then
    echo "error: board $board is unsupported!" >&2
    usage
  fi
  # build paths
  img_file="$workdir/$osimg"
  kernel_file="$workdir/${kernel[$board]}"
  cmdline_file="$workdir/cmdline.txt"
  dtb_file="$workdir/${dtb[$board]}"
  # args for screen resolution (screen_size is WIDTHxHEIGHT)
  local width="${screen_size%x*}"
  local height="${screen_size#*x}"
  screen_args="bcm2708_fb.fbwidth=$width bcm2708_fb.fbheight=$height"
}
# --- main program ---------------------------------------------------------

setDefaults
parseArguments "$@"
checkArguments

# With -v, summarize the effective configuration before launching.
if [ $verbose -eq 1 ]; then
  echo -e "info: starting ${qemu[$board]} with \n\
board: $board \n\
image: $img_file \n\
kernel: $kernel_file \n\
dtb: $dtb_file \n\
commandline: $(cat $cmdline_file) $screen_args $EXTRA_ARGS"
fi

# Launch qemu: SD-card image, USB input devices, user-mode network with
# host port 8022 forwarded to guest ssh (22); daemonizes after boot and
# appends any extra caller-supplied qemu options.
"${qemu[$board]}" \
   -machine "$board" \
   -device usb-mouse \
   -device usb-kbd \
   -drive file="$img_file",if=sd \
   -netdev user,id=net0,hostfwd=tcp::8022-:22 \
   -device usb-net,netdev=net0 \
   \
   -kernel "$kernel_file" \
   -append "$(cat $cmdline_file) $screen_args $EXTRA_ARGS" \
   -dtb "$dtb_file" \
   \
   -no-reboot \
   -daemonize \
   \
   "${qemu_extra[@]}"
|
<gh_stars>0
// Test for the CascadeManager Connection wrapper.
// NOTE(review): this hits a live external endpoint (jsonplaceholder.typicode.com),
// so it needs network access to pass.
import { expect } from 'chai';
import Cascade, { observable } from 'cascade';
import { Connection } from '../scripts/CascadeManager';

describe('Connection', () => {
    it('should Get from the server', () => {
        var connection = new Connection('');
        // call() resolves with the parsed JSON body; post 1 always carries an id.
        return expect(connection.call('https://jsonplaceholder.typicode.com/posts/1')).to.eventually.have.property('id');
    });
});
<reponame>shyga362/projetoPython<filename>exercicio36.py<gh_stars>1-10
# Exercise 36: grant a home loan only when the monthly installment does not
# exceed 30% of the applicant's salary.
casa = float(input("Digite o valor da casa: R$ "))
salario = float(input("Digite seu salario: R$"))
anos = float(input("Digite o tempo em anos que pretende pagar: "))
prestacao = casa / (anos * 12)   # monthly installment over the chosen term
minimo = salario * 30 / 100      # highest affordable installment (30% of salary)
# BUG FIX: the original comparison was inverted (it granted the loan when the
# installment was ABOVE the 30% limit) and the else branch printed an empty
# string instead of a denial message; "consedido" was also a typo.
if prestacao <= minimo:
    print("Emprestimo concedido")
else:
    print("Emprestimo negado")
|
#!/bin/bash
# CI bootstrap: installs build prerequisites for the current Travis OS.
SWIFT_VER="5.2"
if [[ $TRAVIS_OS_NAME = 'osx' ]]; then
  # macOS: dependencies come from the repository Brewfile.
  brew update >/dev/null
  brew bundle
elif [[ $TRAVIS_OS_NAME = 'linux' ]]; then
  # Linux: download and unpack the Swift toolchain matching this Ubuntu release.
  RELEASE_DOT=$(lsb_release -sr)
  # Download URLs use the release number without punctuation (e.g. 18.04 -> 1804).
  RELEASE_NUM=${RELEASE_DOT//[-._]/}
  wget https://swift.org/builds/swift-${SWIFT_VER}-release/ubuntu${RELEASE_NUM}/swift-${SWIFT_VER}-RELEASE/swift-${SWIFT_VER}-RELEASE-ubuntu${RELEASE_DOT}.tar.gz
  tar xzf swift-${SWIFT_VER}-RELEASE-ubuntu${RELEASE_DOT}.tar.gz
fi
|
<gh_stars>100-1000
import * as tf from '@tensorflow/tfjs';
import {
IParams,
IImageData,
IArgs,
} from './types';
// Default classification head: expects [7, 7, 256] feature maps as input
// (presumably the output of a truncated pretrained conv net — confirm at the
// call site) and ends in a softmax over `classes` outputs.
const defaultLayers = ({ classes }: { classes: number }) => {
  return [
    tf.layers.flatten({inputShape: [7, 7, 256]}),
    tf.layers.dense({
      units: 100,
      activation: 'relu',
      kernelInitializer: 'varianceScaling',
      useBias: true
    }),
    // Output layer sized to the number of target classes.
    tf.layers.dense({
      units: classes,
      kernelInitializer: 'varianceScaling',
      useBias: false,
      activation: 'softmax'
    })
  ];
};
// Resolve the training batch size: an explicitly requested size wins;
// otherwise derive ~40% of the sample count (minimum 1) from the xs tensor;
// with neither available, return undefined and let fit() pick its default.
const getBatchSize = (batchSize?: number, xs?: tf.Tensor) => {
  if (batchSize) {
    return batchSize;
  }
  return xs === undefined ? undefined : Math.floor(xs.shape[0] * 0.4) || 1;
};
// Builds (or delegates building of) the model to train.
// NOTE(review): pretrainedModel is accepted but unused here — presumably kept
// for signature symmetry with train(); confirm before removing.
const getModel = (pretrainedModel: tf.Model, data: IImageData, classes: number, params: IParams, args: IArgs) => {
  // A caller-supplied training model (instance or factory fn) takes precedence.
  if (args.trainingModel) {
    if (typeof args.trainingModel === 'function') {
      return args.trainingModel(data, classes, params);
    }
    return args.trainingModel;
  }
  // Otherwise assemble the default head and compile it for classification.
  const model = tf.sequential({
    layers: defaultLayers({ classes }),
  });
  const optimizer = tf.train.adam(0.0001);
  model.compile({
    optimizer,
    loss: 'categoricalCrossentropy',
    metrics: ['accuracy'],
  });
  return model;
};
// Train a model on the collected examples; resolves with the model and the
// tf.Model.fit() history. Throws when no examples have been added yet.
const train = async (pretrainedModel: tf.Model, data: IImageData, classes: number, params: IParams, args: IArgs) => {
  const {
    xs,
    ys,
  } = data;
  if (xs === undefined || ys === undefined) {
    throw new Error('Add some examples before training!');
  }
  // const batch = data.nextTrainBatch(BATCH_SIZE);
  const model = getModel(pretrainedModel, data, classes, params, args);
  const batchSize = getBatchSize(params.batchSize, xs);
  // params is spread first so the derived batchSize/epochs defaults win.
  const history = await model.fit(
    xs,
    ys,
    {
      ...params,
      batchSize,
      epochs: params.epochs || 20,
    },
  );
  return {
    model,
    history,
  };
};

export default train;
|
/*
File: MyFolder.java ; This file is part of Twister.
Version: 2.002
Copyright (C) 2012-2013 , Luxoft
Authors: <NAME> <<EMAIL>>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import org.w3c.dom.Node;
class MyFolder{
private Node node,desc;
private String sut = "";
private String sutpath = "";
public MyFolder(Node node){
this.node = node;
}
public Node getNode(){
return node;
}
public void setDesc(Node desc){
this.desc = desc;
}
public Node getDesc(){
return desc;
}
public void setSut(String sut){
this.sut = sut;
}
public String getSut(){
return sut;
}
public void setSutPath(String sutpath){
this.sutpath = sutpath;
}
public String getSutPath(){
return sutpath;
}
public String toString(){
if(sutpath!=null&&!sutpath.equals("")){
return node.getNodeValue()+" - "+sutpath;
}
return node.getNodeValue();
}
}
|
# Abort on errors, unset variables and failed pipeline stages.
set -euf -o pipefail
# Symlink the koboanki add-on source into Anki's addons21 directory (macOS
# path). NOTE(review): with set -e this fails when the link already exists;
# rerunning requires removing the old link first — confirm whether that is
# the intended one-shot behavior.
ln -s ~/Projects/koboanki/koboanki ~/Library/Application\ Support/Anki2/addons21/koboanki
|
import React from 'react'
const Load = (props) => {
const load = props.name
const id = props.id
console.log("Loads:", props.name)
console.log("Load debug:", id)
console.log("Props in load:", props)
console.log("Props.load:", load, props.name)
function state() {
console.log('ContValue:', props.contValue)
if (props.contValue==='0'){
return(
<div className="label label-success">Päällä</div>
)
} if (props.contValue==='1'){
return(
<div className="label label-danger">Pois</div>
)
} else {
return('None')
}
}
function priority() {
console.log("Priority:", props.priority)
return (
<div className="label label-default">{props.priority}</div>
)
}
return(
<div>
<li className="list-group-item">
<div className="panel panel-default">
<div className="panel-heading">
<h4 className="panel-title">
<a data-toggle="collapse" href={"#"+String(id)}>{props.name} | {state()} | {priority()}</a>
</h4>
</div>
<div id={String(id)} className="panel-collapse collapse">
<ul className="list-group">
<ul>ID: {id} </ul>
<ul> <button onClick={props.this.changeContValue(id, 0)} className="btn-success">
päälle
</button>
<button onClick={props.this.changeContValue(id, 1)} className="btn-danger">
pois
</button>
</ul>
<ul>
<form onSubmit={(e) => props.this.changePriority(e, props.id)}>
Prioriteetti: <input value={props.this.state.newPriority} placeholder={props.priority} onChange={props.this.handlePriorityChange}/>
<button type="submit">Tallenna</button>
</form>
</ul>
</ul>
</div>
</div>
</li>
</div>
)
}
export default Load
|
<filename>2-resources/3-misc/SandboxSQL-master/SandboxSQL-master/src/js/constants/TokenTypes.js
// Token categories produced by the SQL tokenizer; keyMirror turns each key
// into a same-named string constant.
var keyMirror = require('../utils/keyMirror');
/**
 * Still not clear how we will handle
 * things like ORDER BY Country ASC, CustomerName DESC
 * ...
 */
var TokenTypes = keyMirror({
  // Stuff like SELECT, FROM, JOIN, etc
  KEYWORD: null,
  // Column names. Probably will need to add
  // a concept for column aliases in a bit...
  COLUMN: null,
  // General aliases, like COUNT(1) as total, etc
  ALIAS: null,
  // Table names (similar)
  TABLE: null,
  // MIN(), MAX(), COUNT()
  FUNCTION: null,
  // =, >=, <= -- can be a keyword?
  // BETWEEN, LIKE, IN etc
  OPERATOR: null,
  // 100 or 123 or whatever (=123)
  VALUE: null,
});
// The function names the tokenizer recognizes as FUNCTION tokens.
TokenTypes.FUNCTION_TYPES = keyMirror({
  MIN: null,
  MAX: null,
  COUNT: null,
});
module.exports = TokenTypes;
|
########################################################################
#
# Copyright (c) 2021 Wind River Systems, Inc.
#
# SPDX-License-Identifier: Apache-2.0
#
########################################################################
from cgtsclient.common import base
from cgtsclient.common import utils
from cgtsclient import exc
from cgtsclient.v1 import options
CREATION_ATTRIBUTES = ['name', 'service']
class PtpInstance(base.Resource):
    """API resource representing one PTP (Precision Time Protocol) instance."""

    def __repr__(self):
        return f"<PtpInstance {self._info}>"
class PtpInstanceManager(base.Manager):
    """Client-side manager for the /v1/ptp_instances REST resources."""

    resource_class = PtpInstance

    @staticmethod
    def _path(ptp_instance_id=None):
        # Member URL when an id is given, collection URL otherwise.
        return '/v1/ptp_instances/%s' % ptp_instance_id if ptp_instance_id \
            else '/v1/ptp_instances'

    def list(self, q=None):
        """List all PTP instances, optionally filtered by query *q*."""
        return self._list(options.build_url(self._path(), q), "ptp_instances")

    def list_by_host(self, ihost_uuid):
        """List the PTP instances associated with one host."""
        path = '/v1/ihosts/%s/ptp_instances' % ihost_uuid
        return self._list(path, "ptp_instances")

    def get(self, ptp_instance_id):
        """Return one PTP instance, or None when the server returns nothing."""
        try:
            return self._list(self._path(ptp_instance_id))[0]
        except IndexError:
            return None

    def create(self, **kwargs):
        """Create a PTP instance; only 'name' and 'service' are accepted.

        Raises exc.InvalidAttribute for any other keyword.
        """
        data = {}
        for (key, value) in kwargs.items():
            if key in CREATION_ATTRIBUTES:
                data[key] = value
            else:
                raise exc.InvalidAttribute('%s' % key)
        return self._create(self._path(), data)

    def update(self, ptp_instance_id, patch):
        """Apply a patch document to an existing PTP instance."""
        return self._update(self._path(ptp_instance_id), patch)

    def delete(self, ptp_instance_id):
        """Delete a PTP instance by id."""
        return self._delete(self._path(ptp_instance_id))

    def apply(self):
        """Ask the server to apply the current PTP instance configuration."""
        return self.api.json_request('POST', self._path() + "/apply")
def _find_ptp_instance(cc, key):
    """Resolve *key* (numeric id, UUID, or instance name) to a PtpInstance.

    Raises exc.CommandError when a name key matches nothing.
    NOTE(review): on the id/UUID path, get() returns None for an empty result
    (it swallows IndexError), so this function can return None instead of
    raising — confirm whether callers expect that or whether raising was
    intended here too.
    """
    if key.isdigit() or utils.is_uuid_like(key):
        try:
            instance = cc.ptp_instance.get(key)
        except exc.HTTPNotFound:
            raise exc.CommandError('PTP instance not found: %s' % key)
        else:
            return instance
    else:
        # Linear search by name; the for-else raises when the loop finishes
        # without finding a match.
        ptp_instances = cc.ptp_instance.list()
        for instance in ptp_instances:
            if instance.name == key:
                return instance
        else:
            raise exc.CommandError('PTP instance not found: %s' % key)
|
def random_numbers(min, max):
    """Return an HTTP JSON response containing 10 unique random integers
    drawn from range(min, max).

    NOTE(review): ``Response``, ``json`` and ``random`` are not imported in
    this fragment — presumably flask.Response plus the stdlib json/random
    modules; confirm at the module top. The parameters shadow the builtins
    ``min``/``max``. random.sample raises ValueError when the range holds
    fewer than 10 values.
    """
    return Response(
        json.dumps({
            'numbers': random.sample(range(int(min), int(max)), 10)
        }),
        status=200,
        mimetype='application/json'
    )
class Message:
    """A message travelling over a channel, with optional sender/receiver.

    BUG FIX: the original never initialized ``_sender``/``_receiver``, so
    ``sender()``/``receiver()`` raised AttributeError when called before the
    endpoints were assigned; they now default to None.
    """

    def __init__(self, channel):
        self._channel = channel
        # Initialize the endpoints so the accessors below are always safe.
        self._sender = None
        self._receiver = None

    def set_attribute(self, key, value):
        """Attach an arbitrary named attribute to this message."""
        setattr(self, key, value)

    @property
    def carrier(self):
        """The channel this message travels on."""
        return self._channel

    def sender(self):
        """Return the sender, or None when not set."""
        return self._sender

    def receiver(self):
        """Return the receiver, or None when not set."""
        return self._receiver
class CallbackMessage(Message):
    """Message variant whose sender/receiver can be assigned after creation.

    ``sender()``/``receiver()`` are inherited from Message — the original
    duplicated them here verbatim, so the redundant overrides were removed.
    """

    def set_sender(self, sender):
        """Assign the sending endpoint."""
        self._sender = sender

    def set_receiver(self, receiver):
        """Assign the receiving endpoint."""
        self._receiver = receiver
// Mocha suite for the translatejson i18n helper: loads the French locale from
// ./test/locales/ and exercises flat-key lookup (GetLine), dotted nested-key
// lookup (GetBlock), %s placeholder replacement, and mutation
// (Update / Del / SetLine / resolve).
const expect = require("chai").expect;
const translatejson = require("../index");
const Translate = new translatejson("fr", "./test/locales/");

describe("Success", () => {
    describe("Get", () => {
        describe("Normal", () => {
            it("NotReplace", (done) => {
                expect(Translate.GetLine("hello")).to.equal("Bonjour");
                done()
            });
            it("Replace", (done) => {
                // The second argument is substituted into the %s placeholder.
                expect(Translate.GetLine("hello users", "deathart")).to.equal("Bonjour deathart");
                done()
            })
        });
        describe("Block", () => {
            it("NotReplace", (done) => {
                expect(Translate.GetBlock("block.first_block")).to.equal("[FR] block");
                done()
            });
            it("Replace", (done) => {
                expect(Translate.GetBlock("block.sec_block", "deathart")).to.equal("[FR] block deathart remplacé");
                done()
            })
        })
    });
    describe("Update", () => {
        it("Update", (done) => {
            expect(Translate.Update("hello", "Hello")).to.be.true;
            done()
        })
    });
    describe("Remove", () => {
        it("Remove", (done) => {
            expect(Translate.Del("hello")).to.be.true;
            done()
        })
    });
    describe("Set", () => {
        before(() => {
            // Remove the key first so SetLine actually inserts it.
            Translate.Del("hello");
        });
        it("Set", (done) => {
            expect(Translate.SetLine("hello", "Bonjour")).to.be.true;
            done()
        })
    });
    describe("Resolve", () => {
        it("Normal", (done) => {
            expect(Translate.resolve("hello")).to.equal("Bonjour");
            done()
        });
        it("Block", (done) => {
            // resolve returns the raw template, placeholder intact.
            expect(Translate.resolve("block.sec_block")).to.equal("[FR] block %s remplacé");
            done()
        })
    })
});
<gh_stars>0
package com.jinke.kanbox;
/*
* Created on 2007-10-11 *
* java编程 日记--- 压缩文件和文件夹类Compressor.java
*/
import java.util.zip.*;
import java.io.*;
import android.util.Log;
/**
 * Utility for zipping files and directories (comments translated from the
 * original Chinese).
 */
public class Compressor {

    /**
     * Compress a file or directory into a zip archive, additionally bundling
     * the calligraphy database (see note in the private overload).
     *
     * @param zipFileName path of the resulting zip archive
     * @param inputFile path of the file or directory to compress
     * @throws Exception on any I/O failure
     */
    public static void zip(String zipFileName, String inputFile) throws Exception {
        // long start = System.currentTimeMillis();
        // Log.e("zip","start:" + start);
        zip(zipFileName, new File(inputFile));
        // long end = System.currentTimeMillis();
        // Log.e("zip", "end:" + end);
        // Log.e("zip", "use:" + (end - start));
        /*
         * Historic timing measurements kept for reference:
         *346: sysout:
         * E/zip ( 3152): start:1334304490109
         E/zip ( 3152): zip f:free_22 start at:1334304490119 end at:1334304520639 use:30520
         E/zip ( 3152): end:1334304521207
         E/zip ( 3152): use:31098
         Log.e
         start:1334304692261
         zip f:free_22 start at:1334304692274 end at:1334304722863 use:30589
         end:1334304723460
         E/zip ( 3792): use:31199
         E/zip ( 4224): start:1334304822757
         E/zip ( 4224): end:1334304853048
         E/zip ( 4224): use:30291
         E/zip ( 6873): start:1334305507035
         E/zip ( 6873): end:1334305507843
         E/zip ( 6873): use:808
         E/zip ( 6902): start:1334305511931
         E/zip ( 6902): end:1334305512735
         E/zip ( 6902): use:804
         E/zip ( 6930): start:1334305516660
         E/zip ( 6930): end:1334305517516
         E/zip ( 6930): use:856
         E/zip ( 6955): start:1334305521539
         E/zip ( 6955): end:1334305522335
         E/zip ( 6955): use:796
         */
    }

    /**
     * Compress a file or directory WITHOUT bundling the extra database entry.
     */
    public static void zipPage(String zipFileName, String inputFile) throws Exception {
        zipPage(zipFileName, new File(inputFile));
    }

    private static void zip(String zipFileName, File inputFile) throws Exception {
        ZipOutputStream out = new ZipOutputStream(new FileOutputStream(
                zipFileName));
        zip(out, inputFile, ""); // recursive compression
        // NOTE(review): hard-coded app database path bundled into every archive
        // as "calligraphy.db" — confirm this is intended for all callers.
        zip(out, new File("/data/data/com.jinke.calligraphy.app.branch/databases/calligraphy.db"), "calligraphy.db");
        System.out.println("zip done");
        out.flush();
        out.close();
    }

    private static void zipPage(String zipFileName, File inputFile) throws Exception {
        ZipOutputStream out = new ZipOutputStream(new FileOutputStream(
                zipFileName));
        zip(out, inputFile, ""); // recursive compression
        System.out.println("zip done");
        out.close();
    }

    /**
     * Recursive compression worker.
     *
     * @param out zip output stream being written
     * @param f file or directory to add
     * @param base entry path of f inside the archive
     * @throws Exception on any I/O failure
     */
    private static void zip(ZipOutputStream out, File f, String base) throws Exception {
        // System.out.println("Zipping " + f.getName()); // log: starting to compress
        // Log.e("zip", "zip:" + f.getName());
        if (f.isDirectory()) { // directory: recurse into every child
            long start = System.currentTimeMillis();
            // Log.e("zip", "zip f:" + f.getName() + " start at:" + start);
            File[] fl = f.listFiles();
            out.putNextEntry(new ZipEntry(base + "/"));
            base = base.length() == 0 ? "" : base + "/";
            for (int i = 0; i < fl.length; i++) {
                zip(out, fl[i], base + fl[i].getName());
            }
            long end = System.currentTimeMillis();
            // Log.e("zip", "zip f:" + f.getName() + " start at:" + start + " end at:" + end + " use:" + (end - start));
        } else { // regular file: copy its bytes into the archive
            out.putNextEntry(new ZipEntry(base)); // start the next zip entry
            FileInputStream in = new FileInputStream(f); // read file contents
            byte[] temp = new byte[1024];
            int count = 0;
            while ((count = in.read(temp)) != -1) {
                out.write(temp, 0, count);
            }
            // int b;
            // while ((b = in.read()) != -1)
            // out.write(b); // write into the archive
            in.close();
        }
    }

    public static void main(String[] args) {
        Compressor cpr = new Compressor();
        try {
            cpr.zip("F:\\client\\update.zip", "F:\\server");
        } catch (Exception e) {
            // TODO Auto-generated catch block
            e.printStackTrace();
        }
    }
}
|
/**
* Copyright 2018 hubohua
* <p>
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.demoncat.dcapp.utils;
import android.text.TextUtils;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
* @Class: RegularUtils
* @Description: Regular expression utils
* @Author: hubohua
* @CreateDate: 2018/4/20
*/
public class RegularUtils {
    // VIN regex — simplest possible check (17 letters/digits).
    public static final String VIN_SIMPLE_REGEX = "^[a-zA-Z0-9]{17}$";
    // Engine-number regex (8 letters/digits).
    // NOTE(review): the original comment said 16 characters but the pattern
    // enforces 8 — confirm which is intended.
    public static final String ENGINE_SIMPLE_REGEX = "^[a-zA-Z0-9]{8}$";
    // License-plate regex: one CJK character followed by 6 uppercase letters/digits.
    public static final String PLATE_REGEX = "^[\u4E00-\u9FFF]{1}+[A-Z0-9]{6}$";
    // Email address regex.
    public static final String EMAIL_REGEX = "^([a-zA-Z0-9_\\-\\.]+)@((\\[[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}\\.)|(([a-zA-Z0-9\\-]+\\.)+))([a-zA-Z]{2,4}|[0-9]{1,3})(\\]?)$";
    // Mainland-China mobile number: 1[34578] followed by 9 digits.
    public static final String MOBILE_REGEX = "^[1][34578]\\d{9}$";
    // PIN code: exactly 6 digits.
    public static final String PIN_REGUX = "^[0-9]{6}$";
    // PIN code that is not 6 repetitions of the same digit.
    public static final String PIN_REPEAT_REGUX = "^(?=\\d+)(?!([\\d])\\1{5})[\\d]{6}$";
    // Password: 6-14 non-space chars, not purely letters, digits or symbols.
    public static final String PASSWORD_REGEX = "^(?![a-zA-Z]+$)(?!\\d+$)(?![\\W_]+$)\\S{6,14}$";
    // Matches any character that is NOT a letter, digit or CJK character.
    public static final String COMMON_LIMIT_REGEX = "[^a-zA-Z0-9\\u4E00-\\u9FA5]";
    // Characters allowed in a username: CJK, letters, digits, printable ASCII symbols.
    public static final String UNAME_ALLOWED_REGEX = "^[\\u4E00-\\u9FA5A-Za-z0-9\\x21-\\x2F\\x3A-\\x40\\x5B-\\x60\\x7B-\\x7E]+$";
    // Digits only.
    public static final String NUMBER_REGEX = "^[0-9]+$";

    /**
     * Validate a VIN — simplest check (17 letters/digits).
     *
     * @param vin VIN code
     * @return true if matched, else false
     */
    public static Boolean vinRegular(String vin) {
        return regexMatch(VIN_SIMPLE_REGEX, vin);
    }

    /**
     * Validate an engine number (8 letters/digits per the regex above).
     *
     * @param engine engine code
     * @return true if matched, else false
     */
    public static Boolean engineRegular(String engine) {
        return regexMatch(ENGINE_SIMPLE_REGEX, engine);
    }

    /**
     * Validate a license-plate number.
     *
     * @param plate plate number
     * @return true if matched, else false
     */
    public static Boolean plateRegular(String plate) {
        return regexMatch(PLATE_REGEX, plate);
    }

    /**
     * Email is regular format
     *
     * @param email
     * @return
     */
    public static Boolean emailRegular(String email) {
        return regexMatch(EMAIL_REGEX, email);
    }

    /**
     * Password regular format
     *
     * @param password
     * @return
     */
    public static Boolean passwordRegular(String password) {
        return regexMatch(PASSWORD_REGEX, password);
    }

    /**
     * Username regular format
     *
     * @param username
     * @return
     */
    public static Boolean usernameRegular(String username) {
        return regexMatch(UNAME_ALLOWED_REGEX, username);
    }

    /**
     * Telephone is regular format
     *
     * @param telephone
     * @return
     */
    public static Boolean mobileRegular(String telephone) {
        return regexMatch(MOBILE_REGEX, telephone);
    }

    /**
     * Input is regular format
     *
     * @param numStr
     * @return
     */
    public static Boolean numberRegular(String numStr) {
        return regexMatch(NUMBER_REGEX, numStr);
    }

    /**
     * Validate a pin code: 6 digits, not all the same digit, and not an
     * arithmetic sequence of digits (e.g. 123456 or 975310 are rejected).
     *
     * @param pinCode
     * @return
     */
    public static Boolean pinCodeRegular(String pinCode) {
        if (TextUtils.isEmpty(pinCode)) {
            return false;
        } else if (pinCode.length() != 6) {
            return false;
        } else {
            // Detect a constant gap between all adjacent digits (arithmetic
            // sequence); such "too regular" PINs are rejected below.
            boolean sequence = true;
            char[] chars = pinCode.toCharArray();
            int gap = 0;
            for (int i = chars.length - 1; i > 0; i--) {
                if (i == chars.length - 1) {
                    // first time init gap
                    gap = chars[i] - chars[i - 1];
                } else {
                    // other time
                    if (gap != (chars[i] - chars[i - 1])) {
                        sequence = false;
                        break;
                    }
                }
            }
            return regexMatch(PIN_REGUX, pinCode)
                    && regexMatch(PIN_REPEAT_REGUX, pinCode)
                    && !sequence;
        }
    }

    /**
     * Strip every substring matching the regex out of the input.
     *
     * @param regex
     * @param param
     * @return the filtered, trimmed string
     */
    public static String stringFilter(String regex, String param) {
        Pattern p = Pattern.compile(regex);
        Matcher m = p.matcher(param);
        return m.replaceAll("").trim();
    }

    /**
     * Generic regular-expression match.
     *
     * @param regex regular expression
     * @param param string to match
     * @return true if matched, else false
     */
    public static Boolean regexMatch(String regex, String param) {
        Pattern pattern = Pattern.compile(regex);
        return pattern.matcher(param).matches();
    }

    /**
     * Mask a phone number by replacing its middle 4 digits with asterisks.
     *
     * @param telephone phone number
     * @return the masked phone number
     */
    public static String blurryTelephone(String telephone) {
        String BLURRY_TELEPHONE = "(\\d{3})\\d{4}(\\d{4})";
        String REGULAR_TELEPHONE = "$1****$2";
        return telephone.replaceAll(BLURRY_TELEPHONE, REGULAR_TELEPHONE);
    }

    /**
     * Has Chinese text
     *
     * @param contents
     * @return
     */
    public static boolean hasChinese(String contents) {
        boolean res = false;
        char[] cTemp = contents.toCharArray();
        for (int i = 0; i < contents.length(); i++) {
            if (isChinese(cTemp[i])) {
                res = true;
                break;
            }
        }
        return res;
    }

    /**
     * Char is chinese char
     *
     * @param c
     * @return
     */
    public static boolean isChinese(char c) {
        Character.UnicodeBlock ub = Character.UnicodeBlock.of(c);
        return ub == Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS
                || ub == Character.UnicodeBlock.CJK_COMPATIBILITY_IDEOGRAPHS
                || ub == Character.UnicodeBlock.CJK_UNIFIED_IDEOGRAPHS_EXTENSION_A
                || ub == Character.UnicodeBlock.GENERAL_PUNCTUATION
                || ub == Character.UnicodeBlock.CJK_SYMBOLS_AND_PUNCTUATION
                || ub == Character.UnicodeBlock.HALFWIDTH_AND_FULLWIDTH_FORMS;
    }
}
|
<filename>src/types.ts
// A repository "ref" — a pointer to a specific content version/release.
// NOTE(review): field semantics inferred from names; confirm against the
// backing content API before relying on them.
export interface Ref {
  ref: string
  label: string
  isMasterRef: boolean
  scheduledAt: string
  id: string
}
// A language/locale available in the repository.
export interface Language {
  id: string
  name: string
}
// A content document; `Data` is the document-type-specific payload shape.
export interface Document<Data = Record<string, unknown>> {
  id: string
  uid?: string
  url?: string
  type: string
  href: string
  tags: string[]
  slugs: string[]
  lang?: string
  alternate_languages: AlternateLanguage[]
  first_publication_date: string | null
  last_publication_date: string | null
  data: Data
}
// Reference to the same document published in another language.
export interface AlternateLanguage {
  id: string
  uid?: string
  type: string
  lang: string
}
|
#!/bin/bash
# Builds guidovranken/cryptofuzz together with the crypto libraries it fuzzes
# differentially (Botan, Crypto++), recording the build environment in
# ${FUZZ_ROOT}/fuzz_env.sh for later stages.
# Exit on error (-e), echo every command (-x), fail on pipeline errors.
set -exo pipefail
# Export NAME=VALUE in this shell AND append the same export to fuzz_env.sh
# so follow-up scripts can re-source an identical environment.
# NOTE(review): this function shadows the standard `env` utility for the
# remainder of the script.
function env {
  export "$1"="$2"
  echo "export ${1}=\"${2}\"" >> "${FUZZ_ROOT}/fuzz_env.sh"
}
# Recommended flags from https://github.com/guidovranken/cryptofuzz/blob/master/docs/building.md
export CFLAGS="-fsanitize=address,undefined,fuzzer-no-link -O2 -g"
export CXXFLAGS="-fsanitize=address,undefined,fuzzer-no-link -D_GLIBCXX_DEBUG -O2 -g"
# Setup base of Cryptofuzz
cd "$FUZZ_ROOT"
MODULES_ROOT="${FUZZ_ROOT}/modules"
git clone --depth 1 https://github.com/guidovranken/cryptofuzz.git
cd cryptofuzz
# Log the exact revision being built (the shallow clone is unpinned).
git rev-parse HEAD
CRYPTOFUZZ_SRC=$(pwd)
python3 gen_repository.py
mkdir "$MODULES_ROOT"
cd "$MODULES_ROOT"
# Setup the other crypto libraries for differential fuzzing
# Botan https://github.com/guidovranken/cryptofuzz/blob/master/docs/botan.md
git clone --depth 1 https://github.com/randombit/botan.git
cd botan
git rev-parse HEAD
python3 configure.py --cc-bin=$CXX --cc-abi-flags="$CXXFLAGS" --disable-shared --disable-modules=locking_allocator,x509,tls --build-targets=static --without-documentation
make -j$(nproc)
# Enable Cryptofuzz's Botan module and record the static library / headers.
export CXXFLAGS="$CXXFLAGS -DCRYPTOFUZZ_BOTAN"
env LIBBOTAN_A_PATH `realpath libbotan-3.a`
env BOTAN_INCLUDE_PATH `realpath build/include`
cd "${CRYPTOFUZZ_SRC}/modules/botan/"
make -j$(nproc)
# Crypto++ https://github.com/guidovranken/cryptofuzz/blob/master/docs/cryptopp.md
cd "$MODULES_ROOT"
git clone --depth 1 https://github.com/weidai11/cryptopp.git
cd cryptopp/
git rev-parse HEAD
make libcryptopp.a -j$(nproc)
export CXXFLAGS="$CXXFLAGS -DCRYPTOFUZZ_CRYPTOPP"
env LIBCRYPTOPP_A_PATH `realpath libcryptopp.a`
env CRYPTOPP_INCLUDE_PATH `realpath .`
cd "${CRYPTOFUZZ_SRC}/modules/cryptopp/"
make
# Extract the seed corpus, docker layers are already compressed so this won't use any more space and save time when running
cd "$FUZZ_ROOT"
unzip cryptofuzz_data.zip
rm cryptofuzz_data.zip
env CRYPTOFUZZ_SEED_CORPUS `realpath cryptofuzz_seed_corpus`
env CRYPTOFUZZ_DICT `realpath cryptofuzz-dict.txt`
# Save final common flags
env CFLAGS "$CFLAGS"
env CXXFLAGS "$CXXFLAGS"
env CRYPTOFUZZ_SRC "$CRYPTOFUZZ_SRC"
# Cryptofuzz builds its modules into $CRYPTOFUZZ_SRC/modules that includes everything it needs, deleting the module source
# code saves a substantial amount of space in the docker image
rm -rf "$MODULES_ROOT"
<reponame>flaviamatta/open-condo
// auto generated by kmigrator
// KMIGRATOR:0024_test_text:<KEY>
// Forward migration: add the nullable "text" column to "Test".
exports.up = async (knex) => {
    await knex.raw(`
    BEGIN;
    --
    -- Add field text to test
    --
    ALTER TABLE "Test" ADD COLUMN "text" text NULL;
    COMMIT;
    `)
}
// Reverse migration: drop the "text" column again (CASCADE also removes
// dependent objects).
// NOTE(review): the SQL comment below still says "Add field text to test" —
// copy-pasted from the up migration. Left untouched because this file is
// kmigrator-generated and the KMIGRATOR checksum in the header may cover the
// body; regenerate via kmigrator to fix the comment.
exports.down = async (knex) => {
    await knex.raw(`
    BEGIN;
    --
    -- Add field text to test
    --
    ALTER TABLE "Test" DROP COLUMN "text" CASCADE;
    COMMIT;
    `)
}
|
// Registers a 'controller' helper on the jQuery prototype inside the
// project's $namespace module system (version 2, module 'core').
$namespace(2, 'core', function (exports) {
    // jQuery .data() key under which an element's controller is stored.
    var CONTROLLER_ELEMENT_DATA = '$ctrl';
    // $(el).controller() returns whatever was previously attached to the
    // element via .data('$ctrl'); undefined if nothing was attached.
    exports.ControllerComponent = $.fn['controller'] = function ControllerComponent() {
        return $(this).data(CONTROLLER_ELEMENT_DATA);
    }
})
|
class Plugin(object):
    """Base class for plugins that can be instrumented.

    A plugin starts out un-instrumented; calling :meth:`instrument`
    permanently marks it as instrumented.
    """

    def __init__(self, name):
        # Human-readable identifier for this plugin.
        self.name = name
        # Flips to True once instrument() has been called.
        self.instrumented = False

    def should_be_used(self):
        """Eligibility hook; the base implementation always opts in."""
        return True

    def is_instrumented(self):
        """Return whether instrument() has been called on this plugin."""
        return self.instrumented

    def instrument(self):
        """Mark this plugin as instrumented."""
        self.instrumented = True
|
# API Source Code Generator for MASS Client API Developers
# Run this script ONLY on Linux-like systems (including macOS)
# See README.md for details

# The three protoc passes shared identical include paths and input; factor
# them into one helper so the paths are defined (and quoted) exactly once.
PROTO_DIR="$GOPATH/src/massnet.org/mass-wallet/api/proto"
GATEWAY_DIR="$GOPATH/src/github.com/grpc-ecosystem/grpc-gateway"

# run_protoc <output-flag> — invoke protoc on api.proto with one generator.
# Paths are quoted so a $GOPATH containing spaces no longer breaks the build.
run_protoc() {
  protoc -I "$PROTO_DIR" \
    -I "$GATEWAY_DIR/third_party/googleapis" \
    -I "$GATEWAY_DIR" \
    "$1" \
    "$PROTO_DIR/api.proto"
}

run_protoc --go_out=plugins=grpc:.             # Go gRPC stubs
run_protoc --grpc-gateway_out=logtostderr=true:.  # gRPC-gateway reverse proxy
run_protoc --swagger_out=logtostderr=true:.       # Swagger/OpenAPI spec
|
import { GetStaticProps, GetStaticPaths } from "next"
import Link from "next/link"
import {
getCollections,
getItemSlugs,
getCommentAuthorSlugs,
getItemMeta,
getComment,
getAppProps,
} from "../libs/data"
import { ItemMeta, Comment } from "../libs/type"
import { getAverageScoreByComments, slugify } from "../libs/utils"
// import Image from "next/image"
import Card from "react-bootstrap/Card"
import Row from "react-bootstrap/Row"
import Col from "react-bootstrap/Col"
import Date from "../components/date"
export const getStaticProps: GetStaticProps = async ({
params,
}: {
params: {
item: string[]
}
}) => {
const path = params.item
const [collection, item] = path
const itemMeta = getItemMeta(collection, item)
const commentAuthors = getCommentAuthorSlugs(collection, item)
const comments = await Promise.all(
commentAuthors.map(async (author) => {
return await getComment(collection, item, author)
})
)
return {
props: {
itemMeta,
comments,
averageScore: getAverageScoreByComments(comments),
...getAppProps(itemMeta.name),
},
}
}
export const getStaticPaths: GetStaticPaths = async () => {
const collections = getCollections().map((collection) => collection.slug)
const paths = collections
.map((collection) => {
const items = getItemSlugs(collection)
return items.map((item) => ({
params: { item: [collection, item] },
}))
})
.flat()
return {
paths,
fallback: false,
}
}
// Item detail page: renders the item's metadata card (aliases, links, extra
// meta entries, optional image, average score) followed by one card per
// reviewer comment. `comments` HTML is injected as-is — it is assumed to be
// produced by the site's own build pipeline, not user input (TODO confirm).
export default function Item({
  itemMeta,
  comments,
  averageScore,
}: {
  itemMeta: ItemMeta
  comments: Comment[]
  averageScore: number
}) {
  return (
    <>
      <Card bg="light" className="shadow-sm border-info">
        <Card.Header className="bg-info text-white text-center py-3">
          <h1 className="mb-0">{itemMeta.name}</h1>
        </Card.Header>
        <Card.Body>
          <Row className="m-0 p-0">
            <Col
              sm={true}
              className="col-12 col flex-grow-1"
              style={{ flexBasis: 0 }}
            >
              {/* Definition list of item metadata: aliases, links, extras. */}
              <Row as="dl" className="mb-0">
                {itemMeta.aliases && (
                  <Col as="dt" lg={2} className="col-12">
                    其他名称
                  </Col>
                )}
                {itemMeta.aliases && (
                  <Col as="dd" lg={10} className="col-12">
                    <ul className="list-unstyled">
                      {itemMeta.aliases.map((alias) => (
                        <li key={alias}>{alias}</li>
                      ))}
                    </ul>
                  </Col>
                )}
                <Col as="dt" lg={2} className="col-12">
                  辅助链接
                </Col>
                <Col as="dd" lg={10} className="col-12">
                  <ul className="list-unstyled">
                    {itemMeta.links.map((link) => (
                      <li key={link.source}>
                        <a href={link.link}>{link.source}</a>
                      </li>
                    ))}
                  </ul>
                </Col>
                {itemMeta.meta && (
                  <>
                    <Col as="dt" lg={2} className="col-12">
                      其他元信息
                    </Col>
                    <Col as="dd" lg={10} className="col-12">
                      <Row as="dl">
                        {itemMeta.meta.map((meta) => (
                          <div key={meta.name}>
                            <dt className="col-auto">{meta.name}</dt>
                            <dd className="col">{meta.value}</dd>
                          </div>
                        ))}
                      </Row>
                    </Col>
                  </>
                )}
              </Row>
            </Col>
            {itemMeta.image && (
              <Col md="auto" className="col-12 px-0 col">
                {
                  // eslint-disable-next-line @next/next/no-img-element
                  <img
                    className="mw-100 d-block mx-auto"
                    src={itemMeta.image}
                    alt={`Introduction image of ${itemMeta.name}`}
                  />
                }
              </Col>
            )}
          </Row>
        </Card.Body>
        <Card.Footer className="text-white bg-info text-center">
          <span className="lead mb-0">
            平均得分:<b>{averageScore}</b>
          </span>
        </Card.Footer>
      </Card>
      {/* One card per comment, keyed by author name. */}
      {comments.map((comment) => (
        <Card
          bg="light"
          className="shadow-sm mt-3"
          key={comment.metadata.author.name}
        >
          <Card.Header>
            <h3 className="mb-0" id={comment.metadata.author.slug}>
              <Link href={`/commenter/${comment.metadata.author.slug}`}>
                <a>{comment.metadata.author.name}</a>
              </Link>{" "}
              的评论
              {/* TODO: Add avatar */}
            </h3>
            <p className="mt-1 mb-0">
              时间: <Date dateString={comment.metadata.date} />
            </p>
          </Card.Header>
          <Card.Body>
            <div dangerouslySetInnerHTML={{ __html: comment.contents }} />
          </Card.Body>
          <Card.Footer>
            <h3 className="float-start mb-0">{comment.metadata.score} 分</h3>
            <div className="float-end">
              {comment.metadata.tags.map((tag) => (
                <Link key={slugify(tag)} href={`/tag/${slugify(tag)}`} passHref>
                  <a className="tagButton">{tag}</a>
                </Link>
              ))}
            </div>
          </Card.Footer>
        </Card>
      ))}
    </>
  )
}
|
// Raise the WSAPI proxy timeout to 240s — presumably because this app's
// large cross-project queries exceed the default; confirm before lowering.
Ext.override(Rally.data.wsapi.Proxy, { timeout:240000 });
/* global Ext Rally Constants Utils */
Ext.define("Rally.app.BacklogHealth", {
extend: 'Rally.app.App',
componentCls: 'app',
layout: {
type: 'vbox',
align: 'stretch'
},
items: [{
id: 'Utils.AncestorPiAppFilter.RENDER_AREA_ID',
xtype: 'container',
layout: {
type: 'hbox',
align: 'middle',
defaultMargins: '0 10 10 0',
}
},
{
xtype: 'container',
itemId: 'controls-area',
layout: 'hbox'
},
{
xtype: 'container',
itemId: 'filters-area',
},
{
id: 'grid-area',
xtype: 'container',
flex: 1,
type: 'vbox',
align: 'stretch'
}
],
config: {
defaultSettings: {
artifactType: 'HierarchicalRequirement',
timeboxType: Constants.TIMEBOX_TYPE_ITERATION,
timeboxCount: 5,
currentTimebox: false,
query: "((Project.Parent.Parent.Parent.Parent.Parent = null) AND (Project.Parent.Parent.Parent.Parent != null))",
includeAll: false,
points: true
}
},
timeboxStartDateField: 'StartDate',
timeboxEndDateField: 'EndDate',
timeboxType: 'Iteration',
modelName: 'HierarchicalRequirement',
settingsChanged: false,
launch: function() {
var status = this._getNewStatus();
this.addControls();
var promises = [
// this.getSquads(status),
this.getFutureTimeboxes(this.getSetting('timeboxCount'),status),
TimeboxExtendedModelBuilder.build(this.timeboxType,'Extended' + this.timeboxType)
];
Deft.Promise.all(promises).then({
success: function(results){
return this.getTimeboxes(results,status);
},
failiure: this._showError,
scope: this
}).then({
success: function(timeboxGroups){
return this.getArtifactsLookback(timeboxGroups,status);
},
failure: this._showError,
scope: this
}).then({
success: this.buildChart,
failure: this._showError,
scope: this
}).always(function(){
this.setLoading(false);
},this);
},
// isProjectHighLevel: function(app){
// //TODO: Make sure this isn't returning closed projects
// var deferred = Ext.create('Deft.Deferred');
// Ext.create('Rally.data.wsapi.Store', {
// model: 'Project',
// fetch: ['Name','Parent','Children'],
// autoLoad: false,
// pageSize: 1,
// filters: {
// "property": "Parent.Parent.ObjectID",
// "value": this.getContext().getProject().ObjectID
// }
// }).load({
// callback: function(records, operation, store){
// if (operation.wasSuccessful()){
// app.isProjectHighLevel = records.length > 0;
// deferred.resolve(app.isProjectHighLevel);
// } else {
// deferred.reject("Error calculating project level");
// }
// }
// });
// return deferred.promise;
// },
/**
* Return a promise that resolves once the controls are initialized and
* have initial values
*/
addControls: function() {
var filterDeferred = Ext.create('Deft.Deferred');
var context = this.getContext();
var controlsArea = this.down('#controls-area');
controlsArea.removeAll();
controlsArea.add({
xtype: 'container',
flex: 1
});
controlsArea.add({
xtype: 'tsfieldpickerbutton',
margin: '0 10 0 0',
toolTipConfig: {
html: 'Columns to Export',
anchor: 'top'
},
getTitle: function() {
return 'Export Columns';
},
modelNames: [this.modelName],
_fields: Constants.STORY_DEFAULT_FIELDS,
context: context,
stateful: true,
stateId: context.getScopedStateId(this.modelName + 'fields'), // columns specific to type of object
// Always need the accepted date field
alwaysSelectedValues: Constants.ALWAYS_SELECTED_FIELDS
});
controlsArea.add({
xtype: 'rallybutton',
style: {'float': 'right'},
cls: 'secondary rly-small',
frame: false,
itemId: 'actions-menu-button',
iconCls: 'icon-export',
listeners: {
click: function(button) {
var menu = Ext.widget({
xtype: 'rallymenu',
items: [{
text: 'Export to CSV...',
handler: this.exportToCSV,
scope: this
}, {
text: 'Export teams without velocity...',
handler: this.exportTeamsWithoutVelocity,
scope: this
}]
});
menu.showBy(button.getEl());
if (button.toolTip) {
button.toolTip.hide();
}
},
scope: this
}
});
},
    exportTeamsWithoutVelocity: function() {
        // Build a CSV with one row per team (project) and one column per
        // timebox, marking "Missing" wherever the team has no planned
        // capacity for that timebox. Uses this.timeboxGroups populated by
        // buildChart().
        var fields = {
            name: 'Team'
        };
        var data = _.reduce(this.timeboxGroups, function(obj,timeboxGroup){
            // Each group shares a timebox name; register it as a CSV column.
            fields[timeboxGroup[0].get('Name')] = timeboxGroup[0].get('Name');
            for (var i=0; i<timeboxGroup.length ; i++){
                var timebox = timeboxGroup[i],
                    project = timebox.get('Project')._refObjectName,
                    name = timebox.get('Name');
                if (!obj[project]){ obj[project] = {}; }
                obj[project][name] = timebox.getPlannedCapacity() > 0 ? "" : "Missing";
            }
            return obj;
        },{});
        var fieldKeys = _.keys(fields);
        // Flatten the project->timebox map into CSV row objects.
        var rows = _.reduce(data, function(csv,timeboxObj, projectName){
            var row = {
                name: projectName
            }
            for (var i=0; i<fieldKeys.length; i++){
                if (fieldKeys[i] != 'name'){
                    row[fieldKeys[i]] = timeboxObj[fieldKeys[i]] || "";
                }
            }
            csv.push(row);
            return csv;
        },[]);
        var csvText = CArABU.technicalservices.FileUtilities.convertDataArrayToCSVText(rows, fields);
        CArABU.technicalservices.FileUtilities.saveCSVToFile(csvText, 'missing_velocity.csv');
    },
exportToCSV: function(){
this.setLoading(true);
var key = 'export data';
var status = this._getNewStatus();
var dataContext = this.getContext().getDataContext();
dataContext.includePermissions = false;
var timeboxGroups = this.timeboxGroups;
var fetchFieldsForExport = this.getFieldsForExport();
var promises = _.map(timeboxGroups, function(timeboxGroup){
var timeboxOids = _.map(timeboxGroup, function(t){
return t.get('ObjectID');
});
return this.fetchArtifacts(this.modelName,fetchFieldsForExport,timeboxOids,status,key,true);
}, this);
if (promises.length > 0){
Deft.Promise.all(promises).then({
scope: this,
success: function(groups) {
var artifacts = _.map(_.flatten(groups), function(a){
return a.getData();
});
var exportfields = _.reduce(fetchFieldsForExport, function(accum, field) {
accum[field] = field;
return accum;
}, {}, this);
console.log('artifacts',artifacts,exportfields)
var csvText = CArABU.technicalservices.FileUtilities.convertDataArrayToCSVText(artifacts, exportfields);
CArABU.technicalservices.FileUtilities.saveCSVToFile(csvText, 'backlog-health.csv');
this.setLoading(false);
}
});
} else {
Rally.ui.notify.Notifier.show({message: "No data to export."});
}
},
    getFieldsForExport: function() {
        // Columns chosen in the field-picker button, plus PlanEstimate when
        // the app is charting by points.
        var fieldPicker = this.down('tsfieldpickerbutton');
        var result = [];
        if (fieldPicker) {
            result = fieldPicker.getFields();
        }
        if (this.getSetting('points')) {
            result.push('PlanEstimate');
        }
        return result;
    },
    // Usual monkey business to size gridboards
    onResize: function() {
        this.callParent(arguments);
        var gridArea = this.down('#grid-area');
        var gridboard = this.down('rallygridboard');
        if (gridArea && gridboard) {
            // Reserve room for app chrome above the grid when sizing.
            gridboard.setHeight(gridArea.getHeight() - Constants.APP_RESERVED_HEIGHT)
        }
    },
    _getKey: function(artifactOid, timeboxOid){
        // Composite key identifying an (artifact, timebox) pair.
        return artifactOid + '-' + timeboxOid;
    },
_getNewStatus: function(){
var app = this;
return {
counters: {},
errors: [],
addError: function(key) {
this.errors.push('Error loading ' + key);
},
progressStart: function(key) {
this.counters[key] = this.counters[key] || {total: 0, complete: 0};
this.counters[key].total++;
this.progressUpdate(key);
},
progressEnd: function(key) {
this.counters[key] = this.counters[key] || {total: 0, complete: 0};
this.counters[key].complete++;
this.progressUpdate(key);
},
progressUpdate: function() {
if (this.errors.length > 0) {
app.setLoading(this.errors.join('\n'));
} else {
var statusMessages = _.map(this.counters, function(val, key) {
return key + ' (' + val.complete + '/' + val.total + ')'
})
app.setLoading(statusMessages.join('<br/>'));
}
}
};
},
buildChart: function(timeboxGroups){
var chartConfig = this.buildChartConfig(timeboxGroups);
var chartArea = this.down('#grid-area')
this.timeboxGroups = timeboxGroups;
chartArea.removeAll();
chartArea.add(chartConfig);
},
buildChartConfig: function(timeboxGroups){
var yAxisTitle = ["Points/Count","Team Count"],
chartData = this.buildChartData(timeboxGroups);
return {
xtype: 'rallychart',
loadMask: false,
chartColors: [
"#8DC63F", // $lime
"#FFA500", // $orange
"#000000"// $black
],
chartConfig: {
chart: {
type: 'column',
animation: false
},
title: {
text: Constants.CHART_TITLE + ' by ' + this.timeboxType
},
legend: {
labelFormatter: function() {
return this.name;
}
},
plotOptions: {
column: {
stacking: 'normal'
},
series: {
animation: false,
dataLabels: {
align: 'center',
verticalAlign: 'top',
},
events: {
legendItemClick: function() {
return false;
} // Disable hiding some of data on legend click
}
}
},
yAxis: [{
allowDecimals: false,
min: 0,
title: {
text: yAxisTitle[0]
}
}, {
allowDecimals: false,
min: 0,
title: {
text: yAxisTitle[1]
},
opposite: true
}
]
},
chartData: chartData
};
},
buildChartData: function(timeboxGroups){
var chartData = {
categories: [],
series: [{
data: [],
stack: 0,
legendIndex: 1,
name: Constants.PLANNED
}, {
data: [],
stack: 1,
name: 'Capacity'
}, {
data: [],
type: 'spline',
yAxis: 1,
stack: 0,
name: 'Teams with no Velocity'
}]
};
var usePoints = this.getUsePoints(),
includeAll = this.getIncludeAll();
_.each(timeboxGroups, function(timeboxGroup, timeboxName){
var missingVelocities = 0,
planned = 0,
capacity = 0;
for (var i=0; i< timeboxGroup.length; i++){
var timebox = timeboxGroup[i],
plannedVelocity = timebox.getPlannedCapacity(),
actualPlanned = timebox.getPlannedBacklog(usePoints, includeAll);
planned += actualPlanned;
capacity += plannedVelocity;
if (!plannedVelocity){
missingVelocities++;
}
}
chartData.categories.push(timeboxName);
chartData.series[0].data.push(planned);
chartData.series[1].data.push(capacity);
chartData.series[2].data.push(missingVelocities);
});
return chartData;
},
    getSquadFilters: function(){
        // Parse the configured project query setting into WSAPI filters; an
        // empty setting means no project restriction.
        var queryString = this.getSetting('query');
        if (queryString){
            return Rally.data.wsapi.Filter.fromQueryString(queryString);
        }
        return [];
    },
getSquads: function(status) {
var dataContext = this.getContext().getDataContext();
var key = "Loading Projects"
status.progressStart(key);
return Ext.create('Rally.data.wsapi.Store', {
model: 'Project',
autoLoad: false,
context: dataContext,
fetch: ['ObjectID', 'Name'],
filters: this.getSquadFilters(),
pageSize: 2000
}).load().then({
scope: this,
success: function(projects) {
status.progressEnd(key);
return _.map(projects, function(prj) {
return prj.get('ObjectID')
});
}
});
},
getFutureTimeboxes: function(timeboxCount,status){
var key = "loading future timeboxes";
var deferred = Ext.create('Deft.Deferred');
status.progressStart(key);
Ext.create('Rally.data.wsapi.Store', {
model: this.timeboxType,
autoLoad: false,
context: {
projectScopeDown: false,
projectScopeUp: false
},
sorters: [{
property: 'StartDate',
direction: 'ASC'
}],
filters: [{
property: 'StartDate',
operator: '>=',
value: 'today'
}],
pageSize: timeboxCount
}).load({
callback: function(records,operation,success){
if (operation.wasSuccessful()){
status.progressEnd(key);
deferred.resolve(records);
} else {
deferred.reject("Error loading timeboxes: ");
}
}
});
return deferred.promise;
},
getTimeboxes: function(results,status) {
// Get the N upcoming timeboxes in the current project
// Sort by name
// Get timeboxes by name from all child projects
var squads = results[0],
timeboxes = results[0],
timeboxModel = results[1],
timeboxFetch = this.getTimeboxFetchFields(),
key = "loading timeboxes",
deferred = Ext.create('Deft.Deferred');
var projectFilter = this.getSquadFilters();
if (timeboxes.length) {
var dataContext = this.getContext().getDataContext();
dataContext.includePermissions = false;
var timeboxPromises = _.map(timeboxes, function(timebox) {
var timeboxFilter = [{
property: 'Name',
value: timebox.get('Name')
},{
property: this.timeboxStartDateField,
value: timebox.get(this.timeboxStartDateField)
},{
property: this.timeboxEndDateField,
value: timebox.get(this.timeboxEndDateField)
},projectFilter];
status.progressStart(key);
return Ext.create('Rally.data.wsapi.Store', {
model: timeboxModel,
autoLoad: false,
context: dataContext,
useShallowFetch: true,
fetch: timeboxFetch,
enablePostGet: true,
sorters: [{
property: this.timeboxEndDateField,
direction: 'DESC'
}],
listeners:{
load: function(){
status.progressEnd(key);
}
},
filters: timeboxFilter,
pageSize: 2000,
limit: Infinity
}).load();
}, this);
Deft.Promise.all(timeboxPromises).then({
success: function(results){
var timeboxes = _.flatten(results);
// Group by timebox name
var timeboxGroups = _.groupBy(timeboxes, function(timebox) {
return timebox.get('Name');
});
deferred.resolve(timeboxGroups);
},
failure: function(msg){
deferred.reject(msg);
},
scope: this
});
} else {
deferred.resolve({});
}
return deferred.promise;
},
    getIncludeAll: function(){
        // True when the 'includeAll' setting is on (settings may persist
        // booleans as the string "true").
        // NOTE(review): a second getIncludeAll is declared later in this
        // object literal; in a JS object literal the later one takes effect.
        return this.getSetting('includeAll') === true || this.getSetting('includeAll') === "true";
    },
    getTimeboxFetchFields: function(){
        // Fields needed to compute planned backlog vs. capacity per timebox;
        // WorkProducts is only fetched when counting all scheduled work.
        var fields = ['ObjectID', this.timeboxStartDateField, this.timeboxEndDateField, 'Name', 'PlannedVelocity', 'PlanEstimate', 'Project'];
        if (this.getIncludeAll()){
            fields.push('WorkProducts');
        }
        return fields;
    },
getArtifactsLookback: function(timeboxGroups,status){
var timeboxesByOid = {},
key = "loading Artifacts",
deferred = Ext.create('Deft.Deferred'),
usePoints = this.getUsePoints();
var promises = [];
var fetchFields = ['ObjectID',this.timeboxType,'PlanEstimate'];
var promises = _.map(timeboxGroups, function(timeboxGroup) {
var timeboxOids = _.map(timeboxGroup, function(tbox) {
timeboxesByOid[tbox.get('ObjectID')] = tbox;
return tbox.get('ObjectID');
});
return this.fetchArtifactsLookback(this.modelName,fetchFields,timeboxOids,status,key)
}, this);
if (promises.length > 0){
Deft.Promise.all(promises).then({
scope: this,
failure: function(){
status.addError(key);
deferred.reject('Error loading artifacts');
},
success: function(groups) {
for (var i=0; i<groups.length; i++){
for (var j=0; j<groups[i].length; j++){
var artifact = groups[i][j];
var timeboxOid = artifact.get('Iteration');
if (!timeboxesByOid[timeboxOid]){
timeboxesByOid[timeboxOid] = 0;
}
timeboxesByOid[timeboxOid].addArtifact(usePoints, artifact.getData());
}
}
deferred.resolve(timeboxGroups);
}});
}
return deferred.promise;
},
fetchArtifactsLookback: function(model,fetchFields,timeboxOids,status,key){
var dataContext = this.getContext().getDataContext();
dataContext.includePermissions = false;
var filter = Rally.data.lookback.QueryFilter.and([{
property: '__At',
value: "current"
},{
property: '_TypeHierarchy',
value: model
},{
property: this.timeboxType,
operator: 'in',
value: timeboxOids
},{
property: '_ProjectHierarchy',
value: Rally.util.Ref.getOidFromRef(dataContext.project)
},{
property: 'AcceptedDate',
value: null
}
]);
var store = Ext.create('Rally.data.lookback.SnapshotStore', {
autoLoad: false,
context: dataContext,
fetch: fetchFields,
hydrate: [],
remoteSort: false,
sortConfig: {},
compress: true,
useHttpPost: true,
filters: filter,
exceptionHandler: function(proxy, request){
status.addError(key);
},
listeners: {
beforeload: function(){
status.progressStart(key);
},
load: function(){
status.progressEnd(key);
},
scope: this
},
limit: Infinity,
});
return store.load();
},
getArtifacts: function(timeboxGroups,status){
var timeboxesByOid = {},
key = "loading Artifacts",
deferred = Ext.create('Deft.Deferred'),
usePoints = this.getUsePoints();
var promises = [];
var fetchFields = ['ObjectID',this.timeboxType,'Project','PlanEstimate','ScheduleState','FormattedID'];
var promises = _.map(timeboxGroups, function(timeboxGroup) {
var timeboxOids = _.map(timeboxGroup, function(tbox) {
timeboxesByOid[tbox.get('ObjectID')] = tbox;
return tbox.get('ObjectID');
});
return this.fetchArtifacts(this.modelName,fetchFields,timeboxOids,status,key)
}, this);
var usePoints = this.getUsePoints();
if (promises.length > 0){
Deft.Promise.all(promises).then({
scope: this,
failure: function(){
status.addError(key);
deferred.reject('Error loading artifacts');
},
success: function(groups) {
for (var i=0; i<groups.length; i++){
for (var j=0; j<groups[i].length; j++){
var artifact = groups[i][j];
var timeboxOid = Rally.util.Ref.getOidFromRef(artifact.get('Iteration')._ref);
if (timeboxesByOid)
timeboxesByOid[timeboxOid].addArtifact(usePoints, artifact.getData());
}
}
deferred.resolve(timeboxGroups);
}
});
} else {
deferred.resolve(timeboxGroups);
}
return deferred.promise;
},
fetchArtifacts: function(modelName,fetchFields,timeboxOids,status,key, fullFetch){
var pageSize=2000;
var dataContext = this.getContext().getDataContext(),
filters = [{
property: 'Iteration.ObjectID',
operator: 'in',
value: timeboxOids
},{
property: "AcceptedDate",
value: null
}];
dataContext.includePermissions = false;
var updatedFetchFields = fetchFields.slice();
var shallowFetch = true;
if (fullFetch === true){
updatedFetchFields.push('EmailAddress');
console.log('updatedFetchFields',updatedFetchFields)
shallowFetch = false;
}
status.progressStart(key);
return Ext.create('Rally.data.wsapi.Store',{
model: modelName,
fetch: updatedFetchFields,
pageSize: pageSize,
limit: Infinity,
autoLoad: false,
context: dataContext,
useShallowFetch: shallowFetch,
enablePostGet: true,
filters: filters,
listeners: {
load: function(){
status.progressEnd(key);
}
}
}).load();
},
    getUsePoints: function(){
        // True when charting by sum of Plan Estimate instead of artifact count
        // (the 'points' setting may be stored as boolean or the string "true").
        return this.getSetting('points') === true || this.getSetting('points') == "true";
    },
    getIncludeAll: function(){
        // NOTE(review): duplicate of an earlier getIncludeAll declaration in
        // this object literal; being later, this one takes effect. It uses
        // loose equality ("==") but behaves the same for these values.
        return this.getSetting('includeAll') === true || this.getSetting('includeAll') == "true";
    },
    getModelScopedStateId: function(modelName, id) {
        // Persisted-state key scoped to both the current context and the model.
        return this.getContext().getScopedStateId(modelName + '-' + id);
    },
getSettingsFields: function() {
var timeboxTypeStore = Ext.create('Ext.data.Store', {
fields: ['name', 'value'],
data: [
{name: Constants.TIMEBOX_TYPE_ITERATION_LABEL, value: Constants.TIMEBOX_TYPE_ITERATION},
{name: Constants.TIMEBOX_TYPE_RELEASE_LABEL, value: Constants.TIMEBOX_TYPE_RELEASE},
]
});
var typeStoreData = [
{name: 'User Story', value: 'HierarchicalRequirement'},
];
// Called from getSettingsFields which is invoked before launch sets up the lowestPiType. Handle
// this case.
if (this.lowestPiType) {
typeStoreData.push({name: this.lowestPiType.get('Name'), value: this.lowestPiType.get('TypePath')})
}
var artifactTypeStore = Ext.create('Ext.data.Store', {
fields: ['name', 'value'],
data: typeStoreData
});
return [{
// xtype: 'combobox',
// name: 'artifactType',
// value: this.getSetting('artifactType'),
// fieldLabel: 'Artifact type',
// labelWidth: 150,
// store: artifactTypeStore,
// queryMode: 'local',
// displayField: 'name',
// valueField: 'value',
// listeners: {
// scope: this,
// change: function(field, newValue, oldValue) {
// if (newValue != oldValue) {
// this.updateSettingsValues({
// settings: {
// artifactType: newValue
// }
// });
// // Choice of artifact has changed
// this.setModelFieldsForType(newValue);
// // If Feature, also update timebox type to 'Release'
// var timeboxTypeControl = Ext.ComponentManager.get('timeboxType');
// var pointsControl = Ext.ComponentManager.get('points');
// if (this.isPiTypeSelected()) {
// timeboxTypeControl.setValue(Constants.TIMEBOX_TYPE_RELEASE);
// timeboxTypeControl.disable(); // User cannot pick other timeboxes for Features
// pointsControl.setValue(false);
// pointsControl.disable();
// } else {
// timeboxTypeControl.enable();
// pointsControl.enable();
// }
// }
// }
// }
// },
// {
// xtype: 'combobox',
// name: 'timeboxType',
// id: 'timeboxType',
// value: this.getSetting('timeboxType'),
// fieldLabel: 'Timebox type',
// labelWidth: 150,
// store: timeboxTypeStore,
// queryMode: 'local',
// displayField: 'name',
// valueField: 'value',
// disabled: this.isPiTypeSelected(),
// listeners: {
// scope: this,
// change: function(field, newValue, oldValue) {
// if (newValue != oldValue) {
// this.updateSettingsValues({
// settings: {
// timeboxType: newValue
// }
// });
// // Choice of timebox has changed
// this.setTimeboxFieldsForType(newValue);
// }
// }
// }
// },
// {
xtype: 'rallynumberfield',
name: 'timeboxCount',
value: this.getSetting('timeboxCount'),
fieldLabel: "Timebox Count",
labelWidth: 150,
minValue: 1,
allowDecimals: false
}, {
xtype: 'rallycheckboxfield',
name: 'points',
id: 'points',
value: this.getSetting('points'),
fieldLabel: 'Show by sum of Plan Estimate.',
labelWidth: 150
}, {
type: 'query',
fieldLabel: 'Project Query String'
}
]
}
}); |
#!/usr/bin/env bash
# Exit on error, since this is an executable and not a sourced file.
set -eo pipefail

# ASDF_DIRENV_DEBUG turns on bash tracing for this script and, later, for the
# generated envrc content.
if [ -n "$ASDF_DIRENV_DEBUG" ]; then
  set -x
fi

# Load direnv stdlib if not already loaded
if [ -z "$direnv" ] && [ -z "$(declare -f -F watch_file)" ]; then
  # We need to eval direnv stdlib, but before doing so
  # we need to determine which direnv executable we should use.
  #
  # The fastest way is a user specified $ASDF_DIRENV_BIN.
  #
  # Otherwise if we find direnv in PATH, we use that.
  #
  # Otherwise - but a bit slow path - we ask asdf to resolve direnv.
  #
  # If all of this fails, we inform the users with an error.
  #
  direnv="${ASDF_DIRENV_BIN}"
  if [ -z "$direnv" ]; then
    direnv="$(command -v direnv || true)" # prevent exit on failure
  fi
  if [ -z "$direnv" ]; then
    direnv="$(asdf which direnv 2>/dev/null || true)" # prevent exit on failure
  fi
  if [ -z "$direnv" ]; then
    cat <<-'EOF' >&2
No direnv executable found. Please do one of the following:
With a system installed direnv
export ASDF_DIRENV_BIN="$(command -v direnv)"
With an asdf installed direnv
export ASDF_DIRENV_BIN="$(asdf which direnv)"
EOF
    exit 1
  fi
  # "$direnv" is resolved now; bring the direnv stdlib functions
  # (watch_file, PATH_add, log_status, ...) into this shell.
  eval "$("$direnv" stdlib)"
fi
# This is inspired by https://stackoverflow.com/a/1116890
_follow_symlink() {
  # Resolve a (possibly chained) symlink and print its physical absolute path.
  # NOTE: changes the current directory; callers invoke it via command
  # substitution, so the cd happens inside a subshell.
  path="$1"
  # Start in the directory of the (possible) symlink.
  cd "$(dirname "$path")"
  filename="$(basename "$path")"
  # Follow symlinks until we run out of symlinks.
  # This probably will loop forever if there's a cycle.
  # Fix: test "$filename" (relative to the directory we just cd'd into).
  # The previous test of "$path" could be relative to the *previous*
  # directory after the first hop, ending the loop too early.
  while [ -L "$filename" ]; do
    path="$(readlink "$filename")"
    cd "$(dirname "$path")"
    filename="$(basename "$path")"
  done
  # Now print out the final directory we ended up in, plus the final filename.
  echo "$(pwd -P)/$filename"
}
_load_asdf_utils() {
  # Source asdf's utils.bash (provides with_plugin_env, get_plugin_path,
  # find_versions, get_install_path, ...) unless it is already loaded.
  if [ -z "$(declare -f -F with_plugin_env)" ]; then
    # Locate the asdf installation by resolving the symlink behind the `asdf`
    # executable and going up two directories (.../bin/asdf -> .../).
    ASDF_DIR="${ASDF_DIR:-"$(_follow_symlink "$(command -v asdf)" | xargs dirname | xargs dirname)"}"
    # libexec is a Homebrew specific thing. See
    # https://github.com/asdf-community/asdf-direnv/issues/95 for details.
    local lib_file
    # The brace glob checks both the plain and the Homebrew (libexec) layout;
    # ls errors are discarded so only an existing path survives.
    lib_file=$(ls "$ASDF_DIR"/{lib,libexec/lib}/utils.bash 2>/dev/null || true)
    if [ ! -f "$lib_file" ]; then
      log_error "Could not find asdf utils.bash file in $ASDF_DIR"
      return 1
    fi
    # shellcheck source=/dev/null # we don't want shellcheck trying to find this file
    source "$lib_file"
  fi
}
_cache_dir() {
  # Print (and create, if needed) the asdf-direnv cache directory, honoring
  # the XDG base-directory convention.
  XDG_CACHE_HOME=${XDG_CACHE_HOME:-$HOME/.cache}
  local cache_path
  cache_path="$XDG_CACHE_HOME/asdf-direnv"
  mkdir -p "$cache_path"
  echo "$cache_path"
}
_asdf_cached_envrc() {
  # Print the path of a cached envrc file for the current tool selection,
  # generating and caching it first when missing (or always, when debugging).
  # Fix: generating_dump_dir and generating_env_file were leaking as globals;
  # they are now declared local like the rest.
  local dump_dir generating_dump_dir tools_file tools_cksum env_file generating_env_file
  dump_dir="$(_cache_dir)/env"
  generating_dump_dir="$(_cache_dir)/env-generating"
  tools_file="$(_local_versions_file)"
  tools_cksum="$(_cksum "$tools_file" "$ASDF_DIRENV_DEBUG" "$@")"
  env_file="$dump_dir/$tools_cksum"
  # Fast path: reuse the cached file. Skipped while debugging so traces rerun.
  if [ -f "$env_file" ] && [ -z "$ASDF_DIRENV_DEBUG" ]; then
    echo "$env_file"
    return 0
  fi
  _load_asdf_utils
  mkdir -p "$dump_dir" "$generating_dump_dir"
  # Drop stale cache entries that share the leading checksum components
  # (same working directory / arguments) before writing the fresh one.
  rm "$dump_dir/$(echo "$tools_cksum" | cut -d- -f1-2)"-* 2>/dev/null || true
  log_status "Creating env file $env_file"
  # Write to a temp file first instead of directly to ${env_file} so if we
  # crash while generating the file, we don't leave the (broken) cached file
  # around.
  # We use a randomly chosen filename to allow two different processes to
  # generate this at the same time without stepping on each other's toes.
  generating_env_file="$(mktemp "$generating_dump_dir/$tools_cksum.XXXX")"
  _asdf_envrc "$tools_file" | _no_dups >"${generating_env_file}"
  mv "${generating_env_file}" "${env_file}"
  echo "$env_file"
}
_asdf_envrc() {
  # Emit the full envrc content for the given tool-versions file: plugins not
  # pinned locally first, then the locally pinned ones.
  local tools_file="$1"
  if [ -n "$ASDF_DIRENV_DEBUG" ]; then
    # Make the generated envrc trace itself when direnv evaluates it.
    echo 'set -x'
  fi
  _load_global_plugins_env "$tools_file"
  _load_local_plugins_env "$tools_file"
}
# compute a checksum to see if we can use the cache or have to compute the environment again
_cksum() {
  # Fold everything that can influence the generated environment into a single
  # dash-separated checksum string (one cksum word per input, joined by '-').
  local file="$1"
  # working directory, the arguments given to use_asdf, direnv status, and the tools-version modification times.
  # shellcheck disable=SC2154 # var is referenced but not assigned.
  # NOTE: "$direnv" is the binary resolved by the preamble at file scope.
  cksum <(pwd) <(echo "$@") <("$direnv" status) <(test -f "$file" && ls -l "$file") | cut -d' ' -f 1 | tr $'\n' '-' | sed -e 's/-$//'
}
_tgrep() {
  # grep wrapper whose exit status is always 0, even when nothing matches.
  if ! grep "$@"; then
    true
  fi
}
_tail_r() {
  # portable version of tail -r: print stdin's lines in reverse order
  awk '{ lines[NR] = $0 } END { for (i = NR; i >= 1; i--) print lines[i] }'
}
_no_dups() {
  # Keep only the first occurrence of each input line, preserving order.
  awk 'seen[$0] == 0 { seen[$0] = 1; print }'
}
_each_do() {
  # Run "$@" once per stdin line, appending the line as the last argument.
  # NOTE: the invoked command must not itself consume stdin, or it will eat
  # the remaining lines.
  while IFS=$'\n' read -r line; do
    "$@" "$line"
  done
}
_local_versions_file() {
  # Print the nearest .tool-versions found up the directory tree (via
  # direnv's find_up), falling back to ~/.tool-versions; print nothing
  # when neither exists.
  local found
  found="$(find_up .tool-versions)"
  if [ ! -f "$found" ]; then
    found="$HOME/.tool-versions"
  fi
  if [ -f "$found" ]; then
    echo "$found"
  fi
}
_plugins_in_file() {
  # List plugin names mentioned in a tool-versions file: strip comments,
  # keep the first word of each line, drop blank lines, and collapse
  # adjacent duplicates.
  local versions_file=$1
  sed -e 's/#.*$//' "$versions_file" | cut -d' ' -f1 | awk NF | uniq
}
_all_plugins_list() {
  # Print the names of all installed asdf plugins, one per line.
  # get_plugin_path comes from asdf's utils.bash (see _load_asdf_utils).
  find "$(get_plugin_path)" -maxdepth 1 -mindepth 1 -exec basename '{}' \;
}
_except_local_plugins_list() {
  # List installed plugins, excluding the ones pinned in the given
  # tool-versions file (all of them when that file does not exist).
  local versions_file=$1
  if [ ! -f "$versions_file" ]; then
    _all_plugins_list
    return
  fi
  _all_plugins_list | _new_items <(_plugins_in_file "$versions_file")
}
_load_global_plugins_env() {
  # Emit envrc lines for every plugin NOT pinned in the given tool-versions
  # file, in reverse-sorted order so the emission order is deterministic.
  local tool_versions=$1
  _except_local_plugins_list "$tool_versions" | sort | _tail_r | _each_do _load_plugin_version_and_file
}
_load_local_plugins_env() {
  # Emit envrc lines for the plugins pinned in the given tool-versions file,
  # processed in reverse file order.
  local tool_versions=$1
  if [ -f "$tool_versions" ]; then
    _plugins_in_file "$tool_versions" | _tail_r | _each_do _load_plugin_version_and_file
  fi
}
# from asdf plugin_current_command
_load_plugin_version_and_file() {
  # Emit envrc lines (log_status / PATH_add / watch_file ...) for every
  # version of one plugin selected for the current directory.
  local plugin_name=$1
  local versions_and_path
  versions_and_path="$(find_versions "$plugin_name" "$(pwd)")"
  if test -z "$versions_and_path"; then
    return 0
  fi
  local path
  path=$(cut -d '|' -f 2 <<<"$versions_and_path")
  local versions=()
  # A single line may carry several whitespace-separated versions; collect
  # each line's entries in reverse, over the deduped, reversed line list.
  while IFS=$' \t' read -r -a inline_versions; do
    for ((idx = ${#inline_versions[@]} - 1; idx >= 0; idx--)); do
      versions+=("${inline_versions[idx]}")
    done
  done <<<"$(cut -d '|' -f 1 <<<"$versions_and_path" | uniq | _tail_r)"
  for version in "${versions[@]}"; do
    # This *emits* a log_status line into the generated envrc (it is not a
    # log call executed here).
    echo log_status "using asdf ${plugin_name} ${version}"
    # Fix: the not-installed message used "$plugin", which is unset in this
    # function (the local is plugin_name), so the plugin name was blank.
    _plugin_env_bash "$plugin_name" "$version" "$plugin_name $version not installed. Run 'asdf install' and then 'direnv reload'."
  done
  # Re-generate the cached environment whenever the versions file changes.
  if [ -f "$path" ]; then
    printf 'watch_file %q\n' "$path"
  fi
}
_new_items() {
  # Output only the lines from STDIN not present in $1 file
  # First pass records every line of "$1"; second pass (stdin, "-") prints
  # lines that were never recorded.
  awk 'NR == FNR { seen[$0] = 1; next } seen[$0] != 1' "$1" -
}
_path_changed_entries() {
  # Print the PATH entries that appear in $2 (new PATH) but not in $1 (old
  # PATH), one entry per line.
  local previous current
  previous="$(echo -n "$1" | tr ':' $'\n')"
  current="$(echo -n "$2" | tr ':' $'\n')"
  echo -n "$current" | _new_items <(echo -n "$previous")
}
_direnv_bash_dump() {
  # Dump the current environment as bash, one `export` statement per line
  # (splits direnv's single-line dump and strips trailing semicolons).
  "$direnv" dump bash | sed -e $'s#;export#\\\nexport#g' | sed -e 's#;$##'
}
_plugin_env_bash() {
  # Emit envrc lines (PATH_add / source_env) that activate one installed
  # version of one plugin. $3 is the error logged when it is not installed.
  local plugin="${1}"
  local version="${2}"
  local not_installed_message="${3}"
  # NOTE: unlike asdf, asdf-direnv does not support other installation types.
  local install_type="version"
  # Fix: declare these local and reset install_path, so a stale value from a
  # previous call can no longer leak into the "system"-version branch below.
  local plugin_path install_path
  install_path=""
  plugin_path=$(get_plugin_path "$plugin")
  if [ ! -d "$plugin_path" ]; then
    log_error "asdf plugin not installed: $plugin"
    exit 1
  fi
  if [ "$version" != "system" ]; then
    install_path=$(get_install_path "$plugin" "$install_type" "$version")
    if [ ! -d "$install_path" ]; then
      log_error "$not_installed_message"
      exit 1
    fi
  fi
  # If plugin has custom-shims, add them first to they appear last on final PATH
  if [ -d "$plugin_path/shims" ]; then
    echo PATH_add "$plugin_path/shims"
  fi
  # Add plugin bin_paths to PATH. We add them in reverse order to preserve original PATH.
  # NOTE: The plugin returns a list of space-separated dirs relative to install_dir.
  # NOTE: We don't add custom shims into path.
  # NOTE: If install_path is empty (ex. "system" version), skip this step so /bin doesn't get added to PATH.
  if [ -n "$install_path" ]; then
    # Fix: use this function's "$plugin"; the previous "$plugin_name" only
    # worked through bash dynamic scoping from the caller.
    list_plugin_bin_paths "$plugin" "$version" "$install_type" |
      tr $' ' $'\n' | _tail_r | sed -e "s#^#$install_path/#" | _each_do echo PATH_add
  fi
  # If the plugin defines custom environment, source it.
  if [ -f "${plugin_path}/bin/exec-env" ]; then
    echo "ASDF_INSTALL_TYPE='$install_type' ASDF_INSTALL_VERSION='$version' ASDF_INSTALL_PATH='$install_path' source_env ${plugin_path}/bin/exec-env"
  fi
}
# When executed directly (not sourced): a first argument starting with "_"
# invokes one of the internal helpers above by name; anything else is
# delegated to the resolved direnv binary.
if [ "$0" == "${BASH_SOURCE[0]}" ]; then
  case "$1" in
    "_"*)
      "$@"
      ;;
    *)
      exec "$direnv" "$@"
      ;;
  esac
fi
|
#!/usr/bin/env bash
set -Eeuo pipefail

# Regenerates versions.json with the newest upstream Go releases per version
# directory, including per-architecture download URLs and sha256 checksums.
# see https://golang.org/dl/
# Map of bashbrew architecture names to golang.org download-file suffixes.
declare -A golangArches=(
	['amd64']='linux-amd64'
	['arm32v7']='linux-armv6l'
	['arm64v8']='linux-arm64'
	['i386']='linux-386'
	['ppc64le']='linux-ppc64le'
	['s390x']='linux-s390x'
	['windows-amd64']='windows-amd64'
	# special case (fallback)
	['src']='src'
)

cd "$(dirname "$(readlink -f "$BASH_SOURCE")")"

# No arguments: refresh every version directory and rebuild versions.json from
# scratch; otherwise update only the requested versions in the existing file.
versions=( "$@" )
if [ ${#versions[@]} -eq 0 ]; then
	versions=( */ )
	json='{}'
else
	json="$(< versions.json)"
fi
versions=( "${versions[@]%/}" )

# Page through the golang GCS bucket listing to build, per Go version, the
# set of arches with a downloadable artifact.
# https://github.com/golang/go/issues/13220
allGoVersions='{}'
apiBaseUrl='https://www.googleapis.com/storage/v1/b/golang/o?fields=nextPageToken,items%2Fname'
pageToken=
while [ "$pageToken" != 'null' ]; do
	page="$(curl -fsSL "$apiBaseUrl&pageToken=$pageToken")"
	# now that we have this page's data, get ready for the next request
	pageToken="$(jq <<<"$page" -r '.nextPageToken')"
	# for each API page, collect the "version => arches" pairs we find
	goVersions="$(
		jq <<<"$page" -r '
			[
				.items as $items
				| $items[].name
				| match("^go([0-9].*)[.](src|(linux|windows)-[^.]+)[.](tar[.]gz|zip)$")
				| .captures[0].string as $version
				| .captures[1].string as $arch
				| { version: $version, arch: $arch }
			] | reduce .[] as $o (
				{};
				.[$o.version] += [ $o.arch ]
			)
		'
	)"
	# ... and aggregate them together into a single object of "version => arches" pairs
	allGoVersions="$(
		jq <<<"$allGoVersions"$'\n'"$goVersions" -cs '
			map(to_entries) | add
			| reduce .[] as $o (
				{};
				.[$o.key] = (
					$o.value + .[$o.key]
					| unique
				)
			)
		'
	)"
done

for version in "${versions[@]}"; do
	rcVersion="${version%-rc}"
	# "-rc" directories match only pre-release suffixes (beta/rc...);
	# stable directories match only suffix-free upstream versions.
	rcRegex='^[^a-z]*$'
	if [ "$rcVersion" != "$version" ]; then
		# beta, rc, etc
		rcRegex='[a-z]+[0-9]*$'
	fi
	export rcVersion rcRegex
	# Newest matching upstream version that ships a source tarball.
	fullVersion="$(
		jq <<<"$allGoVersions" -r '
			. as $map
			| keys[] | select(
				startswith(env.rcVersion)
				and (
					ltrimstr(env.rcVersion)
					| test(env.rcRegex)
				)
				and ($map[.] | index("src"))
			)
		' | sort -rV | head -1
	)"
	if [ -z "$fullVersion" ]; then
		echo >&2 "warning: cannot find full version for $version"
		continue
	fi
	echo "$version: $fullVersion"

	export fullVersion
	doc="$(
		jq -nc '{
			version: env.fullVersion,
			arches: {},
			variants: [],
		}'
	)"

	arches="$(jq <<<"$allGoVersions" -c '.[env.fullVersion]')"
	# loop over bashbrew arches, get sha256 for each one supported
	for bashbrewArch in "${!golangArches[@]}"; do
		arch="${golangArches[$bashbrewArch]}"
		export arch
		if jq <<<"$arches" -e 'index(env.arch) != null' > /dev/null; then
			file="go${fullVersion}.$arch.$([[ "$arch" == windows-* ]] && echo 'zip' || echo 'tar.gz')"
			url="https://storage.googleapis.com/golang/$file"
			# https://github.com/golang/build/commit/24f7399f96feb8dd2fc54f064e47a886c2f8bb4a
			# Arches without a published .sha256 file are silently skipped.
			if sha256="$(curl -fsSL "$url.sha256")"; then
				export bashbrewArch arch url sha256
				doc="$(
					jq <<<"$doc" -c '.arches[env.bashbrewArch] = {
						arch: env.arch,
						url: env.url,
						sha256: env.sha256,
					}'
				)"
			fi
		fi
	done

	# order here controls the order of the library/ file
	for variant in \
		buster \
		stretch \
		\
		alpine3.12 \
		alpine3.11 \
		\
		windows/windowsservercore-{1809,ltsc2016} \
		windows/nanoserver-1809 \
	; do
		base="${variant%%/*}" # "buster", "windows", etc.
		# Only record variants that have a Dockerfile directory for this version.
		[ -d "$version/$base" ] || continue
		# Skip Windows variants when upstream ships no windows-amd64 artifact.
		if [ "$base" = 'windows' ] && ! jq <<<"$arches" -e 'index("windows-amd64")' > /dev/null; then
			continue
		fi
		export variant
		doc="$(jq <<<"$doc" -c '.variants += [ env.variant ]')"
	done

	export version
	json="$(jq <<<"$json" -c --argjson doc "$doc" '.[env.version] = $doc')"
done

jq <<<"$json" -S . > versions.json
|
<gh_stars>0
import React, { useState, useEffect, useRef, useImperativeHandle, forwardRef } from 'react';
import Button from '@material-ui/core/Button';
import TextField from '@material-ui/core/TextField';
import Dialog from '@material-ui/core/Dialog';
import DialogActions from '@material-ui/core/DialogActions';
import DialogContent from '@material-ui/core/DialogContent';
import DialogContentText from '@material-ui/core/DialogContentText';
import DialogTitle from '@material-ui/core/DialogTitle';
import Grid from '@material-ui/core/Grid';
import OutlinedInput from '@material-ui/core/OutlinedInput';
import InputLabel from '@material-ui/core/InputLabel'
import FormControlLabel from '@material-ui/core/FormControlLabel';
import Checkbox from '@material-ui/core/Checkbox';
import DateFnsUtils from '@date-io/date-fns';
import { KeyboardDatePicker, MuiPickersUtilsProvider } from '@material-ui/pickers';
import viLocale from "date-fns/locale/vi";
import { createMuiTheme } from "@material-ui/core";
import { ThemeProvider } from "@material-ui/styles";
import Typography from "@material-ui/core/Typography";
import { makeStyles } from '@material-ui/core/styles';
import CircularProgress from '@material-ui/core/CircularProgress';
import Tabs from '@material-ui/core/Tabs';
import Tab from '@material-ui/core/Tab';
import Box from '@material-ui/core/Box';
import Divider from '@material-ui/core/Divider';
import IconButton from '@material-ui/core/IconButton';
import AddCircleIcon from '@material-ui/icons/AddCircle';
import { convertToRightDate } from '../Utilities/utils.js';
import DeleteIcon from '@material-ui/icons/Delete';
import ClearIcon from '@material-ui/icons/Clear';
import AddAPhotoIcon from '@material-ui/icons/AddAPhoto';
import AddPhotoAlternateIcon from '@material-ui/icons/AddPhotoAlternate';
import { Alert } from '@material-ui/lab';
// Theme with a tighter spacing scale, used to wrap the date pickers below.
const defaultMaterialTheme = createMuiTheme({
  spacing: 2,
});
// Style hook shared by CreateDialog and its photo-upload sub-component.
const useStyles = makeStyles((theme) => ({
  button: {
    margin: theme.spacing(1),
    backgroundColor: 'white',
    color: 'green',
    borderColor: 'green',
    padding: '5px 10px',
    width: '82px'
  },
  loader: {
    color: 'green'
  },
  root: {
    "&$selected": {
      outline: 0
    }
  },
  selected: {},
  addButton: {
    color: 'green'
  },
  upload: {
    backgroundColor: 'white',
    color: 'green',
    borderColor: 'green',
    // Fix: removed the duplicate `margin: theme.spacing(1)` key — in an
    // object literal the later `margin: 0` always won, so the first key was
    // dead code and this preserves the rendered style exactly.
    margin: 0
  },
  alert: {
    '& .MuiAlert-message': {
      padding: 0,
      display: 'flex',
      justifyContent: 'center',
      flexDirection: 'column'
    },
    '& .MuiTypography-caption': {
      marginBottom: 0
    }
  }
}));
function TabPanel(props) {
const { children, value, index } = props;
const classes = useStyles();
return (
<div
role="tabpanel"
hidden={value !== index}s
id={`simple-tabpanel-${index}`}
aria-labelledby={`simple-tab-${index}`}
>
{value === index && (
<Box p={3}>
<Typography>{children}</Typography>
</Box>
)}
</div>
);
}
// Build the id/aria-controls pair that links tab #index to its panel.
function a11yProps(index) {
  const tabId = `simple-tab-${index}`;
  const panelId = `simple-tabpanel-${index}`;
  return { id: tabId, 'aria-controls': panelId };
}
// const CreateDialog = React.memo(({ isOpen, hideCreateDialog, showSuccessSnackBar, prependDoituong }) =>
// Modal dialog for creating a new record: collects personal details plus a
// list of dated photos and POSTs everything as multipart form data to
// /api/doituong. Callbacks: hideCreateDialog closes the dialog,
// showSuccessSnackBar shows a toast, prependDoituong inserts the created row.
function CreateDialog({ isOpen, hideCreateDialog, showSuccessSnackBar, prependDoituong }) {
  const classes = useStyles();
  // Date-of-birth picker value; every other field is read via a ref on submit.
  const [ngaysinh, setNgaysinh] = useState(new Date(1995, 11, 31));
  const [isLoading, setLoader] = useState(false);
  const [errors, setErrors] = useState([]);
  const hovatenInputRef = useRef();
  const tenthuonggoiInputRef = useRef();
  const gioitinhnamInputRef = useRef();
  const gcnthanInputRef = useRef();
  const nhanthanInputRef = useRef();
  const lsngheInputRef = useRef();
  const ghichuInputRef = useRef();
  // Imperative handle exposing { hinhanh, times } from HinhanhInputWithRef.
  const hinhanhInputRef = useRef();
  // Validate required fields, assemble the multipart payload, and POST it.
  function submitInputs(event) {
    event.preventDefault();
    // Name and date of birth are mandatory; collect all error messages at once.
    if(hovatenInputRef.current.value.length == 0 || ngaysinh == null) {
      let errors = []
      if(hovatenInputRef.current.value.length == 0){
        errors.push('Họ tên không được để trống!');
      }
      if(ngaysinh == null){
        errors.push('Ngày sinh không được để trống!');
      }
      setErrors([...errors])
      return;
    }
    setLoader(true);
    var data = new FormData();
    data.append('hovaten', hovatenInputRef.current.value);
    data.append('tenthuonggoi', tenthuonggoiInputRef.current.value);
    // Dates are sent as ISO "YYYY-MM-DD" strings.
    data.append('ngaysinh', convertToRightDate(ngaysinh).toISOString().slice(0, 10));
    data.append('gioitinhnam', gioitinhnamInputRef.current.checked);
    data.append('gcnthan', gcnthanInputRef.current.value);
    data.append('nhanthan', nhanthanInputRef.current.value);
    data.append('lsnghe', lsngheInputRef.current.value);
    data.append('ghichu', ghichuInputRef.current.value);
    // Only rows with a chosen file are sent; the dates array is filtered with
    // the same null test so hinhanh[] and thoigian[] stay aligned.
    hinhanhInputRef.current.hinhanh.filter(i => i != null).forEach(i => {
      data.append('hinhanh[]', i);
    })
    hinhanhInputRef.current.hinhanh.forEach((item, index) => {
      if (item != null) {
        data.append('thoigian[]', convertToRightDate(hinhanhInputRef.current.times[index]).toISOString().slice(0, 10));
      }
    })
    // NOTE(review): debug dump of the outgoing form data — candidate for removal.
    for (var pair of data.entries()) {
      console.log(pair[0] + ', ' + pair[1]);
    }
    fetch('/api/doituong', {
      method: 'post',
      headers: {
        'X-CSRF-TOKEN': document.querySelector('meta[name="csrf-token"]').getAttribute('content')
      },
      body: data
    })
      .then((response) => response.json())
      // NOTE(review): non-2xx responses are not detected (no response.ok
      // check) — a server-side validation failure still reaches this branch.
      .then((data) => {
        setLoader(false);
        hideCreateDialogChild();
        showSuccessSnackBar("Tạo mới đối tượng thành công !");
        prependDoituong(data.success);
      })
      .catch((error) => {
        console.log('Request failed', error);
      });
  }
  // Close the dialog and clear any validation errors for the next opening.
  function hideCreateDialogChild() {
    hideCreateDialog();
    if(errors.length > 0) {
      setErrors([]);
    }
  }
  // NOTE(review): empty effect with no dependency array — runs after every
  // render and does nothing; candidate for removal.
  useEffect(() => {
  })
  return (
    <Dialog open={isOpen} aria-labelledby="form-dialog-title" fullWidth={true} maxWidth="sm">
      <DialogTitle id="form-dialog-title">Tạo Mới Đối Tượng</DialogTitle>
      <DialogContent>
        <Grid container spacing={2}>
          <Grid item sm={6} >
            <TextField
              autoFocus
              variant="outlined"
              margin="dense"
              label="<NAME>"
              type="text"
              fullWidth
              inputRef={hovatenInputRef}
            />
          </Grid>
          <Grid item sm={6}>
            <TextField
              variant="outlined"
              margin="dense"
              label="<NAME>"
              type="text"
              fullWidth
              inputRef={tenthuonggoiInputRef}
            />
          </Grid>
        </Grid>
        <Grid container spacing={2}>
          <Grid item sm={6}>
            <ThemeProvider theme={defaultMaterialTheme}>
              <MuiPickersUtilsProvider utils={DateFnsUtils} locale={viLocale}>
                <KeyboardDatePicker
                  autoOk
                  variant="inline"
                  inputVariant="outlined"
                  label="Ngày Sinh"
                  format="dd/MM/yyyy"
                  padding="small"
                  margin="dense"
                  invalidDateMessage="Sai định dạng"
                  onChange={(date) => {
                    setNgaysinh(date);
                  }}
                  value={ngaysinh}
                />
              </MuiPickersUtilsProvider>
            </ThemeProvider>
          </Grid>
          {/* <Grid item sm={2}>
          </Grid> */}
          <Grid item sm={6}>
            <Grid item xs={4}>
              <Typography variant="body2">
                Giới Tính:
              </Typography>
            </Grid>
            <Grid item xs={2}>
              <FormControlLabel
                control={
                  <Checkbox
                    name="checkedB"
                    color="primary"
                    style={{ padding: '4px', paddingLeft: '15px' }}
                    value="Nam"
                    inputRef={gioitinhnamInputRef}
                    defaultChecked={true}
                  />
                }
                label="Nam"
              />
            </Grid>
          </Grid>
        </Grid>
        <Grid container spacing={2}>
          <Grid item sm={12}>
            <InputLabel htmlFor="my-input">GCNTH Án</InputLabel>
            <OutlinedInput
              multiline
              rows={2}
              rowsMax={4}
              color="primary"
              fullWidth
              notched={false}
              inputRef={gcnthanInputRef}
            />
          </Grid>
        </Grid>
        <Grid container spacing={2}>
          <Grid item sm={12}>
            <InputLabel htmlFor="my-input">Nhân Thân</InputLabel>
            <OutlinedInput
              multiline
              rows={2}
              rowsMax={4}
              color="primary"
              fullWidth
              notched={false}
              inputRef={nhanthanInputRef}
              defaultValue="- Thường trú:
- Dân tộc:
- Tôn giáo:
- Họ tên cha:
- Nghề nghiệp cha:
- Họ tên mẹ:
- Nghề nghiệp mẹ:"
            />
          </Grid>
        </Grid>
        <Grid container spacing={2}>
          <Grid item sm={12}>
            <InputLabel>Lịch Sử Nghề Nghiệp</InputLabel>
            <OutlinedInput
              multiline
              rows={2}
              rowsMax={4}
              color="primary"
              fullWidth
              notched={false}
              inputRef={lsngheInputRef}
              defaultValue="-Nghề nghiệp (từ đến ): "
            />
          </Grid>
        </Grid>
        <Grid container spacing={2}>
          <Grid item sm={12}>
            <InputLabel htmlFor="my-input"><NAME></InputLabel>
            <OutlinedInput
              multiline
              rows={1}
              rowsMax={2}
              color="primary"
              fullWidth
              notched={false}
              inputRef={ghichuInputRef}
            />
          </Grid>
        </Grid>
        <Box m={2}>
          <Divider />
        </Box>
        <HinhanhInputWithRef ref={hinhanhInputRef} />
        {/* <DialogContentText>
          To subscribe to this website, please enter your email address here. We will send updates
          occasionally.
        </DialogContentText> */}
      </DialogContent>
      <DialogActions>
        <Grid container justify="space-between">
          <Grid item sm={8}>
            {errors != null && errors.length > 0 ? <Alert severity="error" className={classes.alert}>{errors.map(i =>
              <Typography variant="caption" display="block" gutterBottom>
                - {i}
              </Typography>
            )}</Alert> : ''}
          </Grid>
          <Grid item sm={4}>
            <Button variant="outlined" onClick={submitInputs} className={classes.button} disabled={isLoading}>
              {isLoading ? <CircularProgress color="primary" size={24} className={classes.loader} /> : "Tạo Mới"}
            </Button>
            <Button variant="outlined" onClick={hideCreateDialogChild}>
              Hủy Bỏ
            </Button>
          </Grid>
        </Grid>
      </DialogActions>
    </Dialog>
  );
}
// forwardRef wrapper so CreateDialog can read {hinhanh, times} via a ref.
const HinhanhInputWithRef = forwardRef(HinhanhInput)
function HinhanhInput(props, ref) {
const classes = useStyles();
const [file, setFile] = useState([null]);
const [hinhanh, setHinhanh] = useState([null]);
const [items, setItems] = useState(['1']);
const [times, setTimes] = useState([new Date()]);
function handleImgFileChange(e, index) {
let newArrFile = [...file];
newArrFile[index] = URL.createObjectURL(e.target.files[0]);
setFile([...newArrFile]);
let newArrHinhanh = [...hinhanh];
newArrHinhanh[index] = e.target.files[0];
setHinhanh([...newArrHinhanh]);
}
function handleDateChange(date, index) {
let newArrTimes = [...times];
newArrTimes[index] = date;
setTimes([...newArrTimes]);
}
useImperativeHandle(ref, () => ({
hinhanh: hinhanh,
times: times
}));
function handleAddButton(e) {
e.preventDefault();
let length = items.length;
let newItem = parseInt(items[items.length - 1]) + 1;
setItems([...items, newItem.toString()]);
setFile([...file, null]);
setHinhanh([...hinhanh, null]);
setTimes([...times, new Date()]);
console.log(items);
}
function handleRemoveItem(e, index) {
e.preventDefault();
setItems([...items.filter((it, ind) => ind != index)]);
setFile([...file.filter((it, ind) => ind != index)]);
setHinhanh([...hinhanh.filter((it, ind) => ind != index)]);
setTimes([...times.filter((it, ind) => ind != index)]);
}
return (
<Box mt={1}>
<Grid container spacing={2}>
<Grid item sm={11}>
<InputLabel>Hình Ảnh</InputLabel>
</Grid>
<Grid item sm={1}>
<IconButton onClick={handleAddButton}>
<AddCircleIcon className={classes.addButton} />
</IconButton>
</Grid>
</Grid>
{/* <Grid container spacing={2}>
<Grid item sm={6}>
<InputLabel htmlFor="my-input">Hình Ảnh</InputLabel>
<TextField type="file" onChange={handleImgFileChange} />
</Grid>
<Grid item sm={6}>
<img src={file} style={{ height: 'auto', width: 'auto', maxWidth: '100%' }} />
</Grid>
</Grid> */}
{items.map((item, index) =>
<React.Fragment key={item}>
<Grid container spacing={2}>
<Grid item sm={6}>
<Divider />
<Grid container justify="flex-end">
<Grid item sm={1}>
<TextField type="file" id={"file-create-" + index} style={{ display: 'none' }} onChange={(e) => {
e.preventDefault();
handleImgFileChange(e, index);
}} />
<IconButton
aria-label="delete"
size="small"
color="secondary"
onClick={(e) => { handleRemoveItem(e, index) }}
>
<ClearIcon />
</IconButton>
</Grid>
</Grid>
<Box>
<Grid container justify="space-around">
<Grid item sm={7}>
<ThemeProvider theme={defaultMaterialTheme}>
<MuiPickersUtilsProvider utils={DateFnsUtils} locale={viLocale}>
<KeyboardDatePicker
autoOk
variant="inline"
label="Ngày"
format="dd/MM/yyyy"
padding="small"
margin="dense"
onChange={(date) => {
handleDateChange(date, index)
}}
value={times[index]}
/>
</MuiPickersUtilsProvider>
</ThemeProvider>
</Grid>
<Grid item sm={4}>
<label htmlFor={"file-create-" + index}>
<IconButton color="primary" className={classes.upload} component="span">
<AddPhotoAlternateIcon fontSize="large" />
</IconButton>
</label>
</Grid>
</Grid>
</Box>
</Grid>
<Grid item sm={6}>
<img src={file[index]} style={{ height: 'auto', width: 'auto', maxWidth: '100%' }} />
</Grid>
</Grid>
</React.Fragment>
)}
</Box>
);
}
export default CreateDialog; |
#!/usr/bin/env bash
# Utility to use local user, taken from:
# https://github.com/mavlink/MAVSDK/blob/main/docker/entrypoint.sh
# Use LOCAL_USER_ID if passed in at runtime: remap the container's "user"
# account to that UID, then drop privileges before running the command.
if [ -n "${LOCAL_USER_ID}" ]; then
  echo "Starting with UID: $LOCAL_USER_ID"
  # Quote expansions so unexpected values cannot word-split or glob.
  usermod -u "$LOCAL_USER_ID" user
  export HOME=/home/user
  chown -R user:user "$HOME"
  exec su-exec user "$@"
else
  # No UID override requested: run the command as the current user.
  exec "$@"
fi
|
# Launch the Android emulator for the AVD named "and81" in the background.
emulator @and81 &
|
#!/bin/bash
set -e

# Requirements for this script
#  installed versions of: FSL (version 5.0.6), HCP-gradunwarp (HCP version 1.0.2)
#  environment: FSLDIR and PATH for gradient_unwarp.py
# Name used in log/error messages throughout this script.
SCRIPT_NAME="T2WToT1wDistortionCorrectAndReg.sh"

# -----------------------------------------------------------------------------------
#  Constants for specification of Averaging and Readout Distortion Correction Method
# -----------------------------------------------------------------------------------
SIEMENS_METHOD_OPT="SiemensFieldMap"
SPIN_ECHO_METHOD_OPT="TOPUP"
GENERAL_ELECTRIC_METHOD_OPT="GeneralElectricFieldMap"
# Legacy alias: treated identically to SiemensFieldMap (see Usage).
FIELDMAP_METHOD_OPT="FIELDMAP"
################################################ SUPPORT FUNCTIONS ##################################################
# Print the command-line help text for this script to stdout.
Usage() {
  echo "`basename $0`: Script for performing gradient-nonlinearity and susceptibility-inducted distortion correction on T1w and T2w images, then also registering T2w to T1w"
  echo " "
  echo "Usage: `basename $0` [--workingdir=<working directory>]"
  echo "            --t1=<input T1w image>"
  echo "            --t1brain=<input T1w brain-extracted image>"
  echo "            --t2=<input T2w image>"
  echo "            --t2brain=<input T2w brain-extracted image>"
  echo "            [--fmapmag=<input fieldmap magnitude image>]"
  echo "            [--fmapphase=<input fieldmap phase images (single 4D image containing 2x3D volumes)>]"
  echo "            [--fmapgeneralelectric=<input General Electric field map (two volumes: 1. field map in deg, 2. magnitude)>]"
  echo "            [--echodiff=<echo time difference for fieldmap images (in milliseconds)>]"
  echo "            [--SEPhaseNeg=<input spin echo negative phase encoding image>]"
  echo "            [--SEPhasePos=<input spin echo positive phase encoding image>]"
  echo "            [--echospacing=<effective echo spacing of fMRI image, in seconds>]"
  echo "            [--seunwarpdir=<direction of distortion according to voxel axes>]"
  echo "            --t1sampspacing=<sample spacing (readout direction) of T1w image - in seconds>"
  echo "            --t2sampspacing=<sample spacing (readout direction) of T2w image - in seconds>"
  echo "            --unwarpdir=<direction of distortion according to voxel axes (post reorient2std)>"
  echo "            --ot1=<output corrected T1w image>"
  echo "            --ot1brain=<output corrected, brain-extracted T1w image>"
  echo "            --ot1warp=<output warpfield for distortion correction of T1w image>"
  echo "            --ot2=<output corrected T2w image>"
  echo "            --ot2brain=<output corrected, brain-extracted T2w image>"
  echo "            --ot2warp=<output warpfield for distortion correction of T2w image>"
  echo "            --method=<method used for readout distortion correction>"
  echo ""
  echo "                ${FIELDMAP_METHOD_OPT}"
  echo "                  equivalent to ${SIEMENS_METHOD_OPT} (see below)"
  echo "                  ${SIEMENS_METHOD_OPT} is preferred. This option is maintained for"
  echo "                  backward compatibility."
  echo "                ${SPIN_ECHO_METHOD_OPT}"
  echo "                  use Spin Echo Field Maps for readout distortion correction"
  echo "                ${GENERAL_ELECTRIC_METHOD_OPT}"
  echo "                  use General Electric specific Gradient Echo Field Maps for"
  echo "                  readout distortion correction"
  echo "                ${SIEMENS_METHOD_OPT}"
  echo "                  use Siemens specific Gradient Echo Field Maps for readout"
  echo "                  distortion correction"
  echo ""
  echo "            [--topupconfig=<topup config file>]"
  echo "            [--gdcoeffs=<gradient distortion coefficients (SIEMENS file)>]"
  echo "            --useT2=<False if T2w image is poor or not available. Default is True>"
}
# function for parsing options
# getopt1 <name> <args...>: print the value of the first "<name>=value"
# argument and return 0; print nothing when the option is absent.
getopt1() {
    sopt="$1"
    shift 1
    # Quote "$@"/"$fn" so values containing spaces or glob characters survive
    # intact (the original unquoted echo|grep|wc pipeline split and globbed
    # them); a case glob replaces the regex match.
    for fn in "$@" ; do
        case "$fn" in
            "${sopt}="*)
                # Strip the shortest "*=" prefix — equivalent to sed "s/^${sopt}=//".
                echo "${fn#*=}"
                return 0
                ;;
        esac
    done
}
# defaultopt <value...>: echo the first argument. Callers use it as
# `VAR=$(defaultopt $VAR fallback)` — $VAR is expanded UNQUOTED on purpose,
# so when it is empty it vanishes and the fallback becomes $1.
defaultopt() {
    echo $1
}
################################################### OUTPUT FILES #####################################################
# For distortion correction:
#
# Output files (in $WD): Magnitude Magnitude_brain Phase FieldMap
# Magnitude_brain_warppedT1w Magnitude_brain_warppedT1w2${TXwImageBrainBasename}
# fieldmap2${T1wImageBrainBasename}.mat FieldMap2${T1wImageBrainBasename}
# FieldMap2${T1wImageBrainBasename}_ShiftMap
# FieldMap2${T1wImageBrainBasename}_Warp ${T1wImageBasename} ${T1wImageBrainBasename}
# Plus the versions with T1w -> T2w
#
# Output files (not in $WD): ${OutputT1wTransform} ${OutputT1wImage} ${OutputT1wImageBrain}
# Note that these outputs are actually copies of the last three entries in the $WD list
#
#
# For registration:
#
# Output images (in $WD/T2w2T1w): sqrtT1wbyT2w T2w_reg.mat T2w_reg_init.mat
# T2w_dc_reg (the warp field)
# T2w_reg (the warped image)
# Output images (not in $WD): ${OutputT2wTransform} ${OutputT2wImage}
# Note that these outputs are copies of the last two images (respectively) from the T2w2T1w subdirectory
################################################## OPTION PARSING #####################################################
# Just give usage if no arguments specified
if [ $# -eq 0 ] ; then Usage; exit 0; fi
# check for correct options
if [ $# -lt 17 ] ; then Usage; exit 1; fi

# parse arguments
WD=`getopt1 "--workingdir" $@`
T1wImage=`getopt1 "--t1" $@`
T1wImageBrain=`getopt1 "--t1brain" $@`
T2wImage=`getopt1 "--t2" $@`
T2wImageBrain=`getopt1 "--t2brain" $@`
MagnitudeInputName=`getopt1 "--fmapmag" $@`
PhaseInputName=`getopt1 "--fmapphase" $@`
GEB0InputName=`getopt1 "--fmapgeneralelectric" $@`
TE=`getopt1 "--echodiff" $@`
SpinEchoPhaseEncodeNegative=`getopt1 "--SEPhaseNeg" $@`
SpinEchoPhaseEncodePositive=`getopt1 "--SEPhasePos" $@`
DwellTime=`getopt1 "--echospacing" $@`
SEUnwarpDir=`getopt1 "--seunwarpdir" $@`
T1wSampleSpacing=`getopt1 "--t1sampspacing" $@`
T2wSampleSpacing=`getopt1 "--t2sampspacing" $@`
UnwarpDir=`getopt1 "--unwarpdir" $@`
OutputT1wImage=`getopt1 "--ot1" $@`
OutputT1wImageBrain=`getopt1 "--ot1brain" $@`
OutputT1wTransform=`getopt1 "--ot1warp" $@`
OutputT2wImage=`getopt1 "--ot2" $@`
OutputT2wTransform=`getopt1 "--ot2warp" $@`
DistortionCorrection=`getopt1 "--method" $@`
TopupConfig=`getopt1 "--topupconfig" $@`
GradientDistortionCoeffs=`getopt1 "--gdcoeffs" $@`
UseJacobian=`getopt1 "--usejacobian" $@`
useT2=`getopt1 "--useT2" $@`

# default parameters
# defaultopt yields "." when WD is empty (unquoted-expansion trick).
WD=`defaultopt $WD .`

# Normalize image paths by stripping NIfTI extensions; T2w paths only when a
# usable T2w was provided ($useT2 is run as a command: "true"/"false").
T1wImage=`${FSLDIR}/bin/remove_ext $T1wImage`
T1wImageBrain=`${FSLDIR}/bin/remove_ext $T1wImageBrain`
if $useT2; then
    T2wImage=`${FSLDIR}/bin/remove_ext $T2wImage`
    T2wImageBrain=`${FSLDIR}/bin/remove_ext $T2wImageBrain`
fi
T1wImageBrainBasename=`basename "$T1wImageBrain"`
T1wImageBasename=`basename "$T1wImage"`
if $useT2; then
    T2wImageBrainBasename=`basename "$T2wImageBrain"`
    T2wImageBasename=`basename "$T2wImage"`
fi

# Process both modalities, or just T1w when no usable T2w exists.
if $useT2; then Modalities="T1w T2w"; else Modalities="T1w"; fi

echo " "
echo " START: ${SCRIPT_NAME}"

mkdir -p $WD
mkdir -p ${WD}/FieldMap

# Record the input options in a log file
echo "$0 $@" >> $WD/log.txt
echo "PWD = `pwd`" >> $WD/log.txt
echo "date: `date`" >> $WD/log.txt
echo " " >> $WD/log.txt
########################################## DO WORK ##########################################

# Build an FSL-style field map (Magnitude / Magnitude_brain / FieldMap in $WD)
# using the preprocessing pipeline that matches the requested method.
case $DistortionCorrection in

    ${FIELDMAP_METHOD_OPT} | ${SIEMENS_METHOD_OPT})
        # --------------------------------------
        # -- Siemens Gradient Echo Field Maps --
        # --------------------------------------

        ### Create fieldmaps (and apply gradient non-linearity distortion correction)
        echo " "
        echo " "
        echo " "
        ${HCPPIPEDIR_Global}/FieldMapPreprocessingAll.sh \
            --workingdir=${WD}/FieldMap \
            --fmapmag=${MagnitudeInputName} \
            --fmapphase=${PhaseInputName} \
            --echodiff=${TE} \
            --ofmapmag=${WD}/Magnitude \
            --ofmapmagbrain=${WD}/Magnitude_brain \
            --ofmap=${WD}/FieldMap \
            --gdcoeffs=${GradientDistortionCoeffs}
        ;;

    ${GENERAL_ELECTRIC_METHOD_OPT})
        # -----------------------------------------------
        # -- General Electric Gradient Echo Field Maps --
        # -----------------------------------------------

        ### Create fieldmaps (and apply gradient non-linearity distortion correction)
        echo " "
        echo " "
        echo " "
        ${HCPPIPEDIR_Global}/GeneralElectricFieldMapPreprocessingAll.sh \
            --workingdir=${WD}/FieldMap \
            --fmap=${GEB0InputName} \
            --ofmapmag=${WD}/Magnitude \
            --ofmapmagbrain=${WD}/Magnitude_brain \
            --ofmap=${WD}/FieldMap \
            --gdcoeffs=${GradientDistortionCoeffs}
        ;;

    ${SPIN_ECHO_METHOD_OPT})
        # --------------------------
        # -- Spin Echo Field Maps --
        # --------------------------

        # Pick the scout image whose phase-encode polarity matches the
        # requested unwarp direction.
        if [[ ${SEUnwarpDir} = "x" || ${SEUnwarpDir} = "y" ]] ; then
            ScoutInputName="${SpinEchoPhaseEncodePositive}"
        elif [[ ${SEUnwarpDir} = "-x" || ${SEUnwarpDir} = "-y" || ${SEUnwarpDir} = "x-" || ${SEUnwarpDir} = "y-" ]] ; then
            ScoutInputName="${SpinEchoPhaseEncodeNegative}"
        fi

        # Use topup to distortion correct the scout scans
        # using a blip-reversed SE pair "fieldmap" sequence
        ${HCPPIPEDIR_Global}/TopupPreprocessingAll.sh \
            --workingdir=${WD}/FieldMap \
            --phaseone=${SpinEchoPhaseEncodeNegative} \
            --phasetwo=${SpinEchoPhaseEncodePositive} \
            --scoutin=${ScoutInputName} \
            --echospacing=${DwellTime} \
            --unwarpdir=${SEUnwarpDir} \
            --ofmapmag=${WD}/Magnitude \
            --ofmapmagbrain=${WD}/Magnitude_brain \
            --ofmap=${WD}/FieldMap \
            --ojacobian=${WD}/Jacobian \
            --gdcoeffs=${GradientDistortionCoeffs} \
            --topupconfig=${TopupConfig} \
            --usejacobian=${UseJacobian}
        ;;

    *)
        echo "${SCRIPT_NAME} - ERROR - Unable to create FSL-suitable readout distortion correction field map"
        echo "${SCRIPT_NAME}           Unrecognized distortion correction method: ${DistortionCorrection}"
        exit 1
esac
### LOOP over available modalities ###
# For each modality (T1w and, when enabled, T2w): warp the fieldmap into the
# structural space, convert it to a shift map / warp field, and apply it to
# undistort the image and its brain-extracted version.
for TXw in $Modalities ; do
    # set up required variables
    if [ $TXw = T1w ] ; then
        TXwImage=$T1wImage
        TXwImageBrain=$T1wImageBrain
        TXwSampleSpacing=$T1wSampleSpacing
        TXwImageBasename=$T1wImageBasename
        TXwImageBrainBasename=$T1wImageBrainBasename
    else
        TXwImage=$T2wImage
        TXwImageBrain=$T2wImageBrain
        TXwSampleSpacing=$T2wSampleSpacing
        TXwImageBasename=$T2wImageBasename
        TXwImageBrainBasename=$T2wImageBrainBasename
    fi
    # Forward warp the fieldmap magnitude and register to TXw image (transform phase image too)
    ${FSLDIR}/bin/fugue --loadfmap=${WD}/FieldMap --dwell=${TXwSampleSpacing} --saveshift=${WD}/FieldMap_ShiftMap${TXw}.nii.gz
    ${FSLDIR}/bin/convertwarp --relout --rel --ref=${WD}/Magnitude --shiftmap=${WD}/FieldMap_ShiftMap${TXw}.nii.gz --shiftdir=${UnwarpDir} --out=${WD}/FieldMap_Warp${TXw}.nii.gz
    # Register the (forward-warped) fieldmap magnitude to the structural image.
    # Regular fieldmaps use the whole-head magnitude; spin-echo uses the
    # brain-extracted magnitude against the brain-extracted structural.
    # NOTE(review): "warpped" is the original (misspelled) intermediate-file
    # name; kept as-is because later steps reference it.
    case $DistortionCorrection in
        ${FIELDMAP_METHOD_OPT} | ${SIEMENS_METHOD_OPT} | ${GENERAL_ELECTRIC_METHOD_OPT})
            ${FSLDIR}/bin/applywarp --rel --interp=spline -i ${WD}/Magnitude -r ${WD}/Magnitude -w ${WD}/FieldMap_Warp${TXw}.nii.gz -o ${WD}/Magnitude_warpped${TXw}
            ${FSLDIR}/bin/flirt -interp spline -dof 6 -in ${WD}/Magnitude_warpped${TXw} -ref ${TXwImage} -out ${WD}/Magnitude_warpped${TXw}2${TXwImageBasename} -omat ${WD}/Fieldmap2${TXwImageBasename}.mat -searchrx -30 30 -searchry -30 30 -searchrz -30 30
            ;;
        ${SPIN_ECHO_METHOD_OPT})
            ${FSLDIR}/bin/applywarp --rel --interp=spline -i ${WD}/Magnitude_brain -r ${WD}/Magnitude_brain -w ${WD}/FieldMap_Warp${TXw}.nii.gz -o ${WD}/Magnitude_brain_warpped${TXw}
            ${FSLDIR}/bin/flirt -interp spline -dof 6 -in ${WD}/Magnitude_brain_warpped${TXw} -ref ${TXwImageBrain} -out ${WD}/Magnitude_brain_warpped${TXw}2${TXwImageBasename} -omat ${WD}/Fieldmap2${TXwImageBasename}.mat -searchrx -30 30 -searchry -30 30 -searchrz -30 30
            ;;
        *)
            echo "${SCRIPT_NAME} - ERROR - Unable to apply readout distortion correction"
            echo "${SCRIPT_NAME}            Unrecognized distortion correction method: ${DistortionCorrection}"
            exit 1
    esac
    # Resample the fieldmap itself into structural space with the rigid
    # transform estimated above.
    ${FSLDIR}/bin/flirt -in ${WD}/FieldMap.nii.gz -ref ${TXwImage} -applyxfm -init ${WD}/Fieldmap2${TXwImageBasename}.mat -out ${WD}/FieldMap2${TXwImageBasename}
    # Convert to shift map then to warp field and unwarp the TXw
    ${FSLDIR}/bin/fugue --loadfmap=${WD}/FieldMap2${TXwImageBasename} --dwell=${TXwSampleSpacing} --saveshift=${WD}/FieldMap2${TXwImageBasename}_ShiftMap.nii.gz
    ${FSLDIR}/bin/convertwarp --relout --rel --ref=${TXwImageBrain} --shiftmap=${WD}/FieldMap2${TXwImageBasename}_ShiftMap.nii.gz --shiftdir=${UnwarpDir} --out=${WD}/FieldMap2${TXwImageBasename}_Warp.nii.gz
    ${FSLDIR}/bin/applywarp --rel --interp=spline -i ${TXwImage} -r ${TXwImage} -w ${WD}/FieldMap2${TXwImageBasename}_Warp.nii.gz -o ${WD}/${TXwImageBasename}
    # Make a brain image (transform to make a mask, then apply it)
    ${FSLDIR}/bin/applywarp --rel --interp=nn -i ${TXwImageBrain} -r ${TXwImageBrain} -w ${WD}/FieldMap2${TXwImageBasename}_Warp.nii.gz -o ${WD}/${TXwImageBrainBasename}
    ${FSLDIR}/bin/fslmaths ${WD}/${TXwImageBasename} -mas ${WD}/${TXwImageBrainBasename} ${WD}/${TXwImageBrainBasename}
    # Copy files to specified destinations
    if [ $TXw = T1w ] ; then
        ${FSLDIR}/bin/imcp ${WD}/FieldMap2${TXwImageBasename}_Warp ${OutputT1wTransform}
        ${FSLDIR}/bin/imcp ${WD}/${TXwImageBasename} ${OutputT1wImage}
        ${FSLDIR}/bin/imcp ${WD}/${TXwImageBrainBasename} ${OutputT1wImageBrain}
    fi
done
### END LOOP over modalities ###
# $useT2 holds the string "true" or "false" and is executed as a command,
# so this branch runs only when T2w processing is enabled.
if $useT2; then
    ### Now do T2w to T1w registration
    mkdir -p ${WD}/T2w2T1w
    # Main registration: between corrected T2w and corrected T1w
    ${FSLDIR}/bin/epi_reg --epi=${WD}/${T2wImageBrainBasename} --t1=${WD}/${T1wImageBasename} --t1brain=${WD}/${T1wImageBrainBasename} --out=${WD}/T2w2T1w/T2w_reg
    # Make a warpfield directly from original (non-corrected) T2w to corrected T1w (and apply it)
    ${FSLDIR}/bin/convertwarp --relout --rel --ref=${T1wImage} --warp1=${WD}/FieldMap2${T2wImageBasename}_Warp.nii.gz --postmat=${WD}/T2w2T1w/T2w_reg.mat -o ${WD}/T2w2T1w/T2w_dc_reg
    ${FSLDIR}/bin/applywarp --rel --interp=spline --in=${T2wImage} --ref=${T1wImage} --warp=${WD}/T2w2T1w/T2w_dc_reg --out=${WD}/T2w2T1w/T2w_reg
    # Add 1 to avoid exact zeros within the image (a problem for myelin mapping?)
    ${FSLDIR}/bin/fslmaths ${WD}/T2w2T1w/T2w_reg.nii.gz -add 1 ${WD}/T2w2T1w/T2w_reg.nii.gz -odt float
    # QA image
    ${FSLDIR}/bin/fslmaths ${WD}/T2w2T1w/T2w_reg -mul ${T1wImage} -sqrt ${WD}/T2w2T1w/sqrtT1wbyT2w -odt float
    # Copy files to specified destinations
    ${FSLDIR}/bin/imcp ${WD}/T2w2T1w/T2w_dc_reg ${OutputT2wTransform}
    ${FSLDIR}/bin/imcp ${WD}/T2w2T1w/T2w_reg ${OutputT2wImage}
fi
echo " "
echo " END: ${SCRIPT_NAME}"
echo " END: `date`" >> $WD/log.txt
########################################## QA STUFF ##########################################
# Writes a qa.txt of copy-pasteable fslview commands for manual inspection.
if [ -e $WD/qa.txt ] ; then rm -f $WD/qa.txt ; fi
echo "cd `pwd`" >> $WD/qa.txt
if $useT2; then
    echo "# View registration result of corrected T2w to corrected T1w image: showing both images + sqrt(T1w*T2w)" >> $WD/qa.txt
    echo "fslview ${OutputT1wImage} ${OutputT2wImage} ${WD}/T2w2T1w/sqrtT1wbyT2w" >> $WD/qa.txt
fi
echo "# Compare pre- and post-distortion correction for T1w" >> $WD/qa.txt
echo "fslview ${T1wImage} ${OutputT1wImage}" >> $WD/qa.txt
if $useT2; then
    echo "# Compare pre- and post-distortion correction for T2w" >> $WD/qa.txt
    echo "fslview ${T2wImage} ${WD}/${T2wImageBasename}" >> $WD/qa.txt
fi
##############################################################################################
|
#!/usr/bin/env bash
# This is bash lib file for the convenience build scripts
# Don't call this script directly
# Jacob Alexander 2015
# Make sure all of the relevant variables have been set
# NOTE: PartialMaps and DefaultMap do not have to be set
# Variables the including build script must define before sourcing this file.
VariablesList=(BuildPath BaseMap ScanModule MacroModule OutputModule DebugModule Chip Compiler)
ExitEarly=false
for var in ${VariablesList[@]}; do
    # ${!var+x} is indirect expansion: empty iff the variable named by $var
    # is unset. Collect all missing names before failing.
    if [ -z ${!var+x} ]; then
        echo "ERROR: Unset variable => '${var}'"
        ExitEarly=true
    fi
done
# Error was detected, exit immediately
if $ExitEarly; then
    exit 1
fi
# Prepare PartialMaps
# Joins the PartialMaps entries with ';' for CMake.
# NOTE(review): indexing starts at 1 and runs through the element count,
# which assumes the caller populated PartialMaps 1-indexed
# (PartialMaps[1]=... ), not the bash default of 0 — confirm with callers.
PartialMapsExpanded="${PartialMaps[1]}"
count=2 # Start the loop at index 2
while [ "$count" -le "${#PartialMaps[@]}" ]; do
    PartialMapsExpanded="${PartialMapsExpanded};${PartialMaps[count]}"
    count=$(($count+1))
done
# Internal Variables
CMakeListsPath="../.."
PROG_NAME=$(basename $0)
# Process the command line arguments (if any)
while (( "$#" >= "1" )); do
    # Scan each argument
    key="$1"
    case $key in
    -c|--cmakelists-path)
        CMakeListsPath="$2"
        shift
        ;;
    -f|--force-rebuild)
        # Remove the old directory first
        rm -rf "${BuildPath}"
        ;;
    -o|--output-path)
        BuildPath="$2"
        shift
        ;;
    -h|--help)
        echo "Usage: $PROG_NAME [options...]"
        echo ""
        echo "Convenience script to build the source of a given keyboard."
        echo "Edit '$PROG_NAME' to configure the keyboard options such as KLL layouts."
        echo ""
        echo "Arguments:"
        echo " -c, --cmakelists-path PATH    Set the path of CMakeLists.txt"
        echo "                               Default: ${CMakeListsPath}"
        echo " -f, --force-rebuild           Deletes the old build directory and rebuilds from scratch."
        echo " -o, --output-path PATH        Set the path of the build files."
        echo "                               Default: ${BuildPath}"
        echo " -h, --help                    This message."
        exit 1
        ;;
    *)
        echo "INVALID ARG: '$1'"
        exit 2
        ;;
    esac
    # Shift to the next argument
    shift
done
# Run CMake commands
## TODO Check for windows and do windows specific things ##
mkdir -p "${BuildPath}"
cd "${BuildPath}"
# Configure, passing every module/map selection through to CMake.
cmake -DCHIP="${Chip}" -DCOMPILER="${Compiler}" -DScanModule="${ScanModule}" -DMacroModule="${MacroModule}" -DOutputModule="${OutputModule}" -DDebugModule="${DebugModule}" -DBaseMap="${BaseMap}" -DDefaultMap="${DefaultMap}" -DPartialMaps="${PartialMapsExpanded}" "${CMakeListsPath}"
return_code=$?
if [ $return_code != 0 ] ; then
    echo "Error in cmake. Exiting..."
    exit $return_code
fi
make
return_code=$?
if [ $return_code != 0 ] ; then
    echo "Error in make. Exiting..."
    exit $return_code
fi
echo "Firmware has been compiled into: '${BuildPath}'"
# Return to the directory the script was invoked from.
cd -
|
<filename>lang/py/cookbook/v2/source/cb2_5_11_exm_3.py
def q(x):
    """Return a sorted copy of list *x* via three-way quicksort.

    Replaces the original obfuscated one-liner, which depended on the
    Python 2-only builtin ``cmp`` and therefore raises NameError on
    Python 3.  The original partitioned on ``cmp(i, x[0]) == s`` for
    s in (-1, 0, 1); this is the same algorithm written out.

    Parameters:
        x: list of mutually comparable items.

    Returns:
        A new sorted list (for ``len(x) <= 1`` the input list itself is
        returned, matching the original's ``... or x`` behavior).
    """
    # Base case: zero or one element is already sorted.
    if len(x) <= 1:
        return x
    pivot = x[0]
    # Three-way partition: items equal to the pivot form the middle run,
    # so duplicates do not recurse.
    return (q([i for i in x if i < pivot])
            + [i for i in x if i == pivot]
            + q([i for i in x if i > pivot]))
|
<gh_stars>0
package com.designre.blog.service;
import com.designre.blog.model.entity.Comment;
/**
 * Sends notification e-mails triggered by blog comment activity.
 */
public interface EmailService {
    /** Notify the site admin that {@code comment} was posted. */
    void sendEmailToAdmin(Comment comment);

    /**
     * Notify a user about {@code comment}.
     *
     * @param replyEmail recipient address — presumably the author of the
     *                   comment being replied to; confirm with implementations.
     */
    void sendEmailToUser(Comment comment, String replyEmail);
}
|
import { useContext } from "react";
import { Box, Flex, Link } from "rebass";
import { ContentContext } from "../providers/ContentProvider";
import Brand from "./Brand";
import { WidthControlledContainer } from "./Containers";
// Site footer fixed to the bottom of the viewport. Renders the brand and,
// only when the content provider supplies `issuesLink`, an "Issues" link.
const Footer = () => {
  // Shared site content (e.g. issuesLink) from the ContentProvider context.
  const content = useContext(ContentContext);
  return (
    <Box as="footer" sx={{position: 'fixed', bottom: 0, left: 0, right: 0}} >
      <WidthControlledContainer>
        <Flex alignItems="center">
          <Brand />
          {content.issuesLink && <Link ml={5} href={content.issuesLink} color="text" sx={{':visited': {
            color: 'initial',
            textDecoration: 'none'
          }}}>Issues</Link> }
        </Flex>
      </WidthControlledContainer>
    </Box>
  )
}

export default Footer;
|
import string
def clean_string(s):
    """Return *s* with CR/LF characters and ASCII punctuation removed.

    Behaviorally identical to the original chained
    ``replace('\\r','').replace('\\n','')`` plus per-character
    comprehension, but performed in a single C-level ``str.translate``
    pass instead of three Python-level passes.

    Parameters:
        s: input string (may be empty).

    Returns:
        The cleaned string; other whitespace (spaces, tabs) is preserved.
    """
    # A None mapping table with a delete-set removes every listed character.
    return s.translate(str.maketrans('', '', '\r\n' + string.punctuation))
# Demo: strip CR/LF and punctuation from a sample sentence and print it.
cleaned_string = clean_string("India is a diversified country!\r\nIt's known for its rich culture.")
print(cleaned_string)
#!/bin/sh
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -e

export TEST=true

# Run from the repository root (script lives three levels down).
cd "$(dirname "$0")/../../.."

# Temp files holding the actual and the expected submodule commit lists.
submodules=$(mktemp /tmp/submXXXXXX)
want_submodules=$(mktemp /tmp/submXXXXXX)

# First awk field of `git submodule` is the checked-out commit hash.
git submodule | awk '{ print $1 }' | sort > "$submodules"
# Expected pinned hashes; the trailing path/tag text is stripped by awk.
# (Data below must stay exactly as-is — it is compared against git output.)
cat << EOF | awk '{ print $1 }' | sort > "$want_submodules"
 74d91756c11bc22f9b0108b94da9326f7f9e376f third_party/abseil-cpp (74d9175)
 090faecb454fbd6e6e17a75ef8146acb037118d4 third_party/benchmark (v1.5.0)
 73594cde8c9a52a102c4341c244c833aa61b9c06 third_party/bloaty (remotes/origin/wide-14-g73594cd)
 b29b21a81b32ec273f118f589f46d56ad3332420 third_party/boringssl (remotes/origin/chromium-stable)
 afc30d43eef92979b05776ec0963c9cede5fb80f third_party/boringssl-with-bazel (fips-20180716-116-gafc30d43e)
 e982924acee7f7313b4baa4ee5ec000c5e373c30 third_party/cares/cares (cares-1_15_0)
 c181f78882e54c0e5c63f332562ef6954ee7932f third_party/envoy-api (heads/master)
 28f50e0fed19872e0fd50dd23ce2ee8cd759338e third_party/gflags (v2.2.0-5-g30dbc81)
 80ed4d0bbf65d57cc267dfc63bd2584557f11f9b third_party/googleapis (common-protos-1_3_1-915-g80ed4d0bb)
 c9ccac7cb7345901884aabf5d1a786cfa6e2f397 third_party/googletest (6e2f397)
 09745575a923640154bcf307fba8aedff47f240a third_party/protobuf (v3.7.0-rc.2-247-g09745575)
 e143189bf6f37b3957fb31743df6a1bcf4a8c685 third_party/protoc-gen-validate (v0.0.10)
 94324803a497c8f76dbc78df393ef629d3a9f3c3 third_party/udpa (heads/master)
 9effcbcb27f0a665f9f345030188c0b291e32482 third_party/upb (heads/master)
 cacf7f1d4e3d44d871b605da3b647f07d718623f third_party/zlib (v1.2.11)
EOF

# Under `set -e`, a non-empty diff (exit status 1) aborts the script,
# which is the failure signal for this sanity check.
diff -u "$submodules" "$want_submodules"

rm "$submodules" "$want_submodules"
|
<gh_stars>1-10
package com.acmvit.acm_app.ui.members;
import android.app.Application;
import androidx.lifecycle.LiveData;
import androidx.lifecycle.MediatorLiveData;
import androidx.lifecycle.MutableLiveData;
import com.acmvit.acm_app.model.User;
import com.acmvit.acm_app.pref.SessionManager;
import com.acmvit.acm_app.repository.MembersRepository;
import com.acmvit.acm_app.ui.ActivityViewModel;
import com.acmvit.acm_app.ui.base.BaseViewModel;
import com.acmvit.acm_app.util.reactive.SingleSourceMediatorLD;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
/**
 * ViewModel for the members bottom sheet.
 *
 * Exposes the full member list from {@link MembersRepository}, filtered live
 * by a search query: an empty query shows everyone with the signed-in user
 * pinned to the front; a non-empty query keeps only names containing the
 * query (case-insensitive), ordered by where the match occurs in the name.
 */
public class MembersBottomViewModel extends BaseViewModel {
    private static final String TAG = "MembersBottomViewModel";
    // Latest successful member list from the repository.
    private final SingleSourceMediatorLD<List<User>> users = new SingleSourceMediatorLD<>();
    // Members after applying the current search filter; observed by the UI.
    private final MediatorLiveData<List<User>> filteredUsers = new MediatorLiveData<>();
    private final MembersRepository membersRepository;
    private final SessionManager sessionManager;
    // Current query; initialized to "" so getValue() is never null below.
    private final MutableLiveData<String> searchText = new MutableLiveData<>(
        ""
    );

    /**
     * @param activityViewModel activity-scoped view model providing the session
     * @param application       application context for the base class
     */
    public MembersBottomViewModel(
        ActivityViewModel activityViewModel,
        Application application
    ) {
        super(activityViewModel, application);
        sessionManager = getActivityViewModel().getSessionManager();
        membersRepository = MembersRepository.getInstance();
        // Mirror only successful repository emissions (data != null) into users.
        users.addSource(
            membersRepository.getAllUsers(),
            usersRes -> {
                if (usersRes.data != null) {
                    users.setValue(usersRes.data);
                }
            }
        );
        // Recomputes filteredUsers whenever either source below changes.
        Runnable filterTask = () -> {
            List<User> input = users.getValue();
            if (input == null) return;
            List<User> filteredUsersL = new ArrayList<>();
            // Empty query: include everyone; the signed-in user is inserted
            // at index 0 so they always appear first.
            if (searchText.getValue().isEmpty()) {
                for (User user : input) {
                    if (
                        !user
                            .getUser_id()
                            .equals(
                                sessionManager.getUserDetails().getUser_id()
                            )
                    ) {
                        filteredUsersL.add(user);
                    } else {
                        filteredUsersL.add(0, user);
                    }
                }
                filteredUsers.setValue(filteredUsersL);
                return;
            }
            // Non-empty query: case-insensitive substring match on the name.
            for (User user : input) {
                if (
                    user
                        .getName()
                        .toLowerCase()
                        .contains(searchText.getValue().toLowerCase())
                ) {
                    filteredUsersL.add(user);
                }
            }
            // Earlier match position in the name ranks higher.
            Collections.sort(
                filteredUsersL,
                (user1, user2) -> {
                    int index1 = user1
                        .getName()
                        .toLowerCase()
                        .indexOf(searchText.getValue().toLowerCase());
                    int index2 = user2
                        .getName()
                        .toLowerCase()
                        .indexOf(searchText.getValue().toLowerCase());
                    return Integer.compare(index1, index2);
                }
            );
            filteredUsers.setValue(filteredUsersL);
        };
        filteredUsers.addSource(searchText, s -> filterTask.run());
        filteredUsers.addSource(users, i -> filterTask.run());
    }

    /** Filtered, ordered member list for the UI to observe. */
    public LiveData<List<User>> getFilteredUsers() {
        return filteredUsers;
    }

    /** Mutable query text; the UI writes to this to drive filtering. */
    public MutableLiveData<String> getSearchText() {
        return searchText;
    }
}
|
#!/bin/bash
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# URL from which the latest version of this script can be downloaded.
# Gitiles returns the result as base64 formatted, so the result needs to be
# decoded. See https://code.google.com/p/gitiles/issues/detail?id=7 for
# more information about this security precaution.
script_url="https://chromium.googlesource.com/chromium/src.git/+/master"
script_url+="/tools/android/adb_remote_setup.sh"
script_url+="?format=TEXT"

# Replaces this file with the latest version of the script and runs it.
update-self() {
  local script="${BASH_SOURCE[0]}"
  local new_script="${script}.new"
  local updater_script="${script}.updater"
  curl -sSf "$script_url" | base64 --decode > "$new_script" || return
  chmod +x "$new_script" || return

  # Replace this file with the newly downloaded script.
  # The heredoc is unquoted, so $new_script/$script/$@ expand NOW, baking the
  # current paths and arguments into the generated updater script.
  # NOTE(review): $@ is intentionally left as-is; being unquoted, arguments
  # containing spaces would be re-split when the updater runs.
  cat > "$updater_script" << EOF
#!/bin/bash
if mv "$new_script" "$script"; then
  rm -- "$updater_script"
else
  echo "Note: script update failed."
fi
ADB_REMOTE_SETUP_NO_UPDATE=1 exec /bin/bash "$script" $@
EOF
  exec /bin/bash "$updater_script" "$@"
}

# The generated updater sets ADB_REMOTE_SETUP_NO_UPDATE=1, so the re-exec'd
# script skips this block and proceeds to the real work.
if [[ "$ADB_REMOTE_SETUP_NO_UPDATE" -ne 1 ]]; then
  update-self "$@" || echo 'Note: script update failed'
fi

if [[ $# -ne 1 && $# -ne 2 ]]; then
  cat <<'EOF'
Usage: adb_remote_setup.sh REMOTE_HOST [REMOTE_ADB]
Configures adb on a remote machine to communicate with a device attached to the
local machine. This is useful for installing APKs, running tests, etc while
working remotely.
Arguments:
  REMOTE_HOST  hostname of remote machine
  REMOTE_ADB   path to adb on the remote machine (you can omit this if adb is in
               the remote host's path)
EOF
  exit 1
fi

remote_host="$1"
remote_adb="${2:-adb}"

# Ensure adb is in the local machine's path.
if ! which adb >/dev/null; then
  echo "error: adb must be in your local machine's path."
  exit 1
fi

# Renew (or obtain) a Kerberos ticket when kinit is available.
if which kinit >/dev/null; then
  # Allow ssh to succeed without typing your password multiple times.
  kinit -R || kinit
fi

# Ensure local and remote versions of adb are the same.
remote_adb_version=$(ssh "$remote_host" "$remote_adb version")
local_adb_version=$(adb version)
if [[ "$local_adb_version" != "$remote_adb_version" ]]; then
  echo >&2
  echo "WARNING: local adb is not the same version as remote adb." >&2
  echo "This should be fixed since it may result in protocol errors." >&2
  echo "  local  adb: $local_adb_version" >&2
  echo "  remote adb: $remote_adb_version" >&2
  echo >&2
  # Pause so the warning is seen before the interactive session starts.
  sleep 5
fi

# Kill the adb server on the remote host.
ssh "$remote_host" "$remote_adb kill-server"

# Start the adb server locally.
adb start-server

# Forward various ports from the remote host to the local host:
#   5037: adb
#   8001: http server
#   9031: sync server
#   9041: search by image server
#   9051: policy server
#   10000: net unittests
#   10201: net unittests
# -R forwards remote->local (remote adb talks to the local device server);
# -L forwards local->remote for the various dev services.
ssh -C \
  -R 5037:localhost:5037 \
  -L 8001:localhost:8001 \
  -L 9031:localhost:9031 \
  -L 9041:localhost:9041 \
  -L 9051:localhost:9051 \
  -R 10000:localhost:10000 \
  -R 10201:localhost:10201 \
  "$remote_host"
|
#include <linux/module.h>
#include <linux/of.h>
#include <linux/of_device.h>
#include <linux/platform_device.h>
#include <linux/string.h>
#include "axp-cfg.h"
/*
 * DT match table.  The compatible string is completed at runtime by
 * axp_device_tree_parse(), which appends the PMU type after "allwinner,"
 * and stores the caller's config struct in .data for the probe to pick up.
 */
static struct of_device_id axp_device_match[] = {
        { .compatible = "allwinner,", .data = NULL },
        { }
};
/*
 * Read u32 DT property "name" into axp_config->name, falling back to
 * def_value when the property is absent.  Every DT property name in this
 * driver matches its axp_config_info field name one-for-one, so the
 * stringified field name is used as the property key.  This replaces ~110
 * copy-pasted if/assignment pairs with one line each.
 */
#define AXP_OF_PROP_READ(name, def_value)                               \
do {                                                                    \
        if (of_property_read_u32(node, #name, &axp_config->name))       \
                axp_config->name = (def_value);                         \
} while (0)

/*
 * Probe: parse the AXP PMU configuration from the device tree node into the
 * axp_config_info struct attached as match data by axp_device_tree_parse().
 * Returns 0 on success, -ENODEV/-ENOENT when match data is missing, or
 * -EPERM when the DT node is disabled (pmu_used is cleared in that case).
 */
static s32 axp_device_probe(struct platform_device *pdev)
{
        struct device_node *node = pdev->dev.of_node;
        const struct of_device_id *device;
        struct axp_config_info *axp_config = NULL;

        device = of_match_device(axp_device_match, &pdev->dev);
        if (!device)
                return -ENODEV;

        axp_config = (struct axp_config_info *)device->data;
        if (NULL == axp_config)
                return -ENOENT;

        /* A disabled DT node means the PMU is not used on this board. */
        if (!of_device_is_available(node)) {
                axp_config->pmu_used = 0;
                pr_err("%s: pmu_used = 0", __func__);
                return -EPERM;
        }
        axp_config->pmu_used = 1;

#ifdef CONFIG_AXP_TWI_USED
        AXP_OF_PROP_READ(pmu_twi_id,                0);
        AXP_OF_PROP_READ(pmu_twi_addr,              0x34);
#endif
        AXP_OF_PROP_READ(pmu_irq_id,                32);
        AXP_OF_PROP_READ(pmu_battery_rdc,           BATRDC);
        AXP_OF_PROP_READ(pmu_battery_cap,           4000);
        AXP_OF_PROP_READ(pmu_batdeten,              1);
        AXP_OF_PROP_READ(pmu_chg_ic_temp,           0);
        /*
         * Charge currents and target voltage are given in mA/mV (defaults
         * derived from the uA/uV build-time constants), then scaled by 1000
         * for internal uA/uV use — same as the original code.
         */
        AXP_OF_PROP_READ(pmu_runtime_chgcur,        INTCHGCUR / 1000);
        axp_config->pmu_runtime_chgcur *= 1000;
        AXP_OF_PROP_READ(pmu_suspend_chgcur,        1200);
        axp_config->pmu_suspend_chgcur *= 1000;
        AXP_OF_PROP_READ(pmu_shutdown_chgcur,       1200);
        axp_config->pmu_shutdown_chgcur *= 1000;
        AXP_OF_PROP_READ(pmu_init_chgvol,           INTCHGVOL / 1000);
        axp_config->pmu_init_chgvol *= 1000;
        AXP_OF_PROP_READ(pmu_init_chgend_rate,      INTCHGENDRATE);
        AXP_OF_PROP_READ(pmu_init_chg_enabled,      1);
        AXP_OF_PROP_READ(pmu_init_bc_en,            0);
        AXP_OF_PROP_READ(pmu_init_adc_freq,         INTADCFREQ);
        AXP_OF_PROP_READ(pmu_init_adcts_freq,       INTADCFREQC);
        AXP_OF_PROP_READ(pmu_init_chg_pretime,      INTCHGPRETIME);
        AXP_OF_PROP_READ(pmu_init_chg_csttime,      INTCHGCSTTIME);
        AXP_OF_PROP_READ(pmu_batt_cap_correct,      1);
        AXP_OF_PROP_READ(pmu_chg_end_on_en,         0);
        AXP_OF_PROP_READ(ocv_coulumb_100,           0);
        /* 32-point OCV curve; defaults come from the OCVREG* constants. */
        AXP_OF_PROP_READ(pmu_bat_para1,             OCVREG0);
        AXP_OF_PROP_READ(pmu_bat_para2,             OCVREG1);
        AXP_OF_PROP_READ(pmu_bat_para3,             OCVREG2);
        AXP_OF_PROP_READ(pmu_bat_para4,             OCVREG3);
        AXP_OF_PROP_READ(pmu_bat_para5,             OCVREG4);
        AXP_OF_PROP_READ(pmu_bat_para6,             OCVREG5);
        AXP_OF_PROP_READ(pmu_bat_para7,             OCVREG6);
        AXP_OF_PROP_READ(pmu_bat_para8,             OCVREG7);
        AXP_OF_PROP_READ(pmu_bat_para9,             OCVREG8);
        AXP_OF_PROP_READ(pmu_bat_para10,            OCVREG9);
        AXP_OF_PROP_READ(pmu_bat_para11,            OCVREGA);
        AXP_OF_PROP_READ(pmu_bat_para12,            OCVREGB);
        AXP_OF_PROP_READ(pmu_bat_para13,            OCVREGC);
        AXP_OF_PROP_READ(pmu_bat_para14,            OCVREGD);
        AXP_OF_PROP_READ(pmu_bat_para15,            OCVREGE);
        AXP_OF_PROP_READ(pmu_bat_para16,            OCVREGF);
        AXP_OF_PROP_READ(pmu_bat_para17,            OCVREG10);
        AXP_OF_PROP_READ(pmu_bat_para18,            OCVREG11);
        AXP_OF_PROP_READ(pmu_bat_para19,            OCVREG12);
        AXP_OF_PROP_READ(pmu_bat_para20,            OCVREG13);
        AXP_OF_PROP_READ(pmu_bat_para21,            OCVREG14);
        AXP_OF_PROP_READ(pmu_bat_para22,            OCVREG15);
        AXP_OF_PROP_READ(pmu_bat_para23,            OCVREG16);
        AXP_OF_PROP_READ(pmu_bat_para24,            OCVREG17);
        AXP_OF_PROP_READ(pmu_bat_para25,            OCVREG18);
        AXP_OF_PROP_READ(pmu_bat_para26,            OCVREG19);
        AXP_OF_PROP_READ(pmu_bat_para27,            OCVREG1A);
        AXP_OF_PROP_READ(pmu_bat_para28,            OCVREG1B);
        AXP_OF_PROP_READ(pmu_bat_para29,            OCVREG1C);
        AXP_OF_PROP_READ(pmu_bat_para30,            OCVREG1D);
        AXP_OF_PROP_READ(pmu_bat_para31,            OCVREG1E);
        AXP_OF_PROP_READ(pmu_bat_para32,            OCVREG1F);
        AXP_OF_PROP_READ(pmu_ac_vol,                4400);
        AXP_OF_PROP_READ(pmu_usbpc_vol,             4400);
        AXP_OF_PROP_READ(pmu_ac_cur,                0);
        AXP_OF_PROP_READ(pmu_usbpc_cur,             0);
        AXP_OF_PROP_READ(pmu_pwroff_vol,            3300);
        AXP_OF_PROP_READ(pmu_pwron_vol,             2900);
        /* Power-key behavior. */
        AXP_OF_PROP_READ(pmu_powkey_off_time,       6000);
        AXP_OF_PROP_READ(pmu_powkey_off_func,       0);
        AXP_OF_PROP_READ(pmu_powkey_off_en,         1);
        AXP_OF_PROP_READ(pmu_powkey_off_delay_time, 0);
        AXP_OF_PROP_READ(pmu_powkey_long_time,      1500);
        AXP_OF_PROP_READ(pmu_pwrok_time,            64);
        AXP_OF_PROP_READ(pmu_powkey_on_time,        1000);
        AXP_OF_PROP_READ(pmu_reset_shutdown_en,     0);
        AXP_OF_PROP_READ(pmu_battery_warning_level1, 15);
        AXP_OF_PROP_READ(pmu_battery_warning_level2, 0);
        AXP_OF_PROP_READ(pmu_restvol_adjust_time,   30);
        AXP_OF_PROP_READ(pmu_ocv_cou_adjust_time,   60);
        AXP_OF_PROP_READ(pmu_chgled_func,           0);
        AXP_OF_PROP_READ(pmu_chgled_type,           0);
        AXP_OF_PROP_READ(pmu_vbusen_func,           1);
        AXP_OF_PROP_READ(pmu_reset,                 0);
        AXP_OF_PROP_READ(pmu_IRQ_wakeup,            0);
        AXP_OF_PROP_READ(pmu_hot_shutdown,          1);
        AXP_OF_PROP_READ(pmu_inshort,               0);
        AXP_OF_PROP_READ(power_start,               0);
        /* Battery temperature (NTC) thresholds and 16-point curve. */
        AXP_OF_PROP_READ(pmu_bat_temp_enable,       0);
        AXP_OF_PROP_READ(pmu_bat_charge_ltf,        0xA5);
        AXP_OF_PROP_READ(pmu_bat_charge_htf,        0x1F);
        AXP_OF_PROP_READ(pmu_bat_shutdown_ltf,      0xFC);
        AXP_OF_PROP_READ(pmu_bat_shutdown_htf,      0x16);
        AXP_OF_PROP_READ(pmu_bat_temp_para1,        0);
        AXP_OF_PROP_READ(pmu_bat_temp_para2,        0);
        AXP_OF_PROP_READ(pmu_bat_temp_para3,        0);
        AXP_OF_PROP_READ(pmu_bat_temp_para4,        0);
        AXP_OF_PROP_READ(pmu_bat_temp_para5,        0);
        AXP_OF_PROP_READ(pmu_bat_temp_para6,        0);
        AXP_OF_PROP_READ(pmu_bat_temp_para7,        0);
        AXP_OF_PROP_READ(pmu_bat_temp_para8,        0);
        AXP_OF_PROP_READ(pmu_bat_temp_para9,        0);
        AXP_OF_PROP_READ(pmu_bat_temp_para10,       0);
        AXP_OF_PROP_READ(pmu_bat_temp_para11,       0);
        AXP_OF_PROP_READ(pmu_bat_temp_para12,       0);
        AXP_OF_PROP_READ(pmu_bat_temp_para13,       0);
        AXP_OF_PROP_READ(pmu_bat_temp_para14,       0);
        AXP_OF_PROP_READ(pmu_bat_temp_para15,       0);
        AXP_OF_PROP_READ(pmu_bat_temp_para16,       0);

        return 0;
}
/* Platform driver whose probe parses the AXP PMU configuration from DT. */
static struct platform_driver axp_device_driver = {
        .probe = axp_device_probe,
        .driver = {
                .name = "axp-device",
                .owner = THIS_MODULE,
                .of_match_table = axp_device_match,
        },
};
/*
 * Complete the match-table compatible string ("allwinner," + pmu_type),
 * attach the caller's config struct as match data, and register the
 * platform driver so axp_device_probe() runs for a matching DT node.
 *
 * NOTE(review): strcat() appends into of_device_id.compatible (a fixed-size
 * char array) with no bounds check, and mutates the shared table — assumes
 * pmu_type is short and this is called at most once; confirm with callers.
 *
 * Returns the platform_driver_register() result (0 on success).
 */
s32 axp_device_tree_parse(char *pmu_type, struct axp_config_info *axp_config)
{
        s32 ret;

        strcat(axp_device_match[0].compatible, pmu_type);
        axp_device_match[0].data = (void *)axp_config;
        ret = platform_driver_register(&axp_device_driver);
        return ret;
}
|
# Activate the conda environment whose name matches the current project
# directory's basename (convention: env name == project folder name).
#
# Fixes: drops redundant $(echo ...) subshells, removes dead commented-out
# code, and quotes all expansions so paths/names containing spaces work.

# Find Project Name
DIR="${PWD}"
PROJECT_NAME="${DIR##*/}"

# Find Conda Installation Path and source its shell hook so that
# `conda activate` works in non-interactive shells.
CONDA_BASE="$(conda info --base)"
. "${CONDA_BASE}/etc/profile.d/conda.sh"

conda activate "${PROJECT_NAME}"
# Generate all Org1 crypto material (MSP + TLS) under
# ../crypto-config/peerOrganizations/org1.example.com using the Org1
# Fabric CA on localhost:7054: enroll the CA admin, then register and
# enroll peer0, user1 and the org admin.
createcertificatesForOrg1() {
    echo
    echo "Enroll the CA admin"
    echo
    mkdir -p ../crypto-config/peerOrganizations/org1.example.com/
    # fabric-ca-client reads and writes its state below this directory.
    export FABRIC_CA_CLIENT_HOME=${PWD}/../crypto-config/peerOrganizations/org1.example.com/
    fabric-ca-client enroll -u https://admin:adminpw@localhost:7054 --caname ca.org1.example.com --tls.certfiles ${PWD}/fabric-ca/org1/tls-cert.pem
    # NodeOUs config: classifies identities (client/peer/admin/orderer) by
    # the OU embedded in certificates issued by this CA.
    echo 'NodeOUs:
  Enable: true
  ClientOUIdentifier:
    Certificate: cacerts/localhost-7054-ca-org1-example-com.pem
    OrganizationalUnitIdentifier: client
  PeerOUIdentifier:
    Certificate: cacerts/localhost-7054-ca-org1-example-com.pem
    OrganizationalUnitIdentifier: peer
  AdminOUIdentifier:
    Certificate: cacerts/localhost-7054-ca-org1-example-com.pem
    OrganizationalUnitIdentifier: admin
  OrdererOUIdentifier:
    Certificate: cacerts/localhost-7054-ca-org1-example-com.pem
    OrganizationalUnitIdentifier: orderer' >${PWD}/../crypto-config/peerOrganizations/org1.example.com/msp/config.yaml
    echo
    echo "Register peer0"
    echo
    fabric-ca-client register --caname ca.org1.example.com --id.name peer0 --id.secret peer0pw --id.type peer --tls.certfiles ${PWD}/fabric-ca/org1/tls-cert.pem
    echo
    echo "Register user"
    echo
    fabric-ca-client register --caname ca.org1.example.com --id.name user1 --id.secret user1pw --id.type client --tls.certfiles ${PWD}/fabric-ca/org1/tls-cert.pem
    echo
    echo "Register the org admin"
    echo
    fabric-ca-client register --caname ca.org1.example.com --id.name org1admin --id.secret org1adminpw --id.type admin --tls.certfiles ${PWD}/fabric-ca/org1/tls-cert.pem
    mkdir -p ../crypto-config/peerOrganizations/org1.example.com/peers
    # -----------------------------------------------------------------------------------
    # Peer 0
    mkdir -p ../crypto-config/peerOrganizations/org1.example.com/peers/peer0.org1.example.com
    echo
    echo "## Generate the peer0 msp"
    echo
    fabric-ca-client enroll -u https://peer0:peer0pw@localhost:7054 --caname ca.org1.example.com -M ${PWD}/../crypto-config/peerOrganizations/org1.example.com/peers/peer0.org1.example.com/msp --csr.hosts peer0.org1.example.com --tls.certfiles ${PWD}/fabric-ca/org1/tls-cert.pem
    cp ${PWD}/../crypto-config/peerOrganizations/org1.example.com/msp/config.yaml ${PWD}/../crypto-config/peerOrganizations/org1.example.com/peers/peer0.org1.example.com/msp/config.yaml
    echo
    echo "## Generate the peer0-tls certificates"
    echo
    fabric-ca-client enroll -u https://peer0:peer0pw@localhost:7054 --caname ca.org1.example.com -M ${PWD}/../crypto-config/peerOrganizations/org1.example.com/peers/peer0.org1.example.com/tls --enrollment.profile tls --csr.hosts peer0.org1.example.com --csr.hosts localhost --tls.certfiles ${PWD}/fabric-ca/org1/tls-cert.pem
    # Copy the TLS material to the fixed file names the peer expects.
    cp ${PWD}/../crypto-config/peerOrganizations/org1.example.com/peers/peer0.org1.example.com/tls/tlscacerts/* ${PWD}/../crypto-config/peerOrganizations/org1.example.com/peers/peer0.org1.example.com/tls/ca.crt
    cp ${PWD}/../crypto-config/peerOrganizations/org1.example.com/peers/peer0.org1.example.com/tls/signcerts/* ${PWD}/../crypto-config/peerOrganizations/org1.example.com/peers/peer0.org1.example.com/tls/server.crt
    cp ${PWD}/../crypto-config/peerOrganizations/org1.example.com/peers/peer0.org1.example.com/tls/keystore/* ${PWD}/../crypto-config/peerOrganizations/org1.example.com/peers/peer0.org1.example.com/tls/server.key
    mkdir ${PWD}/../crypto-config/peerOrganizations/org1.example.com/msp/tlscacerts
    cp ${PWD}/../crypto-config/peerOrganizations/org1.example.com/peers/peer0.org1.example.com/tls/tlscacerts/* ${PWD}/../crypto-config/peerOrganizations/org1.example.com/msp/tlscacerts/ca.crt
    mkdir ${PWD}/../crypto-config/peerOrganizations/org1.example.com/tlsca
    cp ${PWD}/../crypto-config/peerOrganizations/org1.example.com/peers/peer0.org1.example.com/tls/tlscacerts/* ${PWD}/../crypto-config/peerOrganizations/org1.example.com/tlsca/tlsca.org1.example.com-cert.pem
    mkdir ${PWD}/../crypto-config/peerOrganizations/org1.example.com/ca
    cp ${PWD}/../crypto-config/peerOrganizations/org1.example.com/peers/peer0.org1.example.com/msp/cacerts/* ${PWD}/../crypto-config/peerOrganizations/org1.example.com/ca/ca.org1.example.com-cert.pem
    # --------------------------------------------------------------------------------------------------
    mkdir -p ../crypto-config/peerOrganizations/org1.example.com/users
    mkdir -p ../crypto-config/peerOrganizations/org1.example.com/users/User1@org1.example.com
    echo
    echo "## Generate the user msp"
    echo
    fabric-ca-client enroll -u https://user1:user1pw@localhost:7054 --caname ca.org1.example.com -M ${PWD}/../crypto-config/peerOrganizations/org1.example.com/users/User1@org1.example.com/msp --tls.certfiles ${PWD}/fabric-ca/org1/tls-cert.pem
    mkdir -p ../crypto-config/peerOrganizations/org1.example.com/users/Admin@org1.example.com
    echo
    echo "## Generate the org admin msp"
    echo
    fabric-ca-client enroll -u https://org1admin:org1adminpw@localhost:7054 --caname ca.org1.example.com -M ${PWD}/../crypto-config/peerOrganizations/org1.example.com/users/Admin@org1.example.com/msp --tls.certfiles ${PWD}/fabric-ca/org1/tls-cert.pem
    cp ${PWD}/../crypto-config/peerOrganizations/org1.example.com/msp/config.yaml ${PWD}/../crypto-config/peerOrganizations/org1.example.com/users/Admin@org1.example.com/msp/config.yaml
}
# createcertificatesForOrg1
# Generate all Org2 crypto material (MSP + TLS) under
# ../crypto-config/peerOrganizations/org2.example.com using the Org2
# Fabric CA on localhost:8054: enroll the CA admin, then register and
# enroll peer0, user1 and the org admin.
createCertificatesForOrg2() {
    echo
    echo "Enroll the CA admin"
    echo
    # FIX: was "mkdir -p /../crypto-config/..." -- an absolute path at the
    # filesystem root. Create the directory relative to the script, exactly
    # as the Org1/Org3 functions do.
    mkdir -p ../crypto-config/peerOrganizations/org2.example.com/
    # fabric-ca-client reads and writes its state below this directory.
    export FABRIC_CA_CLIENT_HOME=${PWD}/../crypto-config/peerOrganizations/org2.example.com/
    fabric-ca-client enroll -u https://admin:adminpw@localhost:8054 --caname ca.org2.example.com --tls.certfiles ${PWD}/fabric-ca/org2/tls-cert.pem
    # NodeOUs config: classifies identities (client/peer/admin/orderer) by
    # the OU embedded in certificates issued by this CA.
    echo 'NodeOUs:
  Enable: true
  ClientOUIdentifier:
    Certificate: cacerts/localhost-8054-ca-org2-example-com.pem
    OrganizationalUnitIdentifier: client
  PeerOUIdentifier:
    Certificate: cacerts/localhost-8054-ca-org2-example-com.pem
    OrganizationalUnitIdentifier: peer
  AdminOUIdentifier:
    Certificate: cacerts/localhost-8054-ca-org2-example-com.pem
    OrganizationalUnitIdentifier: admin
  OrdererOUIdentifier:
    Certificate: cacerts/localhost-8054-ca-org2-example-com.pem
    OrganizationalUnitIdentifier: orderer' >${PWD}/../crypto-config/peerOrganizations/org2.example.com/msp/config.yaml
    echo
    echo "Register peer0"
    echo
    fabric-ca-client register --caname ca.org2.example.com --id.name peer0 --id.secret peer0pw --id.type peer --tls.certfiles ${PWD}/fabric-ca/org2/tls-cert.pem
    echo
    echo "Register user"
    echo
    fabric-ca-client register --caname ca.org2.example.com --id.name user1 --id.secret user1pw --id.type client --tls.certfiles ${PWD}/fabric-ca/org2/tls-cert.pem
    echo
    echo "Register the org admin"
    echo
    fabric-ca-client register --caname ca.org2.example.com --id.name org2admin --id.secret org2adminpw --id.type admin --tls.certfiles ${PWD}/fabric-ca/org2/tls-cert.pem
    mkdir -p ../crypto-config/peerOrganizations/org2.example.com/peers
    mkdir -p ../crypto-config/peerOrganizations/org2.example.com/peers/peer0.org2.example.com
    # --------------------------------------------------------------
    # Peer 0
    echo
    echo "## Generate the peer0 msp"
    echo
    fabric-ca-client enroll -u https://peer0:peer0pw@localhost:8054 --caname ca.org2.example.com -M ${PWD}/../crypto-config/peerOrganizations/org2.example.com/peers/peer0.org2.example.com/msp --csr.hosts peer0.org2.example.com --tls.certfiles ${PWD}/fabric-ca/org2/tls-cert.pem
    cp ${PWD}/../crypto-config/peerOrganizations/org2.example.com/msp/config.yaml ${PWD}/../crypto-config/peerOrganizations/org2.example.com/peers/peer0.org2.example.com/msp/config.yaml
    echo
    echo "## Generate the peer0-tls certificates"
    echo
    fabric-ca-client enroll -u https://peer0:peer0pw@localhost:8054 --caname ca.org2.example.com -M ${PWD}/../crypto-config/peerOrganizations/org2.example.com/peers/peer0.org2.example.com/tls --enrollment.profile tls --csr.hosts peer0.org2.example.com --csr.hosts localhost --tls.certfiles ${PWD}/fabric-ca/org2/tls-cert.pem
    # Copy the TLS material to the fixed file names the peer expects.
    cp ${PWD}/../crypto-config/peerOrganizations/org2.example.com/peers/peer0.org2.example.com/tls/tlscacerts/* ${PWD}/../crypto-config/peerOrganizations/org2.example.com/peers/peer0.org2.example.com/tls/ca.crt
    cp ${PWD}/../crypto-config/peerOrganizations/org2.example.com/peers/peer0.org2.example.com/tls/signcerts/* ${PWD}/../crypto-config/peerOrganizations/org2.example.com/peers/peer0.org2.example.com/tls/server.crt
    cp ${PWD}/../crypto-config/peerOrganizations/org2.example.com/peers/peer0.org2.example.com/tls/keystore/* ${PWD}/../crypto-config/peerOrganizations/org2.example.com/peers/peer0.org2.example.com/tls/server.key
    mkdir ${PWD}/../crypto-config/peerOrganizations/org2.example.com/msp/tlscacerts
    cp ${PWD}/../crypto-config/peerOrganizations/org2.example.com/peers/peer0.org2.example.com/tls/tlscacerts/* ${PWD}/../crypto-config/peerOrganizations/org2.example.com/msp/tlscacerts/ca.crt
    mkdir ${PWD}/../crypto-config/peerOrganizations/org2.example.com/tlsca
    cp ${PWD}/../crypto-config/peerOrganizations/org2.example.com/peers/peer0.org2.example.com/tls/tlscacerts/* ${PWD}/../crypto-config/peerOrganizations/org2.example.com/tlsca/tlsca.org2.example.com-cert.pem
    mkdir ${PWD}/../crypto-config/peerOrganizations/org2.example.com/ca
    cp ${PWD}/../crypto-config/peerOrganizations/org2.example.com/peers/peer0.org2.example.com/msp/cacerts/* ${PWD}/../crypto-config/peerOrganizations/org2.example.com/ca/ca.org2.example.com-cert.pem
    # --------------------------------------------------------------------------------
    mkdir -p ../crypto-config/peerOrganizations/org2.example.com/users
    mkdir -p ../crypto-config/peerOrganizations/org2.example.com/users/User1@org2.example.com
    echo
    echo "## Generate the user msp"
    echo
    fabric-ca-client enroll -u https://user1:user1pw@localhost:8054 --caname ca.org2.example.com -M ${PWD}/../crypto-config/peerOrganizations/org2.example.com/users/User1@org2.example.com/msp --tls.certfiles ${PWD}/fabric-ca/org2/tls-cert.pem
    mkdir -p ../crypto-config/peerOrganizations/org2.example.com/users/Admin@org2.example.com
    echo
    echo "## Generate the org admin msp"
    echo
    fabric-ca-client enroll -u https://org2admin:org2adminpw@localhost:8054 --caname ca.org2.example.com -M ${PWD}/../crypto-config/peerOrganizations/org2.example.com/users/Admin@org2.example.com/msp --tls.certfiles ${PWD}/fabric-ca/org2/tls-cert.pem
    cp ${PWD}/../crypto-config/peerOrganizations/org2.example.com/msp/config.yaml ${PWD}/../crypto-config/peerOrganizations/org2.example.com/users/Admin@org2.example.com/msp/config.yaml
}
# createCertificateForOrg2
# Generate all Org3 crypto material (MSP + TLS) under
# ../crypto-config/peerOrganizations/org3.example.com using the Org3
# Fabric CA on localhost:10054: enroll the CA admin, then register and
# enroll peer0, user1 and the org admin.
createCertificatesForOrg3() {
    echo
    echo "Enroll the CA admin"
    echo
    mkdir -p ../crypto-config/peerOrganizations/org3.example.com/
    # fabric-ca-client reads and writes its state below this directory.
    export FABRIC_CA_CLIENT_HOME=${PWD}/../crypto-config/peerOrganizations/org3.example.com/
    fabric-ca-client enroll -u https://admin:adminpw@localhost:10054 --caname ca.org3.example.com --tls.certfiles ${PWD}/fabric-ca/org3/tls-cert.pem
    # NodeOUs config: classifies identities (client/peer/admin/orderer) by
    # the OU embedded in certificates issued by this CA.
    echo 'NodeOUs:
  Enable: true
  ClientOUIdentifier:
    Certificate: cacerts/localhost-10054-ca-org3-example-com.pem
    OrganizationalUnitIdentifier: client
  PeerOUIdentifier:
    Certificate: cacerts/localhost-10054-ca-org3-example-com.pem
    OrganizationalUnitIdentifier: peer
  AdminOUIdentifier:
    Certificate: cacerts/localhost-10054-ca-org3-example-com.pem
    OrganizationalUnitIdentifier: admin
  OrdererOUIdentifier:
    Certificate: cacerts/localhost-10054-ca-org3-example-com.pem
    OrganizationalUnitIdentifier: orderer' >${PWD}/../crypto-config/peerOrganizations/org3.example.com/msp/config.yaml
    echo
    echo "Register peer0"
    echo
    fabric-ca-client register --caname ca.org3.example.com --id.name peer0 --id.secret peer0pw --id.type peer --tls.certfiles ${PWD}/fabric-ca/org3/tls-cert.pem
    echo
    echo "Register user"
    echo
    fabric-ca-client register --caname ca.org3.example.com --id.name user1 --id.secret user1pw --id.type client --tls.certfiles ${PWD}/fabric-ca/org3/tls-cert.pem
    echo
    echo "Register the org admin"
    echo
    fabric-ca-client register --caname ca.org3.example.com --id.name org3admin --id.secret org3adminpw --id.type admin --tls.certfiles ${PWD}/fabric-ca/org3/tls-cert.pem
    mkdir -p ../crypto-config/peerOrganizations/org3.example.com/peers
    mkdir -p ../crypto-config/peerOrganizations/org3.example.com/peers/peer0.org3.example.com
    # --------------------------------------------------------------
    # Peer 0
    echo
    echo "## Generate the peer0 msp"
    echo
    fabric-ca-client enroll -u https://peer0:peer0pw@localhost:10054 --caname ca.org3.example.com -M ${PWD}/../crypto-config/peerOrganizations/org3.example.com/peers/peer0.org3.example.com/msp --csr.hosts peer0.org3.example.com --tls.certfiles ${PWD}/fabric-ca/org3/tls-cert.pem
    cp ${PWD}/../crypto-config/peerOrganizations/org3.example.com/msp/config.yaml ${PWD}/../crypto-config/peerOrganizations/org3.example.com/peers/peer0.org3.example.com/msp/config.yaml
    echo
    echo "## Generate the peer0-tls certificates"
    echo
    fabric-ca-client enroll -u https://peer0:peer0pw@localhost:10054 --caname ca.org3.example.com -M ${PWD}/../crypto-config/peerOrganizations/org3.example.com/peers/peer0.org3.example.com/tls --enrollment.profile tls --csr.hosts peer0.org3.example.com --csr.hosts localhost --tls.certfiles ${PWD}/fabric-ca/org3/tls-cert.pem
    # Copy the TLS material to the fixed file names the peer expects.
    cp ${PWD}/../crypto-config/peerOrganizations/org3.example.com/peers/peer0.org3.example.com/tls/tlscacerts/* ${PWD}/../crypto-config/peerOrganizations/org3.example.com/peers/peer0.org3.example.com/tls/ca.crt
    cp ${PWD}/../crypto-config/peerOrganizations/org3.example.com/peers/peer0.org3.example.com/tls/signcerts/* ${PWD}/../crypto-config/peerOrganizations/org3.example.com/peers/peer0.org3.example.com/tls/server.crt
    cp ${PWD}/../crypto-config/peerOrganizations/org3.example.com/peers/peer0.org3.example.com/tls/keystore/* ${PWD}/../crypto-config/peerOrganizations/org3.example.com/peers/peer0.org3.example.com/tls/server.key
    mkdir ${PWD}/../crypto-config/peerOrganizations/org3.example.com/msp/tlscacerts
    cp ${PWD}/../crypto-config/peerOrganizations/org3.example.com/peers/peer0.org3.example.com/tls/tlscacerts/* ${PWD}/../crypto-config/peerOrganizations/org3.example.com/msp/tlscacerts/ca.crt
    mkdir ${PWD}/../crypto-config/peerOrganizations/org3.example.com/tlsca
    cp ${PWD}/../crypto-config/peerOrganizations/org3.example.com/peers/peer0.org3.example.com/tls/tlscacerts/* ${PWD}/../crypto-config/peerOrganizations/org3.example.com/tlsca/tlsca.org3.example.com-cert.pem
    mkdir ${PWD}/../crypto-config/peerOrganizations/org3.example.com/ca
    cp ${PWD}/../crypto-config/peerOrganizations/org3.example.com/peers/peer0.org3.example.com/msp/cacerts/* ${PWD}/../crypto-config/peerOrganizations/org3.example.com/ca/ca.org3.example.com-cert.pem
    # --------------------------------------------------------------------------------
    mkdir -p ../crypto-config/peerOrganizations/org3.example.com/users
    mkdir -p ../crypto-config/peerOrganizations/org3.example.com/users/User1@org3.example.com
    echo
    echo "## Generate the user msp"
    echo
    fabric-ca-client enroll -u https://user1:user1pw@localhost:10054 --caname ca.org3.example.com -M ${PWD}/../crypto-config/peerOrganizations/org3.example.com/users/User1@org3.example.com/msp --tls.certfiles ${PWD}/fabric-ca/org3/tls-cert.pem
    mkdir -p ../crypto-config/peerOrganizations/org3.example.com/users/Admin@org3.example.com
    echo
    echo "## Generate the org admin msp"
    echo
    fabric-ca-client enroll -u https://org3admin:org3adminpw@localhost:10054 --caname ca.org3.example.com -M ${PWD}/../crypto-config/peerOrganizations/org3.example.com/users/Admin@org3.example.com/msp --tls.certfiles ${PWD}/fabric-ca/org3/tls-cert.pem
    cp ${PWD}/../crypto-config/peerOrganizations/org3.example.com/msp/config.yaml ${PWD}/../crypto-config/peerOrganizations/org3.example.com/users/Admin@org3.example.com/msp/config.yaml
}
# Generate all orderer-org crypto material (MSP + TLS) under
# ../crypto-config/ordererOrganizations/example.com using the orderer
# Fabric CA on localhost:9054: enroll the CA admin, then register and
# enroll orderer, orderer2, orderer3 and the orderer admin.
# NOTE(review): "Cretificates" is a typo, but the call site at the bottom
# of the script uses the same spelling -- rename both together or neither.
createCretificatesForOrderer() {
    echo
    echo "Enroll the CA admin"
    echo
    mkdir -p ../crypto-config/ordererOrganizations/example.com
    # fabric-ca-client reads and writes its state below this directory.
    export FABRIC_CA_CLIENT_HOME=${PWD}/../crypto-config/ordererOrganizations/example.com
    fabric-ca-client enroll -u https://admin:adminpw@localhost:9054 --caname ca-orderer --tls.certfiles ${PWD}/fabric-ca/ordererOrg/tls-cert.pem
    # NodeOUs config: classifies identities (client/peer/admin/orderer) by
    # the OU embedded in certificates issued by this CA.
    echo 'NodeOUs:
  Enable: true
  ClientOUIdentifier:
    Certificate: cacerts/localhost-9054-ca-orderer.pem
    OrganizationalUnitIdentifier: client
  PeerOUIdentifier:
    Certificate: cacerts/localhost-9054-ca-orderer.pem
    OrganizationalUnitIdentifier: peer
  AdminOUIdentifier:
    Certificate: cacerts/localhost-9054-ca-orderer.pem
    OrganizationalUnitIdentifier: admin
  OrdererOUIdentifier:
    Certificate: cacerts/localhost-9054-ca-orderer.pem
    OrganizationalUnitIdentifier: orderer' >${PWD}/../crypto-config/ordererOrganizations/example.com/msp/config.yaml
    echo
    echo "Register orderer"
    echo
    fabric-ca-client register --caname ca-orderer --id.name orderer --id.secret ordererpw --id.type orderer --tls.certfiles ${PWD}/fabric-ca/ordererOrg/tls-cert.pem
    echo
    echo "Register orderer2"
    echo
    fabric-ca-client register --caname ca-orderer --id.name orderer2 --id.secret ordererpw --id.type orderer --tls.certfiles ${PWD}/fabric-ca/ordererOrg/tls-cert.pem
    echo
    echo "Register orderer3"
    echo
    fabric-ca-client register --caname ca-orderer --id.name orderer3 --id.secret ordererpw --id.type orderer --tls.certfiles ${PWD}/fabric-ca/ordererOrg/tls-cert.pem
    echo
    echo "Register the orderer admin"
    echo
    fabric-ca-client register --caname ca-orderer --id.name ordererAdmin --id.secret ordererAdminpw --id.type admin --tls.certfiles ${PWD}/fabric-ca/ordererOrg/tls-cert.pem
    mkdir -p ../crypto-config/ordererOrganizations/example.com/orderers
    # mkdir -p ../crypto-config/ordererOrganizations/example.com/orderers/example.com
    # ---------------------------------------------------------------------------
    # Orderer
    mkdir -p ../crypto-config/ordererOrganizations/example.com/orderers/orderer.example.com
    echo
    echo "## Generate the orderer msp"
    echo
    fabric-ca-client enroll -u https://orderer:ordererpw@localhost:9054 --caname ca-orderer -M ${PWD}/../crypto-config/ordererOrganizations/example.com/orderers/orderer.example.com/msp --csr.hosts orderer.example.com --csr.hosts localhost --tls.certfiles ${PWD}/fabric-ca/ordererOrg/tls-cert.pem
    cp ${PWD}/../crypto-config/ordererOrganizations/example.com/msp/config.yaml ${PWD}/../crypto-config/ordererOrganizations/example.com/orderers/orderer.example.com/msp/config.yaml
    echo
    echo "## Generate the orderer-tls certificates"
    echo
    fabric-ca-client enroll -u https://orderer:ordererpw@localhost:9054 --caname ca-orderer -M ${PWD}/../crypto-config/ordererOrganizations/example.com/orderers/orderer.example.com/tls --enrollment.profile tls --csr.hosts orderer.example.com --csr.hosts localhost --tls.certfiles ${PWD}/fabric-ca/ordererOrg/tls-cert.pem
    # Copy the TLS material to the fixed file names the orderer expects.
    cp ${PWD}/../crypto-config/ordererOrganizations/example.com/orderers/orderer.example.com/tls/tlscacerts/* ${PWD}/../crypto-config/ordererOrganizations/example.com/orderers/orderer.example.com/tls/ca.crt
    cp ${PWD}/../crypto-config/ordererOrganizations/example.com/orderers/orderer.example.com/tls/signcerts/* ${PWD}/../crypto-config/ordererOrganizations/example.com/orderers/orderer.example.com/tls/server.crt
    cp ${PWD}/../crypto-config/ordererOrganizations/example.com/orderers/orderer.example.com/tls/keystore/* ${PWD}/../crypto-config/ordererOrganizations/example.com/orderers/orderer.example.com/tls/server.key
    mkdir ${PWD}/../crypto-config/ordererOrganizations/example.com/orderers/orderer.example.com/msp/tlscacerts
    cp ${PWD}/../crypto-config/ordererOrganizations/example.com/orderers/orderer.example.com/tls/tlscacerts/* ${PWD}/../crypto-config/ordererOrganizations/example.com/orderers/orderer.example.com/msp/tlscacerts/tlsca.example.com-cert.pem
    mkdir ${PWD}/../crypto-config/ordererOrganizations/example.com/msp/tlscacerts
    cp ${PWD}/../crypto-config/ordererOrganizations/example.com/orderers/orderer.example.com/tls/tlscacerts/* ${PWD}/../crypto-config/ordererOrganizations/example.com/msp/tlscacerts/tlsca.example.com-cert.pem
    # -----------------------------------------------------------------------
    # Orderer 2
    mkdir -p ../crypto-config/ordererOrganizations/example.com/orderers/orderer2.example.com
    echo
    echo "## Generate the orderer msp"
    echo
    fabric-ca-client enroll -u https://orderer2:ordererpw@localhost:9054 --caname ca-orderer -M ${PWD}/../crypto-config/ordererOrganizations/example.com/orderers/orderer2.example.com/msp --csr.hosts orderer2.example.com --csr.hosts localhost --tls.certfiles ${PWD}/fabric-ca/ordererOrg/tls-cert.pem
    cp ${PWD}/../crypto-config/ordererOrganizations/example.com/msp/config.yaml ${PWD}/../crypto-config/ordererOrganizations/example.com/orderers/orderer2.example.com/msp/config.yaml
    echo
    echo "## Generate the orderer-tls certificates"
    echo
    fabric-ca-client enroll -u https://orderer2:ordererpw@localhost:9054 --caname ca-orderer -M ${PWD}/../crypto-config/ordererOrganizations/example.com/orderers/orderer2.example.com/tls --enrollment.profile tls --csr.hosts orderer2.example.com --csr.hosts localhost --tls.certfiles ${PWD}/fabric-ca/ordererOrg/tls-cert.pem
    cp ${PWD}/../crypto-config/ordererOrganizations/example.com/orderers/orderer2.example.com/tls/tlscacerts/* ${PWD}/../crypto-config/ordererOrganizations/example.com/orderers/orderer2.example.com/tls/ca.crt
    cp ${PWD}/../crypto-config/ordererOrganizations/example.com/orderers/orderer2.example.com/tls/signcerts/* ${PWD}/../crypto-config/ordererOrganizations/example.com/orderers/orderer2.example.com/tls/server.crt
    cp ${PWD}/../crypto-config/ordererOrganizations/example.com/orderers/orderer2.example.com/tls/keystore/* ${PWD}/../crypto-config/ordererOrganizations/example.com/orderers/orderer2.example.com/tls/server.key
    mkdir ${PWD}/../crypto-config/ordererOrganizations/example.com/orderers/orderer2.example.com/msp/tlscacerts
    cp ${PWD}/../crypto-config/ordererOrganizations/example.com/orderers/orderer2.example.com/tls/tlscacerts/* ${PWD}/../crypto-config/ordererOrganizations/example.com/orderers/orderer2.example.com/msp/tlscacerts/tlsca.example.com-cert.pem
    # The org-level MSP tlscacert was already installed by the first orderer.
    # mkdir ${PWD}/../crypto-config/ordererOrganizations/example.com/msp/tlscacerts
    # cp ${PWD}/../crypto-config/ordererOrganizations/example.com/orderers/orderer2.example.com/tls/tlscacerts/* ${PWD}/../crypto-config/ordererOrganizations/example.com/msp/tlscacerts/tlsca.example.com-cert.pem
    # ---------------------------------------------------------------------------
    # Orderer 3
    mkdir -p ../crypto-config/ordererOrganizations/example.com/orderers/orderer3.example.com
    echo
    echo "## Generate the orderer msp"
    echo
    fabric-ca-client enroll -u https://orderer3:ordererpw@localhost:9054 --caname ca-orderer -M ${PWD}/../crypto-config/ordererOrganizations/example.com/orderers/orderer3.example.com/msp --csr.hosts orderer3.example.com --csr.hosts localhost --tls.certfiles ${PWD}/fabric-ca/ordererOrg/tls-cert.pem
    cp ${PWD}/../crypto-config/ordererOrganizations/example.com/msp/config.yaml ${PWD}/../crypto-config/ordererOrganizations/example.com/orderers/orderer3.example.com/msp/config.yaml
    echo
    echo "## Generate the orderer-tls certificates"
    echo
    fabric-ca-client enroll -u https://orderer3:ordererpw@localhost:9054 --caname ca-orderer -M ${PWD}/../crypto-config/ordererOrganizations/example.com/orderers/orderer3.example.com/tls --enrollment.profile tls --csr.hosts orderer3.example.com --csr.hosts localhost --tls.certfiles ${PWD}/fabric-ca/ordererOrg/tls-cert.pem
    cp ${PWD}/../crypto-config/ordererOrganizations/example.com/orderers/orderer3.example.com/tls/tlscacerts/* ${PWD}/../crypto-config/ordererOrganizations/example.com/orderers/orderer3.example.com/tls/ca.crt
    cp ${PWD}/../crypto-config/ordererOrganizations/example.com/orderers/orderer3.example.com/tls/signcerts/* ${PWD}/../crypto-config/ordererOrganizations/example.com/orderers/orderer3.example.com/tls/server.crt
    cp ${PWD}/../crypto-config/ordererOrganizations/example.com/orderers/orderer3.example.com/tls/keystore/* ${PWD}/../crypto-config/ordererOrganizations/example.com/orderers/orderer3.example.com/tls/server.key
    mkdir ${PWD}/../crypto-config/ordererOrganizations/example.com/orderers/orderer3.example.com/msp/tlscacerts
    cp ${PWD}/../crypto-config/ordererOrganizations/example.com/orderers/orderer3.example.com/tls/tlscacerts/* ${PWD}/../crypto-config/ordererOrganizations/example.com/orderers/orderer3.example.com/msp/tlscacerts/tlsca.example.com-cert.pem
    # mkdir ${PWD}/../crypto-config/ordererOrganizations/example.com/msp/tlscacerts
    # cp ${PWD}/../crypto-config/ordererOrganizations/example.com/orderers/orderer3.example.com/tls/tlscacerts/* ${PWD}/../crypto-config/ordererOrganizations/example.com/msp/tlscacerts/tlsca.example.com-cert.pem
    # ---------------------------------------------------------------------------
    mkdir -p ../crypto-config/ordererOrganizations/example.com/users
    mkdir -p ../crypto-config/ordererOrganizations/example.com/users/Admin@example.com
    echo
    echo "## Generate the admin msp"
    echo
    fabric-ca-client enroll -u https://ordererAdmin:ordererAdminpw@localhost:9054 --caname ca-orderer -M ${PWD}/../crypto-config/ordererOrganizations/example.com/users/Admin@example.com/msp --tls.certfiles ${PWD}/fabric-ca/ordererOrg/tls-cert.pem
    cp ${PWD}/../crypto-config/ordererOrganizations/example.com/msp/config.yaml ${PWD}/../crypto-config/ordererOrganizations/example.com/users/Admin@example.com/msp/config.yaml
}
# createCretificateForOrderer
# Entry point: wipe any previously generated crypto material, then generate
# certificates for the three peer orgs and the orderer org in sequence.
sudo rm -rf ../crypto-config/*
# sudo rm -rf fabric-ca/*
createcertificatesForOrg1
# NOTE(review): the lowercase "certificates" above and the "Cretificates"
# spelling below intentionally match the function definitions' own names.
createCertificatesForOrg2
createCertificatesForOrg3
createCretificatesForOrderer
|
import os
def validate_yang_model(lint: bool, debug_level: int) -> str:
    """Run ``pyang`` and return its combined stdout/stderr output.

    Args:
        lint: Run with ``--lint`` when True, otherwise with ``--ietf``.
        debug_level: When greater than 0, print the command before running.

    Returns:
        Everything pyang wrote to stdout and stderr, as a single string.

    NOTE(review): no YANG model file is passed on the command line here --
    presumably the caller relies on pyang's default behaviour; confirm
    against the call sites.
    """
    import subprocess  # local import keeps this function self-contained

    pyang_param = '--lint' if lint else '--ietf'
    bash_command = ['pyang', pyang_param]
    if debug_level > 0:
        print('DEBUG: running command {}'.format(' '.join(bash_command)))
    # subprocess.run with an argument list replaces the shell-based
    # os.popen() call (no shell-injection surface, and a missing pyang
    # raises instead of silently returning the shell's error text).
    # stderr is merged into stdout to keep the original `2>&1` semantics.
    completed = subprocess.run(
        bash_command,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        text=True,
    )
    return completed.stdout
def reduceTimeComplexity(n, arr):
    """Return the length of the longest strictly decreasing subsequence
    of the first ``n`` elements of ``arr``.

    Classic O(n^2) dynamic programme: ``best[j]`` holds the longest
    decreasing subsequence ending at ``arr[j - 1]``.  Index 0 of ``best``
    is an unused sentinel, kept so the result for ``n == 0`` matches the
    original implementation.
    """
    best = [1] * (n + 1)
    for i in range(1, n + 1):
        for j in range(i + 1, n + 1):
            # arr[i - 1] may precede arr[j - 1] in a strictly decreasing run.
            if arr[i - 1] > arr[j - 1]:
                best[j] = max(best[j], best[i] + 1)
    return max(best)
import * as tslib_1 from "tslib";
import { SignalOperator } from "./SignalOperator";
import { WaveShaper } from "./WaveShaper";
/**
* Return the absolute value of an incoming signal.
*
* @example
* import { Abs, Signal } from "tone";
* const signal = new Signal(-1);
* const abs = new Abs();
* signal.connect(abs);
* // the output of abs is 1.
* @category Signal
*/
var Abs = /** @class */ (function (_super) {
tslib_1.__extends(Abs, _super);
function Abs() {
var _this = _super !== null && _super.apply(this, arguments) || this;
_this.name = "Abs";
/**
* The node which converts the audio ranges
*/
_this._abs = new WaveShaper({
context: _this.context,
mapping: function (val) {
if (Math.abs(val) < 0.001) {
return 0;
}
else {
return Math.abs(val);
}
},
});
/**
* The AudioRange input [-1, 1]
*/
_this.input = _this._abs;
/**
* The output range [0, 1]
*/
_this.output = _this._abs;
return _this;
}
/**
* clean up
*/
Abs.prototype.dispose = function () {
_super.prototype.dispose.call(this);
this._abs.dispose();
return this;
};
return Abs;
}(SignalOperator));
export { Abs };
//# sourceMappingURL=Abs.js.map |
package fwcd.fructose.math;
import fwcd.fructose.math.algebra.FieldElement;
/**
 * An immutable complex number (represented using two doubles).
 *
 * <p>Instances are obtained through the static factories
 * {@link #of(double, double)}, {@link #ofReal(double)} and {@link #of(Real)};
 * the common values {@link #ZERO}, {@link #ONE} and {@link #I} are interned.</p>
 */
public class Complex implements Numeric<Complex>, FieldElement<Complex> {
	public static final Complex ZERO = new Complex(0, 0);
	public static final Complex ONE = new Complex(1, 0);
	public static final Complex I = new Complex(0, 1);

	// Rectangular components; immutable by construction.
	private final double real;
	private final double imag;

	private Complex(double real, double imag) {
		this.real = real;
		this.imag = imag;
	}

	/** @return a complex number with the given real part and zero imaginary part */
	public static Complex ofReal(double real) {
		return of(real, 0);
	}

	/** @return the complex number equal to the given real value */
	public static Complex of(Real real) {
		return ofReal(real.getValue());
	}

	/**
	 * Returns a complex number with the given components, reusing the
	 * interned constants {@link #ZERO}, {@link #ONE} and {@link #I}
	 * where possible.
	 */
	public static Complex of(double real, double imag) {
		if (real == 0.0) {
			if (imag == 0.0) {
				return ZERO;
			} else if (imag == 1.0) {
				return I;
			}
		} else if (real == 1.0 && imag == 0.0) {
			return ONE;
		}
		return new Complex(real, imag);
	}

	public double getReal() {
		return real;
	}

	public double getImag() {
		return imag;
	}

	@Override
	public Complex add(Complex rhs) {
		return of(real + rhs.real, imag + rhs.imag);
	}

	@Override
	public Complex sub(Complex rhs) {
		return of(real - rhs.real, imag - rhs.imag);
	}

	/** @return this number with the sign of its real part flipped */
	public Complex invertReal() {
		return of(-real, imag);
	}

	/** @return this number with the sign of its imaginary part flipped */
	public Complex invertImag() {
		return of(real, -imag);
	}

	/** @return the complex conjugate (a - bi for a + bi) */
	public Complex conjugate() {
		return invertImag();
	}

	/** @return this number scaled by the given real factor */
	public Complex multiply(double factor) {
		return of(factor * real, factor * imag);
	}

	/** @return this number divided by the given real denominator */
	public Complex divide(double denom) {
		return of(real / denom, imag / denom);
	}

	/** @return e raised to this number (Euler's formula) */
	public Complex exp() {
		return of(Math.exp(real) * Math.cos(imag), Math.exp(real) * Math.sin(imag));
	}

	public Complex sin() {
		return of(Math.sin(real) * Math.cosh(imag), Math.cos(real) * Math.sinh(imag));
	}

	public Complex cos() {
		return of(Math.cos(real) * Math.cosh(imag), -Math.sin(real) * Math.sinh(imag));
	}

	public Complex tan() {
		return sin().divide(cos());
	}

	/** @return this number scaled to unit magnitude (z / |z|) */
	public Complex signum() {
		return divide(abs());
	}

	/** @return the multiplicative inverse 1/z = conj(z) / |z|^2 */
	@Override
	public Complex reciprocal() {
		double scale = (real * real) + (imag * imag);
		return of(real / scale, -imag / scale);
	}

	@Override
	public Complex negate() {
		return of(-real, -imag);
	}

	public Complex divide(Complex other) {
		return multiply(other.reciprocal());
	}

	/**
	 * @return The angle theta between the real axis and this number
	 */
	public double argument() {
		return Math.atan2(imag, real);
	}

	@Override
	public Complex multiply(Complex other) {
		// Firsts + outers + inners + lasts:
		// (a+bi)(c+di) = ac + adi + bci + bdi^2
		// i^2 = -1, so the equation becomes:
		// (a+bi)(c+di) = ac + adi + bci - bd
		//              = (ac - bd) + (ad + bc)i
		return of(
			(real * other.real) - (imag * other.imag),
			(real * other.imag) + (imag * other.real)
		);
	}

	public Complex square() {
		return multiply(this);
	}

	/** @return the magnitude |z| */
	public double abs() {
		return Math.sqrt((real * real) + (imag * imag));
	}

	/**
	 * @return |z|^2 = re^2 + im^2, computed directly. (Previously this
	 *         squared abs(), taking an unnecessary sqrt round-trip that
	 *         cost both precision and speed.)
	 */
	public double absSquared() {
		return (real * real) + (imag * imag);
	}

	/**
	 * Raises this number to an integer power. Positive exponents use
	 * repeated multiplication; exponents below 1 fall back to the
	 * general {@link #pow(double)} formula.
	 */
	public Complex pow(int exponent) {
		if (exponent < 1) {
			return pow((double) exponent);
		}
		Complex result = this;
		for (int i=0; i<exponent-1; i++) {
			result = result.multiply(this);
		}
		return result;
	}

	public Complex pow(double exponent) {
		return pow(Complex.ofReal(exponent));
	}

	/**
	 * Raises this number to a complex power using the polar identity
	 * z^w = |z|^c * e^(-d*arg(z)) * (cos(theta) + i*sin(theta))
	 * with theta = c*arg(z) + (d/2)*ln|z|^2 for w = c + di.
	 */
	public Complex pow(Complex exponent) {
		double a = real;
		double b = imag;
		double c = exponent.real;
		double d = exponent.imag;
		double aSqPlusBSq = (a * a) + (b * b);
		double baseArg = argument();
		double theta = (c * baseArg) + (0.5 * d * Math.log(aSqPlusBSq));
		return of(Math.cos(theta), Math.sin(theta))
			.multiply(Math.pow(aSqPlusBSq, c / 2) * Math.exp(-d * baseArg));
	}

	@Override
	public String toString() {
		if (imag > 0) {
			return "(" + real + " + " + imag + "i)";
		} else if (imag < 0) {
			return "(" + real + " - " + -imag + "i)";
		} else {
			return Double.toString(real);
		}
	}

	@Override
	public int hashCode() {
		// Asymmetric 31-based combination. The previous plain product
		// (27 * h(real) * h(imag)) collapsed to 0 for every number whose
		// imaginary (or real) part hashed to 0 -- e.g. all pure reals --
		// and could not distinguish (a, b) from (b, a).
		return 31 * Double.hashCode(real) + Double.hashCode(imag);
	}

	/** Approximate equality: both components within the given epsilon. */
	@Override
	public boolean equals(Complex other, double epsilon) {
		return (Math.abs(real - other.real) < epsilon)
			&& (Math.abs(imag - other.imag) < epsilon);
	}

	@Override
	public boolean equals(Object obj) {
		if (this == obj) {
			return true;
		} else if (obj == null) {
			return false;
		} else if (getClass() != obj.getClass()) {
			return false;
		}
		Complex other = (Complex) obj;
		return real == other.real && imag == other.imag;
	}

	@Override
	public Complex add(Real rhs) {
		return of(real + rhs.getValue(), imag);
	}

	@Override
	public Complex sub(Real rhs) {
		return of(real - rhs.getValue(), imag);
	}

	@Override
	public Complex multiply(Real rhs) {
		return multiply(rhs.getValue());
	}

	@Override
	public Complex divide(Real rhs) {
		return divide(rhs.getValue());
	}
}
|
<gh_stars>0
import {DecoratorTypes, Type} from "@tsed/core";
import {JsonEntitiesContainer, JsonEntityStore} from "../domain/JsonEntityStore";
import {JsonClassStore} from "../domain/JsonClassStore";
import {JsonPropertyStore} from "../domain/JsonPropertyStore";
import {JsonParameterStore} from "../domain/JsonParameterStore";
import {JsonMethodStore} from "../domain/JsonMethodStore";
// Map each decorator type to its concrete JSON entity store implementation.
JsonEntitiesContainer.set(DecoratorTypes.CLASS, JsonClassStore);
JsonEntitiesContainer.set(DecoratorTypes.PROP, JsonPropertyStore);
JsonEntitiesContainer.set(DecoratorTypes.PARAM, JsonParameterStore);
JsonEntitiesContainer.set(DecoratorTypes.METHOD, JsonMethodStore);
/**
 * Type guard that reports whether `model` is a {@link JsonEntityStore},
 * detected via its `isStore` marker property.
 * @ignore
 */
export function isJsonEntityStore(model: any): model is JsonEntityStore {
  // Boolean() also guards against null/undefined, on which the previous
  // bare property access (`model.isStore`) threw a TypeError.
  return Boolean(model && model.isStore);
}
/**
 * Get the entity store bound to the given decorator arguments.
 *
 * The overloads mirror the standard TypeScript decorator signatures:
 * class (target only), property (target + propertyKey), parameter
 * (target + propertyKey + numeric index) and method (target +
 * propertyKey + descriptor).
 * @param target
 */
export function getJsonEntityStore<T extends JsonClassStore = JsonClassStore>(target: Type<any>): T;
export function getJsonEntityStore<T extends JsonPropertyStore = JsonPropertyStore>(
  target: Type<any> | any,
  propertyKey: string | symbol
): T;
export function getJsonEntityStore<T extends JsonParameterStore = JsonParameterStore>(
  target: Type<any> | any,
  propertyKey: string | symbol,
  index: number
): T;
export function getJsonEntityStore<T extends JsonMethodStore = JsonMethodStore>(
  target: Type<any> | any,
  propertyKey: string | symbol,
  descriptor: PropertyDescriptor
): T;
export function getJsonEntityStore<T extends JsonEntityStore = JsonEntityStore>(...args: any[]): T;
export function getJsonEntityStore<T extends JsonEntityStore = JsonEntityStore>(...args: any[]): T {
  // All overloads funnel into JsonEntityStore.from, which dispatches on
  // the shape/arity of the argument list.
  return JsonEntityStore.from<T>(...args);
}
|
<reponame>mukesh-openchannel/angular-template-libraries
import { ComponentsPage, FullAppData } from '@openchannel/angular-common-components/src/lib/common-components';
import { TemplateRef } from '@angular/core';
/** Default column IDs (custom string IDs are also accepted). */
export type ModifyColumnId =
  | 'left-placeholder'
  | 'name'
  | 'summary'
  | 'create-date'
  | 'status'
  | 'app-options'
  | 'right-placeholder'
  | string;

/**
 * Context passed to table-cell templates (available inside your TemplateRef).<br>
 * Used only for {@link ModifyColumnConfig.rowCellTemplate}.<br>
 */
export interface ColumnTemplateContext {
  /** The app rendered in this row. */
  app: FullAppData | any;
  /** True when the row represents a sub-version of a parent app. */
  isChild: boolean;
  /** Zero-based row index. */
  index: number;
}

/**
 * Context for the bottom table area.<br>
 * Used only for {@link OcAppTableComponent#tableBottomRowTemplate}.
 */
export interface AppsNotFoundTemplateContext {
  /** Count of table columns. */
  columnCount: number;
}

export interface FieldPathConfig {
  /**
   * Property path used to read the icon URL from {@link FullAppData}.<br>
   * Used for showing the image in the 'Name' column. See: {@link ModifyColumnId}.<br>
   */
  appIconPath?: 'customData.icon' | string;
  /**
   * Property path used to read the description text from {@link FullAppData}.<br>
   * Used for showing the app description in the 'Name' column. See: {@link ModifyColumnId}.<br>
   */
  appDescriptionPath?: 'customData.summary' | string;
}

/**
 * This config is used for:<br>
 * 1. Overriding the default table cells ({@link ModifyColumnId}).
 * 2. Adding new table columns.
 */
export type ModifyColumnConfig = {
  [key in ModifyColumnId]: {
    /**
     * Template for the header cell.
     * When non-null, overrides the default cell.
     */
    headerCellTemplate?: TemplateRef<any>;
    /**
     * Template for a row cell (shows per-app data such as name, description, date, etc.).
     * When non-null, overrides the default cell.
     */
    rowCellTemplate?: TemplateRef<ColumnTemplateContext>;
  };
};

/** Configuration model for the App Listing component. */
export interface AppListing {
  /** Layout of the component. Default: 'table'. */
  layout: 'table';
  /** Data response with the list of apps, pagination, etc. */
  data: ComponentsPage<FullAppData | any>;
  /** Array of options applied to the component's dropdown menu. */
  options: AppListingOptions[];
  /**
   * A URL template for the preview.
   * @example https://mysite.com/apps/{appId}/{version}
   */
  previewTemplate?: string;
}

/** The available options of the dropdown menu (custom strings allowed). */
export type AppListingOptions = string | 'EDIT' | 'PREVIEW' | 'PUBLISH' | 'SUBMIT' | 'SUSPEND' | 'UNSUSPEND' | 'DELETE';

/** Interface for an action chosen from the dropdown menu. */
export interface AppListMenuAction {
  /** Which action was chosen. */
  action: AppListingOptions;
  /** ID of the app that was chosen. */
  appId: string;
  /** Version of the app that was chosen. */
  appVersion: number;
  /** Marker for apps that are a sub-version of the main app. */
  isChild?: boolean;
}

// ---- Sorting for app table columns ----

/** Column names available for sorting in {@link OcAppTableComponent}. */
export type AppGridSortColumn = 'name' | 'created' | 'status';

/**
 * Config for setting the current sort-icon direction (up or down). Used in {@link OcAppTableComponent#sortOptions}.
 *
 * Values:<br>
 * -1 => sort icon pointing down.<br>
 * null => sort icon pointing down.<br>
 * 1 => sort icon pointing up.<br>
 */
export type AppGridSortOptions = {
  [name in AppGridSortColumn]: 1 | -1 | null;
};

export type AppGridSortChosen = {
  /** New sort config. */
  sortOptions: AppGridSortOptions;
  /** Updated column ID. */
  changedSortOption: AppGridSortColumn;
};
|
def get_safe_settings():
    """Return the dict of settings that are safe to expose via attribute access."""
    return {'USERNAME': 'admin', 'PASSWORD': 'secretpassword'}


class SafeSettings(object):
    """
    Map attributes to values in the safe settings dict
    """

    def __init__(self):
        # Snapshot the settings once; attribute lookups read this dict.
        self._settings = get_safe_settings()

    def __getattr__(self, name):
        # Keys are upper-case, so lookups are case-insensitive on our side.
        try:
            return self._settings[name.upper()]
        except KeyError:
            # Include the missing attribute name so the error is debuggable;
            # `from None` hides the internal KeyError from the traceback.
            raise AttributeError(name) from None
settings_obj = SafeSettings() |
<gh_stars>0
require("dotenv").config("../.env");
module.exports = {
development: {
username: process.env.DB_USERNAME_DEV,
password: <PASSWORD>,
database: "mju_club",
host: process.env.DB_HOST_DEV,
dialect: "mysql",
timezone: "+09:00",
},
test: {
username: "mju_club_dev",
password: <PASSWORD>,
database: "database_test",
host: "127.0.0.1",
dialect: "mysql",
timezone: "+09:00",
},
production: {
username: process.env.DB_USERNAME,
password: <PASSWORD>,
database: "mju_club",
host: process.env.DB_HOST,
dialect: "mysql",
logging: false,
timezone: "+09:00",
},
};
|
#!/usr/bin/env sh
# Prompt-setup fragment: enables git status info in the shell prompt.
# NOTE(review): despite the `sh` shebang this uses bashisms ($BASH_SOURCE,
# `type -t`) and a top-level `return`, so it must be *sourced* from bash,
# not executed — confirm intended usage.
pst_debug_echo "$BASH_SOURCE"

# Bail out early when git is not on PATH.
if [ -z "$(type -t git)" ] ; then
  echo "It appears that the Git executable 'git' is not in the path!"
  return
fi

# ------- GIT PROMPT SETTINGS ---------
GIT_PS1_SHOWDIRTYSTATE=true
GIT_PS1_SHOWSTASHSTATE=true
GIT_PS1_SHOWUNTRACKEDFILES=true
GIT_PS1_SHOWUPSTREAM="auto"
GIT_PS1_SHOWCOLORHINTS="yes"

source "$PST_ROOT/dev/git/git-prompt.sh"

# Prompt fragment expanded at display time (single quotes are deliberate).
GIT_PS1='$(__git_ps1)'
|
#!/usr/bin/env bash
# Install nvm into ./nvm and provision node/npm version pairs.
# Arguments are consumed in pairs: <node_version> <npm_version> ...
export NVM_DIR="$(pwd)/nvm"
mkdir -p "${NVM_DIR}"

# Fetch nvm.sh once; later runs reuse the cached copy.
if [ ! -f "${NVM_DIR}/nvm.sh" ]; then
  curl "https://raw.githubusercontent.com/creationix/nvm/master/nvm.sh" > "${NVM_DIR}/nvm.sh";
fi

# Cleared before sourcing nvm — presumably because nvm rejects a custom
# npm prefix; confirm against nvm's documentation.
export npm_config_prefix=
source "${NVM_DIR}/nvm.sh"

while [ $# -gt 0 ]; do
  node_version="$1"; shift
  npm_version="$1"; shift
  # Each provisioned pair lives under a combined name "<node>-<npm>";
  # `nvm use` succeeding means it was already installed.
  if nvm use "${node_version}-${npm_version}"; then
    :
  else
    nvm install "${node_version}"
    nvm use "${node_version}"
    npm install -g "npm@${npm_version}"
    # Rename the install dir to the combined name so several npm versions
    # of the same node release can coexist.
    mv "${NVM_DIR}/versions/node/${node_version}" "${NVM_DIR}/versions/node/${node_version}-${npm_version}"
    nvm alias default "${node_version}-${npm_version}" # nvm will use first installed version as default.
                                                       # After moving it, default version would be lost,
                                                       # so force the alias to remain valid.
  fi
done
|
<filename>app/models/postcode_checker.rb
# Form object whose sole job is to validate a postcode string via the
# custom `postcode` ActiveModel validator.
class PostcodeChecker
  include ActiveModel::Model

  # Raw postcode value to be validated.
  attr_accessor :postcode

  validates :postcode, postcode: true
end
|
<filename>src/main/java/com/thinkgem/jeesite/modules/yipan/entity/YpCardHolder.java<gh_stars>0
/**
* Copyright © 2012-2016 <a href="https://github.com/thinkgem/jeesite">JeeSite</a> All rights reserved.
*/
package com.thinkgem.jeesite.modules.yipan.entity;
import org.hibernate.validator.constraints.Length;
import java.util.Date;
import com.fasterxml.jackson.annotation.JsonFormat;
import com.thinkgem.jeesite.common.persistence.DataEntity;
/**
 * Card-holder information entity (持卡信息).
 * @author zcx
 * @version 2020-05-30
 */
public class YpCardHolder extends DataEntity<YpCardHolder> {

    private static final long serialVersionUID = 1L;
    private String cardNo;          // card number
    private String cardType;        // card type
    private String openid;          // WeChat openid
    private String carduser;        // card holder's name
    private Long remainingTimes;    // remaining usage count
    private String beans;           // "yi dou" credit balance (易豆)
    private Date expirationDate;    // expiry date
    private Date buyDate;           // purchase date

    public YpCardHolder() {
        super();
    }

    public YpCardHolder(String id){
        super(id);
    }

    // The @Length messages below are user-facing and intentionally left in
    // Chinese ("... length must be between 0 and 255").
    @Length(min=0, max=255, message="卡号长度必须介于 0 和 255 之间")
    public String getCardNo() {
        return cardNo;
    }

    public void setCardNo(String cardNo) {
        this.cardNo = cardNo;
    }

    @Length(min=0, max=255, message="卡类型长度必须介于 0 和 255 之间")
    public String getCardType() {
        return cardType;
    }

    public void setCardType(String cardType) {
        this.cardType = cardType;
    }

    @Length(min=0, max=255, message="openid长度必须介于 0 和 255 之间")
    public String getOpenid() {
        return openid;
    }

    public void setOpenid(String openid) {
        this.openid = openid;
    }

    @Length(min=0, max=255, message="持卡人姓名长度必须介于 0 和 255 之间")
    public String getCarduser() {
        return carduser;
    }

    public void setCarduser(String carduser) {
        this.carduser = carduser;
    }

    public Long getRemainingTimes() {
        return remainingTimes;
    }

    public void setRemainingTimes(Long remainingTimes) {
        this.remainingTimes = remainingTimes;
    }

    @Length(min=0, max=255, message="易豆长度必须介于 0 和 255 之间")
    public String getBeans() {
        return beans;
    }

    public void setBeans(String beans) {
        this.beans = beans;
    }

    // Dates are serialized in "yyyy-MM-dd HH:mm:ss" form for JSON clients.
    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
    public Date getExpirationDate() {
        return expirationDate;
    }

    public void setExpirationDate(Date expirationDate) {
        this.expirationDate = expirationDate;
    }

    @JsonFormat(pattern = "yyyy-MM-dd HH:mm:ss")
    public Date getBuyDate() {
        return buyDate;
    }

    public void setBuyDate(Date buyDate) {
        this.buyDate = buyDate;
    }
}
import React from "react";
import { BrowserRouter as Router, Route } from "react-router-dom";
import Overview from "./page/Overview";
import AddGame from "./page/AddGame";
import Play from "./page/Play";
// TODO: save generated user id in localstorage & read from localstorage to load id if it exists
// Top-level component: client-side routes for the games overview, the
// add-game form and the play screen (parameterised by game id).
const App = () => (
  <Router>
    <React.Fragment>
      <Route exact path="/" component={Overview} />
      <Route exact path="/game/add" component={AddGame} />
      <Route exact path="/game/play/:id" component={Play} />
    </React.Fragment>
  </Router>
);
export default App;
|
<reponame>sagarl/Raigad
/**
* Copyright 2014 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.raigad.indexmanagement;
import com.netflix.raigad.indexmanagement.exception.UnsupportedAutoIndexException;
import com.netflix.raigad.indexmanagement.indexfilters.DailyIndexNameFilter;
import com.netflix.raigad.indexmanagement.indexfilters.MonthlyIndexNameFilter;
import com.netflix.raigad.indexmanagement.indexfilters.YearlyIndexNameFilter;
import org.codehaus.jackson.annotate.JsonCreator;
import org.codehaus.jackson.annotate.JsonProperty;
/**
* Courtesy: <NAME>
*/
/**
 * Immutable description of one auto-managed index: its name, retention
 * granularity/period, the matching name filter and whether the index
 * should be pre-created.
 */
public class IndexMetadata {

    /** Granularity at which index retention is applied. */
    public enum RETENTION_TYPE {
        DAILY, MONTHLY, YEARLY
    }

    private final String indexName;
    private final RETENTION_TYPE retentionType;
    private final int retentionPeriod;
    private final IIndexNameFilter indexNameFilter;
    private final boolean preCreate;

    /**
     * Jackson creator.
     *
     * @param indexName       name of the index
     * @param retentionType   "DAILY", "MONTHLY" or "YEARLY" (case-insensitive);
     *                        null defaults to "DAILY"
     * @param retentionPeriod number of retention periods to keep
     * @param preCreate       whether the index should be created ahead of time
     * @throws UnsupportedAutoIndexException declared for the default branch
     *         below; in practice an unknown type makes valueOf throw
     *         IllegalArgumentException first
     */
    @JsonCreator
    public IndexMetadata(
            @JsonProperty("indexName") String indexName,
            @JsonProperty("retentionType") String retentionType,
            @JsonProperty("retentionPeriod") int retentionPeriod,
            @JsonProperty("preCreate") boolean preCreate) throws UnsupportedAutoIndexException {
        this.indexName = indexName;
        // Missing retention type falls back to daily rotation.
        if(retentionType == null)
            retentionType = "DAILY";
        this.retentionType = RETENTION_TYPE.valueOf(retentionType.toUpperCase());
        // Pick the index-name filter that matches the retention granularity.
        switch(this.retentionType) {
            case DAILY:
                this.indexNameFilter = new DailyIndexNameFilter();
                break;
            case MONTHLY:
                this.indexNameFilter = new MonthlyIndexNameFilter();
                break;
            case YEARLY:
                this.indexNameFilter = new YearlyIndexNameFilter();
                break;
            default:
                // Unreachable: every enum constant has a case above and
                // valueOf already rejects unknown names; kept so the final
                // field is definitely assigned.
                this.indexNameFilter = null;
                throw new UnsupportedAutoIndexException("Given index is not (DAILY or MONTHLY or YEARLY), please check your configuration.");
        }
        this.retentionPeriod = retentionPeriod;
        this.preCreate = preCreate;
    }

    @Override
    public String toString() {
        // preCreate is deliberately omitted from the string form.
        return "IndexMetadata{" +
                "indexName='" + indexName + '\'' +
                ", retentionType=" + retentionType +
                ", retentionPeriod=" + retentionPeriod +
                '}';
    }

    public String getIndexName() {
        return indexName;
    }

    public RETENTION_TYPE getRetentionType() {
        return retentionType;
    }

    public int getRetentionPeriod() {
        return retentionPeriod;
    }

    public IIndexNameFilter getIndexNameFilter() {
        return indexNameFilter;
    }

    public boolean isPreCreate() {
        return preCreate;
    }
}
|
public int compareTo(Key<? extends Collection<? extends CharSequence>> o) {
Iterator<? extends CharSequence> other = o.iterator();
Iterator<? extends CharSequence> self = this.iterator();
while (self.hasNext() && other.hasNext()) {
int comparison = self.next().toString().compareTo(other.next().toString());
if (comparison != 0) {
return comparison;
}
}
if (self.hasNext()) {
return 1; // The calling Key has more elements, so it is greater
} else if (other.hasNext()) {
return -1; // The other Key has more elements, so it is greater
} else {
return 0; // Both Key objects have the same elements
}
} |
// Server path under which uploaded audio files are stored.
var audioStorage = "/uploads/audio/";
// Parameters handed to the Flash recorder movie: encode quality and mic mode.
var flashvars = {
    quality : 80,
    mode: "mic"
};
var params = {
    menu: "false",
    scale: "noScale",
    allowFullscreen: "true",
    allowScriptAccess: "always",
    bgcolor: "",
    wmode: "transparent"
};
var attributes = {
    id:"flashrecorder"
};
// Embed the recorder SWF into #flash-box (requires Flash player 10.0.0+,
// falling back to the express-install movie otherwise).
swfobject.embedSWF(
    "/assets/flash_recorder/bin/flashrecorder(4)(4).swf",
    "flash-box", "250", "150", "10.0.0",
    "/assets/flash_recorder/bin/expressInstall.swf",
    flashvars, params, attributes);
// Build a random alphanumeric token; a missing/zero length defaults to 32.
function makeToken(length)
{
    if (!length) {
        length = 32;
    }
    var alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789";
    var token = "";
    for (var i = 0; i < length; i++) {
        token += alphabet.charAt(Math.floor(Math.random() * alphabet.length));
    }
    console.log('token made: ' + token);
    return token;
}
// Session token identifying this recording on the upload server.
var token = makeToken(32);
// Encoded chunks ready to upload, and raw samples not yet chunked.
var audio_bytes = [];
var audio_temp_bytes = [];
// Batch counter (every 5th batch is flushed) and index of the next chunk to send.
var c = 1;
var audio_count = 0;
// BRIDGE STUFF
// logs from Flash
function log() {
    // Bound wrapper so Flash can call console.log with arbitrary arguments.
    var log = Function.prototype.bind.call(console.log, console);
    log.apply(console, arguments);
}
// Event bridge called by the Flash movie; returns true so the SWF treats
// each event as handled. Unlisted events fall through silently.
function swf_events(event, data){
    switch(event){
        case 'mic_ok':
            // Microphone granted: reveal the record button and collapse the
            // Flash permission-prompt area.
            $('#play').show();
            $('.flash_cont').css({'height': 0});
            // $('#flashrecorder').css({'visibility' : 'hidden'});
            break;
        case 'mic_muted':
            $('#play').hide();
            break;
        case 'cam_not_found':
            break;
        case 'video_frame_data':
            break;
        case 'sound_recording_progress':
            // Buffer incoming sample batches; every 5th batch is URL-encoded
            // and appended to audio_bytes as one upload chunk.
            // audio_bytes.push(data);
            audio_temp_bytes.push(data);
            if( c%5 === 0 ) {
                audio_bytes.push(encodeURIComponent(audio_temp_bytes));
                audio_temp_bytes.length = 0;
            }
            c++;
            break;
        case 'generation_wave_complete':
            break;
        case 'mp3_encode_progress':
            break;
        case 'mp3_encode_complete':
            break;
    }
    return true;
}
// Recording-timer state: elapsed time is tracked as minutes, tens of
// seconds (seconds1) and single seconds (seconds2), displayed as "m:ss".
var isPaused = false;
var seconds1 = 0;
var seconds2 = 0;
var minutes = 0;
var message_length = '0:00';
// Initialise the on-screen counter only when the element exists on this page.
if($('#record_length').length !== 0) {
    document.getElementById("record_length").innerHTML = message_length;
}
// Tick handler run once per second by the $.timer below: advances the
// m:ss display (ones digit wraps at 10, tens digit at 6).
function func() {
    seconds2++;
    if(seconds2 == 10){
        seconds2 = 0;
        seconds1++;
    }
    if(seconds1 == 6){
        seconds1 = 0;
        minutes++;
    }
    message_length = minutes+":"+seconds1+seconds2;
    document.getElementById("record_length").innerHTML = message_length;
}
// The jQuery timer plugin may be absent on some pages — guard its setup.
if (typeof $.timer != 'undefined'){
    var timer = $.timer(func, 1000, true);
    timer.stop();
}
var timeout;
// Start recording; capture auto-stops after 5 minutes (10 for premium users).
$('#play').on('click', function() {
    timer.play();
    var swfObject = document.getElementById("flashrecorder");
    $('#stop').show();
    $('#play').hide();
    swfObject.capture();
    // NOTE(review): .val() returns a string, so this loose == false check
    // only matches "" / "0" — confirm the premium flag's format.
    if($('.is_premium').val() == false){
        timeout = setTimeout(sendAudio, 300000);
    } else {
        timeout = setTimeout(sendAudio, 600000);
    }
})
// Stop recording: halt capture, remember the length and kick off the upload.
$('#stop').on('click', function() {
    clearTimeout(timeout);
    timer.stop();
    var swfObject = document.getElementById("flashrecorder");
    swfObject.stop();
    $("#spanimg").show();
    localStorage.message_length = message_length;
    if($('.check_domain').val() === 'true') {
        $('.display_div').hide();
        $('.display_div:nth-child(5)').show();
    }
    document.getElementById('play').style.display = 'inline-block';
    sendAudio();
})
// Discard the current take: new token, reset chunk buffers and counters.
$('#re-record').on('click', function(){
    token = makeToken(32);
    audio_count = 0;
    c = 1;
    audio_bytes = [];
    audio_temp_bytes = [];
})
// function getFlashAudio() {
// $.post('/messages/upload-file', {flashAudio: encodeURIComponent(audio_bytes), token: token}, function(response){
// audio_bytes.length = 0;
// $("#spanimg").css("display", "none");
// $('.btn-holder').show();
// localStorage.fileName = response.file_name;
// localStorage.file = audioStorage + response.file_name;
// localStorage.id = response.id;
// $('#music').prop('src', localStorage.file);
// document.getElementById('message_length').innerHTML = localStorage.message_length;
// document.getElementById("file_name").value = localStorage.fileName;
// document.getElementById("duration").value = localStorage.message_length;
// document.getElementById('token').value = localStorage.token;
// seconds1 = 0;
// seconds2 = 0;
// minutes = 0;
// message_length = '0:00';
// })
// if($('.check_domain').val() === 'true') {
// $('.display_div').hide();
// $('.display_div:nth-child(5)').show();
// }
// document.getElementById('play').style.display = 'inline-block';
// }
// Upload the recorded audio one chunk at a time, then finalize the upload
// (flushing any un-chunked samples) and register the file with the app server.
function sendAudio() {
    console.log('jr sending audio');
    $.ajax({
        type : 'POST',
        url : 'https://app.voicestak.com:4444/only-audio',
        contentType : "application/x-www-form-urlencoded;charset=UTF-8",
        cache : false,
        data : {flashAudio: audio_bytes[audio_count], token : token},
        success : function(data){
            // Recurse while chunks remain. The previous check
            // (audio_count < audio_bytes.length) ran one step too far and
            // posted an `undefined` chunk after the last real one.
            if(audio_count + 1 < audio_bytes.length) {
                audio_count++;
                sendAudio();
            } else {
                // Flush the tail samples that never reached a full batch.
                var audio_final_value = encodeURIComponent(audio_temp_bytes);
                $.post("https://app.voicestak.com:4444/only-audio-complete", {token:token, flashAudio:audio_final_value, finish: true}, function(data){
                    // Tell the app server about the finished file, then
                    // update the player UI and reset the timer state.
                    $.post('/messages/upload-file', {file_name: data.file_name, file_type: data.file_type}, function(response){
                        audio_bytes.length = 0;
                        $("#spanimg").css("display", "none");
                        $('#playing').show();
                        $('.btn-holder').show();
                        $('.btn-holder-r').show();
                        localStorage.fileName = response.file_name;
                        localStorage.file = audioStorage + response.file_name;
                        localStorage.id = response.id;
                        $('#music').prop('src', localStorage.file);
                        document.getElementById('message_length').innerHTML = localStorage.message_length;
                        document.getElementById("file_name").value = localStorage.fileName;
                        document.getElementById("duration").value = localStorage.message_length;
                        document.getElementById('token').value = localStorage.token;
                        seconds1 = 0;
                        seconds2 = 0;
                        minutes = 0;
                        message_length = '0:00';
                    })
                })
            }
        }
    })
}
#!/bin/sh
# Run the provisioning steps in order; -e aborts on the first failure,
# -x echoes each command as it runs.
set -ex
./000-prereqs.sh
./010-create-services.sh
./020-create-functions.sh
./030-create-vpc.sh
# ./035-flow-log-create.sh
|
# Value object describing the outcome of a booking/visit operation against
# the NOMIS API. Build instances through the named factory methods rather
# than .new, and query them via the predicate helpers.
class BookingResponse
  # Cancel visit return codes
  VISIT_NOT_FOUND = 'visit_does_not_exist'.freeze
  VISIT_ALREADY_CANCELLED = 'visit_already_cancelled'.freeze
  VISIT_COMPLETED = 'visit_completed'.freeze
  INVALID_CANCELLATION_CODE = 'invalid_cancellation_code'.freeze

  # Process visit return codes
  ALREADY_PROCESSED_ERROR = 'already_processed'.freeze
  PROCESS_REQUIRED_ERROR = 'process_required'.freeze

  # Generic return codes
  NOMIS_API_ERROR = 'nomis_api_error'.freeze
  SUCCESS = 'success'.freeze

  attr_reader :message

  # --- Factory constructors, one per return code ---

  def self.successful
    new(SUCCESS)
  end

  def self.process_required
    new(PROCESS_REQUIRED_ERROR)
  end

  def self.nomis_api_error
    new(NOMIS_API_ERROR)
  end

  def self.already_processed
    new(ALREADY_PROCESSED_ERROR)
  end

  def self.invalid_cancellation_code
    new(INVALID_CANCELLATION_CODE)
  end

  def self.visit_not_found
    new(VISIT_NOT_FOUND)
  end

  def self.visit_already_cancelled
    new(VISIT_ALREADY_CANCELLED)
  end

  def self.visit_completed
    new(VISIT_COMPLETED)
  end

  def initialize(message)
    self.message = message
  end

  def success?
    message == SUCCESS
  end

  def already_processed?
    message == ALREADY_PROCESSED_ERROR
  end

  private

  # Writer kept private so the code cannot be changed after construction.
  attr_writer :message
end
|
import plotly.graph_objects as go
import numpy as np
# Create x-values from 0 to 10 with a step size of 0.1
# (np.arange's stop bound is exclusive, hence 10.1 so the grid reaches 10.0).
x_values = np.arange(0, 10.1, 0.1)
# The three functions plotted below.
def curve1(x):
    """Parabola: y = x**2."""
    return x ** 2


def curve2(x):
    """Straight line: y = 2x + 5."""
    return 5 + 2 * x


def curve3(x):
    """Sine wave with amplitude 10."""
    return np.sin(x) * 10
# Create the figure
fig = go.Figure()
# Add one line trace per curve, evaluated point-wise over x_values.
fig.add_trace(go.Scatter(x=x_values, y=[curve1(x) for x in x_values], mode='lines', name='y = x^2'))
fig.add_trace(go.Scatter(x=x_values, y=[curve2(x) for x in x_values], mode='lines', name='y = 2x + 5'))
fig.add_trace(go.Scatter(x=x_values, y=[curve3(x) for x in x_values], mode='lines', name='y = 10sin(x)'))
# Customize the plot: titles plus a legend anchored to the top-left corner.
fig.update_layout(title='Multiple Curves Plot',
                  xaxis_title='X',
                  yaxis_title='Y',
                  legend=dict(x=0, y=1, traceorder='normal'))
# Display the plot
fig.show()
<gh_stars>1-10
# frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
module Google
module Ads
module GoogleAds
module V7
module Enums
# Container for enum representing the attribution model that describes how to
# distribute credit for a particular conversion across potentially many prior
# interactions.
class AttributionModelEnum
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
# The attribution model that describes how to distribute credit for a
# particular conversion across potentially many prior interactions.
module AttributionModel
# Not specified.
UNSPECIFIED = 0
# Used for return value only. Represents value unknown in this version.
UNKNOWN = 1
# Uses external attribution.
EXTERNAL = 100
# Attributes all credit for a conversion to its last click.
GOOGLE_ADS_LAST_CLICK = 101
# Attributes all credit for a conversion to its first click using Google
# Search attribution.
GOOGLE_SEARCH_ATTRIBUTION_FIRST_CLICK = 102
# Attributes credit for a conversion equally across all of its clicks using
# Google Search attribution.
GOOGLE_SEARCH_ATTRIBUTION_LINEAR = 103
# Attributes exponentially more credit for a conversion to its more recent
# clicks using Google Search attribution (half-life is 1 week).
GOOGLE_SEARCH_ATTRIBUTION_TIME_DECAY = 104
# Attributes 40% of the credit for a conversion to its first and last
# clicks. Remaining 20% is evenly distributed across all other clicks. This
# uses Google Search attribution.
GOOGLE_SEARCH_ATTRIBUTION_POSITION_BASED = 105
# Flexible model that uses machine learning to determine the appropriate
# distribution of credit among clicks using Google Search attribution.
GOOGLE_SEARCH_ATTRIBUTION_DATA_DRIVEN = 106
end
end
end
end
end
end
end
|
# Remove generated gRPC stubs, unit-test fixtures and stray logs so the
# test run starts (and finishes) with a clean tree. The same eleven rm
# commands were previously duplicated verbatim before and after the run.
clean_generated() {
    for name in tradingdb2 jrj bitmex simtrading simtrading2; do
        rm -rf "grpc/${name}" "unittestdata/${name}"
    done
    rm -rf utils/*.log
}

clean_generated

go test -coverprofile=coverage.out ./...
go tool cover -html=coverage.out

# Clean again after the run, including logs written at the repo root.
clean_generated
rm -rf *.log
<gh_stars>100-1000
import logging
import logging.handlers
import sys
import logbook
from webui import config
logger = logbook.Logger(__name__)

# Gunicorn server settings: a single sync worker bound on all interfaces.
host = '0.0.0.0'
port = 8000
bind = '{}:{}'.format(host, port)
backlog = 2048
workers = 1
timeout = 600
worker_class = 'sync'
pidfile = '/tmp/gunicorn.pid'


def post_fork(server, worker):
    """Gunicorn post-fork hook: give each worker its own log files.

    Stdlib logging is routed to a per-PID rotating file and throttled to
    CRITICAL, while logbook output goes to stdout plus a second per-PID
    rotating file at INFO level.
    """
    server.log.info('Worker spawned (pid: %s)', worker.pid)
    logging_rotating_file_handler = logging.handlers.RotatingFileHandler(
        config.LOG_FILE_PATH.replace('.log', f'.{worker.pid}.flask.log'),
        maxBytes=5 * 1024 * 1024,
        backupCount=5,
    )
    root_logger = logging.getLogger()
    root_logger.addHandler(logging_rotating_file_handler)
    root_logger.setLevel(logging.CRITICAL)
    logger_setup = logbook.NestedSetup(
        [
            logbook.StreamHandler(sys.stdout, level=logbook.INFO, bubble=True),
            logbook.RotatingFileHandler(
                config.LOG_FILE_PATH.replace('.log', f'.{worker.pid}.log'),
                level=logbook.INFO,
                max_size=5 * 1024 * 1024,
                bubble=True,
            ),
        ]
    )
    # Install the logbook handlers for the lifetime of this worker process.
    logger_setup.push_application()
|
<filename>middlewares/validateRequest.js
var jwt = require('jwt-simple');
// Express middleware: authenticates requests with a JWT access token plus
// an "x-key" user identifier, and gates URLs containing 'admin' by role.
// NOTE(review): `if (token || key)` lets a request through with only a key,
// after which jwt.decode(undefined) throws and surfaces as the generic
// error response — confirm whether both values should be required.
module.exports = function (req, res, next) {
    // Token and key may arrive in the body, the query string or headers.
    var token = (req.body && req.body.access_token) || (req.query && req.query.access_token) || req.headers[ 'token' ];
    var key = (req.body && req.body.x_key) || (req.query && req.query.x_key) || req.headers[ 'x-key' ];
    //var username=req.headers['x-username'];
    if (token || key) {
        try {
            var decoded = jwt.decode(token, require('../config/secret.js')());
            // Reject tokens whose exp claim is at or before the current time.
            if (decoded.exp <= Date.now()) {
                console.log("expe");
                //res.status(400);
                res.json({
                    "status": 400,
                    "message": "Token Expired.Please Re-login"
                });
                return;
                //res.redirect('http://localhost:3039/#/');
                //res.json({"msg":"your token expired"});
                //res.end("ended");
                //return;
            }
            var dbUser = validateUser(key);
            // The key would be the logged in user's username
            if (dbUser) {
                // 'admin' URLs require the admin role; all other URLs pass.
                if ((req.url.indexOf('admin') >= 0 && dbUser.role == 'admin') || (req.url.indexOf('admin') < 0 && req.url.indexOf('/') >= 0)) {
                    next(); // To move to next middleware
                } else {
                    // res.status(403);
                    res.json({
                        "status": false,
                        "message": "Not Authorized"
                    });
                    return;
                }
            } else {
                // No user with this name exists, respond back with a 401
                //res.status(401);
                res.json({
                    "status": false,
                    "message": "Invalid User"
                });
                return;
            }
        } catch (err) {
            //res.status(500);
            res.json({
                "status": false,
                "message": "Oops something went wrong",
                "error": err
            });
        }
    } else {
        res.status(401);
        res.json({
            "status": false,
            "message": "Invalid Token or Key"
        });
        return;
    }

    // NOTE(review): stub lookup — ignores `key` and always returns the same
    // hard-coded user (role 'developer', so admin URLs are always denied);
    // replace with a real DB query before production use.
    function validateUser(key) {
        console.log("key");
        var dbUserObj = { // spoofing a userobject from the DB.
            name: 'durgaprasad',
            role: 'developer'
        };
        return dbUserObj;
    }
};
|
#!/bin/bash
# Run a single dieharder randomness test (-d 1) against generator #52,
# with a fixed seed (-S) so the run is reproducible — see dieharder(1).
dieharder -d 1 -g 52 -S 757983941
|
use reqwest;
fn make_get_request(url: &str) -> Result<String, String> {
let response = reqwest::blocking::get(url);
match response {
Ok(res) => {
match res.text() {
Ok(body) => Ok(body),
Err(err) => Err(err.to_string()),
}
}
Err(err) => Err(err.to_string()),
}
} |
/**
 * View model for a reviewer profile as received from the backend.
 * Fields absent from the response payload remain undefined.
 */
export class Reviewer {
    id: string;
    fireBaseUId:string;
    name: string;
    birthday: Date;
    email: string;
    phoneNumber: number;
    avatar: string;
    certification: string;
    unpublishedReviews: number;
    publishedReview: number;
    waitingReviews: string;
    status: string;
    campusName: string;
    uniName: string;
    majorId: string;

    constructor(res) {
        // `res != null` filters out both null and undefined; the previous
        // strict `null !== res` check let `undefined` through and crashed
        // on the first property access below.
        if (res != null) {
            this.id = res.id;
            this.name = res.name;
            this.birthday = res.birthday;
            this.email = res.email;
            this.phoneNumber = res.phoneNumber;
            this.avatar = res.avatar;
            this.certification = res.certification;
            this.status = res.status;
        }
    }
}
|
import React from 'react';
import './Style.css';
// Shows the list of shortened URLs; clicking an entry loads it into the
// preview pane (via state.previewURL) instead of navigating away.
export default class Listing extends React.Component {
    constructor(props) {
        super(props);
        let urls = props['urls'];
        // Placeholder image shown before any URL has been selected.
        let previewURL = "https://joshcroyle.com/wp-content/uploads/2019/01/Logo-Menu.png";
        // NOTE(review): render() reads urls from props, so the copy kept in
        // state appears unused — confirm before removing.
        this.state = {
            urls: urls,
            previewURL: previewURL,
        }
        this.displayPreview = this.displayPreview.bind(this);
    }

    // Swap the preview pane to the clicked link's href without navigating.
    displayPreview(e){
        e.preventDefault();
        let previewURL = e.target.href;
        this.setState(
            {
                previewURL: previewURL
            }
        );
    }

    render(){
        let urls = this.props['urls'];
        let previewURL = this.state['previewURL'];
        return (
            <div id="listing">
                <div id="urls">
                    <div id="title">
                        <h3>
                            Shorted URLs
                        </h3>
                    </div>
                    <div id="list">
                        <ul>
                            {
                                urls.map(
                                    (item, index) => (
                                        <li id={index} key={index}>
                                            {item.name} - <a href={item.url} target={"_blank"}>{item.url}</a>
                                        </li>
                                    )
                                )
                            }
                        </ul>
                    </div>
                </div>
            </div>
        );
    }
}
#! /usr/bin/env sh

# Where are we going to mount the remote bucket resource in our container.
DEST=${AWS_S3_MOUNT:-/opt/s3fs/bucket}

# Check variables and defaults
if [ -z "${AWS_S3_ACCESS_KEY_ID}" -a -z "${AWS_S3_SECRET_ACCESS_KEY}" -a -z "${AWS_S3_SECRET_ACCESS_KEY_FILE}" -a -z "${AWS_S3_AUTHFILE}" ]; then
    echo "You need to provide some credentials!!"
    exit
fi
if [ -z "${AWS_S3_BUCKET}" ]; then
    echo "No bucket name provided!"
    exit
fi
if [ -z "${AWS_S3_URL}" ]; then
    AWS_S3_URL="https://s3.amazonaws.com"
fi

# Read the secret key from the file, if one was given.
# Fix: the previous `$(read ${AWS_S3_SECRET_ACCESS_KEY_FILE})` invoked the
# shell builtin `read` (which consumes stdin and prints nothing), so the
# secret always ended up empty instead of containing the file's contents.
if [ -n "${AWS_S3_SECRET_ACCESS_KEY_FILE}" ]; then
    AWS_S3_SECRET_ACCESS_KEY=$(cat "${AWS_S3_SECRET_ACCESS_KEY_FILE}")
fi

# Create or use authorisation file
if [ -z "${AWS_S3_AUTHFILE}" ]; then
    AWS_S3_AUTHFILE=/opt/s3fs/passwd-s3fs
    echo "${AWS_S3_ACCESS_KEY_ID}:${AWS_S3_SECRET_ACCESS_KEY}" > ${AWS_S3_AUTHFILE}
    chmod 600 ${AWS_S3_AUTHFILE}
fi

# forget about the password once done (this will have proper effects when the
# PASSWORD_FILE-version of the setting is used)
if [ -n "${AWS_S3_SECRET_ACCESS_KEY}" ]; then
    unset AWS_S3_SECRET_ACCESS_KEY
fi

# Create destination directory if it does not exist.
if [ ! -d $DEST ]; then
    mkdir -p $DEST
fi

# Add a group
# NOTE(review): the group and user are *named* after the numeric ids —
# confirm this is intended rather than a separate name variable.
if [ $GID -gt 0 ]; then
    addgroup -g $GID -S $GID
fi

# Add a user
if [ $UID -gt 0 ]; then
    adduser -u $UID -D -G $GID $UID
    RUN_AS=$UID
    chown $UID $AWS_S3_MOUNT
    chown $UID ${AWS_S3_AUTHFILE}
    chown $UID /opt/s3fs
fi

# Debug options
DEBUG_OPTS=
if [ $S3FS_DEBUG = "1" ]; then
    DEBUG_OPTS="-d -d"
fi

# Mount and verify that something is present. davfs2 always creates a lost+found
# sub-directory, so we can use the presence of some file/dir as a marker to
# detect that mounting was a success. Execute the command on success.
su - $RUN_AS -c "s3fs $DEBUG_OPTS ${S3FS_ARGS} \
    -o passwd_file=${AWS_S3_AUTHFILE} \
    -o url=${AWS_S3_URL} \
    -o uid=$UID \
    -o gid=$GID \
    ${AWS_S3_BUCKET} ${AWS_S3_MOUNT}"

# s3fs can claim to have a mount even though it didn't succeed.
# Doing an operation actually forces it to detect that and remove the mount.
ls "${AWS_S3_MOUNT}"

mounted=$(mount | grep fuse.s3fs | grep "${AWS_S3_MOUNT}")
if [ -n "${mounted}" ]; then
    echo "Mounted bucket ${AWS_S3_BUCKET} onto ${AWS_S3_MOUNT}"
    exec "$@"
else
    echo "Mount failure"
fi
|
package de.zalando.zally.apireview;
import com.fasterxml.jackson.annotation.JsonIgnore;
import de.zalando.zally.rule.api.Severity;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.ManyToOne;
import java.io.Serializable;
/**
 * JPA entity recording one rule violation found during an {@link ApiReview}.
 */
@Entity
public class RuleViolation implements Serializable {

    /** Surrogate primary key, generated by the database. */
    @Id
    @GeneratedValue(strategy = GenerationType.IDENTITY)
    @JsonIgnore
    private Long id;

    /** Owning review; every violation belongs to exactly one review. */
    @JsonIgnore
    @ManyToOne(optional = false)
    private ApiReview apiReview;

    /** Name of the violated rule. */
    @Column(nullable = false)
    private String name;

    /** Severity of the violation, persisted as its enum name. */
    @Column(nullable = false)
    @Enumerated(EnumType.STRING)
    private Severity type;

    @Deprecated
    @Column(nullable = false)
    private int occurrence;

    /**
     * for Hibernate
     */
    protected RuleViolation() {
        super();
    }

    public RuleViolation(ApiReview apiReview, String name, Severity type, @Deprecated int occurrence) {
        this.apiReview = apiReview;
        this.name = name;
        this.type = type;
        this.occurrence = occurrence;
    }

    public Long getId() {
        return id;
    }

    public void setId(Long id) {
        this.id = id;
    }

    public ApiReview getApiReview() {
        return apiReview;
    }

    public void setApiReview(ApiReview apiReview) {
        this.apiReview = apiReview;
    }

    public String getName() {
        return name;
    }

    public void setName(String name) {
        this.name = name;
    }

    public Severity getType() {
        return type;
    }

    public void setType(Severity type) {
        this.type = type;
    }

    @Deprecated
    public int getOccurrence() {
        return occurrence;
    }

    @Deprecated
    public void setOccurrence(int occurrence) {
        this.occurrence = occurrence;
    }
}
|
<reponame>eengineergz/Lambda<filename>7-assets/_SNIPPETS/bryan-guner-gists/keypressJS/keypress.js
// Arrow-key logger: puts stdin into raw mode and prints the name of each
// arrow key pressed; Ctrl+C pauses stdin, which ends the program.
var keypress = require( 'keypress' );

// make `process.stdin` begin emitting "keypress" events
keypress( process.stdin );

// listen for the "keypress" event
process.stdin.on( 'keypress', function ( ch, key ) {
    // console.log('got "keypress"', key);
    if ( key && key.name == 'right' ) {
        console.log( 'right' )
    }
    if ( key && key.name == 'left' ) {
        console.log( 'left' )
    }
    if ( key && key.name == 'up' ) {
        console.log( 'up' )
    }
    if ( key && key.name == 'down' ) {
        console.log( 'down' )
    }
    if ( key && key.ctrl && key.name == 'c' ) {
        process.stdin.pause();
    }
} );

// Raw mode delivers keystrokes immediately, without waiting for Enter.
process.stdin.setRawMode( true );
process.stdin.resume();
|
<gh_stars>0
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package brooklyn.test;
import static com.google.common.base.Preconditions.checkArgument;
import java.util.Map;
import java.util.Map.Entry;
import javax.management.Attribute;
import javax.management.AttributeList;
import javax.management.DynamicMBean;
import javax.management.MBeanAttributeInfo;
import javax.management.MBeanConstructorInfo;
import javax.management.MBeanInfo;
import javax.management.MBeanNotificationInfo;
import javax.management.MBeanOperationInfo;
import javax.management.MBeanParameterInfo;
import com.google.common.base.Function;
import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;
/**
* A quick-and-simple general-purpose implementation of DynamicMBean.
*
* This class provides an implementation of {@link DynamicMBean}. Its initial set of attribute names and values are
* provided to the constructor; from this it figures an {@link MBeanInfo}.
* <p>
* It presently assumes that all attributes are read-only; operations and notifications are not currently supported.
* Choosing the descriptions is not supported - they are set to be the same as the name.
* <p>
* Getting a valid dynamic MBean (in Groovy) is as simple as:
* <pre>
* new GeneralisedDynamicMBean(meaning: 42, advice: "Don't panic")
* </pre>
*/
public class GeneralisedDynamicMBean implements DynamicMBean {

    private final MBeanInfo mBeanInfo;
    // Current attribute name -> value; insertion order preserved.
    private final Map<String,Object> attributes = Maps.newLinkedHashMap();
    // Operation name -> implementation, invoked with the raw argument array.
    private final Map<String,Function> operations = Maps.newLinkedHashMap();

    /**
     * @param initialAttributes initial attribute names and values; all are
     *        exposed as readable, non-writable attributes in the MBeanInfo
     * @param initialOperations map whose keys are operation names (String)
     *        or full {@link MBeanOperationInfo} objects, and whose values
     *        are the {@link Function}s implementing those operations
     */
    public GeneralisedDynamicMBean(Map<String,?> initialAttributes, Map<?,?> initialOperations) {
        attributes.putAll(initialAttributes);

        for (Entry<?,?> entry : initialOperations.entrySet()) {
            checkArgument(entry.getKey() instanceof String || entry.getKey() instanceof MBeanOperationInfo, "entry.key=%s", entry.getKey());
            String opName = (entry.getKey() instanceof String) ? (String)entry.getKey() : ((MBeanOperationInfo)entry.getKey()).getName();
            operations.put(opName, (Function) entry.getValue());
        }

        // Describe every initial attribute as readable-only; description is
        // set to the attribute name (see class javadoc).
        Iterable<MBeanAttributeInfo> attrInfo = Iterables.transform(initialAttributes.entrySet(), new Function<Map.Entry<String,?>, MBeanAttributeInfo>() {
            @Override public MBeanAttributeInfo apply(Map.Entry<String,?> entry) {
                return new MBeanAttributeInfo(entry.getKey(), entry.getValue().getClass().getName(), entry.getKey(), true, false, false);
            }
        });

        Iterable<MBeanOperationInfo> opInfo = Iterables.transform(initialOperations.keySet(), new Function<Object, MBeanOperationInfo>() {
            public MBeanOperationInfo apply(Object it) {
                if (it instanceof MBeanOperationInfo) {
                    return (MBeanOperationInfo) it;
                } else if (it instanceof CharSequence) {
                    // Minimal descriptor: no parameters, void return.
                    return new MBeanOperationInfo(
                            it.toString(),
                            "my descr",
                            new MBeanParameterInfo[0],
                            "void",
                            MBeanOperationInfo.ACTION_INFO);
                } else {
                    throw new IllegalArgumentException("Cannot convert "+it+" to MBeanOperationInfo");
                }
            }});

        mBeanInfo = new MBeanInfo(
                GeneralisedDynamicMBean.class.getName(),
                GeneralisedDynamicMBean.class.getName(),
                Iterables.toArray(attrInfo, MBeanAttributeInfo.class),
                new MBeanConstructorInfo[0],
                Iterables.toArray(opInfo, MBeanOperationInfo.class),
                new MBeanNotificationInfo[0]);
    }

    /** Sets (or adds) an attribute value outside the JMX setAttribute path. */
    public void updateAttributeValue(String name, Object value) {
        attributes.put(name, value);
    }

    @Override
    public Object getAttribute(String s) {
        return attributes.get(s);
    }

    @Override
    public void setAttribute(Attribute attribute) {
        attributes.put(attribute.getName(), attribute.getValue());
    }

    @Override
    public AttributeList getAttributes(String[] strings) {
        // Per the DynamicMBean contract, return values for the *requested*
        // attribute names. The previous implementation ignored the argument
        // and iterated mBeanInfo.getAttributes(), casting each
        // MBeanAttributeInfo to Attribute — which always threw
        // ClassCastException at runtime.
        AttributeList result = new AttributeList();
        for (String name : strings) {
            result.add(new Attribute(name, attributes.get(name)));
        }
        return result;
    }

    @Override
    public AttributeList setAttributes(AttributeList attributeList) {
        for (Object element : attributeList) {
            Attribute attrib = (Attribute) element;
            attributes.put(attrib.getName(), attrib.getValue());
        }
        return attributeList;
    }

    @Override
    public Object invoke(String s, Object[] objects, String[] strings) {
        Function op = operations.get(s);
        if (op != null) {
            // Signature strings are ignored; dispatch is by name only.
            return op.apply(objects);
        } else {
            throw new RuntimeException("Unknown operation "+s);
        }
    }

    @Override
    public MBeanInfo getMBeanInfo() {
        return mBeanInfo;
    }
}
|
#include <iostream>
#include <ctime> // To seed random generator
#include <sys/time.h> // To seed random generator
#include <string>
//include shared files
#include <SomeTime.h>
#include <Interface/AnalyzerData.h>
#include "MainSolver/MainSolver.h"
#include "Basics.h"
using namespace std;
CMainSolver theSolver;
// No description
// Print the final #SAT run statistics gathered by the global analyzer
// (theRunAn) to stdout. Emits nothing but a TIMEOUT notice when the run
// timed out. Output ordering matters: downstream tooling parses these lines.
void finalcSATEvaluation()
{
    const AnalyzerData &rAda = theRunAn.getData();

    // A timed-out run has no meaningful counts — report and bail out.
    if (rAda.theExitState == TIMEOUT)
    {
        toSTDOUT(endl << " TIMEOUT !"<<endl);
        return;
    }

    toSTDOUT(endl<<endl);
    // Problem-size summary: variables/clauses before and after preprocessing.
    toSTDOUT("#Variables:\t\t"<< rAda.nVars<<endl);
    if (rAda.nVars != rAda.nUsedVars)
        toSTDOUT("#used Variables:\t"<< rAda.nUsedVars<<endl);
    toSTDOUT("#Clauses:\t\t"<< rAda.nOriginalClauses<<endl);
    toSTDOUT("#Clauses removed:\t"<< rAda.nRemovedClauses<<endl);
    toSTDOUT("\n#added Clauses: \t"<< rAda.nAddedClauses<<endl);

    // Model-count results.
    toSTDOUT("\n# of all assignments:\t" << rAda.getAllAssignments()
             << " = 2^(" << rAda.nVars<<")" <<endl);
    toSTDOUT("Pr[satisfaction]:\t" << rAda.rnProbOfSat <<endl);
    toSTDOUT("# of solutions:\t\t" << rAda.getNumSatAssignments() <<endl);
    toSTDOUT("#SAT (full): \t\t");
    // The full (big-number) count can be large; skip it in quiet mode.
    if (!CSolverConf::quietMode)
        rAda.printNumSatAss_whole();
    toSTDOUT(endl);
    toDEBUGOUT(".. found in:\t\t" << rAda.nReceivedSatAssignments << " units"<<endl);
    toSTDOUT(endl);

    // Search statistics.
    toSTDOUT("Num. conflicts:\t\t" << rAda.nConflicts<<endl);
    toSTDOUT("Num. implications:\t" << rAda.nImplications<<endl);
    toSTDOUT("Num. decisions:\t\t" << rAda.nDecisions<<endl);
    toSTDOUT("max decision level:\t" << rAda.maxDecLevel<<"\t\t");
    toSTDOUT("avg decision level:\t"<< rAda.get(AVG_DEC_LEV)<<endl);
    toSTDOUT("avg conflict level:\t"<< rAda.get(AVG_CONFLICT_LEV)<<endl);
    toSTDOUT("avg solution level:\t"<< rAda.get(AVG_SOLUTION_LEV)<<endl);
    // Conflict-clause lengths for both learning schemes (1stUIP / lastUIP).
    toSTDOUT("CCLLen 1stUIP - max:\t"<< rAda.get(LONGEST_CCL_1stUIP));
    toSTDOUT("\t avg:\t"<< rAda.get(AVG_CCL_1stUIP)<<endl);
    toSTDOUT("CCLLen lastUIP - max:\t"<< rAda.get(LONGEST_CCL_lastUIP));
    toSTDOUT("\t avg:\t"<< rAda.get(AVG_CCL_lastUIP)<<endl);
    toSTDOUT(endl);

    // Component-cache statistics.
    toSTDOUT("FormulaCache stats:"<<endl);
    toSTDOUT("memUse:\t\t\t"<<rAda.get(FCACHE_MEMUSE) <<endl);
    toSTDOUT("cached:\t\t\t"<<rAda.get(FCACHE_CACHEDCOMPS)<<endl);
    toSTDOUT("used Buckets:\t\t"<<rAda.get(FCACHE_USEDBUCKETS)<<endl);
    toSTDOUT("cache retrievals:\t"<<rAda.get(FCACHE_RETRIEVALS)<<endl);
    toSTDOUT("cache tries:\t\t"<<rAda.get(FCACHE_INCLUDETRIES)<<endl);

    toSTDOUT("\n\nTime: "<<rAda.elapsedTime<<"s\n\n");
    cout << "Runtime:" << rAda.elapsedTime << endl;
}
// Entry point of the dsharp model counter: parses command-line options,
// configures CSolverConf, runs the solver on the given CNF file and emits
// the requested statistics / d-DNNF output files.
int main(int argc, char *argv[])
{
    // Input CNF file; set by the last non-option argument. The original code
    // left this pointer uninitialized and passed it to solve() when no file
    // was given — undefined behavior.
    char *cnfFile = NULL;

    char dataFile[1024];
    memset(dataFile, 0, 1024);
    strcpy(dataFile, "data.txt");
    bool fileout = false;

    char graphFile[1024];
    memset(graphFile, 0, 1024);
    strcpy(graphFile, "bdg.txt");
    bool graphFileout = false;

    char nnfFile[1024];
    memset(nnfFile, 0, 1024);
    strcpy(nnfFile, "nnf.txt");
    bool nnfFileout = false;

    CSolverConf::analyzeConflicts = true;
    CSolverConf::doNonChronBackTracking = true;
    CSolverConf::nodeCount = 0;

    if (argc <= 1)
    {
        cout << "Usage: dsharp [options] [CNF_File]" << endl;
        cout << "Options: " << endl;
        cout << "\t -priority [v1,v2,..] \t\t use the priority variables as the first decision nodes" << endl;
        cout << "\t -noPP  \t\t turn off preprocessing" << endl;
        cout << "\t -noCA  \t\t turn off conflict analysis" << endl;
        cout << "\t -noCC  \t\t turn off component caching" << endl;
        cout << "\t -noNCB \t\t turn off nonchronological backtracking" << endl;
        cout << "\t -noIBCP\t\t turn off implicit BCP" << endl;
        cout << "\t -noDynDecomp\t\t turn off dynamic decomposition" << endl;
        cout << "\t -q     \t\t quiet mode" << endl;
        cout << "\t -t [s] \t\t set time bound to s seconds" << endl;
        cout << "\t -cs [n]\t\t set max cache size to n MB" << endl;
        cout << "\t -FrA [file] \t\t file to output the run statistics" << endl;
        cout << "\t -Fgraph [file] \t file to output the backdoor or d-DNNF graph" << endl;
        cout << "\t -Fnnf [file] \t\t file to output the nnf graph to" << endl;
        cout << "\t -smoothNNF \t\t post processing to smoothed d-DNNF" << endl;
        cout << "\t -disableAllLits \t when producing a smooth d-DNNF, don't bother enforcing every literal" << endl;
        cout << "\t" << endl;
        return -1;
    }

    for (int i = 1; i < argc; i++)
    {
        // One if/else-if chain for ALL options. In the original, the simple
        // flags were independent "if"s outside the chain, so e.g. "-noNCB"
        // also reached the final "else" and was mistaken for the CNF file.
        // Options taking a value consume it with ++i so the value is never
        // re-parsed as an argument.
        if (strcmp(argv[i], "-noNCB") == 0)
            CSolverConf::doNonChronBackTracking = false;
        else if (strcmp(argv[i], "-noCC") == 0)
            CSolverConf::allowComponentCaching = false;
        else if (strcmp(argv[i], "-noIBCP") == 0)
            CSolverConf::allowImplicitBCP = false;
        else if (strcmp(argv[i], "-smoothNNF") == 0)
            CSolverConf::smoothNNF = true;
        else if (strcmp(argv[i], "-disableAllLits") == 0)
            CSolverConf::ensureAllLits = false;
        else if (strcmp(argv[i], "-noDynDecomp") == 0)
            CSolverConf::disableDynamicDecomp = true;
        else if (strcmp(argv[i], "-noPP") == 0)
            CSolverConf::allowPreProcessing = false;
        else if (strcmp(argv[i], "-noCA") == 0)
            CSolverConf::analyzeConflicts = false;
        else if (strcmp(argv[i], "-q") == 0)
            CSolverConf::quietMode = true;
        else if (strcmp(argv[i], "-FrA") == 0)
        {
            if (argc <= i + 1)
            {
                toSTDOUT("wrong parameters"<<endl);
                return -1;
            }
            memset(dataFile, 0, 1024);
            fileout = true;
            strcpy(dataFile, argv[++i]); // consume the value argument
        }
        else if (strcmp(argv[i], "-Fgraph") == 0)
        {
            if (argc <= i + 1)
            {
                toSTDOUT("wrong parameters"<<endl);
                return -1;
            }
            memset(graphFile, 0, 1024);
            graphFileout = true;
            strcpy(graphFile, argv[++i]);
        }
        else if (strcmp(argv[i], "-Fnnf") == 0)
        {
            if (argc <= i + 1)
            {
                toSTDOUT("wrong parameters"<<endl);
                return -1;
            }
            memset(nnfFile, 0, 1024);
            nnfFileout = true;
            strcpy(nnfFile, argv[++i]);
        }
        else if (strcmp(argv[i], "-t") == 0)
        {
            if (argc <= i + 1)
            {
                toSTDOUT("wrong parameters"<<endl);
                return -1;
            }
            CSolverConf::secsTimeBound = atoi(argv[++i]);
            toSTDOUT("time bound:" <<CSolverConf::secsTimeBound<<"s\n");
            theSolver.setTimeBound(CSolverConf::secsTimeBound);
        }
        else if (strcmp(argv[i], "-priority") == 0)
        {
            if (argc <= i + 1)
            {
                toSTDOUT("wrong parameters"<<endl);
                return -1;
            }
            // Comma-separated list of variables to branch on first.
            size_t pos = 0;
            string varList = string(argv[++i]);
            string token;
            while ((pos = varList.find(",")) != string::npos) {
                token = varList.substr(0, pos);
                theSolver.priorityVars.insert(atoi(token.c_str()));
                varList.erase(0, pos + 1);
            }
            theSolver.priorityVars.insert(atoi(varList.c_str()));
            toSTDOUT("Using " << theSolver.priorityVars.size() << " priority variables.\n");
        }
        else if (strcmp(argv[i], "-cs") == 0)
        {
            if (argc <= i + 1)
            {
                toSTDOUT("wrong parameters"<<endl);
                return -1;
            }
            // Cache size is given in MB on the command line, stored in bytes.
            CSolverConf::maxCacheSize = atoi(argv[++i]) * 1024 * 1024;
        }
        else
            cnfFile = argv[i];
    }

    toSTDOUT("cachesize Max:\t"<<CSolverConf::maxCacheSize/1024 << " kbytes"<<endl);

    if (cnfFile == NULL)
    {
        toSTDOUT("wrong parameters"<<endl);
        return -1;
    }

    // first: delete all data in the output
    if (fileout)
        theRunAn.getData().writeToFile(dataFile);
    theRunAn = CRunAnalyzer();

    theSolver.solve(cnfFile);

    theRunAn.finishcountSATAnalysis();
    finalcSATEvaluation();
    if (fileout)
        theRunAn.getData().writeToFile(dataFile);

    // UNSAT theories are emitted as an empty Or node in the d-DNNF outputs.
    bool falsify = false;
    if (0 == theRunAn.getData().getNumSatAssignments()) {
        cout << "\nTheory is unsat. Resetting d-DNNF to empty Or.\n" << endl;
        falsify = true;
    }

    if (graphFileout)
        theSolver.writeBDG(graphFile, falsify);
    if (nnfFileout)
        theSolver.writeNNF(nnfFile, falsify);

    return 0;
}
|
<filename>flybirds/core/plugin/plugins/default/ui_driver/poco/poco_ele.py
# -*- coding: utf-8 -*-
"""
Poco element apis
"""
import time
import flybirds.core.global_resource as gr
import flybirds.core.plugin.plugins.default.ui_driver.poco.findsnap \
as find_snap
import flybirds.core.plugin.plugins.default.ui_driver.poco.poco_manage as pm
import flybirds.utils.flybirds_log as log
from flybirds.core.exceptions import FlybirdEleExistsException
from flybirds.core.exceptions import FlybirdVerifyException
from flybirds.core.global_context import GlobalContext as g_Context
from flybirds.utils import language_helper as lan
def wait_exists(poco, selector_str: str, optional: dict):
    """
    Determine whether the element exists within the specified time.

    Polls for the element described by ``selector_str``, dismissing known
    modal/error dialogs between attempts, until it appears or the time
    budget in ``optional["timeout"]`` (seconds) is spent.

    :param poco: poco driver instance used to locate UI elements.
    :param selector_str: DSL selector string describing the target element.
    :param optional: lookup options; must contain a "timeout" entry.
    :raises FlybirdVerifyException: if the element never appears in time.
    """
    timeout = optional["timeout"]
    # Polling interval starts at 1s and grows by 2s each round, so early
    # rounds react quickly and later rounds back off.
    current_wait_second = 1
    find_success = False
    while timeout > 0:
        create_success = False
        try:
            poco_target = pm.create_poco_object_by_dsl(
                poco, selector_str, optional
            )
            create_success = True
            # Cap each wait_for_appearance call at 3s so the modal-error
            # check below still runs regularly on long waits.
            search_time = current_wait_second
            if search_time > 3:
                search_time = 3
            ele_exists = poco_target.exists()
            log.info(
                "wait_exists: {}, ele_exists: {}, timeout: {}".format(
                    selector_str, ele_exists, timeout
                )
            )
            if ele_exists:
                find_success = True
                break
            # modal error detection
            detect_error()
            poco_target.wait_for_appearance(timeout=search_time)
            # wait_for_appearance raises on timeout, so reaching this line
            # means the element showed up.
            find_success = True
            log.info(
                "wait_for_appearance: find_success: {}, timeout: {}".format(
                    find_success, timeout
                )
            )
            break
        except Exception:
            # If we could not even build the poco object, sleep the full
            # interval; otherwise only the portion of the interval that was
            # not already spent inside wait_for_appearance (capped at 3s).
            if not create_success:
                time.sleep(current_wait_second)
            if current_wait_second > 3:
                time.sleep(current_wait_second - 3)
        timeout -= current_wait_second
        current_wait_second += 2
    if not find_success:
        message = "during {}s time, not find {} in page".format(
            optional["timeout"], selector_str
        )
        raise FlybirdVerifyException(message)
def not_exist(poco, selector_str, optional):
    """
    Assert that the element described by ``selector_str`` is absent.

    Any failure while building or querying the poco object is treated as
    "element not present".

    :raises FlybirdEleExistsException: if the element is found on the page.
    """
    found = False
    try:
        target = pm.create_poco_object_by_dsl(poco, selector_str, optional)
        found = target.exists()
    except Exception:
        found = False
    if found:
        raise FlybirdEleExistsException(
            "{} exists in page".format(selector_str)
        )
def wait_disappear(poco, selector_str: str, optional: dict):
    """
    Determine whether the element disappears within the specified time.

    Repeatedly waits for the element described by ``selector_str`` to
    disappear, within the budget given by ``optional["timeout"]`` (seconds).

    :param poco: poco driver instance used to locate UI elements.
    :param selector_str: DSL selector string describing the target element.
    :param optional: lookup options; must contain a "timeout" entry.
    :raises FlybirdVerifyException: if the element is still present when
        the timeout budget runs out.
    """
    timeout = optional["timeout"]
    # Interval grows by 1s per round (unlike wait_exists, which grows by 2s).
    current_wait_second = 1
    disappear_success = False
    while timeout > 0:
        create_success = False
        try:
            poco_target = pm.create_poco_object_by_dsl(
                poco, selector_str, optional
            )
            create_success = True
            # Cap each wait_for_disappearance call at 3s per round.
            search_time = current_wait_second
            if search_time > 3:
                search_time = 3
            poco_target.wait_for_disappearance(timeout=search_time)
            # wait_for_disappearance raises on timeout, so reaching this
            # line means the element is gone.
            disappear_success = True
            break
        except Exception:
            # Creation failure: sleep the whole interval. Otherwise sleep
            # only the part not already spent waiting (intervals above 3s).
            if not create_success:
                time.sleep(current_wait_second)
            if current_wait_second > 3:
                time.sleep(current_wait_second - 3)
        timeout -= current_wait_second
        current_wait_second += 1
    if not disappear_success:
        message = "during {}s time, {} not disappear in page".format(
            optional["timeout"], selector_str
        )
        raise FlybirdVerifyException(message)
def detect_error():
    """
    Scan the current page for known modal/error dialogs and dismiss them.

    Dialog selectors come from the language-specific "modal_list" config;
    every one found on screen is clicked away, optionally refreshing the
    snapshot cache when snapshots are enabled.
    """
    current_language = g_Context.get_current_language()
    dialog_selectors = lan.parse_glb_str("modal_list", current_language)
    driver = g_Context.ui_driver_instance
    for dialog_selector in dialog_selectors:
        dialog = pm.create_poco_object_by_dsl(driver, dialog_selector, None)
        if not dialog.exists():
            continue
        dialog.click()
        if gr.get_frame_config_value("use_snap", False):
            find_snap.fix_refresh_status(True)
        log.info("detect_error: {}, layer_errors_exists: true"
                 .format(dialog_selector))
        # Give the UI a moment to settle after the dialog is dismissed.
        time.sleep(0.5)
|
<gh_stars>0
package com.wanshare.wscomponent.qrscan.decoding;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Message;
import com.google.zxing.BinaryBitmap;
import com.google.zxing.DecodeHintType;
import com.google.zxing.MultiFormatReader;
import com.google.zxing.ReaderException;
import com.google.zxing.Result;
import com.google.zxing.common.HybridBinarizer;
import com.wanshare.wscomponent.qrscan.QrUtils;
import com.wanshare.wscomponent.qrscan.R;
import com.wanshare.wscomponent.qrscan.camera.PlanarYUVLuminanceSource;
import com.wanshare.wscomponent.qrscan.view.CameraView;
import java.util.Hashtable;
/**
* 二维码解析Handler.
*
* @author wangdunwei
* @since 1.0.0
*/
public class DecodeHandler extends HandlerThread implements Handler.Callback {

    private final MultiFormatReader multiFormatReader;
    // Bound to this thread's looper; created in onLooperPrepared(), so it is
    // null until the thread has started.
    private Handler handler;
    private CameraView cameraView;
    // Decoding hints restricting which barcode formats are attempted.
    private final Hashtable<DecodeHintType, Object> hints;

    public DecodeHandler(CameraView cameraView) {
        super("ZXing-Decode");
        this.cameraView = cameraView;
        hints = new Hashtable<DecodeHintType, Object>(3);
        hints.put(DecodeHintType.POSSIBLE_FORMATS, QrUtils.SUPPORT_FORMATS);
        multiFormatReader = new MultiFormatReader();
        // Bug fix: the hints table was built but never handed to the reader,
        // so the POSSIBLE_FORMATS restriction previously had no effect and
        // every format was tried on every frame.
        multiFormatReader.setHints(hints);
    }

    @Override
    protected void onLooperPrepared() {
        handler = new Handler(getLooper(), this);
    }

    @Override
    public boolean handleMessage(Message message) {
        if (message.what == R.id.decode) {
            // arg1/arg2 carry the preview frame's width/height.
            decode((byte[]) message.obj, message.arg1, message.arg2);
        } else if (message.what == R.id.quit) {
            quit();
        }
        return true;
    }

    /**
     * Decode the data within the viewfinder rectangle, and time how long it took. For efficiency,
     * reuse the same reader objects from one decode to the next.
     *
     * @param data   The YUV preview frame.
     * @param width  The width of the preview frame.
     * @param height The height of the preview frame.
     */
    private void decode(byte[] data, int width, int height) {
        Result rawResult = null;
        // Rotate the Y plane 90° clockwise so portrait-mode frames are
        // oriented the way the decoder expects.
        byte[] rotatedData = new byte[data.length];
        for (int y = 0; y < height; y++) {
            for (int x = 0; x < width; x++) {
                rotatedData[x * height + height - y - 1] = data[x + y * width];
            }
        }
        // After rotation the dimensions are swapped.
        int tmp = width;
        width = height;
        height = tmp;
        PlanarYUVLuminanceSource source = cameraView.buildLuminanceSource(rotatedData, width, height);
        BinaryBitmap bitmap = new BinaryBitmap(new HybridBinarizer(source));
        try {
            rawResult = multiFormatReader.decodeWithState(bitmap);
        } catch (ReaderException re) {
            // No barcode in this frame — expected; try the next frame.
        } finally {
            // Reset per-frame state so reuse across frames stays correct.
            multiFormatReader.reset();
        }
        if (rawResult != null) {
            Message message = Message.obtain(cameraView.getHandler(), R.id.decode_succeeded, rawResult);
            Bundle bundle = new Bundle();
            // Ship the cropped greyscale preview along for UI feedback.
            bundle.putParcelable(QrUtils.BARCODE_BITMAP, source.renderCroppedGreyscaleBitmap());
            message.setData(bundle);
            message.sendToTarget();
        } else {
            Message message = Message.obtain(cameraView.getHandler(), R.id.decode_failed);
            message.sendToTarget();
        }
    }

    public Handler getHandler() {
        return handler;
    }
}
|
const Sequelize = require('sequelize')
const db = require('../db')
// const {Order, OrderProduct, Product} = require('../models')
const Order = require('./order')
const OrderProduct = require('./orderProduct')
const Product = require('./product')
// Cart model: a shopping cart optionally owned by an anonymous visitor
// (temporaryUserId) instead of a logged-in user.
const Cart = db.define('cart', {
  temporaryUserId: {
    type: Sequelize.STRING,
    allowNull: true
  }
})

/**
 * Convert this cart into an order: create the Order row, snapshot every
 * cart item into an OrderProduct (capturing the price at purchase time),
 * then destroy the cart.
 *
 * @param {{orderInfo: Object}} param0 - attributes for the new Order row
 */
Cart.prototype.createOrder = async function({orderInfo}) {
  //create an order
  const order = await Order.create({...orderInfo})
  const cart = await Cart.findById(this.id, {include: [Product]})
  //create orderProducts
  // Bug fix: the map callback must RETURN each OrderProduct.create promise;
  // previously it returned undefined, so Promise.all resolved immediately
  // and the cart could be destroyed before the inserts finished.
  await Promise.all(
    cart.products.map(product =>
      OrderProduct.create({
        productId: product.id,
        orderId: order.id,
        price: product.price,
        quantity: product.cartProduct.quantity
      })
    )
  )
  await cart.destroy()
}

module.exports = Cart
|
#!/bin/bash -x
# workon richard
# Start the "richard" Django development server on all interfaces, port 9000,
# invoking the virtualenv's interpreter directly instead of activating it.
/home/carl/.virtualenvs/richard/bin/python ./manage.py runserver 0.0.0.0:9000
|
#!/bin/bash
# This script parses in the command line parameters from runCust,
# maps them to the correct command line parameters for DispNet training script and launches that task
# The last line of runCust should be: bash $CONFIG_FILE --data-dir $DATA_DIR --log-dir $LOG_DIR

# Parse the command line parameters
# that runCust will give out. NONE marks a parameter that was not supplied.
DATA_DIR=NONE
LOG_DIR=NONE
CONFIG_DIR=NONE
MODEL_DIR=NONE

# Parsing command line arguments:
# NOTE: inside [[ ]], ">" is a *lexical* string comparison; the numeric test
# is -gt. The original "[[ $# > 0 ]]" only worked for single-digit counts.
while [[ $# -gt 0 ]]
do
key="$1"

case $key in
    -h|--help)
    echo "Usage: run_dispnet_training_philly.sh [run_options]"
    echo "Options:"
    echo "  -d|--data-dir <path> - directory path to input data (default NONE)"
    echo "  -l|--log-dir <path> - directory path to save the log files (default NONE)"
    echo "  -p|--config-file-dir <path> - directory path to config file directory (default NONE)"
    echo "  -m|--model-dir <path> - directory path to output model file (default NONE)"
    exit 1
    ;;
    -d|--data-dir)
    DATA_DIR="$2"
    shift # pass argument
    ;;
    -p|--config-file-dir)
    # runCust passes the config *file*; we need its directory.
    CONFIG_DIR=`dirname $2`
    shift # pass argument
    ;;
    -m|--model-dir)
    MODEL_DIR="$2"
    shift # pass argument
    ;;
    -l|--log-dir)
    LOG_DIR="$2"
    shift
    ;;
    *)
    echo Unknown option $key
    ;;
esac
shift # past argument or value
done

# Prints out the arguments that were passed into the script
echo "DATA_DIR=$DATA_DIR"
echo "LOG_DIR=$LOG_DIR"
echo "CONFIG_DIR=$CONFIG_DIR"
echo "MODEL_DIR=$MODEL_DIR"

# Run training on philly
# Add the root folder of the code to the PYTHONPATH
export PYTHONPATH=$PYTHONPATH:$CONFIG_DIR

# Run the actual job
python $CONFIG_DIR/anytime_models/examples/resnet-ann.py \
--data_dir=$DATA_DIR \
--log_dir=$LOG_DIR \
--model_dir=$MODEL_DIR \
--load=${MODEL_DIR}/checkpoint \
-f=5 --samloss=100 -n=9 -c=16 -s=4 --ds_name=svhn --batch_size=64 --nr_gpu=1 --prediction_feature=none
|
package main
import "fmt"
// suma returns the sum of its variadic int arguments.
func suma(arr ...int) (sum int) {
	for i := 0; i < len(arr); i++ {
		sum += arr[i]
	}
	return sum
}
// sumb returns the sum of the elements of the given int slice.
func sumb(arr []int) (sum int) {
	for i := range arr {
		sum += arr[i]
	}
	return
}
// main demonstrates expanding a slice into a variadic call and printing
// the types of the two sum functions.
func main() {
	slice := []int{1, 2, 3, 4}
	fmt.Println(suma(slice...))
	// %T prints the dynamic type of the operand. The original used %t,
	// which is the *boolean* verb and prints "%!t(func(...) ...)" for a
	// function value.
	fmt.Printf("%T\n", suma)
	fmt.Printf("%T\n", sumb)
}
|
#!/bin/bash
# Stop the redis-proxy service by invoking its executable jar with "stop".
SERVICE_NAME=redis-proxy
# Fall back to the system-wide JDK when JAVA_HOME is not already set.
if [[ -z "$JAVA_HOME" && -d /usr/java/latest/ ]]; then
export JAVA_HOME=/usr/java/latest/
fi
# Work from the deployment root (the parent of this script's directory).
cd `dirname $0`/..
# If the jar is not at the root, assume a "current" release-dir layout.
if [[ ! -f $SERVICE_NAME".jar" && -d current ]]; then
cd current
fi
# Only act when the jar actually exists; it is a self-executing Spring Boot
# style jar, so it is run directly with the "stop" command.
if [[ -f $SERVICE_NAME".jar" ]]; then
chmod a+x $SERVICE_NAME".jar"
./$SERVICE_NAME".jar" stop
fi
|
#!/bin/sh
# Remove all CMake / CodeBlocks build artifacts from the source tree.
# Fixes: POSIX sh requires rm options BEFORE operands ("rm CMakeFiles -rf"
# only works with GNU rm), and -f keeps the cleanup quiet and successful
# when a target is already absent.
rm -f Arrr-ay-llvm.cbp
rm -f CMakeCache.txt
rm -rf CMakeFiles
rm -rf bin/
rm -rf dist/
rm -f CPack*.cmake
rm -f cmake_install.cmake
rm -f Makefile
<reponame>acavalin/gollum-lib
# ~*~ encoding: utf-8 ~*~
require File.expand_path(File.join(File.dirname(__FILE__), "helper"))
bilbo_page = "<h1 class=\"editable\"><a class=\"anchor\" id=\"bilbo-baggins\" href=\"#bilbo-baggins\"></a>Bilbo Baggins</h1>\n\n<p>Bilbo Baggins is the protagonist of The <a class=\"internal present\" href=\"/Hobbit.md\">Hobbit</a> and also makes a few\nappearances in The Lord of the Rings, two of the most well-known of <a class=\"internal absent\" href=\"/J.%20R.%20R.%20Tolkien\">J. <NAME></a>'s\n fantasy writings. The story of The Hobbit featuring Bilbo is also\nretold from a different perspective in the Chapter The Quest of Erebor in\nUnfinished Tales.</p>\n\n<p>In Tolkien's narrative conceit, in which all the writings of Middle-earth are\n'really' translations from the fictitious volume of The Red Book of Westmarch,\nBilbo is the author of The Hobbit and translator of The Silmarillion.</p>\n\n<p>From <a href=\"http://en.wikipedia.org/wiki/Bilbo_Baggins\">http://en.wikipedia.org/wiki/Bilbo_Baggins</a>.</p>"
# Read-path tests for Gollum::Page against the bare lotr.git example repo:
# page lookup, formatting, versions, header/footer/sidebar resolution and
# metadata display rules.
context "Page" do
  setup do
    @wiki = Gollum::Wiki.new(testpath("examples/lotr.git"))
  end

  test "get existing page" do
    page = @wiki.page('Bilbo-Baggins')
    assert_equal Gollum::Page, page.class
    assert page.raw_data =~ /^# Bilbo Baggins\n\nBilbo Baggins/
    expected = bilbo_page
    actual = page.formatted_data
    assert_html_equal expected, actual
    assert_equal 'Bilbo-Baggins.md', page.path
    assert_equal 'Bilbo-Baggins.md', page.escaped_url_path
    assert_equal :markdown, page.format
    assert_equal @wiki.repo.commits.first.id, page.version.id
    assert_not_nil page.last_version
    assert_equal page.versions.first.id, page.last_version.id
    assert_equal page.path, page.last_version.stats.files.first[:new_file]
  end

  test "getting pages is case sensitive" do
    assert_not_equal Gollum::Page, @wiki.page('bilbo-baggins').class
    assert_not_equal Gollum::Page, @wiki.page('Bilbo-baggins').class
    assert_equal Gollum::Page, @wiki.page('Bilbo-Baggins').class
  end

  test "requested path is sanitized" do
    # Duplicate leading slashes must not break lookup.
    assert_not_nil @wiki.page('//Bilbo-Baggins')
  end

  test "do not substitute whitespace for hyphens or underscores (regression test < 5.x)" do
    assert_not_nil @wiki.page('Bilbo-Baggins').path
    assert_nil @wiki.page('Bilbo_Baggins')
    assert_nil @wiki.page('Bilbo Baggins')
  end

  test "get nested page" do
    # Both with and without a leading slash.
    page = @wiki.page('Mordor/Eye-Of-Sauron')
    assert_equal 'Mordor/Eye-Of-Sauron.md', page.path
    page = @wiki.page('/Mordor/Eye-Of-Sauron')
    assert_equal 'Mordor/Eye-Of-Sauron.md', page.path
    assert_equal 'Mordor/Eye-Of-Sauron.md', page.escaped_url_path
  end

  test "url_path" do
    page = @wiki.page('Bilbo-Baggins')
    assert_equal 'Bilbo-Baggins.md', page.url_path
  end

  test "nested url_path" do
    page = @wiki.page('/Mordor/Eye-Of-Sauron')
    assert_equal 'Mordor/Eye-Of-Sauron.md', page.url_path
  end

  # Version histories are pinned to the commit SHAs of the fixture repo.
  test "page versions" do
    page = @wiki.page('Bilbo-Baggins')
    assert_equal ["ea8114ad3c40b90c536c18fae9ed8d1063b1b6fc", "f25eccd98e9b667f9e22946f3e2f945378b8a72d", "5bc1aaec6149e854078f1d0f8b71933bbc6c2e43"],
                 page.versions.map { |v| v.id }
  end

  test "page versions across renames" do
    page = @wiki.page 'My-Precious'
    assert_equal ['60f12f4254f58801b9ee7db7bca5fa8aeefaa56b', '94523d7ae48aeba575099dd12926420d8fd0425d'],
                 page.versions(:follow => true).map { |v| v.id }
  end

  test "page versions without renames" do
    page = @wiki.page 'My-Precious'
    assert_equal ['60f12f4254f58801b9ee7db7bca5fa8aeefaa56b'],
                 page.versions(:follow => false).map { |v| v.id }
  end

  test "specific page version" do
    page = @wiki.page('Bilbo-Baggins', 'fbabba862dfa7ac35b39042dd4ad780c9f67b8cb')
    assert_equal 'fbabba862dfa7ac35b39042dd4ad780c9f67b8cb', page.version.id
  end

  test "no page match" do
    assert_nil @wiki.page('I do not exist')
  end

  test "no version match" do
    assert_nil @wiki.page('Bilbo-Baggins', 'I do not exist')
  end

  test "no non-page match" do
    assert_nil @wiki.page('Data')
  end

  test "match with page extension" do
    page = @wiki.page 'Bilbo-Baggins.textile'
    assert_nil page
    page = @wiki.page 'Bilbo-Baggins.md'
    assert_equal Gollum::Page, page.class
  end

  test "title from filename with normal contents 1" do
    page = @wiki.page('Bilbo-Baggins')
    assert_equal 'Bilbo-Baggins', page.title
  end

  # _Header/_Footer/_Sidebar files attach to pages in and below their dir;
  # the special files themselves have no header/footer/sidebar.
  test "top level header" do
    header = @wiki.page('Home').header
    assert_equal "Hobbits\n", header.raw_data
    assert_equal "_Header.md", header.path
  end

  test "nested header" do
    header = @wiki.page('Mordor/Eye-Of-Sauron').header
    assert_equal "Sauron\n", header.raw_data
    assert_equal "Mordor/_Header.md", header.path
  end

  test "header itself" do
    header = @wiki.page("_Header")
    assert_nil header.header
    assert_nil header.footer
    assert_nil header.sidebar
  end

  test "top level footer" do
    footer = @wiki.page('Home').footer
    assert_equal 'Lord of the Rings wiki', footer.raw_data
    assert_equal '_Footer.md', footer.path
  end

  test "nested footer" do
    footer = @wiki.page('Mordor/Eye-Of-Sauron').footer
    assert_equal "Ones does not simply **walk** into Mordor!\n", footer.raw_data
    assert_equal "Mordor/_Footer.md", footer.path
  end

  test "footer itself" do
    footer = @wiki.page("_Footer")
    assert_nil footer.header
    assert_nil footer.footer
    assert_nil footer.sidebar
  end

  test "top level sidebar" do
    sidebar = @wiki.page('Home').sidebar
    assert_equal 'Lord of the Rings wiki', sidebar.raw_data
    assert_equal '_Sidebar.md', sidebar.path
  end

  test "nested sidebar" do
    sidebar = @wiki.page('Mordor/Eye-Of-Sauron').sidebar
    assert_equal "Ones does not simply **walk** into Mordor!\n", sidebar.raw_data
    assert_equal "Mordor/_Sidebar.md", sidebar.path
  end

  test "sidebar itself" do
    sidebar = @wiki.page("_Sidebar")
    assert_nil sidebar.header
    assert_nil sidebar.footer
    assert_nil sidebar.sidebar
  end

  test "normalize_dir" do
    assert_equal "", Gollum::BlobEntry.normalize_dir("")
    assert_equal "", Gollum::BlobEntry.normalize_dir(".")
    assert_equal "", Gollum::BlobEntry.normalize_dir("/")
    assert_equal "", Gollum::BlobEntry.normalize_dir("c:/")
    assert_equal "/foo", Gollum::BlobEntry.normalize_dir("foo")
    assert_equal "/foo", Gollum::BlobEntry.normalize_dir("/foo")
  end

  test 'page has sha id' do
    assert_equal "f83327d2f76d2ba94820f1ca4c20e700e8e62519", page = @wiki.page('Bilbo-Baggins').sha
  end

  test "tell whether metadata should be rendered" do
    # Metadata is displayed only when there is at least one key other than
    # 'title', and 'display_metadata' => false turns it off entirely.
    page = @wiki.page('Bilbo-Baggins')
    assert_equal false, page.display_metadata?
    page.stubs(:metadata).returns({'race' => 'hobbit'})
    assert_equal true, page.display_metadata?
    page.stubs(:metadata).returns({'title' => 'Only override title'})
    assert_equal false, page.display_metadata?
    page.stubs(:metadata).returns({'title' => 'Override title and have some more metadata', 'race' => 'hobbit'})
    assert_equal true, page.display_metadata?
    page.stubs(:metadata).returns({
      'title' => 'Override title and have some more metadata but explicitly turn off displaying of metadata',
      'race' => 'hobbit',
      'display_metadata' => false
    })
    assert_equal false, page.display_metadata?
  end
end
# Tests that run against a real working-copy clone (not a bare repo), so
# filesystem features like symbolic links are exercised.
context "with a checkout" do
  setup do
    @path = cloned_testpath("examples/lotr.git")
    @wiki = Gollum::Wiki.new(@path)
  end

  teardown do
    # The clone is created on disk per test run; remove it afterwards.
    FileUtils.rm_rf(@path)
  end

  test "get existing page with symbolic link" do
    # Hobbit.md is a symlink to Bilbo-Baggins.md; it must resolve to the
    # linked content while keeping its own path.
    page = @wiki.page("Hobbit")
    assert_equal Gollum::Page, page.class
    assert page.raw_data =~ /^# Bilbo Baggins\n\nBilbo Baggins/
    expected = bilbo_page
    actual = page.formatted_data
    assert_html_equal expected, actual
    assert_equal 'Hobbit.md', page.path
    assert_equal :markdown, page.format
  end
end
# Tests for wikis restricted to a subdirectory via :page_file_dir — lookups
# must be scoped to that directory while special pages are inherited from
# parent directories.
context "with a page-file-dir enabled" do
  setup do
    @wiki = Gollum::Wiki.new(testpath("examples/lotr.git"), { :page_file_dir => 'Rivendell' })
  end

  test "get existing page" do
    page = @wiki.page('Elrond')
    assert_equal Gollum::Page, page.class
    assert page.raw_data =~ /^# Elrond\n\nElrond/
    assert_equal 'Rivendell/Elrond.md', page.path
    assert_equal 'Elrond.md', page.escaped_url_path
    assert_equal :markdown, page.format
    assert_equal @wiki.repo.commits.first.id, page.version.id
  end

  test "should not get page from parent dir" do
    page = @wiki.page('Bilbo-Baggins')
    # assert_nil rather than `assert_equal nil, page`: comparing against nil
    # with assert_equal is deprecated and raises on Minitest 6.
    assert_nil page
  end

  test "should inherit header/footer/sidebar pages from parent directories" do
    page = @wiki.page('Elrond')
    assert page.sidebar.parent_page == page
    assert_equal Gollum::Page, page.sidebar.class
    assert_equal Gollum::Page, page.header.class
    assert_equal Gollum::Page, page.footer.class
    assert page.sidebar.raw_data =~ /^Lord of the Rings/
    assert page.header.raw_data =~ /^Hobbits/
    assert page.footer.raw_data =~ /^Lord of the Rings/
  end

  test "get metadata on page" do
    page = @wiki.page('Elrond')
    assert_equal Gollum::Page, page.class
    assert_equal 'elf', page.metadata['race']
  end
end
# Tests for user-registered markup engines.
context "with custom markup engines" do
  setup do
    # Register a toy "redacted" format (.rd files) that replaces every
    # non-whitespace character with a dash.
    Gollum::Markup.register(:redacted, "Redacted", :extensions => ['rd']) { |content| content.gsub(/\S/, '-') }
    @wiki = Gollum::Wiki.new(testpath("examples/lotr.git"))
  end

  test "should use the specified engine" do
    page = @wiki.page('Riddles')
    assert_equal :redacted, page.format
    assert page.raw_data.include? 'Time'
    assert page.raw_data =~ /^[\s\-]*$/
  end
end
# Tests for wiki-wide :metadata, which is merged into every page's metadata
# with page-level keys taking precedence.
context "with global metadata" do
  setup do
    @metadata = {'header_enum' => 'true'}
    @wiki = Gollum::Wiki.new(testpath("examples/lotr.git"), :metadata => @metadata)
  end

  test "global metadata available on each page" do
    page = @wiki.page('Bilbo-Baggins')
    assert_equal @metadata, page.metadata
  end

  test "global metadata merges with page specific metadata" do
    page = @wiki.page('Rivendell/Elrond')
    result = {'race' => 'elf'}.merge(@metadata)
    assert_equal result, page.metadata
  end

  test "page metadata overrides global metadata" do
    page = @wiki.page('Rivendell/Elrond')
    @wiki.stubs(:metadata).returns({'race' => 'wombat'})
    result = {'race' => 'elf'}
    assert_equal result, page.metadata
  end
end
|
<reponame>MacroGDM/Administracion-de-horarios
// Show a confirmation alert when the month selector ('mes') in the page's
// form is set to 'Sep'.
function ver() {
  var selectedMonth = document.form.mes.value;
  if (selectedMonth === 'Sep') {
    alert("Yes");
  }
}
import React from 'react';
import styled from 'styled-components';
import { Avatar, Link, styles } from '@storybook/design-system';
import Subheading from './Subheading';
const { breakpoint, typography } = styles;
// Large, centered section title ("Meet the team").
const Heading = styled.div`
font-size: 36px;
font-weight: ${typography.weight.black};
line-height: 40px;
text-align: center;
margin-bottom: 48px;
`;
// Flex row wrapper for one team member; the isFirst prop tightens the top
// margin (24px instead of the default 46px between rows).
const Editor = styled.div`
display: flex;
margin-top: ${props => (props.isFirst ? 24 : 46)}px;
`;
// Member photo: 48px on small screens, growing to 80px at the design-system
// breakpoint.
const EditorAvatar = styled(Avatar)`
height: 48px;
width: 48px;
margin-right: 20px;
margin-top: 8px;
@media (min-width: ${breakpoint}px) {
height: 80px;
width: 80px;
margin-right: 30px;
}
`;
// Bold inline name shown immediately before the member's LinkedIn handle.
const EditorName = styled.span`
font-size: ${typography.size.s3}px;
font-weight: ${typography.weight.black};
line-height: 36px;
margin-right: 4px;
`;
// Text column; takes the remaining horizontal space in the flex row.
const EditorDescription = styled.div`
flex: 1;
`;
const MeetTheTeam = () => (
<>
<Heading>Meet the team</Heading>
<Subheading>Editors</Subheading>
<Editor isFirst>
<EditorAvatar src="https://avatars0.githubusercontent.com/u/14214630?s=460&v=4" size="large" />
<EditorDescription>
<div>
<EditorName>Sabari </EditorName>
<Link secondary target="_blank" rel="noopener" href="https://www.linkedin.com/in/sabarinathan-in/">
@sabari
</Link>
</div>
<div>
Sabarinathan is a mobile developer who focuses on end to end mobile app development and architecture.
Previously, he developed many large scale enterprise applications that uses industry's best practises. He worked with big MNC to many startups as consultant. He writes about best practices, performance, and process.
</div>
</EditorDescription>
</Editor>
<Editor isFirst>
<EditorAvatar src="https://media.licdn.com/dms/image/C5103AQGhqSJVIh-Yqg/profile-displayphoto-shrink_200_200/0?e=1583366400&v=beta&t=tKEaza4S_XMzo9qMONsFUoIjr3PFBPoZHJbNKkcIgds" size="large" />
<EditorDescription>
<div>
<EditorName>Kalai </EditorName>
<Link secondary target="_blank" rel="noopener" href="https://www.linkedin.com/in/kalaiselvan-c-b3b6a162/">
@kalai
</Link>
</div>
<div>
Kalai is a Google Certified Android Associate Developer. He does real engineering with the mobile apps. He worked in some of the biggest IT companies in the world.
He is a Flight simulation enthusiast and he builds simulators in free time.
</div>
</EditorDescription>
</Editor>
<Editor isFirst>
<EditorAvatar src="https://media.licdn.com/dms/image/C4E03AQH8ZdWuLHm8Aw/profile-displayphoto-shrink_200_200/0?e=1583366400&v=beta&t=Rv6eJuiY5RdhSSYzMpFwUVtC6PBH_IOw9AONh5YEBuU" size="large" />
<EditorDescription>
<div>
<EditorName>Prasanth </EditorName>
<Link secondary target="_blank" rel="noopener" href="https://www.linkedin.com/in/prasanth-kumar-lalapeta-369a42a9/">
@prasanth
</Link>
</div>
<div>
Prasanth is a rockstar Frontend developer. He is highly skilled in React, Angular. Prasanth is aspirating UX guy also.
He holds multiple hats in the organization. He is a renowned speaker in the tech community. And he holds experience working in biggest IT company to startups.
</div>
</EditorDescription>
</Editor>
</>
);
export default MeetTheTeam;
|
# This file must be used with "source deploy_local_beta/run.sh" *from bash*
# you cannot run it directly
# Guard: this script mutates the caller's environment, so it must be
# sourced, not executed. Exit code 33 marks this specific misuse.
if [ "${BASH_SOURCE-}" = "$0" ]; then
echo "You must source this script: \$ source $0" >&2
exit 33
fi
#TODO: check if exist:
# python 2 or 3
# pip
# virtualenv or python venv pkg
# git
#
# SETTINGS
##########
# Loads the ANYBETA_* configuration and helper functions (ANYBETA_usage,
# ANYBETA_error, ANYBETA_activate, ANYBETA_DEPLOY) -- presumably defined
# there; confirm in deploy_local_beta/settings.sh.
. ./deploy_local_beta/settings.sh
# PARSE ARGS
############
# Arguments use NAME=VALUE form; awk splits each on the first '='.
while (( "$#" )); do
ANYBETA_PARAM=`echo $1 | awk -F= '{ print $1 }'`
ANYBETA_VALUE=`echo $1 | awk -F= '{ print $2 }'`
case $ANYBETA_PARAM in
-p|--python-path)
ANYBETA_PYTHON_PATH=$ANYBETA_VALUE
shift
;;
-h|--help)
# NOTE(review): since this script is always sourced, 'exit 0' here
# terminates the caller's shell -- confirm 'return 0' isn't intended.
ANYBETA_usage
exit 0
;;
*)
ANYBETA_error "Error: unknown parameter $ANYBETA_PARAM"
exit 1
;;
esac
done
# NOTE(review): the loop above shifts every argument away, so "$@" is empty
# at this point -- confirm deploy_local_beta.sh expects no arguments.
$ANYBETA_DEPLOY/deploy_local_beta.sh "$@"
ANYBETA_activate
|
#!/usr/bin/env bash
# Build the tolua bundle for the selected Lua version (5.1 or 5.3) and copy
# it into the matching Unity Plugins directory.

lua53dir="macnojit53/"
lua51dir="macnojit/"
luapath=""
lualibname="liblua"
outpath=""

# Prompt until the user picks a supported Lua version.
while :
do
    echo "Please choose (1)lua5.1; (2)lua5.3"
    read input
    case $input in
        "1")
            luapath=$lua51dir
            outpath="Plugins"
            break
            ;;
        "2")
            luapath=$lua53dir
            outpath="Plugins53"
            break
            ;;
        *)
            echo "Please enter 1 or 2!!"
            continue
            ;;
    esac
done

# Resolve the directory containing this script so it works from any CWD.
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
# Quote the path and bail out if cd fails, so the build/copy below cannot
# run in the wrong directory.
cd "$DIR/$luapath" || exit 1
xcodebuild clean
# xcodebuild takes '-configuration <name>' as two words; the previous
# '-configuration=Release' spelling is not a recognized option.
xcodebuild -configuration Release
cp -r build/Release/tolua.bundle "../$outpath/"
|
import android.app.Activity;
import android.os.Bundle;
import android.view.View;
import android.widget.Button;
import android.widget.TextView;
// Single-screen tic-tac-toe activity: binds the nine grid buttons and the
// status label, initializes game state, and delegates input handling to
// setClickListener() (implemented among the other methods elided below).
public class TicTacToeActivity extends Activity {
// The nine board cells, laid out as a 3x3 grid in activity_main.
private Button btn1;
private Button btn2;
private Button btn3;
private Button btn4;
private Button btn5;
private Button btn6;
private Button btn7;
private Button btn8;
private Button btn9;
// Status label ("X's turn to play", ...).
private TextView tView;
// Whose move it is; 1 appears to mean player X (see the initial status
// text) -- confirm against the click handler.
private int currentPlayer;
// 3x3 board state; 'new int[3][3]' zero-fills, so 0 marks an empty cell.
private int[][] playerCells;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
// Cache references to the nine board buttons and the status label.
btn1 = (Button) findViewById(R.id.btn1);
btn2 = (Button) findViewById(R.id.btn2);
btn3 = (Button) findViewById(R.id.btn3);
btn4 = (Button) findViewById(R.id.btn4);
btn5 = (Button) findViewById(R.id.btn5);
btn6 = (Button) findViewById(R.id.btn6);
btn7 = (Button) findViewById(R.id.btn7);
btn8 = (Button) findViewById(R.id.btn8);
btn9 = (Button) findViewById(R.id.btn9);
tView = (TextView) findViewById(R.id.tView);
// X always moves first.
tView.setText("X's turn to play");
currentPlayer = 1;
playerCells = new int[3][3];
setClickListener();
}
// Other methods (click handling, win detection, ...) are elided here.
} |
/*
* @link https://libretask.org/
* @license https://github.com/LibreTask/desktop/blob/master/LICENSE.md
*/
// App-wide configuration constants shared across the desktop client.
const CONSTANTS = {
// External pages opened from the UI.
PASSWORD_RESET_LINK: "https://libretask.org/reset-password",
PRODUCT_PRIVACY_LINK: "https://libretask.org/privacy",
PRODUCT_TERMS_LINK: "https://libretask.org/terms",
WEBSITE_LINK: "https://libretask.org",
SOURCE_CODE_LINK: "https://github.com/LibreTask/desktop",
APP_UPDATE_LINK: "https://libretask.org/apps", // TODO -
APP_VERSION: "0.0.1", // TODO - pull from package.json instead
APP_NAME: "LibreTask",
// TODO - move these button constants to more appropriate location
// String identifiers for navbar buttons (used as action keys elsewhere).
EDIT_NAVBAR_BUTTON: "EDIT_NAVBAR_BUTTON",
CREATE_NAVBAR_BUTTON: "CREATE_NAVBAR_BUTTON",
DELETE_NAVBAR_BUTTON: "DELETE_NAVBAR_BUTTON",
SAVE_NAVBAR_BUTTON: "SAVE_NAVBAR_BUTTON",
BACK_NAVBAR_BUTTON: "BACK_NAVBAR_BUTTON",
SETTINGS_NAV_BUTTON: "SETTINGS_NAV_BUTTON",
// Initial window dimensions in pixels.
INITIAL_WINDOW_HEIGHT: 420,
INITIAL_WINDOW_WIDTH: 380,
// Background timer intervals, all in milliseconds.
SYNC_INTERVAL_MILLIS: 30 * 1000, // 30 seconds
QUEUED_TASK_SUBMISSION_INTERVAL_MILLIS: 60 * 1000, // 60 seconds
QUEUED_PROFILE_SUBMISSION_INTERVAL_MILLIS: 60 * 1000, // 60 seconds
TASK_CLEANUP_INTERVAL_MILLIS: 60 * 1000 * 60 * 24, // one day
// check each minute whether the taskview should be updated
// note this is primarily used to update the TaskView at midnight
TASKVIEW_REFRESH_CHECK_INTERVAL_MILLIS: 60 * 1000
};
module.exports = CONSTANTS;
|
<gh_stars>0
//
// FTViewController.h
// test
//
// Created by <NAME> on 01/03/2020.
// Copyright (c) 2020 <NAME>. All rights reserved.
//
@import UIKit;
// View controller for the sample app; declares no public API beyond what
// UIViewController already provides.
@interface FTViewController : UIViewController
@end
|
#!/bin/bash
# Provision directories and global npm tooling for a Meteor deployment.
# Templated by the deploy tool: <%= appName %> is substituted before running.
sudo mkdir -p /opt/<%= appName %>/
sudo mkdir -p /opt/<%= appName %>/config
sudo mkdir -p /opt/<%= appName %>/tmp
sudo chown ${USER} /opt/<%= appName %> -R
# NOTE(review): chown'ing /etc/init and /etc itself to ${USER} is very broad
# and a likely security risk -- confirm whether only specific init files
# need to be writable.
sudo chown ${USER} /etc/init
sudo chown ${USER} /etc/
sudo npm install -g userdown wait-for-mongo node-gyp
# Creating a non-privileged user
# '|| :' keeps the script going if the user already exists.
sudo useradd meteoruser || :
|
#!/usr/bin/env bash
#############################################################################
#
# This script sets the cross-compile environment for Android.
#
# Based upon OpenSSL's setenv-android.sh by TH, JW, and SM.
# Heavily modified by JWW for Crypto++.
# Modified by Skycoder42 Android NDK-r19 and above.
#
# Crypto++ Library is copyrighted as a compilation and (as of version 5.6.2)
# licensed under the Boost Software License 1.0, while the individual files
# in the compilation are all public domain.
#
# Also see:
# https://android.googlesource.com/platform/ndk.git/+/HEAD/docs/UnifiedHeaders.md
# https://android.googlesource.com/platform/ndk/+/master/docs/PlatformApis.md
# https://developer.android.com/ndk/guides/abis.html and
# https://developer.android.com/ndk/guides/cpp-support.
#
# See http://www.cryptopp.com/wiki/Android_(Command_Line) for more details
#############################################################################
#########################################
##### Some validation #####
#########################################
# cryptest-android.sh may run this script without sourcing.
if [ "$0" = "${BASH_SOURCE[0]}" ]; then
echo "setenv-android.sh is usually sourced, but not this time."
fi
# This supports both 'source setenv-android.sh 21 arm64' and
# 'source setenv-android.sh ANDROID_API=21 ANDROID_CPU=arm64'
# First positional argument selects the Android API level.
if [[ -n "$1" ]]
then
# Split "NAME=VALUE" on '='. When the argument has no '=', cut (without
# -s) echoes the whole string for either field, so a bare value works too.
arg1=$(echo "$1" | cut -f 1 -d '=')
arg2=$(echo "$1" | cut -f 2 -d '=')
if [[ -n "${arg2}" ]]; then
ANDROID_API="${arg2}"
else
ANDROID_API="${arg1}"
fi
printf "Using positional arg, ANDROID_API=%s\n" "${ANDROID_API}"
fi
# This supports both 'source setenv-android.sh 21 arm64' and
# 'source setenv-android.sh ANDROID_API=21 ANDROID_CPU=arm64'
# Second positional argument selects the target CPU/ABI.
if [[ -n "$2" ]]
then
arg1=$(echo "$2" | cut -f 1 -d '=')
arg2=$(echo "$2" | cut -f 2 -d '=')
if [[ -n "${arg2}" ]]; then
ANDROID_CPU="${arg2}"
else
ANDROID_CPU="${arg1}"
fi
printf "Using positional arg, ANDROID_CPU=%s\n" "${ANDROID_CPU}"
fi
# Both settings are mandatory. The '[ "$0" = ... ] && exit || return'
# idiom exits when executed but returns when sourced, so the caller's
# shell survives.
if [ -z "${ANDROID_API}" ]; then
echo "ANDROID_API is not set. Please set it"
[ "$0" = "${BASH_SOURCE[0]}" ] && exit 1 || return 1
fi
if [ -z "${ANDROID_CPU}" ]; then
echo "ANDROID_CPU is not set. Please set it"
[ "$0" = "${BASH_SOURCE[0]}" ] && exit 1 || return 1
fi
#########################################
##### Clear old options #####
#########################################
# Start from a clean slate: forget any values left from a previous
# iOS/Android/embedded environment setup.
unset IS_IOS
unset IS_ANDROID
unset IS_ARM_EMBEDDED
unset ANDROID_CPPFLAGS
unset ANDROID_CXXFLAGS
unset ANDROID_LDFLAGS
unset ANDROID_SYSROOT
#####################################################################
# ANDROID_NDK_ROOT should always be set by the user (even when not running this script)
# http://groups.google.com/group/android-ndk/browse_thread/thread/a998e139aca71d77.
# If the user did not specify the NDK location, try and pick it up. Something like
# ANDROID_NDK_ROOT=/opt/android-ndk-r19c or ANDROID_NDK_ROOT=/usr/local/android-ndk-r20.
if [ -n "${ANDROID_NDK_ROOT}" ]; then
echo "ANDROID_NDK_ROOT is ${ANDROID_NDK_ROOT}"
else
echo "ANDROID_NDK_ROOT is empty. Searching for the NDK"
# Search order: /opt, /usr/local, then $HOME; 'tail -n -1' keeps the last
# match found.
ANDROID_NDK_ROOT=$(find /opt -maxdepth 1 -type d -name "android-ndk*" 2>/dev/null | tail -n -1)
if [ -z "${ANDROID_NDK_ROOT}" ]; then
ANDROID_NDK_ROOT=$(find /usr/local -maxdepth 1 -type d -name "android-ndk*" 2>/dev/null | tail -n -1)
fi
if [ -z "${ANDROID_NDK_ROOT}" ]; then
ANDROID_NDK_ROOT=$(find "$HOME" -maxdepth 1 -type d -name "android-ndk*" 2>/dev/null | tail -n -1)
fi
# NOTE(review): this overrides any NDK found above whenever the macOS
# SDK's ndk-bundle exists -- confirm that precedence is intended.
if [ -d "$HOME/Library/Android/sdk/ndk-bundle" ]; then
ANDROID_NDK_ROOT="$HOME/Library/Android/sdk/ndk-bundle"
fi
fi
# Error checking
if [ ! -d "${ANDROID_NDK_ROOT}" ]; then
echo "ERROR: ANDROID_NDK_ROOT is not a valid path for ${USER}. Please set it."
echo "ANDROID_NDK_ROOT is '${ANDROID_NDK_ROOT}'"
[ "$0" = "${BASH_SOURCE[0]}" ] && exit 1 || return 1
fi
# Error checking
if [ ! -d "${ANDROID_SDK_ROOT}" ]; then
echo "ERROR: ANDROID_SDK_ROOT is not a valid path for ${USER}. Please set it."
echo "ANDROID_SDK_ROOT is '${ANDROID_SDK_ROOT}'"
[ "$0" = "${BASH_SOURCE[0]}" ] && exit 1 || return 1
fi
# User feedback
#echo "ANDROID_NDK_ROOT is '${ANDROID_NDK_ROOT}'"
#echo "ANDROID_SDK_ROOT is '${ANDROID_SDK_ROOT}'"
#####################################################################
# Need to set HOST_TAG to darwin-x86_64, linux-x86_64,
# windows, or windows-x86_64
# Only macOS and Linux hosts are detected here; anything else aborts.
if [[ "$(uname -s | grep -i -c darwin)" -ne 0 ]]; then
HOST_TAG=darwin-x86_64
elif [[ "$(uname -s | grep -i -c linux)" -ne 0 ]]; then
HOST_TAG=linux-x86_64
else
echo "ERROR: Unknown host"
[ "$0" = "${BASH_SOURCE[0]}" ] && exit 1 || return 1
fi
# NDK r19+ layout: prebuilt clang toolchain and unified sysroot.
ANDROID_TOOLCHAIN="${ANDROID_NDK_ROOT}/toolchains/llvm/prebuilt/${HOST_TAG}/bin"
ANDROID_SYSROOT="${ANDROID_NDK_ROOT}/toolchains/llvm/prebuilt/${HOST_TAG}/sysroot"
# Error checking
if [ ! -d "${ANDROID_TOOLCHAIN}" ]; then
echo "ERROR: ANDROID_TOOLCHAIN is not a valid path. Please set it."
echo "ANDROID_TOOLCHAIN is '${ANDROID_TOOLCHAIN}'"
[ "$0" = "${BASH_SOURCE[0]}" ] && exit 1 || return 1
fi
# Error checking
if [ ! -d "${ANDROID_SYSROOT}" ]; then
echo "ERROR: ANDROID_SYSROOT is not a valid path. Please set it."
echo "ANDROID_SYSROOT is '${ANDROID_SYSROOT}'"
[ "$0" = "${BASH_SOURCE[0]}" ] && exit 1 || return 1
fi
#####################################################################
# Normalize the requested CPU name for the case match below.
THE_ARCH=$(tr '[:upper:]' '[:lower:]' <<< "${ANDROID_CPU}")
# https://developer.android.com/ndk/guides/abis.html and
# https://developer.android.com/ndk/guides/cpp-support.
# Since NDK r16 the only STL available is libc++, so we
# add -std=c++11 -stdlib=libc++ to CXXFLAGS. This is
# consistent with Android.mk and 'APP_STL := c++_shared'.
# Each arm selects the per-ABI clang wrappers (named <triple><API>-clang)
# plus the matching binutils and compiler flags.
case "$THE_ARCH" in
armv7*|armeabi*)
CC="armv7a-linux-androideabi${ANDROID_API}-clang"
CXX="armv7a-linux-androideabi${ANDROID_API}-clang++"
LD="arm-linux-androideabi-ld"
AS="arm-linux-androideabi-as"
AR="arm-linux-androideabi-ar"
RANLIB="arm-linux-androideabi-ranlib"
STRIP="arm-linux-androideabi-strip"
# You may need this on older NDKs
# ANDROID_CPPFLAGS="-D__ANDROID__=${ANDROID_API}"
# Android NDK r19 and r20 no longer use -mfloat-abi=softfp. Add it as required.
ANDROID_CXXFLAGS="-target armv7-none-linux-androideabi${ANDROID_API} -std=c++11 -stdlib=libc++"
ANDROID_CXXFLAGS="${ANDROID_CXXFLAGS} -march=armv7-a -mthumb"
ANDROID_CXXFLAGS="${ANDROID_CXXFLAGS} -fstack-protector-strong -funwind-tables -fexceptions -frtti"
ANDROID_CXXFLAGS="${ANDROID_CXXFLAGS} -fno-addrsig -fno-experimental-isel"
;;
armv8*|aarch64|arm64*)
CC="aarch64-linux-android${ANDROID_API}-clang"
CXX="aarch64-linux-android${ANDROID_API}-clang++"
LD="aarch64-linux-android-ld"
AS="aarch64-linux-android-as"
AR="aarch64-linux-android-ar"
RANLIB="aarch64-linux-android-ranlib"
STRIP="aarch64-linux-android-strip"
# You may need this on older NDKs
# ANDROID_CPPFLAGS="-D__ANDROID__=${ANDROID_API}"
ANDROID_CXXFLAGS="-target aarch64-none-linux-android${ANDROID_API}"
ANDROID_CXXFLAGS="${ANDROID_CXXFLAGS} -std=c++11 -stdlib=libc++"
ANDROID_CXXFLAGS="${ANDROID_CXXFLAGS} -fstack-protector-strong -funwind-tables -fexceptions -frtti"
ANDROID_CXXFLAGS="${ANDROID_CXXFLAGS} -fno-addrsig -fno-experimental-isel"
;;
i686|x86)
CC="i686-linux-android${ANDROID_API}-clang"
CXX="i686-linux-android${ANDROID_API}-clang++"
LD="i686-linux-android-ld"
AS="i686-linux-android-as"
AR="i686-linux-android-ar"
RANLIB="i686-linux-android-ranlib"
STRIP="i686-linux-android-strip"
# You may need this on older NDKs
# ANDROID_CPPFLAGS="-D__ANDROID__=${ANDROID_API}"
ANDROID_CXXFLAGS="-target i686-none-linux-android${ANDROID_API}"
ANDROID_CXXFLAGS="${ANDROID_CXXFLAGS} -std=c++11 -stdlib=libc++"
ANDROID_CXXFLAGS="${ANDROID_CXXFLAGS} -mtune=intel -mssse3 -mfpmath=sse"
ANDROID_CXXFLAGS="${ANDROID_CXXFLAGS} -fstack-protector-strong -funwind-tables -fexceptions -frtti"
ANDROID_CXXFLAGS="${ANDROID_CXXFLAGS} -fno-addrsig -fno-experimental-isel"
;;
x86_64|x64)
CC="x86_64-linux-android${ANDROID_API}-clang"
CXX="x86_64-linux-android${ANDROID_API}-clang++"
LD="x86_64-linux-android-ld"
AS="x86_64-linux-android-as"
AR="x86_64-linux-android-ar"
RANLIB="x86_64-linux-android-ranlib"
STRIP="x86_64-linux-android-strip"
# You may need this on older NDKs
# ANDROID_CPPFLAGS="-D__ANDROID__=${ANDROID_API}"
ANDROID_CXXFLAGS="-target x86_64-none-linux-android${ANDROID_API}"
ANDROID_CXXFLAGS="${ANDROID_CXXFLAGS} -std=c++11 -stdlib=libc++"
ANDROID_CXXFLAGS="${ANDROID_CXXFLAGS} -march=x86-64 -msse4.2 -mpopcnt -mtune=intel"
ANDROID_CXXFLAGS="${ANDROID_CXXFLAGS} -fstack-protector-strong -funwind-tables -fexceptions -frtti"
ANDROID_CXXFLAGS="${ANDROID_CXXFLAGS} -fno-addrsig -fno-experimental-isel"
;;
*)
echo "ERROR: Unknown architecture ${ANDROID_CPU}"
[ "$0" = "${BASH_SOURCE[0]}" ] && exit 1 || return 1
;;
esac
#####################################################################
# GNUmakefile-cross and Autotools expect these to be set.
# They are also used in the tests below.
export IS_ANDROID=1
export CPP CC CXX LD AS AR RANLIB STRIP
export ANDROID_CPPFLAGS ANDROID_CXXFLAGS ANDROID_LDFLAGS
export ANDROID_API ANDROID_CPU ANDROID_SYSROOT
# Do NOT use ANDROID_SYSROOT_INC or ANDROID_SYSROOT_LD
# https://github.com/android/ndk/issues/894#issuecomment-470837964
#####################################################################
# Verify that every tool selected above actually exists in the toolchain.
# Error checking
if [ ! -e "${ANDROID_TOOLCHAIN}/$CC" ]; then
echo "ERROR: Failed to find Android clang. Please edit this script."
[ "$0" = "${BASH_SOURCE[0]}" ] && exit 1 || return 1
fi
# Error checking
if [ ! -e "${ANDROID_TOOLCHAIN}/$CXX" ]; then
echo "ERROR: Failed to find Android clang++. Please edit this script."
[ "$0" = "${BASH_SOURCE[0]}" ] && exit 1 || return 1
fi
# Error checking
if [ ! -e "${ANDROID_TOOLCHAIN}/$RANLIB" ]; then
echo "ERROR: Failed to find Android ranlib. Please edit this script."
[ "$0" = "${BASH_SOURCE[0]}" ] && exit 1 || return 1
fi
# Error checking
if [ ! -e "${ANDROID_TOOLCHAIN}/$AR" ]; then
echo "ERROR: Failed to find Android ar. Please edit this script."
[ "$0" = "${BASH_SOURCE[0]}" ] && exit 1 || return 1
fi
# Error checking
if [ ! -e "${ANDROID_TOOLCHAIN}/$AS" ]; then
echo "ERROR: Failed to find Android as. Please edit this script."
[ "$0" = "${BASH_SOURCE[0]}" ] && exit 1 || return 1
fi
# Error checking
if [ ! -e "${ANDROID_TOOLCHAIN}/$LD" ]; then
echo "ERROR: Failed to find Android ld. Please edit this script."
[ "$0" = "${BASH_SOURCE[0]}" ] && exit 1 || return 1
fi
#####################################################################
# Add tools to head of path, if not present already
# Compares the PATH prefix against the toolchain dir to avoid duplicates.
LENGTH=${#ANDROID_TOOLCHAIN}
SUBSTR=${PATH:0:$LENGTH}
if [ "$SUBSTR" != "${ANDROID_TOOLCHAIN}" ]; then
export PATH="${ANDROID_TOOLCHAIN}:$PATH"
fi
#####################################################################
# Now that we are using cpu-features from Android rather than
# CPU probing, we need to copy cpu-features.h and cpu-features.c
# from the NDK into our source directory and then build it.
if [[ ! -e "${ANDROID_NDK_ROOT}/sources/android/cpufeatures/cpu-features.h" ]]; then
echo "ERROR: Unable to locate cpu-features.h"
[ "$0" = "${BASH_SOURCE[0]}" ] && exit 1 || return 1
fi
if [[ ! -e "${ANDROID_NDK_ROOT}/sources/android/cpufeatures/cpu-features.c" ]]; then
echo "ERROR: Unable to locate cpu-features.c"
[ "$0" = "${BASH_SOURCE[0]}" ] && exit 1 || return 1
fi
cp "${ANDROID_NDK_ROOT}/sources/android/cpufeatures/cpu-features.h" .
cp "${ANDROID_NDK_ROOT}/sources/android/cpufeatures/cpu-features.c" .
# Cleanup the sources for the C++ compiler
# https://github.com/weidai11/cryptopp/issues/926
# Add explicit casts so the C file also compiles under C++ (void* does not
# implicitly convert to char* in C++).
sed -e 's/= memmem/= (const char*)memmem/g' \
-e 's/= memchr/= (const char*)memchr/g' \
-e 's/= malloc/= (char*)malloc/g' \
cpu-features.c > cpu-features.c.fixed
mv cpu-features.c.fixed cpu-features.c
# Fix permissions. For some reason cpu-features.h is +x.
chmod u=rw,go=r cpu-features.h cpu-features.c
#####################################################################
# Summarize the configured environment (set VERBOSE=0 to silence).
VERBOSE=${VERBOSE:-1}
if [ "$VERBOSE" -gt 0 ]; then
echo "ANDROID_TOOLCHAIN: ${ANDROID_TOOLCHAIN}"
echo "ANDROID_API: ${ANDROID_API}"
echo "ANDROID_CPU: ${ANDROID_CPU}"
echo "ANDROID_SYSROOT: ${ANDROID_SYSROOT}"
if [ -n "${ANDROID_CPPFLAGS}" ]; then
echo "ANDROID_CPPFLAGS: ${ANDROID_CPPFLAGS}"
fi
echo "ANDROID_CXXFLAGS: ${ANDROID_CXXFLAGS}"
if [ -n "${ANDROID_LDFLAGS}" ]; then
echo "ANDROID_LDFLAGS: ${ANDROID_LDFLAGS}"
fi
if [ -e "cpu-features.h" ] && [ -e "cpu-features.c" ]; then
echo "CPU FEATURES: cpu-features.h and cpu-features.c are present"
fi
fi
#####################################################################
# Final success banner. Typo fixes: "It looks the the" -> "It looks like
# the", "next step is build" -> "next step is to build".
echo
echo "*******************************************************************************"
echo "It looks like the environment is set correctly. Your next step is to build"
echo "the library with 'make -f GNUmakefile-cross'."
echo "*******************************************************************************"
echo
# 'exit' would kill the caller's shell when sourced, so 'return' is used
# in that case.
[ "$0" = "${BASH_SOURCE[0]}" ] && exit 0 || return 0
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.