text stringlengths 1 1.05M |
|---|
import React from 'react';
import axios from 'axios';
class ProductList extends React.Component {
constructor(props) {
super(props);
this.state = {
products: []
};
}
componentDidMount() {
axios
.get('/products.json') // Get product data from url
.then((response) => {
this.setState({ products: response.data }); // Set the product data in state
});
}
render() {
return (
<div>
<h1>Product List</h1>
{this.state.products.map((product) => (
<ProductItem key={product.id} product={product} />
))}
</div>
);
}
}
function ProductItem(props) {
return (
<div>
<h2>{props.product.name}</h2>
<h3>{props.product.price}</h3>
<p>{props.product.description}</p>
</div>
);
}
export default ProductList; |
# Imports and Setup
import os
import sqlite3

import flask
from flask import Flask, redirect, render_template, request, url_for
from flask_login import LoginManager, login_required, login_user
# check_password_hash is called in login() but was never imported; werkzeug
# ships as a dependency of Flask, so no new top-level package is introduced.
from werkzeug.security import check_password_hash
# Configure application
app = Flask(__name__)
# Random per process: all sessions are invalidated whenever the app restarts.
app.secret_key = os.urandom(24)

# Configure database
db_file = './data.db'
# NOTE(review): this connection is created at import time and then used from
# request handlers; sqlite3 connections are not shareable across threads by
# default (check_same_thread) -- confirm the server runs single-threaded.
conn = sqlite3.connect(db_file)
cursor = conn.cursor()

# User authentication
login_manager = LoginManager()
login_manager.init_app(app)
@login_manager.user_loader
def load_user(user_id):
    """Restore a user from the session for flask-login.

    Returns a User instance, or None when the id is unknown -- flask-login
    treats None as "not logged in"; the original crashed with a TypeError
    (``User(*None)``) in that case.
    """
    # Parameterized query: the original interpolated user_id with an f-string,
    # which is SQL injection via session-controlled data.
    cursor.execute("SELECT * FROM users WHERE id = ?", (user_id,))
    user = cursor.fetchone()
    return User(*user) if user is not None else None
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Render the login form (GET) or authenticate a user (POST).

    On success the user is logged in via flask-login and redirected to the
    index; on failure they are redirected back to the login form.
    """
    if request.method == 'GET':
        return render_template('login.html')
    elif request.method == 'POST':
        username = request.form['username']
        password = request.form['password']
        # The original literal carried a pointless f-prefix; this is a plain
        # parameterized query (the placeholder does the substitution).
        cursor.execute("SELECT * FROM users WHERE username = ?", (username,))
        user = cursor.fetchone()
        # user[3] is assumed to be the hashed-password column -- TODO confirm
        # against the users table schema.
        if user is not None and check_password_hash(user[3], password):
            login_user(User(*user))
            return redirect(url_for('index'))
        else:
            return redirect(url_for('login'))
# Application routes
@app.route('/')
@login_required  # anonymous visitors are redirected by flask-login
def index():
    """Landing page; requires an authenticated session."""
    return 'Welcome to the application!'

if __name__ == '__main__':
    # Development server only; use a WSGI server in production.
    app.run()
/*
* CPAchecker is a tool for configurable software verification.
* This file is part of CPAchecker.
*
* Copyright (C) 2007-2014 <NAME>
* All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*
* CPAchecker web page:
* http://cpachecker.sosy-lab.org
*/
package org.sosy_lab.cpachecker.core.algorithm;
import org.sosy_lab.cpachecker.cfa.CFA;
import org.sosy_lab.cpachecker.core.reachedset.ReachedSet;
import org.sosy_lab.cpachecker.exceptions.CPAEnabledAnalysisPropertyViolationException;
import org.sosy_lab.cpachecker.exceptions.CPAException;
/**
 * Wrapper algorithm that refuses to analyze programs whose variable
 * classification contains relevant non-integer-addition variables, and
 * otherwise delegates to the wrapped algorithm.
 */
public class RestrictedProgramDomainAlgorithm implements Algorithm {

  private final Algorithm innerAlgorithm;
  private final CFA cfa;

  /**
   * @param pAlgorithm the algorithm to delegate to when the program is supported
   * @param pCfa the CFA of the analyzed program
   */
  public RestrictedProgramDomainAlgorithm(Algorithm pAlgorithm, CFA pCfa) {
    this.innerAlgorithm = pAlgorithm;
    this.cfa = pCfa;
  }

  @Override
  public AlgorithmStatus run(ReachedSet pReachedSet)
      throws CPAException, InterruptedException, CPAEnabledAnalysisPropertyViolationException {
    // Bail out (unsound but precise) when the program uses variable kinds this
    // domain cannot handle; otherwise hand off to the wrapped algorithm.
    if (cfa.getVarClassification().isPresent()
        && cfa.getVarClassification().orElseThrow().hasRelevantNonIntAddVars()) {
      return AlgorithmStatus.UNSOUND_AND_PRECISE;
    }
    return innerAlgorithm.run(pReachedSet);
  }
}
|
#!/usr/bin/env bash
# Requirements: aws-cli & jq
#
# This script will register an IoT thing. Create, download and attach the keys and certificates and attach an all topics/actions policy to the certificates and the IoT Thing.
if [ $# -ne 1 ]; then
  echo "Usage: ./create-aws-iot-thing.sh <Thing>"
  echo "<Thing> is a name for the IoT Thing you would like to create."
else
  # Quote "$1" where it is a command argument so thing names cannot be
  # word-split or glob-expanded by the shell.
  aws iot create-thing --thing-name "$1"
  CERTIFICATE_ARN=$(aws iot create-keys-and-certificate --set-as-active --certificate-pem-outfile client-cert.pem --public-key-outfile public-key.pem --private-key-outfile private-key.pem | jq -r ".certificateArn")
  # The original VeriSign root download URL at symantec.com is long dead;
  # AWS IoT endpoints are signed by the Amazon Trust Services roots.
  curl -s -S https://www.amazontrust.com/repository/AmazonRootCA1.pem -o root-ca.pem
  aws iot create-policy --policy-name "PubSubToAnyTopic" --policy-document file://Device-Policy.json
  # NOTE(review): attach-principal-policy is deprecated in newer AWS CLIs in
  # favor of "aws iot attach-policy" -- confirm the CLI version in use.
  aws iot attach-principal-policy --principal ${CERTIFICATE_ARN} --policy-name "PubSubToAnyTopic"
  aws iot attach-thing-principal --thing-name "$1" --principal ${CERTIFICATE_ARN}
  IOT_ENDPOINT=$(aws iot describe-endpoint | jq -r ".endpointAddress")
  # Emit the client configuration consumed by the device SDK.
  echo -e "AWS_IOT_MQTT_HOST = ${IOT_ENDPOINT}\n" > config.properties
  echo -e "AWS_IOT_MQTT_PORT = 8883\n" >> config.properties
  echo -e "AWS_IOT_MQTT_CLIENT_ID = $1\n" >> config.properties
  echo -e "AWS_IOT_MY_THING_NAME = $1\n" >> config.properties
  echo -e "AWS_IOT_ROOT_CA_FILENAME = $(pwd)/root-ca.pem\n" >> config.properties
  echo -e "AWS_IOT_CERTIFICATE_FILENAME = $(pwd)/client-cert.pem\n" >> config.properties
  echo -e "AWS_IOT_PRIVATE_KEY_FILENAME = $(pwd)/private-key.pem\n" >> config.properties
fi
|
# Play raw audio from stdin through PulseAudio: signed 16-bit big-endian,
# one channel mapped to mono, 44.1 kHz, on the USB Burr-Brown CODEC sink.
# NOTE(review): a mono s16be stream is sent to an "analog-stereo" sink --
# PulseAudio will remix, but confirm the big-endian byte order is intended.
pacat --format=s16be --channels=1 --channel-map=mono --rate=44100 --device=alsa_output.usb-Burr-Brown_from_TI_USB_Audio_CODEC-00.analog-stereo
|
#!/bin/csh
# generated by BIGNASim metatrajectory generator
# Grid Engine directives: run in the submit directory, with a named job and
# dedicated stdout/stderr log files.
#$ -cwd
#$ -N BIGNaSim_curl_call_BIGNASim55ce1f70e3226
#$ -o CURL.BIGNASim55ce1f70e3226.out
#$ -e CURL.BIGNASim55ce1f70e3226.err
# Launching CURL...
# CURL is calling a REST WS that generates the metatrajectory.
# NOTE(review): this sends a JSON body with "-X GET"; many servers ignore GET
# bodies -- confirm the ms2 endpoint actually reads it (POST may be intended).
curl -i -H "Content-Type: application/json" -X GET -d '{"idSession":"NAFlex55ce102c3f610","idTraj":"NAFlex_1dcw","name":"BIGNASim55ce1f70e3226-NAFlex_1dcw-1_20_1","description":"Subtrajectory of NAFlex_1dcw with 1_20_1 frames selected","mask":"name *","frames":"1:20:1","format":"mdcrd"}' http://ms2/download
|
#!/bin/bash
# Clean sysctl config directories
# NOTE(review): this wipes ALL drop-in sysctl configuration, not just the
# redirect setting -- destructive; confirm that is intended.
rm -rf /usr/lib/sysctl.d/* /run/sysctl.d/* /etc/sysctl.d/*
# Remove any existing accept_redirects entry before appending ours.
sed -i "/net.ipv6.conf.default.accept_redirects/d" /etc/sysctl.conf
# Persist 0 (the original wrote 1): the runtime command below disables
# acceptance of IPv6 redirects, and the persisted value must agree or a
# reboot would silently re-enable them.
echo "net.ipv6.conf.default.accept_redirects = 0" >> /etc/sysctl.conf
# Setting correct runtime value
sysctl -w net.ipv6.conf.default.accept_redirects=0
|
-- Records the schema version applied to the gcdefault schema.
CREATE TABLE IF NOT EXISTS gcdefault.dbversion (
    version INTEGER DEFAULT 0 NOT NULL,
    updateon TIMESTAMP(6) DEFAULT now() NOT NULL,
    CONSTRAINT PK_dbversion PRIMARY KEY (version)
);

-- Lookup table of map-note categories; sortorder controls display order.
CREATE TABLE IF NOT EXISTS gcdefault.mapnotecategory (
    mapnotecategoryid INTEGER DEFAULT nextval('gcdefault.mapnotecategory_mapnotecategoryid_seq'::regclass) NOT NULL,
    name VARCHAR(100) NOT NULL,
    sortorder INTEGER DEFAULT 0 NOT NULL,
    isactive BOOLEAN DEFAULT true NOT NULL,
    CONSTRAINT PK_mapnotecategory PRIMARY KEY (mapnotecategoryid)
);

-- A note pinned to a point geometry (PostGIS Point, SRID 4326 / WGS84),
-- categorized via mapnotecategory and carrying created/updated audit columns.
CREATE TABLE IF NOT EXISTS gcdefault.mapnote
(
    mapnoteid integer NOT NULL DEFAULT nextval('gcdefault.mapnote_mapnoteid_seq'::regclass),
    name character varying(100) NOT NULL,
    mapnotecategoryid integer NOT NULL,
    remarks character varying(1000),
    createdby character varying(255) NOT NULL,
    createddate timestamp without time zone NOT NULL DEFAULT now(),
    lastupdateby character varying(255) NOT NULL,
    lastupdate timestamp without time zone NOT NULL DEFAULT now(),
    geom geometry(Point,4326),
    CONSTRAINT pk_mapnote PRIMARY KEY (mapnoteid),
    CONSTRAINT fk_mapnote_mapnotecategory FOREIGN KEY (mapnotecategoryid)
        REFERENCES gcdefault.mapnotecategory (mapnotecategoryid) MATCH SIMPLE
        ON UPDATE NO ACTION
        ON DELETE NO ACTION
);

-- Current version number per key.
CREATE TABLE IF NOT EXISTS gcdefault.version (
    key VARCHAR(10) NOT NULL,
    versionnumber BIGINT NOT NULL,
    seton TIMESTAMP(6) DEFAULT now() NOT NULL,
    CONSTRAINT PK_version PRIMARY KEY (key)
);

-- History of version numbers per key, with creation/removal/availability flags.
CREATE TABLE IF NOT EXISTS gcdefault.versionhistory (
    key VARCHAR(10) NOT NULL,
    versionnumber BIGINT NOT NULL,
    createdon TIMESTAMP(6) DEFAULT now() NOT NULL,
    removedon TIMESTAMP(6),
    isavailable BOOLEAN DEFAULT true NOT NULL,
    CONSTRAINT PK_versionhistory PRIMARY KEY (key, versionnumber)
);
-- Land parcels with assessor numbers (apn/apn2), address fields, and a
-- MultiPolygon footprint in SRID 4326.
CREATE TABLE IF NOT EXISTS gcverbase00001.parcel
(
    gid integer NOT NULL,
    parcel_id integer NOT NULL,
    countykey character varying(5),
    county character varying(30),
    statekey character varying(2),
    apn character varying(50),
    apn2 character varying(50),
    addr character varying(100),
    city character varying(50),
    state character varying(2),
    zip character varying(5),
    geom geometry(MultiPolygon,4326),
    CONSTRAINT parcel_pkey PRIMARY KEY (gid)
);

-- County boundaries.
CREATE TABLE IF NOT EXISTS gcverbase00001.county
(
    gid integer NOT NULL DEFAULT nextval('gcverbase00001.county_gid_seq'::regclass),
    polygon_id bigint,
    countyname character varying(200),
    area_id bigint,
    geom geometry(MultiPolygon,4326),
    CONSTRAINT county_pkey PRIMARY KEY (gid)
);

-- Named places (iscity is a one-character flag column).
CREATE TABLE IF NOT EXISTS gcverbase00001.places
(
    gid integer NOT NULL DEFAULT nextval('gcverbase00001.places_gid_seq'::regclass),
    id bigint,
    placename character varying(50),
    countyname character varying(50),
    polygon_id bigint,
    area_id bigint,
    iscity character varying(1),
    zip character varying(5),
    geom geometry(MultiPolygon,4326),
    CONSTRAINT places_pkey PRIMARY KEY (gid)
);

-- Railroad segments with bridge/tunnel flag columns.
CREATE TABLE IF NOT EXISTS gcverbase00001.railroads
(
    gid integer NOT NULL DEFAULT nextval('gcverbase00001.railroads_gid_seq'::regclass),
    link_id bigint,
    name character varying(200),
    bridge character varying(1),
    tunnel character varying(1),
    geom geometry(MultiLineString,4326),
    CONSTRAINT railroad_pkey PRIMARY KEY (gid)
);

-- Street centerlines with left/right from/to address ranges (l_f_add etc.)
-- and name components (prefix/name/type/suffix).
CREATE TABLE IF NOT EXISTS gcverbase00001.streets
(
    gid integer NOT NULL DEFAULT nextval('gcverbase00001.streets_gid_seq'::regclass),
    l_f_add character varying(30),
    l_t_add character varying(30),
    r_f_add character varying(30),
    r_t_add character varying(30),
    prefix character varying(10),
    name character varying(100),
    type character varying(16),
    suffix character varying(16),
    strplace character varying(100),
    streetname character varying(100),
    placename character varying(100),
    countyname character varying(100),
    geom geometry(MultiLineString,4326),
    fcc character varying(20),
    original_street character varying(255),
    CONSTRAINT streets_pkey PRIMARY KEY (gid)
);

-- Water bodies as polygons (lakes etc.).
CREATE TABLE IF NOT EXISTS gcverbase00001.surfacewater
(
    gid integer NOT NULL DEFAULT nextval('gcverbase00001.surfacewater_gid_seq'::regclass),
    polygon_id bigint,
    disp_class character varying(1),
    name character varying(200),
    feat_type character varying(40),
    feat_cod bigint,
    geom geometry(MultiPolygon,4326),
    CONSTRAINT waterpoly_pkey PRIMARY KEY (gid)
);

-- Linear water features (rivers, streams).
CREATE TABLE IF NOT EXISTS gcverbase00001.water
(
    gid integer NOT NULL DEFAULT nextval('gcverbase00001.water_gid_seq'::regclass),
    link_id bigint,
    disp_class character varying(1),
    name character varying(200),
    geom geometry(MultiLineString,4326),
    CONSTRAINT water_pkey PRIMARY KEY (gid)
);
-- Service-area polygons keyed by a code that joins to serviceareatype.code.
CREATE TABLE IF NOT EXISTS gcversa00001.servicearea
(
    gid integer NOT NULL DEFAULT nextval('gcversa00001.servicearea_gid_seq'::regclass),
    code character varying(20),
    geom geometry(MultiPolygon,4326),
    CONSTRAINT servicearea_pkey PRIMARY KEY (gid)
);

-- Lookup of service-area type names per code (no FK declared; join by code).
CREATE TABLE IF NOT EXISTS gcversa00001.serviceareatype
(
    gid integer NOT NULL DEFAULT nextval('gcversa00001.serviceareatype_gid_seq'::regclass),
    code character varying(20) NOT NULL,
    typename character varying(20) NOT NULL,
    CONSTRAINT pk_serviceareatype PRIMARY KEY (gid)
);
let facade = require('gamecloud')
let {TableField, EntityType, NotifyType, ReturnCode} = facade.const
/**
 * Mailbox manager.
 * Updated by liub on 2017-07-26.
 */
class mail extends facade.Control
{
    /**
     * Fetch the mail list for the given user, paginated.
     * @param {UserEntity} user - mails are looked up by user.openid
     * @param {*} objData - may carry pageSize (default 10) and page (default 1)
     */
    async getList(user, objData) {
        if(!objData.pageSize) {
            objData.pageSize = 10;
        }
        if(!objData.page){
            objData.page = 1;
        }
        // Mails grouped by the user's openid, newest first.
        let muster = this.core.GetMapping(EntityType.Mail)
            .groupOf(user.openid)
            .orderby('time', 'desc')
            .paginate(objData.pageSize, objData.page);
        let data = {
            total : muster.pageNum,
            page: muster.pageCur,
        }
        data.list = muster.records(TableField.Mail);
        return { code: ReturnCode.Success, data: data };
    }
    /**
     * Send a text mail to the specified user.
     * @param {UserEntity} user - sender; user.openid is recorded as the source
     * @param {*} objData - objData.con is the content, objData.openid the recipient
     */
    async send(user, objData) {
        this.core.GetMapping(EntityType.Mail).Create(user, objData.con, user.openid, objData.openid);
        return {code: ReturnCode.Success};
    }
    /**
     * Delete a mail. Only its sender (src) or recipient (dst) may delete it;
     * otherwise {code: -1} is returned.
     * @param {UserEntity} user
     * @param {*} objData - objData.idx identifies the mail
     */
    async del(user, objData)
    {
        let mail = this.core.GetObject(EntityType.Mail, objData.idx);
        if(!!mail && (mail.src == user.openid || mail.dst == user.openid)) {
            await this.core.GetMapping(EntityType.Mail).Delete(objData.idx);
            user.CheckMailboxState();
            return {code: ReturnCode.Success, data:{idx:objData.idx}};
        }
        return {code: -1};
    }
    /**
     * Read a mail. Marks it read when the caller is its sender or recipient;
     * always returns Success with the mail index.
     * @param {UserEntity} user
     * @param {*} objData - objData.idx identifies the mail
     */
    async read(user, objData)
    {
        let mail = this.core.GetObject(EntityType.Mail, objData.idx);
        if(!!mail && (mail.src == user.openid || mail.dst == user.openid)) {
            await mail.read(user);
            user.CheckMailboxState();
        }
        return {code: ReturnCode.Success, data: {idx:objData.idx}};
    }
}
exports = module.exports = mail;
|
<filename>src/mathcard/game/CardPicking.java
package mathcard.game;
import java.util.ArrayList;
import java.util.List;
import mathcard.card.Card;
import mathcard.player.Player;
/**
 * Holds a pool of cards and lets two players pick from it in alternating
 * turns until the pool is empty.
 */
public class CardPicking {

    /** Cards still waiting to be picked. */
    private List<Card> cards;
    /** The two participants who alternate picks. */
    private Player p1, p2;

    public CardPicking(Player p1, Player p2)
    {
        this.p1 = p1;
        this.p2 = p2;
        this.cards = new ArrayList<>();
    }

    /** Adds a card to the pool. */
    public void add(Card c)
    {
        cards.add(c);
    }

    /** @return true when no cards remain. */
    public boolean isEmpty()
    {
        return cards.isEmpty();
    }

    /**
     * Alternates picks between the two players, starting with p1, until the
     * pool is empty. Each pickCard call is expected to remove from the list.
     */
    public void pickAll()
    {
        Player turn = p1;
        while (!isEmpty())
        {
            turn.pickCard(cards);
            turn = otherPlayer(turn);
        }
    }

    /** Returns the opponent of the given player, or null for a stranger. */
    private Player otherPlayer(Player p)
    {
        return p == p1 ? p2 : (p == p2 ? p1 : null);
    }
}
|
<template>
  <div>
    <h1>Movies</h1>
    <input type="text" v-model="keyword" />
    <ul>
      <li v-for="movie in movies" :key="movie.title">{{ movie.title }}</li>
    </ul>
  </div>
</template>
<script>
// axios was referenced but never imported in the original script block.
import axios from 'axios';

export default {
  data() {
    return {
      keyword: '',
      movies: [],    // the (possibly filtered) list rendered in the template
      allMovies: []  // pristine copy of the fetched list; source for filtering
    };
  },
  async created() {
    // Load the full movie list once and keep an unfiltered copy.
    const response = await axios.get('/api/movies');
    this.allMovies = response.data;
    this.movies = this.allMovies;
  },
  watch: {
    keyword(newKeyword) {
      // Filter from the full list on every keystroke. The original filtered
      // this.movies in place, so deleting characters could never restore
      // entries that had already been filtered out.
      this.movies = this.allMovies.filter(movie =>
        movie.title.includes(newKeyword)
      );
    }
  }
};
</script>
#!/usr/bin/env bash
# String manipulation!
str=FooBarBazQuux
echo "${str,,}" # Lower case
echo "${str^^}" # Upper case
echo "${str:4:7}" # Slice
echo "${str#*B}" # Prefix snip: remove from the left until the first match of *B
echo "${str##*B}" # Be greedy: remove from the left until the last match of *B
echo "${str%u*}" # Suffix snip: remove from the right after the first match of u*
echo "${str%%u*}" # Be greedy: remove from the right after the last match of u*
# Assertions and defaults!
foo=
var="${foo:-DEFAULT VALUE}"   # :- also substitutes when set-but-empty
echo "$var"
var="${bar:-DEFAULT VALUE}"   # bar is unset, so the default is used
echo "$var"
# But how do I check if it's defined or not?
[ -v foo ] && echo THERE || echo NOT THERE   # -v tests "set", even if empty
# Arrays!
a=(1 2 3)
echo "$a"          # bare $a expands only the first element
echo "${a[@]}"
a+=(4 5 6)
echo "${a[@]}"
for i in "${a[@]}"; do
echo next
echo $i;
done
echo length "${#a[@]}"
a[2]='foo'
echo "${a[@]}"
echo "${a[@]:3}"    # array slice: elements from index 3
echo "${a[@]:3:2}"  # two elements starting at index 3
# Assertions! Prints to stderr and exits 1
v=
# NOTE: v is empty, so the next line exits the script with status 1 -- the
# two lines after it are intentionally unreachable (they demo the happy path).
echo "${v:?Must set v}"
v=1
echo "${v:?Must set v}"
# Argument file and checkpoint name used to resume the pruned-model experiment.
ARGFILE=./sh/argfiles/resnet_164_slimming
RESUME_FILENAME=04_April_2018_Wednesday_20_23_17resnet164_slim08_most_recent
# NOTE(review): $SAVE_PREFIX is not defined in this script -- presumably
# exported by the caller; verify before running standalone. The unquoted
# $(cat $ARGFILE) relies on word-splitting to pass each token as a separate
# argument (intentional here).
python -m examples.lab $(cat $ARGFILE) --save_prefix=$SAVE_PREFIX --resume_mode=standard --res_file=${RESUME_FILENAME} --plot_flop_reduction_by_layer --plot_title="Resnet 164 after 20% reduction in flops via network slimming. "
|
<gh_stars>1-10
'use strict';
const {wrap} = require('../util/hooks');
const random = require('../util/random');
const arr = require('../util/arr');
const wordSeed = require('./word');
// Surname pools per locale; cn entries follow the traditional "Hundred
// Family Surnames" ordering.
const lastName = {
    cn: [
        '赵', '钱', '孙', '李', '周', '吴', '郑', '王', '冯', '陈', '楮', '卫', '蒋', '沈',
        '韩', '杨', '朱', '秦', '尤', '许', '何', '吕', '施', '张', '孔', '曹', '严', '华', '金',
        '魏', '陶', '姜', '戚', '谢', '邹', '柏', '水', '窦', '章', '云', '苏', '潘',
        '范', '彭', '郎', '鲁', '韦', '昌', '马', '苗', '凤', '花', '方', '俞', '任', '袁', '柳',
        '酆', '鲍', '史', '唐', '费', '廉', '岑', '薛', '雷', '贺', '倪', '汤', '滕', '殷', '罗', '毕',
        '郝', '邬', '安', '常', '乐', '于', '时', '傅', '皮', '卞', '齐', '康', '伍', '余', '元', '卜', '顾',
        '孟', '平', '黄', '和', '穆', '萧', '尹', '姚', '邵', '湛', '汪', '祁', '毛', '禹', '狄', '米', '贝', '明',
        '臧', '计', '伏', '成', '戴', '谈', '宋', '茅', '庞', '熊', '纪', '舒', '屈', '项', '祝', '董', '梁', '杜',
        '阮', '蓝', '闽', '席', '季', '麻', '强', '贾', '路', '娄', '危', '江', '童', '颜', '郭', '梅', '盛', '林',
        '刁', '锺', '徐', '丘', '骆', '高', '夏', '蔡', '田', '樊', '胡', '凌', '霍', '虞', '万', '支', '柯', '昝'
    ],
    en: [
        // NOTE(review): 'Yonng' below looks like a typo for 'Young' -- left
        // unchanged because it is runtime data consumers may depend on.
        'Baker', 'Hunter', 'Carter', 'Smith', 'Cook', 'Miller', 'Turner',
        'London', 'Hall', 'Kent', 'Mill', 'Brook', 'Churchill', 'Hill', 'Lake',
        'Field', 'Green', 'Wood', 'Well', 'Brown', 'White', 'Longman', 'Short',
        'Sharp', 'Hard', 'Yonng', 'Sterling', 'Back', 'Hand', 'Finger', 'Brain',
        'Bird', 'Bull', 'Fox', 'Hawk', 'Bush', 'Stock', 'Cotton', 'Reed', 'George',
        'Henry', 'David', 'Clinton', 'Macadam', 'Arthur'
    ]
};
// Given-name pool; only English names are listed -- Chinese given names are
// synthesized from wordSeed below instead.
const firstName = {
    en: [
        'Abe', 'Abel', 'Abner', 'Abraham', 'Allen', 'Adam', 'Adolf', 'Albin', 'Alden',
        'Baldwin', 'Bertran', 'Bryan', 'Barnaby', 'Barry', 'Bartholomew',
        'Caesar', 'Calvin', 'Carlton', 'Cary', 'Christian', 'Carl', 'Cecil', 'Cedric',
        'Dexter', 'Derby', 'Dale', 'Daniel', 'Dan', 'Danny', 'Darrell', 'Darren',
        'Edwin', 'Eliot', 'Elmer', 'Elroy', 'Emlyn', 'Enoch', 'Eric', 'Ernest',
        'Freddie', 'Felix', 'Ferdinand', 'Fergus', 'Floyd', 'Francis', 'Frank',
        'Gabriel', 'Gareth', 'Gary', 'Gavin', 'Gene', 'Geoffrey', 'Geoff', 'George',
        'Humphry', 'Hal', 'Hank', 'Harold', 'Harry', 'Henry', 'Herbert', 'Horace',
        'Ivan', 'Ivor', 'Ira', 'Irving', 'Irwin', 'Jarvis', 'Jean', 'Job', 'Jack',
        'Jacob', 'Jake', 'James', 'Jamie', 'Jason', 'Jasper', 'Jed', 'Jeff',
        'Kenny', 'Kent', 'Kevin', 'Kit', 'Kev', 'Kirk',
        'Laban', 'Lee', 'Lance', 'Larry', 'Laurence', 'Len', 'Lenny', 'Leo',
        'Malcolm', 'Mark', 'Martin', 'Malachi', 'Marshall', 'Marvin', 'Marty',
        'Na', 'Nat', 'Nathan', 'Nahum', 'Napoleon', 'Nelson', 'Newton', 'Noah',
        'Ollie', 'Oliver', 'Oscar', 'Oswald', 'Owen', 'Oz', 'Ozzie', 'Octavius',
        'Paddy', 'Pat', 'Patrick', 'Paul', 'Percy', 'Pete', 'Peter', 'Phil',
        'Quentin', 'Quincy', 'Rene', 'Reuben', 'Ralph', 'Randolf', 'Randy', 'Raphael',
        'Robby', 'Robert', 'Robin', 'Rod', 'Roderick', 'Rodney', 'Rodge', 'Roger', 'Ronald',
        'Ron', 'Ronnie', 'Rory', 'Roy', 'Rudolph', 'Rufus', 'Rupert', 'Russ', 'Reuel',
        'Sebastian', 'Sid', 'Sidney', 'Simon', 'Stan', 'Stanley', 'Steve', 'Steven',
        'Tom', 'Tommy', 'Tony', 'Theobald', 'Theodoric', 'Terence', 'Trevor', 'Troy',
        'Urban', 'Van', 'Vivian', 'Vic', 'Victor', 'Vince', 'Vincent', 'Viv',
        'Wallace', 'Wally', 'Walter', 'Warren', 'Wayne', 'Wesley', 'Winston',
        'Will', 'Wilbur', 'Wilfred', 'Willy', 'William', 'Willis'
    ]
};
// Default generator delegates to the English form ("First Last").
let name = wrap(() => name.en());
name.en = wrap(() => {
    return name.en.firstName() + ' ' + name.en.lastName();
});
name.en.firstName = wrap(() => random.array(firstName.en));
name.en.lastName = wrap(() => random.array(lastName.en));
// Chinese form: surname followed directly by given name (no space).
name.cn = wrap(() => {
    return name.cn.lastName() + name.cn.firstName();
});
// Chinese given names are 1-2 characters drawn from the word seed pool.
name.cn.firstName = wrap(() => {
    return arr(random.int(1, 2), () => wordSeed.cn()).join('');
});
name.cn.lastName = wrap(() => random.array(lastName.cn));
module.exports = name;
|
#!/usr/bin/env bash
# Copyright (c) 2014, Cloudera, Inc. All Rights Reserved.
#
# Cloudera, Inc. licenses this file to you under the Apache License,
# Version 2.0 (the "License"). You may not use this file except in
# compliance with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# This software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for
# the specific language governing permissions and limitations under the
# License.
# Print an optional error message ($1) followed by usage help, then exit 1.
function usageAndExit {
  echo "$1"
  echo "usage: oryx-run.sh command [--option value] ..."
  echo " where command is one of:"
  echo " batch Run Batch Layer"
  echo " speed Run Speed Layer"
  echo " serving Run Serving Layer"
  echo " kafka-setup Inspect ZK/Kafka config and configure Kafka topics"
  echo " kafka-tail Follow output from Kafka topics"
  echo " kafka-input Push data to input topic"
  echo " and options are one of:"
  echo " --layer-jar Oryx JAR file, like oryx-{serving,speed,batch}-x.y.z.jar"
  echo " Defaults to any oryx-*.jar in working dir"
  echo " --conf Oryx configuration file, like oryx.conf. Defaults to 'oryx.conf'"
  echo " --app-jar User app JAR file"
  echo " --jvm-args Extra args to Oryx JVM processes (including drivers and executors)"
  echo " --deployment Only for Serving Layer now; can be 'yarn' or 'local', Default: local."
  echo " --input-file Only for kafka-input. Input file to send"
  echo " --help Display this message"
  exit 1
}
# Show usage when help is requested up front.
if [ "$1" == "--help" ]; then
  usageAndExit
fi

# First positional argument is the command; remaining args are option pairs.
COMMAND=$1
shift

# Consume --option value pairs; unknown options abort with usage.
while (($#)); do
  if [ "$1" == "--layer-jar" ]; then
    LAYER_JAR="$2"
  elif [ "$1" == "--conf" ]; then
    CONFIG_FILE="$2"
  elif [ "$1" == "--app-jar" ]; then
    APP_JAR="$2"
  elif [ "$1" == "--jvm-args" ]; then
    JVM_ARGS="$2"
  elif [ "$1" == "--deployment" ]; then
    DEPLOYMENT="$2"
  elif [ "$1" == "--input-file" ]; then
    INPUT_FILE="$2"
  else
    usageAndExit "Unrecognized option $1"
  fi
  # Drop both the option and its value.
  shift
  shift
done

# Forces Java 8 on CDH
export BIGTOP_JAVA_MAJOR=8

# Default the layer JAR to one found in the working directory, matching the
# requested layer when possible.
if [ -z "${LAYER_JAR}" ]; then
  case "${COMMAND}" in
  batch|speed|serving)
    LAYER_JAR=$(ls -1 oryx-${COMMAND}-*.jar 2> /dev/null)
    ;;
  *)
    LAYER_JAR=$(ls -1 oryx-batch-*.jar oryx-speed-*.jar oryx-serving-*.jar 2> /dev/null | head -1)
    ;;
  esac
fi
if [ -z "${CONFIG_FILE}" ]; then
  CONFIG_FILE="oryx.conf"
fi
if [ ! -f "${LAYER_JAR}" ]; then
  usageAndExit "Layer JAR ${LAYER_JAR} does not exist"
fi
if [ ! -f "${CONFIG_FILE}" ]; then
  usageAndExit "Config file ${CONFIG_FILE} does not exist"
fi
# Render the config file to flat key=value properties for the greps below.
CONFIG_PROPS=$(java -cp ${LAYER_JAR} -Dconfig.file=${CONFIG_FILE} com.cloudera.oryx.common.settings.ConfigToProperties)
if [ -z "${CONFIG_PROPS}" ]; then
  usageAndExit "Config file ${CONFIG_FILE} could not be parsed"
fi
# If first arg is FOO and second is bar, and CONFIG_PROPS contains a property
# bar=baz, then variable FOO is set to value baz by this function. The second
# argument must be expressed as a regular expression; "foo\.bar" not "foo.bar"
function setVarFromProperty {
  local __resultvar=$1
  local property=$2
  local result=$(echo "${CONFIG_PROPS}" | grep -E "^${property}=.+$" | grep -oE "[^=]+$")
  # printf -v assigns without eval, so property values containing spaces or
  # shell metacharacters cannot be re-interpreted as shell code (the previous
  # unquoted eval broke on such values and was a mild injection hazard).
  printf -v "$__resultvar" '%s' "$result"
}
# Resolve which naming convention the installed Kafka CLI tools use.
case "${COMMAND}" in
  kafka-setup|kafka-tail|kafka-input)
    # Helps execute kafka-foo or kafka-foo.sh as appropriate.
    # Kind of assume we're using all one or the other
    if [ -x "$(command -v kafka-topics)" ]; then
      KAFKA_TOPICS_SH="kafka-topics"
      KAFKA_CONSOLE_CONSUMER_SH="kafka-console-consumer"
      KAFKA_CONSOLE_PRODUCER_SH="kafka-console-producer"
    elif [ -x "$(command -v kafka-topics.sh)" ]; then
      KAFKA_TOPICS_SH="kafka-topics.sh"
      KAFKA_CONSOLE_CONSUMER_SH="kafka-console-consumer.sh"
      KAFKA_CONSOLE_PRODUCER_SH="kafka-console-producer.sh"
    else
      echo "Can't find kafka scripts like kafka-topics"
      exit 2
    fi
    ;;
esac
# Main dispatch on the requested command.
case "${COMMAND}" in
  batch|speed|serving)
    # Main Layer handling script
    CONFIG_FILE_NAME=$(basename ${CONFIG_FILE})
    if [ -n "${APP_JAR}" ]; then
      APP_JAR_NAME=$(basename ${APP_JAR})
    fi
    MAIN_CLASS="com.cloudera.oryx.${COMMAND}.Main"
    setVarFromProperty "APP_ID" "oryx\.id"
    case "${COMMAND}" in
      batch|speed)
        # Driver sees the config by path; executors by file name (shipped via --files).
        SPARK_DRIVER_JAVA_OPTS="-Dconfig.file=${CONFIG_FILE}"
        SPARK_EXECUTOR_JAVA_OPTS="-Dconfig.file=${CONFIG_FILE_NAME}"
        if [ -n "${JVM_ARGS}" ]; then
          if [[ "${JVM_ARGS}" == *"-Xmx"* ]]; then
            echo "Warning: -Xmx is set in --jvm-args, but it will be overridden by .conf file settings";
          fi
          SPARK_DRIVER_JAVA_OPTS="${JVM_ARGS} ${SPARK_DRIVER_JAVA_OPTS}"
          SPARK_EXECUTOR_JAVA_OPTS="${JVM_ARGS} ${SPARK_EXECUTOR_JAVA_OPTS}"
        fi
        # Force to spark-submit for Spark-based batch/speed layer
        DEPLOYMENT="spark-submit"
        case "${COMMAND}" in
          batch)
            APP_NAME="OryxBatchLayer-${APP_ID}"
            ;;
          speed)
            APP_NAME="OryxSpeedLayer-${APP_ID}"
            ;;
        esac
        # Pull the per-layer Spark settings out of the rendered properties.
        setVarFromProperty "SPARK_MASTER" "oryx\.${COMMAND}\.streaming\.master"
        setVarFromProperty "SPARK_DEPLOY_MODE" "oryx\.${COMMAND}\.streaming\.deploy-mode"
        setVarFromProperty "DRIVER_MEMORY" "oryx\.${COMMAND}\.streaming\.driver-memory"
        setVarFromProperty "EXECUTOR_MEMORY" "oryx\.${COMMAND}\.streaming\.executor-memory"
        setVarFromProperty "EXECUTOR_CORES" "oryx\.${COMMAND}\.streaming\.executor-cores"
        setVarFromProperty "NUM_EXECUTORS" "oryx\.${COMMAND}\.streaming\.num-executors"
        setVarFromProperty "DYNAMIC_ALLOCATION" "oryx\.${COMMAND}\.streaming\.dynamic-allocation"
        setVarFromProperty "SPARK_UI_PORT" "oryx\.${COMMAND}\.ui\.port"
        SPARK_EXTRA_CONFIG=$(echo "${CONFIG_PROPS}" | grep -E "^oryx\.${COMMAND}\.streaming\.config\..+=.+$" | grep -oE "spark.+$")
        ;;
      serving)
        # The file compute-classpath.sh must be in the same directory as this file.
        COMPUTE_CLASSPATH="compute-classpath.sh"
        if [ ! -x "$COMPUTE_CLASSPATH" ]; then
          usageAndExit "$COMPUTE_CLASSPATH script does not exist or isn't executable"
        fi
        BASE_CLASSPATH=$(bash ${COMPUTE_CLASSPATH} | paste -s -d: -)
        setVarFromProperty "MEMORY_MB" "oryx\.serving\.memory"
        MEMORY_MB=$(echo ${MEMORY_MB} | grep -oE "[0-9]+")
        # Only for Serving Layer now
        case "${DEPLOYMENT}" in
          yarn)
            setVarFromProperty "YARN_CORES" "oryx\.serving\.yarn\.cores"
            setVarFromProperty "YARN_INSTANCES" "oryx\.serving\.yarn\.instances"
            APP_NAME="OryxServingLayer-${APP_ID}"
            # Leave ~10% of the container for non-heap memory.
            JVM_HEAP_MB=$(echo "${MEMORY_MB} * 0.9" | bc | grep -oE "^[0-9]+")
            ;;
          *)
            JVM_HEAP_MB=${MEMORY_MB}
            ;;
        esac
        EXTRA_PROPS="-Xmx${JVM_HEAP_MB}m"
        ;;
    esac
    case "${DEPLOYMENT}" in
      spark-submit)
        # Launch Spark-based layer with spark-submit; prefer spark2-submit if present.
        if [ -x "$(command -v spark2-submit)" ]; then
          SPARK_SUBMIT_SCRIPT="spark2-submit"
        else
          SPARK_SUBMIT_SCRIPT="spark-submit"
        fi
        JARS_ARG=""
        if [ -n "${APP_JAR}" ]; then
          JARS_ARG="--jars ${APP_JAR}"
        fi
        SPARK_SUBMIT_CMD="${SPARK_SUBMIT_SCRIPT} --master ${SPARK_MASTER} --deploy-mode ${SPARK_DEPLOY_MODE} \
--name ${APP_NAME} --class ${MAIN_CLASS} ${JARS_ARG} --files ${CONFIG_FILE} \
--driver-memory ${DRIVER_MEMORY} --driver-java-options \"${SPARK_DRIVER_JAVA_OPTS}\" \
--executor-memory ${EXECUTOR_MEMORY} --executor-cores ${EXECUTOR_CORES} \
--conf spark.executor.extraJavaOptions=\"${SPARK_EXECUTOR_JAVA_OPTS}\" --conf spark.ui.port=${SPARK_UI_PORT}"
        for SPARK_KEY_VALUE_CONF in ${SPARK_EXTRA_CONFIG}; do
          SPARK_SUBMIT_CMD="${SPARK_SUBMIT_CMD} --conf ${SPARK_KEY_VALUE_CONF}"
        done
        case "${DYNAMIC_ALLOCATION}" in
          true)
            SPARK_SUBMIT_CMD="${SPARK_SUBMIT_CMD} --conf spark.dynamicAllocation.enabled=true \
--conf spark.dynamicAllocation.minExecutors=1 --conf spark.dynamicAllocation.maxExecutors=${NUM_EXECUTORS} \
--conf spark.dynamicAllocation.executorIdleTimeout=60 --conf spark.shuffle.service.enabled=true"
            ;;
          *)
            SPARK_SUBMIT_CMD="${SPARK_SUBMIT_CMD} --conf spark.dynamicAllocation.enabled=false \
--num-executors=${NUM_EXECUTORS}"
            ;;
        esac
        SPARK_SUBMIT_CMD="${SPARK_SUBMIT_CMD} ${LAYER_JAR}"
        echo "${SPARK_SUBMIT_CMD}"
        bash -c "${SPARK_SUBMIT_CMD}"
        ;;
      yarn)
        # Launch layer in YARN
        LAYER_JAR_NAME=$(basename ${LAYER_JAR})
        FINAL_CLASSPATH="${LAYER_JAR_NAME}:${BASE_CLASSPATH}"
        if [ -n "${APP_JAR_NAME}" ]; then
          FINAL_CLASSPATH="${APP_JAR_NAME}:${FINAL_CLASSPATH}"
        fi
        LOCAL_SCRIPT_DIR="/tmp/${APP_NAME}"
        LOCAL_SCRIPT="${LOCAL_SCRIPT_DIR}/run-yarn.sh"
        YARN_LOG4J="${LOCAL_SCRIPT_DIR}/log4j.properties"
        # APP_NAME will match the base of what distributedshell uses, in the home dir
        HDFS_APP_DIR="${APP_NAME}"
        # Only one copy of the app can be running anyway, so fail if it already seems
        # to be running due to presence of directories
        if [ -d ${LOCAL_SCRIPT_DIR} ]; then
          usageAndExit "${LOCAL_SCRIPT_DIR} already exists; is ${APP_NAME} running?"
        fi
        if hdfs dfs -test -d ${HDFS_APP_DIR}; then
          usageAndExit "${HDFS_APP_DIR} already exists; is ${APP_NAME} running?"
        fi
        # Make temp directories to stage resources, locally and in HDFS
        mkdir -p ${LOCAL_SCRIPT_DIR}
        hdfs dfs -mkdir -p ${HDFS_APP_DIR}
        echo "Copying ${LAYER_JAR} and ${CONFIG_FILE} to ${HDFS_APP_DIR}/"
        hdfs dfs -put ${LAYER_JAR} ${CONFIG_FILE} ${HDFS_APP_DIR}/
        if [ -n "${APP_JAR}" ]; then
          echo "Copying ${APP_JAR} to ${HDFS_APP_DIR}/"
          hdfs dfs -put ${APP_JAR} ${HDFS_APP_DIR}/
        fi
        echo "log4j.logger.org.apache.hadoop.yarn.applications.distributedshell=WARN" >> ${YARN_LOG4J}
        # Need absolute path
        OWNER=$(hdfs dfs -stat '%u' ${HDFS_APP_DIR})
        # Generated container script: localize staged resources, then run the layer.
        echo "hdfs dfs -get /user/${OWNER}/${HDFS_APP_DIR}/* ." >> ${LOCAL_SCRIPT}
        echo "java ${JVM_ARGS} ${EXTRA_PROPS} -Dconfig.file=${CONFIG_FILE_NAME} -cp ${FINAL_CLASSPATH} ${MAIN_CLASS}" >> ${LOCAL_SCRIPT}
        YARN_DIST_SHELL_JAR=$(bash ${COMPUTE_CLASSPATH} | grep distributedshell)
        echo "Running ${YARN_INSTANCES} ${APP_NAME} (${YARN_CORES} cores / ${MEMORY_MB}MB)"
        echo "Note that you will need to find the Application Master in YARN to find the Serving Layer"
        echo "instances, and kill the application with 'yarn application -kill [app ID]'"
        echo
        yarn jar ${YARN_DIST_SHELL_JAR} \
          -jar ${YARN_DIST_SHELL_JAR} \
          org.apache.hadoop.yarn.applications.distributedshell.Client \
          -appname ${APP_NAME} \
          -container_memory ${MEMORY_MB} \
          -container_vcores ${YARN_CORES} \
          -master_memory 256 \
          -master_vcores 1 \
          -num_containers ${YARN_INSTANCES} \
          -log_properties ${YARN_LOG4J} \
          -timeout 2147483647 \
          -shell_script ${LOCAL_SCRIPT}
        # TODO timeout above is the max, is 24 days, and can't be disabled
        # Clean up temp dirs; they are only used by this application anyway
        hdfs dfs -rm -r -skipTrash "${HDFS_APP_DIR}"
        rm -r "${LOCAL_SCRIPT_DIR}"
        ;;
      *)
        # Launch Layer as local process
        FINAL_CLASSPATH="${LAYER_JAR}:${BASE_CLASSPATH}"
        if [ -n "${APP_JAR}" ]; then
          FINAL_CLASSPATH="${APP_JAR}:${FINAL_CLASSPATH}"
        fi
        java ${JVM_ARGS} ${EXTRA_PROPS} -Dconfig.file=${CONFIG_FILE} -cp ${FINAL_CLASSPATH} ${MAIN_CLASS}
        ;;
    esac
    ;;
  kafka-setup|kafka-tail|kafka-input)
    # Resolve ZK/broker/topic settings for the input and update topics.
    setVarFromProperty "INPUT_ZK" "oryx\.input-topic\.lock\.master"
    setVarFromProperty "INPUT_KAFKA" "oryx\.input-topic\.broker"
    setVarFromProperty "INPUT_TOPIC" "oryx\.input-topic\.message\.topic"
    setVarFromProperty "UPDATE_ZK" "oryx\.update-topic\.lock\.master"
    setVarFromProperty "UPDATE_KAFKA" "oryx\.update-topic\.broker"
    setVarFromProperty "UPDATE_TOPIC" "oryx\.update-topic\.message\.topic"
    echo "Input ZK ${INPUT_ZK}"
    echo " Kafka ${INPUT_KAFKA}"
    echo " topic ${INPUT_TOPIC}"
    # Bug fix: this line previously echoed ${INPUT_ZK} (copy-paste error);
    # it must report the update topic's ZooKeeper.
    echo "Update ZK ${UPDATE_ZK}"
    echo " Kafka ${UPDATE_KAFKA}"
    echo " topic ${UPDATE_TOPIC}"
    echo
    case "${COMMAND}" in
      kafka-setup)
        ALL_TOPICS=$(${KAFKA_TOPICS_SH} --list --zookeeper ${INPUT_ZK})
        echo "All available topics:"
        echo "${ALL_TOPICS}"
        echo
        if [ -z $(echo "${ALL_TOPICS}" | grep ${INPUT_TOPIC}) ]; then
          read -p "Input topic ${INPUT_TOPIC} does not exist. Create it? " CREATE
          case "${CREATE}" in
            y|Y)
              echo "Creating topic ${INPUT_TOPIC}"
              ${KAFKA_TOPICS_SH} --zookeeper ${INPUT_ZK} --create --replication-factor 1 --partitions 4 --topic ${INPUT_TOPIC}
              ;;
          esac
        fi
        echo "Status of topic ${INPUT_TOPIC}:"
        ${KAFKA_TOPICS_SH} --zookeeper ${INPUT_ZK} --describe --topic ${INPUT_TOPIC}
        echo
        if [ -z $(echo "${ALL_TOPICS}" | grep ${UPDATE_TOPIC}) ]; then
          read -p "Update topic ${UPDATE_TOPIC} does not exist. Create it? " CREATE
          case "${CREATE}" in
            y|Y)
              echo "Creating topic ${UPDATE_TOPIC}"
              ${KAFKA_TOPICS_SH} --zookeeper ${UPDATE_ZK} --create --replication-factor 1 --partitions 1 --topic ${UPDATE_TOPIC}
              # Long retention / large messages for serialized model updates.
              ${KAFKA_TOPICS_SH} --zookeeper ${UPDATE_ZK} --alter --topic ${UPDATE_TOPIC} --config retention.ms=86400000 --config max.message.bytes=16777216
              ;;
          esac
        fi
        echo "Status of topic ${UPDATE_TOPIC}:"
        ${KAFKA_TOPICS_SH} --zookeeper ${UPDATE_ZK} --describe --topic ${UPDATE_TOPIC}
        echo
        ;;
      kafka-tail)
        ${KAFKA_CONSOLE_CONSUMER_SH} --zookeeper ${INPUT_ZK} --whitelist ${INPUT_TOPIC},${UPDATE_TOPIC} --property fetch.message.max.bytes=16777216
        ;;
      kafka-input)
        if [ ! -f "${INPUT_FILE}" ]; then
          usageAndExit "Input file ${INPUT_FILE} does not exist"
        fi
        ${KAFKA_CONSOLE_PRODUCER_SH} --broker-list ${INPUT_KAFKA} --topic ${INPUT_TOPIC} < "${INPUT_FILE}"
        ;;
    esac
    ;;
  *)
    usageAndExit "Invalid command ${COMMAND}"
    ;;
esac
|
#!/bin/bash
#
# Adopted from https://github.com/rapidsai/cudf/blob/branch-0.13/ci/cpu/upload_anaconda.sh
set -e

# Release-style branch names (master, vX.Y.*, branch-X.Y.*) may upload.
BRANCH_REGEX='^(master|((v|branch\-)[0-9]+\.[0-9]+\.(x|[0-9]+|[0-9]+\-preview[0-9]*)))$'

# Restrict uploads to master branch
if [[ ! "${GIT_BRANCH}" =~ ${BRANCH_REGEX} ]]; then
    echo "Skipping upload"
    exit 0
fi

if [[ -z "${MY_UPLOAD_KEY}" ]]; then
    echo "No upload key"
    exit 0
fi

if [[ "${UPLOAD_ARCTERN}" == "1" ]]; then
    # Ask conda-build for the built package path instead of guessing it.
    export ARCTERN_FILE=$(conda build conda/recipes/arctern/cpu -c conda-forge -c defaults --output)
    LABEL_OPTION="--label main"
    echo "LABEL_OPTION=${LABEL_OPTION}"
    test -e "${ARCTERN_FILE}"
    echo "Upload arctern..."
    echo "${ARCTERN_FILE}"
    anaconda -t "${MY_UPLOAD_KEY}" upload -u "${CONDA_USERNAME:-arctern}" ${LABEL_OPTION} --force "${ARCTERN_FILE}"
fi

if [[ "${UPLOAD_ARCTERN_WEBSERVER}" == "1" ]]; then
    export ARCTERN_WEBSERVER_FILE=$(conda build conda/recipes/arctern-webserver -c conda-forge -c defaults --output)
    LABEL_OPTION="--label main"
    echo "LABEL_OPTION=${LABEL_OPTION}"
    test -e "${ARCTERN_WEBSERVER_FILE}"
    echo "Upload arctern-webserver..."
    echo "${ARCTERN_WEBSERVER_FILE}"
    anaconda -t "${MY_UPLOAD_KEY}" upload -u "${CONDA_USERNAME:-arctern}" ${LABEL_OPTION} --force "${ARCTERN_WEBSERVER_FILE}"
fi
|
#!/usr/bin/bash
# Launch the application jar from the build output directory.
# NOTE(review): "redsqaure.jar" looks like a typo for "redsquare.jar" --
# confirm against the actual build artifact name before renaming anything.
/usr/bin/java -jar build/lib/redsqaure.jar
#!/bin/bash
set -o errexit
set -o nounset
set -o pipefail
set -o xtrace
# This script uses a connection to Bitwarden to populate k8s secrets used for
# the OKD CI infrastructure. To use this script, first get the BitWarden CLI at:
# https://help.bitwarden.com/article/cli/#download--install
# Then, log in to create a session:
# $ BW_SESSION="$( bw login username@company.com password --raw )"
# Pass that environment variable to this script so that it can use the session.
#
# WARNING: BitWarden sessions are sticky -- if changes have occurred to the
# content of the BitWarden vault after your current session was started,
# you will need to create a new session to be able to view those changes.
if [[ -z "${BW_SESSION:-}" ]]; then
echo "[WARNING] Ensure you have an active BitWarden session and provide the session token with \$BW_SESSION"
exit 1
fi
# Fetching attachments saves files locally
# that we need to track and clean up. Also,
# we're making a local copy of all of the
# secrets for faster processing, so we need
# to clean that up, too
work_dir="$( mktemp -d )"
cd "${work_dir}"
function cleanup() {
rm -rf "${work_dir}"
}
trap cleanup EXIT
# BitWarden's `get item $name` invocation does a search on
# the data stored in every secret, so secrets with names
# that are similar to fields in other secrets will not be
# addressable. There is also no way to specifically target
# the item's name field for searching. Therefore, we need
# to dump the list of secrets and search through it explicitly
# using jq. Thankfully, that's not too hard.
secrets="${work_dir}/secrets.json"
bw --session "${BW_SESSION}" list items > "${secrets}"
if [[ "$( jq ". | length" <"${secrets}" )" == 0 ]]; then
echo "[WARNING] Your active BitWarden session does not have access to secrets. If you created your session before you got access, refresh it by logging out and in again."
exit 1
fi
# retrieve the value of a top-level field from an item in BitWarden
# and format it in a key-value pair for a k8s secret
function format_field() {
local item="$1"
local field="$2"
local name="${3:-"${item}"}"
echo "--from-literal=${name}=$( jq ".[] | select(.name == \"${item}\") | ${field}" --raw-output <"${secrets}" )"
}
# retrieve the value of a field from an item in BitWarden
function get_field_value() {
local item="$1"
local field="$2"
jq ".[] | select(.name == \"${item}\") | .fields[] | select(.name == \"${field}\") | .value" --raw-output <"${secrets}"
}
# retrieve the value of a field from an item in BitWarden
# and format it in a key-value pair for a k8s secret
function format_field_value() {
local item="$1"
local field="$2"
local name="${3:-"${item}"}"
echo "--from-literal=${name}=$(get_field_value "${item}" "${field}")"
}
# retrieve the content of an attachment from an item in BitWarden
function get_attachment() {
local item="$1"
local attachment="$2"
local item_id="$( jq ".[] | select(.name == \"${item}\") | .id" --raw-output <"${secrets}" )"
local attachment_id="$( jq ".[] | select(.name == \"${item}\") | .attachments[] | select(.fileName == \"${attachment}\") | .id" --raw-output <"${secrets}" )"
bw --session "${BW_SESSION}" get attachment "${attachment_id}" --itemid "${item_id}" --raw
}
# retrieve the content of an attachment from an item in BitWarden
# and format it in a key-value pair for a k8s secret
function format_attachment() {
local item="$1"
local attachment="$2"
local name="${3:-"${attachment}"}"
echo "--from-file=${name}=$(get_attachment "${item}" "${attachment}")"
}
# merge all pull secret credentials into a single json
# object
function merge_pull_secrets() {
local quay_io
quay_io="$(get_field_value quay.io 'Pull Credentials')"
printf '%s\n' "${quay_io}" \
| jq --slurp --compact-output 'reduce .[] as $x ({}; . * $x)'
}
# Create-or-update a k8s secret and tag it as CI-managed.
# $1 is the secret type (e.g. "generic"); $2 is the secret name; remaining
# args are passed straight to `oc create secret`.
function update_secret() {
    local name
    name=$2
    # Render the secret locally and `oc apply` it so re-runs update in place
    # instead of failing with "already exists".
    # NOTE(review): bare `--dry-run` is deprecated in newer oc/kubectl in
    # favor of `--dry-run=client` -- confirm the client version before changing.
    oc create secret "$@" --dry-run -o yaml | oc apply -f -
    oc label secret --overwrite "${name}" "ci.openshift.io/managed=true"
}
# retrieve the value of a field and format it as a string, for
# use when more complex values are required to generate a secret
# retrieve the value of a field and format it as a string, for
# use when more complex values are required to generate a secret
function field_value() {
    # Identical extraction to get_field_value (defined above); delegate so the
    # jq query lives in exactly one place.
    get_field_value "$1" "$2"
}
# Jenkins credentials are stored as separate items in Bitwarden,
# with the token recorded as the password for the account
for master in "ci.openshift.redhat.com" "kata-jenkins-ci.westus2.cloudapp.azure.com"; do
update_secret generic "jenkins-credentials-${master}" "$( format_field "${master}" ".login.password" "password" )"
done
# Client certificates for the ci.dev Jenkins
# master are stored in a special set of fields
master="ci.dev.openshift.redhat.com"
update_secret generic "jenkins-credentials-${master}" \
"$( format_field "${master}" ".login.password" "password" )" \
"$( format_attachment "${master}" cert.pem )" \
"$( format_attachment "${master}" key.pem )" \
"$( format_attachment "${master}" ca.pem )"
# OAuth tokens for GitHub are stored as a text field named
# "GitHub OAuth Token" on login credential items for each robot.
for login in "openshift-bot" "openshift-build-robot" "openshift-cherrypick-robot" "openshift-ci-robot" "openshift-merge-robot" "openshift-publish-robot"; do
update_secret generic "github-credentials-${login}" "$( format_field_value "${login}" "GitHub OAuth Token" "oauth" )"
done
# openshift-publish-robot also has a token that grants read-only
# access to private repositories.
update_secret generic "private-git-cloner" "$( format_field_value "openshift-publish-robot" private-git-cloner "oauth" )"
# Configuration for Slack ci-chat-bot is stored under "Token"
# and the key value is "token" in the secret
update_secret generic ci-chat-bot-slack-token "$( format_field_value ci-chat-bot-slack-token "Token" "token" )"
# Configuration for api_url, which is for slack incoming hooks and can be used eg in prometheus alert-manager, is stored under "url"
# and the key value is "url" in the secret
update_secret generic ci-slack-api-url "$( format_field_value ci-slack-api-url "url" "url" )"
# Configuration for GitHub OAuth Apps are stored
# as an opaque field "Client Configuration"
update_secret generic github-app-credentials "$( format_field_value prow.svc.ci.openshift.org "Client Configuration" "config.json" )"
# Cookie secret to encrypt frontend and backend
# communication is stored in the "Cookie" field
update_secret generic cookie "$( format_field_value prow.svc.ci.openshift.org Cookie "cookie" )"
# HMAC token for encrypting GitHub webhook payloads
# is stored in the "HMAC Token" field
update_secret generic github-webhook-credentials "$( format_field_value hmac "HMAC Token" "hmac" )"
# DeploymentConfig token is used to auth trigger events
# for DeploymentConfigs from GitHub
update_secret generic github-deploymentconfig-trigger "$( format_field_value github-deploymentconfig-webhook-token "Token" "WebHookSecretKey" )"
# Credentials for GCE service accounts are stored
# as an attachment on each distinct credential
for account in "aos-pubsub-subscriber" "ci-vm-operator" "gcs-publisher" "gcs-tide-publisher"; do
update_secret generic "gce-sa-credentials-${account}" "$( format_attachment "${account}" credentials.json service-account.json )"
done
# Some GCE serviceaccounts also have SSH keys
for account in "aos-serviceaccount" "jenkins-ci-provisioner"; do
update_secret generic "gce-sa-credentials-${account}" \
"$( format_attachment "${account}" credentials.json service-account.json )" \
"$( format_attachment "${account}" ssh-privatekey )" \
"$( format_attachment "${account}" ssh-publickey )"
done
# Credentials for registries are stored as
# separate fields on individual items
# Credentials for registries are stored as separate fields on individual
# items. Command substitutions are quoted so the generated
# `--from-literal=...` argument survives word splitting -- consistent with
# every other update_secret call in this script.
for registry in "docker.io" "quay.io" "quay.io/openshift-knative"; do
    update_secret generic "registry-push-credentials-${registry//\//\-}" "$( format_field_value "${registry}" "Push Credentials" "config.json" )"
    # we want to be able to build and push out to registries
    oc secrets link builder "registry-push-credentials-${registry//\//\-}"
done
registry="quay.io"
update_secret generic "registry-pull-credentials-${registry}" "$( format_field_value "${registry}" "Pull Credentials" "config.json" )"
update_secret generic "ci-pull-credentials" --type=kubernetes.io/dockerconfigjson "$( format_field_value "${registry}" "Pull Credentials" ".dockerconfigjson" )"
# Cluster credentials aggregate multiple items
# of information for easy consumption by tests
target_cloud="aws"
update_secret generic "cluster-secrets-${target_cloud}" \
"$( format_attachment "quay.io" pull-secret )" \
"$( format_attachment "jenkins-ci-iam" .awscred )" \
"$( format_attachment "jenkins-ci-iam" ssh-privatekey )" \
"$( format_attachment "mirror.openshift.com" cert-key.pem ops-mirror.pem )" \
"$( format_attachment "jenkins-ci-iam" ssh-publickey )"
target_cloud="gcp"
update_secret generic "cluster-secrets-${target_cloud}" \
"$( format_attachment "quay.io" pull-secret )" \
"$( format_attachment "jenkins-ci-provisioner" credentials.json gce.json )" \
"$( format_attachment "jenkins-ci-provisioner" ssh-privatekey )" \
"$( format_attachment "jenkins-ci-provisioner" ssh-publickey )" \
"$( format_attachment "mirror.openshift.com" cert-key.pem ops-mirror.pem )" \
"$( format_field_value telemeter "Telemeter Token" "telemeter-token" )"
target_cloud="openstack"
update_secret generic "cluster-secrets-${target_cloud}" \
--from-literal=pull-secret="$(merge_pull_secrets)" \
"$( format_attachment "openstack" clouds.yaml )" \
"$( format_attachment "jenkins-ci-provisioner" ssh-privatekey )" \
"$( format_attachment "jenkins-ci-provisioner" ssh-publickey )"
target_cloud="vsphere"
update_secret generic "cluster-secrets-${target_cloud}" \
"$( format_attachment "quay.io" pull-secret )" \
"$( format_attachment "jenkins-ci-iam" .awscred )" \
"$( format_attachment "jenkins-ci-iam" ssh-privatekey )" \
"$( format_attachment "jenkins-ci-iam" ssh-publickey )" \
"$( format_attachment "vsphere-credentials" secret.auto.tfvars )"
target_cloud="metal"
update_secret generic "cluster-secrets-${target_cloud}" \
"$( format_attachment "quay.io" pull-secret )" \
"$( format_attachment "jenkins-ci-iam" .awscred )" \
"$( format_attachment "jenkins-ci-iam" ssh-privatekey )" \
"$( format_attachment "jenkins-ci-iam" ssh-publickey )" \
"$( format_attachment "packet.net" .packetcred )" \
"$( format_attachment "packet.net" client.crt matchbox-client.crt )" \
"$( format_attachment "packet.net" client.key matchbox-client.key )"
# DSNs for tools reporting failures to Sentry
update_secret generic "sentry-dsn" "$( format_field_value "sentry" "ci-operator" "ci-operator" )"
# Configuration for the .git-credentials used by the release controller to clone
# private repositories to generate changelogs
oc -n "ci-release" create secret generic "git-credentials" "--from-literal=.git-credentials=https://openshift-bot:$( field_value "openshift-bot" "GitHub OAuth Token" "oauth" )@github.com"
oc -n "ci-release" label secret "git-credentials" "ci.openshift.io/managed=true"
|
#!/bin/bash
set -eo pipefail
# Absolute directory containing this script, used to locate tiles.txt.
SCRIPT_DIR=$(cd "$(dirname "$0")"; pwd)
# First argument: project directory holding the board files.
PROJECT_DIR=$1
shift
# Remaining arguments act as a command prefix for the play binary
# (e.g. a debugger or timing wrapper) -- presumably optional; with no extra
# args "$@" expands to nothing and the binary runs directly.
"$@" ./src/play/play \
    EAKLDYS \
    "${SCRIPT_DIR}/tiles.txt" \
    "${PROJECT_DIR}/boards/wwf_challenge.txt"
|
#!/bin/bash
#
# --------------------------------------------
# Adds default users to the CRC cluster
# Docs - https://github.com/code-ready/crc/wiki/Add-another-user-to-cluster
# --------------------------------------------
if [[ -z "${CRC_KUBEADMIN_PASSWORD}" ]]; then
    echo "CRC 'kubeadmin' password is not set"
    exit 1
fi
export HTPASSWD_FILE=/tmp/htpasswd
# -c creates the file fresh on the first call; subsequent calls append.
htpasswd -c -B -b "$HTPASSWD_FILE" user1 password1
htpasswd -b "$HTPASSWD_FILE" user2 password2
htpasswd -b "$HTPASSWD_FILE" user3 password3
htpasswd -b "$HTPASSWD_FILE" user4 password4
# (previous version added user5 twice; the duplicate line is removed)
htpasswd -b "$HTPASSWD_FILE" user5 password5
# Make sure existing developer and kubeadmin user part of `htpasswd` file because kubeadmin is having cluster admin role.
htpasswd -b "$HTPASSWD_FILE" developer developer
htpasswd -b "$HTPASSWD_FILE" kubeadmin "$CRC_KUBEADMIN_PASSWORD"
# Append any entries already present in the cluster secret to the SAME file
# we upload below (the old version appended to ./htpasswd, which was never
# used, silently dropping pre-existing users).
oc get secrets htpass-secret -n openshift-config -ojsonpath='{.data.htpasswd}' | base64 -d >> "$HTPASSWD_FILE"
oc create secret generic htpass-secret --from-file="$HTPASSWD_FILE" -n openshift-config --dry-run -o yaml > /tmp/htpass-secret.yaml
oc replace -f /tmp/htpass-secret.yaml
oc get pods -n openshift-authentication
|
from flask import Flask, request, jsonify
from flask_sqlalchemy import SQLAlchemy
from flask_bcrypt import Bcrypt
app = Flask(__name__)
# SQLite file next to the app; fine for development, not for production use.
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///users.db'
db = SQLAlchemy(app)
bcrypt = Bcrypt(app)  # used for password hashing below
class User(db.Model):
    """Account record; passwords are stored only as bcrypt hashes."""
    # Surrogate primary key.
    id = db.Column(db.Integer, primary_key=True)
    # Login name; uniqueness is enforced at the database level.
    username = db.Column(db.String(50), unique=True, nullable=False)
    # Bcrypt hash of the password -- never the plaintext.
    password = db.Column(db.String(100), nullable=False)
@app.route('/register', methods=['POST'])
def register_user():
    """Register a new user from a JSON body with 'username' and 'password'.

    Returns 400 when either field is missing and 409 when the username is
    already taken; previously a duplicate username crashed with an unhandled
    IntegrityError (HTTP 500) and missing fields hashed None.
    """
    data = request.get_json(silent=True) or {}
    username = data.get('username')
    password = data.get('password')
    if not username or not password:
        return jsonify({'message': 'username and password are required'}), 400
    # Check for duplicates explicitly instead of relying on the unique
    # constraint to blow up inside commit().
    if User.query.filter_by(username=username).first() is not None:
        return jsonify({'message': 'Username already exists'}), 409
    hashed_password = bcrypt.generate_password_hash(password).decode('utf-8')
    new_user = User(username=username, password=hashed_password)
    db.session.add(new_user)
    db.session.commit()
    return jsonify({'message': 'User registered successfully'})
@app.route('/login', methods=['POST'])
def login_user():
    """Authenticate a user from a JSON body with 'username' and 'password'.

    Returns 401 on bad credentials; the previous version answered HTTP 200
    either way, so clients could not distinguish success from failure by
    status code.
    """
    data = request.get_json(silent=True) or {}
    username = data.get('username')
    password = data.get('password')
    user = User.query.filter_by(username=username).first()
    if user and bcrypt.check_password_hash(user.password, password):
        return jsonify({'message': 'Login successful'})
    # Same message for "no such user" and "wrong password" so the endpoint
    # does not leak which usernames exist.
    return jsonify({'message': 'Invalid username or password'}), 401
if __name__ == '__main__':
    # Flask-SQLAlchemy 3.x requires an application context for create_all();
    # the context manager is also harmless on 2.x.
    with app.app_context():
        db.create_all()
    # debug=True is for local development only -- do not ship it enabled.
    app.run(debug=True)
-- Purge rows whose entry_date is more than one year before today (MySQL).
DELETE FROM table_name WHERE entry_date < DATE_SUB(CURDATE(), INTERVAL 1 YEAR);
<filename>src/main/java/br/uff/ic/provviewer/Vertex/ColorScheme/DebugAllTrialsScheme.java<gh_stars>10-100
/*
* The MIT License
*
* Copyright 2017 Kohwalter.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
package br.uff.ic.provviewer.Vertex.ColorScheme;
import br.uff.ic.provviewer.VariableNames;
import br.uff.ic.provviewer.Variables;
import br.uff.ic.utility.TrafficLight;
import br.uff.ic.utility.Utils;
import br.uff.ic.utility.graph.Vertex;
import edu.uci.ics.jung.visualization.picking.PickedState;
import java.awt.Color;
import java.awt.Paint;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
/**
*
* @author Kohwalter
*/
public class DebugAllTrialsScheme extends ColorScheme {

    // Trials known to produce the correct result; a vertex's color reflects
    // how many of these it participates in.
    List<String> correctTrials = new ArrayList<>();
    // Trials the currently picked vertex belongs to; refreshed by updatePickedList().
    List<String> pickedTrials = new ArrayList<>();

    public DebugAllTrialsScheme(String attribute) {
        super(attribute);
        // NOTE(review): hard-coded trial names look like debug fixtures --
        // consider loading them from configuration if this scheme outgrows
        // debugging use.
        correctTrials.addAll(Arrays.asList(
                "workflow_trial_7.xml",
                "workflow_trial_11.xml",
                "workflow_trial_16.xml",
                "workflow_trial_17.xml",
                "workflow_trial_18.xml",
                "workflow_trial_22.xml",
                "workflow_trial_24.xml",
                "workflow_trial_25.xml",
                "workflow_trial_28.xml",
                "workflow_trial_32.xml"));
    }

    /**
     * Refreshes {@link #pickedTrials} from the first vertex currently picked
     * in the visualization, if any. Leaves the list unchanged when nothing
     * suitable is picked.
     */
    public void updatePickedList() {
        if (variables.view.getPickedVertexState() != null) {
            PickedState<Object> pickedState = variables.view.getPickedVertexState();
            if (pickedState.getSelectedObjects().length >= 1) {
                Object picked = pickedState.getSelectedObjects()[0];
                if (picked instanceof Vertex) {
                    String[] pickedVertexGraphs = ((Vertex) picked).getAttributeValues(VariableNames.GraphFile);
                    pickedTrials = new ArrayList<>(Arrays.asList(pickedVertexGraphs));
                }
            }
        }
    }

    /**
     * Colors a vertex on a traffic-light gradient according to how many
     * correct trials it participates in. When a vertex is picked, only
     * vertices that share a trial with it are colored; unrelated vertices are
     * made fully transparent.
     */
    @Override
    public Paint Execute(Object v, Variables variables) {
        this.variables = variables;
        updatePickedList();
        PickedState<Object> pickedState = variables.view.getPickedVertexState();
        String[] graphs = ((Vertex) v).getAttributeValues(VariableNames.GraphFile);
        if (pickedState.getSelectedObjects().length >= 1) {
            boolean belongsToPickedTrials = false;
            int correctAmongPicked = 0;
            for (String s : graphs) {
                if (pickedTrials.contains(s)) {
                    belongsToPickedTrials = true;
                    if (correctTrials.contains(s)) {
                        correctAmongPicked++;
                    }
                }
            }
            if (belongsToPickedTrials) {
                return TrafficLight.trafficLight(correctAmongPicked, 0, pickedTrials.size(), false);
            }
            // Alpha 0: invisible for vertices unrelated to the picked trials.
            return new Color(200, 200, 200, 0);
        }
        // No selection: grade against everything the vertex belongs to.
        return TrafficLight.trafficLight(countCorrect(graphs), 0, graphs.length, false);
    }

    /** Counts how many of the given graph files are in the correct-trial set. */
    private int countCorrect(String[] graphs) {
        int count = 0;
        for (String s : graphs) {
            if (correctTrials.contains(s)) {
                count++;
            }
        }
        return count;
    }
}
|
<filename>generator/contact.py
# -*- coding: utf-8 -*-
from model.contact_properties import Contact_properties
import random
import string
import os.path
import jsonpickle
import getopt
import sys
try:
    opts, args = getopt.getopt(sys.argv[1:], "n:f:", ["number of contacts", "file"])
except getopt.GetoptError as err:
    # The getopt module has no usage() helper -- the old call here raised an
    # AttributeError instead of reporting the bad option. Print the error and
    # a usage hint, then exit with the conventional status 2.
    print(err, file=sys.stderr)
    print("Usage: contact.py [-n number_of_contacts] [-f output_file]", file=sys.stderr)
    sys.exit(2)
# Defaults: 5 contacts written to data/contacts.json.
n = 5
f = "data/contacts.json"
for o, a in opts:
    if o == "-n":
        n = int(a)
    elif o == "-f":
        f = a
def randomString(prefix, maxlen):
    """Return prefix followed by a random tail of fewer than maxlen
    characters drawn from letters, digits and spaces (spaces are
    over-weighted to mimic natural text)."""
    symbols = string.ascii_letters + string.digits + " " * 12
    tail_length = random.randrange(maxlen)
    tail = "".join(random.choice(symbols) for _ in range(tail_length))
    return prefix + tail
# Build n randomly-populated contact records for UI tests.
# NOTE(review): phone and year fields are produced by randomString, so they
# may contain letters and spaces -- presumably acceptable for test data;
# confirm with the consuming tests.
testdata = [
    Contact_properties(firstname=randomString("name", 10), middlename=randomString("middlename", 10), lastname=randomString("lastname", 10),
                       nickname=randomString("nick", 10), title=randomString("title", 10), company=randomString("company", 10),
                       address=randomString("address", 20), home=randomString("355455", 6), mobile=randomString("455335", 9),
                       work=randomString("24435", 6), fax=randomString("4334", 6), email=randomString("<EMAIL>", 4),
                       email2=randomString("<EMAIL>", 4), email3=randomString("<EMAIL>", 4), homepage=randomString("homepage.com", 4),
                       byear=randomString("1555", 2), ayear=randomString("1666", 2), address2="address", phone2=randomString("234332", 10),
                       notes=randomString("notes", 30))
    for i in range(n)
]
# Resolve the output path relative to the repository root (parent of this
# script's directory), then serialize with jsonpickle at 2-space indent.
file = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", f)
with open(file, "w") as out:
    jsonpickle.set_encoder_options("json", indent=2)
    out.write(jsonpickle.encode(testdata))
|
# Evaluate a language model on the WikiText-103 validation split with an
# augmented dataloader -- flag meanings per run_language_modeling.py; the
# augmentation/eval function names select project-specific variants.
python transformers/examples/language-modeling/run_language_modeling.py --model_name_or_path train-outputs/1024+0+512-shuffled-N-VB/7-model --tokenizer_name model-configs/1536-config --eval_data_file ../data/wikitext-103-raw/wiki.valid.raw --output_dir eval-outputs/1024+0+512-shuffled-N-VB/7-512+512+512-shuffled-N-first-256 --do_eval --per_device_eval_batch_size 1 --dataloader_drop_last --augmented --augmentation_function shuffle_remove_all_but_nouns_first_third_sixth --eval_function penultimate_sixth_eval
package uk.gov.ons.br.parsers
import akka.stream.scaladsl.Source
import akka.util.ByteString
import org.scalatest.concurrent.ScalaFutures
import org.scalatestplus.play.guice.GuiceOneAppPerTest
import play.api.http.Status.{BAD_REQUEST, UNSUPPORTED_MEDIA_TYPE}
import play.api.libs.json.{JsNumber, JsString}
import play.api.test.{FakeRequest, StubPlayBodyParsersFactory}
import play.mvc.Http.HeaderNames.CONTENT_TYPE
import play.mvc.Http.MimeTypes.JSON
import uk.gov.ons.br.models.patch.{ReplaceOperation, TestOperation}
import uk.gov.ons.br.parsers.JsonPatchBodyParser.JsonPatchMediaType
import uk.gov.ons.br.test.UnitSpec
import scala.concurrent.ExecutionContext
/*
* See https://github.com/playframework/playframework/blob/master/framework/src/play/src/test/scala/play/mvc/RawBodyParserSpec.scala
* for an example of how a BodyParser can be tested.
*/
// Exercises JsonPatchBodyParser end-to-end through a real materializer,
// following the Play RawBodyParserSpec approach linked above.
class JsonPatchBodyParserSpec extends UnitSpec with GuiceOneAppPerTest with ScalaFutures {
  private trait Fixture extends StubPlayBodyParsersFactory {
    // A valid two-operation RFC6902 patch document (a "test" op and a "replace" op).
    val PatchSpec = """|[{"op": "test", "path": "/a/b/c", "value": "foo"},
                       | {"op": "replace", "path": "/a/b/c", "value": 42}]""".stripMargin
    // The running app's materializer drives the body Source through the parser.
    implicit val materializer = app.materializer
    val underTest = new JsonPatchBodyParser(stubPlayBodyParsers.tolerantJson)(ExecutionContext.global)
  }

  "A body representing a JSON patch specification" - {
    "can be parsed when valid" in new Fixture {
      val request = FakeRequest().withHeaders(CONTENT_TYPE -> JsonPatchMediaType)
      val body = Source.single(ByteString(PatchSpec))
      whenReady(underTest(request).run(body)) { result =>
        result.right.value shouldBe Seq(
          TestOperation("/a/b/c", JsString("foo")),
          ReplaceOperation("/a/b/c", JsNumber(42)))
      }
    }

    "is rejected" - {
      // The parser accepts only the dedicated JSON Patch media type, not plain JSON.
      "when the media type is not that of Json Patch" in new Fixture {
        val request = FakeRequest().withHeaders(CONTENT_TYPE -> JSON)
        val body = Source.single(ByteString(PatchSpec))
        whenReady(underTest(request).run(body)) { result =>
          result.left.value.header.status shouldBe UNSUPPORTED_MEDIA_TYPE
        }
      }

      "when the patch document is not valid json" in new Fixture {
        val request = FakeRequest().withHeaders(CONTENT_TYPE -> JsonPatchMediaType)
        val invalidJson = s"""[{"op": "test", "path": "/a/b/c", "value": "foo"]""" // object is not closed correctly
        val body = Source.single(ByteString(invalidJson))
        whenReady(underTest(request).run(body)) { result =>
          result.left.value.header.status shouldBe BAD_REQUEST
        }
      }

      "when the patch document does not conform to the Json Patch specification (RFC6902)" in new Fixture {
        val request = FakeRequest().withHeaders(CONTENT_TYPE -> JsonPatchMediaType)
        val invalidPatch = s"""[{"op": "test", "path": "/a/b/c"}]""" // missing 'value' field
        val body = Source.single(ByteString(invalidPatch))
        whenReady(underTest(request).run(body)) { result =>
          result.left.value.header.status shouldBe BAD_REQUEST
        }
      }
    }
  }
}
|
def validate_json(json_obj, validation_rules):
    """Validate json_obj field-by-field against validation_rules.

    validation_rules maps a field name to a list of rule strings:
    "required", "type:<string|number|boolean>", "min_length:N",
    "max_length:N", "min_value:X", "max_value:X".

    Returns a dict mapping each rule-covered field to True (valid) or False.
    A missing field is invalid only when it is required; rules other than
    "required" are skipped for absent fields.
    """
    validation_results = {}
    for field, rules in validation_rules.items():
        if field not in json_obj:
            # Absent: fails only the "required" rule.
            validation_results[field] = "required" not in rules
            continue
        validation_results[field] = True
        value = json_obj[field]
        for rule in rules:
            if rule.startswith("type:"):
                data_type = rule.split(":", 1)[1]
                if data_type == "string" and not isinstance(value, str):
                    validation_results[field] = False
                # bool is a subclass of int, so exclude it explicitly --
                # otherwise True/False would pass as "number".
                elif data_type == "number" and (isinstance(value, bool)
                                                or not isinstance(value, (int, float))):
                    validation_results[field] = False
                elif data_type == "boolean" and not isinstance(value, bool):
                    validation_results[field] = False
            elif rule.startswith("min_length:"):
                min_length = int(rule.split(":", 1)[1])
                # Guard len() so unsized values (numbers, bools) fail the
                # rule instead of raising TypeError.
                if not hasattr(value, "__len__") or len(value) < min_length:
                    validation_results[field] = False
            elif rule.startswith("max_length:"):
                max_length = int(rule.split(":", 1)[1])
                if not hasattr(value, "__len__") or len(value) > max_length:
                    validation_results[field] = False
            elif rule.startswith("min_value:"):
                min_value = float(rule.split(":", 1)[1])
                # Non-numeric values fail instead of raising on comparison.
                if not isinstance(value, (int, float)) or value < min_value:
                    validation_results[field] = False
            elif rule.startswith("max_value:"):
                max_value = float(rule.split(":", 1)[1])
                if not isinstance(value, (int, float)) or value > max_value:
                    validation_results[field] = False
    return validation_results
package com.atguigu.gulimall.member.service;
import com.atguigu.common.utils.PageUtils;
import com.atguigu.gulimall.member.entity.MemberEntity;
import com.atguigu.gulimall.member.vo.MemberLoginVo;
import com.atguigu.gulimall.member.vo.MemberRegisterVo;
import com.atguigu.gulimall.member.vo.SocialUser;
import com.baomidou.mybatisplus.extension.service.IService;
import java.util.Map;
/**
 * Member service.
 *
 * @author yanglvjin
 * @email <EMAIL>
 * @date 2021-04-10 00:44:34
 */
public interface MemberService extends IService<MemberEntity> {

    /**
     * Queries a page of members.
     *
     * @param params paging and filter parameters
     * @return the requested page
     */
    PageUtils queryPage(Map<String, Object> params);

    /**
     * Registers a new member.
     *
     * @param memberRegisterVo registration form data
     */
    void register(MemberRegisterVo memberRegisterVo);

    /**
     * Logs a member in with username/password credentials.
     *
     * @param memberLoginVo login form data
     * @return the authenticated member, as resolved by the implementation
     */
    MemberEntity login(MemberLoginVo memberLoginVo);

    /**
     * Logs a member in via a social (third-party) account.
     *
     * @param socialUser social account data
     * @return the authenticated member, as resolved by the implementation
     */
    MemberEntity socialLogin(SocialUser socialUser);
}
|
<gh_stars>10-100
/**
* Copyright (C) 2009 - present by OpenGamma Inc. and the OpenGamma group of companies
*
* Please see distribution for license.
*/
package com.opengamma.analytics.math.interpolation;
import org.apache.commons.lang.Validate;
import com.opengamma.analytics.math.interpolation.data.ArrayInterpolator1DDataBundle;
import com.opengamma.analytics.math.interpolation.data.InterpolationBoundedValues;
import com.opengamma.analytics.math.interpolation.data.Interpolator1DDataBundle;
/**
* A one-dimensional linear interpolator. The interpolated value of the function
* <i>y</i> at <i>x</i> between two data points <i>(x<sub>1</sub>,
* y<sub>1</sub>)</i> and <i>(x<sub>2</sub>, y<sub>2</sub>)</i> is given by:<br>
* <i>y = y<sub>1</sub> + (x - x<sub>1</sub>) * (y<sub>2</sub> - y<sub>1</sub>)
* / (x<sub>2</sub> - x<sub>1</sub>)</i>
*/
public class LinearInterpolator1D extends Interpolator1D {
  private static final long serialVersionUID = 1L;

  /**
   * Linearly interpolates the y-value at {@code value} between the two data
   * points that bracket it.
   *
   * @param model the data bundle, not null
   * @param value the x-value to interpolate at, not null
   * @return the interpolated y-value; the last y-value when {@code value}
   *         falls at the last data point
   */
  @Override
  public Double interpolate(final Interpolator1DDataBundle model, final Double value) {
    Validate.notNull(value, "Value to be interpolated must not be null");
    Validate.notNull(model, "Data bundle must not be null");
    final InterpolationBoundedValues boundedValues = model.getBoundedValues(value);
    final double x1 = boundedValues.getLowerBoundKey();
    final double y1 = boundedValues.getLowerBoundValue();
    // At the last data point there is no higher bound to interpolate
    // against, so return the last y-value directly.
    if (model.getLowerBoundIndex(value) == model.size() - 1) {
      return y1;
    }
    final double x2 = boundedValues.getHigherBoundKey();
    final double y2 = boundedValues.getHigherBoundValue();
    return y1 + (value - x1) / (x2 - x1) * (y2 - y1);
  }

  /**
   * Sensitivities of the interpolated value to each node's y-value. Only the
   * two bracketing nodes have non-zero weights (which sum to one); beyond the
   * last node, all sensitivity sits on the last node.
   */
  @Override
  public double[] getNodeSensitivitiesForValue(final Interpolator1DDataBundle data, final Double value) {
    Validate.notNull(data, "data");
    final int n = data.size();
    final double[] result = new double[n];
    final InterpolationBoundedValues boundedValues = data.getBoundedValues(value);
    // No higher bound: value is at/after the last node.
    if (boundedValues.getHigherBoundKey() == null) {
      result[n - 1] = 1.0;
      return result;
    }
    final int index = data.getLowerBoundIndex(value);
    final double x1 = boundedValues.getLowerBoundKey();
    final double x2 = boundedValues.getHigherBoundKey();
    final double dx = x2 - x1;
    // Linear weights: a on the lower node, 1 - a on the higher node.
    final double a = (x2 - value) / dx;
    final double b = 1 - a;
    result[index] = a;
    result[index + 1] = b;
    return result;
  }

  @Override
  public Interpolator1DDataBundle getDataBundle(final double[] x, final double[] y) {
    return new ArrayInterpolator1DDataBundle(x, y);
  }

  @Override
  public Interpolator1DDataBundle getDataBundleFromSortedArrays(final double[] x, final double[] y) {
    // 'true' flags the inputs as already sorted, skipping the sort step.
    return new ArrayInterpolator1DDataBundle(x, y, true);
  }
}
|
The choice of visualization should depend on the type of data being presented. For categorical data, a bar chart or pie chart is suitable; for numerical data, a scatter plot, line chart, or histogram works well. For example, to visualize the distribution of students' marks in a class, a histogram would be appropriate.
The number of variables in the dataset also matters. With only two variables, a simple visualization suffices — for instance, a scatter plot for two numeric variables. With more than two variables, more advanced techniques such as parallel coordinates or radar charts should be used.
Lastly, the visualization should use appropriate colors and labels to make the data easier to comprehend.
def sort_alphabetically(input_string):
    """Print the characters of input_string sorted by code point, and also
    return the sorted string so callers can use the value directly
    (previously the function only printed and returned None).

    Note: sorting is by raw code point, so uppercase letters and punctuation
    sort before lowercase letters.
    """
    result = "".join(sorted(input_string))
    print(result)
    return result


sort_alphabetically("Hello, World!")
const request = require('request'),
testData = require('../testData.json');
describe('Action: getImage', () => {
  // Reset the Variant table to a single known row before the test.
  // NOTE(review): relies on globals (api, expect) injected by the test
  // harness -- confirm against the suite's bootstrap file.
  before(() => api.db.models.Variant.truncate()
    .then(() => api.db.models.Variant.create(testData.variant)));

  it('Properly processes image requests', done => {
    // Builds http://localhost:<port>/<actions-path>/image/<variant-id>/<encoded-url>;
    // 'http:/' gets its second slash from the join('/') below.
    let url = [
      'http:/',
      ['localhost', api.config.servers.web.port].join(':'),
      api.config.servers.web.urlPathForActions,
      'image',
      testData.variant.id,
      encodeURIComponent(testData.url)
    ].join('/');
    request(url, function(error, response, body) {
      expect(error).to.be.null();
      expect(response.headers['content-type']).to.be.equal('image/jpeg');
      // A real JPEG should come back; 3000 bytes is a sanity lower bound.
      expect(body.length).to.be.above(3000);
      done();
    });
  });
});
|
import random
import secrets
import string

from django.core.mail import send_mail
def initiate_email_change(user, new_email):
    """Start an email-change flow for `user`.

    Generates a confirmation code, emails it to the address being claimed,
    and returns the code so the caller can persist it for later verification.
    """
    # Use the cryptographically secure `secrets` module here: this code gates
    # an account-security action, and the `random` PRNG is predictable.
    alphabet = string.ascii_letters + string.digits
    confirmation_code = ''.join(secrets.choice(alphabet) for _ in range(10))
    # Compose the email subject and message
    email_subject = "Email Change Confirmation"
    email_message = f"Dear user, please use the following code to confirm your email change: {confirmation_code}"
    # Send the code to the NEW address, proving the user controls it.
    send_mail(email_subject, email_message, 'from@example.com', [new_email])
    # Return the confirmation code generated
    return confirmation_code
<reponame>Kepler-Br/Wolfenstein-clone
//
// Created by kepler-br on 6/10/20.
//
#ifndef WOLFENSHETIN_TEXTURE_H
#define WOLFENSHETIN_TEXTURE_H
#include <string>
#include <glm/vec2.hpp>
#include "types.h"
class Texture_loader;
// Owns a CPU-side pixel buffer loaded from an image file and provides
// indexed and normalized-coordinate access to it.
class Texture
{
private:
    friend Texture_loader;
    // Whether the texture carries transparency -- presumably set during
    // loading; confirm in the .cpp.
    bool transparent;
    // Wrapping flag for lookups (toggled via set_wrapping) -- presumably
    // controls out-of-range coordinate handling; verify in get_pixel's impl.
    bool wrap = true;
    // Raw owning pointer released by free(). NOTE(review): no destructor or
    // copy control (rule of three) -- copying this class aliases the buffer;
    // consider std::vector<Pixel> or std::unique_ptr.
    Pixel *pixels = nullptr;
    // Width/height in pixels.
    glm::ivec2 resolution;
    size_t pixel_count;
    void read_file(const std::string &path);
public:
    Texture(const std::string &path);
    Texture() = default;
    // Releases the pixel buffer; must be called manually (no destructor does it).
    void free();
    Pixel *get_pixels() const;
    void set_pixel(const Pixel &color, const glm::ivec2 &position);
    const Pixel &get_pixel(const uint &index) const;
    const Pixel &get_pixel(const glm::ivec2 &position) const;
    // Lookup with coordinates normalized to [0, 1] per axis.
    const Pixel &get_normalized_pixel(const glm::vec2 &position) const;
    const size_t &get_pixel_count() const;
    const glm::ivec2 &get_resolution() const;
    bool is_transparent() const;
    void set_wrapping(const bool &wrap);
    void read(const std::string &path);
};
#endif //WOLFENSHETIN_TEXTURE_H
|
#!/bin/bash
# KBase SDK module entrypoint: dispatches on the first argument to run the
# server, tests, async jobs, data initialization, a shell, or a compile report.
. /kb/deployment/user-env.sh
python ./scripts/prepare_deploy_cfg.py ./deploy.cfg ./work/config.properties
# Export the auth token when the job runner has provided one.
if [ -f ./work/token ] ; then
export KB_AUTH_TOKEN=$(<./work/token)
fi
if [ $# -eq 0 ] ; then
sh ./scripts/start_server.sh
elif [ "${1}" = "test" ] ; then
echo "Run Tests"
make test
elif [ "${1}" = "async" ] ; then
sh ./scripts/run_async.sh
elif [ "${1}" = "init" ] ; then
echo "Initialize module"
cd /data
echo "Getting GTDB-Tk database"
curl -O https://data.gtdb.ecogenomic.org/releases/release202/202.0/auxillary_files/gtdbtk_r202_data.tar.gz
tar xvzf gtdbtk_r202_data.tar.gz --strip 1
rm gtdbtk_r202_data.tar.gz
# The __READY__ marker file signals a successful data init to the runtime.
if [[ -d "taxonomy" && -d "fastani" && -d "markers" ]] ; then
touch __READY__
else
echo "init failed"
fi
elif [ "${1}" = "bash" ] ; then
bash
elif [ "${1}" = "report" ] ; then
export KB_SDK_COMPILE_REPORT_FILE=./work/compile_report.json
make compile
else
echo Unknown
fi
|
#!/usr/bin/env bash
# Run the MITM capture in the background (stdout+stderr logged to RIData.txt),
# drive it with the controller in the foreground, then stop the capture.
#
# Fixes: `&> RIData.txt 2>&1` mixed two redirection styles redundantly, and
# `kill $(pidof python)` killed EVERY python process on the machine instead of
# only the one started here.
python RIMITM.py > RIData.txt 2>&1 &
MITM_PID=$!
python RIMITMController.py
# Kill only the background capture process we launched above.
kill "$MITM_PID"
def max_min(num_list):
    """Return the minimum and maximum of a sequence of numbers.

    Args:
        num_list: A non-empty sequence of comparable numbers.

    Returns:
        A tuple ``(minimum, maximum)``.

    Raises:
        ValueError: If ``num_list`` is empty (the previous implementation
            silently returned ``(inf, -inf)`` in that case).
    """
    if not num_list:
        raise ValueError("max_min() arg is an empty sequence")
    # The built-ins do the single-purpose scans more clearly than a manual loop.
    return min(num_list), max(num_list)
# Demonstrate max_min on a small sample list.
values = [10, 4, 20, 8, 5]
lowest, highest = max_min(values)
print(f"Minimum: {lowest}, Maximum: {highest}")
// Generated production build artifact: placeholder module with an empty
// default export; the trailing pragma links the emitted source map.
export default{};
//# sourceMappingURL=ElementState.prod.js.map
#!/bin/bash
# Simple Bash script for code formatting in 1tbs.
# See http://astyle.sourceforge.net/astyle.html for syntax and defaults.
MINPARAMS=1
ORIG_SUFFIX=orig
# Exit error when not enough arguments.
if [ $# -lt "$MINPARAMS" ]
then
echo "This script needs C source files passed as arguments"
echo "USAGE: format-code.sh src/main.c src/somecode.c ..."
exit 1
fi
# For all files provided as arguments.
for FILE in "$@"
do
# "$FILE" is quoted so paths containing spaces reach astyle intact
# (it was previously unquoted and subject to word splitting/globbing).
RESULT="$(astyle --style=1tbs \
--indent-col1-comments \
--break-blocks \
--pad-oper \
--pad-header \
--delete-empty-lines \
--add-brackets \
--convert-tabs \
--max-code-length=80 \
--break-after-logical \
--mode=c \
--suffix=.$ORIG_SUFFIX \
--lineend=linux \
"$FILE")"
# If file unchanged print unchanged result message.
if [[ "$RESULT" = Unchanged* ]]
then
echo "$RESULT"
fi
# If file formatted print result and renamed original file name.
if [[ "$RESULT" = Formatted* ]]
then
echo "$RESULT"
echo "Original code was preserved in file $FILE.$ORIG_SUFFIX"
fi
done
exit 0
<reponame>smagill/opensphere-desktop
package io.opensphere.mantle.plugin.selection;
import java.awt.Component;
import java.awt.EventQueue;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.stream.Collectors;
import javax.swing.JMenuItem;
import org.apache.log4j.Logger;
import io.opensphere.core.Toolbox;
import io.opensphere.core.control.action.ContextActionManager;
import io.opensphere.core.control.action.ContextMenuProvider;
import io.opensphere.core.control.action.context.ContextIdentifiers;
import io.opensphere.core.control.action.context.GeometryContextKey;
import io.opensphere.core.control.action.context.MultiGeometryContextKey;
import io.opensphere.core.control.action.context.ScreenPositionContextKey;
import io.opensphere.core.geometry.AbstractGeometryGroup;
import io.opensphere.core.geometry.Geometry;
import io.opensphere.core.geometry.GeometryGroupGeometry;
import io.opensphere.core.geometry.MultiPolygonGeometry;
import io.opensphere.core.geometry.PointGeometry;
import io.opensphere.core.geometry.PolygonGeometry;
import io.opensphere.core.geometry.PolylineGeometry;
import io.opensphere.core.geometry.renderproperties.DefaultPointRenderProperties;
import io.opensphere.core.math.Vector2i;
import io.opensphere.core.model.Altitude.ReferenceLevel;
import io.opensphere.core.model.GeographicPosition;
import io.opensphere.core.model.Position;
import io.opensphere.core.quantify.Quantify;
import io.opensphere.core.util.Utilities;
import io.opensphere.core.util.collections.New;
import io.opensphere.core.util.lang.NamedThreadFactory;
import io.opensphere.core.util.ref.WeakReference;
import io.opensphere.core.viewer.impl.ViewerAnimator;
import io.opensphere.mantle.controller.DataGroupController;
import io.opensphere.mantle.controller.DataTypeController;
import io.opensphere.mantle.data.DataTypeInfo;
import io.opensphere.mantle.data.cache.DataElementCache;
import io.opensphere.mantle.data.util.DataElementUpdateUtils;
import io.opensphere.mantle.data.util.purge.PurgeConfirmHelper;
import io.opensphere.mantle.plugin.queryregion.QueryRegionManager;
import io.opensphere.mantle.transformer.MapDataElementTransformer;
/**
 * Handles selection of geometries on the map: installs context-menu providers
 * for screen positions, single geometries, and multi-geometry regions of
 * interest, translates menu actions into {@link SelectionCommand}s, and
 * dispatches those commands to registered processors on a background executor.
 */
@SuppressWarnings("PMD.GodClass")
public class SelectionHandler
{
    /** The logger. */
    private static final Logger LOGGER = Logger.getLogger(SelectionHandler.class);
    /** Map of selection commands to registered region-selection command processors. */
    private final Map<SelectionCommand, List<WeakReference<SelectionCommandProcessor>>> myCommandToProcessorMap;
    /** Map of selection commands to registered line-selection command processors. */
    private final Map<SelectionCommand, List<WeakReference<LineSelectionCommandProcessor>>> myLineCommandToProcessorMap;
    /** The data group controller. */
    private final DataGroupController myDataGroupController;
    /**
     * The point geometry representing screen position, for creating a buffer
     * with no geometry.
     */
    private PointGeometry myNoGeometryPoint;
    /** The Default context menu provider. */
    private final ContextMenuProvider<ScreenPositionContextKey> myDefaultContextMenuProvider = new ContextMenuProvider<>()
    {
        @Override
        public List<Component> getMenuItems(String contextId, ScreenPositionContextKey key)
        {
            final GeographicPosition pos = myToolbox.getMapManager().convertToPosition(new Vector2i(key.getPosition().asPoint()),
                    ReferenceLevel.ELLIPSOID);
            if (pos != null)
            {
                // No geometry was clicked; remember the screen point so a
                // buffer can still be created around it.
                myLastGeometry = null;
                PointGeometry.Builder<GeographicPosition> pointGeometry = new PointGeometry.Builder<>();
                pointGeometry.setPosition(pos);
                myNoGeometryPoint = new PointGeometry(pointGeometry, new DefaultPointRenderProperties(0, true, true, true), null);
                return SelectionCommandFactory.getNoGeometryMenuItems(myMenuActionListener);
            }
            return Collections.emptyList();
        }
        @Override
        public int getPriority()
        {
            return 10000;
        }
    };
    /** The ExecutorService. */
    private final ExecutorService myExecutor;
    /**
     * The menu provider for events related to single geometry selection or
     * completion.
     */
    private final ContextMenuProvider<GeometryContextKey> myGeometryContextMenuProvider = new ContextMenuProvider<>()
    {
        @Override
        public List<Component> getMenuItems(String contextId, GeometryContextKey key)
        {
            List<Component> menuItems = New.list();
            if (contextId.equals(ContextIdentifiers.GEOMETRY_SELECTION_CONTEXT))
            {
                Geometry geom = key.getGeometry();
                menuItems = getGeometryMenuItems(geom);
            }
            else if (contextId.equals(ContextIdentifiers.GEOMETRY_COMPLETED_CONTEXT)
                    && key.getGeometry() instanceof PolygonGeometry)
            {
                return SelectionCommandFactory.getSelectionRegionMenuItems(
                        new PolygonCommandActionListener(Collections.singleton(key.getGeometry())), hasLoadFilters());
            }
            else if (contextId.equals(ContextIdentifiers.GEOMETRY_COMPLETED_CONTEXT)
                    && key.getGeometry() instanceof PolylineGeometry)
            {
                return SelectionCommandFactory
                        .getPolylineMenuItems(new PolylineCommandActionListener(Collections.singleton(key.getGeometry())));
            }
            return menuItems;
        }
        @Override
        public int getPriority()
        {
            return 10000;
        }
    };
    /** The Last selection geometry. */
    private Geometry myLastGeometry;
    /** Listener for menu actions. */
    private final ActionListener myMenuActionListener = e -> handleCommand(e.getActionCommand());
    /**
     * The menu provider for events which occur on multiple polygon geometries.
     */
    private final ContextMenuProvider<MultiGeometryContextKey> myMultiGeometryContextMenuProvider = new ContextMenuProvider<>()
    {
        @Override
        public List<Component> getMenuItems(String contextId, MultiGeometryContextKey key)
        {
            List<Component> menuItems = getMultiGeometryMenu(key.getGeometries());
            if (key.getGeometries().isEmpty())
            {
                // Show the menu but disable each entry when nothing is selected.
                for (Component item : menuItems)
                {
                    item.setEnabled(false);
                    if (item instanceof JMenuItem)
                    {
                        ((JMenuItem)item).setToolTipText("No geometries selected for action.");
                    }
                }
            }
            return menuItems;
        }
        @Override
        public int getPriority()
        {
            return 10001;
        }
    };
    /** The Preview geometry. */
    private Geometry myPreviewGeometry;
    /** The query region manager. */
    private final QueryRegionManager myQueryRegionManager;
    /** The my toolbox. */
    private final Toolbox myToolbox;
    /** The controller through which data type lookups are performed. */
    private final DataTypeController myDataTypeController;
    /** The data element cache. */
    private final DataElementCache myDataElementCache;
    /** The data element update utilities. */
    private final DataElementUpdateUtils myDataElementUpdateUtils;
    /** The buffer region creator. */
    private final BufferRegionCreator myBufferRegionCreator;
    /**
     * Instantiates a new selection handler.
     *
     * @param toolbox The toolbox.
     * @param dataGroupController The data group controller.
     * @param pTypeController The controller through which data type lookups are
     *            performed.
     * @param queryRegionManager The query region manager.
     * @param dataElementCache The data element cache
     * @param dataElementUpdateUtils The data element update utilities
     */
    public SelectionHandler(Toolbox toolbox, DataGroupController dataGroupController, DataTypeController pTypeController,
            QueryRegionManager queryRegionManager, DataElementCache dataElementCache,
            DataElementUpdateUtils dataElementUpdateUtils)
    {
        myDataTypeController = pTypeController;
        myExecutor = Executors.newFixedThreadPool(1, new NamedThreadFactory("SelectionHandler:Dispatch", 3, 4));
        myCommandToProcessorMap = New.map();
        myLineCommandToProcessorMap = New.map();
        myToolbox = toolbox;
        myDataGroupController = dataGroupController;
        myQueryRegionManager = queryRegionManager;
        myDataElementCache = dataElementCache;
        myDataElementUpdateUtils = dataElementUpdateUtils;
        myBufferRegionCreator = new BufferRegionCreator(toolbox);
    }
    /**
     * Gets the set of active data types that have filters applied.
     *
     * @return the set of active data types that have filters applied
     */
    public Set<DataTypeInfo> getActiveFilteredDataTypes()
    {
        Set<String> typeKeys = new HashSet<>();
        myToolbox.getDataFilterRegistry().getLoadFilters().stream().forEach(f -> typeKeys.add(f.getTypeKey()));
        typeKeys.addAll(myToolbox.getDataFilterRegistry().getSpatialLoadFilterKeys());
        return typeKeys.stream().map(k -> myDataGroupController.findMemberById(k))
                .filter(t -> t != null && myDataGroupController.isTypeActive(t) && t.isFilterable()).collect(Collectors.toSet());
    }
    /**
     * Returns whether there are any active filters for active layers.
     *
     * @return Whether there are any active filters for active layers
     */
    public boolean hasLoadFilters()
    {
        return !getActiveFilteredDataTypes().isEmpty();
    }
    /**
     * Install the selection handler.
     *
     * @param toolbox the {@link Toolbox}
     */
    public void install(Toolbox toolbox)
    {
        ContextActionManager actionManager = toolbox.getUIRegistry().getContextActionManager();
        actionManager.registerContextMenuItemProvider(ContextIdentifiers.SCREEN_POSITION_CONTEXT, ScreenPositionContextKey.class,
                myDefaultContextMenuProvider);
        actionManager.registerContextMenuItemProvider(ContextIdentifiers.GEOMETRY_COMPLETED_CONTEXT, GeometryContextKey.class,
                myGeometryContextMenuProvider);
        actionManager.registerContextMenuItemProvider(ContextIdentifiers.GEOMETRY_SELECTION_CONTEXT, GeometryContextKey.class,
                myGeometryContextMenuProvider);
        actionManager.registerContextMenuItemProvider(ContextIdentifiers.ROI_CONTEXT, MultiGeometryContextKey.class,
                myMultiGeometryContextMenuProvider);
    }
    /**
     * Register selection command processor.
     *
     * @param command the command to be processed
     * @param processor the processor to process the command
     */
    public void registerSelectionCommandProcessor(SelectionCommand command, SelectionCommandProcessor processor)
    {
        synchronized (myCommandToProcessorMap)
        {
            List<WeakReference<SelectionCommandProcessor>> scpList = myCommandToProcessorMap.computeIfAbsent(command,
                    k -> new LinkedList<>());
            // Make sure we don't already have this processor in our set, remove
            // any garbage collected listeners from the set.
            Iterator<WeakReference<SelectionCommandProcessor>> wrItr = scpList.iterator();
            boolean found = false;
            WeakReference<SelectionCommandProcessor> wr = null;
            SelectionCommandProcessor proc = null;
            while (wrItr.hasNext())
            {
                wr = wrItr.next();
                proc = wr.get();
                if (proc == null)
                {
                    wrItr.remove();
                }
                else if (Utilities.sameInstance(processor, proc))
                {
                    found = true;
                }
            }
            // If we didn't find it in the set already add it.
            if (!found)
            {
                scpList.add(new WeakReference<>(processor));
            }
        }
    }
    /**
     * Register selection command processor.
     *
     * @param command the command to be processed
     * @param processor the processor to process the command
     */
    public void registerLineSelectionCommandProcessor(SelectionCommand command, LineSelectionCommandProcessor processor)
    {
        synchronized (myLineCommandToProcessorMap)
        {
            List<WeakReference<LineSelectionCommandProcessor>> scpList = myLineCommandToProcessorMap.computeIfAbsent(command,
                    k -> new LinkedList<>());
            // Make sure we don't already have this processor in our set, remove
            // any garbage collected listeners from the set.
            Iterator<WeakReference<LineSelectionCommandProcessor>> wrItr = scpList.iterator();
            boolean found = false;
            WeakReference<LineSelectionCommandProcessor> wr = null;
            LineSelectionCommandProcessor proc = null;
            while (wrItr.hasNext())
            {
                wr = wrItr.next();
                proc = wr.get();
                if (proc == null)
                {
                    wrItr.remove();
                }
                else if (Utilities.sameInstance(processor, proc))
                {
                    found = true;
                }
            }
            // If we didn't find it in the set already add it.
            if (!found)
            {
                scpList.add(new WeakReference<>(processor));
            }
        }
    }
    /**
     * Handle the creation of a selection region.
     *
     * @param bounds The bounds of the selection regions.
     * @param command The command causing region creation.
     */
    public void selectionRegionCreated(List<PolygonGeometry> bounds, String command)
    {
        doPurgeCheck(SelectionCommandFactory.getSelectionCommand(command), bounds);
    }
    /**
     * Handle the creation of a selection line.
     *
     * @param bounds The bounds of the selection lines.
     * @param command The command causing line creation.
     */
    public void selectionLineCreated(List<PolylineGeometry> bounds, String command)
    {
        SelectionCommand selectionCommand = SelectionCommandFactory.getSelectionCommand(command);
        notifyLineSelectionCommandProcessors(bounds, selectionCommand);
    }
    /**
     * Uninstall the selection handler.
     *
     * @param toolbox the {@link Toolbox}
     */
    public void uninstall(Toolbox toolbox)
    {
        ContextActionManager actionManager = toolbox.getUIRegistry().getContextActionManager();
        actionManager.deregisterContextMenuItemProvider(ContextIdentifiers.SCREEN_POSITION_CONTEXT,
                ScreenPositionContextKey.class, myDefaultContextMenuProvider);
        actionManager.deregisterContextMenuItemProvider(ContextIdentifiers.GEOMETRY_COMPLETED_CONTEXT, GeometryContextKey.class,
                myGeometryContextMenuProvider);
        actionManager.deregisterContextMenuItemProvider(ContextIdentifiers.GEOMETRY_SELECTION_CONTEXT, GeometryContextKey.class,
                myGeometryContextMenuProvider);
        actionManager.deregisterContextMenuItemProvider(ContextIdentifiers.ROI_CONTEXT, MultiGeometryContextKey.class,
                myMultiGeometryContextMenuProvider);
    }
    /**
     * Unregister selection command processor for the specified command.
     *
     * @param command the command
     * @param processor the processor
     */
    public void unregisterSelectionCommandProcessor(SelectionCommand command, SelectionCommandProcessor processor)
    {
        synchronized (myCommandToProcessorMap)
        {
            List<WeakReference<SelectionCommandProcessor>> scpList = myCommandToProcessorMap.get(command);
            if (scpList != null)
            {
                // Search for the listener in our set and remove if found,
                // also remove any garbage collected listeners from the set.
                Iterator<WeakReference<SelectionCommandProcessor>> wrItr = scpList.iterator();
                WeakReference<SelectionCommandProcessor> wr = null;
                SelectionCommandProcessor lstr = null;
                while (wrItr.hasNext())
                {
                    wr = wrItr.next();
                    lstr = wr.get();
                    if (lstr == null || Utilities.sameInstance(processor, lstr))
                    {
                        wrItr.remove();
                    }
                }
            }
        }
    }
    /**
     * Unregister selection command processor for all commands for which that
     * processors is registered.
     *
     * @param processor the processor
     */
    public void unregisterSelectionCommandProcessor(SelectionCommandProcessor processor)
    {
        synchronized (myCommandToProcessorMap)
        {
            for (SelectionCommand command : SelectionCommandFactory.getAllCommands())
            {
                unregisterSelectionCommandProcessor(command, processor);
            }
        }
    }
    /**
     * Destroy preview.
     */
    private void destroyPreview()
    {
        if (myPreviewGeometry != null)
        {
            unregisterGeometry(myPreviewGeometry);
        }
        myPreviewGeometry = null;
    }
    /**
     * Removes the supplied geometry from display.
     *
     * @param geometry the geometry to remove from display.
     */
    private void unregisterGeometry(Geometry geometry)
    {
        myToolbox.getGeometryRegistry().removeGeometriesForSource(this, Collections.singletonList(geometry));
    }
    /**
     * Method called when a menu button is selected.
     *
     * @param act The action command.
     */
    private void handleCommand(String act)
    {
        assert EventQueue.isDispatchThread();
        destroyPreview();
        SelectionCommand cmd = SelectionCommandFactory.getSelectionCommand(act);
        if (cmd == null)
        {
            return;
        }
        if (myLastGeometry == null)
        {
            if (cmd.equals(SelectionCommandFactory.CREATE_BUFFER_REGION))
            {
                myLastGeometry = myNoGeometryPoint;
            }
            else
            {
                // NOTE(review): execution continues past this branch; with
                // myLastGeometry still null all checks below fall through,
                // so doPurgeCheck is the only effect — confirm intended.
                doPurgeCheck(cmd, null);
            }
        }
        if (cmd.equals(SelectionCommandFactory.CREATE_BUFFER_REGION))
        {
            Quantify.collectMetric("mist3d.select.create-buffer-region");
            if (myLastGeometry instanceof PolylineGeometry && !(myLastGeometry instanceof PolygonGeometry))
            {
                createLineBuffer();
            }
            else
            {
                // check to see if the geometry is a member of a group of
                // multiple geometries. if so, create the buffer for the group
                // instead of the individual geometry:
                myBufferRegionCreator.createBuffer(getCompleteGeometryGroup(myLastGeometry));
            }
        }
        else if (cmd.equals(SelectionCommandFactory.CREATE_BUFFER_REGION_FOR_SELECTED))
        {
            Quantify.collectMetric("mist3d.tracks.create-buffer-for-selected-segment");
            myBufferRegionCreator.createBuffer(myLastGeometry);
        }
        else if (myLastGeometry instanceof PolygonGeometry)
        {
            Set<PolygonGeometry> geom = Collections.singleton((PolygonGeometry)myLastGeometry);
            myLastGeometry = null;
            doPurgeCheck(cmd, geom);
        }
        else if (myLastGeometry instanceof AbstractGeometryGroup)
        {
            if (((AbstractGeometryGroup)myLastGeometry).getGeometries().iterator().next() instanceof PolygonGeometry)
            {
                Set<PolygonGeometry> childGeometries = ((AbstractGeometryGroup)myLastGeometry).getGeometries().stream()
                        .map(g -> (PolygonGeometry)g).collect(Collectors.toSet());
                myLastGeometry = null;
                doPurgeCheck(cmd, childGeometries);
            }
        }
        else if (myLastGeometry instanceof PolylineGeometry || myLastGeometry instanceof PointGeometry)
        {
            processCommand(cmd, myLastGeometry);
        }
    }
    /**
     * Creates a buffer for all segments in a polyline. Uses a mantle lookup to
     * find segments related to the selected segment, then creates the buffer.
     */
    protected void createLineBuffer()
    {
        myLastGeometry = getCompleteGeometryGroup(myLastGeometry);
        myBufferRegionCreator.createBuffer(myLastGeometry);
    }
    /**
     * For the supplied data model ID (which represents a single geometry
     * component, such as a line segment in the case of a displayed track),
     * gather the set of geometries that make up the complete group. The
     * complete group is generated from the set of segments as a
     * {@link GeometryGroupGeometry}.
     *
     * @param pGeometry the geometry component for which the complete group will
     *            be constructed.
     * @return a {@link GeometryGroupGeometry} composed of the set of components
     *         related to the currently selected geometry.
     */
    protected Geometry getCompleteGeometryGroup(Geometry pGeometry)
    {
        DataTypeInfo dataType = myDataTypeController.getDataTypeInfoForGeometryId(pGeometry.getDataModelId());
        if (dataType != null)
        {
            if (pGeometry instanceof PolygonGeometry)
            {
                MapDataElementTransformer transformer = myDataTypeController.getTransformerForType(dataType.getTypeKey());
                MultiPolygonGeometry.Builder<GeographicPosition> builder = new MultiPolygonGeometry.Builder<>(
                        GeographicPosition.class);
                Collection<PolygonGeometry> geometries = myToolbox.getGeometryRegistry().getGeometriesForSource(transformer,
                        PolygonGeometry.class);
                builder.setInitialGeometries(geometries);
                return new MultiPolygonGeometry(builder, ((PolygonGeometry)pGeometry).getRenderProperties(), null);
            }
            MapDataElementTransformer transformer = myDataTypeController.getTransformerForType(dataType.getTypeKey());
            GeometryGroupGeometry.Builder builder = new GeometryGroupGeometry.Builder(GeographicPosition.class);
            builder.setInitialGeometries(myToolbox.getGeometryRegistry()
                    .getGeometriesForSource(transformer, PolylineGeometry.class).stream().collect(Collectors.toList()));
            return new GeometryGroupGeometry(builder, pGeometry.getRenderProperties());
        }
        return pGeometry;
    }
    /**
     * Processes a command against a single polyline or point geometry
     * (zoom, center, or buffer creation).
     *
     * @param cmd the selection command to execute
     * @param geometry the geometry the command applies to
     */
    private void processCommand(SelectionCommand cmd, Geometry geometry)
    {
        if (cmd.equals(SelectionCommandFactory.ZOOM))
        {
            List<Position> positions = New.list();
            if (geometry instanceof PolylineGeometry)
            {
                positions.addAll(((PolylineGeometry)geometry).getVertices());
            }
            else if (geometry instanceof PointGeometry)
            {
                positions.add(((PointGeometry)geometry).getPosition());
            }
            ViewerAnimator animator = new ViewerAnimator(myToolbox.getMapManager().getStandardViewer(), positions, true);
            animator.start();
        }
        else if (cmd.equals(SelectionCommandFactory.CENTER))
        {
            List<Position> positions = New.list();
            if (geometry instanceof PolylineGeometry)
            {
                positions.addAll(((PolylineGeometry)geometry).getVertices());
            }
            else if (geometry instanceof PointGeometry)
            {
                positions.add(((PointGeometry)geometry).getPosition());
            }
            ViewerAnimator animator = new ViewerAnimator(myToolbox.getMapManager().getStandardViewer(), positions, false);
            animator.start();
        }
        else if (cmd.equals(SelectionCommandFactory.CREATE_BUFFER_REGION))
        {
            myBufferRegionCreator.createBuffer(myLastGeometry);
        }
    }
    /**
     * Do purge check.
     *
     * @param cmd the cmd
     * @param selectionBounds the selection bounds
     */
    private void doPurgeCheck(SelectionCommand cmd, Collection<? extends PolygonGeometry> selectionBounds)
    {
        if (cmd == null)
        {
            return;
        }
        /* Do a special confirmation for purge here before notifying the command
         * processors. Probably not the best way to do this, refactor later into
         * something more generic. */
        if (!cmd.equals(SelectionCommandFactory.REMOVE_ALL))
        {
            // special case: deselect with no bounds, so deselect all
            if (selectionBounds == null && cmd.equals(SelectionCommandFactory.DESELECT))
            {
                myDataElementUpdateUtils.setDataElementsSelected(false, myDataElementCache.getAllElementIdsAsList(), null, this);
            }
            else
            {
                notifySelectionCommandProcessors(selectionBounds, cmd);
            }
        }
        else if (PurgeConfirmHelper.confirmProceedWithPurge(myToolbox, null, this))
        {
            notifySelectionCommandProcessors(selectionBounds, cmd);
        }
    }
    /**
     * Notify selection command processors.
     *
     * @param bounds the bounds
     * @param command the command
     */
    private void notifySelectionCommandProcessors(Collection<? extends PolygonGeometry> bounds, SelectionCommand command)
    {
        myExecutor.execute(() ->
        {
            synchronized (myCommandToProcessorMap)
            {
                List<WeakReference<SelectionCommandProcessor>> wrList = myCommandToProcessorMap.get(command);
                if (wrList != null && !wrList.isEmpty())
                {
                    Iterator<WeakReference<SelectionCommandProcessor>> wrItr = wrList.iterator();
                    while (wrItr.hasNext())
                    {
                        SelectionCommandProcessor lstr = wrItr.next().get();
                        if (lstr == null)
                        {
                            wrItr.remove();
                        }
                        else
                        {
                            lstr.selectionOccurred(bounds, command);
                        }
                    }
                }
            }
        });
    }
    /**
     * Notify line selection command processors.
     *
     * @param bounds the bounds
     * @param command the command
     */
    private void notifyLineSelectionCommandProcessors(Collection<? extends PolylineGeometry> bounds, SelectionCommand command)
    {
        myExecutor.execute(() ->
        {
            synchronized (myLineCommandToProcessorMap)
            {
                List<WeakReference<LineSelectionCommandProcessor>> wrList = myLineCommandToProcessorMap.get(command);
                if (wrList != null && !wrList.isEmpty())
                {
                    Iterator<WeakReference<LineSelectionCommandProcessor>> wrItr = wrList.iterator();
                    while (wrItr.hasNext())
                    {
                        LineSelectionCommandProcessor lstr = wrItr.next().get();
                        if (lstr == null)
                        {
                            wrItr.remove();
                        }
                        else
                        {
                            lstr.selectionOccurred(bounds, command);
                        }
                    }
                }
            }
        });
    }
    /** Action listener for actions on the menu items. */
    private final class PolygonCommandActionListener implements ActionListener
    {
        /** The geometry associated with the menu action. */
        private final List<PolygonGeometry> myGeometries = new LinkedList<>();
        /**
         * Constructor.
         *
         * @param geoms The geometries associated with the menu action.
         */
        public PolygonCommandActionListener(Collection<? extends Geometry> geoms)
        {
            geoms.stream().filter(g -> g instanceof PolygonGeometry).map(g -> (PolygonGeometry)g).forEach(myGeometries::add);
        }
        @Override
        public void actionPerformed(ActionEvent evt)
        {
            selectionRegionCreated(myGeometries, ((JMenuItem)evt.getSource()).getActionCommand());
        }
    }
    /** Action listener for actions on the menu items. */
    private final class PolylineCommandActionListener implements ActionListener
    {
        /** The geometry associated with the menu action. */
        private final List<PolylineGeometry> myGeometries = new LinkedList<>();
        /**
         * Constructor.
         *
         * @param geoms The geometries associated with the menu action.
         */
        public PolylineCommandActionListener(Collection<? extends Geometry> geoms)
        {
            geoms.stream().filter(g -> g instanceof PolylineGeometry).map(g -> (PolylineGeometry)g).forEach(myGeometries::add);
        }
        @Override
        public void actionPerformed(ActionEvent evt)
        {
            selectionLineCreated(myGeometries, ((JMenuItem)evt.getSource()).getActionCommand());
        }
    }
    /**
     * Gets menu items for geometry.
     *
     * @param geom the geometry
     * @return menu items
     */
    public List<Component> getGeometryMenuItems(Geometry geom)
    {
        List<Component> menuItems = New.list();
        if (myQueryRegionManager.getQueryRegion(geom) != null)
        {
            myLastGeometry = geom;
            menuItems = SelectionCommandFactory.getQueryRegionMenuItems(myMenuActionListener, hasLoadFilters());
        }
        else if (geom instanceof PolygonGeometry)
        {
            myLastGeometry = getCompleteGeometryGroup(geom);
            menuItems = SelectionCommandFactory.getPolygonMenuItems(myMenuActionListener, hasLoadFilters(), false);
        }
        else if (geom instanceof PolylineGeometry)
        {
            myLastGeometry = geom;
            menuItems = SelectionCommandFactory.getPolylineMenuItems(myMenuActionListener);
        }
        else if (geom instanceof PointGeometry)
        {
            myLastGeometry = geom;
            menuItems = SelectionCommandFactory.getPointMenuItems(myMenuActionListener);
        }
        else if (geom instanceof GeometryGroupGeometry)
        {
            myLastGeometry = geom;
            if (!((GeometryGroupGeometry)geom).getGeometries().isEmpty())
            {
                // assumption: All geometries contained within the group are of
                // the same type
                Geometry sampleGeometry = ((GeometryGroupGeometry)geom).getGeometries().iterator().next();
                if (sampleGeometry instanceof PolygonGeometry)
                {
                    menuItems = SelectionCommandFactory.getPolygonMenuItems(myMenuActionListener, hasLoadFilters(), false);
                }
                else if (sampleGeometry instanceof PolylineGeometry)
                {
                    menuItems = SelectionCommandFactory.getPolylineMenuItems(myMenuActionListener);
                }
                else if (sampleGeometry instanceof PointGeometry)
                {
                    menuItems = SelectionCommandFactory.getPointMenuItems(myMenuActionListener);
                }
                else
                {
                    LOGGER.warn("Unrecognized geometry group type: '" + geom.getClass().getName()
                            + "' cannot be used to create a buffer.");
                }
            }
        }
        else
        {
            LOGGER.warn("Unrecognized geometry type: '" + geom.getClass().getName() + "' cannot be used to create a buffer.");
        }
        return menuItems;
    }
    /**
     * Gets menu when you have multiple geometries.
     *
     * @param geometries the geometries
     * @return menuItems the menu
     */
    public List<Component> getMultiGeometryMenu(Collection<? extends Geometry> geometries)
    {
        return SelectionCommandFactory.getRoiMenuItems(new PolygonCommandActionListener(geometries), hasLoadFilters());
    }
}
|
<reponame>feeedback/hexlet_professions_backend
// sc: https://ru.hexlet.io/courses/js-testing/lessons/matchers/exercise_unit
// tests/gt.test.js
// Напишите тесты для функции _.gt(value, other), которая возвращает true в том случае,
// если value > other, и false в иных случаях.
// gt(3, 1); // true
// gt(3, 3); // false
// gt(1, 3); // false
// Подсказки
// _.gt https://lodash.com/docs/4.17.15#gt
// @ts-check
// Pull the solution under test out of the exercise scaffold.
const getFunction = require('../functions');
const gt = getFunction();
// BEGIN (write your solution here)
test('gt', () => {
  // Each case: [value, other, expected result of gt(value, other)].
  const cases = [
    [3, 1, true],
    [3, 3, false],
    [1, 3, false],
  ];
  cases.forEach(([value, other, expected]) => {
    expect(gt(value, other)).toBe(expected);
  });
});
// END
|
<filename>src/gopar/invalid_construct_pass.go
// Invalid constructs pass
//
//
package main
import (
"fmt"
"go/ast"
"go/token"
)
// InvalidConstructPass scans basic blocks for Go constructs this tool cannot
// translate (closures, select/go/goto/defer statements, unknown calls).
type InvalidConstructPass struct {
	BasePass
}
// NewInvalidConstructPass constructs the pass with a fresh BasePass.
func NewInvalidConstructPass() *InvalidConstructPass {
	return &InvalidConstructPass{
		BasePass: NewBasePass(),
	}
}
// GetPassType identifies this pass as the invalid-construct pass.
func (pass *InvalidConstructPass) GetPassType() PassType {
	return InvalidConstructPassType
}
// GetPassMode reports that this pass runs once per basic block.
func (pass *InvalidConstructPass) GetPassMode() PassMode {
	return BasicBlockPassMode
}
// GetDependencies requires the basic-block pass to have run first.
func (pass *InvalidConstructPass) GetDependencies() []PassType {
	return []PassType{BasicBlockPassType}
}
// RunBasicBlockPass walks the block's AST and records every construct that
// prevents translation, storing the offending names under
// InvalidConstructPassType.
func (pass *InvalidConstructPass) RunBasicBlockPass(block *BasicBlock, p *Package) BasicBlockVisitor {
	var external []string
	ast.Inspect(block.node, func(n ast.Node) bool {
		// Inspect calls the visitor with nil when leaving a subtree.
		if n == nil {
			return false
		}
		switch t := n.(type) {
		case *ast.CallExpr:
			switch f := t.Fun.(type) {
			case *ast.Ident:
				// Only package-local functions or builtins with known
				// translations are supported.
				name := f.Name
				if p.Lookup(name) == nil {
					if !builtinTranslated[name] {
						fmt.Println("Untranslatable function", name)
						external = append(external, name)
					}
				} else {
					fmt.Println("Found supporting function", name)
				}
			default:
				// Method calls, function values, etc. are not translatable.
				fmt.Println("Unsupported function call", f)
				external = append(external, "<anonymous>")
			}
		case *ast.FuncLit:
			fmt.Println("Embedded function")
			external = append(external, "<embedded function>")
		case *ast.SelectStmt:
			fmt.Println("Select stmt")
			external = append(external, "select stmt")
		case *ast.GoStmt:
			fmt.Println("Go stmt")
			external = append(external, "go stmt")
		case *ast.BranchStmt:
			// break/continue are fine; only goto is rejected.
			if t.Tok == token.GOTO {
				fmt.Println("Goto stmt")
				external = append(external, "goto stmt")
			}
		case *ast.DeferStmt:
			fmt.Println("Defer stmt")
			external = append(external, "defer stmt")
		}
		return true
	})
	block.Print("External dependencies", external)
	block.Set(InvalidConstructPassType, external)
	return DefaultBasicBlockVisitor{}
}
|
#!/bin/bash -x
#
# Generated - do not edit!
# (NetBeans packaging script: copies the Release build output into a staging
# dir and bundles it into a tar under dist/; left byte-identical on review.)
#
# Macros
TOP=`pwd`
CND_PLATFORM=GNU-MacOSX
CND_CONF=Release
CND_DISTDIR=dist
CND_BUILDDIR=build
CND_DLIB_EXT=dylib
NBTMPDIR=${CND_BUILDDIR}/${CND_CONF}/${CND_PLATFORM}/tmp-packaging
TMPDIRNAME=tmp-packaging
OUTPUT_PATH=${CND_DISTDIR}/${CND_CONF}/${CND_PLATFORM}/hw08_-_abstract_factory_pattern
OUTPUT_BASENAME=hw08_-_abstract_factory_pattern
PACKAGE_TOP_DIR=hw08-abstractfactorypattern/
# Functions
# Aborts the script if the previous command failed.
function checkReturnCode
{
rc=$?
if [ $rc != 0 ]
then
exit $rc
fi
}
function makeDirectory
# $1 directory path
# $2 permission (optional)
{
mkdir -p "$1"
checkReturnCode
if [ "$2" != "" ]
then
chmod $2 "$1"
checkReturnCode
fi
}
function copyFileToTmpDir
# $1 from-file path
# $2 to-file path
# $3 permission
{
cp "$1" "$2"
checkReturnCode
if [ "$3" != "" ]
then
chmod $3 "$2"
checkReturnCode
fi
}
# Setup
cd "${TOP}"
mkdir -p ${CND_DISTDIR}/${CND_CONF}/${CND_PLATFORM}/package
rm -rf ${NBTMPDIR}
mkdir -p ${NBTMPDIR}
# Copy files and create directories and links
cd "${TOP}"
makeDirectory "${NBTMPDIR}/hw08-abstractfactorypattern/bin"
copyFileToTmpDir "${OUTPUT_PATH}" "${NBTMPDIR}/${PACKAGE_TOP_DIR}bin/${OUTPUT_BASENAME}" 0755
# Generate tar file
cd "${TOP}"
rm -f ${CND_DISTDIR}/${CND_CONF}/${CND_PLATFORM}/package/hw08-abstractfactorypattern.tar
cd ${NBTMPDIR}
tar -vcf ../../../../${CND_DISTDIR}/${CND_CONF}/${CND_PLATFORM}/package/hw08-abstractfactorypattern.tar *
checkReturnCode
# Cleanup
cd "${TOP}"
rm -rf ${NBTMPDIR}
<filename>343 integer-break/javascript/solution1.js
/**
* @param {number} n
* @return {number}
*/
var integerBreak = function(n) {
if(n == 2) return 1;
if(n == 3) return 2;
if(n == 4) return 4;
var product = 1;
while(n > 4) {
product *= 3;
n -= 3;
}
product *= n;
return product;
};
// Why are 3 and 4 the cut-off points? Splitting into factors of 3 maximizes
// the product, and a remainder of 4 is worth more kept whole (4 = 2*2) than
// split as 3*1.
|
/**
 * Quirk-based browser sniffing: each flag probes a vendor-specific global, so
 * results are only as reliable as those quirks in the running browser.
 * NOTE(review): window.chrome.webstore was removed from modern Chrome, so
 * isChrome may report false there — confirm against target browsers.
 * @returns {{isOpera: boolean, isFirefox: boolean, isSafari: boolean,
 *            isIE: boolean, isEdge: boolean, isChrome: boolean,
 *            isBlink: boolean}}
 */
const detectBrowser = () => {
    // Opera 8.0+
    let isOpera = (!!window.opr && !!opr.addons) || !!window.opera || navigator.userAgent.indexOf(' OPR/') >= 0;
    // Firefox 1.0+
    let isFirefox = typeof InstallTrigger !== 'undefined';
    // Safari 3.0+ "[object HTMLElementConstructor]"
    let isSafari = /constructor/i.test(window.HTMLElement) || (function (p) { return p.toString() === "[object SafariRemoteNotification]"; })(!window['safari'] || (typeof safari !== 'undefined' && safari.pushNotification));
    // Internet Explorer 6-11 (the /*@cc_on!@*/ conditional-compilation trick
    // evaluates to true only in JScript engines — do not reformat it)
    let isIE = /*@cc_on!@*/false || !!document.documentMode;
    // Edge 20+ (legacy EdgeHTML)
    let isEdge = !isIE && !!window.StyleMedia;
    // Chrome 1+
    let isChrome = !!window.chrome && !!window.chrome.webstore;
    // Blink engine detection
    let isBlink = (isChrome || isOpera || isEdge) && !!window.CSS;
    return {
        isOpera,
        isFirefox,
        isSafari,
        isIE,
        isEdge,
        isChrome,
        isBlink
    };
};
<filename>targets/TARGET_Atmel/TARGET_SAM_CortexM0P/utils/cmsis/TARGET_SAML21/include/instance/ins_pac.h<gh_stars>10-100
/**
* \file
*
* \brief Instance description for PAC
*
* Copyright (c) 2014-2015 Atmel Corporation. All rights reserved.
*
* \asf_license_start
*
* \page License
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* 3. The name of Atmel may not be used to endorse or promote products derived
* from this software without specific prior written permission.
*
* 4. This software may only be redistributed and used in connection with an
* Atmel microcontroller product.
*
* THIS SOFTWARE IS PROVIDED BY ATMEL "AS IS" AND ANY EXPRESS OR IMPLIED
* WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT ARE
* EXPRESSLY AND SPECIFICALLY DISCLAIMED. IN NO EVENT SHALL ATMEL BE LIABLE FOR
* ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
* ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
* POSSIBILITY OF SUCH DAMAGE.
*
* \asf_license_stop
*
*/
/*
* Support and FAQ: visit <a href="http://www.atmel.com/design-support/">Atmel Support</a>
*/
#ifndef _SAML21_PAC_INSTANCE_
#define _SAML21_PAC_INSTANCE_
/* ========== Register definition for PAC peripheral ========== */
/* Assemblers only get the raw addresses; C/C++ builds get typed
   dereferenced register accessors at the same addresses (base 0x44000000). */
#if (defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__))
#define REG_PAC_WRCTRL             (0x44000000U) /**< \brief (PAC) Write control */
#define REG_PAC_EVCTRL             (0x44000004U) /**< \brief (PAC) Event control */
#define REG_PAC_INTENCLR           (0x44000008U) /**< \brief (PAC) Interrupt enable clear */
#define REG_PAC_INTENSET           (0x44000009U) /**< \brief (PAC) Interrupt enable set */
#define REG_PAC_INTFLAGAHB         (0x44000010U) /**< \brief (PAC) Bridge interrupt flag status */
#define REG_PAC_INTFLAGA           (0x44000014U) /**< \brief (PAC) Peripheral interrupt flag status - Bridge A */
#define REG_PAC_INTFLAGB           (0x44000018U) /**< \brief (PAC) Peripheral interrupt flag status - Bridge B */
#define REG_PAC_INTFLAGC           (0x4400001CU) /**< \brief (PAC) Peripheral interrupt flag status - Bridge C */
#define REG_PAC_INTFLAGD           (0x44000020U) /**< \brief (PAC) Peripheral interrupt flag status - Bridge D */
#define REG_PAC_INTFLAGE           (0x44000024U) /**< \brief (PAC) Peripheral interrupt flag status - Bridge E */
#define REG_PAC_STATUSA            (0x44000034U) /**< \brief (PAC) Peripheral write protection status - Bridge A */
#define REG_PAC_STATUSB            (0x44000038U) /**< \brief (PAC) Peripheral write protection status - Bridge B */
#define REG_PAC_STATUSC            (0x4400003CU) /**< \brief (PAC) Peripheral write protection status - Bridge C */
#define REG_PAC_STATUSD            (0x44000040U) /**< \brief (PAC) Peripheral write protection status - Bridge D */
#define REG_PAC_STATUSE            (0x44000044U) /**< \brief (PAC) Peripheral write protection status - Bridge E */
#else
#define REG_PAC_WRCTRL             (*(RwReg  *)0x44000000U) /**< \brief (PAC) Write control */
#define REG_PAC_EVCTRL             (*(RwReg8 *)0x44000004U) /**< \brief (PAC) Event control */
#define REG_PAC_INTENCLR           (*(RwReg8 *)0x44000008U) /**< \brief (PAC) Interrupt enable clear */
#define REG_PAC_INTENSET           (*(RwReg8 *)0x44000009U) /**< \brief (PAC) Interrupt enable set */
#define REG_PAC_INTFLAGAHB         (*(RwReg  *)0x44000010U) /**< \brief (PAC) Bridge interrupt flag status */
#define REG_PAC_INTFLAGA           (*(RwReg  *)0x44000014U) /**< \brief (PAC) Peripheral interrupt flag status - Bridge A */
#define REG_PAC_INTFLAGB           (*(RwReg  *)0x44000018U) /**< \brief (PAC) Peripheral interrupt flag status - Bridge B */
#define REG_PAC_INTFLAGC           (*(RwReg  *)0x4400001CU) /**< \brief (PAC) Peripheral interrupt flag status - Bridge C */
#define REG_PAC_INTFLAGD           (*(RwReg  *)0x44000020U) /**< \brief (PAC) Peripheral interrupt flag status - Bridge D */
#define REG_PAC_INTFLAGE           (*(RwReg  *)0x44000024U) /**< \brief (PAC) Peripheral interrupt flag status - Bridge E */
#define REG_PAC_STATUSA            (*(RoReg  *)0x44000034U) /**< \brief (PAC) Peripheral write protection status - Bridge A */
#define REG_PAC_STATUSB            (*(RoReg  *)0x44000038U) /**< \brief (PAC) Peripheral write protection status - Bridge B */
#define REG_PAC_STATUSC            (*(RoReg  *)0x4400003CU) /**< \brief (PAC) Peripheral write protection status - Bridge C */
#define REG_PAC_STATUSD            (*(RoReg  *)0x44000040U) /**< \brief (PAC) Peripheral write protection status - Bridge D */
#define REG_PAC_STATUSE            (*(RoReg  *)0x44000044U) /**< \brief (PAC) Peripheral write protection status - Bridge E */
#endif /* (defined(__ASSEMBLY__) || defined(__IAR_SYSTEMS_ASM__)) */
/* ========== Instance parameters for PAC peripheral ========== */
#define PAC_CLK_AHB_DOMAIN                   // Clock domain of AHB clock
#define PAC_CLK_AHB_ID              14       // AHB clock index
#define PAC_HPB_NUM                 5        // Number of bridges AHB/APB
#define PAC_INTFLAG_NUM             6        // Number of intflag registers
#endif /* _SAML21_PAC_INSTANCE_ */
|
# MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
#
# Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
from ..functional.elemwise import _elwise
from ..tensor import Tensor
from .module import Module
class Elemwise(Module):
    r"""A :class:`~.Module` wrapper around the :mod:`~.functional.elemwise`
    operator. Could be replaced with a :class:`~.QATModule` version
    :class:`~.qat.Elemwise` using :func:`~.quantize.quantize_qat`.

    Args:
        method: the elemwise method, support the following string.
            It will do the normal elemwise operator for float.
    """

    def __init__(self, method, **kwargs):
        super().__init__(**kwargs)
        # Mode string forwarded verbatim to ``_elwise`` on every call.
        self.method = method

    def forward(self, *inputs):
        return _elwise(*inputs, mode=self.method)
|
<reponame>HeQuanX/study
package cn.crabapples.common.config.datasource.pkg;
import cn.crabapples.common.config.datasource.aop.DynamicDataSourceContextHolder;
import com.alibaba.druid.pool.DruidDataSource;
import com.alibaba.druid.spring.boot.autoconfigure.DruidDataSourceBuilder;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Primary;
import javax.sql.DataSource;
/**
 * TODO Data source configuration.
 *
 * <p>Registers two Druid data sources (a primary and a secondary one) and
 * records their bean names in {@code DynamicDataSourceContextHolder} so they
 * can be selected dynamically at runtime.
 *
 * @author Mr.He
 * 9/5/20 2:34 PM
 * e-mail <EMAIL>
 * qq 294046317
 * pc-name root
 */
@Slf4j
@Configuration
public class DataSourceConfigure {
//    @Value("${spring.datasource.druid.loginUsername}")
//    String loginUsername;
//    @Value("${spring.datasource.druid.loginPassword}")
//    String loginPassword;
//    @Value("${spring.datasource.druid.deny}")
//    String deny;
//    @Value("${spring.datasource.druid.allow}")
//    String allow;
//    @Value("${spring.datasource.druid.resetEnable}")
//    String resetEnable;
//    @Value("${spring.datasource.druid.maxActive:}")
//    String maxActive;
//    @Bean
//    public ServletRegistrationBean druidServlet() { // configuration for the Druid web monitoring console
//        ServletRegistrationBean servletRegistrationBean = new ServletRegistrationBean(
//                new StatViewServlet(), "/druid/*"); // register Druid's StatViewServlet for monitoring
//        servletRegistrationBean.addInitParameter("allow", allow); // IP whitelist
//        servletRegistrationBean.addInitParameter("deny", deny); // IP blacklist
//        servletRegistrationBean.addInitParameter("loginUsername", loginUsername); // console username
//        servletRegistrationBean.addInitParameter("loginPassword", loginPassword); // console password
//        servletRegistrationBean.addInitParameter("resetEnable", resetEnable); // whether the data source can be reset
//        servletRegistrationBean.addInitParameter("max-active", maxActive); // maximum connections
//        return servletRegistrationBean;
//    }

    /**
     * Primary data source, bound to the {@code spring.datasource.druid} properties.
     *
     * @return the Druid-backed primary {@link DataSource}
     */
    @Primary
    @Bean(value = "firstDataSource")
    @ConfigurationProperties("spring.datasource.druid")
    public DataSource dataSourceOne() {
        log.info("Init firstDataSource");
        DruidDataSource dataSource = DruidDataSourceBuilder.create().build();
        DynamicDataSourceContextHolder.dataSourceIds.add("firstDataSource");
        return dataSource;
    }

    /**
     * Secondary data source, bound to the {@code spring.datasource.druid.second} properties.
     *
     * @return the Druid-backed secondary {@link DataSource}
     */
    @Bean(value = "secondDataSource")
    @ConfigurationProperties("spring.datasource.druid.second")
    public DataSource dataSourceTwo() {
        log.info("Init DataSourceTwo");
        DruidDataSource dataSource = DruidDataSourceBuilder.create().build();
        DynamicDataSourceContextHolder.dataSourceIds.add("secondDataSource");
        return dataSource;
    }
}
|
#!/usr/bin/env bash
# Release metadata consumed by the shared publish tooling.
PY_PACKAGE="peek_agent"
# "1" enables publishing the built package to PyPI.
PYPI_PUBLISH="1"
# Files whose embedded version strings must be updated and committed (none here).
VER_FILES_TO_COMMIT=""
VER_FILES=""
|
<gh_stars>0
package com.ing.baker.runtime.serialization.protomappings
import akka.actor.ActorRef
import com.ing.baker.runtime.serialization.ProtoMap.versioned
import com.ing.baker.runtime.akka.actor.protobuf
import com.ing.baker.runtime.akka.actor.protobuf.ActorRefId
import com.ing.baker.runtime.serialization.{ProtoMap, SerializersProvider}
import scala.util.Try
/**
 * Maps an Akka [[ActorRef]] to/from its protobuf representation by
 * (de)serializing the actor's full path string.
 */
class ActorRefMapping(provider: SerializersProvider) extends ProtoMap[ActorRef, protobuf.ActorRefId] {

  val companion = protobuf.ActorRefId

  override def toProto(a: ActorRef): protobuf.ActorRefId = {
    val serializedPath = akka.serialization.Serialization.serializedActorPath(a)
    protobuf.ActorRefId(Some(serializedPath))
  }

  override def fromProto(message: ActorRefId): Try[ActorRef] =
    versioned(message.identifier, "identifier").flatMap { identifier =>
      Try(provider.actorRefProvider.resolveActorRef(identifier))
    }
}
|
def isArmstrong(num):
    """Return True if ``num`` is an Armstrong (narcissistic) number.

    An Armstrong number equals the sum of its own digits, each raised to
    the power of the digit count, e.g. 153 = 1**3 + 5**3 + 3**3.

    Args:
        num: a non-negative integer.

    Returns:
        bool: whether ``num`` is an Armstrong number.
    """
    digit_sum = 0  # renamed from ``sum`` to avoid shadowing the builtin
    temp = num
    order = len(str(num))
    while temp > 0:
        temp, rem = divmod(temp, 10)
        digit_sum += rem ** order
    # Return the comparison directly instead of an if/else returning True/False.
    return num == digit_sum


# Bug fix: the original called ``isArmstrong(num)`` with ``num`` undefined,
# which raised NameError. Use a concrete example value instead.
print(isArmstrong(153))
import time


def measure_time(func=None, *args, **kwargs):
    """Measure the wall-clock time of calling ``func(*args, **kwargs)``.

    Uses :func:`time.perf_counter`, which is monotonic and has the highest
    available resolution, instead of :func:`time.time`, which can jump
    backwards if the system clock is adjusted.

    Args:
        func: optional callable to time. When omitted (the original
            behaviour), only the timer overhead is measured.

    Returns:
        float: elapsed time in fractional seconds (always >= 0).
    """
    start = time.perf_counter()
    if func is not None:
        func(*args, **kwargs)
    end = time.perf_counter()
    return end - start


execution_time = measure_time()
print("Time taken:", execution_time, "seconds")
import React from 'react'
const Link = ({href, target, onClick, text}) => {
return(
<a href={href} target={target} onClick={onClick}>
{text ? text : href}
</a>
)
}
export default Link |
/*
* Copyright 2014-2016 CyberVision, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.kaaproject.kaa.server.admin.client.mvp.view.endpoint;
import org.kaaproject.avro.ui.gwt.client.widget.SizedTextArea;
import org.kaaproject.avro.ui.gwt.client.widget.SizedTextBox;
import org.kaaproject.avro.ui.gwt.client.widget.grid.AbstractGrid;
import org.kaaproject.kaa.common.dto.TopicDto;
import org.kaaproject.kaa.server.admin.client.mvp.view.EndpointGroupView;
import org.kaaproject.kaa.server.admin.client.mvp.view.base.BaseDetailsViewImpl;
import org.kaaproject.kaa.server.admin.client.mvp.view.config.ConfigurationStructGrid;
import org.kaaproject.kaa.server.admin.client.mvp.view.profile.ProfileFilterStructGrid;
import org.kaaproject.kaa.server.admin.client.mvp.view.topic.TopicGrid;
import org.kaaproject.kaa.server.admin.client.mvp.view.widget.KaaAdminSizedTextBox;
import org.kaaproject.kaa.server.admin.client.util.Utils;
import com.google.gwt.dom.client.Element;
import com.google.gwt.dom.client.Style.Unit;
import com.google.gwt.dom.client.Style.VerticalAlign;
import com.google.gwt.event.dom.client.HasClickHandlers;
import com.google.gwt.user.client.ui.Button;
import com.google.gwt.user.client.ui.CheckBox;
import com.google.gwt.user.client.ui.HasHorizontalAlignment;
import com.google.gwt.user.client.ui.HasValue;
import com.google.gwt.user.client.ui.HasVerticalAlignment;
import com.google.gwt.user.client.ui.IntegerBox;
import com.google.gwt.user.client.ui.Label;
/**
 * GWT view for creating and inspecting an endpoint group: name/weight/description
 * fields plus grids for profile filters, configurations and notification topics.
 * Layout is built row-by-row into {@code detailsTable} (inherited from the base view).
 */
public class EndpointGroupViewImpl extends BaseDetailsViewImpl implements EndpointGroupView {
    private static final String REQUIRED = Utils.avroUiStyle.requiredField();
    private Label nameLabel;
    private SizedTextBox name;
    private Label weightLabel;
    private IntegerBox weight;
    private SizedTextArea description;
    private SizedTextBox createdUsername;
    private SizedTextBox createdDateTime;
    private Label profileFiltersLabel;
    private CheckBox includeDeprecatedProfileFilters;
    private ProfileFilterStructGrid profileFiltersGrid;
    private CheckBox includeDeprecatedConfigurations;
    private ConfigurationStructGrid configurationsGrid;
    private TopicGrid topicsGrid;
    private Button addProfileFilterButton;
    private Button addConfigurationButton;
    private Button addTopicButton;
    public EndpointGroupViewImpl(boolean create) {
        super(create);
    }
    @Override
    protected String getCreateTitle() {
        return Utils.constants.addNewEndpointGroup();
    }
    @Override
    protected String getViewTitle() {
        return Utils.constants.endpointGroup();
    }
    @Override
    protected String getSubTitle() {
        return Utils.constants.endpointGroupDetails();
    }
    /**
     * Builds the details table. Rows are appended in order via the {@code row}
     * counter; several widgets are hidden in "create" mode (author, dates, grids).
     */
    @Override
    protected void initDetailsTable() {
        if (!create) {
            detailsTable.getColumnFormatter().setWidth(2, "200px");
        }
        int row = 0;
        // Read-only author/created rows, shown only when viewing an existing group.
        Label authorLabel = new Label(Utils.constants.author());
        createdUsername = new KaaAdminSizedTextBox(-1, false);
        createdUsername.setWidth("100%");
        detailsTable.setWidget(row, 0, authorLabel);
        detailsTable.setWidget(row, 1, createdUsername);
        authorLabel.setVisible(!create);
        createdUsername.setVisible(!create);
        row++;
        Label dateTimeCreatedLabel = new Label(Utils.constants.dateTimeCreated());
        createdDateTime = new KaaAdminSizedTextBox(-1, false);
        createdDateTime.setWidth("100%");
        detailsTable.setWidget(row, 0, dateTimeCreatedLabel);
        detailsTable.setWidget(row, 1, createdDateTime);
        dateTimeCreatedLabel.setVisible(!create);
        createdDateTime.setVisible(!create);
        row++;
        // Editable name and weight rows; input/change handlers drive dirty tracking.
        name = new KaaAdminSizedTextBox(DEFAULT_TEXTBOX_SIZE);
        name.setWidth("100%");
        nameLabel = new Label(Utils.constants.name());
        detailsTable.setWidget(row, 0, nameLabel);
        detailsTable.setWidget(row, 1, name);
        name.addInputHandler(this);
        row++;
        weight = new IntegerBox();
        weight.setWidth("100%");
        weightLabel = new Label(Utils.constants.weight());
        detailsTable.setWidget(row, 0, weightLabel);
        detailsTable.setWidget(row, 1, weight);
        weight.addChangeHandler(this);
        row++;
        description = new SizedTextArea(1024);
        description.setWidth("100%");
        description.getTextArea().getElement().getStyle().setPropertyPx("minHeight", 100);
        description.getTextArea().getElement().getStyle().setPropertyPx("maxWidth", 487);
        Label descriptionLabel = new Label(Utils.constants.description());
        detailsTable.setWidget(row, 0, descriptionLabel);
        detailsTable.setWidget(row, 1, description);
        detailsTable.getFlexCellFormatter().setColSpan(row, 1, 2);
        description.addInputHandler(this);
        detailsTable.getCellFormatter().setVerticalAlignment(row, 0, HasVerticalAlignment.ALIGN_TOP);
        row++;
        // Profile filters section: header + deprecated toggle, grid, add button.
        profileFiltersGrid = new ProfileFilterStructGrid();
        profileFiltersGrid.setSize("700px", "200px");
        profileFiltersLabel = new Label(Utils.constants.profileFilters());
        profileFiltersLabel.addStyleName(Utils.kaaAdminStyle.bAppContentTitleLabel());
        includeDeprecatedProfileFilters = new CheckBox(Utils.constants.includeDeprecated());
        setCheckBoxStyle(includeDeprecatedProfileFilters);
        addProfileFilterButton = new Button(Utils.constants.addProfileFilter());
        addProfileFilterButton.addStyleName(Utils.kaaAdminStyle.bAppButtonSmall());
        detailsTable.setWidget(row, 0, profileFiltersLabel);
        profileFiltersLabel.getElement().getParentElement().getStyle().setPropertyPx("paddingBottom", 10);
        detailsTable.setWidget(row, 1, includeDeprecatedProfileFilters);
        row++;
        detailsTable.setWidget(row, 0, profileFiltersGrid);
        detailsTable.getFlexCellFormatter().setColSpan(row, 0, 3);
        row++;
        detailsTable.setWidget(row, 2, addProfileFilterButton);
        addProfileFilterButton.getElement().getParentElement().getStyle().setPropertyPx("paddingTop", 15);
        detailsTable.getCellFormatter().setHorizontalAlignment(row, 2, HasHorizontalAlignment.ALIGN_RIGHT);
        profileFiltersLabel.setVisible(!create);
        includeDeprecatedProfileFilters.setVisible(!create);
        profileFiltersGrid.setVisible(!create);
        row++;
        // Configurations section, mirrors the profile filters section layout.
        configurationsGrid = new ConfigurationStructGrid();
        configurationsGrid.setSize("700px", "200px");
        Label configurationsLabel = new Label(Utils.constants.configurations());
        configurationsLabel.addStyleName(Utils.kaaAdminStyle.bAppContentTitleLabel());
        includeDeprecatedConfigurations = new CheckBox(Utils.constants.includeDeprecated());
        setCheckBoxStyle(includeDeprecatedConfigurations);
        addConfigurationButton = new Button(Utils.constants.addConfiguration());
        addConfigurationButton.addStyleName(Utils.kaaAdminStyle.bAppButtonSmall());
        detailsTable.setWidget(row, 0, configurationsLabel);
        configurationsLabel.getElement().getParentElement().getStyle().setPropertyPx("paddingBottom", 10);
        detailsTable.setWidget(row, 1, includeDeprecatedConfigurations);
        row++;
        detailsTable.setWidget(row, 0, configurationsGrid);
        detailsTable.getFlexCellFormatter().setColSpan(row, 0, 3);
        row++;
        detailsTable.setWidget(row, 2, addConfigurationButton);
        addConfigurationButton.getElement().getParentElement().getStyle().setPropertyPx("paddingTop", 15);
        detailsTable.getCellFormatter().setHorizontalAlignment(row, 2, HasHorizontalAlignment.ALIGN_RIGHT);
        configurationsLabel.setVisible(!create);
        includeDeprecatedConfigurations.setVisible(!create);
        configurationsGrid.setVisible(!create);
        row++;
        // Notification topics section (no deprecated toggle here).
        topicsGrid = new TopicGrid(true);
        topicsGrid.setSize("700px", "200px");
        Label topicsLabel = new Label(Utils.constants.notificationTopics());
        topicsLabel.addStyleName(Utils.kaaAdminStyle.bAppContentTitleLabel());
        addTopicButton = new Button(Utils.constants.addNotificationTopic());
        addTopicButton.addStyleName(Utils.kaaAdminStyle.bAppButtonSmall());
        detailsTable.setWidget(row, 0, topicsLabel);
        topicsLabel.getElement().getParentElement().getStyle().setPropertyPx("paddingBottom", 10);
        row++;
        detailsTable.setWidget(row, 0, topicsGrid);
        detailsTable.getFlexCellFormatter().setColSpan(row, 0, 3);
        row++;
        detailsTable.setWidget(row, 2, addTopicButton);
        addTopicButton.getElement().getParentElement().getStyle().setPropertyPx("paddingTop", 15);
        detailsTable.getCellFormatter().setHorizontalAlignment(row, 2, HasHorizontalAlignment.ALIGN_RIGHT);
        topicsLabel.setVisible(!create);
        topicsGrid.setVisible(!create);
        name.setFocus(true);
    }
    /** Vertically centers the checkbox input and styles its text label. */
    private void setCheckBoxStyle(CheckBox box) {
        Element input = box.getElement().getFirstChildElement();
        input.getStyle().setVerticalAlign(VerticalAlign.MIDDLE);
        Element label = input.getNextSiblingElement();
        label.getStyle().setVerticalAlign(VerticalAlign.MIDDLE);
        label.getStyle().setPaddingLeft(5, Unit.PX);
        label.getStyle().setFontSize(13, Unit.PX);
    }
    /** Resets all inputs to their editable empty state. */
    @Override
    protected void resetImpl() {
        name.setValue("");
        name.setEnabled(true);
        nameLabel.addStyleName(REQUIRED);
        weight.setValue(null);
        weight.setEnabled(true);
        weightLabel.addStyleName(REQUIRED);
        description.setValue("");
        createdUsername.setValue("");
        createdDateTime.setValue("");
        addProfileFilterButton.setVisible(!create);
        addConfigurationButton.setVisible(!create);
        addTopicButton.setVisible(!create);
        profileFiltersGrid.setEnableActions(true);
        configurationsGrid.setEnableActions(true);
        includeDeprecatedProfileFilters.setVisible(!create);
        includeDeprecatedProfileFilters.setValue(false);
        includeDeprecatedConfigurations.setVisible(!create);
        includeDeprecatedConfigurations.setValue(false);
    }
    /** A group is valid when it has a non-empty name and a weight value. */
    @Override
    protected boolean validate() {
        boolean result = name.getValue().length()>0;
        result &= weight.getValue() != null;
        return result;
    }
    @Override
    public void setReadOnly() {
        name.setEnabled(false);
        nameLabel.removeStyleName(REQUIRED);
        weight.setEnabled(false);
        weightLabel.removeStyleName(REQUIRED);
        addProfileFilterButton.setVisible(false);
        addConfigurationButton.setVisible(false);
        includeDeprecatedProfileFilters.setVisible(false);
        includeDeprecatedConfigurations.setVisible(false);
        profileFiltersGrid.setEnableActions(false);
        configurationsGrid.setEnableActions(false);
    }
    @Override
    public HasValue<String> getName() {
        return name;
    }
    @Override
    public HasValue<Integer> getWeight() {
        return weight;
    }
    @Override
    public HasValue<String> getDescription() {
        return description;
    }
    @Override
    public HasValue<String> getCreatedUsername() {
        return createdUsername;
    }
    @Override
    public HasValue<String> getCreatedDateTime() {
        return createdDateTime;
    }
    @Override
    public ProfileFilterStructGrid getProfileFiltersGrid() {
        return profileFiltersGrid;
    }
    @Override
    public ConfigurationStructGrid getConfigurationsGrid() {
        return configurationsGrid;
    }
    @Override
    public AbstractGrid<TopicDto, String> getTopicsGrid() {
        return topicsGrid;
    }
    @Override
    public HasClickHandlers getAddProfileFilterButton() {
        return addProfileFilterButton;
    }
    @Override
    public HasClickHandlers getAddConfigurationButton() {
        return addConfigurationButton;
    }
    @Override
    public HasClickHandlers getAddTopicButton() {
        return addTopicButton;
    }
    @Override
    public HasValue<Boolean> getIncludeDeprecatedProfileFilters() {
        return includeDeprecatedProfileFilters;
    }
    @Override
    public HasValue<Boolean> getIncludeDeprecatedConfigurations() {
        return includeDeprecatedConfigurations;
    }
    @Override
    public void setProfileFiltersVisible(boolean visible) {
        profileFiltersLabel.setVisible(visible);
        includeDeprecatedProfileFilters.setVisible(visible);
        profileFiltersGrid.setVisible(visible);
        addProfileFilterButton.setVisible(visible);
    }
}
|
<gh_stars>0
export const getStatsFromCommits = (raidRepoWithOwner: string, commits: Commit[] | undefined): UserStats[] => {
if (!commits) return []
return Object.values(commits.reduce<{ [key: string]: UserStats }>((stats, commit) => {
if (
!commit.author?.user?.login // Exclude null users
|| commit.parents.totalCount > 1 // Exclude Merge commits
|| commit.associatedPullRequests.nodes.filter(
node => node.baseRef?.repository?.nameWithOwner !== raidRepoWithOwner
).length > 0 // Exclude commits from PRs not to the raid repo
) {
// console.log(JSON.stringify(commit))
return stats
}
if (commit.author.user.login in stats) {
stats[commit.author.user.login].additions += commit.additions
stats[commit.author.user.login].deletions += commit.deletions
stats[commit.author.user.login].commits += 1
} else {
stats[commit.author.user.login] = {
user: commit.author.user.login,
avatarUrl: commit.author.user.avatarUrl,
additions: commit.additions,
deletions: commit.deletions,
commits: 1,
}
}
return stats
}, {}))
} |
#!/bin/bash
APP_NAME=workon # application name
CONFIG_DIR=.config/$APP_NAME
WORKON_CONFIG_DIR=$HOME/$CONFIG_DIR # configuration directory
BIN_DIR=$WORKON_CONFIG_DIR # working directory for the built binary
FISH_COMPLETE=$HOME/.config/fish
CUR_PATH=$(pwd) # directory the installer was launched from
TMP_DIR=/tmp/$APP_NAME # temporary clone/build directory
SHELL_FILES=($HOME/.zshrc $HOME/.bashrc $HOME/.config/fish/config.fish) # shell rc files to update
FILE_SUFFIX=".sh" # default workon script suffix (".fish" for fish)
RELATIVE_PATH=.config/workon
# Print the first argument in bold green (info messages).
__colorPrint() {
    local message="$1"
    printf "\033[1;32m${message}\033[0m\n"
}
# Print the first argument in bold red (error messages).
__colorPrintError() {
    local message="$1"
    printf "\033[1;31m${message}\033[0m\n"
}
# Check that Go and Git are installed
go version > /dev/null 2>&1
if [[ $? -ne 0 ]];then
    __colorPrintError "缺少Go工具,请先安装Go工具"
    exit 1
fi
git --version > /dev/null 2>&1
if [[ $? -ne 0 ]];then
    echo "缺少Git工具,请先安装Git工具"
    exit 1
fi
__colorPrint "正在安装workon,请稍候..."
# Create the workon configuration directory if it does not exist
if [[ ! -d $WORKON_CONFIG_DIR ]];then
    mkdir -p $WORKON_CONFIG_DIR > /dev/null 2>&1
fi
rm -rf $TMP_DIR > /dev/null 2>&1
# Clone, build and install: binary plus helper scripts and fish completions
$(git clone -q https://github.com/zzhaolei/workon.git $TMP_DIR 1> /dev/null \
    && cd $TMP_DIR \
    && go build -o $APP_NAME.tool main.go > /dev/null 2>&1 \
    && cp -f $APP_NAME.* $BIN_DIR/ \
    && cp -f scripts/$APP_NAME.* $BIN_DIR/ \
    && cp -rf scripts/completions $FISH_COMPLETE/ > /dev/null 2>&1)
# Remember the build/install exit status for the final report
exit_code=$?
# Clean up the temporary directory and return to the starting directory
cd $CUR_PATH
rm -rf $TMP_DIR > /dev/null 2>&1
# Write the workon activation lines into each shell rc file.
# Bug fix: "$SHELL_FILES" only expands the FIRST element of a bash array;
# "${SHELL_FILES[@]}" iterates over all of them.
for shellFile in "${SHELL_FILES[@]}";do
    # Bug fix: the original grepped a hard-coded literal instead of
    # $shellFile, so FILE_SUFFIX was always forced to ".fish".
    # Also reset to ".sh" for non-fish files on every iteration.
    if [[ $(echo "$shellFile" | grep "config\.fish" | wc -l) -eq 1 ]];then
        FILE_SUFFIX=".fish"
    else
        FILE_SUFFIX=".sh"
    fi
    # Skip files that already reference the workon configuration
    shellConfig=$(cat $shellFile | grep -v "grep" | grep "$CONFIG_DIR")
    if [[ $shellConfig == "" || $(echo $shellConfig | wc -l) -eq 0 ]];then
        __colorPrint "$shellFile 中配置不存在,开始写入..."
        shellComplete=$(cat $shellFile | grep -v "grep" | grep "^autoload bashcompinit" | wc -l)
        if [[ $shellComplete -eq 0 ]];then
            echo "autoload bashcompinit" >> $shellFile
            echo "bashcompinit" >> $shellFile
        fi
        # Activate immediately in the current shell session
        alias setWorkon="source $HOME/.config/workon/workon.$FILE_SUFFIX"
        setWorkon
        # Persist the alias. Bug fix: the original omitted "source", so the
        # persisted alias would try to EXECUTE the script instead of sourcing it.
        echo "alias setWorkon='source $HOME/.config/workon/workon.$FILE_SUFFIX'">> $shellFile
        echo 'setWorkon' >> $shellFile
        __colorPrint "配置写入 $shellFile 成功"
    fi
done
# Report the final result based on the recorded build/install status
if [[ $exit_code -ne 0 ]];then
    __colorPrintError "安装失败"
else
    __colorPrint "安装成功"
fi
# Remove the helper functions from the current shell environment
unset -f __colorPrint > /dev/null 2>&1
unset -f __colorPrintError > /dev/null 2>&1
|
<gh_stars>0
package org.rs2server.rs2.content.api;
import org.rs2server.rs2.model.Item;
import org.rs2server.rs2.model.player.Player;
import javax.annotation.concurrent.Immutable;
/**
* A game event which is created when an item option in the inventory is clicked by the player.
*
* @author tommo
*/
@Immutable
public class GameItemInventoryActionEvent {
    /** The kind of inventory interaction that produced this event. */
    public enum ClickType {
        OPTION_1, OPTION_2, OPTION_3, OPTION_4, DROP, ITEM_ON_ITEM, WIELD_OPTION,
        /**
         * Note that if the event is of type DESTROY, the event is fired /after/ the destroy item interface has been shown and confirmed.
         */
        DESTROY
    }
    private final Player player;
    private final ClickType clickType;
    private final Item item;
    private final int slot;
    /**
     * @param player the player who clicked the inventory item
     * @param clickType which option was clicked
     * @param item the item that was interacted with
     * @param slot the inventory slot index of the item
     */
    public GameItemInventoryActionEvent(final Player player, final ClickType clickType, final Item item, final int slot) {
        this.player = player;
        this.clickType = clickType;
        this.item = item;
        this.slot = slot;
    }
    /** @return the player who triggered this event */
    public Player getPlayer() {
        return player;
    }
    /** @return the option that was clicked */
    public ClickType getClickType() {
        return clickType;
    }
    /** @return the item that was interacted with */
    public Item getItem() {
        return item;
    }
    /** @return the inventory slot index of the item */
    public int getSlot() {
        return slot;
    }
}
|
<filename>src/worker/RTCIceCandidate.js
import * as is from '../utils/is.js';
import assert from '../utils/assert.js';
/**
 * Minimal RTCIceCandidate shim: validates the config object, stores the
 * candidate string, and serializes via toJSON().
 */
export default class RTCIceCandidate {
    constructor(config) {
        assert(arguments.length, 'Not enough arguments');
        assert(
            is.undefined(config) || is.object(config),
            `'${config}' is not an object`
        );

        const { candidate } = config || {};
        assert(
            !is.undefined(candidate),
            `'${candidate}' is not a valid value for candidate`
        );

        this.candidate = String(candidate);
        // Bug fix: was misspelled `spdMid`, which left `this.sdpMid`
        // undefined when read by toJSON() below.
        this.sdpMid = null;
        this.sdpMLineIndex = 0;
    }

    get [Symbol.toStringTag]() {
        return 'RTCIceCandidate';
    }

    toJSON() {
        return {
            candidate: this.candidate,
            sdpMLineIndex: this.sdpMLineIndex,
            sdpMid: this.sdpMid
        };
    }
}
|
def levenshtein_distance(str1, str2):
    """Return the Levenshtein (edit) distance between two strings.

    Classic dynamic-programming formulation: dp[i][j] is the minimum number
    of single-character insertions, deletions and substitutions needed to
    turn str1[:i] into str2[:j].
    """
    rows, cols = len(str1) + 1, len(str2) + 1
    dp = [[0] * cols for _ in range(rows)]
    # Row/column 0 correspond to transforming to/from the empty string.
    for i in range(rows):
        dp[i][0] = i
    for j in range(cols):
        dp[0][j] = j
    for i in range(1, rows):
        for j in range(1, cols):
            substitution_cost = 0 if str1[i - 1] == str2[j - 1] else 1
            dp[i][j] = min(
                dp[i - 1][j] + 1,                       # deletion
                dp[i][j - 1] + 1,                       # insertion
                dp[i - 1][j - 1] + substitution_cost,   # match / substitution
            )
    return dp[-1][-1]
<reponame>Sasha7b9Work/S8-53M2
/////////////////////////////////////////////////////////////////////////////
// Name: samples/console/console.cpp
// Purpose: A sample console (as opposed to GUI) program using wxWidgets
// Author: <NAME>
// Modified by:
// Created: 04.10.99
// Copyright: (c) 1999 <NAME> <<EMAIL>>
// Licence: wxWindows licence
/////////////////////////////////////////////////////////////////////////////
// ============================================================================
// declarations
// ============================================================================
// ----------------------------------------------------------------------------
// headers
// ----------------------------------------------------------------------------
// For compilers that support precompilation, includes "wx/wx.h".
#include "wx/wxprec.h"
// for all others, include the necessary headers (this file is usually all you
// need because it includes almost all "standard" wxWidgets headers)
#ifndef WX_PRECOMP
#include "wx/wx.h"
#endif
#include <wx/app.h>
#include <wx/cmdline.h>
// ============================================================================
// implementation
// ============================================================================
// Command-line options understood by this sample: -h/--help (auto help),
// -d/--dummy (enables the guessing game), and a hidden -s/--secret switch.
static const wxCmdLineEntryDesc cmdLineDesc[] =
{
    { wxCMD_LINE_SWITCH, "h", "help", "show this help message",
        wxCMD_LINE_VAL_NONE, wxCMD_LINE_OPTION_HELP },
    { wxCMD_LINE_SWITCH, "d", "dummy", "a dummy switch",
        wxCMD_LINE_VAL_NONE, 0 },
    { wxCMD_LINE_SWITCH, "s", "secret", "a secret switch",
        wxCMD_LINE_VAL_NONE, wxCMD_LINE_HIDDEN },
    // ... your other command line options here...
    wxCMD_LINE_DESC_END
};
// Console entry point: initializes wxWidgets without a GUI, parses the
// command line against cmdLineDesc, and runs a small interactive demo
// when -d/--dummy is given.
int main(int argc, char **argv)
{
    wxApp::CheckBuildOptions(WX_BUILD_OPTIONS_SIGNATURE, "program");
    // RAII initializer for the wx library (no wxApp object in console mode).
    wxInitializer initializer;
    if ( !initializer )
    {
        fprintf(stderr, "Failed to initialize the wxWidgets library, aborting.");
        return -1;
    }
    wxCmdLineParser parser(cmdLineDesc, argc, argv);
    switch ( parser.Parse() )
    {
        case -1:
            // help was given, terminating
            break;
        case 0:
            // everything is ok; proceed
            if (parser.Found("d"))
            {
                wxPrintf("Dummy switch was given...\n");
                // Interactive number-guessing loop; exits on EOF or "quit".
                while (1)
                {
                    wxChar input[128];
                    wxPrintf("Try to guess the magic number (type 'quit' to escape): ");
                    if ( !wxFgets(input, WXSIZEOF(input), stdin) )
                        break;
                    // kill the last '\n'
                    input[wxStrlen(input) - 1] = 0;
                    if (wxStrcmp(input, "quit") == 0)
                        break;
                    long val;
                    if (!wxString(input).ToLong(&val))
                    {
                        wxPrintf("Invalid number...\n");
                        continue;
                    }
                    if (val == 42)
                        wxPrintf("You guessed!\n");
                    else
                        wxPrintf("Bad luck!\n");
                }
            }
            if (parser.Found("s"))
            {
                wxPrintf("Secret switch was given...\n");
            }
            break;
        default:
            break;
    }
    if ( argc == 1 )
    {
        // If there were no command-line options supplied, emit a message
        // otherwise it's not obvious that the sample ran successfully
        wxPrintf("Welcome to the wxWidgets 'console' sample!\n");
        wxPrintf("For more information, run it again with the --help option\n");
    }
    // do something useful here
    return 0;
}
|
<filename>doc-examples/jdbc-example-java/src/test/java/example/PersonRepositorySpec.java
package example;
import io.micronaut.data.repository.jpa.criteria.PredicateSpecification;
import io.micronaut.test.extensions.junit5.annotation.MicronautTest;
import jakarta.inject.Inject;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import java.util.Arrays;
import java.util.List;
import static example.PersonRepository.Specifications.ageIsLessThan;
import static example.PersonRepository.Specifications.nameEquals;
import static example.PersonRepository.Specifications.setNewName;
import static io.micronaut.data.repository.jpa.criteria.PredicateSpecification.not;
import static io.micronaut.data.repository.jpa.criteria.PredicateSpecification.where;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
/**
 * Integration tests for {@code PersonRepository} exercising Micronaut Data's
 * criteria {@code PredicateSpecification} API for find, delete and update.
 * Each test starts from a fixed two-person fixture (Denis/13, Josh/22).
 */
@MicronautTest
class PersonRepositorySpec {
    @Inject
    PersonRepository personRepository;
    /** Seed the repository with the two fixture rows before every test. */
    @BeforeEach
    void beforeEach() {
        personRepository.saveAll(Arrays.asList(
                new Person(
                        "Denis",
                        13
                ),
                new Person(
                        "Josh",
                        22
                )
        ));
    }
    /** Wipe all rows so tests stay independent. */
    @AfterEach
    void afterEach() {
        personRepository.deleteAll();
    }
    @Test
    void testFind() {
        // tag::find[]
        Person denis = personRepository.findOne(nameEquals("Denis")).orElse(null);
        long countAgeLess30 = personRepository.count(ageIsLessThan(30));
        long countAgeLess20 = personRepository.count(ageIsLessThan(20));
        long countAgeLess30NotDenis = personRepository.count(ageIsLessThan(30).and(not(nameEquals("Denis"))));
        List<Person> people = personRepository.findAll(where(nameEquals("Denis").or(nameEquals("Josh"))));
        // end::find[]
        assertNotNull(denis);
        assertEquals(2, countAgeLess30);
        assertEquals(1, countAgeLess20);
        assertEquals(1, countAgeLess30NotDenis);
        assertEquals(2, people.size());
    }
    @Test
    void testDelete() {
        List<Person> all = personRepository.findAll((PredicateSpecification<Person>) null);
        assertEquals(2, all.size());
        // tag::delete[]
        long recordsDeleted = personRepository.deleteAll(where(nameEquals("Denis")));
        // end::delete[]
        assertEquals(1, recordsDeleted);
        all = personRepository.findAll((PredicateSpecification<Person>) null);
        assertEquals(1, all.size());
    }
    @Test
    void testUpdate() {
        List<Person> all = personRepository.findAll((PredicateSpecification<Person>) null);
        assertEquals(2, all.size());
        assertTrue(all.stream().anyMatch(p -> p.getName().equals("Denis")));
        assertTrue(all.stream().anyMatch(p -> p.getName().equals("Josh")));
        // tag::update[]
        long recordsUpdated = personRepository.updateAll(setNewName("Steven").where(nameEquals("Denis")));
        // end::update[]
        assertEquals(1, recordsUpdated);
        all = personRepository.findAll((PredicateSpecification<Person>) null);
        assertEquals(2, all.size());
        assertTrue(all.stream().anyMatch(p -> p.getName().equals("Steven")));
        assertTrue(all.stream().anyMatch(p -> p.getName().equals("Josh")));
    }
}
# Launch a conditioned-reverb TCN training run.
# NOTE(review): flag semantics are defined by main.py's CLI — confirm there.
# The trailing commented-out flags are alternatives kept for reference.
python main.py \
--architecture 'tcn' \
--comments 'local test reverb cond' \
--loss_functions 'esr,mae,stft' \
--esr_scaling 1 \
--mae_scaling 1 \
--stft_scaling 1 \
--specific_fx_name 'reverb' \
--dilation_depth 6 \
--dilation_factor 7 \
--kernel_size 20 \
--activation 'cond_gated' \
--grouping 'local' \
--sample_duration 20 \
--num_channels 16 \
--cpu \
--data_dir '/home/jovyan/cond-reverb' \
--batch_size 1 \
--learning_rate 0.004 \
--preemphasis_type aw \
--conditioning \
--conditioning_type 'cond_gated' \
--conditioning_structure 'deep'
# --fx_list 'overdrive,reverb' \
# --bias
# --force_local_residual \
# --without_preemphasis \
# --bias
|
// Endpoint configuration for the URL-shortener client.
// The commented-out pair below is an alternative (self-hosted) deployment.
module.exports = {
apiUrl: "https://rawpixel-url-shortner.herokuapp.com/api/",
baseUrl: "https://rawpixel-url-shortner.herokuapp.com/"
// /apiUrl: "http://shortener.muhzi.com/v1/api/",
// baseUrl: "http://muhzi.com"
}
import assign from 'lodash/assign';
// Predicate: true for undefined or null.
function isNull(value) {
  return value === null || value === undefined;
}

// Namespace prefix applied to every storage key.
const prefix = 'dragontiger-';

/**
 * Builds a cache backed by `client.session` (a Storage-like object) with an
 * in-memory Map in front of it. Entries are stored as
 * `{ value, timestamp }`, where `timestamp` is the expiry time
 * (defaulting to now + 5000 ms).
 */
export default function createCache(client) {
  const { session } = client;
  const cache = new Map();

  // Resolve a name to its cached entry or null, promoting
  // session-storage hits into the in-memory map.
  const load = (name) => {
    const id = prefix + name;
    if (cache.has(id)) {
      return cache.get(id);
    }
    const stored = session.getItem(id);
    if (!isNull(stored)) {
      cache.set(id, JSON.parse(stored));
      return cache.get(id);
    }
    return null;
  };

  return {
    get(name) {
      const entry = load(name);
      return entry === null ? null : entry.value;
    },
    set(name, value, expiredAt) {
      const id = prefix + name;
      const timestamp = expiredAt || (Date.now() + 5000);
      cache.set(id, { value, timestamp });
      // Errors are kept in memory only — never persisted to the session.
      if (value instanceof Error) {
        return this;
      }
      session.setItem(id, JSON.stringify({ value, timestamp }));
      return this;
    },
    has(name) {
      const id = prefix + name;
      if (cache.has(id)) {
        return true;
      }
      return !isNull(session.getItem(id));
    },
    delete(name) {
      const id = prefix + name;
      cache.delete(id);
      session.removeItem(id);
      return this;
    },
    // Shallow-merge `value` into the stored object; returns the PREVIOUS value.
    merge(name, value) {
      const previous = this.get(name) || {};
      this.set(name, Object.assign({}, previous, value));
      return previous;
    },
    expired(name) {
      const entry = load(name);
      return entry === null ? true : entry.timestamp < Date.now();
    },
    clear() {
      session.clear();
      cache.clear();
    },
  };
}
|
#!/usr/bin/env sh
#
# Ping Identity DevOps - Docker Build Hooks
#
#- Once both the remote (i.e. git) and local server-profiles have been merged
#- then we can push that out to the instance. This will override any files found
#- in the ${SERVER_ROOT_DIR} directory.
#
# Enable shell tracing when VERBOSE is "true".
${VERBOSE} && set -x
# shellcheck source=pingcommon.lib.sh
. "${HOOKS_DIR}/pingcommon.lib.sh"
# Overlay any staged instance files onto the server root (overwrites).
if test -d "${STAGING_DIR}/instance" ; then
echo "merging ${STAGING_DIR}/instance to ${SERVER_ROOT_DIR}"
cp -af "${STAGING_DIR}"/instance/* "${SERVER_ROOT_DIR}"
fi
|
<reponame>zxqdx/zLyric
var Parser = Parser || {
    parsers: {}
};

// Parser for NetEase Cloud Music lyric API responses.
Parser.parsers.netease = {
    name: "netease",
    version: "0.0.1",
    // Parse the raw JSON payload into { addInfo, lyric, tLyric }.
    // `lyric`/`tLyric` are either `false` (absent in the payload) or arrays
    // of { time: milliseconds, content } sorted ascending by time.
    parse: function (raw) {
        var rawJson = JSON.parse(raw);
        var json = {
            addInfo: []
        };
        // BUG FIX: the original checked "transUser" here but then read
        // rawJson.lyricUser.nickname, so the lyric credit was skipped (or
        // threw) whenever only one of the two users was present.
        if (rawJson.hasOwnProperty("lyricUser")) {
            json.addInfo.push("歌词:" + rawJson.lyricUser.nickname);
        }
        if (rawJson.hasOwnProperty("transUser")) {
            json.addInfo.push("翻译:" + rawJson.transUser.nickname);
        }
        // Shared LRC-text parser (previously duplicated for lrc/tlyric).
        // Lines that do not match "[mm:ss.xx]content" are dropped.
        function parseLrc(text) {
            return text.split("\n").map(function (line) {
                var re = /^\[(\d+):(\d+)\.(\d+)\](.*)$/g;
                var match = re.exec(line);
                if (match !== null) {
                    var minute = parseInt(match[1]);
                    var second = parseInt(match[2]);
                    var millis = parseInt(match[3]);
                    var decimal = match[3].length; // digits after the dot
                    return {
                        time: minute * 60 * 1000 + second * 1000 +
                            millis * 1000 / Math.pow(10, decimal),
                        content: match[4].trim()
                    };
                }
                return null;
            }).filter(function (line) {
                return line !== null;
            }).sort(function (l1, l2) {
                return l1.time - l2.time;
            });
        }
        json.lyric = (rawJson.hasOwnProperty("lrc") && rawJson.lrc.lyric) ?
            parseLrc(rawJson.lrc.lyric) : false;
        json.tLyric = (rawJson.hasOwnProperty("tlyric") && rawJson.tlyric.lyric) ?
            parseLrc(rawJson.tlyric.lyric) : false;
        return json;
    }
};
# Recreate the Elasticsearch "test" index from scratch: drop it, then
# define settings (a custom 1-2 character ngram analyzer) and mappings.
# NOTE(review): no scheme on the host, so curl defaults to http.
curl -X DELETE "elasticsearch.localhost.com/test"
# The JSON body below is sent verbatim; it cannot carry shell comments.
curl -X PUT "elasticsearch.localhost.com/test" -H 'Content-Type: application/json' -d \
'
{
"settings": {
"number_of_shards": 2,
"number_of_replicas": 1,
"analysis": {
"analyzer": {
"analyzer_ngram": {
"tokenizer": "tokenizer_ngram",
"filter" : ["lowercase"]
}
},
"tokenizer": {
"tokenizer_ngram": {
"type": "ngram",
"min_gram": 1,
"max_gram": 2,
"token_chars": [
"letter",
"digit"
]
}
}
}
},
"mappings": {
"properties": {
"id": {"type":"long"},
"name": {
"type": "text",
"analyzer": "analyzer_ngram",
"search_analyzer": "analyzer_ngram"
},
"desc":{
"type": "text",
"analyzer": "whitespace",
"search_analyzer": "whitespace"
},
"production_date": {
"type": "date"
},
"location": {
"type": "geo_point"
}
}
}
}
'
#!/bin/bash
# Adapted from https://github.com/facebookresearch/MIXER/blob/master/prepareData.sh
#
# Downloads the WMT en-de parallel corpora, tokenizes with Moses, learns a
# joint BPE vocabulary and writes train/valid/test splits to $OUTDIR.
echo 'Cloning Moses github repository (for tokenization scripts)...'
git clone https://github.com/moses-smt/mosesdecoder.git
echo 'Cloning Subword NMT repository (for BPE pre-processing)...'
git clone https://github.com/rsennrich/subword-nmt.git
SCRIPTS=mosesdecoder/scripts
TOKENIZER=$SCRIPTS/tokenizer/tokenizer.perl
CLEAN=$SCRIPTS/training/clean-corpus-n.perl
NORM_PUNC=$SCRIPTS/tokenizer/normalize-punctuation.perl
REM_NON_PRINT_CHAR=$SCRIPTS/tokenizer/remove-non-printing-char.perl
BPEROOT=subword-nmt/subword_nmt
BPE_TOKENS=40000
URLS=(
"http://statmt.org/wmt13/training-parallel-europarl-v7.tgz"
"http://statmt.org/wmt13/training-parallel-commoncrawl.tgz"
"http://data.statmt.org/wmt17/translation-task/training-parallel-nc-v12.tgz"
"http://data.statmt.org/wmt17/translation-task/dev.tgz"
"http://statmt.org/wmt14/test-full.tgz"
)
FILES=(
"training-parallel-europarl-v7.tgz"
"training-parallel-commoncrawl.tgz"
"training-parallel-nc-v12.tgz"
"dev.tgz"
"test-full.tgz"
)
CORPORA=(
"training/europarl-v7.de-en"
"commoncrawl.de-en"
"training/news-commentary-v12.de-en"
)
# This will make the dataset compatible to the one used in "Convolutional Sequence to Sequence Learning"
# https://arxiv.org/abs/1705.03122
# (--icml17 swaps in the older WMT14 news-commentary-v9 corpus)
if [ "$1" == "--icml17" ]; then
URLS[2]="http://statmt.org/wmt14/training-parallel-nc-v9.tgz"
FILES[2]="training-parallel-nc-v9.tgz"
CORPORA[2]="training/news-commentary-v9.de-en"
OUTDIR=wmt14_en_de
else
OUTDIR=wmt17_en_de
fi
if [ ! -d "$SCRIPTS" ]; then
echo "Please set SCRIPTS variable correctly to point to Moses scripts."
# BUG FIX: exit non-zero so callers can detect the failure (was `exit`).
exit 1
fi
src=en
tgt=de
lang=en-de
prep=../$OUTDIR
tmp=$prep/tmp
orig=../orig
dev=dev/newstest2013
mkdir -p $orig $tmp $prep
cd $orig
# Download and unpack each archive, skipping files already present.
for ((i=0;i<${#URLS[@]};++i)); do
file=${FILES[i]}
if [ -f $file ]; then
echo "$file already exists, skipping download"
else
url=${URLS[i]}
wget "$url"
if [ -f $file ]; then
echo "$url successfully downloaded."
else
echo "$url not successfully downloaded."
# BUG FIX: `exit -1` is not a valid POSIX exit status; use 1.
exit 1
fi
if [ ${file: -4} == ".tgz" ]; then
tar zxvf $file
elif [ ${file: -4} == ".tar" ]; then
tar xvf $file
fi
fi
done
cd ../scripts
echo "pre-processing train data..."
for l in $src $tgt; do
# BUG FIX: -f avoids an error on the first run when the file is absent.
rm -f $tmp/train.tags.$lang.tok.$l
for f in "${CORPORA[@]}"; do
cat $orig/$f.$l | \
perl $NORM_PUNC $l | \
perl $REM_NON_PRINT_CHAR | \
perl $TOKENIZER -threads 32 -a -l $l >> $tmp/train.tags.$lang.tok.$l
done
done
echo "pre-processing test data..."
for l in $src $tgt; do
if [ "$l" == "$src" ]; then
t="src"
else
t="ref"
fi
# Strip SGML markup and normalize apostrophes before tokenizing.
grep '<seg id' $orig/test-full/newstest2014-deen-$t.$l.sgm | \
sed -e 's/<seg id="[0-9]*">\s*//g' | \
sed -e 's/\s*<\/seg>\s*//g' | \
sed -e "s/\’/\'/g" | \
perl $TOKENIZER -threads 32 -a -l $l > $tmp/test.$l
echo ""
done
echo "splitting train and valid..."
# Every 100th line goes to the validation set, the rest to training.
for l in $src $tgt; do
awk '{if (NR%100 == 0)  print $0; }' $tmp/train.tags.$lang.tok.$l > $tmp/valid.$l
awk '{if (NR%100 != 0)  print $0; }' $tmp/train.tags.$lang.tok.$l > $tmp/train.$l
done
TRAIN=$tmp/train.de-en
BPE_CODE=$prep/code
rm -f $TRAIN
for l in $src $tgt; do
cat $tmp/train.$l >> $TRAIN
done
echo "learn_bpe.py on ${TRAIN}..."
python $BPEROOT/learn_bpe.py -s $BPE_TOKENS < $TRAIN > $BPE_CODE
for L in $src $tgt; do
for f in train.$L valid.$L test.$L; do
echo "apply_bpe.py to ${f}..."
python $BPEROOT/apply_bpe.py -c $BPE_CODE < $tmp/$f > $tmp/bpe.$f
done
done
# Drop sentence pairs with bad length ratios or out-of-range lengths.
perl $CLEAN -ratio 1.5 $tmp/bpe.train $src $tgt $prep/train 1 250
perl $CLEAN -ratio 1.5 $tmp/bpe.valid $src $tgt $prep/valid 1 250
for L in $src $tgt; do
cp $tmp/bpe.test.$L $prep/test.$L
done
|
#!/bin/bash
# Exit on error
set -euo pipefail
# Follow (tail) the systemd journal for the homesec-bootstrap unit.
sudo journalctl -f -u homesec-bootstrap
import single_robot_behavior
import behavior
import robocup
import main
import constants
class Mark(single_robot_behavior.SingleRobotBehavior):
    """Single-robot behavior that "marks" an opponent robot.

    The robot drives to a point on the segment between the ball and the
    marked opponent: first onto the segment (nearest point) when far from
    it, then to a point ``ratio`` of the way from the ball toward the
    opponent once close enough.
    """

    def __init__(self):
        super().__init__(continuous=True)
        # Fraction of the ball->opponent line at which to stand (see `ratio`).
        self._ratio = 0.9
        # Distance from the mark line beyond which we first drive onto it.
        self._mark_line_thresh = 0.9
        # Opponent robot to mark; the behavior idles while this is None.
        self._mark_robot = None
        # Start state transitions to running unconditionally.
        self.add_transition(behavior.Behavior.State.start,
                            behavior.Behavior.State.running, lambda: True,
                            "immediately")

    def execute_running(self):
        """Recompute the blocking point each tick and command the robot there."""
        # Nothing to do without a valid ball and a visible robot to mark.
        if self.mark_robot is None or not main.ball(
        ).valid or not self.mark_robot.visible:
            return
        ball_pos = main.ball().pos
        pos = self.robot.pos
        mark_pos = self.mark_robot.pos
        mark_line_dir = (ball_pos - mark_pos).normalized()
        # Segment from just behind the ball to just past the opponent.
        ball_mark_line = robocup.Segment(
            ball_pos - mark_line_dir * constants.Ball.Radius,
            mark_pos + mark_line_dir * 2.0 * constants.Robot.Radius)
        main.system_state().draw_line(ball_mark_line, (0, 0, 255), "Mark")
        mark_line_dist = ball_mark_line.dist_to(pos)
        target_point = None
        if mark_line_dist > self.mark_line_thresh:
            # Far from the line: drive to its nearest point first.
            target_point = ball_mark_line.nearest_point(pos)
        else:
            # Near the line: slide to `ratio` of the way toward the opponent.
            target_point = ball_pos + (
                mark_pos -
                ball_pos).normalized() * self.ratio * ball_mark_line.length()
        # Visualize the marked opponent.
        main.system_state().draw_circle(self._mark_robot.pos,
                                        constants.Robot.Radius * 1.2,
                                        (0, 127, 255), "Mark")
        self.robot.approach_opponent(self.mark_robot.shell_id(), True)
        self.robot.move_to(target_point)
        self.robot.face(ball_pos)

    @property
    def ratio(self):
        """Fraction [0, 1] of the ball->opponent distance for the mark point."""
        return self._ratio

    @ratio.setter
    def ratio(self, value):
        # Clamp to [0, 1].
        self._ratio = min(max(value, 0.0), 1.0)

    @property
    def mark_line_thresh(self):
        """Distance from the mark line at which we snap onto it first."""
        return self._mark_line_thresh

    @mark_line_thresh.setter
    def mark_line_thresh(self, value):
        self._mark_line_thresh = value

    @property
    def mark_robot(self):
        """Opponent robot being marked (None disables the behavior)."""
        return self._mark_robot

    @mark_robot.setter
    def mark_robot(self, value):
        self._mark_robot = value
|
package kr.co.gardener.util;

import java.awt.image.BufferedImage;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URL;
import java.nio.charset.StandardCharsets;
import java.util.Properties;
import java.util.UUID;

import javax.imageio.ImageIO;
import javax.swing.ImageIcon;
import javax.swing.JFrame;
import javax.swing.JLabel;

import org.springframework.web.multipart.MultipartFile;
/**
 * Static helpers for storing files below the configured upload root:
 * multipart uploads ({@link #Uploader}) and web images fetched by URL
 * ({@link #WebImageUploader}). Paths come from C:\key\config.properties.
 */
public class FileUploader {

    /**
     * Loads the upload-path configuration from C:\key\config.properties.
     *
     * @return the loaded {@link Properties}; empty when the file is missing
     *     or unreadable (the failure is logged to stdout)
     */
    public static Properties makeProp() {
        // Locate the config file
        File config = new File("C:\\key\\config.properties");
        //File config = new File("gakkum2021/tomcat/webapps/key/config.properties");
        Properties pro = new Properties();
        // BUG FIX: try-with-resources closes the reader (it was leaked), and
        // load() is no longer reached with a null reader when the file is
        // missing (which previously threw a NullPointerException).
        try (BufferedReader fr = new BufferedReader(
                new InputStreamReader(new FileInputStream(config), StandardCharsets.UTF_8))) {
            pro.load(fr);
        } catch (FileNotFoundException e) {
            System.out.println("config.properties 파일을 찾을 수가 없습니다.");
        } catch (IOException e1) {
            System.out.println("config.properties 파일이 잘못되어 있습니다.");
        }
        return pro;
    }

    /**
     * Stores a multipart upload under the configured upload root.
     *
     * @param uploadFile file received from the request (may be null or empty)
     * @param path sub-directory below the upload root
     * @param name file name to use; when null, "UUID_originalName" is used
     * @return the web-facing src path of the stored file, or null on failure
     */
    public static String Uploader(MultipartFile uploadFile, String path, String name) {
        Properties pro = makeProp();
        MultipartFile file = uploadFile;
        UUID uuid = UUID.randomUUID();
        // Build the filesystem path and the matching web src path.
        String javaFilePath = pro.getProperty("java.fileUpload.path") + path.replaceAll("/", "\\\\");
        String srcFilePath = pro.getProperty("spring.fileUpload.src") + path.replaceAll("\\\\", "/");
        String fileName = name;
        System.out.println("javaFilePath : " + javaFilePath);
        System.out.println("srcFilePath : " + srcFilePath);
        System.out.println(pro.getProperty("java.fileUpload.path"));
        // Create the target folder when it does not exist yet.
        File folder = new File(javaFilePath);
        if (!folder.exists()) {
            if (folder.mkdirs())
                System.out.println(javaFilePath + " : 폴더를 생성 성공");
            else
                System.out.println("폴더를 생성 실패");
        }
        // Copy the uploaded file into place.
        if (file != null && !file.isEmpty()) {
            if (fileName == null)
                fileName = uuid.toString() + "_" + file.getOriginalFilename();
            // NOTE(review): no duplicate-file-name check is performed here.
            try {
                file.transferTo(new File(javaFilePath + "\\" + fileName));
                srcFilePath += "/" + fileName;
            } catch (Exception e) {
                System.out.println("오류발생");
                return null;
            }
        }
        return srcFilePath;
    }

    /**
     * Downloads an image from a URL and stores it as product/&lt;fileName&gt;.png.
     *
     * @param fileName base name (without extension) for the stored image
     * @param src URL of the image to fetch
     * @return the web-facing src path, or "error" when the fetch/write fails
     */
    public static String WebImageUploader(String fileName, String src) {
        Properties pro = makeProp();
        // Build the filesystem path and the matching web src path.
        String javaFilePath = pro.getProperty("java.fileUpload.path") + "product\\" + fileName + ".png";
        String srcFilePath = pro.getProperty("spring.fileUpload.src") + "product/" + fileName + ".png";
        try {
            URL url = new URL(src);
            BufferedImage image = ImageIO.read(url);
            File file = new File(javaFilePath);
            ImageIO.write(image, "png", file);
        } catch (IOException e) {
            e.printStackTrace();
            srcFilePath = "error";
        }
        return srcFilePath;
    }
}
|
// DOM handles: the file <input>, the metadata output element, and the
// preview <img> inside #exif.
var file = document.getElementById('file');
var imageMeta = document.getElementById('imageMeta');
var image = document.querySelector('#exif img');
// Convert an EXIF DMS triple [degrees, minutes, seconds] (each a rational
// with numerator/denominator) to decimal degrees. Note: only the degree
// component's numerator is used, matching EXIF's integer-degree encoding.
var toDecimal = function(number) {
    var degrees = number[0].numerator;
    var minutes = number[1].numerator / (60 * number[1].denominator);
    var seconds = number[2].numerator / (3600 * number[2].denominator);
    return degrees + minutes + seconds;
};
// When the user picks a file: preview the image and render its EXIF
// GPS/date/model metadata into #imageMeta.
file.addEventListener('change', function(e) {
    var selectedFile = e.target.files[0]
    var fr = new FileReader();
    // Show the chosen image as a data-URL preview.
    fr.onload = function(e) {
        image.src = this.result;
    };
    fr.readAsDataURL(selectedFile);
    EXIF.getData(selectedFile, function() {
        //Extract Timestamp
        //Extract Location
        var lat = EXIF.getTag(this, 'GPSLatitude');
        var latRef = EXIF.getTag(this, 'GPSLatitudeRef');
        var lng = EXIF.getTag(this, 'GPSLongitude');
        var lngRef = EXIF.getTag(this, 'GPSLongitudeRef');
        if (lat && lng) {
            //Convert coordinates to WGS84 decimal
            // NOTE(review): defaults assume N latitude / W longitude when the
            // ref tags are missing — verify this matches the intended locale.
            latRef = latRef || "N";
            lngRef = lngRef || "W";
            // NOTE(review): treats DMS components as plain Numbers; the
            // toDecimal helper above (rational-aware) is unused — confirm
            // which form EXIF.getTag returns here.
            lat = (lat[0] + lat[1] / 60 + lat[2] / 3600) * (latRef == "N" ? 1 : -1);
            lng = (lng[0] + lng[1] / 60 + lng[2] / 3600) * (lngRef == "W" ? -1 : 1);
            var loc = lat + ',' + lng;
            // SECURITY NOTE(review): a Google Maps API key is embedded in this
            // page source; consider restricting or rotating it.
            imageMeta.innerHTML = '<a target="_blank" href="http://maps.google.com/maps/place/' + loc + '/@' + loc + ',10z/data=!3m1!1e3"><img src=https://maps.googleapis.com/maps/api/staticmap?zoom=10&size=700x400&maptype=roadmap&markers=color:red%7Clabel:C%7C' + loc + '&key=AIzaSyBM0cQN_J2q4QjjzenttTarUZmvXlj4zl4 ><br><br>';
            imageMeta.innerHTML += '<br> latitude: ' + lat;
            imageMeta.innerHTML += '<br> longitude: ' + lng;
        } else {
            imageMeta.innerHTML = ('<br>No Geotags in this Image.');
        }
        // First available date-ish tag (falls back to Make), plus model.
        var make = EXIF.getTag(this, "DateTime") || EXIF.getTag(this, "DateTimeOriginal") || EXIF.getTag(this, "Make") || EXIF.getTag(this, "GPSDateStamp"),
            model = EXIF.getTag(this, "Model");
        if (make) {
            imageMeta.innerHTML += "<br>Date: " + make;
        }
        if (model) {
            imageMeta.innerHTML += "<br>Model: " + model;
        }
        console.log('EXIF Data of this Image \n', EXIF.pretty(this))
    });
});
// Wire a file <input> (src) to an <img> (target): whenever the user picks
// a file, read it as a data URL and display it in the target element.
function showImage(src, target) {
    var reader = new FileReader();
    // When the read finishes, point the target image at the data URL.
    reader.onload = function(e) {
        target.src = this.result;
    };
    src.addEventListener("change", function() {
        // Kick off the read for the newly selected file.
        reader.readAsDataURL(src.files[0]);
    });
}
|
<gh_stars>100-1000
// https://open.kattis.com/problems/guessthedatastructure
#include <iostream>
#include <queue>
#include <stack>
using namespace std;
// Kattis "guessthedatastructure": replay push/pop operations against a
// stack, a FIFO queue and a max-heap simultaneously; report which (if any)
// is consistent with every observed pop.
int main() {
    int operations;
    while (cin >> operations) {
        stack<int> st;
        queue<int> fifo;
        priority_queue<int> heap;
        bool couldBeStack = true;
        bool couldBeQueue = true;
        bool couldBeHeap = true;
        for (; operations > 0; --operations) {
            int op, value;
            cin >> op >> value;
            if (op == 1) {
                // Insert into all three candidates.
                st.push(value);
                fifo.push(value);
                heap.push(value);
            } else if (st.empty()) {
                // Removing from an empty structure rules out all three.
                couldBeStack = couldBeQueue = couldBeHeap = false;
            } else {
                // A candidate survives only if its removal order matches.
                couldBeStack = couldBeStack && st.top() == value;
                couldBeQueue = couldBeQueue && fifo.front() == value;
                couldBeHeap = couldBeHeap && heap.top() == value;
                st.pop();
                fifo.pop();
                heap.pop();
            }
        }
        int matches = (couldBeStack ? 1 : 0) + (couldBeQueue ? 1 : 0) + (couldBeHeap ? 1 : 0);
        if (matches == 0) {
            cout << "impossible\n";
        } else if (matches > 1) {
            cout << "not sure\n";
        } else if (couldBeStack) {
            cout << "stack\n";
        } else if (couldBeQueue) {
            cout << "queue\n";
        } else {
            cout << "priority queue\n";
        }
    }
}
|
<filename>controllers/PurchasesController.js
const Purchases = require('../models/Purchases');
const Accounts = require('../models/Accounts');
const serialize = require('node-serialize');
const settings = require('electron-settings');
const moment = require('moment');
/**
 * Controller for the purchases day-book screen: loads purchases for a date
 * range into a data table and wires up create/update/delete handling.
 */
class PurchasesController {
    /**
     * @param {string} elementId id of the clicked navigation element; only
     *     the purchases day-book/accounts ids initialise this screen.
     */
    constructor(elementId) {
        showLoader();
        const user = settings.get('loggedUser');
        switch (elementId) {
            case 'purchases-day-book':
            case 'purchases-accounts1':
            case 'purchases-accounts2':
            case 'purchases-accounts3':
            case 'purchases-accounts4':
            case 'purchases-accounts5':
                return this.index(user)
            default:
                return console.log(123)
        }
    }
    /**
     * Get Section
     * @returns {Element}
     */
    static getS() {
        return document.getElementById('purchases-accounts-section')
    }
    /** Default both date filters to today, then load and wire the table. */
    index(user) {
        let start = document.getElementById('purchases-start-date')
        let end = document.getElementById('purchases-end-date')
        if (start.value.trim() === '') {
            start.value = moment().format('DD/MM/YYYY');
        }
        if (end.value.trim() === '') {
            end.value = moment().format('DD/MM/YYYY');
        }
        this.getRecord(start, end, user);
        this.filter(start, end, user);
    }
    /** Re-query the table when the date-range filter form is submitted. */
    filter(start, end, user) {
        let form = document.getElementById('purchases-filter-form')
        form.onsubmit = (evt) => {
            evt.preventDefault();
            showLoader();
            if (start.value.trim() === '') {
                showToast('Please Enter start date', 'danger')
                return false;
            }
            if (end.value.trim() === '') {
                showToast('Please Enter end date', 'danger')
                return false;
            }
            this.getRecord(start, end, user);
        }
    }
    /** Fetch purchases in [start, end] and (re)build the data table. */
    getRecord(start, end, user) {
        const tableId = '#purchases_list';
        Purchases.getPurchases(start.value, end.value).then(function (results) {
            let sNo = 1, dataSet = [];
            const disabled = PurchasesController.isDisabled(user);
            // One editable row per purchase record.
            Array.prototype.forEach.call(results, (row) => {
                dataSet.push([
                    HtmlHelper.getSpanCell(sNo) +
                    HtmlHelper.getInputFieldHtml('id', 'purchases-edit-id-' + row.id, 'hidden', false, row.id),
                    HtmlHelper.getSelect2InputFieldHtml('product', 'purchases-edit-product-' + row.id, row.product, disabled),
                    HtmlHelper.getInputFieldHtml('quantity', 'purchases-edit-quantity-' + row.id, 'number', false, row.quantity, disabled),
                    HtmlHelper.getInputFieldHtml('price', 'purchases-edit-price-' + row.id, 'number', false, row.price, disabled),
                    HtmlHelper.getInputFieldHtml('total', 'purchases-edit-total-' + row.id, 'number', false, row.total, true),
                    HtmlHelper.getSelect2InputFieldHtml('supplier', 'purchases-edit-supplier-' + row.id, row.supplier, disabled),
                    HtmlHelper.getSelect2InputFieldHtml('terminal', 'purchases-edit-terminal-' + row.id, row.terminal, disabled),
                    HtmlHelper.getInputFieldHtml('reg_no', 'purchases-edit-reg_no-' + row.id, false, false, row.reg_no, disabled),
                    HtmlHelper.getStatusOfRow(row.id, disabled)
                ]);
                sNo++;
            })
            // Init Data Table
            HtmlHelper.initDataTable(tableId, dataSet, [
                'No',
                ['select', 'Product'],
                ['text-numeric', 'Qty'],
                ['text-numeric', 'Rate'],
                ['text-numeric', 'Amount'],
                ['select', 'Supplier'],
                ['select', 'Terminal'],
                ['text', 'TL Reg No'],
                'Status'
            ])
            // Writable users also get an empty footer row for new entries.
            if (!disabled) {
                HtmlHelper.setDataTableFooter(tableId, PurchasesController.defaultRow(sNo))
            }
            PurchasesController.updateTotalAmount()
            PurchasesController.getSelect2Option()
            PurchasesController.create();
        })
    }
    /** Builds the empty "add new purchase" footer row. */
    static defaultRow(sNo) {
        let fields = [
            HtmlHelper.getSpanCell(sNo),
            HtmlHelper.getSelect2InputFieldHtml('product', 'purchases-add-product'),
            HtmlHelper.getInputFieldHtml('quantity', 'purchases-add-quantity', 'number'),
            HtmlHelper.getInputFieldHtml('price', 'purchases-add-price', 'number'),
            HtmlHelper.getInputFieldHtml('total', 'purchases-add-total', 'number', false, false, true),
            HtmlHelper.getSelect2InputFieldHtml('supplier', 'purchases-add-supplier'),
            HtmlHelper.getSelect2InputFieldHtml('terminal', 'purchases-add-terminal'),
            HtmlHelper.getInputFieldHtml('reg_no', 'purchases-add-reg_no'),
            HtmlHelper.getStatusOfRow()
        ];
        return HtmlHelper.setCell2Array(fields);
    }
    /** Pressing Enter inside a row creates (footer row) or updates it. */
    static create() {
        let table = document.getElementById('purchases_list')
        let created = document.getElementById('purchases-created-date')
        let inputs = table.querySelectorAll('input')
        Array.prototype.forEach.call(inputs, (input) => {
            input.addEventListener('keypress', function (event) {
                let key = event.which || event.keyCode;
                if (key === 13) { // Enter
                    let data = getFormValues(event.target.parentNode.parentNode);
                    let statusCol = event.target.parentNode.parentNode.lastChild;
                    if (validateInputFields(data, 'product')) {
                        // Rows with an id are updates; the footer row creates.
                        if (isset(data['id'])) {
                            Purchases.updatePurchases(data).then(function (result) {
                                showToast('Purchases entry updated successfully')
                                statusCol.childNodes[1].classList.remove('hide');
                                statusCol.childNodes[2].classList.add('hide');
                            })
                        } else {
                            if (HtmlHelper.isValidCreatedDate(moment, created)) {
                                Purchases.createPurchases(data, created).then(function (result) {
                                    // Reload the screen to show the new row.
                                    const section = document.getElementById('purchases-accounts1')
                                    if (section) section.click()
                                    showToast('Purchases entry added successfully')
                                })
                            }
                        }
                    }
                }
            });
        });
    }
    /** Populates the product/supplier/terminal select2 fields from accounts. */
    static getSelect2Option() {
        let table = document.getElementById('purchases_list')
        let productID = PurchasesController.getS().querySelectorAll('select[name=product]')
        let supplierID = PurchasesController.getS().querySelectorAll('select[name=supplier]')
        let terminalID = PurchasesController.getS().querySelectorAll('select[name=terminal]')
        Accounts.getAccounts([2, 5, 7]).then(function (results) {
            let products = [], supplier = [], terminal = [];
            Array.prototype.forEach.call(results, (row) => {
                // Products
                if (parseInt(row.type) == 5) {
                    products.push(HtmlHelper.getObjectOfPill(row));
                }
                // Supplier
                if (parseInt(row.type) == 2) {
                    supplier.push(HtmlHelper.getObjectOfPill(row));
                }
                // Terminal
                if (parseInt(row.type) == 7) {
                    terminal.push(HtmlHelper.getObjectOfPill(row));
                }
            });
            // Init Select 2
            HtmlHelper.initSelect2Field(productID, products)
            HtmlHelper.initSelect2Field(supplierID, supplier)
            HtmlHelper.initSelect2Field(terminalID, terminal)
            // Update row status
            HtmlHelper.updateRowStatus(table)
            // Delete row
            HtmlHelper.deleteTableRow(Accounts, table, 'purchases')
            // Hide loader
            hideLoader()
        })
    }
    /** Keeps the read-only Amount cell = round(quantity * price) live. */
    static updateTotalAmount() {
        let quantity = PurchasesController.getS().querySelectorAll('input[name=quantity]')
        let rate = PurchasesController.getS().querySelectorAll('input[name=price]')
        // Add event on change of quantity
        Array.prototype.forEach.call(quantity, (elem) => {
            elem.addEventListener('change', function (event) {
                let price = event.target.parentNode.nextSibling.childNodes[0];
                let total = event.target.parentNode.nextSibling.nextSibling.childNodes[0];
                total.value = Math.round(event.target.value * price.value)
            });
            elem.addEventListener('input', function (event) {
                let price = event.target.parentNode.nextSibling.childNodes[0];
                let total = event.target.parentNode.nextSibling.nextSibling.childNodes[0];
                total.value = Math.round(event.target.value * price.value)
            });
        });
        // Add event on change of price
        Array.prototype.forEach.call(rate, (elem) => {
            elem.addEventListener('change', function (event) {
                let qty = event.target.parentNode.previousSibling.childNodes[0];
                let total = event.target.parentNode.nextSibling.childNodes[0];
                total.value = Math.round(event.target.value * qty.value)
            });
            elem.addEventListener('input', function (event) {
                let qty = event.target.parentNode.previousSibling.childNodes[0];
                let total = event.target.parentNode.nextSibling.childNodes[0];
                // BUG FIX: round like the three sibling handlers above so the
                // total stays consistent while typing a price.
                total.value = Math.round(event.target.value * qty.value)
            });
        });
    }
    /** A user may edit unless they lack admin role and daybook write access. */
    static isDisabled(user) {
        let disabled = true;
        if (isAdmin(user.role)) {
            disabled = false
        } else if (isset(user.permissions.daybook) && user.permissions.daybook.write) {
            disabled = false
        }
        return disabled;
    }
}
module.exports = PurchasesController
#!/bin/bash
# Exit on error
set -e
##########################
### ###
### write the namelist ###
### ###
##########################
########### version
# Resolve a version string for the namelist: the git tag when available,
# vx.x.x when git finds no tag, "latest" when git is not installed.
# BUG FIX: under `set -e`, a failing bare `command -v git` (or a failing
# `ver=\`git describe ...\`` assignment) aborted the script before the
# `$?` checks ran, so the fallbacks were unreachable. Testing the commands
# directly in the `if` conditions keeps set -e from firing.
if command -v git >/dev/null 2>&1; then
    if ! ver=$(git describe --tags 2>&1); then
        echo "Ignore possible error, git just doesn't find a version tag - using default value"
        ver=vx.x.x
    fi
else
    ver=latest
fi
########### end version
# Write the model NAMELIST for this design case. The heredoc delimiter is
# unquoted, so every ${var:-default} below is expanded by the shell at
# write time: export the variable beforehand to override the shown default.
cat > ${dir}/NAMELIST <<EOF
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
!!! DESIGN VERSION ${design}
!!! design variables case ${case}
!!! q_inv == ${q_inv} [g/kg]
!!! tpot_inv == ${tpot_inv} [K]
!!! clw_max == ${clw_max} [g/kg]
!!! tpot_pbl == ${tpot_pbl} [K]
!!! pblh == ${pblh} [m]
!!! num_pbl == ${num_pbl} [#/mg]
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
&version
ver="${ver}"
/
&model
level = ${level:-3}
nxp = ${nxp:-204} ! Number of points in x direction
nyp = ${nyp:-204} ! Number of points in y direction
nzp = ${nzp:-200} ! Number of vertical levels
deltax = ${deltax:-50.} ! Grid spacing in x
deltay = ${deltay:-50.} ! Grid spacing in y
deltaz = ${deltaz:-20.} ! Grid spacing in the vertical
nxpart = ${nxpart:-.true.}
dzmax = ${dzmax:-3500.} ! Height above which start stretching vertical grid
dzrat = ${dzrat:-1.0} ! Factor for vertical grid stretching
dtlong = ${dtlong:-2.} ! Max. timestep
distim = ${distim:-100.} ! Timescale for the dissipation in sponge layer
timmax = ${timmax:-12600.}
runtype = ${runtype:-'"INITIAL"'} ! INITIAL or HISTORY (restart) run
CCN = ${CCN:-600.e6}
corflg = ${corflg:-.false.} ! Apply coriolis force
prndtl = ${prndtl:--0.3333333}
filprf = ${filprf:-"'emul'"} ! Output filename profile
hfilin = ${hfilin:-"'emul.rst'"}
ssam_intvl = ${ssam_intvl:-300.} ! Interval for statistical output
savg_intvl = ${savg_intvl:-300.} ! Averaging interval for stat output
frqanl = ${frqanl:-5400.} ! Interval for full domain output
frqhis = ${frqhis:-30000.}
lbinanl = ${lbinanl:-.false.} ! Write binned microphysical output (with level >= 4)
salsa_b_bins = ${salsa_b_bins:-.FALSE.} ! ?
mcflg = ${mcflg:-.FALSE.} ! Do mass conservation statistics
sed_aero%switch = ${sed_aero:-.FALSE.} ! Calculate sedimentation of aerosol particles
sed_cloud%switch = ${sed_cloud:-.TRUE.} ! - '' - cloud droplets
sed_cloud%delay = ${Tspinup:-5400.}
sed_precp%switch = ${sed_precp:-.TRUE.} ! precipitation
sed_precp%delay = ${Tspinup:-5400.}
sed_ice%switch = ${sed_ice:-.FALSE.} ! ice particles
sed_snow%switch = ${sed_snow:-.FALSE.} ! snow flakes/precipitating ice
bulk_autoc%switch = .TRUE.
bulk_autoc%delay = ${Tspinup:-5400.} ! Autoconversion switch for level = 1-3
itsflg = ${itsflg:-1} ! Flag for temperature type in input sounding
lnudging = ${lnudging:-.TRUE.} ! Master switch for nudging scheme
lemission = ${lemission:-.FALSE.} ! Master switch for aerosol emissions
iradtyp = ${iradtyp:-3} ! Radiation/large scale forcing
strtim = ${strtim:-180.5} ! Start time
cntlat = ${cntlat:-60.} ! latitude
case_name = ${case_name:-"'default'"} ! Case name for large-scale forcing schemes
div = ${div:-1.5e-6} ! Large-scale divergence
dthcon = ${dthcon:-0.} ! heat flux 18.4613 ! Sensible heat flux
drtcon = ${drtcon:-0.} ! latent 84.8921 ! Latent heat flux
! isfctyp = ${isfctyp:-2}
sst = ${sst:-271.35} ! Surface temperature
zrough = ${zrough:-0.01} ! Roughness length
ubmin = ${ubmin:--0.25}
th00 = ${th00:-289.} ! Reference temperature
umean = ${umean:-10.}
vmean = ${vmean:-0.}
/
! With iradtyp = 3
&radiation
radsounding = ${radsounding:-"'datafiles/kmls.lay'"}
RadPrecipBins = ${RadPrecipBins:-1}
sfc_albedo = ${sfc_albedo:-0.05}
zenithFlag = ${zenithFlag:-.TRUE.}
/
! With lnudging = .TRUE.
&nudge
nudge_time = ${nudge_time:-12600.} ! Overall time for nudging from the start of the simulation
ndg_theta%nudgetype = 1
ndg_theta%tau_type = 2 ! Type of relaxation time (0:constant, 1-3: increasing)
ndg_theta%tau_min = 300. ! Min relaxation time (with tau_type=1-3 and constant tau)
ndg_theta%tau_max = 3600. ! Max relaxation time (with tau_type=1-3)
ndg_theta%tau_max_continue = .FALSE.
/
&salsa
lscoag%switch = ${lscoag:-.TRUE.} ! Master coagulation switch
lscoag%delay = ${Tspinup:-5400.}
lscnd%switch = ${lscnd:-.TRUE.} ! Master condensation switch
lsauto%switch = ${lsauto:-.TRUE.} ! Master autoconversion switch
lsauto%delay = ${Tspinup:-5400.}
lsactiv%switch = ${lsactiv:-.TRUE.} ! Master cloud activation switch
lsicenucl%switch = ${lsicenucl:-.FALSE.} ! Switch for ice nucleation
lsicenucl%delay = ${Tspinup:-5400.}
lsautosnow%switch = ${lsautosnow:-.FALSE.} ! Master snow autoconversion switch
lsicemelt%switch = ${lsicemelt:-.FALSE.} ! Switch for ice'n' snow melting
lscgcc = ${lscgcc:-.TRUE.} ! Self-collection of cloud droplets
lscgpp = ${lscgpp:-.TRUE.} ! Self-collection of rain drops
lscgpc = ${lscgpc:-.TRUE.} ! Rain collection of cloud droplets
lscgaa = ${lscgaa:-.FALSE.} ! Aerosol coagulation
lscgca = ${lscgca:-.TRUE.} ! Cloud collection of aerosols
lscgpa = ${lscgpa:-.TRUE.} ! Rain collection of aerosols
lscgia = ${lscgia:-.TRUE.} ! Ice collection of aerosols
lscgic = ${lscgic:-.TRUE.} ! Ice collection of cloud droplets
lscgii = ${lscgii:-.TRUE.} ! Self-collection of ice
lscgip = ${lscgip:-.TRUE.} ! Ice collection of rain drops
lscgsa = ${lscgsa:-.TRUE.} ! Snow collection of aerosols
lscgsc = ${lscgsc:-.TRUE.} ! Snow collection of cloud droplets
lscgsi = ${lscgsi:-.TRUE.} ! Snow collection of ice particles
lscgsp = ${lscgsp:-.TRUE.} ! Snow collection of rain drops
lscgss = ${lscgss:-.TRUE.} ! Self-collection of snow
lscndgas = ${lscndgas:-.FALSE.} ! --Aerosol precursor gas codensation
lscndh2oae = ${lscndh2oae:-.TRUE.} ! --Condensation of water on aerosols (if FALSE, equilibrium assumed)
lscndh2ocl = ${lscndh2ocl:-.TRUE.} ! --Condensation of water on cloud droplets (and drizzle)
lscndh2oic = ${lscndh2oic:-.TRUE.} ! --Condensation of water on ice particles
lsactbase = ${lsactbase:-.FALSE.} ! --Switch for parameterized cloud base activation
lsactintst = ${lsactintst:-.TRUE.} ! --Switch for interstitial activation based on host model Smax
lscheckarrays = ${lscheckarray:-.FALSE.}
lsfreeRH%switch = .TRUE.
lsfreeRH%delay = ${Tspinup:-5400.}
rhlim = ${rhlim:-1.001} ! RH limit for SALSA during initialization and spinup
isdtyp = ${isdtyp:-0}
nspec = ${nspec:-1}
listspec = ${listspec:-"'SO4','','','','','',''"} !!!! "'SO4','DU','OC','','','',''"
volDistA = ${volDistA:-1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0}
volDistB = ${volDistB:-0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0}
nf2a = ${nf2a:-1.0}
sigmag = ${sigmag:-1.3, 2.0, 2.0, 2.0, 2.0, 2.0, 2.0} ! Stdev for initial aerosol size distribution for isdtyp == 0 (uniform)
dpg = ${dpg:-0.1, 1.0, 0.2, 0.2, 0.2, 0.2, 0.2} ! Mode mean diameters in micrometers
n = ${n:-46.2502241367474, 0. , 0., 0., 0., 0., 0.} ! Mode number concentrations in #/mg
/
EOF
# This script's only job is writing the namelist; stop here.
exit
|
#!/usr/bin/env sh
# generated from catkin/python/catkin/environment_cache.py
# based on a snapshot of the environment before and after calling the setup script
# it emulates the modifications of the setup script without recurring computations
# NOTE: machine-generated cache — do not edit by hand; rerun the catkin
# setup script to regenerate it.
# new environment variables
# modified environment variables
export CMAKE_PREFIX_PATH="/home/rrqq/TableRearrangement/ur_controllers/devel:$CMAKE_PREFIX_PATH"
export PWD='/home/rrqq/TableRearrangement/ur_controllers/build'
export ROSLISP_PACKAGE_DIRECTORIES="/home/rrqq/TableRearrangement/ur_controllers/devel/share/common-lisp:$ROSLISP_PACKAGE_DIRECTORIES"
export ROS_PACKAGE_PATH="/home/rrqq/TableRearrangement/ur_controllers/src:$ROS_PACKAGE_PATH"
def encrypt(s):
    """Caesar-shift every alphabetic character in *s* forward by one.

    Fixes the original edge-case defect: 'z' and 'Z' used to shift to
    the non-letter characters '{' and '[' instead of wrapping around.
    Non-alphabetic characters pass through unchanged.

    :param s: input string
    :return: shifted string of the same length
    """
    result = []
    for c in s:
        if c.isalpha():
            # Wrap the alphabet ends so output stays alphabetic.
            if c == 'z':
                result.append('a')
            elif c == 'Z':
                result.append('A')
            else:
                result.append(chr(ord(c) + 1))
        else:
            result.append(c)
    # join() avoids the quadratic cost of repeated string concatenation
    return ''.join(result)
def substring_2chars(s):
    """Return the length of the longest substring of *s* that contains
    at most two distinct characters.

    Fixes the original ad-hoc scan, which reset the running length to 1
    whenever a third distinct character appeared (losing the trailing
    run of the previous character) and mishandled the first two chars:
    it returned 1 for "ab" (correct: 2) and 2 for "aab" (correct: 3).
    Standard O(n) sliding-window implementation.

    :param s: input string (may be empty)
    :return: length of the longest window with <= 2 distinct chars
    """
    counts = {}   # char -> occurrences inside the current window
    left = 0      # left edge of the window
    best = 0
    for right, c in enumerate(s):
        counts[c] = counts.get(c, 0) + 1
        # Shrink from the left until at most two distinct chars remain.
        while len(counts) > 2:
            lc = s[left]
            counts[lc] -= 1
            if counts[lc] == 0:
                del counts[lc]
            left += 1
        best = max(best, right - left + 1)
    return best
// reponame: kugg/microfun
var mongo = require('mongodb');
console.log(mongo);
var Server = mongo.Server,
    Db = mongo.Db;
var server = new Server('localhost', 27017, {auto_reconnect: true});
// FIX: declare with `var` so the handle is module-scoped instead of an
// implicit global (implicit globals throw under strict mode). All route
// handlers below close over this same binding.
var db = new Db('winedb', server);
// Open the connection once at module load; seed the collection with
// sample data when it does not exist yet ({strict:true} makes the
// lookup fail for a missing collection).
db.open(function(err, db) {
    if(!err) {
        console.log("Connected to 'winedb' database");
        db.collection('wines', {strict:true}, function(err, collection) {
            if (err) {
                console.log("The 'wines' collection doesn't exist. Creating it with sample data...");
                populateDB();
            }
        });
    } else {
        console.log("Error opening database");
    }
});
exports.findById = function(req, res) {
var id = req.params.id;
console.log('Retrieving wine: ' + id);
db.collection('wines', function(err, collection) {
q = {'_id': new mongo.ObjectID(id)}
collection.find().limit(1).next(function(err, item) {
res.send(item);
});
});
};
exports.findAll = function(req, res) {
db.collection('wines', function(err, collection) {
collection.find().toArray(function(err, items) {
res.send(items);
});
});
};
exports.addWine = function(req, res) {
var wine = req.body;
console.log('Adding wine: ' + JSON.stringify(wine));
db.collection('wines', function(err, collection) {
collection.insert(wine, {safe:true}, function(err, result) {
if (err) {
res.send({'error':'An error has occurred'});
} else {
console.log('Success: ' + JSON.stringify(result[0]));
res.send(result[0]);
}
});
});
}
exports.updateWine = function(req, res) {
var id = req.params.id;
var wine = req.body;
console.log('Updating wine: ' + id);
console.log(JSON.stringify(wine));
db.collection('wines', function(err, collection) {
collection.update({'_id': new mongo.ObjectID(id)}, wine, {safe:true}, function(err, result) {
if (err) {
console.log('Error updating wine: ' + err);
res.send({'error':'An error has occurred'});
} else {
console.log('' + result + ' document(s) updated');
res.send(wine);
}
});
});
}
exports.deleteWine = function(req, res) {
var id = req.params.id;
console.log('Deleting wine: ' + id);
db.collection('wines', function(err, collection) {
collection.remove({'_id': new mongo.ObjectID(id)}, {safe:true}, function(err, result) {
if (err) {
res.send({'error':'An error has occurred - ' + err});
} else {
console.log('' + result + ' document(s) deleted');
res.send(req.body);
}
});
});
}
/*--------------------------------------------------------------------------------------------------------------------*/
// Populate database with sample data -- Only used once: the first time the application is started.
// You'd typically not find this code in a real-life app, since the database would already exist.
// Seed the 'wines' collection with two sample documents. Invoked only
// the first time the app starts, when the collection is missing.
var populateDB = function() {
    var saintCosme = {
        name: "<NAME>",
        year: "2009",
        grapes: "Grenache / Syrah",
        country: "France",
        region: "Southern Rhone",
        description: "The aromas of fruit and spice...",
        picture: "saint_cosme.jpg"
    };
    var lanRioja = {
        name: "<NAME>",
        year: "2006",
        grapes: "Tempranillo",
        country: "Spain",
        region: "Rioja",
        description: "A resurgence of interest in boutique vineyards...",
        picture: "lan_rioja.jpg"
    };
    var wines = [saintCosme, lanRioja];
    db.collection('wines', function(err, collection) {
        collection.insert(wines, {safe:true}, function(err, result) {});
    });
};
|
import React, { Component } from 'react';
class App extends Component {
constructor(props) {
super(props);
this.state = {
data: [],
query: ''
};
}
componentDidMount() {
fetch('https://my-api/data')
.then(res => res.json())
.then(data => this.setState({ data }));
}
handleInputChange( event ) {
const query = event.target.value;
this.setState( { query } );
}
render() {
return (
<div>
<input
type="text"
value={ this.state.query }
onChange={ this.handleInputChange.bind(this) }
/>
<Display
data={ this.state.data }
query={ this.state.query }
/>
</div>
);
}
}
export default App; |
// gh_stars: 0
package com.mrh0.createaddition.item.hammer;
/**
 * Placeholder type for the "discharged" hammer item.
 *
 * <p>NOTE(review): the class body is empty in this chunk — presumably the
 * item's behavior is registered elsewhere or added later; confirm before
 * relying on this type for anything beyond identity.
 */
public class DischargedHammer {
}
|
/* gh_stars: 0 */
#include <linux/kernel.h>
#include <linux/mutex.h>
#include <linux/init.h>
#include <linux/device.h>
#include <linux/module.h>
#include <linux/interrupt.h>
#include <linux/platform_device.h>
#include <linux/mfd/core.h>
#include <linux/delay.h>
#include <linux/slab.h>
#include <linux/i2c.h>
#include <linux/arisc/arisc.h>
#include "axp-core.h"
/* suspend state machine: AXP_NOT_SUSPEND / AXP_WAS_SUSPEND / AXP_SUSPEND_WITH_IRQ */
int axp_suspend_flag = AXP_NOT_SUSPEND;
/* per-PMU platform callback table, indexed by PMU number */
struct axp_platform_ops ap_ops[AXP_ONLINE_SUM];
/* "compatible" string of each online PMU (filled by axp_dt_parse) */
const char *axp_name[AXP_ONLINE_SUM];
/* all registered axp devices; additions guarded by axp_list_lock */
static LIST_HEAD(axp_dev_list);
static DEFINE_SPINLOCK(axp_list_lock);
static int axp_dev_register_count;
/* bottom-half work item for the PMU interrupt (see axp_irq) */
struct work_struct axp_irq_work;
/*
 * Install the platform callback set for PMU @pmu_num in the global
 * ap_ops table.  Fields are copied individually on purpose: the name
 * arrays inside struct axp_platform_ops are deliberately left alone.
 */
void axp_platform_ops_set(int pmu_num, struct axp_platform_ops *ops)
{
	ap_ops[pmu_num].usb_det = ops->usb_det;
	ap_ops[pmu_num].usb_vbus_output = ops->usb_vbus_output;
	ap_ops[pmu_num].cfg_pmux_para = ops->cfg_pmux_para;
	ap_ops[pmu_num].get_pmu_name = ops->get_pmu_name;
	ap_ops[pmu_num].get_pmu_dev = ops->get_pmu_dev;
	ap_ops[pmu_num].pmu_regulator_save = ops->pmu_regulator_save;
	ap_ops[pmu_num].pmu_regulator_restore = ops->pmu_regulator_restore;
}
/*
 * USB detection state of the primary PMU (index 0).
 * FIX: guard against the callback not being installed yet — the old
 * code dereferenced ap_ops[0].usb_det unconditionally, unlike the
 * defensive style used by config_pmux_para()/get_pmu_cur_name().
 * Returns 0 ("nothing detected") when no callback is registered.
 */
s32 axp_usb_det(void)
{
	if (ap_ops[0].usb_det)
		return ap_ops[0].usb_det();
	return 0;
}
EXPORT_SYMBOL_GPL(axp_usb_det);
s32 axp_usb_vbus_output(int high)
{
return ap_ops[0].usb_vbus_output(high);
}
EXPORT_SYMBOL_GPL(axp_usb_vbus_output);
/*
 * Forward suspend/resume pmux configuration to PMU @num's callback.
 * Returns -EINVAL for an out-of-range index or a missing callback.
 */
int config_pmux_para(int num, struct aw_pm_info *api, int *pmu_id)
{
	if (num >= AXP_ONLINE_SUM)
		return -EINVAL;
	if (ap_ops[num].cfg_pmux_para)
		return ap_ops[num].cfg_pmux_para(num, api, pmu_id);
	else
		return -EINVAL;
}
EXPORT_SYMBOL_GPL(config_pmux_para);
/* Name of PMU @pmu_num via its callback, or NULL if not registered. */
const char *get_pmu_cur_name(int pmu_num)
{
	if (ap_ops[pmu_num].get_pmu_name)
		return ap_ops[pmu_num].get_pmu_name();
	else
		return NULL;
}
EXPORT_SYMBOL_GPL(get_pmu_cur_name);
/* Device handle of PMU @pmu_num via its callback, or NULL if not registered. */
struct axp_dev *get_pmu_cur_dev(int pmu_num)
{
	if (ap_ops[pmu_num].get_pmu_dev)
		return ap_ops[pmu_num].get_pmu_dev();
	else
		return NULL;
}
EXPORT_SYMBOL_GPL(get_pmu_cur_dev);
/* Save primary-PMU regulator state before suspend (no-op if no callback). */
int axp_mem_save(void)
{
	if (ap_ops[0].pmu_regulator_save)
		return ap_ops[0].pmu_regulator_save();
	return 0;
}
EXPORT_SYMBOL_GPL(axp_mem_save);
/* Restore primary-PMU regulator state after resume (no-op if no callback). */
void axp_mem_restore(void)
{
	/* returning a void expression from a void function is valid C */
	if (ap_ops[0].pmu_regulator_restore)
		return ap_ops[0].pmu_regulator_restore();
}
EXPORT_SYMBOL_GPL(axp_mem_restore);
/*
 * Scan device-tree nodes "pmu0".."pmuN" and return the index of the
 * last node whose "compatible" string matches an entry of @ids,
 * or -EINVAL when nothing matches.
 *
 * FIXES: the error path printed @prop_name, which is still NULL when
 * of_property_read_string() fails — print the node name instead; and
 * sprintf() is replaced with snprintf() so node_name can never
 * overflow if AXP_ONLINE_SUM grows.
 */
int axp_get_pmu_num(const struct of_device_id *ids, int size)
{
	struct device_node *np;
	int i, j, pmu_num = -EINVAL;
	char node_name[8];
	const char *prop_name = NULL;

	for (i = 0; i < AXP_ONLINE_SUM; i++) {
		snprintf(node_name, sizeof(node_name), "pmu%d", i);
		np = of_find_node_by_type(NULL, node_name);
		if (NULL == np) {
			/* at least "pmu0" must exist in the DT */
			BUG_ON(i == 0);
			break;
		}
		if (of_property_read_string(np, "compatible",
				&prop_name)) {
			pr_err("%s: get compatible of %s failed\n",
					__func__, node_name);
			break;
		}
		/* inner break only ends the ids scan; outer loop keeps
		 * going, so the highest matching pmu index wins */
		for (j = 0; j < size; j++) {
			if (!strcmp(prop_name, ids[j].compatible)) {
				pmu_num = i;
				break;
			}
		}
	}

	return pmu_num;
}
/*
 * Pick the of_compatible strings for this PMU's mfd cells.
 *
 * First locate an index @j whose powerkey/regulator/charger/gpio name
 * contains the current PMU's compatible string; then stamp that index's
 * names onto the matching cells.  Note @j is intentionally read after
 * the search loop — it is the found index.
 * Returns 0 on success, -EINVAL when no name set matches.
 */
int axp_mfd_cell_name_init(struct axp_platform_ops *ops, int count, int pmu_num,
		int size, struct mfd_cell *cells)
{
	int i, j, find = 0;
	for (j = 0; j < count; j++) {
		if ((ops->powerkey_name[j] != NULL)
			&& (strstr(ops->powerkey_name[j],
				axp_name[pmu_num]) != NULL)) {
			find = 1;
			break;
		}
		if ((ops->regulator_name[j] != NULL)
			&& (strstr(ops->regulator_name[j],
				axp_name[pmu_num]) != NULL)) {
			find = 1;
			break;
		}
		if ((ops->charger_name[j] != NULL)
			&& (strstr(ops->charger_name[j],
				axp_name[pmu_num]) != NULL)) {
			find = 1;
			break;
		}
		if ((ops->gpio_name[j] != NULL)
			&& (strstr(ops->gpio_name[j],
				axp_name[pmu_num]) != NULL)) {
			find = 1;
			break;
		}
	}
	if (find == 0) {
		pr_err("%s no axp mfd cell find\n", __func__);
		return -EINVAL;
	}
	/* assign the j-th name set to each cell by substring of its name */
	for (i = 0; i < size; i++) {
		if (strstr(cells[i].name, "powerkey") != NULL)
			cells[i].of_compatible = ops->powerkey_name[j];
		else if (strstr(cells[i].name, "regulator") != NULL)
			cells[i].of_compatible = ops->regulator_name[j];
		else if (strstr(cells[i].name, "charger") != NULL)
			cells[i].of_compatible = ops->charger_name[j];
		else if (strstr(cells[i].name, "gpio") != NULL)
			cells[i].of_compatible = ops->gpio_name[j];
	}
	return 0;
}
#ifdef CONFIG_AXP_TWI_USED
/* Read one byte from @reg over SMBus; 0 on success, negative errno on error. */
static s32 __axp_read_i2c(struct i2c_client *client, u32 reg, u8 *val)
{
	s32 ret;
	ret = i2c_smbus_read_byte_data(client, reg);
	if (ret < 0) {
		dev_err(&client->dev, "failed reading at 0x%02x\n", reg);
		return ret;
	}
	*val = (u8)ret;
	return 0;
}
/* Read @len consecutive bytes starting at @reg into @val over SMBus. */
static s32 __axp_reads_i2c(struct i2c_client *client,
			int reg, int len, u8 *val)
{
	s32 ret;
	ret = i2c_smbus_read_i2c_block_data(client, reg, len, val);
	if (ret < 0) {
		dev_err(&client->dev, "failed reading from 0x%02x\n", reg);
		return ret;
	}
	return 0;
}
/* Write one byte @val to @reg over SMBus. */
static s32 __axp_write_i2c(struct i2c_client *client, int reg, u8 val)
{
	s32 ret;
	/* axp_reg_debug(reg, 1, &val); */
	ret = i2c_smbus_write_byte_data(client, reg, val);
	if (ret < 0) {
		dev_err(&client->dev, "failed writing 0x%02x to 0x%02x\n",
			val, reg);
		return ret;
	}
	return 0;
}
/* Block-write @len bytes at @reg over SMBus.
 * NOTE(review): callers pass an interleaved reg/val buffer (see
 * _axp_writes) — confirm the device expects that layout. */
static s32 __axp_writes_i2c(struct i2c_client *client,
			int reg, int len, u8 *val)
{
	s32 ret;
	/* axp_reg_debug(reg, len, val); */
	ret = i2c_smbus_write_i2c_block_data(client, reg, len, val);
	if (ret < 0) {
		dev_err(&client->dev, "failed writings to 0x%02x\n", reg);
		return ret;
	}
	return 0;
}
/*
 * With CONFIG_AXP_TWI_USED the arisc RSB/TWI transports are unused:
 * these stubs succeed unconditionally so the dispatchers below still
 * link regardless of the configured transport.
 */
static inline s32 __axp_read_arisc_rsb(char devaddr, int reg,
				u8 *val, bool syncflag)
{
	return 0;
}
static inline s32 __axp_reads_arisc_rsb(char devaddr, int reg,
				int len, u8 *val, bool syncflag)
{
	return 0;
}
static inline s32 __axp_write_arisc_rsb(char devaddr, int reg,
				u8 val, bool syncflag)
{
	return 0;
}
static inline s32 __axp_writes_arisc_rsb(char devaddr, int reg,
				int len, u8 *val, bool syncflag)
{
	return 0;
}
static inline s32 __axp_read_arisc_twi(int reg, u8 *val, bool syncflag)
{
	return 0;
}
static inline s32 __axp_reads_arisc_twi(int reg, int len,
				u8 *val, bool syncflag)
{
	return 0;
}
static inline s32 __axp_write_arisc_twi(int reg, u8 val, bool syncflag)
{
	return 0;
}
static inline s32 __axp_writes_arisc_twi(int reg, int len,
				u8 *val, bool syncflag)
{
	return 0;
}
#else
/*
 * Without CONFIG_AXP_TWI_USED the i2c transport is unused: stub it out
 * (always "success") so the dispatchers compile for the arisc path.
 */
static inline s32 __axp_read_i2c(struct i2c_client *client,
				u32 reg, u8 *val)
{
	return 0;
}
static inline s32 __axp_reads_i2c(struct i2c_client *client,
				int reg, int len, u8 *val)
{
	return 0;
}
static inline s32 __axp_write_i2c(struct i2c_client *client,
				int reg, u8 val)
{
	return 0;
}
static inline s32 __axp_writes_i2c(struct i2c_client *client,
				int reg, int len, u8 *val)
{
	return 0;
}
/*
 * Read one byte from @reg on RSB device @devaddr via the arisc
 * coprocessor mailbox.  @syncflag selects hard- vs soft-syn messaging.
 * Returns 0 on success, propagates the arisc error otherwise.
 */
static s32 __axp_read_arisc_rsb(char devaddr, int reg, u8 *val, bool syncflag)
{
	s32 ret;
	u8 addr = (u8)reg;
	u8 data = 0;
	arisc_rsb_block_cfg_t rsb_data;
	u32 data_temp;
	rsb_data.len = 1;
	rsb_data.datatype = RSB_DATA_TYPE_BYTE;
	if (syncflag)
		rsb_data.msgattr = ARISC_MESSAGE_ATTR_HARDSYN;
	else
		rsb_data.msgattr = ARISC_MESSAGE_ATTR_SOFTSYN;
	rsb_data.devaddr = devaddr;
	rsb_data.regaddr = &addr;
	/* arisc returns 32-bit slots; narrowed to u8 below */
	rsb_data.data = &data_temp;
	/* write axp registers */
	ret = arisc_rsb_read_block_data(&rsb_data);
	if (ret != 0) {
		pr_err("failed read to 0x%02x\n", reg);
		return ret;
	}
	data = (u8)data_temp;
	*val = data;
	return 0;
}
/*
 * Read @len consecutive registers starting at @reg via arisc RSB,
 * in chunks of at most AXP_TRANS_BYTE_MAX registers per mailbox call.
 * The arisc interface returns one u32 slot per byte; results are
 * narrowed and copied into @val.
 */
static s32 __axp_reads_arisc_rsb(char devaddr, int reg,
				int len, u8 *val, bool syncflag)
{
	s32 ret, i, rd_len;
	u8 addr[AXP_TRANS_BYTE_MAX];
	u8 data[AXP_TRANS_BYTE_MAX];
	u8 *cur_data = val;
	arisc_rsb_block_cfg_t rsb_data;
	u32 data_temp[AXP_TRANS_BYTE_MAX];
	/* fetch first register address */
	while (len > 0) {
		rd_len = min(len, AXP_TRANS_BYTE_MAX);
		/* register addresses are consecutive from @reg */
		for (i = 0; i < rd_len; i++)
			addr[i] = reg++;
		rsb_data.len = rd_len;
		rsb_data.datatype = RSB_DATA_TYPE_BYTE;
		if (syncflag)
			rsb_data.msgattr = ARISC_MESSAGE_ATTR_HARDSYN;
		else
			rsb_data.msgattr = ARISC_MESSAGE_ATTR_SOFTSYN;
		rsb_data.devaddr = devaddr;
		rsb_data.regaddr = addr;
		rsb_data.data = data_temp;
		/* read axp registers */
		ret = arisc_rsb_read_block_data(&rsb_data);
		if (ret != 0) {
			pr_err("failed reads to 0x%02x\n", reg);
			return ret;
		}
		/* narrow each 32-bit slot to the byte value */
		for (i = 0; i < rd_len; i++)
			data[i] = (u8)data_temp[i];
		/* copy data to user buffer */
		memcpy(cur_data, data, rd_len);
		cur_data = cur_data + rd_len;
		/* process next time read */
		len -= rd_len;
	}
	return 0;
}
/* Write one byte @val to @reg on RSB device @devaddr via arisc. */
static s32 __axp_write_arisc_rsb(char devaddr, int reg, u8 val, bool syncflag)
{
	s32 ret;
	u8 addr = (u8)reg;
	arisc_rsb_block_cfg_t rsb_data;
	u32 data;
	/* axp_reg_debug(reg, 1, &val); */
	data = (unsigned int)val;
	rsb_data.len = 1;
	rsb_data.datatype = RSB_DATA_TYPE_BYTE;
	if (syncflag)
		rsb_data.msgattr = ARISC_MESSAGE_ATTR_HARDSYN;
	else
		rsb_data.msgattr = ARISC_MESSAGE_ATTR_SOFTSYN;
	rsb_data.devaddr = devaddr;
	rsb_data.regaddr = &addr;
	rsb_data.data = &data;
	/* write axp registers */
	ret = arisc_rsb_write_block_data(&rsb_data);
	if (ret != 0) {
		pr_err("failed writing to 0x%02x\n", reg);
		return ret;
	}
	return 0;
}
/*
 * Scatter-write via arisc RSB.  The input buffer layout is
 * [val0, reg1, val1, reg2, val2, ...]: the FIRST register address comes
 * from @reg, subsequent (reg, val) pairs are interleaved in @val, and
 * @len counts the raw bytes of that interleaved buffer.  Hence the
 * (len + 1) / 2 conversion to the number of register writes.
 */
static s32 __axp_writes_arisc_rsb(char devaddr, int reg,
				int len, u8 *val, bool syncflag)
{
	s32 ret = 0, i, first_flag, wr_len;
	u8 addr[AXP_TRANS_BYTE_MAX];
	u8 data[AXP_TRANS_BYTE_MAX];
	arisc_rsb_block_cfg_t rsb_data;
	u32 data_temp[AXP_TRANS_BYTE_MAX];
	/* axp_reg_debug(reg, len, val); */
	/* fetch first register address */
	first_flag = 1;
	addr[0] = (u8)reg;
	len = len + 1; /* + first reg addr */
	len = len >> 1; /* len = len / 2 */
	while (len > 0) {
		wr_len = min(len, AXP_TRANS_BYTE_MAX);
		for (i = 0; i < wr_len; i++) {
			if (first_flag) {
				/* skip the first reg addr */
				data[i] = *val++;
				first_flag = 0;
			} else {
				addr[i] = *val++;
				data[i] = *val++;
			}
		}
		/* widen each byte to the arisc 32-bit slot */
		for (i = 0; i < wr_len; i++)
			data_temp[i] = (unsigned int)data[i];
		rsb_data.len = wr_len;
		rsb_data.datatype = RSB_DATA_TYPE_BYTE;
		if (syncflag)
			rsb_data.msgattr = ARISC_MESSAGE_ATTR_HARDSYN;
		else
			rsb_data.msgattr = ARISC_MESSAGE_ATTR_SOFTSYN;
		rsb_data.devaddr = devaddr;
		rsb_data.regaddr = addr;
		rsb_data.data = data_temp;
		/* write axp registers */
		ret = arisc_rsb_write_block_data(&rsb_data);
		if (ret != 0) {
			pr_err("failed writings to 0x%02x\n", reg);
			return ret;
		}
		/* process next time write */
		len -= wr_len;
	}
	return 0;
}
/* Read one byte from @reg via the arisc TWI transport. */
static s32 __axp_read_arisc_twi(int reg, u8 *val, bool syncflag)
{
	s32 ret;
	u8 addr = (u8)reg;
	arisc_twi_block_cfg_t twi_data;
	u8 data = 0;
	if (syncflag)
		twi_data.msgattr = ARISC_MESSAGE_ATTR_HARDSYN;
	else
		twi_data.msgattr = ARISC_MESSAGE_ATTR_SOFTSYN;
	twi_data.len = 1;
	twi_data.addr = &addr;
	twi_data.data = &data;
	/* write axp registers */
	ret = arisc_twi_read_block_data(&twi_data);
	if (ret != 0) {
		pr_err("failed read to 0x%02x\n", reg);
		return ret;
	}
	*val = data;
	return 0;
}
/*
 * Read @len consecutive registers starting at @reg via arisc TWI,
 * chunked to at most TWI_TRANS_BYTE_MAX registers per mailbox call.
 */
static s32 __axp_reads_arisc_twi(int reg, int len, u8 *val, bool syncflag)
{
	arisc_twi_block_cfg_t twi_data;
	u8 addr[TWI_TRANS_BYTE_MAX] = {0};
	u8 data[TWI_TRANS_BYTE_MAX] = {0};
	u8 *cur_data = val;
	s32 ret, i, rd_len;
	/* fetch first register address */
	while (len > 0) {
		rd_len = min(len, TWI_TRANS_BYTE_MAX);
		for (i = 0; i < rd_len; i++)
			addr[i] = reg++;
		if (syncflag)
			twi_data.msgattr = ARISC_MESSAGE_ATTR_HARDSYN;
		else
			twi_data.msgattr = ARISC_MESSAGE_ATTR_SOFTSYN;
		twi_data.len = rd_len;
		twi_data.addr = addr;
		twi_data.data = data;
		/* write axp registers */
		ret = arisc_twi_read_block_data(&twi_data);
		if (ret != 0) {
			pr_err("failed read to 0x%02x\n", reg);
			return ret;
		}
		/* copy data to user buffer */
		memcpy(cur_data, data, rd_len);
		cur_data = cur_data + rd_len;
		/* process next time read */
		len -= rd_len;
	}
	return 0;
}
/* Write one byte @val to @reg via the arisc TWI transport. */
static s32 __axp_write_arisc_twi(int reg, u8 val, bool syncflag)
{
	s32 ret;
	u8 addr = (u8)reg;
	arisc_twi_block_cfg_t twi_data;
	u8 data = val;
	if (syncflag)
		twi_data.msgattr = ARISC_MESSAGE_ATTR_HARDSYN;
	else
		twi_data.msgattr = ARISC_MESSAGE_ATTR_SOFTSYN;
	twi_data.len = 1;
	twi_data.addr = &addr;
	twi_data.data = &data;
	/* write axp registers */
	ret = arisc_twi_write_block_data(&twi_data);
	if (ret != 0) {
		pr_err("failed writing to 0x%02x\n", reg);
		return ret;
	}
	return 0;
}
/*
 * Scatter-write via arisc TWI.  Input layout matches the RSB variant:
 * [val0, reg1, val1, ...] with the first register address in @reg and
 * @len counting raw buffer bytes, hence (len + 1) >> 1 register writes.
 *
 * FIX: chunk size was min(len_to_write, AXP_TRANS_BYTE_MAX) although
 * addr[]/data[] are sized TWI_TRANS_BYTE_MAX; if AXP_TRANS_BYTE_MAX is
 * larger this overruns both stack buffers.  Use TWI_TRANS_BYTE_MAX,
 * consistent with __axp_reads_arisc_twi().
 */
static s32 __axp_writes_arisc_twi(int reg, int len, u8 *val, bool syncflag)
{
	arisc_twi_block_cfg_t twi_data;
	int len_to_write = (len + 1) >> 1;
	u8 addr[TWI_TRANS_BYTE_MAX] = {0};
	u8 data[TWI_TRANS_BYTE_MAX] = {0};
	s32 ret, i, wr_len, first_flag = 1;
	addr[0] = (u8)reg;
	while (len_to_write > 0) {
		/* never exceed the stack buffer size */
		wr_len = min(len_to_write, TWI_TRANS_BYTE_MAX);
		for (i = 0; i < wr_len; i++) {
			if (first_flag) {
				/* skip the first reg addr */
				data[i] = *val++;
				first_flag = 0;
			} else {
				addr[i] = *val++;
				data[i] = *val++;
			}
		}
		if (syncflag)
			twi_data.msgattr = ARISC_MESSAGE_ATTR_HARDSYN;
		else
			twi_data.msgattr = ARISC_MESSAGE_ATTR_SOFTSYN;
		twi_data.len = wr_len;
		twi_data.addr = addr;
		twi_data.data = data;
		/* write axp registers */
		ret = arisc_twi_write_block_data(&twi_data);
		if (ret != 0) {
			pr_err("failed writing to 0x%02x\n", reg);
			return ret;
		}
		/* process next time write */
		len_to_write -= wr_len;
	}
	return 0;
}
#endif
/* Dispatch a single-byte write to the transport selected by map->type. */
static s32 _axp_write(struct axp_regmap *map, s32 reg, u8 val, bool sync)
{
	s32 ret = 0;
	if (map->type == AXP_REGMAP_I2C)
		ret = __axp_write_i2c(map->client, reg, val);
	else if (map->type == AXP_REGMAP_ARISC_RSB)
		ret = __axp_write_arisc_rsb(map->rsbaddr, reg, val, sync);
	else if (map->type == AXP_REGMAP_ARISC_TWI)
		ret = __axp_write_arisc_twi(reg, val, sync);
	return ret;
}
/*
 * Multi-register write.  Packs consecutive (reg, val) pairs into the
 * interleaved [val0, reg1, val1, ...] layout the transport helpers
 * expect, at most 15 registers per chunk (2*15-1 = 29 bytes fits the
 * 32-byte wr_val buffer).
 */
static s32 _axp_writes(struct axp_regmap *map, s32 reg,
				s32 len, u8 *val, bool sync)
{
	s32 ret = 0, i;
	s32 wr_len, rw_reg;
	u8 wr_val[32];
	while (len) {
		wr_len = min(len, 15);
		rw_reg = reg++;
		wr_val[0] = *val++;
		for (i = 1; i < wr_len; i++) {
			wr_val[i*2-1] = reg++;
			wr_val[i*2] = *val++;
		}
		if (map->type == AXP_REGMAP_I2C)
			ret = __axp_writes_i2c(map->client,
					rw_reg, 2*wr_len-1, wr_val);
		else if (map->type == AXP_REGMAP_ARISC_RSB)
			ret = __axp_writes_arisc_rsb(map->rsbaddr,
					rw_reg, 2*wr_len-1, wr_val, sync);
		else if (map->type == AXP_REGMAP_ARISC_TWI)
			ret = __axp_writes_arisc_twi(rw_reg,
					2*wr_len-1, wr_val, sync);
		if (ret)
			return ret;
		len -= wr_len;
	}
	return 0;
}
/* Dispatch a single-byte read to the transport selected by map->type. */
static s32 _axp_read(struct axp_regmap *map, s32 reg, u8 *val, bool sync)
{
	s32 ret = 0;
	if (map->type == AXP_REGMAP_I2C)
		ret = __axp_read_i2c(map->client, reg, val);
	else if (map->type == AXP_REGMAP_ARISC_RSB)
		ret = __axp_read_arisc_rsb(map->rsbaddr, reg, val, sync);
	else if (map->type == AXP_REGMAP_ARISC_TWI)
		ret = __axp_read_arisc_twi(reg, val, sync);
	return ret;
}
/* Dispatch a multi-byte read to the transport selected by map->type. */
static s32 _axp_reads(struct axp_regmap *map, s32 reg,
				s32 len, u8 *val, bool sync)
{
	s32 ret = 0;
	if (map->type == AXP_REGMAP_I2C)
		ret = __axp_reads_i2c(map->client, reg, len, val);
	else if (map->type == AXP_REGMAP_ARISC_RSB)
		ret = __axp_reads_arisc_rsb(map->rsbaddr, reg, len, val, sync);
	else if (map->type == AXP_REGMAP_ARISC_TWI)
		ret = __axp_reads_arisc_twi(reg, len, val, sync);
	return ret;
}
/* Mutex-serialized single-register write (soft-syn messaging). */
s32 axp_regmap_write(struct axp_regmap *map, s32 reg, u8 val)
{
	s32 ret = 0;
	mutex_lock(&map->lock);
	ret = _axp_write(map, reg, val, false);
	mutex_unlock(&map->lock);
	return ret;
}
EXPORT_SYMBOL_GPL(axp_regmap_write);
/* Mutex-serialized multi-register write (soft-syn messaging). */
s32 axp_regmap_writes(struct axp_regmap *map, s32 reg, s32 len, u8 *val)
{
	s32 ret = 0;
	mutex_lock(&map->lock);
	ret = _axp_writes(map, reg, len, val, false);
	mutex_unlock(&map->lock);
	return ret;
}
EXPORT_SYMBOL_GPL(axp_regmap_writes);
/* Single-register read; no lock taken (single transaction). */
s32 axp_regmap_read(struct axp_regmap *map, s32 reg, u8 *val)
{
	return _axp_read(map, reg, val, false);
}
EXPORT_SYMBOL_GPL(axp_regmap_read);
/* Multi-register read; no lock taken (single transaction). */
s32 axp_regmap_reads(struct axp_regmap *map, s32 reg, s32 len, u8 *val)
{
	return _axp_reads(map, reg, len, val, false);
}
EXPORT_SYMBOL_GPL(axp_regmap_reads);
/*
 * Read-modify-write helpers.  Each holds map->lock across the
 * read+write pair and skips the write when the register already has
 * the desired value (saves a bus transaction).
 */
s32 axp_regmap_set_bits(struct axp_regmap *map, s32 reg, u8 bit_mask)
{
	u8 reg_val;
	s32 ret = 0;
	mutex_lock(&map->lock);
	ret = _axp_read(map, reg, &reg_val, false);
	if (ret)
		goto out;
	if ((reg_val & bit_mask) != bit_mask) {
		reg_val |= bit_mask;
		ret = _axp_write(map, reg, reg_val, false);
	}
out:
	mutex_unlock(&map->lock);
	return ret;
}
EXPORT_SYMBOL_GPL(axp_regmap_set_bits);
/* Clear the bits of @bit_mask in @reg (locked read-modify-write). */
s32 axp_regmap_clr_bits(struct axp_regmap *map, s32 reg, u8 bit_mask)
{
	u8 reg_val;
	s32 ret = 0;
	mutex_lock(&map->lock);
	ret = _axp_read(map, reg, &reg_val, false);
	if (ret)
		goto out;
	if (reg_val & bit_mask) {
		reg_val &= ~bit_mask;
		ret = _axp_write(map, reg, reg_val, false);
	}
out:
	mutex_unlock(&map->lock);
	return ret;
}
EXPORT_SYMBOL_GPL(axp_regmap_clr_bits);
/* Replace the @mask-covered bits of @reg with @val (already shifted). */
s32 axp_regmap_update(struct axp_regmap *map, s32 reg, u8 val, u8 mask)
{
	u8 reg_val;
	s32 ret = 0;
	mutex_lock(&map->lock);
	ret = _axp_read(map, reg, &reg_val, false);
	if (ret)
		goto out;
	if ((reg_val & mask) != val) {
		reg_val = (reg_val & ~mask) | val;
		ret = _axp_write(map, reg, reg_val, false);
	}
out:
	mutex_unlock(&map->lock);
	return ret;
}
EXPORT_SYMBOL_GPL(axp_regmap_update);
/*
 * "Sync" variants of the bit helpers: use hard-syn messaging and,
 * on the arisc path (!CONFIG_AXP_TWI_USED), an irq-safe spinlock so
 * they are callable from atomic context; the i2c path (which can
 * sleep) keeps the mutex instead.
 */
s32 axp_regmap_set_bits_sync(struct axp_regmap *map, s32 reg, u8 bit_mask)
{
	u8 reg_val;
	s32 ret = 0;
#ifndef CONFIG_AXP_TWI_USED
	unsigned long irqflags;
	spin_lock_irqsave(&map->spinlock, irqflags);
#else
	mutex_lock(&map->lock);
#endif
	ret = _axp_read(map, reg, &reg_val, true);
	if (ret)
		goto out;
	if ((reg_val & bit_mask) != bit_mask) {
		reg_val |= bit_mask;
		ret = _axp_write(map, reg, reg_val, true);
	}
out:
#ifndef CONFIG_AXP_TWI_USED
	spin_unlock_irqrestore(&map->spinlock, irqflags);
#else
	mutex_unlock(&map->lock);
#endif
	return ret;
}
EXPORT_SYMBOL_GPL(axp_regmap_set_bits_sync);
/* Atomic-context-safe clear-bits; see axp_regmap_set_bits_sync. */
s32 axp_regmap_clr_bits_sync(struct axp_regmap *map, s32 reg, u8 bit_mask)
{
	u8 reg_val;
	s32 ret = 0;
#ifndef CONFIG_AXP_TWI_USED
	unsigned long irqflags;
	spin_lock_irqsave(&map->spinlock, irqflags);
#else
	mutex_lock(&map->lock);
#endif
	ret = _axp_read(map, reg, &reg_val, true);
	if (ret)
		goto out;
	if (reg_val & bit_mask) {
		reg_val &= ~bit_mask;
		ret = _axp_write(map, reg, reg_val, true);
	}
out:
#ifndef CONFIG_AXP_TWI_USED
	spin_unlock_irqrestore(&map->spinlock, irqflags);
#else
	mutex_unlock(&map->lock);
#endif
	return ret;
}
EXPORT_SYMBOL_GPL(axp_regmap_clr_bits_sync);
/* Atomic-context-safe masked update; see axp_regmap_set_bits_sync. */
s32 axp_regmap_update_sync(struct axp_regmap *map, s32 reg, u8 val, u8 mask)
{
	u8 reg_val;
	s32 ret = 0;
#ifndef CONFIG_AXP_TWI_USED
	unsigned long irqflags;
	spin_lock_irqsave(&map->spinlock, irqflags);
#else
	mutex_lock(&map->lock);
#endif
	ret = _axp_read(map, reg, &reg_val, true);
	if (ret)
		goto out;
	if ((reg_val & mask) != val) {
		reg_val = (reg_val & ~mask) | val;
		ret = _axp_write(map, reg, reg_val, true);
	}
out:
#ifndef CONFIG_AXP_TWI_USED
	spin_unlock_irqrestore(&map->spinlock, irqflags);
#else
	mutex_unlock(&map->lock);
#endif
	return ret;
}
EXPORT_SYMBOL_GPL(axp_regmap_update_sync);
/*
 * Regmap constructors, one per transport.  Memory is device-managed
 * (devm_kzalloc), so no matching free is needed.  Return NULL on
 * allocation failure.
 */
struct axp_regmap *axp_regmap_init_i2c(struct device *dev)
{
	struct axp_regmap *map = NULL;
	map = devm_kzalloc(dev, sizeof(*map), GFP_KERNEL);
	if (IS_ERR_OR_NULL(map)) {
		pr_err("%s: not enough memory!\n", __func__);
		return NULL;
	}
	map->type = AXP_REGMAP_I2C;
	map->client = to_i2c_client(dev);
	mutex_init(&map->lock);
	return map;
}
EXPORT_SYMBOL_GPL(axp_regmap_init_i2c);
/* arisc-RSB regmap; @addr is the RSB device address. */
struct axp_regmap *axp_regmap_init_arisc_rsb(struct device *dev, u8 addr)
{
	struct axp_regmap *map = NULL;
	map = devm_kzalloc(dev, sizeof(*map), GFP_KERNEL);
	if (IS_ERR_OR_NULL(map)) {
		pr_err("%s: not enough memory!\n", __func__);
		return NULL;
	}
	map->type = AXP_REGMAP_ARISC_RSB;
	map->rsbaddr = addr;
#ifndef CONFIG_AXP_TWI_USED
	/* spinlock only exists (and is used) on the arisc path */
	spin_lock_init(&map->spinlock);
#endif
	mutex_init(&map->lock);
	return map;
}
EXPORT_SYMBOL_GPL(axp_regmap_init_arisc_rsb);
/* arisc-TWI regmap. */
struct axp_regmap *axp_regmap_init_arisc_twi(struct device *dev)
{
	struct axp_regmap *map = NULL;
	map = devm_kzalloc(dev, sizeof(*map), GFP_KERNEL);
	if (IS_ERR_OR_NULL(map)) {
		pr_err("%s: not enough memory!\n", __func__);
		return NULL;
	}
	map->type = AXP_REGMAP_ARISC_TWI;
#ifndef CONFIG_AXP_TWI_USED
	spin_lock_init(&map->spinlock);
#endif
	mutex_init(&map->lock);
	return map;
}
EXPORT_SYMBOL_GPL(axp_regmap_init_arisc_twi);
/*
 * Work-context irq demultiplexer for one PMU: read all status
 * registers, mask with the enabled set, run the registered handlers,
 * then ack (write-1-to-clear) only the registers that had bits set.
 *
 * NOTE(review): reg_val is fixed at 8 bytes, so this assumes
 * chip->num_regs <= 8 (8 regs * 8 bits also matches the u64 irqs
 * accumulator) — confirm for every supported chip.
 */
static void __do_irq(int pmu_num, struct axp_irq_chip_data *irq_data)
{
	u64 irqs = 0;
	u8 reg_val[8];
	u32 i, j;
	void *idata;
	if (irq_data == NULL)
		return;
	axp_regmap_reads(irq_data->map, irq_data->chip->status_base,
				irq_data->chip->num_regs, reg_val);
	/* fold the per-register status bytes into one 64-bit word */
	for (i = 0; i < irq_data->chip->num_regs; i++)
		irqs |= (u64)reg_val[i] << (i * AXP_REG_WIDTH);
	irqs &= irq_data->irqs_enabled;
	if (irqs == 0)
		return;
	AXP_DEBUG(AXP_INT, pmu_num, "irqs enabled = 0x%llx\n",
				irq_data->irqs_enabled);
	AXP_DEBUG(AXP_INT, pmu_num, "irqs = 0x%llx\n", irqs);
	for_each_set_bit(j, (unsigned long *)&irqs, irq_data->num_irqs) {
		if (irq_data->irqs[j].handler) {
			idata = irq_data->irqs[j].data;
			irq_data->irqs[j].handler(j, idata);
		}
	}
	/* ack pending bits; the short delay paces back-to-back writes */
	for (i = 0; i < irq_data->chip->num_regs; i++) {
		if (reg_val[i] != 0) {
			axp_regmap_write(irq_data->map,
				irq_data->chip->status_base + i, reg_val[i]);
			udelay(30);
		}
	}
}
/*
 * Bottom half: service every registered PMU, then (NMI builds) re-arm
 * the NMI controller.  NOTE(review): axp_dev_list is walked without
 * axp_list_lock here — safe only if registration is complete before
 * interrupts are enabled; confirm.
 */
static void axp_irq_work_func(struct work_struct *work)
{
	struct axp_dev *adev;
	list_for_each_entry(adev, &axp_dev_list, list) {
		__do_irq(adev->pmu_num, adev->irq_data);
	}
#ifdef CONFIG_AXP_NMI_USED
	clear_nmi_status();
	enable_nmi();
#endif
}
/*
 * Hard-irq handler.  Normally defers to the workqueue; during suspend
 * it only records the wakeup (register access may be unsafe) and flags
 * AXP_SUSPEND_WITH_IRQ so the resume path can process it.
 */
static irqreturn_t axp_irq(int irq, void *data)
{
	struct axp_dev *adev;
#ifdef CONFIG_AXP_NMI_USED
	/* hold off further NMIs until the work func re-enables them */
	disable_nmi();
#endif
	if (axp_suspend_flag == AXP_NOT_SUSPEND) {
		schedule_work(&axp_irq_work);
	} else if (axp_suspend_flag == AXP_WAS_SUSPEND) {
		list_for_each_entry(adev, &axp_dev_list, list) {
			if (adev->irq_data->wakeup_event) {
				adev->irq_data->wakeup_event();
				axp_suspend_flag = AXP_SUSPEND_WITH_IRQ;
			}
		}
	}
	return IRQ_HANDLED;
}
/*
 * Allocate and initialize the irq-chip bookkeeping for one PMU:
 * disable/ack all hardware irqs, then hook the shared interrupt line.
 * With CONFIG_DUAL_AXP_USED the line is requested only for the FIRST
 * registered device; the second device shares it and returns early.
 * Returns NULL on any failure (allocation or request_irq).
 */
struct axp_irq_chip_data *axp_irq_chip_register(struct axp_regmap *map,
			int irq_no, int irq_flags,
			struct axp_regmap_irq_chip *irq_chip,
			void (*wakeup_event)(void))
{
	struct axp_irq_chip_data *irq_data = NULL;
	struct axp_regmap_irq *irqs = NULL;
	int i, err = 0;
	irq_data = kzalloc(sizeof(*irq_data), GFP_KERNEL);
	if (IS_ERR_OR_NULL(irq_data)) {
		pr_err("axp irq data: not enough memory for irq data\n");
		return NULL;
	}
	irq_data->map = map;
	irq_data->chip = irq_chip;
	irq_data->num_irqs = AXP_REG_WIDTH * irq_chip->num_regs;
	/* one descriptor per hardware irq bit */
	irqs = kzalloc(irq_chip->num_regs * AXP_REG_WIDTH * sizeof(*irqs),
			GFP_KERNEL);
	if (IS_ERR_OR_NULL(irqs)) {
		pr_err("axp irq data: not enough memory for irq disc\n");
		goto free_irq_data;
	}
	mutex_init(&irq_data->lock);
	irq_data->irqs = irqs;
	irq_data->irqs_enabled = 0;
	irq_data->wakeup_event = wakeup_event;
	/* disable all irq and clear all irq pending */
	for (i = 0; i < irq_chip->num_regs; i++) {
		axp_regmap_clr_bits(map, irq_chip->enable_base + i, 0xff);
		axp_regmap_set_bits(map, irq_chip->status_base + i, 0xff);
	}
#ifdef CONFIG_DUAL_AXP_USED
	if (axp_dev_register_count == 1) {
		err = request_irq(irq_no, axp_irq, irq_flags, "axp", irq_data);
		goto irq_out;
	} else if (axp_dev_register_count == 2) {
		/* second PMU shares the line requested by the first */
		return irq_data;
	}
#else
	err = request_irq(irq_no, axp_irq, irq_flags, irq_chip->name, irq_data);
#endif
#ifdef CONFIG_DUAL_AXP_USED
irq_out:
#endif
	if (err)
		goto free_irqs;
	INIT_WORK(&axp_irq_work, axp_irq_work_func);
#ifdef CONFIG_AXP_NMI_USED
	set_nmi_trigger(IRQF_TRIGGER_LOW);
	clear_nmi_status();
	enable_nmi();
#endif
	return irq_data;
free_irqs:
	kfree(irqs);
free_irq_data:
	kfree(irq_data);
	return NULL;
}
EXPORT_SYMBOL_GPL(axp_irq_chip_register);
/*
 * Tear down what axp_irq_chip_register() set up: release the line,
 * disable and ack every hardware irq, then free the bookkeeping.
 */
void axp_irq_chip_unregister(int irq, struct axp_irq_chip_data *irq_data)
{
	int i;
	struct axp_regmap *map = irq_data->map;
	free_irq(irq, irq_data);
	/* disable all irq and clear all irq pending */
	for (i = 0; i < irq_data->chip->num_regs; i++) {
		axp_regmap_clr_bits(map,
				irq_data->chip->enable_base + i, 0xff);
		/* status regs are write-1-to-clear */
		axp_regmap_write(map,
				irq_data->chip->status_base + i, 0xff);
	}
	kfree(irq_data->irqs);
	kfree(irq_data);
#ifdef CONFIG_AXP_NMI_USED
	disable_nmi();
#endif
}
EXPORT_SYMBOL_GPL(axp_irq_chip_unregister);
/*
 * Register @handler for PMU irq @irq_no and enable it in hardware.
 *
 * FIX: the old code read irq_data->irqs BEFORE the `!irq_data` NULL
 * check, so an unregistered chip caused a NULL dereference instead of
 * the intended -1 return.  Validate first, dereference after.
 */
int axp_request_irq(struct axp_dev *adev, int irq_no,
			irq_handler_t handler, void *data)
{
	struct axp_irq_chip_data *irq_data = adev->irq_data;
	struct axp_regmap_irq *irqs;
	int reg, ret;
	u8 mask;
	if (!irq_data || irq_no < 0 || irq_no >= irq_data->num_irqs || !handler)
		return -1;
	irqs = irq_data->irqs;
	mutex_lock(&irq_data->lock);
	irqs[irq_no].handler = handler;
	irqs[irq_no].data = data;
	/* mirror the enable in software for __do_irq's masking */
	irq_data->irqs_enabled |= ((u64)0x1 << irq_no);
	reg = irq_no / AXP_REG_WIDTH;
	reg += irq_data->chip->enable_base;
	mask = 1 << (irq_no % AXP_REG_WIDTH);
	ret = axp_regmap_set_bits(adev->regmap, reg, mask);
	mutex_unlock(&irq_data->lock);
	return ret;
}
EXPORT_SYMBOL_GPL(axp_request_irq);
/* Re-enable irq @irq_no in hardware, but only if a handler is installed. */
int axp_enable_irq(struct axp_dev *adev, int irq_no)
{
	struct axp_irq_chip_data *irq_data = adev->irq_data;
	int reg, ret = 0;
	u8 mask;
	if (!irq_data || irq_no < 0 || irq_no >= irq_data->num_irqs)
		return -1;
	if (irq_data->irqs[irq_no].handler) {
		mutex_lock(&irq_data->lock);
		reg = irq_no / AXP_REG_WIDTH;
		reg += irq_data->chip->enable_base;
		mask = 1 << (irq_no % AXP_REG_WIDTH);
		ret = axp_regmap_set_bits(adev->regmap, reg, mask);
		mutex_unlock(&irq_data->lock);
	}
	return ret;
}
EXPORT_SYMBOL_GPL(axp_enable_irq);
/* Disable irq @irq_no in hardware; the handler stays registered. */
int axp_disable_irq(struct axp_dev *adev, int irq_no)
{
	struct axp_irq_chip_data *irq_data = adev->irq_data;
	int reg, ret = 0;
	u8 mask;
	if (!irq_data || irq_no < 0 || irq_no >= irq_data->num_irqs)
		return -1;
	mutex_lock(&irq_data->lock);
	reg = irq_no / AXP_REG_WIDTH;
	reg += irq_data->chip->enable_base;
	mask = 1 << (irq_no % AXP_REG_WIDTH);
	ret = axp_regmap_clr_bits(adev->regmap, reg, mask);
	mutex_unlock(&irq_data->lock);
	return ret;
}
EXPORT_SYMBOL_GPL(axp_disable_irq);
/*
 * Unregister irq @irq_no: disable it in hardware and drop the handler.
 * A no-op (still returns 0) when no handler was installed.
 * NOTE(review): irqs_enabled is not cleared here — __do_irq still sees
 * the bit but finds handler == NULL; confirm that is intended.
 */
int axp_free_irq(struct axp_dev *adev, int irq_no)
{
	struct axp_irq_chip_data *irq_data = adev->irq_data;
	int reg;
	u8 mask;
	if (!irq_data || irq_no < 0 || irq_no >= irq_data->num_irqs)
		return -1;
	mutex_lock(&irq_data->lock);
	if (irq_data->irqs[irq_no].handler) {
		reg = irq_no / AXP_REG_WIDTH;
		reg += irq_data->chip->enable_base;
		mask = 1 << (irq_no % AXP_REG_WIDTH);
		axp_regmap_clr_bits(adev->regmap, reg, mask);
		irq_data->irqs[irq_no].data = NULL;
		irq_data->irqs[irq_no].handler = NULL;
	}
	mutex_unlock(&irq_data->lock);
	return 0;
}
EXPORT_SYMBOL_GPL(axp_free_irq);
/*
 * Register a handler for a GPIO-sourced PMU irq.  Unlike
 * axp_request_irq() this does not touch the hardware enable register.
 *
 * FIX: same deref-before-NULL-check bug as axp_request_irq() — the old
 * code read irq_data->irqs before validating irq_data.
 */
int axp_gpio_irq_register(struct axp_dev *adev, int irq_no,
			irq_handler_t handler, void *data)
{
	struct axp_irq_chip_data *irq_data = adev->irq_data;
	struct axp_regmap_irq *irqs;
	if (!irq_data || irq_no < 0 || irq_no >= irq_data->num_irqs || !handler)
		return -1;
	irqs = irq_data->irqs;
	mutex_lock(&irq_data->lock);
	irq_data->irqs_enabled |= ((u64)0x1 << irq_no);
	irqs[irq_no].handler = handler;
	irqs[irq_no].data = data;
	mutex_unlock(&irq_data->lock);
	return 0;
}
/*
 * Register this PMU's mfd sub-devices and add it to the global device
 * list (list mutation guarded by axp_list_lock).
 */
int axp_mfd_add_devices(struct axp_dev *axp_dev)
{
	int ret;
	unsigned long irqflags;
	ret = mfd_add_devices(axp_dev->dev, -1,
			axp_dev->cells, axp_dev->nr_cells, NULL, 0, NULL);
	if (ret)
		goto fail;
	dev_set_drvdata(axp_dev->dev, axp_dev);
	spin_lock_irqsave(&axp_list_lock, irqflags);
	list_add(&axp_dev->list, &axp_dev_list);
	axp_dev_register_count++;
	spin_unlock_irqrestore(&axp_list_lock, irqflags);
	return 0;
fail:
	return ret;
}
EXPORT_SYMBOL_GPL(axp_mfd_add_devices);
/* Remove the mfd sub-devices.  NOTE(review): does not take the device
 * off axp_dev_list nor decrement the register count — confirm. */
int axp_mfd_remove_devices(struct axp_dev *axp_dev)
{
	mfd_remove_devices(axp_dev->dev);
	return 0;
}
EXPORT_SYMBOL_GPL(axp_mfd_remove_devices);
/*
 * Parse one PMU device-tree node into @axp_config.  "pmu_id" and
 * "compatible" are mandatory (-1 on failure); every other property
 * falls back to the default assigned in its if-branch.  Also records
 * the compatible string in the global axp_name[] table.
 */
int axp_dt_parse(struct device_node *node, int pmu_num,
			struct axp_config_info *axp_config)
{
	if (!of_device_is_available(node)) {
		pr_err("%s: failed\n", __func__);
		return -1;
	}
	if (of_property_read_u32(node, "pmu_id", &axp_config->pmu_id)) {
		pr_err("%s: get pmu_id failed\n", __func__);
		return -1;
	}
	if (of_property_read_string(node, "compatible", &axp_name[pmu_num])) {
		pr_err("%s: get pmu name failed\n", __func__);
		return -1;
	}
	/* optional properties: default values below */
	if (of_property_read_u32(node, "pmu_vbusen_func",
				&axp_config->pmu_vbusen_func))
		axp_config->pmu_vbusen_func = 1;
	if (of_property_read_u32(node, "pmu_reset",
				&axp_config->pmu_reset))
		axp_config->pmu_reset = 0;
	if (of_property_read_u32(node, "pmu_irq_wakeup",
				&axp_config->pmu_irq_wakeup))
		axp_config->pmu_irq_wakeup = 0;
	if (of_property_read_u32(node, "pmu_hot_shutdown",
				&axp_config->pmu_hot_shutdown))
		axp_config->pmu_hot_shutdown = 1;
	if (of_property_read_u32(node, "pmu_inshort",
				&axp_config->pmu_inshort))
		axp_config->pmu_inshort = 0;
	if (of_property_read_u32(node, "pmu_reset_shutdown_en",
				&axp_config->pmu_reset_shutdown_en))
		axp_config->pmu_reset_shutdown_en = 0;
	if (of_property_read_u32(node, "pmu_as_slave",
				&axp_config->pmu_as_slave))
		axp_config->pmu_as_slave = 0;
	return 0;
}
EXPORT_SYMBOL_GPL(axp_dt_parse);
/* PMU index currently selected through the sysfs debug interface. */
int axp_num;
/* sysfs: show the selected PMU index as "pmuN". */
static ssize_t axp_num_show(struct class *class,
			struct class_attribute *attr, char *buf)
{
	return sprintf(buf, "pmu%d\n", axp_num);
}
/*
 * sysfs: select the PMU index (input parsed base-16, as before).
 *
 * FIX: the old code assigned axp_num BEFORE range-checking, so an
 * out-of-range write returned -EINVAL but still left the bogus index
 * installed for the other sysfs handlers.  Validate first.
 */
static ssize_t axp_num_store(struct class *class,
			struct class_attribute *attr,
			const char *buf, size_t count)
{
	int val, err;

	err = kstrtoint(buf, 16, &val);
	if (err)
		return err;
	if ((val >= AXP_ONLINE_SUM) || (val < 0))
		return -EINVAL;
	axp_num = val;

	return count;
}
/* sysfs read: report the name of the currently selected PMU. */
static ssize_t axp_name_show(struct class *class,
		struct class_attribute *attr, char *buf)
{
	return sprintf(buf, "%s\n", get_pmu_cur_name(axp_num));
}
static u8 axp_reg_addr;
/*
 * sysfs read: dump the register previously selected through
 * axp_reg_store() for the currently selected PMU.
 */
static ssize_t axp_reg_show(struct class *class,
		struct class_attribute *attr, char *buf)
{
	u8 val;
	struct axp_dev *cur_axp_dev = get_pmu_cur_dev(axp_num);

	if (cur_axp_dev == NULL)
		return sprintf(buf, "invalid parameters\n");
	if (cur_axp_dev->is_dummy)
		return sprintf(buf, "unsupported\n");

	axp_regmap_read(cur_axp_dev->regmap, axp_reg_addr, &val);
	return sprintf(buf, "%s:REG[0x%x]=0x%x\n",
		get_pmu_cur_name(axp_num), axp_reg_addr, val);
}
/*
 * sysfs write (hex input), two forms:
 *   - value < 0x100:  select the register address read back by
 *     axp_reg_show();
 *   - value >= 0x100: packed as 0xAAVV — write byte VV to register AA
 *     and remember AA as the selected address.
 */
static ssize_t axp_reg_store(struct class *class,
		struct class_attribute *attr,
		const char *buf, size_t count)
{
	s32 tmp;
	u8 val;
	int err;
	struct axp_dev *cur_axp_dev = get_pmu_cur_dev(axp_num);

	if (cur_axp_dev == NULL) {
		pr_warn("invalid parameters\n");
		return -EINVAL;
	}
	if (cur_axp_dev->is_dummy) {
		pr_err("unsupported\n");
		return -EINVAL;
	}

	err = kstrtoint(buf, 16, &tmp);
	if (err)
		return err;

	if (tmp < 256) {
		/* Address-only form: remember it for axp_reg_show(). */
		axp_reg_addr = tmp;
	} else {
		/* Address + data form: 0xAAVV. */
		val = tmp & 0x00FF;
		axp_reg_addr = (tmp >> 8) & 0x00FF;
		axp_regmap_write(cur_axp_dev->regmap, axp_reg_addr, val);
	}
	return count;
}
static u32 data2 = 2;
static ssize_t axp_regs_show(struct class *class,
struct class_attribute *attr, char *buf)
{
u8 val;
s32 count = 0, i = 0;
struct axp_dev *cur_axp_dev = get_pmu_cur_dev(axp_num);
if (cur_axp_dev == NULL)
return sprintf(buf, "invalid parameters\n");
if (cur_axp_dev->is_dummy)
return sprintf(buf, "unsupported\n");
for (i = 0; i < data2; i++) {
axp_regmap_read(cur_axp_dev->regmap, axp_reg_addr+i, &val);
count += sprintf(buf+count, "%s:REG[0x%x]=0x%x\n",
get_pmu_cur_name(axp_num), axp_reg_addr+i, val);
}
return count;
}
/*
 * sysfs write: "<hex> <dec>" — the first (hex) token either selects the
 * starting register address (value < 0x100) or, when larger, is packed
 * as 0xAAV1V2 and writes bytes V1,V2 to registers AA,AA+1.  The second
 * (decimal) token sets `data2`, the number of registers dumped by
 * axp_regs_show().
 *
 * NOTE(review): data2 is stored without an upper bound here;
 * axp_regs_show() must bound its own output to the sysfs page size.
 */
static ssize_t axp_regs_store(struct class *class,
		struct class_attribute *attr,
		const char *buf, size_t count)
{
	u32 data1 = 0;
	u8 val[2];
	char *endp;
	struct axp_dev *cur_axp_dev = get_pmu_cur_dev(axp_num);

	if (cur_axp_dev == NULL) {
		pr_warn("invalid parameters\n");
		return -EINVAL;
	}
	if (cur_axp_dev->is_dummy) {
		pr_err("unsupported\n");
		return -EINVAL;
	}

	data1 = simple_strtoul(buf, &endp, 16);
	/* The two tokens must be separated by a single space. */
	if (*endp != ' ') {
		pr_err("%s: %d\n", __func__, __LINE__);
		return -EINVAL;
	}
	buf = endp + 1;
	data2 = simple_strtoul(buf, &endp, 10);

	if (data1 < 256) {
		/* Address-only form. */
		axp_reg_addr = data1;
	} else {
		/* Address + two data bytes: 0xAAV1V2. */
		axp_reg_addr = (data1 >> 16) & 0xFF;
		val[0] = (data1 >> 8) & 0xFF;
		val[1] = data1 & 0xFF;
		axp_regmap_writes(cur_axp_dev->regmap, axp_reg_addr, 2, val);
	}
	return count;
}
int axp_debug;
/*
 * sysfs write: set the driver debug bit-mask (hex input).
 * Bits: 1 SPLY, 2 REGU, 4 INT, 8 CHG (see debug_mask_show()).
 */
static ssize_t debug_mask_store(struct class *class,
		struct class_attribute *attr,
		const char *buf, size_t count)
{
	int val, err;

	err = kstrtoint(buf, 16, &val);
	if (err)
		return err;

	axp_debug = val;
	return count;
}
static ssize_t debug_mask_show(struct class *class,
struct class_attribute *attr, char *buf)
{
char *s = buf;
char *end = (char *)((ptrdiff_t)buf + (ptrdiff_t)PAGE_SIZE);
s += scnprintf(s, end - s, "%s\n", "1: SPLY 2: REGU 4: INT 8: CHG");
s += scnprintf(s, end - s, "debug_mask=%d\n", axp_debug);
return s - buf;
}
/* Attributes exposed under /sys/class/axp/. */
static struct class_attribute axp_class_attrs[] = {
	__ATTR(axp_name, S_IRUGO, axp_name_show, NULL),
	__ATTR(axp_num, S_IRUGO|S_IWUSR, axp_num_show, axp_num_store),
	__ATTR(axp_reg, S_IRUGO|S_IWUSR, axp_reg_show, axp_reg_store),
	__ATTR(axp_regs, S_IRUGO|S_IWUSR, axp_regs_show, axp_regs_store),
	__ATTR(debug_mask, S_IRUGO|S_IWUSR, debug_mask_show, debug_mask_store),
	__ATTR_NULL
};
/* The "axp" sysfs class the attributes above are registered under. */
struct class axp_class = {
	.name = "axp",
	.class_attrs = axp_class_attrs,
};
/*
 * Register the "axp" sysfs class at arch_initcall time.
 *
 * Fix: propagate the class_register() return value instead of silently
 * discarding it and reporting success even when registration failed.
 */
static s32 __init axp_core_init(void)
{
	return class_register(&axp_class);
}
arch_initcall(axp_core_init);
MODULE_DESCRIPTION("ALLWINNERTECH axp board");
MODULE_AUTHOR("<NAME>");
MODULE_LICENSE("GPL");
|
def print_multiplication_table(number):
    """Print the 1-through-10 multiplication table for ``number``.

    Each line has the form ``<number> X <factor> = <product>``.
    """
    for factor in range(1, 11):
        print(number, "X", factor, "=", number * factor)
<gh_stars>0
/*
* Developed by szczypiorofix on 24.08.18 13:31.
* Copyright (c) 2018. All rights reserved.
*
*/
package com.szczypiorofix.sweetrolls.game.gui;
import com.szczypiorofix.sweetrolls.game.enums.ObjectType;
import com.szczypiorofix.sweetrolls.game.main.fonts.BitMapFont;
import com.szczypiorofix.sweetrolls.game.main.fonts.FontParser;
import com.szczypiorofix.sweetrolls.game.objects.GameObject;
import org.newdawn.slick.Color;
import org.newdawn.slick.Graphics;
import org.newdawn.slick.SlickException;
/**
 * GUI button shown in the inventory options panel.  Draws a black
 * background rectangle plus its name label, and carries a command string
 * that the inventory controller dispatches on.
 */
public class InventoryOptionsButton extends GameObject {

    /** Bitmap font used to draw the button label. */
    private BitMapFont font;
    /** Command identifier returned by {@link #getCommand()}. */
    private String command;

    /**
     * Creates a 100x25 GUI button.
     *
     * @param name    label drawn on the button
     * @param x       x position
     * @param y       y position
     * @param command command identifier dispatched for this button
     */
    public InventoryOptionsButton(String name, int x, int y, String command) {
        super(name, x, y, 100, 25, ObjectType.GUI);
        this.command = command;
        font = FontParser.getFont();
    }

    /** No per-frame state to update. */
    @Override
    public void update(int delta, float offsetX, float offsetY) throws SlickException {}

    /** Draws the black background box, then the label on top of it. */
    @Override
    public void render(Graphics g, float offsetX, float offsetY) throws SlickException {
        Color c = g.getColor();
        g.setColor(Color.black);
        g.fillRect(x - 4, y - 2, width + 4, height + 4);
        // Restore the caller's draw colour before rendering the text.
        g.setColor(c);
        font.draw(name, (int) x, (int) (y + 5));
    }

    /** @return the command identifier associated with this button */
    public String getCommand() {
        return command;
    }
}
|
<filename>src/serverApi/wsServer.ts
import WebSocket from 'ws';
import path from 'path';
import { ClientMessageTypeMap, ServerMessageDynamic, ServerMessageError, ServerMessageInput, ServerMessageOutput } from '../commonTypes';
// Singleton WebSocket server; created lazily by startServer().
let server: WebSocket.Server;

// Accessor for the singleton (undefined until startServer() has run).
export const getServer = () => server;
/**
 * Lazily create the singleton WebSocket server on port 8080.
 *
 * Each incoming message is expected to be a JSON-encoded
 * ServerMessageInput.  The handler module is resolved dynamically from
 * ./messages/<type>; the result of its execute() (or any error) is sent
 * back to the requesting client when the message carries an `id`.
 */
export function startServer() {
    if (server) return;
    server = new WebSocket.Server({ port: 8080 });
    server.on('connection', ws => {
        ws.on('message', async message => {
            // NOTE(review): newer `ws` versions deliver Buffer payloads, which
            // this check would reject — confirm the installed `ws` version
            // hands text frames over as strings.
            if (typeof message !== 'string') {
                console.error(`Unknown message from wsClient: ${message}`);
                return;
            }
            // Step 1: parse the envelope.
            let json: ServerMessageInput;
            try {
                json = JSON.parse(message);
            } catch (e) {
                console.error(`Unknown message from wsClient: ${message}`);
                return;
            }
            // Step 2: resolve the handler module for this message type.
            let handler: ServerMessageDynamic;
            try {
                handler = require(path.join(__dirname, 'messages', json.type)).message;
            } catch (e) {
                console.error(`Unknown message from wsClient: ${message}`);
                // Only reply when the client asked for a correlated response.
                if (json.id !== undefined) {
                    const response: ServerMessageError = {
                        id: json.id,
                        type: json.type,
                        error: e
                    };
                    ws.send(JSON.stringify(response));
                }
                return;
            }
            // Step 3: run the handler; report failures back to the client.
            let result;
            try {
                result = await handler.execute(json.data);
            } catch (e) {
                console.error(`Error executing ${json.type}`);
                console.error(e);
                if (json.id !== undefined) {
                    const response: ServerMessageError = {
                        id: json.id,
                        type: json.type,
                        error: e
                    };
                    ws.send(JSON.stringify(response));
                }
                return;
            }
            // Step 4: echo the result back when a correlation id was given.
            if (json.id !== undefined) {
                const output: ServerMessageOutput = {
                    id: json.id,
                    type: json.type,
                    data: result
                };
                ws.send(JSON.stringify(output));
            }
        });
    });
}
/**
 * Broadcast a message of the given type to every connected client.
 *
 * Fix: sockets that are not fully OPEN are skipped — calling send() on a
 * CONNECTING socket throws, and on CLOSING/CLOSED sockets it errors
 * asynchronously.
 */
export function sendMessage<T extends keyof ClientMessageTypeMap>(type: T, data: ClientMessageTypeMap[T]) {
    server.clients.forEach(ws => {
        if (ws.readyState !== WebSocket.OPEN) return;
        ws.send(JSON.stringify({
            type,
            data
        }));
    });
}
export const destroyWhenReady = () => setTimeout(() => server.close(), 0); |
#!/usr/bin/env bash
# Generate man pages for the daemon, CLI, TX and Qt binaries using help2man.
export LC_ALL=C

# Resolve source/build/output directories (overridable via environment).
TOPDIR=${TOPDIR:-$(git rev-parse --show-toplevel)}
BUILDDIR=${BUILDDIR:-$TOPDIR}

BINDIR=${BINDIR:-$BUILDDIR/src}
MANDIR=${MANDIR:-$TOPDIR/doc/man}

BITCOIND=${BITCOIND:-$BINDIR/securecloud2d}
BITCOINCLI=${BITCOINCLI:-$BINDIR/securecloud2-cli}
BITCOINTX=${BITCOINTX:-$BINDIR/securecloud2-tx}
BITCOINQT=${BITCOINQT:-$BINDIR/qt/securecloud2-qt}

[ ! -x $BITCOIND ] && echo "$BITCOIND not found or not executable." && exit 1

# The autodetected version git tag can screw up manpage output a little bit
# BTCVER[0] = version, BTCVER[1] = git-describe suffix (stripped below).
BTCVER=($($BITCOINCLI --version | head -n1 | awk -F'[ -]' '{ print $6, $7 }'))

# Create a footer file with copyright content.
# This gets autodetected fine for bitcoind if --version-string is not set,
# but has different outcomes for bitcoin-qt and bitcoin-cli.
echo "[COPYRIGHT]" > footer.h2m
$BITCOIND --version | sed -n '1!p' >> footer.h2m

# Generate one man page per binary, then strip the git suffix from it.
for cmd in $BITCOIND $BITCOINCLI $BITCOINTX $BITCOINQT; do
  cmdname="${cmd##*/}"
  help2man -N --version-string=${BTCVER[0]} --include=footer.h2m -o ${MANDIR}/${cmdname}.1 ${cmd}
  sed -i "s/\\\-${BTCVER[1]}//g" ${MANDIR}/${cmdname}.1
done

rm -f footer.h2m
import {ExpressAfterController, ExpressBeforeController, ResponseHandler} from "@mo/express";
import {co, IController, Injectable, Plugin} from "@mo/core";
import * as e from "express";
import {IUser} from "../define/user-interface";
import {GROUP} from "../decoractor/symbol";
@Injectable()
export class PluginPackage {
    /**
     * Express before-controller access guard.
     *
     * Reads the GROUP metadata declared on the controller method and
     * grants access when the session user matches one of:
     *   - 'all':   any logged-in user with a group
     *   - '!all':  only anonymous (not logged-in) requests
     *   - 'self':  logged-in user whose username matches req.body.username
     *   - <name>:  logged-in user whose group equals <name>
     * Methods without GROUP metadata are open to everyone.
     *
     * Fix: the default branch dereferenced `user.group` without checking
     * that a session user exists, throwing a TypeError for anonymous
     * requests instead of denying access.  (Also removed the unused
     * local alias `p`.)
     */
    @Plugin(ExpressBeforeController)
    judge(req: e.Request, res: ResponseHandler, cIns: IController, cFun: Function): Boolean {
        return co(function *() {
            let user: IUser = req['session'].user;
            let group: string[] = Reflect.getMetadata(GROUP, cIns, cFun.name);
            if (group) {
                for (let g of group) {
                    switch (g) {
                        case 'all':
                            if (user && user.group)
                                return true;
                            break;
                        case '!all':
                            if (!user)
                                return true;
                            break;
                        case 'self':
                            if (user && req.body.username && req.body.username === user.username)
                                return true;
                            break;
                        default:
                            if (user && user.group == g)
                                return true;
                            break;
                    }
                }
            } else {
                // No GROUP metadata: the endpoint is unrestricted.
                return true;
            }
            res.status(200).message('无访问权限');
            return false;
        });
    }
}
/**
* Created by yskun on 2017/7/15.
*/
|
package com.example.lostandfoundoncampus.utils;
import android.graphics.Bitmap;
/**
 * Utility for cropping a bitmap to a centered square of a given edge length.
 */
public class CircleTransform {
    /**
     * Scales the bitmap so its shorter side equals {@code edgeLength}
     * (preserving aspect ratio), then crops out the centered
     * {@code edgeLength x edgeLength} square.
     *
     * @param bitmap     source bitmap
     * @param edgeLength desired side length of the resulting square
     * @return the centered square bitmap; the original bitmap unchanged when
     *         it is not strictly larger than the requested edge in both
     *         dimensions; or {@code null} on invalid input or scaling/crop
     *         failure
     */
    public static Bitmap centerSquareScaleBitmap(Bitmap bitmap, int edgeLength) {
        if (null == bitmap || edgeLength <= 0)
            return null;
        Bitmap result = bitmap;
        int widthOrg = bitmap.getWidth();
        int heightOrg = bitmap.getHeight();
        if (widthOrg > edgeLength && heightOrg > edgeLength) {
            // Scale so the shorter side becomes edgeLength and the longer
            // side becomes longerEdge (>= edgeLength), keeping aspect ratio.
            int longerEdge = (int)(edgeLength * Math.max(widthOrg, heightOrg) / Math.min(widthOrg, heightOrg));
            // BUG FIX: the original compared widthOrg > widthOrg (always
            // false), so landscape images got the wrong scaled width.
            int scaledWidth = widthOrg > heightOrg ? longerEdge : edgeLength;
            int scaledHeight = widthOrg > heightOrg ? edgeLength : longerEdge;
            Bitmap scaledBitmap;
            try {
                scaledBitmap = Bitmap.createScaledBitmap(bitmap, scaledWidth, scaledHeight, true);
            } catch (Exception e) {
                return null;
            }
            // Crop the centered square out of the scaled bitmap.
            int xTopLeft = (scaledWidth - edgeLength) / 2;
            int yTopLeft = (scaledHeight - edgeLength) / 2;
            try {
                result = Bitmap.createBitmap(scaledBitmap, xTopLeft, yTopLeft, edgeLength, edgeLength);
                scaledBitmap.recycle();
            } catch (Exception e) {
                return null;
            }
        }
        return result;
    }
}
|
#!/usr/bin/env bash
# sets up LDC for cross-compilation. Source this script, s.t. the new LDC is in PATH
# Make sure this version matches the version of LDC2 used in .travis.yml,
# otherwise the compiler and the lib used might mismatch.
LDC_VERSION="1.22.0"
ARCH=${ARCH:-32}
VERSION=$(git describe --abbrev=0 --tags)
OS=windows

# LDC should already be installed (see .travis.yml)
# However, we need the libraries, so download them
# We can't use the downloaded ldc2 itself, because obviously it's for Windows
if [ "${ARCH}" == 64 ]; then
    ARCH_SUFFIX='x86_64'
    ZIP_ARCH_SUFFIX='x64'
else
    ARCH_SUFFIX='i686'
    ZIP_ARCH_SUFFIX='x86'
fi

LDC_DIR_PATH="$(pwd)/ldc2-${LDC_VERSION}-windows-${ZIP_ARCH_SUFFIX}"
LDC_XDFLAGS="-conf=${LDC_DIR_PATH}/etc/ldc2.conf -mtriple=${ARCH_SUFFIX}-pc-windows-msvc"

# Step 1: download the LDC Windows release
# Check if the user already have it (e.g. building locally)
if [ ! -d ${LDC_DIR_PATH} ]; then
    # BUG FIX: the archive is a regular file, so test with -f; the original
    # used -d, which is always false for a file and forced a re-download on
    # every run.
    if [ ! -f "ldc2-${LDC_VERSION}-windows-${ZIP_ARCH_SUFFIX}.7z" ]; then
        wget "https://github.com/ldc-developers/ldc/releases/download/v${LDC_VERSION}/ldc2-${LDC_VERSION}-windows-${ZIP_ARCH_SUFFIX}.7z"
    fi
    7z x "ldc2-${LDC_VERSION}-windows-${ZIP_ARCH_SUFFIX}.7z" > /dev/null
fi

# Step 2: Generate a config file with the proper path
cat > ${LDC_DIR_PATH}/etc/ldc2.conf <<EOF
default:
{
    switches = [
        "-defaultlib=phobos2-ldc,druntime-ldc",
        "-link-defaultlib-shared=false",
    ];
    post-switches = [
        "-I${LDC_DIR_PATH}/import",
    ];
    lib-dirs = [
        "${LDC_DIR_PATH}/lib/",
        "${LDC_DIR_PATH}/lib/mingw/",
    ];
};
EOF
|
#!/bin/sh
#
# Homebrew
#
# This installs some of the common dependencies needed (or at least desired)
# using Homebrew.

# Check for Homebrew
if test ! $(which brew)
then
  echo ">> Installing Homebrew for you. <<"

  # Install the correct homebrew for each OS type
  # NOTE(review): these ruby-based installer URLs are the historical ones —
  # confirm they are still served; upstream has since moved to bash installers.
  if test "$(uname)" = "Darwin"
  then
    ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
  elif test "$(expr substr $(uname -s) 1 5)" = "Linux"
  then
    ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Linuxbrew/install/master/install)"
  fi
else
  echo ">>> Homebrew already installed <<<"
fi

# Pin cask definitions by copying frozen versions over the tap's Casks dir.
echo "Copying 'frozen' cask versions with 'cp -R ~/.dotfiles/homebrew/casks/* /usr/local/Homebrew/Library/Taps/homebrew/homebrew-cask/Casks/' "
cp -vR ~/.dotfiles/homebrew/casks/* /usr/local/Homebrew/Library/Taps/homebrew/homebrew-cask/Casks/

exit 0
|
/**
* @license
* Copyright Google Inc. All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import * as ts from 'typescript/lib/tsserverlibrary';
import * as lsp from 'vscode-languageserver';
import {URI} from 'vscode-uri';
export const isDebugMode = process.env['NG_DEBUG'] === 'true';
enum Scheme {
File = 'file',
}
/**
* Extract the file path from the specified `uri`.
* @param uri
*/
export function uriToFilePath(uri: string): string {
// Note: uri.path is different from uri.fsPath
// See
// https://github.com/microsoft/vscode-uri/blob/413805221cc6ed167186ab3103d3248d6f7161f2/src/index.ts#L622-L645
const {scheme, fsPath} = URI.parse(uri);
if (scheme !== Scheme.File) {
return '';
}
return fsPath;
}
/**
 * Converts the specified `filePath` to a proper `file://` URI string.
 * @param filePath
 */
export function filePathToUri(filePath: string): lsp.DocumentUri {
  return URI.file(filePath).toString();
}
/**
 * Convert ts.TextSpan to lsp.Range. TypeScript keeps track of offset using
 * 1-based index whereas LSP uses 0-based index.
 * @param scriptInfo Used to determine the offsets.
 * @param textSpan
 */
export function tsTextSpanToLspRange(scriptInfo: ts.server.ScriptInfo, textSpan: ts.TextSpan) {
  const start = scriptInfo.positionToLineOffset(textSpan.start);
  const end = scriptInfo.positionToLineOffset(textSpan.start + textSpan.length);
  // ScriptInfo (TS) is 1-based, LSP is 0-based.
  return lsp.Range.create(start.line - 1, start.offset - 1, end.line - 1, end.offset - 1);
}
/**
 * Convert lsp.Position to the absolute offset in the file. LSP keeps track of
 * offset using 0-based index whereas TypeScript uses 1-based index.
 * @param scriptInfo Used to determine the offsets.
 * @param position
 */
export function lspPositionToTsPosition(scriptInfo: ts.server.ScriptInfo, position: lsp.Position) {
  const {line, character} = position;
  // ScriptInfo (TS) is 1-based, LSP is 0-based.
  return scriptInfo.lineOffsetToPosition(line + 1, character + 1);
}
/**
 * Convert lsp.Range which is made up of `start` and `end` positions to
 * TypeScript's absolute offsets.
 * @param scriptInfo Used to determine the offsets.
 * @param range
 */
export function lspRangeToTsPositions(
    scriptInfo: ts.server.ScriptInfo, range: lsp.Range): [number, number] {
  // Map both endpoints of the range through the position converter.
  return [
    lspPositionToTsPosition(scriptInfo, range.start),
    lspPositionToTsPosition(scriptInfo, range.end),
  ];
}
/**
 * Convert a ts.DiagnosticRelatedInformation array to a
 * lsp.DiagnosticRelatedInformation array
 * @param scriptInfo Used to determine the offsets.
 * @param relatedInfo
 */
export function tsRelatedInformationToLspRelatedInformation(
    scriptInfo: ts.server.ScriptInfo,
    relatedInfo?: ts.DiagnosticRelatedInformation[]): lsp.DiagnosticRelatedInformation[]|undefined {
  if (relatedInfo === undefined) return;
  const lspRelatedInfo: lsp.DiagnosticRelatedInformation[] = [];
  for (const info of relatedInfo) {
    // Entries without a file or span cannot be mapped to a location; skip.
    if (info.file === undefined || info.start === undefined || info.length === undefined) continue;
    const textSpan: ts.TextSpan = {
      start: info.start,
      length: info.length,
    };
    const location = lsp.Location.create(
        filePathToUri(info.file.fileName),
        tsTextSpanToLspRange(scriptInfo, textSpan),
    );
    lspRelatedInfo.push(lsp.DiagnosticRelatedInformation.create(
        location,
        ts.flattenDiagnosticMessageText(info.messageText, '\n'),
    ));
  }
  return lspRelatedInfo;
}
/** Type guard: true when the project is backed by a tsconfig (Configured). */
export function isConfiguredProject(project: ts.server.Project):
    project is ts.server.ConfiguredProject {
  return project.projectKind === ts.server.ProjectKind.Configured;
}
/**
 * A class that tracks items in most recently used order.
 */
export class MruTracker {
  private readonly set = new Set<string>();

  /** Marks `item` as the most recently used entry. */
  update(item: string) {
    // Deleting (a no-op when absent) then re-adding moves the item to the
    // end of the Set's insertion order.
    this.set.delete(item);
    this.set.add(item);
  }

  delete(item: string) {
    this.set.delete(item);
  }

  /**
   * Returns all items sorted by most recently used.
   */
  getAll(): string[] {
    // Sets iterate in insertion order (least → most recently used), so the
    // snapshot is reversed to put the most recent entries first.
    const items = Array.from(this.set);
    items.reverse();
    return items;
  }
}
/** Concatenates the text of all symbol display parts into one string. */
export function tsDisplayPartsToText(parts: ts.SymbolDisplayPart[]): string {
  let text = '';
  for (const part of parts) {
    text += part.text;
  }
  return text;
}
interface DocumentPosition {
fileName: string;
pos: number;
}
/**
 *
 * This function attempts to use *internal* TypeScript APIs to find the original source spans for
 * the `ts.DefinitionInfo` using source maps. If it fails, this function returns the same
 * `ts.DefinitionInfo` that was passed in.
 *
 * @see https://github.com/angular/vscode-ng-language-service/issues/1588
 */
export function getMappedDefinitionInfo(
    info: ts.DefinitionInfo, project: ts.server.Project): ts.DefinitionInfo {
  try {
    const mappedDocumentSpan = getMappedDocumentSpan(info, project);
    // Spread order matters: mapped fields override the originals.
    return {...info, ...mappedDocumentSpan};
  } catch {
    // Internal APIs may change shape between TS versions; fall back safely.
    return info;
  }
}
/**
 * Maps a document span back to its original source location via source maps,
 * preserving the pre-mapping file/span in the `original*` fields.
 * Returns undefined when no source-map position can be resolved.
 */
function getMappedDocumentSpan(
    documentSpan: ts.DocumentSpan, project: ts.server.Project): ts.DocumentSpan|undefined {
  const newPosition = getMappedLocation(documentSpanLocation(documentSpan), project);
  if (!newPosition) return undefined;
  return {
    fileName: newPosition.fileName,
    textSpan: {start: newPosition.pos, length: documentSpan.textSpan.length},
    originalFileName: documentSpan.fileName,
    originalTextSpan: documentSpan.textSpan,
    contextSpan: getMappedContextSpan(documentSpan, project),
    originalContextSpan: documentSpan.contextSpan
  };
}
/**
 * Resolves `location` through the project's source mapper (internal TS API
 * accessed via `any` casts) and returns the mapped position only when the
 * mapped file actually exists; otherwise undefined.
 */
function getMappedLocation(
    location: DocumentPosition, project: ts.server.Project): DocumentPosition|undefined {
  const mapsTo = (project as any).getSourceMapper().tryGetSourcePosition(location);
  return mapsTo &&
      (project.projectService as any).fileExists(ts.server.toNormalizedPath(mapsTo.fileName)) ?
      mapsTo :
      undefined;
}
/** Collapses a document span to its file name + starting offset. */
function documentSpanLocation({fileName, textSpan}: ts.DocumentSpan): DocumentPosition {
  return {fileName, pos: textSpan.start};
}
/**
 * Maps the optional `contextSpan` of a document span through the source
 * mapper by mapping its start and end independently.  Returns undefined
 * when there is no context span or either endpoint fails to map.
 */
function getMappedContextSpan(
    documentSpan: ts.DocumentSpan, project: ts.server.Project): ts.TextSpan|undefined {
  const contextSpanStart = documentSpan.contextSpan &&
      getMappedLocation({fileName: documentSpan.fileName, pos: documentSpan.contextSpan.start},
                        project);
  const contextSpanEnd = documentSpan.contextSpan &&
      getMappedLocation({
        fileName: documentSpan.fileName,
        pos: documentSpan.contextSpan.start + documentSpan.contextSpan.length
      },
                        project);
  return contextSpanStart && contextSpanEnd ?
      {start: contextSpanStart.pos, length: contextSpanEnd.pos - contextSpanStart.pos} :
      undefined;
}
extension TypeAttribute {
    /// Returns `true` when this type — or, for optionals, the wrapped
    /// type — carries the given attribute.  Attribute strings are compared
    /// after trimming surrounding whitespace and newlines.
    func containsAttribute(named attribute: String) -> Bool {
        switch self {
        // Optionals are transparent: recurse into the wrapped type.
        case .optional(let wrapped), .implicitlyUnwrappedOptional(let wrapped):
            return wrapped.containsAttribute(named: attribute)
        case .attributed(_, let attributes):
            return attributes.contains { $0.trimmingCharacters(in: .whitespacesAndNewlines) == attribute }
        case .type:
            return false
        }
    }
}
<reponame>champ8644/BanG-Dream-Translated-Tool
import { meanLength, meanSmooth } from '../constants/config';
class Meaning {
constructor() {
this.data = [];
this.div = meanSmooth;
this.length = meanLength;
}
avg5(frame) {
let sum = 0;
for (let i = 1; i <= this.div; i++) {
const prevFrame = (frame - i + this.length) % this.length;
sum += this.data[prevFrame] || 0;
}
return sum / this.div;
}
at(frame) {
return this.data[frame % this.length];
}
push(frame, val) {
this.data[frame % this.length] = val;
}
isFadingFromBlack() {}
isFadingToWhite() {}
isFadingFromWhite() {}
}
// Module-level singleton shared by all importers.
const meanInstance = new Meaning();
export default meanInstance;
|
<reponame>aasiyahf/programs
package edu.ncsu.csc316.customer_service.data;
/**
 * Creates a timestamp object to keep track of the individual parts of the
 * time the help ticket was submitted.
 * @author <NAME>
 *
 */
public class Timestamp {

    private int year;
    private int month;
    private int day;
    private int hour;
    private int minute;
    private int second;

    /**
     * Creates a timestamp from the given string, with separate fields for
     * the year, month, day, hour, minute, and second.  The expected input
     * form is {@code M/D/YYYY H:M:S}.
     *
     * @param time the string containing the timestamp
     * @throws IllegalArgumentException if the string is null, malformed
     *         (wrong separators or wrong number of components), or any
     *         component is not a valid integer
     */
    public Timestamp( String time ) {
        if (time == null || !time.contains(" ")) {
            throw new IllegalArgumentException("Invalid string");
        }
        String[] fullTime = time.split(" ");
        // Exactly one date part and one clock part are required; the
        // original indexed fullTime[1] unchecked and could throw
        // ArrayIndexOutOfBoundsException on trailing-space input.
        if (fullTime.length != 2) {
            throw new IllegalArgumentException("Invalid string");
        }
        String date = fullTime[0];
        String clockTime = fullTime[1];

        String[] fullDate = date.split("/");
        // Also covers the "no slash at all" case (length == 1).
        if (fullDate.length != 3) {
            throw new IllegalArgumentException("Invalid date");
        }
        try {
            this.setMonth(Integer.parseInt(fullDate[0]));
            this.setDay(Integer.parseInt(fullDate[1]));
            this.setYear(Integer.parseInt(fullDate[2]));
        } catch (NumberFormatException e) {
            // Surface the documented exception type instead of leaking
            // NumberFormatException to callers.
            throw new IllegalArgumentException("Invalid date", e);
        }

        String[] fullClockTime = clockTime.split(":");
        if (fullClockTime.length != 3) {
            throw new IllegalArgumentException("Invalid time");
        }
        try {
            this.setHour(Integer.parseInt(fullClockTime[0]));
            this.setMinute(Integer.parseInt(fullClockTime[1]));
            this.setSecond(Integer.parseInt(fullClockTime[2]));
        } catch (NumberFormatException e) {
            throw new IllegalArgumentException("Invalid time", e);
        }
    }

    /**
     * Retrieves the year
     * @return the year of the submission
     */
    public int getYear() {
        return year;
    }

    /**
     * Sets the year
     * @param year the year of the submission
     */
    public void setYear(int year) {
        this.year = year;
    }

    /**
     * Retrieves the month
     * @return the month of the submission
     */
    public int getMonth() {
        return month;
    }

    /**
     * Sets the month
     * @param month the month of the submission
     */
    public void setMonth(int month) {
        this.month = month;
    }

    /**
     * Retrieves the day
     * @return the day of the submission
     */
    public int getDay() {
        return day;
    }

    /**
     * Sets the day
     * @param day the day of the submission
     */
    public void setDay(int day) {
        this.day = day;
    }

    /**
     * Retrieves the hour
     * @return the hour of the submission
     */
    public int getHour() {
        return hour;
    }

    /**
     * Sets the hour
     * @param hour the hour of the submission
     */
    public void setHour(int hour) {
        this.hour = hour;
    }

    /**
     * Retrieves the minute
     * @return the minute of the submission
     */
    public int getMinute() {
        return minute;
    }

    /**
     * Sets the minute
     * @param minute the minute of the submission
     */
    public void setMinute(int minute) {
        this.minute = minute;
    }

    /**
     * Retrieves the second
     * @return the second of the submission
     */
    public int getSecond() {
        return second;
    }

    /**
     * Sets the second
     * @param second the second of the submission
     */
    public void setSecond(int second) {
        this.second = second;
    }
}
|
#!/bin/sh
# Regression test: pulse-transform a streaming model with tract and compare
# the dumped NNEF graph against the checked-in expected output.
cd `dirname $0`
set -ex

# Location of pre-downloaded model artifacts (overridable).
if [ -z "$CACHEDIR" ]
then
    CACHEDIR=../../../.cached
fi

# Default to running tract from the workspace unless TRACT_RUN is provided.
: ${TRACT_RUN:=cargo run -p tract $CARGO_OPTS --}
$TRACT_RUN $CACHEDIR/hey_snips_v4_model17.pb -i S,20,f32 --pulse 8 --nnef-tract-pulse dump -q --nnef-graph found

# Fails (via set -e) if the dumped graph differs from the expected one.
diff -u expected found
|
// Copyright 2018 Sogou Inc. All rights reserved.
// Use of this source code is governed by the Apache 2.0
// license that can be found in the LICENSE file.
package com.sogou.sogocommon.utils;
import android.content.Context;
import android.util.Base64;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.security.KeyManagementException;
import java.security.KeyStore;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.security.cert.Certificate;
import java.security.cert.CertificateException;
import java.security.cert.CertificateExpiredException;
import java.security.cert.CertificateFactory;
import java.security.cert.CertificateNotYetValidException;
import java.security.cert.X509Certificate;
import javax.net.ssl.HostnameVerifier;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.KeyManager;
import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLSession;
import javax.net.ssl.SSLSocketFactory;
import javax.net.ssl.TrustManager;
import javax.net.ssl.TrustManagerFactory;
import javax.net.ssl.X509TrustManager;
/**
 * Utility class supporting SSL verification for HTTPS requests.
 * <p>
 * SECURITY NOTE(review): with the default empty {@link #CERTIFICATES} list,
 * {@link #initHttpsUrlConnection} installs a trust-all socket factory and a
 * hostname verifier that accepts every host, process-wide.  That disables
 * TLS certificate validation entirely — confirm this is intentional before
 * shipping.
 * <p>
 * created by kabuzai
 */
public class HttpsUtil {
    // Asset file names of the server certificates to pin; empty means
    // "trust everything" (see the class-level security note).
    private static final String[] CERTIFICATES = new String[]{};

    /**
     * Enables HTTPS (one-way SSL) verification for HttpsURLConnection —
     * sufficient for most services.
     * <p>
     * Services with stricter security requirements (banking, finance, ...)
     * need mutual (two-way) verification and should customize this.
     *
     * @param context
     */
    public static void initHttpsUrlConnection(Context context) {
        InputStream[] certificates = getCertificates(context, CERTIFICATES);
        SSLSocketFactory sslSocketFactory = getSSLSocketFactory(certificates, null, null);
        HttpsURLConnection.setDefaultSSLSocketFactory(sslSocketFactory);
        // No pinned certificates: fall back to accepting any hostname.
        if (certificates == null) {
            HttpsURLConnection.setDefaultHostnameVerifier(getUnSafeHostnameVerifier(context));
        }
    }

    /**
     * Returns an OkHttpClient with HTTPS support.
     * <p>
     * Projects that do not need OkHttp may leave this commented out.
     *
     * @param context
     * @return
     */
//    public static OkHttpClient getHttpsOkHttpClient(Context context) {
//        OkHttpClient.Builder builder = new OkHttpClient().newBuilder();
//
//        InputStream[] certificates = HttpsUtil.getCertificates(context, CERTIFICATES);
//        SSLSocketFactory sslSocketFactory = HttpsUtil.getSSLSocketFactory(certificates, null, null);
//        builder.sslSocketFactory(sslSocketFactory);
//        if (certificates == null) {
//            builder.hostnameVerifier(HttpsUtil.getUnSafeHostnameVerifier());
//        }
//        return builder.build();
//    }

    /**
     * Loads the server certificates, expected in the assets directory by
     * default.
     *
     * @param context
     * @return streams for each certificate file, or null when the context
     *         is missing, no file names were given, or a file cannot be
     *         opened
     */
    public static InputStream[] getCertificates(Context context, String... fileNames) {
        if (context == null || fileNames == null || fileNames.length <= 0) {
            return null;
        }
        try {
            InputStream[] certificates = new InputStream[fileNames.length];
            for (int i = 0; i < fileNames.length; i++) {
                certificates[i] = context.getAssets().open(fileNames[i]);
            }
//            LogUtil.log("xq","certificates "+certificates[0].toString());
            return certificates;
        } catch (IOException e) {
            e.printStackTrace();
        }
        return null;
    }

    /**
     * Builds a custom SSLSocketFactory.
     * <p>
     * One-way verification only needs {@code certificates}; pass null for
     * the rest.  Mutual (two-way) verification needs all three parameters.
     * <p>
     * Passing null for everything skips verification (trusts every
     * certificate) when combined with getUnSafeHostnameVerifier().
     * Security risk — use with extreme caution!!!
     *
     * @param certificates server certificates (.crt)
     * @param bksFile      client keystore file (.jks converted to .bks)
     * @param password     keystore password used when generating the jks
     * @return
     */
    public static SSLSocketFactory getSSLSocketFactory(InputStream[] certificates, InputStream bksFile, String password) {
        try {
            TrustManager[] trustManagers = prepareTrustManager(certificates);
            KeyManager[] keyManagers = prepareKeyManager(bksFile, password);
            SSLContext sslContext = SSLContext.getInstance("TLS");
            // No pinned trust material: fall back to the trust-all manager.
            if (trustManagers == null || trustManagers.length <= 0) {
                trustManagers = new TrustManager[]{new UnSafeTrustManager()};
            }
            sslContext.init(keyManagers, trustManagers, new SecureRandom());
            return sslContext.getSocketFactory();
        } catch (NoSuchAlgorithmException | KeyManagementException e) {
            throw new AssertionError(e);
        }
    }

    /**
     * Builds trust managers backed by an in-memory keystore containing the
     * given server certificates; returns null when none are supplied or
     * loading fails.
     */
    private static TrustManager[] prepareTrustManager(InputStream... certificates) {
        if (certificates == null || certificates.length <= 0) return null;
        try {
            CertificateFactory certificateFactory = CertificateFactory.getInstance("X.509");
            KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType());
            keyStore.load(null);
            int index = 0;
            for (InputStream is : certificates) {
                String certificateAlias = Integer.toString(index++);
                Certificate certificate = certificateFactory.generateCertificate(is);
                keyStore.setCertificateEntry(certificateAlias, certificate);
                try {
                    if (is != null)
                        is.close();
                } catch (IOException ignored) {
                }
            }
            TrustManagerFactory trustManagerFactory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
            trustManagerFactory.init(keyStore);
            return trustManagerFactory.getTrustManagers();
            // TODO: 2016/11/11 no perfect solution yet for validation
            // failures caused by certificate validity-period errors
//            TrustManager[] keyStoreTrustManagers = trustManagerFactory.getTrustManagers();
//            return getNotValidateTimeTrustManagers((X509TrustManager[]) keyStoreTrustManagers);
        } catch (Exception e) {
            e.printStackTrace();
        }
        return null;
    }

    /**
     * Builds key managers from a BKS client keystore for mutual TLS;
     * returns null when either argument is missing or loading fails.
     */
    private static KeyManager[] prepareKeyManager(InputStream bksFile, String password) {
        try {
            if (bksFile == null || password == null) return null;
            KeyStore clientKeyStore = KeyStore.getInstance("BKS");
            clientKeyStore.load(bksFile, password.toCharArray());
            KeyManagerFactory keyManagerFactory = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
            keyManagerFactory.init(clientKeyStore, password.toCharArray());
            return keyManagerFactory.getKeyManagers();
        } catch (Exception e) {
            e.printStackTrace();
        }
        return null;
    }

    /** Wraps each trust manager so certificate validity dates are ignored. */
    private static NotValidateTimeTrustManager[] getNotValidateTimeTrustManagers(X509TrustManager[] trustManagers) {
        NotValidateTimeTrustManager[] notValidateTimeTrustManagers = new NotValidateTimeTrustManager[trustManagers.length];
        for (int i = 0; i< trustManagers.length; i++) {
            notValidateTimeTrustManagers[i] = new NotValidateTimeTrustManager(trustManagers[i]);
        }
        return notValidateTimeTrustManagers;
    }

    /**
     * TrustManager that skips certificate validity-period checks.
     * <p>
     * Intended to keep validation working when users set their device
     * clock incorrectly.
     * Note: since the validity-period check is not the last step of chain
     * validation, this TrustManager is still unsafe and is not recommended.
     */
    private static class NotValidateTimeTrustManager implements X509TrustManager {
        private X509TrustManager defaultTrustManager;

        public NotValidateTimeTrustManager(X509TrustManager defaultTrustManager) {
            this.defaultTrustManager = defaultTrustManager;
        }

        @Override
        public void checkClientTrusted(X509Certificate[] chain, String authType) throws CertificateException {
            defaultTrustManager.checkClientTrusted(chain, authType);
        }

        @Override
        public void checkServerTrusted(X509Certificate[] chain, String authType) throws CertificateException {
            try {
                defaultTrustManager.checkServerTrusted(chain, authType);
            } catch (CertificateException e) {
                e.printStackTrace();
                // Walk the cause chain: swallow only validity-period errors,
                // rethrow everything else.
                Throwable t = e;
                while (t != null) {
                    if (t instanceof CertificateExpiredException
                            || t instanceof CertificateNotYetValidException)
                        return;
                    t = t.getCause();
                }
                throw e;
            }
        }

        @Override
        public X509Certificate[] getAcceptedIssuers() {
            return defaultTrustManager.getAcceptedIssuers();
        }
    }

    // SECURITY NOTE(review): trusts every certificate unconditionally.
    private static class UnSafeTrustManager implements X509TrustManager {
        @Override
        public void checkClientTrusted(X509Certificate[] chain, String authType) throws CertificateException {
        }

        @Override
        public void checkServerTrusted(X509Certificate[] chain, String authType) throws CertificateException {
        }

        @Override
        public X509Certificate[] getAcceptedIssuers() {
            return new X509Certificate[]{};
        }
    }

    // SECURITY NOTE(review): accepts every hostname, and additionally dumps
    // the peer certificate chain to the app-private file "cert.crt".
    private static class UnSafeHostnameVerifier implements HostnameVerifier {
        public UnSafeHostnameVerifier(Context context) {
            super();
            this.context = context;
        }

        private Context context;

        @Override
        public boolean verify(String hostname, SSLSession session) {
            try {
                javax.security.cert.X509Certificate[] chain = session.getPeerCertificateChain();
                for (int i = 0; i < chain.length; i++) {
//                    LogUtil.log("xq","aaa "+i+" "+ Base64.encodeToString(chain[i].getEncoded(),Base64.DEFAULT)+" "+Base64.encodeToString(chain[i].getEncoded(),Base64.DEFAULT).length() );
                    FileOutputStream outStream = context.openFileOutput("cert.crt", Context.MODE_PRIVATE);
                    outStream.write("-----BEGIN CERTIFICATE-----\n".getBytes());
                    outStream.write(Base64.encodeToString(chain[i].getEncoded(),Base64.DEFAULT).getBytes());
                    outStream.write("-----END CERTIFICATE-----\n".getBytes());
                    outStream.close();
                }
            } catch (Exception e) {
                e.printStackTrace();
            }
            return true;
        }
    }

    /**
     * Used when skipping verification, i.e. trusting all certificates.
     * Security risk — use with extreme caution!!!
     *
     * @return
     */
    public static UnSafeHostnameVerifier getUnSafeHostnameVerifier(Context context) {
        return new UnSafeHostnameVerifier(context);
    }
}
use rand::Rng;
pub fn shuffle_deck(deck: &mut [Colors]) {
let mut rng = rand::thread_rng();
let mut n = deck.len();
while n > 1 {
let k = rng.gen_range(0..n);
n -= 1;
deck.swap(n, k);
}
} |
#!/bin/bash
# Uninstall helper: removes the printer preflight script and its cached output.

# Guard: if MUNKIPATH is unset or empty, the paths below would resolve
# relative to the current working directory and could delete unrelated
# files — refuse to run in that case.
if [ -z "${MUNKIPATH}" ]; then
	echo "MUNKIPATH is not set; refusing to remove files." >&2
	exit 1
fi

# Remove printer script
rm -f "${MUNKIPATH}preflight.d/printer.py"

# Remove printers.txt file
rm -f "${MUNKIPATH}preflight.d/cache/printer.txt"
# -----------------------------------------------------------------------------
#
# Package : lcid
# Version : 1.0.0
# Source repo : https://github.com/sindresorhus/lcid
# Tested on : RHEL 8.3
# Script License: Apache License, Version 2 or later
# Maintainer : BulkPackageSearch Automation <sethp@us.ibm.com>
#
# Disclaimer: This script has been tested in root mode on given
# ========== platform using the mentioned version of the package.
# It may not work as expected with newer versions of the
# package and/or distribution. In such case, please
# contact "Maintainer" of this script.
#
# ----------------------------------------------------------------------------
PACKAGE_NAME=lcid
PACKAGE_VERSION=1.0.0
PACKAGE_URL=https://github.com/sindresorhus/lcid

# Install build/test prerequisites and configure package repositories.
yum -y update && yum install -y yum-utils nodejs nodejs-devel nodejs-packaging npm python38 python38-devel ncurses git gcc gcc-c++ libffi libffi-devel ncurses git jq make cmake
yum-config-manager --add-repo http://rhn.pbm.ihost.com/rhn/latest/8.3Server/ppc64le/appstream/
yum-config-manager --add-repo http://rhn.pbm.ihost.com/rhn/latest/8.3Server/ppc64le/baseos/
yum-config-manager --add-repo http://rhn.pbm.ihost.com/rhn/latest/7Server/ppc64le/optional/
yum install -y firefox liberation-fonts xdg-utils && npm install n -g && n latest && npm install -g npm@latest && export PATH="$PATH" && npm install --global yarn grunt-bump xo testem acorn

# Extract the distro pretty-name for the status report lines below.
OS_NAME=$(python3 -c "os_file_data=open('/etc/os-release').readlines();os_info = [i.replace('PRETTY_NAME=','').strip() for i in os_file_data if i.startswith('PRETTY_NAME')];print(os_info[0])")
HOME_DIR=$(pwd)

# Clone the package. On failure, record the result and exit 0 so the batch
# automation driving this script continues with the next package.
if ! git clone "$PACKAGE_URL" "$PACKAGE_NAME"; then
	echo "------------------$PACKAGE_NAME:clone_fails---------------------------------------"
	echo "$PACKAGE_URL $PACKAGE_NAME" > /home/tester/output/clone_fails
	echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | Clone_Fails" > /home/tester/output/version_tracker
	exit 0
fi
cd "$HOME_DIR/$PACKAGE_NAME"
git checkout "$PACKAGE_VERSION"
# Refresh the version from package.json in case the tag differs.
PACKAGE_VERSION=$(jq -r ".version" package.json)

# run the test command from test.sh
# BUG FIX: the original `if ! a && b && c` only negated the first command
# (parsed as `(! a) && b && c`), so the failure branch was entered only in
# the nonsensical case where install failed AND both audit fixes succeeded.
# Group the whole sequence in a subshell so its combined result is negated.
if ! (npm install && npm audit fix && npm audit fix --force); then
	echo "------------------$PACKAGE_NAME:install_fails-------------------------------------"
	echo "$PACKAGE_URL $PACKAGE_NAME"
	echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | Install_Fails"
	exit 0
fi
cd "$HOME_DIR/$PACKAGE_NAME"
if ! npm test; then
	echo "------------------$PACKAGE_NAME:install_success_but_test_fails---------------------"
	echo "$PACKAGE_URL $PACKAGE_NAME"
	echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Fail | Install_success_but_test_Fails"
	exit 0
else
	echo "------------------$PACKAGE_NAME:install_&_test_both_success-------------------------"
	echo "$PACKAGE_URL $PACKAGE_NAME"
	echo "$PACKAGE_NAME | $PACKAGE_URL | $PACKAGE_VERSION | $OS_NAME | GitHub | Pass | Both_Install_and_Test_Success"
	exit 0
fi
// <reponame>astoctas/firmatacpp
#include "firmservo.h"
#include <iostream>
namespace firmata {
// Constructs the servo feature on top of the shared Firmata I/O channel,
// delegating setup to the Base class.
Servo::Servo(FirmIO* firmIO) : Base(firmIO) {
};
// Destructor — no servo-specific resources to release.
Servo::~Servo() {};
// Sends a sysex servo-attach request for the given device number.
// NOTE(review): presumably deviceNum identifies a pin/servo slot on the
// board — confirm against the firmware's servo protocol.
void Servo::servoAttach(uint8_t deviceNum) {
sysexCommand({ FIRMATA_SERVO_REQUEST, FIRMATA_SERVO_ATTACH, deviceNum });
};
// Sends a sysex servo-write command: the target value is split into the
// Firmata LSB/MSB byte pair before transmission.
void Servo::servoWrite(uint8_t deviceNum, uint8_t value) {
	sysexCommand({
		FIRMATA_SERVO_REQUEST,
		FIRMATA_SERVO_WRITE,
		deviceNum,
		FIRMATA_LSB(value),
		FIRMATA_MSB(value)
	});
};
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.