text
stringlengths 1
1.05M
|
|---|
<reponame>matchup-ir/whooshy<filename>src/whoosh/automata/glob.py
# Copyright 2012 <NAME>. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY MATT CHAPUT ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL MATT CHAPUT OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are
# those of the authors and should not be interpreted as representing official
# policies, either expressed or implied, of Matt Chaput.
from whoosh.automata.fsa import ANY, EPSILON, NFA
# Constants for glob
_LIT = 0
_STAR = 1
_PLUS = 2
_QUEST = 3
_RANGE = 4
def parse_glob(pattern, _glob_multi="*", _glob_single="?",
_glob_range1="[", _glob_range2="]"):
pos = 0
last = None
while pos < len(pattern):
char = pattern[pos]
pos += 1
if char == _glob_multi: # *
# (Ignore more than one star in a row)
if last is not _STAR:
yield _STAR, None
last = _STAR
elif char == _glob_single: # ?
# (Ignore ? after a star)
if last is not _STAR:
yield _QUEST, None
last = _QUEST
elif char == _glob_range1: # [
chars = set()
negate = False
# Take the char range specification until the ]
while pos < len(pattern):
char = pattern[pos]
pos += 1
if char == _glob_range2:
break
chars.add(char)
if chars:
yield _RANGE, (chars, negate)
last = _RANGE
else:
yield _LIT, char
last = _LIT
def glob_automaton(pattern):
    """Build an NFA that accepts exactly the strings matching *pattern*.

    Each token from parse_glob() becomes one NFA state transition from
    state ``i`` to ``i + 1``; the state after the last token is final.
    """
    nfa = NFA(0)
    # i stays -1 for an empty pattern, making state 0 the final state
    # (the automaton then accepts only the empty string).
    i = -1
    for i, (op, arg) in enumerate(parse_glob(pattern)):
        if op is _LIT:
            # Single literal character transition.
            nfa.add_transition(i, arg, i + 1)
        elif op is _STAR:
            # Zero or more of any character: epsilon to skip, a back-edge
            # epsilon to repeat the ANY transition.
            nfa.add_transition(i, ANY, i + 1)
            nfa.add_transition(i, EPSILON, i + 1)
            nfa.add_transition(i + 1, EPSILON, i)
        elif op is _QUEST:
            # Exactly one of any character.
            nfa.add_transition(i, ANY, i + 1)
        elif op is _RANGE:
            # One transition per character in the class.
            # NOTE(review): the negation flag arg[1] is ignored here, so
            # [!...] classes (if ever produced) would not be honored.
            for char in arg[0]:
                nfa.add_transition(i, char, i + 1)
    nfa.add_final_state(i + 1)
    return nfa
|
import ctypes
import json
import os
import cv2
import numpy as np
import pycuda.driver as cuda
import tensorrt as trt
from backend.utils import timeit, draw_boxed_text
# Default detection confidence threshold.
conf_th = 0.3
# SSD-MobileNet expects 300x300 input images (height, width).
INPUT_HW = (300, 300)
# Each detection in the flat output array occupies 7 floats:
# [image_id, class_id, confidence, x1, y1, x2, y2].
OUTPUT_LAYOUT = 7

# Class-id (as string) -> human-readable label mapping.
# (The previous single-argument os.path.join() was a no-op and is removed.)
with open('models/ssd_mobilenet/labels.json') as json_data:
    CLASS_NAMES = json.load(json_data)
def _preprocess_trt(img, shape=(300, 300)):
    """Preprocess an image before TRT SSD inferencing.

    Converts BGR -> RGB, resizes to *shape*, reorders HWC -> CHW as
    float32, and scales pixel values from [0, 255] into [-1, 1].
    """
    rgb = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
    resized = cv2.resize(rgb, shape)
    chw = resized.transpose((2, 0, 1)).astype(np.float32)
    return chw * (2.0 / 255.0) - 1.0
class Detector():
    """TensorRT SSD-MobileNet object detector.

    Loads a serialized TensorRT engine plus its custom plugin, allocates
    page-locked host buffers and matching device buffers, and runs SSD
    inference on BGR images.
    """

    def _load_plugins(self):
        # The SSD graph uses a custom FlattenConcat op compiled as a TRT plugin.
        ctypes.CDLL("models/ssd_mobilenet/libflattenconcat.so")
        trt.init_libnvinfer_plugins(self.trt_logger, '')

    def _load_engine(self):
        # Deserialize the pre-built engine from disk.
        TRTbin = 'models/ssd_mobilenet/TRT_ssd_mobilenet_v2_coco.bin'
        with open(TRTbin, 'rb') as f, trt.Runtime(self.trt_logger) as runtime:
            return runtime.deserialize_cuda_engine(f.read())

    def _create_context(self):
        # Allocate one page-locked host buffer and one device buffer per
        # engine binding, sorted into input/output lists.
        for binding in self.engine:
            size = trt.volume(self.engine.get_binding_shape(binding)) * \
                self.engine.max_batch_size
            host_mem = cuda.pagelocked_empty(size, np.float32)
            cuda_mem = cuda.mem_alloc(host_mem.nbytes)
            self.bindings.append(int(cuda_mem))
            if self.engine.binding_is_input(binding):
                self.host_inputs.append(host_mem)
                self.cuda_inputs.append(cuda_mem)
            else:
                self.host_outputs.append(host_mem)
                self.cuda_outputs.append(cuda_mem)
        return self.engine.create_execution_context()

    @timeit
    def __init__(self):
        # Fixed palette so a given class id always draws in the same color.
        self.colors = np.random.uniform(0, 255, size=(100, 3))
        self.input_shape = INPUT_HW
        self.trt_logger = trt.Logger(trt.Logger.INFO)
        self._load_plugins()
        self.engine = self._load_engine()

        self.host_inputs = []
        self.cuda_inputs = []
        self.host_outputs = []
        self.cuda_outputs = []
        self.bindings = []
        self.stream = cuda.Stream()
        self.context = self._create_context()

    def __del__(self):
        """Free CUDA memories."""
        del self.stream
        del self.cuda_outputs
        del self.cuda_inputs

    @timeit
    def prediction(self, img):
        """Run inference on a BGR image; returns the flat detection array."""
        img_resized = _preprocess_trt(img, self.input_shape)
        # Host -> device copy, async inference, then device -> host copies,
        # all on the same stream; synchronize before reading results.
        np.copyto(self.host_inputs[0], img_resized.ravel())
        cuda.memcpy_htod_async(
            self.cuda_inputs[0], self.host_inputs[0], self.stream)
        self.context.execute_async(
            batch_size=1,
            bindings=self.bindings,
            stream_handle=self.stream.handle)
        cuda.memcpy_dtoh_async(
            self.host_outputs[1], self.cuda_outputs[1], self.stream)
        cuda.memcpy_dtoh_async(
            self.host_outputs[0], self.cuda_outputs[0], self.stream)
        self.stream.synchronize()
        output = self.host_outputs[0]
        return output

    @timeit
    def filter_prediction(self, output, img, conf_th=0.3, conf_class=None):
        """Filter raw detections by confidence and (optionally) class id.

        conf_class: optional iterable of class ids to keep; None or empty
        keeps every class.  (Default changed from a mutable ``[]`` to
        ``None`` — same behavior, avoids the shared-mutable-default pitfall.)
        Returns (boxes, confs, clss) with boxes in pixel coordinates.
        """
        conf_class = conf_class or []
        img_h, img_w, _ = img.shape
        boxes, confs, clss = [], [], []
        # Detections are packed OUTPUT_LAYOUT floats apart; see module constant.
        for prefix in range(0, len(output), OUTPUT_LAYOUT):
            conf = float(output[prefix + 2])
            if conf < conf_th:
                continue
            # Coordinates are normalized [0, 1]; scale to pixels.
            x1 = int(output[prefix + 3] * img_w)
            y1 = int(output[prefix + 4] * img_h)
            x2 = int(output[prefix + 5] * img_w)
            y2 = int(output[prefix + 6] * img_h)
            cls = int(output[prefix + 1])
            if len(conf_class) > 0 and cls not in conf_class:
                continue
            boxes.append((x1, y1, x2, y2))
            confs.append(conf)
            clss.append(cls)
        return boxes, confs, clss

    def draw_boxes(self, image, boxes, confs, clss):
        """Draw detection rectangles and labeled confidence text on *image*."""
        for (box, cf, cls) in zip(boxes, confs, clss):
            x_min, y_min, x_max, y_max = box[0], box[1], box[2], box[3]
            color = self.colors[cls]
            cv2.rectangle(image, (x_min, y_min), (x_max, y_max), color, 2)
            txt_loc = (max(x_min + 2, 0), max(y_min + 2, 0))
            txt = '{} {:.2f}'.format(CLASS_NAMES[str(cls)], cf)
            image = draw_boxed_text(image, txt, txt_loc, color)
        return image  # [..., ::-1]
if __name__ == "__main__":
    # Smoke test: run the detector on a sample image, print the detected
    # (label, confidence) pairs and save the annotated result.
    image = cv2.imread("./imgs/image.jpeg")
    detector = Detector()
    output = detector.prediction(image)
    boxes, confs, clss = detector.filter_prediction(output, image, conf_th=0.3)
    print([(CLASS_NAMES[str(c)], prob) for (c, prob) in zip(clss, confs)])
    image = detector.draw_boxes(image, boxes, confs, clss)
    cv2.imwrite("./imgs/outputcv.jpg", image)
|
#!/bin/bash -eux
# Release helper: tags and pushes the CE camunda-bpm-platform Docker image.
# EE images are never pushed from here.
EE=${EE:-false}
# VERSION/DISTRO/SNAPSHOT default to the values baked into the Dockerfile.
VERSION=${VERSION:-$(grep VERSION= Dockerfile | head -n1 | cut -d = -f 2)}
DISTRO=${DISTRO:-$(grep DISTRO= Dockerfile | cut -d = -f 2)}
SNAPSHOT=${SNAPSHOT:-$(grep SNAPSHOT= Dockerfile | cut -d = -f 2)}
IMAGE=camunda/camunda-bpm-platform
# Tag the locally built ${IMAGE}:${DISTRO} image with ${1} and push it.
function tag_and_push {
    local tag=${1}
    docker tag ${IMAGE}:${DISTRO} ${IMAGE}:${tag}
    docker push ${IMAGE}:${tag}
}
# Never publish the enterprise edition from this pipeline.
if [ "${EE}" = "true" ]; then
    echo "Not pushing EE image to docker hub"
    exit 0
fi

# check whether the CE image for distro was already released and exit in that case
if [ $(docker manifest inspect $IMAGE:${DISTRO}-${VERSION} > /dev/null ; echo $?) == '0' ]; then
    echo "Not pushing already released CE image"
    exit 0
fi

# Authenticate with Docker Hub (credentials provided by CI).
# The login was previously issued twice back-to-back; once is sufficient.
docker login -u "${DOCKER_HUB_USERNAME}" -p "${DOCKER_HUB_PASSWORD}"
if [ "${SNAPSHOT}" = "true" ]; then
    # Snapshot builds receive *-SNAPSHOT tags; the tomcat distro is the
    # default and additionally gets the distro-less tags.
    tag_and_push "${DISTRO}-${VERSION}-SNAPSHOT"
    tag_and_push "${DISTRO}-SNAPSHOT"
    if [ "${DISTRO}" = "tomcat" ]; then
        tag_and_push "${VERSION}-SNAPSHOT"
        tag_and_push "SNAPSHOT"
    fi
else
    # Release builds get plain version tags.
    tag_and_push "${DISTRO}-${VERSION}"
    if [ "${DISTRO}" = "tomcat" ]; then
        tag_and_push "${VERSION}"
    fi
fi
# Latest Docker image is created and pushed just once when a new version is released.
# Latest tag refers to the latest minor release of Camunda Platform.
# https://github.com/camunda/docker-camunda-bpm-platform/blob/next/README.md#supported-tagsreleases
# The 1st condition matches only when the version branch is the same as the main branch.
git fetch origin next
# Only tag "latest" when building from the main ("next") branch and never
# for snapshot builds.
if [ $(git rev-parse HEAD) = $(git rev-parse FETCH_HEAD) ] && [ "${SNAPSHOT}" = "false" ]; then
    # tagging image as latest
    tag_and_push "${DISTRO}-latest"
    tag_and_push "${DISTRO}"
    if [ "${DISTRO}" = "tomcat" ]; then
        tag_and_push "latest"
    fi
fi
|
package at.downdrown.somfy.exception;
/**
* Base class for all library specific exceptions.
*
* @author <NAME>
*/
public abstract class SomfyClientException extends Exception {

    /**
     * Creates a new exception with a descriptive message.
     *
     * @param message the detail message
     */
    public SomfyClientException(String message) {
        super(message);
    }

    /**
     * Creates a new exception with a descriptive message and the underlying cause.
     *
     * @param message the detail message
     * @param cause the exception that triggered this one
     */
    public SomfyClientException(String message, Throwable cause) {
        super(message, cause);
    }
}
|
"use strict";

// Compiled (CommonJS) icon module: an SVG description of the Android
// "close/cancel" glyph as a { viewBox, children } tree consumed by an
// icon renderer.
Object.defineProperty(exports, "__esModule", {
  value: true
});
exports.androidCancel = void 0;
// NOTE(review): the inner "path" child repeats exactly the same "d" data as
// its parent path — presumably a generator artifact; confirm before removing.
var androidCancel = {
  "viewBox": "0 0 512 512",
  "children": [{
    "name": "g",
    "attribs": {},
    "children": [{
      "name": "path",
      "attribs": {
        "d": "M256,48C140.559,48,48,140.559,48,256c0,115.436,92.559,208,208,208c115.435,0,208-92.564,208-208\r\n\t\tC464,140.559,371.436,48,256,48z M360.002,330.881l-29.12,29.117L256,285.117l-74.881,74.881l-29.121-29.117L226.881,256\r\n\t\tl-74.883-74.881l29.121-29.116L256,226.881l74.881-74.878l29.12,29.116L285.119,256L360.002,330.881z"
      },
      "children": [{
        "name": "path",
        "attribs": {
          "d": "M256,48C140.559,48,48,140.559,48,256c0,115.436,92.559,208,208,208c115.435,0,208-92.564,208-208\r\n\t\tC464,140.559,371.436,48,256,48z M360.002,330.881l-29.12,29.117L256,285.117l-74.881,74.881l-29.121-29.117L226.881,256\r\n\t\tl-74.883-74.881l29.121-29.116L256,226.881l74.881-74.878l29.12,29.116L285.119,256L360.002,330.881z"
        },
        "children": []
      }]
    }]
  }]
};
exports.androidCancel = androidCancel;
|
import Parser from './stream-parser';
import { InternalConnectionOptions } from '../connection';
import { FeatureExtAckToken } from './token';
// Feature identifiers used in the TDS FEATUREEXTACK token stream.  Each
// acknowledged feature is prefixed by one of these ids; the stream is
// terminated by TERMINATOR (0xFF).
const FEATURE_ID = {
  SESSIONRECOVERY: 0x01,
  FEDAUTH: 0x02,
  COLUMNENCRYPTION: 0x04,
  GLOBALTRANSACTIONS: 0x05,
  AZURESQLSUPPORT: 0x08,
  UTF8_SUPPORT: 0x0A,
  TERMINATOR: 0xFF
};
/**
 * Parse a FEATUREEXTACK token from the TDS stream.
 *
 * Reads (featureId, length, data) triples until the TERMINATOR id, then
 * invokes `callback` with a FeatureExtAckToken carrying the FEDAUTH
 * payload and the UTF-8 support flag.  Other acknowledged features are
 * read and discarded.
 */
function featureExtAckParser(parser: Parser, _options: InternalConnectionOptions, callback: (token: FeatureExtAckToken) => void) {
  let fedAuth: Buffer | undefined;
  let utf8Support: boolean | undefined;

  // Recursive async loop: one iteration per acknowledged feature.
  function next() {
    parser.readUInt8((featureId) => {
      if (featureId === FEATURE_ID.TERMINATOR) {
        // End of the feature list — emit the token.
        return callback(new FeatureExtAckToken(fedAuth, utf8Support));
      }

      parser.readUInt32LE((featureAckDataLen) => {
        parser.readBuffer(featureAckDataLen, (featureData) => {
          switch (featureId) {
            case FEATURE_ID.FEDAUTH:
              fedAuth = featureData;
              break;
            case FEATURE_ID.UTF8_SUPPORT:
              // A single non-zero byte means UTF-8 is supported.
              utf8Support = !!featureData[0];
              break;
          }
          next();
        });
      });
    });
  }

  next();
}
export default featureExtAckParser;
module.exports = featureExtAckParser;
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jena.fuseki.server;
import java.io.File ;
import java.io.IOException ;
import java.io.InputStream ;
import java.io.StringReader ;
import java.nio.file.Files ;
import java.nio.file.Path ;
import java.nio.file.StandardCopyOption ;
import java.util.* ;
import jena.cmd.CmdException ;
import org.apache.jena.atlas.io.IO ;
import org.apache.jena.atlas.lib.DS ;
import org.apache.jena.atlas.lib.FileOps ;
import org.apache.jena.atlas.lib.InternalErrorException ;
import org.apache.jena.fuseki.Fuseki ;
import org.apache.jena.fuseki.FusekiConfigException ;
import org.apache.jena.fuseki.build.Builder ;
import org.apache.jena.fuseki.build.FusekiConfig ;
import org.apache.jena.fuseki.build.Template ;
import org.apache.jena.fuseki.build.TemplateFunctions ;
import org.apache.jena.fuseki.servlets.ServletOps ;
import org.apache.jena.rdf.model.* ;
import org.apache.jena.riot.Lang ;
import org.apache.jena.riot.RDFDataMgr ;
import org.apache.jena.riot.RDFLanguages ;
import org.apache.jena.sparql.core.DatasetGraph ;
import org.apache.jena.tdb.sys.Names ;
public class FusekiServer
{
// Initialization of FUSEKI_HOME and FUSEKI_BASE is done in FusekiEnv.setEnvironment()
// so that the code is independent of any logging. FusekiLogging can use
// initialized values of FUSEKI_BASE while looking forlog4j configuration.
/** Root of the Fuseki installation for fixed files.
* This may be null (e.g. running inside a web application container) */
//public static Path FUSEKI_HOME = null ;
/** Root of the varying files in this deployment. Often $FUSEKI_HOME/run.
* This is not null - it may be /etc/fuseki, which must be writable.
*/
//public static Path FUSEKI_BASE = null ;
// Relative names of directories in the FUSEKI_BASE area.
public static final String runArea = FusekiEnv.ENV_runArea ;
public static final String databasesLocationBase = "databases" ;
// Place to put Lucene text and spatial indexes.
//private static final String databaseIndexesDir = "indexes" ;
public static final String backupDirNameBase = "backups" ;
public static final String configDirNameBase = "configuration" ;
public static final String logsNameBase = "logs" ;
public static final String systemDatabaseNameBase = "system" ;
public static final String systemFileAreaBase = "system_files" ;
public static final String templatesNameBase = "templates" ;
// This name is in web.xml as well.
public static final String DFT_SHIRO_INI = "shiro.ini" ;
// In FUSEKI_BASE
public static final String DFT_CONFIG = "config.ttl" ;
/** Directory for TDB databases - this is known to the assembler templates */
public static Path dirDatabases = null ;
/** Directory for writing backups */
public static Path dirBackups = null ;
/** Directory for assembler files */
public static Path dirConfiguration = null ;
/** Directory for assembler files */
public static Path dirLogs = null ;
/** Directory for system database */
public static Path dirSystemDatabase = null ;
/** Directory for files uploaded (e.g upload assmbler descriptions); not data uploads. */
public static Path dirFileArea = null ;
/** Directory for assembler files */
public static Path dirTemplates = null ;
private static boolean initialized = false ;
public static boolean serverInitialized = false ;
/** For testing - reset the places which initialize once */
public synchronized static void reset() {
initialized = false ;
FusekiServer.initialized = false ;
}
public synchronized static void init() {
if ( initialized )
return ;
initialized = true ;
try {
FusekiEnv.setEnvironment() ;
Path FUSEKI_HOME = FusekiEnv.FUSEKI_HOME ;
Path FUSEKI_BASE = FusekiEnv.FUSEKI_BASE ;
Fuseki.init() ;
Fuseki.configLog.info("FUSEKI_HOME="+ ((FUSEKI_HOME==null) ? "unset" : FUSEKI_HOME.toString())) ;
Fuseki.configLog.info("FUSEKI_BASE="+FUSEKI_BASE.toString());
// ---- Check FUSEKI_HOME and FUSEKI_BASE
// If FUSEKI_HOME exists, it may be FUSEKI_BASE.
if ( FUSEKI_HOME != null ) {
if ( ! Files.isDirectory(FUSEKI_HOME) )
throw new FusekiConfigException("FUSEKI_HOME is not a directory: "+FUSEKI_HOME) ;
if ( ! Files.isReadable(FUSEKI_HOME) )
throw new FusekiConfigException("FUSEKI_HOME is not readable: "+FUSEKI_HOME) ;
}
if ( Files.exists(FUSEKI_BASE) ) {
if ( ! Files.isDirectory(FUSEKI_BASE) )
throw new FusekiConfigException("FUSEKI_BASE is not a directory: "+FUSEKI_BASE) ;
if ( ! Files.isWritable(FUSEKI_BASE) )
throw new FusekiConfigException("FUSEKI_BASE is not writable: "+FUSEKI_BASE) ;
} else {
ensureDir(FUSEKI_BASE);
}
// Ensure FUSEKI_BASE has the assumed directories.
dirTemplates = writeableDirectory(FUSEKI_BASE, templatesNameBase) ;
dirDatabases = writeableDirectory(FUSEKI_BASE, databasesLocationBase) ;
dirBackups = writeableDirectory(FUSEKI_BASE, backupDirNameBase) ;
dirConfiguration = writeableDirectory(FUSEKI_BASE, configDirNameBase) ;
dirLogs = writeableDirectory(FUSEKI_BASE, logsNameBase) ;
dirSystemDatabase = writeableDirectory(FUSEKI_BASE, systemDatabaseNameBase) ;
dirFileArea = writeableDirectory(FUSEKI_BASE, systemFileAreaBase) ;
//Possible intercept point
// ---- Initialize with files.
if ( Files.isRegularFile(FUSEKI_BASE) )
throw new FusekiConfigException("FUSEKI_BASE exists but is a file") ;
// Copy missing files into FUSEKI_BASE
copyFileIfMissing(null, DFT_SHIRO_INI, FUSEKI_BASE) ;
copyFileIfMissing(null, DFT_CONFIG, FUSEKI_BASE) ;
for ( String n : Template.templateNames ) {
copyFileIfMissing(null, n, FUSEKI_BASE) ;
}
serverInitialized = true ;
} catch (RuntimeException ex) {
Fuseki.serverLog.error("Exception in server initialization", ex) ;
throw ex ;
}
}
private static boolean emptyDir(Path dir) {
return dir.toFile().list().length <= 2 ;
}
/** Copy a file from src to dst under name fn.
* If src is null, try as a classpath resource
* @param src Source directory, or null meaning use java resource.
* @param fn File name, a relative path.
* @param dst Destination directory.
*
*/
private static void copyFileIfMissing(Path src, String fn, Path dst) {
Path dstFile = dst.resolve(fn) ;
if ( Files.exists(dstFile) )
return ;
// fn may be a path.
if ( src != null ) {
try {
Files.copy(src.resolve(fn), dstFile, StandardCopyOption.COPY_ATTRIBUTES) ;
} catch (IOException e) {
IO.exception("Failed to copy file "+src, e);
e.printStackTrace();
}
} else {
try {
// Get from the file from area "org/apache/jena/fuseki/server" (our package)
InputStream in = FusekiServer.class.getResource(fn).openStream() ;
Files.copy(in, dstFile) ;
}
catch (IOException e) {
IO.exception("Failed to copy file from resource: "+src, e);
e.printStackTrace();
}
}
}
public static void initializeDataAccessPoints(ServerInitialConfig initialSetup, String configDir) {
List<DataAccessPoint> configFileDBs = initServerConfiguration(initialSetup) ;
List<DataAccessPoint> directoryDBs = FusekiConfig.readConfigurationDirectory(configDir) ;
List<DataAccessPoint> systemDBs = FusekiConfig.readSystemDatabase(SystemState.getDataset()) ;
List<DataAccessPoint> datapoints = new ArrayList<DataAccessPoint>() ;
datapoints.addAll(configFileDBs) ;
datapoints.addAll(directoryDBs) ;
datapoints.addAll(systemDBs) ;
// Having found them, set them all running.
enable(datapoints);
}
private static void enable(List<DataAccessPoint> datapoints) {
for ( DataAccessPoint dap : datapoints ) {
Fuseki.configLog.info("Register: "+dap.getName()) ;
DataAccessPointRegistry.register(dap.getName(), dap);
}
}
private static List<DataAccessPoint> initServerConfiguration(ServerInitialConfig params) {
// Has a side effect of global context setting
// when processing a config file.
// Compatibility.
List<DataAccessPoint> datasets = DS.list() ;
if ( params == null )
return datasets ;
if ( params.fusekiCmdLineConfigFile != null ) {
List<DataAccessPoint> confDatasets = processConfigFile(params.fusekiCmdLineConfigFile) ;
datasets.addAll(confDatasets) ;
}
else if ( params.fusekiServerConfigFile != null ) {
List<DataAccessPoint> confDatasets = processConfigFile(params.fusekiServerConfigFile) ;
datasets.addAll(confDatasets) ;
}
else if ( params.dsg != null ) {
DataAccessPoint dap = datasetDefaultConfiguration(params.datasetPath, params.dsg, params.allowUpdate) ;
datasets.add(dap) ;
} else if ( params.argTemplateFile != null ) {
DataAccessPoint dap = configFromTemplate(params.argTemplateFile, params.datasetPath, params.allowUpdate, params.params) ;
datasets.add(dap) ;
}
// No datasets is valid.
return datasets ;
}
private static List<DataAccessPoint> processConfigFile(String configFilename) {
if ( ! FileOps.exists(configFilename) ) {
Fuseki.configLog.warn("Configuration file '" + configFilename+"' does not exist") ;
return Collections.emptyList();
}
Fuseki.configLog.info("Configuration file: " + configFilename) ;
return FusekiConfig.readConfigFile(configFilename) ;
}
private static DataAccessPoint configFromTemplate(String templateFile, String datasetPath,
boolean allowUpdate, Map<String, String> params) {
// ---- Setup
if ( params == null ) {
params = new HashMap<>() ;
params.put(Template.NAME, datasetPath) ;
} else {
if ( ! params.containsKey(Template.NAME) ) {
Fuseki.configLog.warn("No NAME found in template parameters (added)") ;
params.put(Template.NAME, datasetPath) ;
}
}
//-- Logging
Fuseki.configLog.info("Template file: " + templateFile) ;
String dir = params.get(Template.DIR) ;
if ( dir != null ) {
if ( Objects.equals(dir, Names.memName) ) {
Fuseki.configLog.info("TDB dataset: in-memory") ;
} else {
if ( !FileOps.exists(dir) )
throw new CmdException("Directory not found: " + dir) ;
Fuseki.configLog.info("TDB dataset: directory=" + dir) ;
}
}
//-- Logging
datasetPath = DataAccessPoint.canonical(datasetPath) ;
// DRY -- ActionDatasets (and others?)
addGlobals(params);
String str = TemplateFunctions.templateFile(templateFile, params, Lang.TTL) ;
Lang lang = RDFLanguages.filenameToLang(str, Lang.TTL) ;
StringReader sr = new StringReader(str) ;
Model model = ModelFactory.createDefaultModel() ;
RDFDataMgr.read(model, sr, datasetPath, lang);
// ---- DataAccessPoint
Statement stmt = getOne(model, null, FusekiVocab.pServiceName, null) ;
if ( stmt == null ) {
StmtIterator sIter = model.listStatements(null, FusekiVocab.pServiceName, (RDFNode)null ) ;
if ( ! sIter.hasNext() )
ServletOps.errorBadRequest("No name given in description of Fuseki service") ;
sIter.next() ;
if ( sIter.hasNext() )
ServletOps.errorBadRequest("Multiple names given in description of Fuseki service") ;
throw new InternalErrorException("Inconsistent: getOne didn't fail the second time") ;
}
Resource subject = stmt.getSubject() ;
if ( ! allowUpdate ) {
// Opportunity for more sophisticated "read-only" mode.
// 1 - clean model, remove "fu:serviceUpdate", "fu:serviceUpload", "fu:serviceReadGraphStore", "fu:serviceReadWriteGraphStore"
// 2 - set a flag on DataAccessPoint
}
DataAccessPoint dap = Builder.buildDataAccessPoint(subject) ;
return dap ;
}
public static void addGlobals(Map<String, String> params) {
if ( params == null ) {
Fuseki.configLog.warn("FusekiServer.addGlobals : params is null", new Throwable()) ;
return ;
}
if ( ! params.containsKey("FUSEKI_BASE") )
params.put("FUSEKI_BASE", pathStringOrElse(FusekiEnv.FUSEKI_BASE, "unset")) ;
if ( ! params.containsKey("FUSEKI_HOME") )
params.put("FUSEKI_HOME", pathStringOrElse(FusekiEnv.FUSEKI_HOME, "unset")) ;
}
private static String pathStringOrElse(Path path, String dft) {
if ( path == null )
return dft ;
return path.toString() ;
}
// DRY -- ActionDatasets (and others?)
private static Statement getOne(Model m, Resource s, Property p, RDFNode o) {
StmtIterator iter = m.listStatements(s, p, o) ;
if ( ! iter.hasNext() )
return null ;
Statement stmt = iter.next() ;
if ( iter.hasNext() )
return null ;
return stmt ;
}
private static DataAccessPoint datasetDefaultConfiguration( String name, DatasetGraph dsg, boolean allowUpdate) {
name = DataAccessPoint.canonical(name) ;
DataAccessPoint dap = new DataAccessPoint(name) ;
DataService ds = Builder.buildDataService(dsg, allowUpdate) ;
dap.setDataService(ds) ;
return dap ;
}
// ---- Helpers
/** Ensure a directory exists, creating it if necessary.
*/
private static void ensureDir(Path directory) {
File dir = directory.toFile() ;
if ( ! dir.exists() ) {
boolean b = dir.mkdirs() ;
if ( ! b )
throw new FusekiConfigException("Failed to create directory: "+directory) ;
}
else if ( ! dir.isDirectory())
throw new FusekiConfigException("Not a directory: "+directory) ;
}
private static void mustExist(Path directory) {
File dir = directory.toFile() ;
if ( ! dir.exists() )
throw new FusekiConfigException("Does not exist: "+directory) ;
if ( ! dir.isDirectory())
throw new FusekiConfigException("Not a directory: "+directory) ;
}
private static boolean exists(Path directory) {
File dir = directory.toFile() ;
return dir.exists() ;
}
private static Path writeableDirectory(Path root , String relName ) {
Path p = makePath(root, relName) ;
ensureDir(p);
if ( ! Files.isWritable(p) )
throw new FusekiConfigException("Not writable: "+p) ;
return p ;
}
private static Path makePath(Path root , String relName ) {
Path path = root.resolve(relName) ;
// Must exist
// try { path = path.toRealPath() ; }
// catch (IOException e) { IO.exception(e) ; }
return path ;
}
}
|
#!/usr/bin/env bash
# Copyright (c) 2014 The Bitcoin Core developers
# Copyright (c) 2014-2015 The Dash developers
# Copyright (c) 2015-2017 The PIVX developers
# Copyright (c) 2017-2018 The Franc developers
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Functions used by more than one test
# Print all arguments to stderr.
function echoerr {
    echo "$@" 1>&2;
}

# Usage: ExtractKey <key> "<json_object_string>"
# Warning: this will only work for the very-well-behaved
# JSON produced by francd, do NOT use it to try to
# parse arbitrary/nested/etc JSON.
function ExtractKey {
    echo $2 | tr -d ' "{}\n' | awk -v RS=',' -F: "\$1 ~ /$1/ { print \$2}"
}
# Usage: CreateDataDir <dir> [extra-conf-line ...]
# Create a regtest data directory containing a franc.conf; any additional
# arguments are appended verbatim as extra configuration lines.
function CreateDataDir {
    DIR=$1
    mkdir -p $DIR
    CONF=$DIR/franc.conf
    echo "regtest=1" >> $CONF
    echo "keypool=2" >> $CONF
    echo "rpcuser=rt" >> $CONF
    echo "rpcpassword=rt" >> $CONF
    echo "rpcwait=1" >> $CONF
    echo "walletnotify=${SENDANDWAIT} -STOP" >> $CONF
    shift
    while (( "$#" )); do
        echo $1 >> $CONF
        shift
    done
}
# Usage: AssertEqual <value1> <value2>
# Numeric equality check (via bc).  On mismatch, runs CleanUp if such a
# function is defined, then exits the test with failure.
function AssertEqual {
    if (( $( echo "$1 == $2" | bc ) == 0 ))
    then
        echoerr "AssertEqual: $1 != $2"
        declare -f CleanUp > /dev/null 2>&1
        if [[ $? -eq 0 ]] ; then
            CleanUp
        fi
        exit 1
    fi
}
# CheckBalance -datadir=... amount account minconf
# Compare the wallet balance against the expected amount; on mismatch, run
# CleanUp (if defined) and exit with failure.
# NOTE(review): "declare -i" coerces $2 to an integer, so a fractional
# expected amount would error/truncate here — confirm callers only pass
# whole-coin amounts.
function CheckBalance {
    declare -i EXPECT="$2"
    B=$( $CLI $1 getbalance $3 $4 )
    if (( $( echo "$B == $EXPECT" | bc ) == 0 ))
    then
        echoerr "bad balance: $B (expected $2)"
        declare -f CleanUp > /dev/null 2>&1
        if [[ $? -eq 0 ]] ; then
            CleanUp
        fi
        exit 1
    fi
}
# Use: Address <datadir> [account]
# Print a fresh receiving address from the given wallet.
function Address {
    $CLI $1 getnewaddress $2
}

# Send from to amount
# Sends <amount> coins from wallet <from> to a fresh address of wallet <to>.
function Send {
    from=$1
    to=$2
    amount=$3
    address=$(Address $to)
    txid=$( ${SENDANDWAIT} $CLI $from sendtoaddress $address $amount )
}

# Use: Unspent <datadir> <n'th-last-unspent> <var>
# Print field <var> (e.g. txid/amount/vout) of the n'th-last unspent output.
function Unspent {
    local r=$( $CLI $1 listunspent | awk -F'[ |:,"]+' "\$2 ~ /$3/ { print \$3 }" | tail -n $2 | head -n 1)
    echo $r
}

# Use: CreateTxn1 <datadir> <n'th-last-unspent> <destaddress>
# produces hex from signrawtransaction
# Builds and signs a 1-input/1-output raw transaction spending that unspent.
function CreateTxn1 {
    TXID=$(Unspent $1 $2 txid)
    AMOUNT=$(Unspent $1 $2 amount)
    VOUT=$(Unspent $1 $2 vout)
    RAWTXN=$( $CLI $1 createrawtransaction "[{\"txid\":\"$TXID\",\"vout\":$VOUT}]" "{\"$3\":$AMOUNT}")
    ExtractKey hex "$( $CLI $1 signrawtransaction $RAWTXN )"
}

# Use: SendRawTxn <datadir> <hex_txn_data>
function SendRawTxn {
    ${SENDANDWAIT} $CLI $1 sendrawtransaction $2
}

# Use: GetBlocks <datadir>
# returns number of blocks from getinfo
function GetBlocks {
    $CLI $1 getblockcount
}
|
<gh_stars>0
import { gql } from 'apollo-server-express';
// Shared GraphQL schema fragment: the List interface for paginated results,
// the ListSortmentEnum sort direction, and the ListSort input type.
export const typeDefs = gql`
  interface List {
    items: [Node!]!
    totalItems: Int!
  }

  enum ListSortmentEnum {
    ASC,
    DESC
  }

  input ListSort {
    sorter: String!
    sortment: ListSortmentEnum!
  }
`

// Plain-JS mirror of ListSortmentEnum for use in resolver code.
export const ListSortmentEnum = Object.freeze({
  ASC: 'ASC',
  DESC: 'DESC'
});

export const resolvers = {
  List: {
    // Interface type resolution is left to concrete list types; this shared
    // module never resolves a List value itself.
    __resolveType: () => null
  }
}
|
import React from 'react';
/**
 * Static SVG icon component (a chat-bubble glyph on a gradient tile).
 * Props: width, height — forwarded to the <svg> element; the drawing
 * scales via the fixed 140x140 viewBox.
 */
export default class ArrowDown extends React.Component {
  render() {
    const { width, height } = this.props;
    return (
      <svg width={width} height={height} viewBox="0 0 140 140" >
        <defs>
          <linearGradient x1="0%" y1="50%" x2="100%" y2="50%" id="linearGradient-1">
            <stop stopColor="#4482FC" offset="0%"></stop>
            <stop stopColor="#20B3FF" offset="100%"></stop>
          </linearGradient>
        </defs>
        <g id="Icons" stroke="none" strokeWidth="1" fill="none" fillRule="evenodd">
          <g transform="translate(-1537.000000, -1383.000000)" fillRule="nonzero" id="icon_Chat">
            <g transform="translate(1537.000000, 1383.000000)">
              <path d="M11.6666667,0 L128.333333,0 C134.776655,3.94539688e-16 140,5.22334459 140,11.6666667 L140,140 L11.6666667,140 C5.22334459,140 7.89079376e-16,134.776655 0,128.333333 L0,11.6666667 C-7.89079376e-16,5.22334459 5.22334459,1.18361906e-15 11.6666667,0 Z" id="圆角矩形_1" fill="url(#linearGradient-1)"></path>
              <g id="组_84" transform="translate(35.000000, 32.083333)">
                <path d="M1.23863882e-13,37.9166667 C0.000578956243,22.0739496 10.6429964,8.20720584 25.9465142,4.10915402 C41.250032,0.0111021919 57.3979573,6.70379518 65.315612,20.4261285 C73.2332666,34.1484617 70.9459898,51.4780894 59.7391667,62.67625 L65.4208333,72.9166667 L36.5079167,72.9166667 L36.5079167,72.89625 C36.00625,72.9176389 35.5016667,72.9283333 34.9941667,72.9283333 C25.7105785,72.9267866 16.8079218,69.2370127 10.2450748,62.6708837 C3.6822278,56.1047547 -0.00309465827,47.2002544 1.23863882e-13,37.9166667 Z" id="椭圆_576_拷贝_10" fill="#FFFFFF"></path>
                <ellipse id="椭圆_1118_拷贝_7" fill="#1793E5" cx="19.775" cy="37.9166667" rx="4.55" ry="4.57916667"></ellipse>
                <circle id="椭圆_1118_拷贝_7-2" fill="#1793E5" cx="35" cy="37.9166667" r="4.57916667"></circle>
                <ellipse id="椭圆_1118_拷贝_7-3" fill="#1793E5" cx="50.1958333" cy="37.9166667" rx="4.55" ry="4.57916667"></ellipse>
              </g>
            </g>
          </g>
        </g>
      </svg>
    )
  }
}
|
class Operation:
    """Template for an operation applied to a wrapped data structure.

    Subclasses override ``_f`` to define the actual operation; the wrapped
    object must expose a ``value()`` method.
    """

    def __init__(self, data_structure):
        # The wrapped data structure (must provide .value()).
        self._i = data_structure

    def _f(self, value):
        # Implementation of the operation on the data structure
        # (override hook; the base implementation returns None).
        pass

    def perform_operation(self):
        """Apply the operation to the data structure's current value."""
        return self._f(self._i.value())
|
package graphqlbackend
import (
"context"
"testing"
"github.com/sourcegraph/sourcegraph/internal/database"
"github.com/sourcegraph/sourcegraph/internal/types"
)
// TestCreateUser exercises the createUser GraphQL mutation as a site admin
// and verifies that pending permissions are granted for the new user.
func TestCreateUser(t *testing.T) {
	resetMocks()
	// The authenticated caller is a site admin, so the mutation is permitted.
	database.Mocks.Users.GetByCurrentAuthUser = func(context.Context) (*types.User, error) {
		return &types.User{SiteAdmin: true}, nil
	}
	// User creation succeeds and yields user id 1 ("VXNlcjox" base64-encoded).
	database.Mocks.Users.Create = func(context.Context, database.NewUser) (*types.User, error) {
		return &types.User{ID: 1, Username: "alice"}, nil
	}
	// Record whether the mutation triggered the permission grant side effect.
	calledGrantPendingPermissions := false
	database.Mocks.Authz.GrantPendingPermissions = func(context.Context, *database.GrantPendingPermissionsArgs) error {
		calledGrantPendingPermissions = true
		return nil
	}

	RunTests(t, []*Test{
		{
			Schema: mustParseGraphQLSchema(t),
			Query: `
				mutation {
					createUser(username: "alice") {
						user {
							id
						}
					}
				}
			`,
			ExpectedResult: `
				{
					"createUser": {
						"user": {
							"id": "VXNlcjox"
						}
					}
				}
			`,
		},
	})

	if !calledGrantPendingPermissions {
		t.Fatal("!calledGrantPendingPermissions")
	}
}
|
#!/bin/bash
# This script creates a transient visualization of the power map that was supplied to 3D-ICE.
set -e # exit if a command fails

# Define the plot command
PLT_CMD='python -m HotGauge.visualization.ICE_plt power_map'

# Configure the directories
SIM_DIR=outputs/sim/
PLT_DIR=plots

# Navigate to the simulation directory
cd $SIM_DIR
mkdir -p $PLT_DIR

################################################################################
########################## Make the power map images ###########################
################################################################################
# One PNG per timestep, named ptrace_0000.png, ptrace_0001.png, ...
$PLT_CMD die_grid.pows IC.flp -o $PLT_DIR/ptrace_{step:04}.png

################################################################################
################### Convert the images into a video (mp4)  ####################
################################################################################
# -y overwrites an existing video; -q:v 3 selects high visual quality.
ffmpeg -y -i plots/ptrace_%04d.png -q:v 3 plots/ptrace.mp4
|
<gh_stars>10-100
/*-
* Copyright (c) 2018 <NAME> <<EMAIL>>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
#include <sys/cdefs.h>
#include <sys/errno.h>
#include <sys/param.h>
#include <sys/mbuf.h>
#include <net/ethernet.h>
#include <net/if.h>
#include <net/if_arp.h>
#include <netinet/in.h>
#include <netinet/if_ether.h>
#include <netinet6/ip6_var.h>
/* Ethernet broadcast destination address (ff:ff:ff:ff:ff:ff). */
static const u_char etherbroadcastaddr[ETHER_ADDR_LEN] =
    { 0xff, 0xff, 0xff, 0xff, 0xff, 0xff };
/*
 * ether_input() -- demultiplex a received Ethernet frame.
 *
 * Reads the 16-bit type field from the frame header and hands the mbuf to
 * the matching protocol input routine.  Frames with an unrecognized type
 * are dropped.
 */
static void
ether_input(struct ifnet *ifp, struct mbuf *m)
{
	struct ether_header *eh;
	int type;

	eh = mtod(m, struct ether_header *);
	type = ntohs(eh->ether_type);	/* type field is in network byte order */

	switch (type) {
	case ETHERTYPE_ARP:
		/* NOTE(review): the mbuf is freed here after arp_input(),
		 * while the IP/IPv6 paths keep it -- this assumes arp_input()
		 * does not consume the mbuf itself; confirm to rule out a
		 * double free. */
		arp_input(ifp, m);
		m_free(m);
		break;
	case ETHERTYPE_IP:
		ip_input(ifp, m);	/* ip_input takes ownership of m */
		break;
	case ETHERTYPE_IPV6:
		ip6_input(ifp, m);	/* ip6_input takes ownership of m */
		break;
	default:
		m_free(m);	/* unknown protocol: drop the frame */
		break;
	}
}
/*
 * ether_resolve_addr() -- fill *phdr with the link-layer header for dst.
 *
 * Currently only IPv4 (ARP) resolution is supported.  Returns the
 * arpresolve() result for AF_INET, or -1 for unsupported address families.
 */
static int
ether_resolve_addr(struct ifnet *ifp, struct mbuf *m,
    const struct sockaddr *dst, struct route *ro, u_char *phdr)
{
	int error;

	error = -1;

	switch (dst->sa_family) {
	case AF_INET:
		error = arpresolve(ifp, 0, m, dst, phdr);
		/* Explicit break: the original fell through to default
		 * (harmless here, but flagged by -Wimplicit-fallthrough
		 * and fragile if another case is ever added). */
		break;
	default:
		break;
	}	/* stray ';' after this brace removed */

	return (error);
}
/*
 * ether_output() -- transmit an mbuf on an Ethernet interface.
 *
 * Obtains the ETHER_HDR_LEN-byte link-layer header either from a cached
 * prepend in the route (ro->ro_prepend) or by resolving the destination
 * address, then places it in front of the payload -- reusing leading
 * space in the mbuf when available, otherwise chaining a freshly
 * allocated header mbuf.  Returns the driver's if_transmit() result, an
 * errno-style resolution error, or 0 when resolution is still pending.
 */
static int
ether_output(struct ifnet *ifp, struct mbuf *m,
    const struct sockaddr *dst, struct route *ro)
{
	struct ether_header *eh;
	struct mbuf *m0;
	void *phdr;
	char linkhdr[ETHER_HDR_LEN];
	int error;
	int avail;

	phdr = NULL;
	if (ro != NULL)
		phdr = ro->ro_prepend;	/* cached link-layer header, if any */
	if (phdr == NULL) {
		/* No cached header: resolve into the on-stack buffer. */
		phdr = linkhdr;
		error = ether_resolve_addr(ifp, m, dst, ro, phdr);
		if (error != 0)
			/* EWOULDBLOCK means resolution is in progress and the
			 * resolver took care of the packet -- report success. */
			return (error == EWOULDBLOCK ? 0 : error);
	}

	/* Leading space in the mbuf before the current payload start. */
	avail = m->m_data - m->m_data0;
	if (avail >= ETHER_HDR_LEN) {
		/* NOTE(review): assumes m_adj() with a negative length grows
		 * the mbuf at the front in this kernel -- confirm. */
		m_adj(m, -ETHER_HDR_LEN);
		m0 = m;
		m0->m_next = NULL;
	} else {
		/* No headroom: chain a separate mbuf holding the header. */
		m0 = m_alloc(ETHER_HDR_LEN);
		m0->m_next = m;
	}

	eh = (struct ether_header *)m0->m_data;
	memcpy(eh, phdr, ETHER_HDR_LEN);

	return ((ifp->if_transmit)(ifp, m0));
}
/*
 * ether_ifattach() -- initialize Ethernet-specific fields of an interface.
 *
 * Sets address/header lengths, the MTU, the input/output/encap handlers,
 * and copies the hardware address.  Returns MDX_OK on success or ENOMEM
 * when the hardware-address buffer cannot be allocated (the original
 * passed the NULL pointer straight into bcopy()).
 */
int
ether_ifattach(struct ifnet *ifp, uint8_t *hwaddr)
{
	ifp->if_addrlen = ETHER_ADDR_LEN;
	ifp->if_hdrlen = ETHER_HDR_LEN;
	ifp->if_mtu = ETHERMTU;
	ifp->if_output = ether_output;
	ifp->if_input = ether_input;
	ifp->if_requestencap = ether_requestencap;

	ifp->if_hw_addr = malloc(ifp->if_addrlen);
	if (ifp->if_hw_addr == NULL)
		return (ENOMEM);	/* NOTE(review): confirm callers check != MDX_OK */
	bcopy(hwaddr, ifp->if_hw_addr, ifp->if_addrlen);
	ifp->if_broadcastaddr = etherbroadcastaddr;

	return (MDX_OK);
}
|
/// A minimal block record used for uncle-candidate selection.
struct Block {
    /// Block creation time (unit not specified here -- treated as an
    /// abstract tick by `find_candidate_uncles`).
    timestamp: u64,
    /// True when the block is part of the canonical (main) chain.
    is_main_chain: bool,
}
impl Block {
    /// Construct a `Block` from its timestamp and main-chain flag.
    fn new(timestamp: u64, is_main_chain: bool) -> Block {
        Block {
            timestamp: timestamp,
            is_main_chain: is_main_chain,
        }
    }
}
/// Return the blocks eligible as uncle candidates: those created in the
/// timestamp window [5, 10] that are not on the main chain.
fn find_candidate_uncles(blocks: Vec<Block>) -> Vec<Block> {
    blocks
        .into_iter()
        .filter(|b| (5..=10).contains(&b.timestamp) && !b.is_main_chain)
        .collect()
}
fn main() {
    // Sample chain: only the blocks with timestamps 7 and 9 fall in the
    // uncle window [5, 10] AND are off the main chain.
    let blocks = vec![
        Block::new(3, true),
        Block::new(7, false),
        Block::new(9, false),
        Block::new(12, false),
    ];
    let candidate_uncles = find_candidate_uncles(blocks);
    // Print each candidate for manual inspection.
    for uncle in candidate_uncles {
        println!("Candidate Uncle - Timestamp: {}, Main Chain: {}", uncle.timestamp, uncle.is_main_chain);
    }
}
|
def check_permission(permission_set, required_permission):
    """Return the value stored for *required_permission*, or False if absent.

    *permission_set* is a mapping from permission name to its granted value;
    a missing entry is treated as "not granted".
    """
    try:
        return permission_set[required_permission]
    except KeyError:
        return False
|
#!/bin/bash
# Build an hours-distribution HTML fragment from failed-login data.

# define the variables
DIRECTORY=$1
HERE=$(pwd)

# move to the directory; abort if it does not exist so the pipeline below
# cannot run against the wrong working directory
cd "$DIRECTORY" || exit 1

# filter out 'hours' data (third column) from our data
cat ./*/failed_login_data.txt | awk '{print $3}' > temptxt.txt

# make target html rows: one data.addRow([...]) call per distinct hour
sort temptxt.txt | uniq -c | awk '{print "data.addRow([\x27" $2 "\x27, " $1 "]);"}' > temp.html

# add header and footer (paths quoted in case $HERE contains spaces)
cat "$HERE/html_components/hours_dist_header.html" temp.html "$HERE/html_components/hours_dist_footer.html" > hours_dist.html

# remove temp files
rm temptxt.txt temp.html
|
#!/bin/bash
# Apply Django database migrations, falling back through progressively
# more forgiving invocations until one of them succeeds.
{
# Preferred path: activate the project virtualenv, then migrate.
source env/bin/activate
python3.4 manage.py migrate
} || {
# Activation failed -- retry with whatever python3.4 is on PATH.
python3.4 manage.py migrate
} || {
# Very old Django setups need syncdb before migrate.
python3.4 manage.py syncdb --noinput
python3.4 manage.py migrate
} || {
# Migration files may not exist yet -- generate them first.
python3.4 manage.py makemigrations
python3.4 manage.py migrate
} || {
# Last resort: dump usage for the logs, then try one final time.
python3.4 manage.py -h
python3.4 manage.py migrate
}
# Always exit successfully so callers (e.g. deploy hooks) are not aborted
# even when every attempt above failed.
exit 0
|
<gh_stars>1-10
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package org.apache.streampipes.rest.impl.nouser;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.streampipes.rest.impl.AbstractRestResource;
import org.apache.streampipes.rest.management.PipelineManagement;
import org.apache.streampipes.rest.shared.annotation.GsonWithIds;
import org.apache.streampipes.rest.shared.annotation.NoAuthenticationRequired;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
/**
 * REST resource for system-internal pipeline operations that require no
 * authenticated user (e.g. an adapter asking for its pipeline to be stopped).
 */
@Path("/v2/pipelines")
public class PipelineNoUserResource extends AbstractRestResource {

    private static final Logger logger = LoggerFactory.getLogger(PipelineNoUserResource.class);

    /**
     * Stops the pipeline identified by {@code pipelineId}.
     *
     * @param pipelineId id of the pipeline to stop
     * @return the result of the stop operation as produced by {@link PipelineManagement}
     */
    @Path("/{pipelineId}/stopAdapter")
    @GET
    @Produces(MediaType.APPLICATION_JSON)
    @GsonWithIds
    @NoAuthenticationRequired
    public Response stop(@PathParam("pipelineId") String pipelineId) {
        // SLF4J parameterized logging instead of string concatenation:
        // avoids building the message when the level is disabled.
        logger.info("Pipeline: {} was stopped by the system", pipelineId);
        PipelineManagement pm = new PipelineManagement();
        return pm.stopPipeline(pipelineId);
    }
}
|
<reponame>lifenjoy51/mypet
'use strict';

// Jasmine/Karma smoke test for the myApp.myHome AngularJS module:
// verifies that the myHomeCtrl controller can be instantiated.
describe('myApp.myHome module', function() {

  // Load the module under test before each spec.
  beforeEach(module('myApp.myHome'));

  describe('myHome controller', function(){

    it('should ....', inject(function($controller) {
      //spec body
      var myHomeCtrl = $controller('myHomeCtrl');
      expect(myHomeCtrl).toBeDefined();
    }));

  });
});
|
// Search index mapping file names to documentation pages.
// NOTE(review): appears to be generated by Doxygen's search engine --
// confirm, and do not edit by hand if so.
var searchData=
[
  ['cpu_2elnt_2',['cpu.lnt',['../16bit_2cpu_8lnt.html',1,'']]],
  ['qs_5fport_2eh_3',['qs_port.h',['../16bit_2qs__port_8h.html',1,'']]],
  ['stdint_2eh_4',['stdint.h',['../16bit_2stdint_8h.html',1,'']]]
];
|
<reponame>developerasun/pawcon<gh_stars>1-10
import * as React from 'react';
/**
 * File-upload form for NFT images (.png only).
 * Posts the selected file to the server on submit.
 */
export function Upload () {
  // FIX : delete file upload for shop
  const handleSubmit = (e: React.FormEvent) => {
    e.preventDefault()
    // FIX : change server route later
    const data = document.getElementById('file') as HTMLInputElement
    const fileList = data.files
    // Guard: without a selected file, fileList[0] is undefined and the
    // original code posted an empty body. Silently do nothing instead.
    if (!fileList || fileList.length === 0) {
      return
    }
    console.log(fileList[0])
    fetch('some-path-to-server', {
      method : 'POST',
      body : fileList[0]
    })
  }
  return (
    <div>
      {/* FIX: upload NFT images here */}
      <h1>Upload route</h1>
      <p>
        Upload NFT image. Note that only .png format supported.
      </p>
      {/* user uploads a NFT image here */}
      <form
        onSubmit={handleSubmit}
        id='uploadNft'
        encType='multipart/form-data'>
        {/* accept only .png file */}
        <input type="file" name="file" id="file" accept='image/png'/>
        <button type="submit">Upload</button>
      </form>
    </div>
  );
}
|
<filename>modules/caas/api/src/main/java/io/cattle/platform/api/certificate/LoadBalancerServiceCertificateRemoveFilter.java
package io.cattle.platform.api.certificate;
import io.cattle.platform.core.addon.LbConfig;
import io.cattle.platform.core.constants.ServiceConstants;
import io.cattle.platform.core.dao.ServiceDao;
import io.cattle.platform.core.model.Certificate;
import io.cattle.platform.core.model.Service;
import io.cattle.platform.object.ObjectManager;
import io.cattle.platform.object.util.DataAccessor;
import io.github.ibuildthecloud.gdapi.exception.ClientVisibleException;
import io.github.ibuildthecloud.gdapi.request.ApiRequest;
import io.github.ibuildthecloud.gdapi.request.resource.AbstractValidationFilter;
import io.github.ibuildthecloud.gdapi.request.resource.ActionHandler;
import io.github.ibuildthecloud.gdapi.request.resource.ResourceManager;
import io.github.ibuildthecloud.gdapi.util.ResponseCodes;
import io.github.ibuildthecloud.gdapi.validation.ValidationErrorCodes;
import org.apache.commons.lang3.StringUtils;
import java.util.ArrayList;
import java.util.List;
import static io.cattle.platform.core.model.tables.ServiceTable.*;
/**
 * Validation filter that prevents removal of a certificate while it is
 * still referenced by any load-balancer service in the same account.
 */
public class LoadBalancerServiceCertificateRemoveFilter extends AbstractValidationFilter {

    ObjectManager objectManager;
    ServiceDao svcDao;

    public LoadBalancerServiceCertificateRemoveFilter(ObjectManager objectManager, ServiceDao svcDao) {
        super();
        this.objectManager = objectManager;
        this.svcDao = svcDao;
    }

    @Override
    public Object delete(String type, String id, ApiRequest request, ResourceManager next) {
        validateIfCertificateInUse(id);
        return super.delete(type, id, request, next);
    }

    /**
     * Throws a {@link ClientVisibleException} when the certificate is still
     * referenced (either as an additional or as the default certificate) by
     * any non-removed load-balancer service of the certificate's account.
     */
    @SuppressWarnings("unchecked")
    protected void validateIfCertificateInUse(String certificateId) {
        Certificate cert = objectManager.loadResource(Certificate.class, certificateId);
        List<String> serviceNames = new ArrayList<>();
        List<Service> lbServices = objectManager.find(Service.class, SERVICE.ACCOUNT_ID, cert.getAccountId(),
                SERVICE.REMOVED, null, SERVICE.KIND, ServiceConstants.KIND_LOAD_BALANCER_SERVICE);
        for (Service lbService : lbServices) {
            // get from lb config
            LbConfig lbConfig = DataAccessor.field(lbService, ServiceConstants.FIELD_LB_CONFIG, LbConfig.class);
            if (lbConfig == null) {
                continue;
            }
            List<Long> certIds = new ArrayList<>();
            if (lbConfig.getCertificateIds() != null) {
                // BUG FIX: the original added the (empty) certIds list to
                // itself, so services referencing the certificate only via
                // getCertificateIds() were never detected.
                certIds.addAll(lbConfig.getCertificateIds());
            }
            if (lbConfig.getDefaultCertificateId() != null) {
                certIds.add(lbConfig.getDefaultCertificateId());
            }
            if (certIds.contains(cert.getId())) {
                serviceNames.add(lbService.getName());
            }
        }
        if (!serviceNames.isEmpty()) {
            String serviceNameStr = StringUtils.join(serviceNames, ",");
            throw new ClientVisibleException(ResponseCodes.METHOD_NOT_ALLOWED, ValidationErrorCodes.INVALID_ACTION,
                    "Certificate is in use by load balancer services: " + serviceNameStr, null);
        }
    }

    @Override
    public Object perform(Object obj, ApiRequest request, ActionHandler next) {
        if (request.getAction().equalsIgnoreCase("remove")) {
            validateIfCertificateInUse(request.getId());
        }
        return super.perform(obj, request, next);
    }
}
|
<reponame>TomTyack/jss<filename>packages/sitecore-jss-angular/src/components/rendering-field.ts
/**
 * Base shape of a Sitecore rendering field: the raw field value plus the
 * optional inline-editing markup used by the Experience Editor.
 */
export interface RenderingField {
  value?: any;
  editable?: string;
}

/** Field referencing a file; `src` is the media URL. */
export interface FileField extends RenderingField {
  src?: string;
}

/** Field referencing an image; `src` is the media URL. */
export interface ImageField extends RenderingField {
  src?: string;
}

/**
 * General link field. The editable markup is split in two parts so that
 * custom link text can be rendered between them.
 */
export interface LinkField extends RenderingField {
  href?: string;
  text?: string;
  editableFirstPart?: string;
  editableLastPart?: string;
}

// tslint:disable-next-line:no-empty-interface
export interface RichTextField extends RenderingField { }

// tslint:disable-next-line:no-empty-interface
export interface TextField extends RenderingField { }
|
import styled from 'styled-components'
// Styled-components wrapper for the "Areas of Interest" section:
// centered column layout with a responsive 3-column / 1-column grid of
// interest cards. Colors are taken from the active theme; icon images are
// inverted in dark mode. The CSS below is a runtime template literal and
// is kept unchanged.
const AreasOfInterestStyle = styled.div`
  width: 100%;
  // height: 100vh;
  font-size: 0.8em;
  margin-top: 2em;
  padding-bottom: 3em;
  margin-bottom: 3em;
  border-bottom: 0.1em solid ${props => props.theme.textColor};
  display: flex;
  flex-direction: column;
  justify-content: center;
  // margin: 0 auto 0;
  align-items: center;
  #introContents {
    padding: 1em;
  }
  .heading {
    display: flex;
    flex-direction: column;
    justify-content: center;
    margin: 0 auto 0;
    align-items: center;
    h2 {
      margin: 0;
      font-size: 1.7em;
    }
    p {
      margin-top: 0.5em;
      font-size: 0.7em;
    }
    padding-bottom: 1em;
  }
  .interests {
    align-content: space-around;
    justify-content: space-around;
    display: grid;
    grid-gap: 2em;
    @media (orientation: landscape) {
      width: 52em;
      grid-template-columns: repeat(3, minmax(12em, 20vw));
      font-size: 0.8em;
    }
    @media (orientation: portrait) {
      width: 100%;
      grid-template-columns: 1fr;
    }
    h3 {
      margin: 0;
    }
  }
  .area {
    display: flex;
    flex-direction: column;
    justify-content: center;
    margin: 0 auto 0;
    align-items: center;
    border: 0.2em solid ${props => props.theme.bgColor2};
    border-radius: 1em;
    padding: 1em;
    p {
      font-size: 0.9em;
      text-align: center;
    }
    img {
      filter: invert(${props => (props.theme.name === 'light' ? '0' : '1')});
      width: auto;
      height: 4em;
    }
  }
`

export default AreasOfInterestStyle
|
#!/usr/bin/env bash
# Download the pywin32 wheel.
# BUG FIX: the original used ">> file.whl", which appends wget's stdout
# (progress goes to stderr, so essentially nothing) to the target file and
# lets wget write the real download to a URL-derived name. "-O" tells wget
# to save the document itself to the given filename.
wget -O pywin32-220.1-cp27-cp27m-win_amd64.whl http://www.lfd.uci.edu/~gohlke/pythonlibs/6kbpejrn/pywin32-220.1-cp27-cp27m-win_amd64.whl
|
<reponame>OSADP/C2C-RI
/*
Jameleon C2C RI plug-in - A plug-in that is used to perform C2C Conformance
Testing
*/
package org.fhwa.c2cri.plugin.c2cri;
import java.util.ArrayList;
import javax.swing.JOptionPane;
import net.sf.jameleon.SessionTag;
import net.sf.jameleon.TestCaseTag;
import net.sf.jameleon.exception.JameleonScriptException;
import org.fhwa.c2cri.applayer.ApplicationLayerStandard;
import org.fhwa.c2cri.applayer.ApplicationLayerStandardFactory;
import org.fhwa.c2cri.infolayer.InformationLayerStandard;
import org.fhwa.c2cri.infolayer.InformationLayerStandardFactory;
import org.fhwa.c2cri.messagemanager.MessageManager;
import test.transports.ListenerManager;
/**
* A Session tag for the C2CRI plug-in.
*
* @author TransCore ITS, LLC
* Last Updated: 11/10/2012
*
* An example of its use might:
*
* <pre><source>
* <testcase xmlns="jelly:jameleon">
* <C2CRI-session infoStd="tmdd-v3.0" beginSession="true">
* <C2CRI-assert-element
* functionId="Check that the data element is the input value."
* element="Center Name" value = "${TMC_Name}"/>
* </C2CRI-session>
* </testcase>
* </source></pre>
*
* @jameleon.function name="RI-session"
*/
public class RISessionTag extends SessionTag {

    /**
     * Name of the information layer standard to test against (e.g. "tmdd-v3.0").
     * @jameleon.attribute contextName="infoStd"
     */
    protected String infoStd;

    /**
     * Name of the application layer standard to test against.
     * @jameleon.attribute contextName="appStd"
     */
    protected String appStd;

    /**
     * If provided, the name of the request dialog that will implemented by this session.
     * @jameleon.attribute contextName="requestDialog"
     */
    protected String requestDialog;

    /**
     * If provided, the name of the subscription dialog that will implemented by this session.
     * @jameleon.attribute contextName="subscriptionDialog"
     */
    protected String subscriptionDialog;

    /**
     * If provided, the name of the publication dialog that will implemented by this session.
     * @jameleon.attribute contextName="publicationDialog"
     */
    protected String publicationDialog;

    /**
     * Local (RI-side) port, as a decimal string; parsed by {@link #getlclPort()}.
     * @jameleon.attribute contextName="lclPort"
     */
    protected String lclPort;

    /**
     * Local (RI-side) address.
     * @jameleon.attribute contextName="lclAddress"
     */
    protected String lclAddress;

    /**
     * System-under-test port, as a decimal string; parsed by {@link #getSUTPort()}.
     * @jameleon.attribute contextName="SUTPort"
     */
    protected String sutPort;

    /**
     * System-under-test address.
     * @jameleon.attribute contextName="SUTAddress"
     */
    protected String sutAddress;

    /**
     * Center mode the RI should operate in; passed through to the factories.
     * @jameleon.attribute contextName="RIMode"
     */
    protected String riMode;

    /**
     * URL of the WSDL / test configuration specification.
     * @jameleon.attribute contextName="RI_WEBSERVICEURL"
     */
    protected String wsdlURL;

    /**
     * NOTE(review): read from context but never used in this class -- confirm
     * whether setUpSession() should pass this (rather than wsdlURL) to
     * setTestSuiteSpecificationURL().
     * @jameleon.attribute contextName="INFOSTD_SUITEURL"
     */
    protected String infoStdSuiteURL;

    /**
     * NOTE(review): also unused in this class; see infoStdSuiteURL.
     * @jameleon.attribute contextName="APPSTD_SUITEURL"
     */
    protected String appStdSuiteURL;

    /**
     * Flag indicating whether the service will be defined from a WSDL file.
     *
     * @jameleon.attribute
     */
    protected boolean useWSDL = true;

    /**
     * The service name to be tested. Must match the WSDL definition
     * @jameleon.attribute
     */
    protected String serviceName;

    /**
     * The port name of the service to be tested. Must match the WSDL definition
     * @jameleon.attribute
     */
    protected String portName;

    /**
     * The operation name of the portType name of the service to be tested. Must match the WSDL definition
     * @jameleon.attribute
     */
    protected String operationName;

    /**
     * Flag indicating whether the operation specified is a publication.
     *
     * @jameleon.attribute
     */
    protected boolean isPublicationOperation = false;

    /**
     * The transport type that will be used for this service. Only applied when WSDL not used.
     * @jameleon.attribute
     */
    protected String transportType;

    /**
     * The encoding type that will be used for this service. Only applied when WSDL not used.
     * @jameleon.attribute
     */
    protected String encodingType;

    /**
     * A unique identifier for this test case.
     *
     * @jameleon.attribute required="true"
     */
    protected String testCaseIdentifier;

    /**
     * A user name to be applied.
     *
     * @jameleon.attribute
     */
    protected String userName = "";

    /**
     * A password to be applied.
     *
     * @jameleon.attribute
     */
    protected String password = "";

    /**
     * A handle for the C2C Center Client
     */
    // protected C2CClient externalCenter;

    /**
     * A handle for the C2C messages
     */
    protected ArrayList messageList;

    /**
     * the Service object managed by this Session
     */
    private ApplicationLayerStandard theApplicationLayerStandard;
    private InformationLayerStandard theInformationLayerStandard;

    /**
     * Gets the information Layer Standard.
     *
     * @return the information layer standard selected.
     */
    public String getInfoStd() {
        return infoStd;
    }

    /**
     * Gets the port to be used for the RI
     *
     * @return the port to be used for the RI.
     * @throws NumberFormatException if lclPort is not a decimal integer
     */
    public int getlclPort() {
        return Integer.parseInt(lclPort);
    }

    /**
     * Gets the address to be used for the RI
     *
     * @return the address to be used for the RI.
     */
    public String getlclAddress() {
        return lclAddress;
    }

    /**
     * Gets the port to be used for the SUT
     *
     * @return the port to be used for the SUT.
     * @throws NumberFormatException if sutPort is not a decimal integer
     */
    public int getSUTPort() {
        return Integer.parseInt(sutPort);
    }

    /**
     * Gets the address to be used for the SUT
     *
     * @return the address to be used for the SUT.
     */
    public String getSUTAddress() {
        return sutAddress;
    }

    /**
     * Gets the application Layer Standard
     *
     * @return the application layer standard selected.
     */
    public String getAppStd() {
        return appStd;
    }

    /**
     * Gets the information layer standard to request
     *
     * @return The info standard to use in startApplication.
     */
    protected String getRequestInfoStd() {
        String info = getInfoStd();
        return info;
    }

    /**
     * Gets the application layer standard to request
     *
     * @return The application layer standard to use in startApplication.
     */
    protected String getRequestAppStd() {
        String app = getAppStd();
        return app;
    }

    /**
     * Builds the application- and information-layer standard objects from
     * their factories when both standard names are configured. Requires
     * both appStd and infoStd; a configured-but-unsupported appStd alone
     * produces a user-visible error dialog and a script exception.
     */
    @Override
    public void setUpSession() {
        if ((getAppStd() != null)&&(getInfoStd()!=null)) {
            // Propagate the enclosing test case id to the shared singletons
            // so messages and listeners are attributed to this test case.
            if (this.getParent() instanceof TestCaseTag){
                TestCaseTag testCaseTag = (TestCaseTag)this.getParent();
                String testCaseID = testCaseTag.getTestCase().getTestCaseId();
                MessageManager.getInstance().setParentTestCase(testCaseID);
                ListenerManager.getInstance().setTestCaseID(testCaseID);
            }
            try {
                ApplicationLayerStandardFactory theStandardFactory = ApplicationLayerStandardFactory.getInstance();
                theStandardFactory.setApplicationLayerStandard(appStd);
                theStandardFactory.setCenterMode(riMode);
                theStandardFactory.setInformationLayerStandard(infoStd);
                theStandardFactory.setTestCase(testCaseIdentifier);
                theStandardFactory.setRequestDialog(requestDialog);
                theStandardFactory.setSubscriptionDialog(subscriptionDialog);
                theStandardFactory.setPublicationDialog(publicationDialog);
                log.debug("The wsdlURL file is "+wsdlURL);
                // NOTE(review): wsdlURL is used for both the config and the
                // suite specification URL, while infoStdSuiteURL /
                // appStdSuiteURL are never consumed -- confirm intent.
                theStandardFactory.setTestConfigSpecificationURL(wsdlURL);
                theStandardFactory.setTestSuiteSpecificationURL(wsdlURL);
                try {
                    theApplicationLayerStandard = theStandardFactory.getApplicationStandard();
                    InformationLayerStandardFactory theInfoStandardFactory = InformationLayerStandardFactory.getInstance();
                    theInfoStandardFactory.setApplicationLayerStandard(theApplicationLayerStandard);
                    theInfoStandardFactory.setCenterMode(riMode);
                    theInfoStandardFactory.setInformationLayerStandardName(infoStd);
                    theInfoStandardFactory.setTestCase(testCaseIdentifier);
                    theInformationLayerStandard = theInfoStandardFactory.getInformationStandard();
                } catch (Exception ex) {
                    ex.printStackTrace();
                    log.debug("*C2CRISessionTag: Error Creating Application Layer Standard ->" + ex.getMessage());
                    throw new JameleonScriptException("*C2CRISessionTag: Error Creating Application Layer Standard ->" + ex.getMessage(), this);
                }
                MessageManager theManager = MessageManager.getInstance();
                theManager.setParentTestCase(testCaseIdentifier);
            } catch (Exception ex) {
                log.debug("*C2CRISessionTag: Error processing WSDL ->" + ex.getMessage());
                JOptionPane.showMessageDialog(null, "Script Error: \n" + ex.getMessage(), "Error", JOptionPane.ERROR_MESSAGE);
                throw new JameleonScriptException("*C2CRISessionTag: Error processing WSDL ->" + ex.getMessage(), this);
            }
        } else if ((getAppStd() != null) && (!getAppStd().toUpperCase().contains("NTCIP2306"))) {
            JOptionPane.showMessageDialog(null, "Script Error: \n" + "'appStd' " + getAppStd() + " is not yet supported by the C2C RI", "Error", JOptionPane.ERROR_MESSAGE);
            throw new JameleonScriptException("'appStd' " + getAppStd() + " is not yet supported by the C2C RI", this);
        }
    }

    /**
     * Stops and releases both layer standards; failures while shutting the
     * transport down are surfaced as a JameleonScriptException.
     */
    @Override
    public void tearDownSession() {
        try {
            log.debug("C2CRISessionTag.tearDownSession: About to try and clear out the service!");
            if (theInformationLayerStandard != null){
                theInformationLayerStandard.stopServices();
                theInformationLayerStandard = null;
            }
            if (theApplicationLayerStandard != null){
                theApplicationLayerStandard.stopServices();
                theApplicationLayerStandard = null;
            }
        } catch (Exception ex) {
            throw new JameleonScriptException("Error tearing down C2CRISessionTag. Could not shutdown the Transport. /n" + ex.getMessage(), this);
        }
    }

    /**
     * Ensures that things are started off correctly.
     * (Intentionally empty: all setup happens in setUpSession().)
     */
    @Override
    public void startApplication() {
    }

    /** @return the application layer standard built in setUpSession(), or null. */
    public ApplicationLayerStandard getTheApplicationLayerStandard() {
        return theApplicationLayerStandard;
    }

    /** @return the information layer standard built in setUpSession(), or null. */
    public InformationLayerStandard getTheInformationLayerStandard() {
        return theInformationLayerStandard;
    }

    /** @return the configured operation name, or "None Specified" when unset. */
    public String getOperationName() {
        if (operationName != null) {
            return operationName;
        } else {
            return "None Specified";
        }
    }
}
|
x = 6, y = 4 is a feasible solution with an objective value of 14.
|
package com.movella.service;
import com.google.gson.JsonArray;
import com.google.gson.JsonElement;
import com.google.gson.JsonObject;
import com.google.gson.JsonParser;
import com.movella.dao.ContatoDAO;
import com.movella.exceptions.InvalidDataException;
import com.movella.model.Usuario;
import com.movella.responses.BadRequest;
import com.movella.responses.Forbidden;
import com.movella.responses.Success;
import com.movella.utils.Localization;
import spark.*;
/**
 * Spark HTTP routes for the "contato" (contact) resource: admin-only read
 * endpoints plus a public create endpoint used by the contact form.
 */
public class ContatoService {

    /** Admin-only: read one contact by its numeric id. */
    public static Route adminRead = (Request req, Response res) -> {
        final Session session = req.session();
        final Usuario sessionUsuario = (Usuario) session.attribute("user");
        // BUG FIX: attribute("user") is null for unauthenticated sessions;
        // the original dereferenced it and threw a NullPointerException
        // instead of returning 403.
        if (sessionUsuario == null || !sessionUsuario.getAcesso().equals("admin"))
            return new Forbidden(res);
        final String _id = req.params("id");
        if (_id == null)
            return new BadRequest(res, Localization.invalidId);
        int id;
        try {
            id = Integer.parseInt(_id);
        } catch (Exception e) {
            return new BadRequest(res, Localization.invalidId);
        }
        try {
            return new Success(res, ContatoDAO.read(id).toJson());
        } catch (InvalidDataException e) {
            return new BadRequest(res, e.message);
        }
    };

    /** Admin-only: list all contacts as a JSON array. */
    public static Route adminAll = (Request req, Response res) -> {
        final Session session = req.session();
        final Usuario sessionUsuario = (Usuario) session.attribute("user");
        // Same null-session guard as adminRead.
        if (sessionUsuario == null || !sessionUsuario.getAcesso().equals("admin"))
            return new Forbidden(res);
        try {
            final JsonArray out = new JsonArray();
            ContatoDAO.all().forEach((v) -> {
                out.add(v.toJson());
            });
            return new Success(res, out);
        } catch (InvalidDataException e) {
            return new BadRequest(res, e.message);
        } catch (RuntimeException e) {
            return new BadRequest(res);
        }
    };

    /** Public: create a contact from a JSON body (nome/email/assunto/mensagem). */
    public static Route create = (Request req, Response res) -> {
        final JsonObject body = JsonParser.parseString(req.body()).getAsJsonObject();
        final JsonElement _nome = body.get("nome");
        final JsonElement _email = body.get("email");
        final JsonElement _assunto = body.get("assunto");
        final JsonElement _mensagem = body.get("mensagem");
        // Each field is validated individually so the client gets a
        // field-specific error message.
        if (_nome == null)
            return new BadRequest(res, Localization.invalidName);
        if (_email == null)
            return new BadRequest(res, Localization.invalidEmail);
        if (_assunto == null)
            return new BadRequest(res, Localization.invalidSubject);
        if (_mensagem == null)
            return new BadRequest(res, Localization.invalidMessage);
        final String nome = _nome.getAsString();
        final String email = _email.getAsString();
        final String assunto = _assunto.getAsString();
        final String mensagem = _mensagem.getAsString();
        try {
            ContatoDAO.insert(nome, email, assunto, mensagem);
            return new Success(res, Localization.contactCreateSuccess);
        } catch (InvalidDataException e) {
            return new BadRequest(res, e.message);
        } catch (RuntimeException e) {
            return new BadRequest(res);
        }
    };
}
|
def classify_letter(letter):
    """Classify *letter* as ``"Vowel"`` or ``"Consonant"``.

    Only the exact lowercase characters a, e, i, o, u count as vowels;
    anything else (including uppercase vowels and multi-character strings)
    is reported as a consonant.
    """
    return "Vowel" if letter in {"a", "e", "i", "o", "u"} else "Consonant"
# BUG FIX: the original called classify_letter(letter) with an undefined
# name and included bare "Output:" / "Consonant" lines, which are not
# valid Python. Demonstrate with a concrete input under a main guard.
if __name__ == "__main__":
    print(classify_letter("b"))  # Output: Consonant
|
package com.johnnolcox.mintodo;
import android.content.res.Resources;
import com.johnnolcox.mintodo.R;
/**
 * Resolves and caches preference-key strings from Android resources so
 * callers do not repeat resources.getString(...) lookups.
 */
public class PreferenceKeys {

    // Key used to look up the night-mode setting in SharedPreferences.
    final String night_mode_pref_key;

    public PreferenceKeys(Resources resources){
        night_mode_pref_key = resources.getString(R.string.night_mode_pref_key);
    }
}
|
def count_unique(arr):
    """Return the number of distinct (hashable) elements in *arr*."""
    return len(set(arr))
|
# Run the GP-based and our own uncertainty experiment scripts on the
# "cubic" test function.
python uncertainty_gp.py -f cubic
python uncertainty_ours.py -f cubic
|
// JSS style factory: receives the Material-UI theme's breakpoints and
// spacing and returns the style rules for a card-like container with
// loading placeholders, an empty state, and a disabled overlay.
export default ({ breakpoints, spacing }) => ({
  container: {
    margin: 'auto',
    position: 'relative',
  },
  content: {
    display: 'flex',
    float: 'left',
    paddingBottom: 8,
    minHeight: 110,
    // Taller content with extra horizontal padding on md+ screens.
    [breakpoints.up('md')]: {
      minHeight: 200,
      paddingLeft: spacing.doubleBaseMargin,
      paddingRight: spacing.doubleBaseMargin,
    },
  },
  loadingContainer: {
    display: 'flex',
    flexDirection: 'row',
  },
  // Grey placeholder blocks shown while data is loading.
  loadingAvatar: {
    background: 'rgba(0,0,0,0.075)',
  },
  loadingInfo: {
    background: 'rgba(0,0,0,0.075)',
    height: spacing.doubleBaseMargin,
    marginTop: spacing.baseMargin,
  },
  emptyContainer: {
    justifyContent: 'center',
    alignItems: 'center',
  },
  // Full-size overlay that blocks interaction when the card is disabled.
  disabledBackdrop: {
    position: 'absolute',
    top: 0,
    left: 0,
    width: '100%',
    height: '100%',
    cursor: 'not-allowed',
  },
});
|
import React from "react"
import { Container, Col, Row} from 'react-bootstrap';
import "../style/PageTitle.css";
// Simple page-heading component rendering the title in an <h1>.
// NOTE(review): menu1/menu2/menu3 are destructured but never rendered --
// presumably intended for breadcrumb/menu entries; confirm before removing.
const PageTitle = ({title, menu1, menu2, menu3}) => {
  return (
    <>
      <Container>
        <Row >
          <Col className="mb-0"><h1>{title}</h1></Col>
        </Row>
      </Container>
    </>
  );
}

export default PageTitle;
|
<filename>src/commands/utility/UserInfoCommand.js
const Command = require('../Command');
const util = require('../../util');
const Member = require('../../Member');
const {MessageEmbed} = require('discord.js');
const icons = require('../../icons');
/**
 * Moderation command showing a summary embed for a user: account age,
 * join date, moderation/strike counts, and active mute/ban state.
 */
class UserInfoCommand extends Command {

    static description = 'Show info about a user';

    static usage = '<@user|id>';

    static names = ['userinfo','user','check', 'u'];

    static userPerms = ['BAN_MEMBERS'];

    static modCommand = true;

    static supportsSlashCommands = true;

    async execute() {
        // Resolve the target user either from the slash-command option or
        // from the text-command argument (mention or raw id).
        let user;
        if (this.source.isInteraction) {
            user = this.options.getUser('user');
        }
        else {
            const userID = this.options.getString('user');
            if (!userID || !await util.isUser(userID)) {
                return this.sendUsage();
            }
            user = await this.bot.users.fetch(userID);
        }

        const member = new Member(user, this.source.getGuild()),
            guildMember = await member.fetchMember(),
            guildID = this.source.getGuild().id;

        // Fetch moderation count, strike sum, and any active mute/ban rows
        // concurrently.
        let [moderations, strikes, mute, ban] = await Promise.all([
            this.database.query('SELECT COUNT(*) AS count FROM moderations WHERE userid = ? AND guildid = ?',[user.id, guildID]),
            member.getStrikeSum(this.database),
            this.database.query('SELECT * FROM moderations WHERE active = TRUE AND userid = ? AND guildid = ? AND action = \'mute\'',[user.id, guildID]),
            this.database.query('SELECT * FROM moderations WHERE active = TRUE AND userid = ? AND guildid = ? AND action = \'ban\'', [user.id, guildID]),
        ]);

        // No mute recorded in the database: fall back to the muted role or
        // a Discord timeout (the timeout check wins if both apply).
        if (!mute && guildMember) {
            if (guildMember.roles.cache.has(this.guildConfig.mutedRole)) {
                mute = {reason: 'Has muted role (Unknown reason and timer)'};
            }
            if (guildMember.isCommunicationDisabled()) {
                mute = {reason: `Timed out until <t:${Math.floor(guildMember.communicationDisabledUntilTimestamp / 1000)}:R>`};
            }
        }

        let muteTime = getRemainingDuration(mute);
        let banTime = getRemainingDuration(ban);
        // No ban row: fall back to Discord's own ban list.
        if (!ban && await member.fetchBanInfo()) ban = member.banInfo;

        const embed = new MessageEmbed()
            .setAuthor({name: user.tag, iconURL: user.avatarURL()})
            .setDescription(
                `**ID:** ${user.id}\n` +
                `**Account Created:** <t:${Math.floor(user.createdTimestamp/1000)}:D>\n` +
                (guildMember?.joinedAt ? `**Joined Guild:** <t:${Math.floor(guildMember.joinedTimestamp/1000)}:D>\n` : '') +
                `**Moderations:** ${moderations.count}\n` +
                `**Strikes:** ${strikes}\n` +
                `**Muted:** ${mute ? `${icons.yes} - ${mute.reason}`: icons.no}\n` +
                (muteTime ? `**Until:** ${muteTime}\n` : '') +
                `**Banned:** ${ban ? `${icons.yes} - ${ban.reason || 'Unknown Reason'}` : icons.no}\n` +
                (banTime ? `**Until:** ${banTime}\n` : '')
            )
            .setColor(getColor(ban, mute));
        await this.reply(embed);
    }

    // Slash-command option definition.
    static getOptions() {
        return [{
            name: 'user',
            type: 'USER',
            description: 'The user in question',
            required: true,
        }];
    }

    // Text-command argument parsing: convert an @mention to a raw id.
    parseOptions(args) {
        return [
            {
                name: 'user',
                type: 'STRING',
                value: util.userMentionToId(args.shift()),
            }
        ];
    }
}
/**
 * Format a moderation row's expiry as a Discord relative timestamp,
 * or return null when there is no row / no (truthy) expireTime.
 */
function getRemainingDuration(info) {
    const expireTime = info?.expireTime;
    return expireTime ? `<t:${expireTime}:R>` : null;
}
/**
 * Pick the embed color from moderation state: red when banned,
 * orange when muted, green otherwise.
 */
function getColor(ban, mute) {
    return ban ? util.color.red : (mute ? util.color.orange : util.color.green);
}
module.exports = UserInfoCommand;
|
#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Return on any failure
set -e

# Run pylint over the joblibspark package
python -m pylint joblibspark
|
# Complete your game here
|
<gh_stars>1-10
import m from 'mithril';
// @ts-ignore
import md from '../markdown/instructions.md';
// Enable modal links: add the UIkit uk-toggle attribute to every anchor
// that opens the image gallery modal.
let md_edited = md.replace(/<a href="#modal-gallery"/g, '<a href="#modal-gallery" uk-toggle');
// Edit image paths: strip the absolute raw.githubusercontent.com prefix so
// images resolve relative to the deployed site.
md_edited = md_edited.replace(/https:\/\/raw.githubusercontent.com\/amandaghassaei\/botanigram\/main\//g, '');

// Video.js intro clip markup, prepended before the rendered markdown.
const videoHTML = `
<div class="intro-video-wrapper">
	<video
		id="intro-video-2"
		class="video-js"
		controls
		aspectRatio="1:1"
		fluid
		preload="auto"
		width="600"
		height="600"
		poster="docs/intro_video_cover.jpg"
		data-setup="{}"
	>
		<source src="docs/intro-small.mp4" type="video/mp4" />
		<p class="vjs-no-js">
			To view this video please enable JavaScript, and consider upgrading to a
			web browser that
			<a href="https://videojs.com/html5-video-support/" target="_blank"
				>supports HTML5 video</a
			>
		</p>
	</video>
</div>`;
md_edited = videoHTML + md_edited;
// m.trust: the markdown is compiled at build time from a repo-local file,
// so rendering it as raw HTML is intentional here.
const instructionsHTML = m.trust(md_edited);
// UIkit modal component showing the rendered instructions markdown.
export const InstructionsModal = {
    view() {
        return m('div', { id: 'modal-instructions', 'uk-modal': true },
            m('div', { class: 'uk-modal-dialog uk-modal-dialog-large uk-modal-body' }, [
                // Close button in the dialog's top corner.
                m('button', { class: 'uk-modal-close-default', type: 'button', 'uk-close': true, }),
                m('h2', { class: 'uk-modal-title', }, 'Instructions'),
                instructionsHTML,
                m('br'),
            ]),
        );
    },
}
|
// VuePress-style <head> configuration: PWA icons/manifest plus external
// icon-font and bootstrap stylesheets. Commented entries are kept as
// deliberately disabled options.
module.exports=[
  ['link', {rel: 'icon', href: '/logo.png'}],
  ['link', {rel: 'manifest', href: '/manifest.json'}],
  //['meta', {name: 'theme-color', content: '#6441a5'}],
  ['meta', {name: 'apple-mobile-web-app-capable', content: 'yes'}],
  ['meta', {name: 'apple-mobile-web-app-status-bar-style', content: 'black'}],
  ['link', {rel: 'apple-touch-icon', href: '/icons/apple-touch-icon-152x152.png'}],
  // ['link', {rel: 'mask-icon', href: '/icons/logo.svg', color: '#6441a5'}],
  ['meta', {name: 'msapplication-TileImage', content: '/icons/msapplication-icon-144x144.png'}],
  // ['meta', {name: 'msapplication-TileColor', content: '#000000'}],
  // ['link', {rel: 'stylesheet', href: 'https://fonts.googleapis.com/css?family=Roboto:100,300,400,500,700,900'}],
  ['link', {
    rel: 'stylesheet',
    href: 'https://cdn.staticfile.org/MaterialDesign-Webfont/5.0.45/css/materialdesignicons.min.css'
  }],
  ['link', {rel: 'stylesheet', href: 'https://cdn.staticfile.org/twitter-bootstrap/4.4.1/css/bootstrap.min.css'}]
];
|
'use strict';
angular.module('copayApp.controllers').controller('addwalletController',
function ($rootScope, $scope, $timeout, storageService, notification, profileService, bwcService, $log, gettext, go, gettextCatalog, isCordova) {
var self = this;
var successMsg = gettext('Backup words deleted');
var indexScope = $scope.index;
// Form model: wallet name, password and repeated password.
self.aiwname = '';
self.aiwpass = '';
self.aiwrpass = '';
// Mnemonic confirmation state: words the user has picked so far, the words in
// their correct order, and the same words shuffled for display.
self.chosenWords = [];
self.showcodes = [];
self.showrandamcodes = [];
self.mnemonic = '';
// Error / UI flags.
self.showcodeerr = false;
self.addwalleterr = false;
self.showconfirm = false;
self.showtab = 'tabcold';
// Form validation flags (name / password / repeated password).
self.formvalidatename = false;
self.formvalidatepass = false;
self.formvalidaterpass = false;
var fc = profileService.focusedClient;
var walletClient = bwcService.getClient();
// Fresh 12-word mnemonic for the new wallet.
self.ducodes = walletClient.createRandomMnemonic().split(' ');
// Fisher-Yates shuffle, in place. Returns the same array instance.
// Fix: the previous implementation computed the random index with
// parseInt(Math.random() * i). parseInt coerces its argument to a string
// first, so a very small random value rendered in exponential notation
// (e.g. 7e-8) would parse as 7 and could index past the end of the array,
// inserting `undefined` entries. Math.floor is the correct tool here.
self.shuffle = function (v) {
  for (var i = v.length - 1; i > 0; i--) {
    var j = Math.floor(Math.random() * (i + 1)); // 0 <= j <= i
    var tmp = v[i];
    v[i] = v[j];
    v[j] = tmp;
  }
  return v;
};
// Build the mnemonic word-hint boxes.
self.funReg = function () {
var newlist = [];
if (self.showrandamcodes.length > 3) {
// Word list already built: just re-shuffle the displayed (random-order) copy.
self.showrandamcodes = self.shuffle(JSON.parse(JSON.stringify(self.showrandamcodes)));
// End of re-shuffle.
return false;
} else {
// First call: wrap each of the 12 mnemonic words as {id, str, chosen}.
for (var i = 0; i <= 11; i++) {
var newStr = {
id: i,
str: self.ducodes[i],
chosen: false
};
newlist.push(newStr);
}
// Deep copies so picking words does not mutate the ordered reference list.
self.showcodes = JSON.parse(JSON.stringify(newlist));
self.showrandamcodes = self.shuffle(JSON.parse(JSON.stringify(newlist)));
}
// Defer a digest so the new lists render.
$timeout(function () {
$scope.$digest();
});
};
// End of word-hint box setup.
// Click handler for the shuffled word buttons: marks the clicked word as
// chosen and appends it to chosenWords, then re-checks completion.
self.addwordf = function ($event) {
self.showcodeerr = false;
// Only react to clicks that actually landed on a word button.
if ($event.srcElement.tagName == 'BUTTON') {
self.showrandamcodes.forEach(function (item, index) {
if (item.id == $event.srcElement.id) {
self.showrandamcodes[index].chosen = true;
self.chosenWords.push({
id: item.id,
str: item.str
})
}
});
} else {
return false;
}
// Validate once all 12 words have been picked.
self.watchchose();
}
// Click handler for removing an already-chosen word: un-marks it in the
// shuffled list and removes it from chosenWords.
self.minuswordf = function ($event) {
self.showcodeerr = false;
if ($event.srcElement.tagName == 'SPAN') {
self.showrandamcodes.forEach(function (item, index) {
if (item.id == $event.srcElement.id) {
self.showrandamcodes[index].chosen = false;
}
});
// NOTE(review): splicing inside forEach skips the following element; safe
// here only because ids are unique so at most one entry matches — confirm.
self.chosenWords.forEach(function (item, index) {
if (item.id == $event.srcElement.id) {
self.chosenWords.splice(index, 1);
};
})
} else {
return false;
}
self.watchchose();
};
// Once all 12 words are chosen, compare their concatenation against the
// correct order; on match, build self.mnemonic and advance to the next step,
// otherwise flag an ordering error.
self.watchchose = function(){
if (self.chosenWords.length > 11) {
var chostr = '';
for (var i = 0; i < self.chosenWords.length; i++) {
chostr += self.chosenWords[i].str;
}
var showstr = '';
for (var i = 0; i < self.showcodes.length; i++) {
showstr += self.showcodes[i].str;
}
if (chostr == showstr) {
// Rebuild the space-separated mnemonic (leading space is trimmed later
// in addWallet via mnemonic.trim()).
for (var i = 0; i < self.showcodes.length; i++) {
self.mnemonic += ' ' + self.showcodes[i].str;
}
self.step = 'deletecode';
} else {
self.showcodeerr = true;
}
}else{
// Fewer than 12 words chosen: nothing to validate yet.
return;
}
}
/**
 * Create a wallet from the confirmed mnemonic.
 * @param walletName  display name for the new wallet
 * @param password    wallet password
 * @param passphrase  repeated password (must equal password)
 * @param mnemonic    space-separated mnemonic words
 * @param del         when truthy, delete the stored mnemonic backup after import
 */
self.addWallet = function (walletName, password, passphrase, mnemonic,del) {
if(password !== passphrase){
$rootScope.$emit('Local/ShowErrorAlert', gettextCatalog.getString('*Inconsistent password'));
return;
}
mnemonic = mnemonic.trim();
// Re-entrancy guard while an import is in flight.
if (self.creatingProfile)
return console.log('already creating profile');
self.creatingProfile = true;
if (isCordova)
window.plugins.spinnerDialog.show(null, gettextCatalog.getString('Loading...'), true);
else{
$scope.index.progressing = true;
$scope.index.progressingmsg = 'Loading...';
}
// NOTE(review): both ternary branches are 'livenet', so the BTC check is a
// no-op — presumably one branch was meant to be 'testnet'; confirm.
let networkName = indexScope.type =='BTC' ? 'livenet' : 'livenet';
//{ walletName: walletName, password: <PASSWORD>, mnemonic: mnemonic, type:'INVE' }
setTimeout(function () {
// NOTE(review): "<PASSWORD>" below looks like a redaction artifact and is
// not valid JavaScript; the original likely passed the `password` argument
// here — confirm against upstream.
profileService.importWallets({
name: walletName,
password: <PASSWORD>,
passphrase: '',
mnemonic: mnemonic,
m: 1,
n: 1,
networkName: networkName,
cosigners: [],
isSinglecreateress: true,
type: indexScope.type,
segwit: true,
info: ''
}, function (err, walletId) {
$timeout(function () {
// For BTC the spinner is hidden later, after importAddress completes.
if (indexScope.type != 'BTC') {
if (isCordova)
window.plugins.spinnerDialog.hide();
else
$scope.index.progressing = false;
}
if (err) {
self.creatingProfile = false;
$log.warn(err);
self.error = err;
$timeout(function () {
$scope.$apply();
});
}
else if (del) {
//$rootScope.$emit('Local/WalletImported', walletId);
// Wipe the locally stored mnemonic backup once the wallet is imported.
var fc = profileService.focusedClient;
fc.clearMnemonic();
profileService.clearMnemonic(function () {
self.deleted = true;
notification.success(successMsg);
});
}
// NOTE(review): this success notification and the post-import hook run
// even when err is set above — presumably they should be inside the
// success path only; confirm intended behaviour.
notification.success(gettextCatalog.getString('Success'), gettextCatalog.getString('successfully create wallet "{{walletName}}"', {
walletName: walletName
}));
importAfterDo[indexScope.type] ? importAfterDo[indexScope.type].do() : importAfterDo['INVE'].do();
});
});
});
};
var rpcHelper = require('inWalletcore/HDWallet/btc_rpcHelper');
// Per-coin post-import hooks, dispatched by indexScope.type after a wallet is
// imported. All variants emit 'Local/addWallets' and navigate home; BTC also
// registers the first address with the RPC node when adding a wallet.
var importAfterDo = {
'INVE': {
do: function(){
$rootScope.$emit('Local/addWallets');
go.walletHome();
}
},
'BTC': {
do: function(){
if (self.page == 'addwallet'){
var address = profileService.focusedClient.credentials.otherObject.addressList[0];
rpcHelper.importAddress(address, address, function(err, res) {
// Hide the progress indicator deferred from addWallet's BTC branch.
if (isCordova)
window.plugins.spinnerDialog.hide();
else
$scope.index.progressing = false;
$rootScope.$emit('Local/addWallets');
go.walletHome();
}, false, 'now');
} else {
$rootScope.$emit('Local/addWallets');
go.walletHome();
}
}
},
'ETH': {
do: function(){
$rootScope.$emit('Local/addWallets');
go.walletHome();
}
}
}
/**
 * Input validation: wallet name.
 * Toggles the 'setErrorexp' CSS class on the field's grandparent element and
 * writes an error message into the sibling element; sets formvalidatename.
 */
self.validateName = function( $event ){
// var val = $event.srcElement.value;
var val = self.aiwname;
var vdparent = $event.srcElement.parentElement.parentElement;
var vderrp = $event.srcElement.parentElement.nextElementSibling;
if(typeof(val) == 'undefined'){
angular.element(vdparent).removeClass('setErrorexp');
self.formvalidatename = false;
}else if(val == ''){
angular.element(vdparent).removeClass('setErrorexp');
self.formvalidatename = false;
// NOTE: val.length < 1 is unreachable here (empty string handled above);
// only the > 20 half of this condition can fire.
}else if(val.length < 1 || val.length > 20){
angular.element(vdparent).addClass('setErrorexp');
angular.element(vderrp).html(gettextCatalog.getString('*Characters exceed the 1-20 limit!'));
self.formvalidatename = false;
}else{
angular.element(vdparent).removeClass('setErrorexp');
self.formvalidatename = true;
}
$timeout(function(){
$scope.$apply();
})
}
/**
 * Input validation: password.
 * Length must be 8-18; a password made of a single character class (all
 * lower, all upper, or all digits) only triggers a warning ('setWarmErrorexp')
 * but is still accepted (formvalidatepass stays true).
 */
self.validatePass = function( $event ){
var val = self.aiwpass;
var vdparent = $event.srcElement.parentElement.parentElement;
var vderrp = $event.srcElement.parentElement.nextElementSibling;
// Matches 8-18 chars drawn from exactly one class: lower, upper, or digits.
var trimeasyExp=/^(([a-z]){8,18}|([A-Z]){8,18}|([0-9]){8,18})$/;
if(typeof(val) == 'undefined'){
angular.element(vdparent).removeClass('setErrorexp');
angular.element(vdparent).removeClass('setWarmErrorexp');
self.formvalidatepass = false;
}else if(val == ''){
angular.element(vdparent).removeClass('setErrorexp');
angular.element(vdparent).removeClass('setWarmErrorexp');
self.formvalidatepass = false;
}else if(val.length > 18 || val.length < 8){
angular.element(vdparent).removeClass('setWarmErrorexp');
angular.element(vdparent).addClass('setErrorexp');
angular.element(vderrp).html(gettextCatalog.getString('*Password cannot less than 8 digits or exceed 18 digits!'));
self.formvalidatepass = false;
}else if(trimeasyExp.test(val)){
// Weak password: warn but accept.
angular.element(vdparent).addClass('setWarmErrorexp');
angular.element(vdparent).removeClass('setErrorexp');
angular.element(vderrp).html(gettextCatalog.getString('*The password is too simple, it is recommended to mix uppercase and lowercase letters, numbers, special characters!'));
self.formvalidatepass = true;
}else{
angular.element(vdparent).removeClass('setErrorexp');
angular.element(vdparent).removeClass('setWarmErrorexp');
self.formvalidatepass = true;
}
$timeout(function(){
$scope.$apply();
})
}
/**
 * Input validation: repeated password.
 * Must equal the first password; sets formvalidaterpass accordingly.
 */
self.validateRpass = function( $event ){
var val = self.aiwrpass;
var vdparent = $event.srcElement.parentElement.parentElement;
var vderrp = $event.srcElement.parentElement.nextElementSibling;
if(typeof(val) == 'undefined'){
angular.element(vdparent).removeClass('setErrorexp');
self.formvalidaterpass = false;
}else if(val == ''){
angular.element(vdparent).removeClass('setErrorexp');
self.formvalidaterpass = false;
}else if(self.aiwrpass !== self.aiwpass){
angular.element(vdparent).addClass('setErrorexp');
angular.element(vderrp).html(gettextCatalog.getString('*Inconsistent password'));
self.formvalidaterpass = false;
}else{
angular.element(vdparent).removeClass('setErrorexp');
self.formvalidaterpass = true;
}
$timeout(function(){
$scope.$apply();
})
}
});
|
<filename>src/main/java/org/kwstudios/play/kwbungeelobby/toolbox/ConstantHolder.java
package org.kwstudios.play.kwbungeelobby.toolbox;
import org.bukkit.ChatColor;
/**
 * Shared string constants for the plugin: MOTD prefix, backend API endpoint,
 * Redis (Jedis) host, navigator item name and the BungeeCord plugin-message
 * channel.
 */
public class ConstantHolder {
// MOTD prefix: "KWStudios:" in dark purple bold, followed by gold italic text.
public static final String MOTD_PREFIX = ChatColor.DARK_PURPLE.toString() + ChatColor.BOLD.toString() + "KWStudios"
+ ChatColor.RESET + ":" + " " + ChatColor.GOLD + ChatColor.ITALIC;
// REST endpoint template; :server and :player are substituted by callers.
public static final String API_URL = "https://api.kwstudios.org/minecraft/server/:server/players/:player/storedata";
// NOTE(review): hard-coded Redis host — consider moving to configuration.
public static final String JEDIS_SERVER = "172.16.17.32";
public static final String NAVIGATOR_NAME = ChatColor.GREEN + "Navigator";
public static final String KW_CHANNEL_NAME = "KWBungee";
}
|
package io.syndesis.qe.pages.customizations.extensions;
import static com.codeborne.selenide.Condition.visible;
import static com.codeborne.selenide.Selenide.$;
import org.openqa.selenium.By;
import com.codeborne.selenide.SelenideElement;
import io.syndesis.qe.pages.SyndesisPageObject;
/**
 * Selenide page object for the technical-extension detail page.
 */
public class TechExtensionDetailPage extends SyndesisPageObject {

    private static final class Element {
        public static final By ROOT = By.cssSelector("syndesis-tech-extension-detail");
    }

    /** Root element of the page, waited on until visible. */
    @Override
    public SelenideElement getRootElement() {
        SelenideElement root = $(Element.ROOT);
        return root.shouldBe(visible);
    }

    /** The page is considered valid when both action buttons are shown. */
    @Override
    public boolean validate() {
        //TODO Deeper validation
        boolean updateVisible = getButton("Update").is(visible);
        boolean deleteVisible = getButton("Delete").is(visible);
        return updateVisible && deleteVisible;
    }
}
|
#!/usr/bin/env bash
#
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -eu

# Pull CI secrets/config provided by the Kokoro runner.
echo "Reading CI secret configuration parameters."
source "${KOKORO_GFILE_DIR}/test-configuration.sh"
echo "Running build and tests"
# Script lives three levels below the repository root.
cd "$(dirname "$0")/../../.."
readonly PROJECT_ROOT="${PWD}"
echo
echo "================================================================"
echo "================================================================"
echo "Update or Install Bazel."
echo
# macOS does not have sha256sum by default, but `shasum -a 256` does the same
# thing:
function sha256sum() { shasum -a 256 "$@" ; } && export -f sha256sum
"${PROJECT_ROOT}/ci/install-bazel.sh"
readonly BAZEL_BIN="$HOME/bin/bazel"
echo "Using Bazel in ${BAZEL_BIN}"
# We need this environment variable because on macOS gRPC crashes if it cannot
# find the credentials, even if you do not use them. Some of the unit tests do
# exactly that.
echo
echo "================================================================"
echo "================================================================"
echo "Define GOOGLE_APPLICATION_CREDENTIALS."
export GOOGLE_APPLICATION_CREDENTIALS="${KOKORO_GFILE_DIR}/service-account.json"
# The -DGRPC_BAZEL_BUILD is needed because gRPC does not compile on macOS unless
# it is set.
"${BAZEL_BIN}" test \
    --copt=-DGRPC_BAZEL_BUILD \
    --action_env=GOOGLE_APPLICATION_CREDENTIALS="${GOOGLE_APPLICATION_CREDENTIALS}" \
    --test_output=errors \
    --verbose_failures=true \
    --keep_going \
    -- //google/cloud/...:all
echo
echo "================================================================"
echo "================================================================"
# Build everything (including targets that are not tests).
# NOTE(review): --test_output is a test-command flag; on `bazel build` it is
# presumably copy-pasted from the test invocation above — confirm it is
# accepted/ignored by the Bazel version used here.
"${BAZEL_BIN}" build \
    --copt=-DGRPC_BAZEL_BUILD \
    --action_env=GOOGLE_APPLICATION_CREDENTIALS="${GOOGLE_APPLICATION_CREDENTIALS}" \
    --test_output=errors \
    --verbose_failures=true \
    --keep_going \
    -- //google/cloud/...:all
|
<reponame>jsoref/hashicorp-faas-nomad
package command
import (
"fmt"
"strings"
"github.com/hashicorp/nomad/api/contexts"
"github.com/posener/complete"
)
// StopCommand implements the `nomad stop` CLI command.
type StopCommand struct {
	Meta
}
// Help returns the long-form usage text for the stop command.
func (c *StopCommand) Help() string {
	helpText := `
Usage: nomad stop [options] <job>
Stop an existing job. This command is used to signal allocations
to shut down for the given job ID. Upon successful deregistration,
an interactive monitor session will start to display log lines as
the job unwinds its allocations and completes shutting down. It
is safe to exit the monitor early using ctrl+c.
General Options:
` + generalOptionsUsage() + `
Stop Options:
-detach
Return immediately instead of entering monitor mode. After the
deregister command is submitted, a new evaluation ID is printed to the
screen, which can be used to examine the evaluation using the eval-status
command.
-purge
Purge is used to stop the job and purge it from the system. If not set, the
job will still be queryable and will be purged by the garbage collector.
-yes
Automatic yes to prompts.
-verbose
Display full information.
`
	return strings.TrimSpace(helpText)
}
// Synopsis returns the one-line description shown in command listings.
func (c *StopCommand) Synopsis() string {
	return "Stop a running job"
}
// AutocompleteFlags returns the flags that can be tab-completed for the
// stop command, merged with the shared client flag set.
func (c *StopCommand) AutocompleteFlags() complete.Flags {
	localFlags := complete.Flags{
		"-detach":  complete.PredictNothing,
		"-purge":   complete.PredictNothing,
		"-verbose": complete.PredictNothing,
		"-yes":     complete.PredictNothing,
	}
	return mergeAutocompleteFlags(c.Meta.AutocompleteFlags(FlagSetClient), localFlags)
}
// AutocompleteArgs returns a predictor that completes the positional job
// argument by prefix-searching job IDs via the API.
func (c *StopCommand) AutocompleteArgs() complete.Predictor {
	return complete.PredictFunc(func(cliArgs complete.Args) []string {
		apiClient, clientErr := c.Meta.Client()
		if clientErr != nil {
			return nil
		}
		resp, _, searchErr := apiClient.Search().PrefixSearch(cliArgs.Last, contexts.Jobs, nil)
		if searchErr != nil {
			return []string{}
		}
		return resp.Matches[contexts.Jobs]
	})
}
// Run parses the command line, resolves the job by exact ID or prefix,
// optionally asks for confirmation on a prefix match, deregisters the job,
// and (unless -detach is set) monitors the resulting evaluation.
// Returns 0 on success or cancellation, 1 on any error.
func (c *StopCommand) Run(args []string) int {
	var detach, purge, verbose, autoYes bool

	flags := c.Meta.FlagSet("stop", FlagSetClient)
	flags.Usage = func() { c.Ui.Output(c.Help()) }
	flags.BoolVar(&detach, "detach", false, "")
	flags.BoolVar(&verbose, "verbose", false, "")
	flags.BoolVar(&autoYes, "yes", false, "")
	flags.BoolVar(&purge, "purge", false, "")
	if err := flags.Parse(args); err != nil {
		return 1
	}
	// Truncate the id unless full length is requested
	length := shortId
	if verbose {
		length = fullId
	}
	// Check that we got exactly one job
	args = flags.Args()
	if len(args) != 1 {
		c.Ui.Error(c.Help())
		return 1
	}
	jobID := args[0]
	// Get the HTTP client
	client, err := c.Meta.Client()
	if err != nil {
		c.Ui.Error(fmt.Sprintf("Error initializing client: %s", err))
		return 1
	}
	// Check if the job exists
	jobs, _, err := client.Jobs().PrefixList(jobID)
	if err != nil {
		c.Ui.Error(fmt.Sprintf("Error deregistering job: %s", err))
		return 1
	}
	if len(jobs) == 0 {
		c.Ui.Error(fmt.Sprintf("No job(s) with prefix or id %q found", jobID))
		return 1
	}
	// Several matches are only ambiguous when the first is not an exact match.
	if len(jobs) > 1 && strings.TrimSpace(jobID) != jobs[0].ID {
		c.Ui.Error(fmt.Sprintf("Prefix matched multiple jobs\n\n%s", createStatusListOutput(jobs)))
		return 1
	}
	// Prefix lookup matched a single job
	job, _, err := client.Jobs().Info(jobs[0].ID, nil)
	if err != nil {
		c.Ui.Error(fmt.Sprintf("Error deregistering job: %s", err))
		return 1
	}
	// Confirm the stop if the job was a prefix match.
	if jobID != *job.ID && !autoYes {
		question := fmt.Sprintf("Are you sure you want to stop job %q? [y/N]", *job.ID)
		answer, err := c.Ui.Ask(question)
		if err != nil {
			c.Ui.Error(fmt.Sprintf("Failed to parse answer: %v", err))
			return 1
		}
		if answer == "" || strings.ToLower(answer)[0] == 'n' {
			// No case
			c.Ui.Output("Cancelling job stop")
			return 0
		} else if strings.ToLower(answer)[0] == 'y' && len(answer) > 1 {
			// Non exact match yes (e.g. "yes"): abort with exit 0 and ask for
			// an exact 'y'. NOTE(review): this message uses typographic
			// quotes (‘y’) while the branch below uses straight quotes.
			c.Ui.Output("For confirmation, an exact ‘y’ is required.")
			return 0
		} else if answer != "y" {
			c.Ui.Output("No confirmation detected. For confirmation, an exact 'y' is required.")
			return 1
		}
	}
	// Invoke the stop
	evalID, _, err := client.Jobs().Deregister(*job.ID, purge, nil)
	if err != nil {
		c.Ui.Error(fmt.Sprintf("Error deregistering job: %s", err))
		return 1
	}
	// If we are stopping a periodic job there won't be an evalID.
	if evalID == "" {
		return 0
	}
	if detach {
		c.Ui.Output(evalID)
		return 0
	}
	// Start monitoring the stop eval
	mon := newMonitor(c.Ui, client, length)
	return mon.monitor(evalID, false)
}
|
#!/bin/sh -e
# Bootstrap a demo OpenStack environment (legacy nova/neutron/keystone CLIs):
# flavors, security groups, demo tenant/user, networks and floating IPs.
. ~/admin-openrc
# adjust tiny image: recreate m1.tiny with 512MB RAM / 8GB disk / 1 vCPU
nova flavor-delete m1.tiny
nova flavor-create m1.tiny 1 512 8 1
# configure security groups: allow ICMP (ping) and SSH from anywhere
neutron security-group-rule-create --direction ingress --ethertype IPv4 --protocol icmp --remote-ip-prefix 0.0.0.0/0 default
neutron security-group-rule-create --direction ingress --ethertype IPv4 --protocol tcp --port-range-min 22 --port-range-max 22 --remote-ip-prefix 0.0.0.0/0 default
# create demo tenant/user and import the SSH key pair
keystone tenant-create --name demo --description "Demo Tenant"
keystone user-create --name demo --tenant demo --pass demo --email demo@demo.demo
nova keypair-add --pub-key id_rsa.pub ubuntu-keypair
# configure external (flat, no-DHCP) network 10.5.8.0/24
neutron net-create ext-net --router:external --provider:physical_network external --provider:network_type flat
neutron subnet-create ext-net --name ext-subnet --allocation-pool start=10.5.8.5,end=10.5.8.254 --disable-dhcp --gateway 10.5.8.1 10.5.8.0/24
# create vm network 10.20.5.0/24 routed to the external network
neutron net-create demo-net
neutron subnet-create --name demo-subnet --gateway 10.20.5.1 demo-net 10.20.5.0/24
neutron router-create demo-router
neutron router-interface-add demo-router demo-subnet
neutron router-gateway-set demo-router ext-net
# create pool of 10 floating ips
i=0
while [ $i -ne 10 ]; do
neutron floatingip-create ext-net
i=$((i + 1))
done
|
echo "install ..."
# Prerequisites for apt over HTTPS, then register Docker's apt repository.
sudo apt update
sudo apt install apt-transport-https ca-certificates curl gnupg-agent software-properties-common
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo apt-key add -
sudo add-apt-repository "deb [arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable"
sudo apt update
# Install latest docker-ce first, then pin to a specific bionic build.
yes | sudo apt install docker-ce
apt list -a docker-ce
yes | sudo apt install docker-ce=5:18.09.6~3-0~ubuntu-bionic
# hold version so apt upgrade does not replace the pinned build
sudo apt-mark hold docker-ce
# Allow the current user to run docker without sudo (takes effect on re-login).
sudo usermod -aG docker $USER
|
package com.sreemenon.opensesame;
import android.content.DialogInterface;
import android.os.AsyncTask;
import android.os.Bundle;
import android.support.v4.app.DialogFragment;
import android.support.v4.app.Fragment;
import android.support.v7.app.AlertDialog;
import android.text.Editable;
import android.text.TextWatcher;
import android.view.LayoutInflater;
import android.view.View;
import android.view.ViewGroup;
import android.widget.ArrayAdapter;
import android.widget.AutoCompleteTextView;
import android.widget.EditText;
import android.widget.ImageButton;
import android.widget.NumberPicker;
import android.widget.Switch;
import com.sreemenon.crypt.Crypt;
import com.sreemenon.sqlite.DBTransactions;
import net.sqlcipher.Cursor;
import java.io.IOException;
import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.UnrecoverableEntryException;
import java.security.cert.CertificateException;
import java.security.interfaces.RSAPrivateKey;
import java.util.ArrayList;
import java.util.List;
import java.util.Random;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
/**
 * Created by Sree on 14/12/2015.
 *
 * Backend logic for adding and editing a password entry: binds the form
 * widgets, keeps the character-class switches in sync with the password text,
 * generates passwords, and saves/updates entries in the encrypted store.
 */
public class AddFragment extends Fragment {
    // Form widgets, bound in onCreateView().
    private AutoCompleteTextView actvWebsite;
    private EditText etUname;
    private Switch hasNum;    // password must contain a digit
    private Switch hasSpl;    // password must contain a special character
    private Switch hasUpper;  // password must contain an upper-case letter
    private NumberPicker npCharCount;
    // True when the fragment was opened to edit an existing entry.
    private boolean isEdit;
    // The entry being edited; only valid while isEdit is true.
    private DataItem item;
    private EditText etPass;
    private ImageButton btnGenPas;

    /** Preferred factory method; use instead of the bare constructor. */
    public static AddFragment newInstance() {
        AddFragment fragment = new AddFragment();
        Bundle args = new Bundle();
        fragment.setArguments(args);
        return fragment;
    }

    @Override
    public View onCreateView(LayoutInflater inflater, ViewGroup container,
                             Bundle savedInstanceState) {
        isEdit = false;
        // Inflate the layout for this fragment
        View view = inflater.inflate(R.layout.fragment_add, container, false);
        actvWebsite = (AutoCompleteTextView) view.findViewById(R.id.actvWebsite);
        etUname = (EditText) view.findViewById(R.id.etUname);
        hasNum = (Switch) view.findViewById(R.id.switchNumeric);
        hasSpl = (Switch) view.findViewById(R.id.switchSpl);
        hasUpper = (Switch) view.findViewById(R.id.switchUpper);
        npCharCount = (NumberPicker) view.findViewById(R.id.npCharCount);
        // Generated passwords are 7-15 characters long, defaulting to 15.
        npCharCount.setMaxValue(15);
        npCharCount.setMinValue(7);
        npCharCount.setValue(15);
        etPass = (EditText) view.findViewById(R.id.etPass);
        /**
         * etPass: keep the character-class switches in sync with the text.
         */
        etPass.addTextChangedListener(new TextWatcher() {
            @Override
            public void beforeTextChanged(CharSequence s, int start, int count, int after) {}

            @Override
            public void onTextChanged(CharSequence s, int start, int before, int count) {}

            /**
             * Auto update the switches according to the text in the password
             * EditText: each switch reflects whether the current text contains
             * that character class.
             */
            @Override
            public void afterTextChanged(Editable s) {
                if (s.length() != 0) {
                    Pattern numPattern = Pattern.compile("(.*)(\\d+)(.*)");
                    Matcher numMatcher = numPattern.matcher(s);
                    hasNum.setChecked(numMatcher.matches());
                    Pattern upperPattern = Pattern.compile("(.*)([A-Z])(.*)");
                    Matcher upperMatcher = upperPattern.matcher(s);
                    hasUpper.setChecked(upperMatcher.matches());
                    Pattern splPattern = Pattern.compile("(.*)([^A-Za-z0-9])(.*)");
                    Matcher splMatcher = splPattern.matcher(s);
                    hasSpl.setChecked(splMatcher.matches());
                } else {
                    // Empty field: restore the default (all classes on).
                    hasNum.setChecked(true);
                    hasSpl.setChecked(true);
                    hasUpper.setChecked(true);
                }
            }
        });
        btnGenPas = (ImageButton) view.findViewById(R.id.btnGenPass);
        /**
         * btnGenPas OnClickListener: auto-generate a password from the
         * selected character classes.
         */
        btnGenPas.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                boolean boolSpl = hasSpl.isChecked();
                boolean boolUpper = hasUpper.isChecked();
                boolean boolNum = hasNum.isChecked();
                String lower = "abcdefghijklmnopqrstuvwxyz";
                String upper = lower.toUpperCase();
                String numbers = "1234567890";
                String splChars = "@%+\\/'!#$^?:,(){}[]~-_";
                // Build the candidate alphabet from the selected classes.
                // Fix: "numbers" was previously appended twice (before the
                // special characters and again after the upper-case letters),
                // biasing generated passwords toward digits; each class is now
                // appended exactly once.
                StringBuilder baseChars = new StringBuilder(lower);
                if (boolNum)
                    baseChars.append(numbers);
                if (boolSpl)
                    baseChars.append(splChars);
                if (boolUpper)
                    baseChars.append(upper);
                StringBuilder preTicket;
                Random random = new Random();
                // setText() fires the TextWatcher above, which re-derives the
                // switch states from the generated text; keep generating until
                // the password actually contains every requested class.
                do {
                    preTicket = new StringBuilder();
                    for (int i = 0; i < npCharCount.getValue(); i++) {
                        preTicket.append(baseChars.charAt(random.nextInt(baseChars.length())));
                    }
                    etPass.setText(preTicket.toString());
                }
                while (!(boolSpl == hasSpl.isChecked() && boolUpper == hasUpper.isChecked() && boolNum == hasNum.isChecked()));
            }
        });
        // Offer previously used websites as autocomplete suggestions.
        List<DataItem> dataItemList = ((MainActivity) getActivity()).getDataItemList();
        List<String> websiteList = new ArrayList<>();
        for (DataItem dataItem : dataItemList) {
            String website = dataItem.getWebsite();
            if (!websiteList.contains(website)) {
                websiteList.add(website);
            }
        }
        ArrayAdapter<String> websiteAdapter = new ArrayAdapter<String>(AddFragment.this.getActivity(), R.layout.spinner_layout, R.id.tvSpinnerItem, websiteList);
        actvWebsite.setAdapter(websiteAdapter);
        // Edit position -1 means "add new"; anything else selects the entry to edit.
        int editPosition = ((MainActivity) getActivity()).getEditPosition();
        if (editPosition != -1) {
            isEdit = true;
            item = dataItemList.get(editPosition);
            actvWebsite.setText(item.getWebsite());
            etUname.setText(item.getUname());
            // Decrypt the stored password off the UI thread; shown when done.
            DecryptPassword decryptPassword = new DecryptPassword();
            decryptPassword.execute(item.getWebsite(), item.getUname(), item.getSalt());
        }
        return view;
    }

    /**
     * Save a new entry, or — after a confirmation dialog — update the entry
     * being edited.
     */
    public void saveNewPassword() {
        if (isEdit) {
            DialogInterface.OnClickListener deleteConfirmListener = new DialogInterface.OnClickListener() {
                @Override
                public void onClick(DialogInterface dialog, int which) {
                    switch (which) {
                        case DialogInterface.BUTTON_POSITIVE:
                            DataItem currRow = new DataItem(actvWebsite.getText().toString(), etUname.getText().toString(), hasSpl.isChecked(), hasNum.isChecked(), hasUpper.isChecked(), AddFragment.this);
                            currRow.setId(item.getId());
                            currRow.updateRow(etPass.getText().toString());
                            break;
                        case DialogInterface.BUTTON_NEGATIVE:
                            ((MainActivity) getActivity()).switchFragment(R.layout.fragment_home);
                    }
                }
            };
            AlertDialog.Builder builder = new AlertDialog.Builder(getContext());
            builder.setPositiveButton("Confirm", deleteConfirmListener);
            builder.setNegativeButton("Cancel", deleteConfirmListener);
            builder.setTitle("Confirm!");
            builder.setMessage("Are you sure you want to edit this Entry?");
            builder.show();
        } else {
            DataItem currRow = new DataItem(actvWebsite.getText().toString(), etUname.getText().toString(), hasSpl.isChecked(), hasNum.isChecked(), hasUpper.isChecked(), AddFragment.this);
            currRow.insertRow(etPass.getText().toString());
        }
    }

    /**
     * AsyncTask decrypting the stored password so it can be displayed while
     * editing. Shows a progress dialog for the duration of the work.
     */
    private class DecryptPassword extends AsyncTask<String, Void, String> {
        DialogFragment dialogFragment = new CustomProgressDialog();

        @Override
        protected String doInBackground(String... params) {
            String salt, website, uname;
            String ticket = "";
            salt = params[2];
            website = params[0];
            uname = params[1];
            // Look up the encrypted password ("ticket") for website/username.
            DBTransactions transactions = new DBTransactions(getContext());
            Cursor cursor = transactions.getCursor(false, "gems", new String[]{"ticket", "salt"}, "website=? AND uname=?", new String[]{website, uname}, null, null, null, null);
            if (cursor.moveToFirst()) {
                ticket = cursor.getString(cursor.getColumnIndex("ticket"));
            }
            cursor.close();
            transactions.closeDB();
            // The symmetric key is derived from the RSA key stored in the
            // AndroidKeyStore under the alias website+uname.
            KeyStore keyStore;
            String result = null;
            try {
                keyStore = KeyStore.getInstance("AndroidKeyStore");
                keyStore.load(null, null);
                KeyStore.PrivateKeyEntry keyEntry = (KeyStore.PrivateKeyEntry) keyStore.getEntry(website + uname, null);
                RSAPrivateKey privKey = (RSAPrivateKey) keyEntry.getPrivateKey();
                String key = String.valueOf(privKey.getModulus());
                Crypt sreeCrypt = Crypt.getDefault(key, salt, new byte[16]);
                result = sreeCrypt.decryptOrNull(ticket);
            } catch (KeyStoreException | NoSuchAlgorithmException | UnrecoverableEntryException | IOException e) {
                e.printStackTrace();
                this.cancel(false);
            } catch (CertificateException e) {
                e.printStackTrace();
            }
            return result;
        }

        @Override
        protected void onPreExecute() {
            super.onPreExecute();
            Bundle args = new Bundle();
            args.putString("message", "Opening Vault!!!");
            dialogFragment.setArguments(args);
            dialogFragment.show(getFragmentManager(), "Progress Dialog");
        }

        @Override
        protected void onPostExecute(String s) {
            super.onPostExecute(s);
            dialogFragment.dismiss();
            etPass.setText(s);
        }
    }
}
|
<reponame>mdsd-team-1/photos-metamodeling
/**
*/
package PhotosMetaModel.impl;
import PhotosMetaModel.Data;
import PhotosMetaModel.DataSegment;
import PhotosMetaModel.PhotosMetaModelPackage;
import java.util.Collection;
import org.eclipse.emf.common.notify.NotificationChain;
import org.eclipse.emf.common.util.EList;
import org.eclipse.emf.ecore.EClass;
import org.eclipse.emf.ecore.InternalEObject;
import org.eclipse.emf.ecore.util.EObjectContainmentEList;
import org.eclipse.emf.ecore.util.InternalEList;
/**
 * <!-- begin-user-doc -->
 * An implementation of the model object '<em><b>Data</b></em>'.
 * <!-- end-user-doc -->
 * <p>
 * The following features are implemented:
 * </p>
 * <ul>
 * <li>{@link PhotosMetaModel.impl.DataImpl#getDataSegment <em>Data Segment</em>}</li>
 * </ul>
 *
 * @generated
 */
// NOTE: EMF-generated class. Hand edits outside "@generated NOT" regions are
// overwritten when the model code is regenerated from the .ecore model.
public class DataImpl extends LayerImpl implements Data {
	/**
	 * The cached value of the '{@link #getDataSegment() <em>Data Segment</em>}' containment reference list.
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @see #getDataSegment()
	 * @generated
	 * @ordered
	 */
	protected EList<DataSegment> dataSegment;

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	protected DataImpl() {
		super();
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	protected EClass eStaticClass() {
		return PhotosMetaModelPackage.Literals.DATA;
	}

	/**
	 * <!-- begin-user-doc -->
	 * Lazily creates the containment list on first access.
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public EList<DataSegment> getDataSegment() {
		if (dataSegment == null) {
			dataSegment = new EObjectContainmentEList<DataSegment>(DataSegment.class, this, PhotosMetaModelPackage.DATA__DATA_SEGMENT);
		}
		return dataSegment;
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public NotificationChain eInverseRemove(InternalEObject otherEnd, int featureID, NotificationChain msgs) {
		switch (featureID) {
			case PhotosMetaModelPackage.DATA__DATA_SEGMENT:
				return ((InternalEList<?>)getDataSegment()).basicRemove(otherEnd, msgs);
		}
		return super.eInverseRemove(otherEnd, featureID, msgs);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public Object eGet(int featureID, boolean resolve, boolean coreType) {
		switch (featureID) {
			case PhotosMetaModelPackage.DATA__DATA_SEGMENT:
				return getDataSegment();
		}
		return super.eGet(featureID, resolve, coreType);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@SuppressWarnings("unchecked")
	@Override
	public void eSet(int featureID, Object newValue) {
		switch (featureID) {
			case PhotosMetaModelPackage.DATA__DATA_SEGMENT:
				getDataSegment().clear();
				getDataSegment().addAll((Collection<? extends DataSegment>)newValue);
				return;
		}
		super.eSet(featureID, newValue);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public void eUnset(int featureID) {
		switch (featureID) {
			case PhotosMetaModelPackage.DATA__DATA_SEGMENT:
				getDataSegment().clear();
				return;
		}
		super.eUnset(featureID);
	}

	/**
	 * <!-- begin-user-doc -->
	 * <!-- end-user-doc -->
	 * @generated
	 */
	@Override
	public boolean eIsSet(int featureID) {
		switch (featureID) {
			case PhotosMetaModelPackage.DATA__DATA_SEGMENT:
				return dataSegment != null && !dataSegment.isEmpty();
		}
		return super.eIsSet(featureID);
	}
} //DataImpl
|
package planaridade;
import model.Grafo;
/**
 * Strategy interface for planarity checking of a graph.
 */
public interface VerificadorPlanaridade {
	/** Returns true when the given graph is planar. */
	public boolean isPlanar(Grafo grafo);
}
|
package org.apereo.cas.authentication;
import org.apereo.cas.util.junit.EnabledIfPortOpen;
import org.junit.jupiter.api.Tag;
import org.springframework.test.context.TestPropertySource;
/**
 * Unit test for {@link LdapAuthenticationHandler}.
 *
 * Runs the inherited direct-bind LDAP authentication tests with an eDirectory
 * password policy configured via a Groovy strategy (GroovyPwdPolicy.groovy)
 * and account-state handling enabled; the warning attribute name/value pair
 * is mapped through the properties below.
 *
 * @author <NAME>
 * @author <NAME>
 * @since 4.0.0
 */
@TestPropertySource(properties = {
"cas.authn.ldap[0].password-policy.enabled=true",
"cas.authn.ldap[0].password-policy.type=EDirectory",
"cas.authn.ldap[0].password-policy.strategy=GROOVY",
"cas.authn.ldap[0].password-policy.groovy.location=classpath:GroovyPwdPolicy.groovy",
"cas.authn.ldap[0].password-policy.accountStateHandlingEnabled=true",
"cas.authn.ldap[0].password-policy.warning-attribute-name=attr-name",
"cas.authn.ldap[0].password-policy.warning-attribute-value=attr-value"
})
// The whole class is skipped unless an LDAP server listens on port 10389.
@EnabledIfPortOpen(port = 10389)
@Tag("Ldap")
public class EDirectoryPasswordPolicyLdapAuthenticationHandlerTests extends DirectLdapAuthenticationHandlerTests {
}
|
<filename>src/main/java/org/olat/ims/lti13/manager/LTI13ExternalToolSigningKeyResolver.java<gh_stars>100-1000
/**
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at the
* <a href="http://www.apache.org/licenses/LICENSE-2.0">Apache homepage</a>
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Initial code contributed and copyrighted by<br>
* frentix GmbH, http://www.frentix.com
* <p>
*/
package org.olat.ims.lti13.manager;
import java.security.Key;
import java.util.ArrayList;
import java.util.List;
import org.apache.logging.log4j.Logger;
import org.olat.core.CoreSpringFactory;
import org.olat.core.logging.Tracing;
import org.olat.core.util.StringHelper;
import org.olat.core.util.crypto.CryptoUtil;
import org.olat.ims.lti13.LTI13Key;
import org.olat.ims.lti13.LTI13Service;
import org.olat.ims.lti13.LTI13Tool;
import org.olat.ims.lti13.LTI13Tool.PublicKeyType;
import org.springframework.beans.factory.annotation.Autowired;
import io.jsonwebtoken.Claims;
import io.jsonwebtoken.JwsHeader;
import io.jsonwebtoken.SigningKeyResolver;
/**
*
* Initial date: 9 mars 2021<br>
* @author srosse, <EMAIL>, http://www.frentix.com
*
*/
/**
 * Resolves the signing key of an incoming JWT issued by an external LTI 1.3
 * tool: the tool is looked up by client id (the token subject), optionally
 * disambiguated by issuer, and its configured public key (inline or fetched
 * from its JWKS URL) is returned.
 */
public class LTI13ExternalToolSigningKeyResolver implements SigningKeyResolver {

    private static final Logger log = Tracing.createLoggerFor(LTI13ExternalToolSigningKeyResolver.class);

    // Tool matched during resolveSigningKey(); exposed to the caller afterwards.
    private LTI13Tool tool;
    // Whether the JWT header carried a key id ("kid").
    private boolean withKid;
    // Populated when the tool's JWKS endpoint returned more than one candidate key.
    private List<LTI13Key> foundKeys;

    @Autowired
    private LTI13Service lti13Service;

    public LTI13ExternalToolSigningKeyResolver() {
        CoreSpringFactory.autowireObject(this);
    }

    public LTI13Tool getTool() {
        return tool;
    }

    public List<LTI13Key> getFoundKeys() {
        return foundKeys;
    }

    public boolean hasFoundMultipleKeys() {
        return foundKeys != null && foundKeys.size() > 1;
    }

    public boolean isWithKid() {
        return withKid;
    }

    @Override
    public Key resolveSigningKey(JwsHeader header, Claims claims) {
        try {
            String iss = claims.getIssuer();
            String sub = claims.getSubject();// client id
            List<LTI13Tool> tools = lti13Service.getToolsByClientId(sub);
            if(tools.isEmpty()) {
                log.error("Client ID not found: {}", sub);
            } else if(tools.size() == 1) {
                tool = tools.get(0);
            } else if(StringHelper.containsNonWhitespace(iss)) {
                List<LTI13Tool> byIssuers = new ArrayList<>();
                for(LTI13Tool t:tools) {
                    if(iss.equals(t.getToolUrl())) {
                        // FIX: was byIssuers.add(tool), which added the still-null
                        // field instead of the matching candidate.
                        byIssuers.add(t);
                    }
                }
                // FIX: was tools.isEmpty(), which can never be true on this branch;
                // the empty-check must apply to the issuer-filtered list.
                if(byIssuers.isEmpty()) {
                    log.error("Client ID/Issuer not found: {}/{}", sub, iss);
                } else if(byIssuers.size() == 1) {
                    tool = byIssuers.get(0);
                } else {
                    log.error("Several tools with same Client ID/Issuer found: {}/{}", sub, iss);
                }
            }

            if(tool != null) {
                if(tool.getPublicKeyTypeEnum() == PublicKeyType.KEY) {
                    // Inline PEM public key configured on the tool.
                    String publicKeyContent = tool.getPublicKey();
                    return CryptoUtil.string2PublicKey(publicKeyContent);
                } else if(tool.getPublicKeyTypeEnum() == PublicKeyType.URL) {
                    // Fetch candidate keys from the tool's JWKS endpoint,
                    // narrowed by the JWT header's key id and algorithm.
                    String kid = header.getKeyId();
                    withKid = StringHelper.containsNonWhitespace(kid);
                    String alg = header.getAlgorithm();
                    String publicKeyUrl = tool.getPublicKeyUrl();
                    List<LTI13Key> keys = lti13Service.getKeys(publicKeyUrl, alg, kid);
                    if(keys.size() == 1) {
                        return keys.get(0).getPublicKey();
                    }
                    if(keys.size() > 1) {
                        // Remember all candidates so the caller can retry; return the first.
                        foundKeys = keys;
                        return keys.get(0).getPublicKey();
                    }
                    return null;
                }
            } else {
                log.error("Client ID not found: {}", sub);
            }
            return null;
        } catch (Exception e) {
            log.error("", e);
            return null;
        }
    }

    @Override
    public Key resolveSigningKey(JwsHeader header, String plaintext) {
        // Plaintext (non-JSON-claims) JWS are not supported.
        log.debug("resolveSigningKey plain: {} claims: {}", header, plaintext);
        return null;
    }
}
|
#!/bin/bash
# Copyright 2018 The Bazel Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# CI entry point: downloads bazelisk and runs all tests with the selected
# C++ toolchain, using --migrate to surface incompatible-flag breakage.
set -euo pipefail

toolchain_name=""
# -t selects the toolchain under test; -h prints usage.
while getopts "t:h" opt; do
  case "$opt" in
    "t") toolchain_name="$OPTARG";;
    "h") echo "Usage:"
         echo "-t - Toolchain name to use for testing; default is llvm_toolchain"
         exit 2
         ;;
    "?") echo "invalid option: -$OPTARG"; exit 1;;
  esac
done

# Lowercased OS name selects the matching bazelisk release artifact.
os="$(uname -s | tr "[:upper:]" "[:lower:]")"
readonly os

# Use bazelisk to catch migration problems.
# Value of BAZELISK_GITHUB_TOKEN is set as a secret on Travis.
readonly url="https://github.com/bazelbuild/bazelisk/releases/download/v1.0/bazelisk-${os}-amd64"
bazel="${TMPDIR:-/tmp}/bazelisk"
readonly bazel
curl -L -sSf -o "${bazel}" "${url}"
chmod a+x "${bazel}"

set -x
"${bazel}" version

# --bazelrc=/dev/null isolates the run from user/workspace rc files.
"${bazel}" --migrate --bazelrc=/dev/null test \
  --extra_toolchains="${toolchain_name}" \
  --incompatible_enable_cc_toolchain_resolution \
  --copt=-v \
  --linkopt=-Wl,-t \
  --symlink_prefix=/ \
  --color=yes \
  --show_progress_rate_limit=30 \
  --keep_going \
  --test_output=errors \
  //...
|
'use strict';
var fs = require('fs');
var path = require('path');
var async = require('async');
var command = require('commander');
var config = require('config');
var moment = require('moment');
var helper = require('./helper');
command
.description('Prune tarballs from the archives based on aging rules')
.option('-a, --archives [directory]', 'directory containing archives')
.parse(process.argv);
var log = helper.logger('github-prune-archives', config.get('log.dir'),
config.get('log.level'), config.get('log.retention'));
/**
* Is the date less than n days from today
*/
/**
 * Is the date less than n days from today.
 */
var isDaily = function(date, n, today) {
  var cutoff = moment(today).subtract(n, 'days');
  return cutoff.isBefore(date);
};
/**
* Is the date a Sunday and less than n weeks from today
*/
/**
 * Is the date a Sunday (start of week) within the last n weeks from today.
 */
var isSunday = function(date, n, today) {
  var idx;
  for (idx = 0; idx < n; idx++) {
    var weekStart = moment(today).startOf('week').subtract(idx, 'weeks');
    if (weekStart.isSame(date)) {
      return true;
    }
  }
  return false;
};
/**
* Is the date a first of the month and less than n months from today
*/
/**
 * Is the date a first of the month within the last n months from today.
 */
var isFirstOfMonth = function(date, n, today) {
  var idx;
  for (idx = 0; idx < n; idx++) {
    var monthStart = moment(today).startOf('month').subtract(idx, 'months');
    if (monthStart.isSame(date)) {
      return true;
    }
  }
  return false;
};
/**
* Is the date a first of the year and less than n years from today
*/
/**
 * Is the date a first of the year within the last n years from today.
 */
var isFirstOfYear = function(date, n, today) {
  var idx;
  for (idx = 0; idx < n; idx++) {
    var yearStart = moment(today).startOf('year').subtract(idx, 'years');
    if (yearStart.isSame(date)) {
      return true;
    }
  }
  return false;
};
/**
* Prune files that don't match the retention rules
*/
/**
 * Prune files that don't match the retention rules: a file is kept when its
 * embedded date satisfies the daily, weekly, monthly or yearly rule,
 * otherwise it is deleted.
 *
 * @param {Object} today    reference date (moment-compatible)
 * @param {number} days     daily retention window
 * @param {number} weeks    weekly retention window
 * @param {number} months   monthly retention window
 * @param {number} years    yearly retention window
 * @param {string} dir      directory containing the file
 * @param {string} filename file to evaluate
 * @param {Function} done   async-style callback (err, status)
 */
var prune = function(today, days, weeks, months, years, dir, filename, done) {
  var absFilename = path.format({
    dir: dir,
    base: filename
  });
  try {
    var archiveDate = helper.extractDate(absFilename);
    if (isDaily(archiveDate, days, today) ||
        isSunday(archiveDate, weeks, today) ||
        isFirstOfMonth(archiveDate, months, today) ||
        isFirstOfYear(archiveDate, years, today)) {
      log.info('Keeping archive', absFilename);
      done(null, 'keep');
    } else {
      fs.unlink(absFilename, function(err) {
        if (err) {
          // we will retry on the next run
          log.error('Unable to remove archive', absFilename);
          done(null, 'failed');
        } else {
          log.info('Removed archive', absFilename);
          done(null, 'removed');
        }
      });
    }
  } catch (e) {
    // FIX: previously logged the `path` module object instead of the filename.
    log.warn('Skipping unrecognized file', absFilename);
    done(null, 'skipped');
  }
};
/**
* Scan the archive dir to look for files to prune
*/
/**
 * Scan the archive dir and apply the retention rules to every file in it.
 *
 * @param {string} archives  directory containing the archives
 * @param {Object} today     reference date (moment-compatible)
 * @param {Object} retention object with days/weeks/months/years counts
 */
var scanArchives = function(archives, today, retention) {
  // scan all files in the archive directory
  fs.readdir(archives, function(err, files) {
    if (err) {
      log.error('Cannot read directory', archives);
    } else {
      // Pre-bind the six fixed arguments; async.each then supplies the two
      // trailing parameters prune expects: (filename, done).
      async.each(files,
        prune.bind(prune, today, retention.days, retention.weeks,
          retention.months, retention.years, archives),
        /* istanbul ignore next */
        function(err) {
          if (err) {
            log.error(err);
          } else {
            log.info('processed', files.length, 'archives');
          }
        }
      );
    }
  });
};
/* istanbul ignore if */
if (process.env.NODE_ENV !== 'test') {
  // Capture the reference date once so every file is evaluated against the
  // same "today", even if the scan runs across midnight.
  var today = moment();
  var archives = command.archives || config.get('dir.archives');
  scanArchives(archives, today, config.get('retention'));
}
|
#!/bin/sh
# Build the HTML documentation from the docs/ directory (make html exits
# only runs if the cd succeeds, thanks to &&).
cd docs && make html
|
# Update RubyGems system and install Bundler
before_install:
- gem update --system
- gem install bundler
# Install necessary dependencies for PhantomJS
addons:
apt:
packages:
- libcurl4-openssl-dev
- libfontconfig
# Install PhantomJS
install:
- rm -rf $PWD/travis_phantomjs; mkdir -p $PWD/travis_phantomjs
- wget https://bitbucket.org/ariya/phantomjs/downloads/phantomjs-2.1.1-linux-x86_64.tar.bz2 -O $PWD/travis_phantomjs/phantomjs-2.1.1-linux-x86_64.tar.bz2
- tar -xvf $PWD/travis_phantomjs/phantomjs-2.1.1-linux-x86_64.tar.bz2 -C $PWD/travis_phantomjs
- export PATH=$PWD/travis_phantomjs/phantomjs-2.1.1-linux-x86_64/bin:$PATH
# Verify PhantomJS installation
script:
- phantomjs --version
|
module Trith; module Core
  ##
  # Stack operators.
  module Stack
    ##
    # Stack shufflers: operators that clear, rearrange, duplicate or drop
    # elements of the machine's operand stack (@stack).
    module Shufflers
      ##
      # Removes all elements from the stack.
      #
      # @return [Machine]
      def clear
        @stack.clear
        self
      end

      ##
      # Pushes the current stack size onto the stack.
      #
      # @return [Machine]
      def depth
        push(@stack.size)
      end

      ##
      # Replaces the stack with a single element containing the old stack.
      #
      # @return [Machine]
      def stack_
        @stack = [@stack]
        self
      end

      ##
      # Replaces the stack with the elements of the given sequence
      # (characters of a String, or any Array-convertible/enumerable value).
      #
      # @param [#to_a, #each] seq
      # @return [Machine]
      # @raise [Machine::InvalidOperandError] if seq cannot be enumerated
      def unstack(seq)
        @stack = case seq
          when String then seq.each_char.to_a
          when Array then seq
          else case
            when seq.respond_to?(:to_a) then seq.to_a
            when seq.respond_to?(:each) then seq.each.to_a
            else raise Machine::InvalidOperandError.new(seq, :unstack)
          end
        end
        self
      end

      ##
      # Drops the top element.
      #
      # @return [Machine]
      def drop
        pop
        self
      end

      ##
      # Drops the top two elements.
      #
      # @return [Machine]
      def drop2
        pop(2)
        self
      end
      alias_method :'2drop', :drop2

      ##
      # Drops the top three elements.
      #
      # @return [Machine]
      def drop3
        pop(3)
        self
      end
      alias_method :'3drop', :drop3

      ##
      # Duplicates the top element.
      #
      # @return [Machine]
      def dup
        push(*(pop(1) * 2))
      end

      ##
      # Duplicates the top two elements (as a pair).
      #
      # @return [Machine]
      def dup2
        push(*(pop(2) * 2))
      end
      alias_method :'2dup', :dup2

      ##
      # Duplicates the top three elements (as a triple).
      #
      # @return [Machine]
      def dup3
        push(*(pop(3) * 2))
      end
      alias_method :'3dup', :dup3

      ##
      # Swaps the top two elements.
      #
      # @return [Machine]
      def swap
        push(*pop(2).reverse)
      end

      ##
      # Removes the second element, keeping the top.
      #
      # @return [Machine]
      def nip
        push(pop(2).last)
      end

      ##
      # Removes the second and third elements, keeping the top.
      #
      # @return [Machine]
      def nip2
        push(pop(3).last)
      end
      alias_method :'2nip', :nip2

      ##
      # Copies the second element onto the top.
      #
      # @return [Machine]
      def over
        push(*((ops = pop(2)) + [ops.first]))
      end

      ##
      # Copies the third element onto the top.
      #
      # @return [Machine]
      def pick
        push(*((ops = pop(3)) + [ops.first]))
      end

      ##
      # Rotates the top three elements: a b c -> b c a.
      #
      # NOTE(review): unlike its siblings this operator receives its operands
      # as explicit parameters rather than popping them itself — presumably
      # the machine dispatcher supplies them; confirm against Machine.
      #
      # @return [Machine]
      def rot(a, b, c)
        push(b, c, a)
      end
    end # module Shufflers

    ##
    # Stack combinators.
    module Combinators
      ##
      # @return [Machine]
      def dip
        # TODO
      end
    end # module Combinators

    # Include all submodule methods directly into Trith::Core::Stack:
    constants.each { |mod| include(const_get(mod)) }
  end # module Stack
end; end # module Trith::Core
|
#!/bin/bash
# This is
# header
# This is not
# NOTE(review): the three comment lines above look like fixture content for a
# header-extraction check — left byte-identical on purpose.
echo "TEST"
|
#!/bin/bash
#
# This script generates separate egs directory for each input
# language in multilingual setup, which contains both egs.*.ark and egs.*.scp.
#
# This script will generally be called from nnet3 multilingual training script.

. ./cmd.sh
set -e

# Begin configuration section
cmd=
stage=0
left_context=13
right_context=9
online_multi_ivector_dirs= # list of iVector dir for all languages
                           # can be used if we are including speaker information as iVectors.
                           # e.g. "exp/lang1/train-ivector exp/lang2/train-ivector"
samples_per_iter=400000 # this is the target number of egs in each archive of egs
                        # (prior to merging egs). We probably should have called
                        # it egs_per_iter. This is just a guideline; it will pick
                        # a number that divides the number of samples in the
                        # entire data.
egs_opts=   # extra options forwarded verbatim to steps/nnet3/get_egs.sh
            # (FIX: was used below without being declared here, and was
            # passed twice on the same command line).
# Configuration to allocate egs
minibatch_size=512
num_archives=100
num_jobs=10
cmvn_opts=

echo "$0 $@" # Print the command line for logging
if [ -f path.sh ]; then . ./path.sh; fi
. parse_options.sh || exit 1;

if [ $# -lt 4 ]; then
  echo "Usage: $0 [opts] N <data-dir1> .. <data-dirN> <ali-dir1> .. <ali-dirN>"
  echo " <egs-out1> .. <egs-outN>"
  echo " e.g.: $0 2 data/lang1/train data/lang2/train exp/lang1/tri5_ali"
  echo " exp/lang2/tri5_ali exp/lang1/nnet3/egs exp/lang2/nnet3/egs"
  echo ""
  echo "Main options (for others, see top of script file)"
  echo " --config <config-file> # config file containing options"
  echo " --num-jobs <nj> # The maximum number of jobs you want to run in"
  echo " # parallel (increase this only if you have good disk and"
  echo " # network speed). default=6"
  echo " --cmd (utils/run.pl;utils/queue.pl <queue opts>) # how to run jobs."
  echo " --samples-per-iter <#samples;400000> # Target number of egs per archive (option is badly named)"
  echo " --frames-per-eg <frames;8> # number of frames per eg on disk"
  echo " --left-context <width;4> # Number of frames on left side to append for feature input"
  echo " --right-context <width;4> # Number of frames on right side to append for feature input"
  echo " --num-frames-diagnostic <#frames;4000> # Number of frames used in computing (train,valid) diagnostics"
  echo " --num-valid-frames-combine <#frames;10000> # Number of frames used in getting combination weights at the"
  echo " # very end."
  echo " --stage <stage|0> # Used to run a partially-completed training process from somewhere in"
  echo " # the middle."
  exit 1;
fi

num_lang=$1
shift
args=("$@")

# Exactly three directories (data, alignments, egs-out) are expected per language.
if [ ${#args[@]} != $[$num_lang*3] ]; then
  echo "$0: num of input dirs provided for all langs is not compatible with num-langs in input." && exit 1;
fi

# read input data, ali and egs dir per lang
for l in `seq 0 $[$num_lang-1]`; do
  multi_data_dirs[$l]=${args[$l]}
  multi_ali_dirs[$l]=${args[$l+$num_lang]}
  multi_egs_dirs[$l]=${args[$l+2*$num_lang]}
done

echo "$0: Generate separate egs directory per language for multilingual training."
online_multi_ivector_dirs=(${online_multi_ivector_dirs[@]})
for lang_index in `seq 0 $[$num_lang-1]`; do
  data=${multi_data_dirs[$lang_index]}
  ali_dir=${multi_ali_dirs[$lang_index]}
  egs_dir=${multi_egs_dirs[$lang_index]}
  online_ivector_dir=
  if [ ! -z "${online_multi_ivector_dirs[$lang_index]}" ]; then
    online_ivector_dir=${online_multi_ivector_dirs[$lang_index]}
  fi
  echo online_ivector_dir = $online_ivector_dir
  if [ ! -d "$egs_dir" ]; then
    # NOTE(review): lang_list is not set in this script; it is assumed to be
    # exported by the calling multilingual training script — confirm.
    echo "$0: Generate egs for ${lang_list[$lang_index]}"
    if [[ $(hostname -f) == *.clsp.jhu.edu ]] && [ ! -d $egs_dir/storage ]; then
      utils/create_split_dir.pl \
        /export/b0{3,4,5,6}/$USER/kaldi-data/egs/${lang_list[$lang_index]}-$(date +'%m_%d_%H_%M')/s5/$egs_dir/storage $egs_dir/storage
    fi
    extra_opts=()
    [ ! -z "$cmvn_opts" ] && extra_opts+=(--cmvn-opts "$cmvn_opts")
    [ ! -z "$online_ivector_dir" ] && extra_opts+=(--online-ivector-dir $online_ivector_dir)
    extra_opts+=(--left-context $left_context)
    extra_opts+=(--right-context $right_context)
    echo "$0: calling get_egs.sh"
    # FIX: $egs_opts was previously passed twice on this command line,
    # duplicating any user-supplied options.
    steps/nnet3/get_egs.sh $egs_opts "${extra_opts[@]}" \
      --samples-per-iter $samples_per_iter --stage $stage \
      --cmd "$cmd" \
      --generate-egs-scp true \
      $data $ali_dir $egs_dir || exit 1;
  fi
done
|
/*
* Copyright 2015 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.atomix.catalyst.transport.netty;
import io.atomix.catalyst.util.Assert;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.net.ssl.*;
import java.io.File;
import java.io.FileInputStream;
import java.security.KeyStore;
/**
* Netty TLS.
*
* @author <a href="http://github.com/electrical"><NAME></a>
*/
/**
 * Netty TLS helper: builds {@link SSLEngine} instances from the key/trust
 * stores configured in {@link NettyOptions}.
 */
final class NettyTls {
    private static final Logger LOGGER = LoggerFactory.getLogger(NettyTls.class);

    private NettyOptions properties;

    public NettyTls(NettyOptions properties) {
        this.properties = properties;
    }

    /**
     * Initializes an SSL engine.
     *
     * @param client Indicates whether the engine is being initialized for a client.
     * @return The initialized SSL engine.
     */
    public SSLEngine initSslEngine(boolean client) throws Exception {
        // Load the keystore
        KeyStore keyStore = loadKeystore(properties.sslKeyStorePath(), properties.sslKeyStorePassword());

        // Setup the keyManager to use our keystore
        KeyManagerFactory keyManagerFactory = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
        keyManagerFactory.init(keyStore, keyStoreKeyPass(properties));

        // Setup the Trust keystore
        KeyStore trustStore;
        if (properties.sslTrustStorePath() != null) {
            // Use the separate Trust keystore
            LOGGER.debug("Using separate trust store");
            trustStore = loadKeystore(properties.sslTrustStorePath(), properties.sslTrustStorePassword());
        } else {
            // Reuse the existing keystore
            trustStore = keyStore;
            LOGGER.debug("Using key store as trust store");
        }

        TrustManagerFactory trustManagerFactory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
        trustManagerFactory.init(trustStore);

        KeyManager[] keyManagers = keyManagerFactory.getKeyManagers();
        TrustManager[] trustManagers = trustManagerFactory.getTrustManagers();

        SSLContext sslContext = SSLContext.getInstance("TLS");
        sslContext.init(keyManagers, trustManagers, null);

        SSLEngine sslEngine = sslContext.createSSLEngine();
        sslEngine.setUseClientMode(client);
        // Request (but do not require) a client certificate.
        sslEngine.setWantClientAuth(true);
        sslEngine.setEnabledProtocols(sslEngine.getSupportedProtocols());
        sslEngine.setEnabledCipherSuites(sslEngine.getSupportedCipherSuites());
        sslEngine.setEnableSessionCreation(true);
        return sslEngine;
    }

    /**
     * Loads a JKS keystore from disk.
     *
     * @param path     keystore file path (must not be null)
     * @param password keystore password
     * @return the loaded keystore
     */
    private KeyStore loadKeystore(String path, String password) throws Exception {
        Assert.notNull(path, "Path");
        File file = new File(path);
        LOGGER.debug("Using JKS at {}", file.getCanonicalPath());
        KeyStore ks = KeyStore.getInstance("JKS");
        // FIX: try-with-resources — the FileInputStream was previously never closed.
        try (FileInputStream in = new FileInputStream(file.getCanonicalPath())) {
            ks.load(in, password.toCharArray());
        }
        return ks;
    }

    /**
     * Returns the key password, falling back to the keystore password when no
     * separate key password is configured.
     */
    private char[] keyStoreKeyPass(NettyOptions properties) throws Exception {
        if (properties.sslKeyStoreKeyPassword() != null) {
            return properties.sslKeyStoreKeyPassword().toCharArray();
        } else {
            return properties.sslKeyStorePassword().toCharArray();
        }
    }
}
|
#!/usr/bin/env bash
# Print a summary of the build environment: CPU, RAM, installed tool
# versions and OS. Output is purely informational (CI diagnostics).
echo "===CPU:"
# FIX: grep the file directly instead of `cat | egrep` (useless use of cat;
# egrep is deprecated in favour of grep -E). Output is identical.
grep -E "model name|cores" /proc/cpuinfo
echo " "
echo "===RAM: "
free -h
printf "\n"
echo "===Java version: "
# Note: `java -version` prints to stderr by convention.
java -version
echo " "
echo "===OS: "
uname -a
printf "\n"
echo "===Node: "
node --version
printf "\n"
echo "=== Go: "
go version
|
#!/bin/bash
# Start (or reuse) a per-user ssh-agent, persisting its environment in
# ~/.ssh/environment so later shells can reconnect to the same agent.
SSH_ENV="$HOME/.ssh/environment"

function start_agent {
    echo "Initialising new SSH agent..."
    # umask 066 keeps the environment file readable only by its owner.
    (umask 066; /usr/bin/ssh-agent > "${SSH_ENV}")
    . "${SSH_ENV}" > /dev/null
    /usr/bin/ssh-add;
}

# Source SSH settings, if applicable
if [ -f "${SSH_ENV}" ]; then
    . "${SSH_ENV}" > /dev/null
    # Restart the agent if the recorded PID no longer belongs to a running
    # ssh-agent process.
    ps -ef | grep ${SSH_AGENT_PID} | grep ssh-agent$ > /dev/null || {
        start_agent;
    }
else
    start_agent;
fi
|
#!/bin/sh
# CocoaPods "Embed Frameworks" build phase: copies, strips and re-signs the
# pod frameworks into the application bundle.
set -e

echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"

# This protects against multiple targets copying the same framework dependency at the same time. The solution
# was originally proposed here: https://lists.samba.org/archive/rsync/2008-February/020158.html
RSYNC_PROTECT_TMP_FILES=(--filter "P .*.??????")
# Copies a framework ($1) into the app bundle, strips architectures the app
# doesn't build for, re-signs it if required, and (pre-Xcode 7) embeds the
# Swift runtime dylibs it links against.
install_framework()
{
  # Resolve the framework's source path: built product, basename of a built
  # product, or a literal path.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  # Use filter instead of exclude so missing patterns don't throw errors.
  echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  local basename
  basename="$(basename -s .framework "$1")"
  binary="${destination}/${basename}.framework/${basename}"
  # Fall back to a bare binary (no .framework wrapper) if needed.
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Copies the dSYM of a vendored framework
# Copies the dSYM of a vendored framework ($1) into the dSYM output folder,
# silently skipping sources that don't exist.
install_dsym() {
  local source="$1"
  if [ -r "$source" ]; then
    echo "rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${DWARF_DSYM_FOLDER_PATH}\""
    rsync --delete -av "${RSYNC_PROTECT_TMP_FILES[@]}" --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${DWARF_DSYM_FOLDER_PATH}"
  fi
}
# Signs a framework with the provided identity
# Signs a framework ($1) with the identity from the build settings, unless
# code signing is disabled or disallowed.
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identity
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    local code_sign_cmd="/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements '$1'"
    # Background the signing when CocoaPods parallel signing is enabled; the
    # script footer waits for all of them.
    if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
      code_sign_cmd="$code_sign_cmd &"
    fi
    echo "$code_sign_cmd"
    eval "$code_sign_cmd"
  fi
}
# Strip invalid architectures
# Strips architectures not listed in $ARCHS from a fat binary ($1) in place,
# so "fat" simulator/device frameworks pass device code-signing validation.
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current file
  archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
  stripped=""
  for arch in $archs; do
    if ! [[ "${ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary" || exit 1
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
}
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/Braintree/Braintree.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/SwiftSpinner/SwiftSpinner.framework"
fi
# NOTE(review): the Debug and Release branches embed the identical framework
# list; CocoaPods generates one branch per configuration, so this is expected.
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "${BUILT_PRODUCTS_DIR}/Braintree/Braintree.framework"
  install_framework "${BUILT_PRODUCTS_DIR}/SwiftSpinner/SwiftSpinner.framework"
fi
# Wait for any backgrounded parallel code-signing jobs to finish.
if [ "${COCOAPODS_PARALLEL_CODE_SIGN}" == "true" ]; then
  wait
fi
|
/**
 * Evaluates an infix arithmetic expression of single-digit operands with the
 * operators + - * / and parentheses, honoring standard operator precedence
 * and left-to-right associativity.
 *
 * Fixes over the previous version:
 * 1. Operand order: the first pop yields the RIGHT operand, so subtraction
 *    and division were computed backwards (e.g. "5-3" gave -2).
 * 2. Precedence: encountering '+'/'-' never reduced a pending '*'/'/' on the
 *    stack, so "2*3+4" evaluated as 2*(3+4)=14 instead of 10.
 *
 * @param expression the infix expression (characters other than digits,
 *                   operators and parentheses are ignored)
 * @return the numeric value of the expression
 */
public static double parseExpression(String expression){
    Stack<Character> operators = new Stack<>();
    Stack<Double> operands = new Stack<>();
    int n = expression.length();
    for(int i=0; i<n; i++){
        char c = expression.charAt(i);
        if(Character.isDigit(c)){
            operands.push((double) (c - '0'));
        }
        else if(c == '+' || c == '-' || c == '*' || c == '/'){
            // Reduce every pending operator of greater-or-equal precedence
            // before pushing the new one (left associativity).
            while(!operators.isEmpty() && precedence(operators.peek()) >= precedence(c)){
                applyTop(operators, operands);
            }
            operators.push(c);
        }
        else if(c == '(')
            operators.push(c);
        else if(c == ')'){
            while(!operators.isEmpty() && operators.peek() != '('){
                applyTop(operators, operands);
            }
            operators.pop(); // discard the matching '('
        }
    }
    // Reduce whatever remains.
    while(!operators.isEmpty()){
        applyTop(operators, operands);
    }
    return operands.pop();
}

/** Binding strength of an operator; '(' is weakest so it is never reduced early. */
private static int precedence(char op){
    if(op == '*' || op == '/')
        return 2;
    if(op == '+' || op == '-')
        return 1;
    return 0; // '('
}

/** Pops one operator and two operands, applies {@code left op right}, pushes the result. */
private static void applyTop(Stack<Character> operators, Stack<Double> operands){
    char op = operators.pop();
    double right = operands.pop(); // top of stack is the RIGHT operand
    double left = operands.pop();
    double result;
    if(op == '+')
        result = left + right;
    else if(op == '-')
        result = left - right;
    else if(op == '*')
        result = left * right;
    else
        result = left / right;
    operands.push(result);
}
|
package grpc
import (
"strconv"
"testing"
"time"
"github.com/sirupsen/logrus"
"github.com/sirupsen/logrus/hooks/test"
"github.com/spiral/php-grpc/tests"
"github.com/spiral/roadrunner/service"
"github.com/spiral/roadrunner/service/rpc"
"github.com/stretchr/testify/assert"
"golang.org/x/net/context"
)
// Test_RPC boots a service container with the RPC and gRPC services and
// verifies that the grpc.Reset RPC restarts the worker pool: the worker PID
// reported by the Info endpoint must change after the reset.
func Test_RPC(t *testing.T) {
	logger, _ := test.NewNullLogger()
	logger.SetLevel(logrus.DebugLevel)

	c := service.NewContainer(logger)
	c.Register(rpc.ID, &rpc.Service{})
	c.Register(ID, &Service{})

	assert.NoError(t, c.Init(&testCfg{
		rpcCfg: `{"enable":true, "listen":"tcp://:5004"}`,
		grpcCfg: `{
			"listen": "tcp://:9080",
			"tls": {
				"key": "tests/server.key",
				"cert": "tests/server.crt"
			},
			"proto": "tests/test.proto",
			"workers":{
				"command": "php tests/worker.php",
				"relay": "pipes",
				"pool": {
					"numWorkers": 1,
					"allocateTimeout": 10,
					"destroyTimeout": 10
				}
			}
		}`,
	}))

	s, _ := c.Get(ID)
	ss := s.(*Service)

	s2, _ := c.Get(rpc.ID)
	rs := s2.(*rpc.Service)

	// Serve in the background and give the services time to come up.
	go func() { assert.NoError(t, c.Serve()) }()
	time.Sleep(time.Millisecond * 100)
	defer c.Stop()

	// addr and getClient are defined elsewhere in this package.
	cl, cn := getClient(addr)
	defer cn.Close()

	rcl, err := rs.Client()
	assert.NoError(t, err)

	// The served PID matches the single pool worker before the reset.
	out, err := cl.Info(context.Background(), &tests.Message{Msg: "PID"})
	assert.NoError(t, err)
	assert.Equal(t, strconv.Itoa(*ss.rr.Workers()[0].Pid), out.Msg)

	r := ""
	assert.NoError(t, rcl.Call("grpc.Reset", true, &r))
	assert.Equal(t, "OK", r)

	// After the reset a new worker (new PID) must be serving.
	out2, err := cl.Info(context.Background(), &tests.Message{Msg: "PID"})
	assert.NoError(t, err)
	assert.Equal(t, strconv.Itoa(*ss.rr.Workers()[0].Pid), out2.Msg)
	assert.NotEqual(t, out.Msg, out2.Msg)
}
// Test_Workers verifies that the grpc.Workers RPC reports the configured
// single-worker pool.
func Test_Workers(t *testing.T) {
	logger, _ := test.NewNullLogger()
	logger.SetLevel(logrus.DebugLevel)

	c := service.NewContainer(logger)
	c.Register(rpc.ID, &rpc.Service{})
	c.Register(ID, &Service{})

	assert.NoError(t, c.Init(&testCfg{
		rpcCfg: `{"enable":true, "listen":"tcp://:5004"}`,
		grpcCfg: `{
			"listen": "tcp://:9080",
			"tls": {
				"key": "tests/server.key",
				"cert": "tests/server.crt"
			},
			"proto": "tests/test.proto",
			"workers":{
				"command": "php tests/worker.php",
				"relay": "pipes",
				"pool": {
					"numWorkers": 1,
					"allocateTimeout": 10,
					"destroyTimeout": 10
				}
			}
		}`,
	}))

	s, _ := c.Get(ID)
	ss := s.(*Service)

	s2, _ := c.Get(rpc.ID)
	rs := s2.(*rpc.Service)

	// Serve in the background and give the services time to come up.
	go func() { assert.NoError(t, c.Serve()) }()
	time.Sleep(time.Millisecond * 100)
	defer c.Stop()

	// addr and getClient are defined elsewhere in this package.
	cl, cn := getClient(addr)
	defer cn.Close()

	rcl, err := rs.Client()
	assert.NoError(t, err)

	out, err := cl.Info(context.Background(), &tests.Message{Msg: "PID"})
	assert.NoError(t, err)
	assert.Equal(t, strconv.Itoa(*ss.rr.Workers()[0].Pid), out.Msg)

	r := &WorkerList{}
	// NOTE(review): &r passes a **WorkerList to Call — presumably accepted by
	// the codec; confirm this is intentional rather than a stray &.
	assert.NoError(t, rcl.Call("grpc.Workers", true, &r))
	assert.Len(t, r.Workers, 1)
}
// Test_Errors verifies that the RPC server rejects calls when no underlying
// service is attached (nil receiver state).
func Test_Errors(t *testing.T) {
	r := &rpcServer{nil}
	assert.Error(t, r.Reset(true, nil))
	assert.Error(t, r.Workers(true, nil))
}
|
<filename>lib/transform-fields.ts<gh_stars>0
import { Fields } from "../src/utils/game-constants"
import { fetchTeamsFromTBA } from "./apiFetches"
// run using ts-node, use: ` ts-node -O '{"module": "commonjs"}' ./transform-fields.ts `
// Pit-scouting questionnaire definition: each section maps to a list of
// fields, either a bare string (free-text field) or a typed descriptor
// (Radio / Checkbox / Slider). The section/field names reference the 2019
// FRC game elements (hatches, cargo, Sandstorm, climb levels).
const fields: Fields = {
  General: [
    "Scouter Name",
    "Team Number",
    {
      "field-name": "How much experience do your drivers have?",
      type: "Radio",
      options: ["1 year", "2 years", "3 years+"],
      scoring: false,
    },
    {
      "field-name":
        "Where is your robot capable of placing hatches?(Check all that apply)",
      type: "Checkbox",
      options: ["Not Capable", "level 1", "level 2", "level 3"],
      scoring: false,
    },
    {
      "field-name":
        "Where is your robot capable of placing cargo?(Check all that apply)",
      type: "Checkbox",
      options: ["Not Capable", "level 1", "level 2", "level 3"],
      scoring: false,
    },
  ],
  Sandstorm: [
    {
      "field-name": "Where does your robot start during Sandstorm?",
      type: "Radio",
      options: ["level 1", "level 2"],
      scoring: false,
    },
    {
      "field-name": "Do you preload hatch or cargo?",
      type: "Radio",
      options: ["Hatch", "Cargo"],
      scoring: false,
    },
  ],
  Endgame: [
    {
      "field-name": "Is your robot capable of climbing?",
      type: "Radio",
      options: ["Not capable", "level 2", "level 3"],
      scoring: false,
    },
    {
      "field-name":
        "If you can climb, how many robots are you able to bring with you?",
      type: "Slider",
      min: 0,
      max: 2,
      marks: {
        0: "None",
        1: "One",
        2: "Two",
      },
      step: 1,
      scoring: false,
    },
  ],
  Problems: [
    "Recent problems with robot",
    {
      "field-name": "If so, what is your progress in solving those issues?",
      type: "Radio",
      options: [
        "No problems",
        "In progress but kind of stuck",
        "In progress, & we know what we're doing",
        "Solved",
      ],
      scoring: false,
    },
  ],
}
// Flattens all field groups into one newline-separated list of field names.
// Number-type fields marked as "dropped" produce an extra "<name> dropped"
// line. (FIX: renamed from the misspelled `transfromFields`; both the
// declaration and its sole use below are inside this block.)
const transformFields = Object.keys(fields)
  .map(key => fields[key])
  .reduce((acc, val) => acc.concat(val), [])
  .map(fieldKey =>
    typeof fieldKey === "string"
      ? fieldKey
      : fieldKey.type === "Number"
      ? fieldKey.dropped
        ? `${fieldKey["field-name"]}\n ${fieldKey["field-name"]} dropped`
        : fieldKey["field-name"]
      : fieldKey["field-name"]
  )
  .reduce(
    (p, c) => `${p}
${c}`,
    ""
  )
console.log(transformFields)
|
package com.shop.service.user;
import com.shop.been.AjaxResult;
import com.shop.model.user.User;
import javax.servlet.http.HttpSession;
/**
* <p>Title: UserService</p>
* <p>Description:</p>
*
* @Author 姚洪斌
* @Date 2017/7/18 20:16
*/
public interface UserService {

    /**
     * Registration-time lookup: checks whether the user's name, e-mail
     * address or phone number is already registered.
     *
     * @param user the candidate user
     * @return a result string identifying any field that is already taken
     */
    String selectUser(User user);

    /**
     * Password-recovery lookup: checks whether the given phone number /
     * e-mail address is registered. Kept separate from
     * {@link #selectUser(User)} because the returned messages differ.
     *
     * @param user     the user to look up
     * @param identify discriminator for the kind of lookup
     * @return a result string describing the lookup outcome
     */
    String selectUser(User user, Integer identify);

    /**
     * Persists a newly registered user.
     *
     * @param user the user to save
     */
    void saveUser(User user);

    /**
     * Authenticates the given credentials.
     *
     * @param user the login credentials
     * @return the matching user
     */
    User login(User user);

    /**
     * Resets the user's password.
     *
     * @param user the user carrying the new password
     */
    void resetPassword(User user);

    /**
     * Updates the user's profile information.
     *
     * @param user the user to update
     * @return a result string describing the update outcome
     */
    String updateUser(User user);

    /**
     * After the logged-in user's data has been updated, refreshes the
     * "loginUser" data stored in the session so it reflects the new values.
     *
     * @param userId  id of the user whose data changed
     * @param session HTTP session holding the logged-in user
     */
    void updateSession(Integer userId, HttpSession session);

    /**
     * Saves an avatar uploaded by the user.
     *
     * @param img     Base64-encoded image data
     * @param session HTTP session used to obtain the logged-in user
     * @return the upload result
     */
    AjaxResult avatarUpload(String img, HttpSession session);
}
|
<filename>src/test/java/com/github/piedpiper/node/stepfunctions/StepFunctionsExecuteHandlerTest.java
package com.github.piedpiper.node.stepfunctions;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.IOException;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.powermock.modules.junit4.PowerMockRunner;
import com.amazonaws.services.stepfunctions.AWSStepFunctions;
import com.amazonaws.services.stepfunctions.model.StartExecutionRequest;
import com.amazonaws.services.stepfunctions.model.StartExecutionResult;
import com.github.commons.utils.JsonUtils;
import com.github.piedpiper.node.NodeInput;
import com.github.piedpiper.node.NodeOutput;
import com.google.inject.AbstractModule;
import com.google.inject.Guice;
import com.google.inject.Injector;
import com.google.inject.Provides;
import com.google.inject.Singleton;
@RunWith(PowerMockRunner.class)
public class StepFunctionsExecuteHandlerTest {
@Mock
private AWSStepFunctions sfClient;
@Mock
private StartExecutionRequest sfExecuteRequest;
private Injector injector;
/**
 * Common stubbing for every test: the step-functions client returns a fixed
 * execution ARN, the request mock accepts its setters, and a Guice injector
 * is built that provides the mocked client.
 *
 * FIX: removed the stray {@code @Test} annotation — this is a fixture method
 * and carrying both {@code @Test} and {@code @Before} made it additionally
 * run as a standalone (assertion-free) test case.
 */
@Before
public void builderSetup() throws Exception {
    StartExecutionResult sfExecuteResult = new StartExecutionResult();
    sfExecuteResult.setExecutionArn("sample_arn");
    Mockito.when(sfClient.startExecution(Mockito.any())).thenReturn(sfExecuteResult);
    this.injector = Guice.createInjector(new AbstractModule() {
        @Override
        protected void configure() {
        }

        @Provides
        @Singleton
        public AWSStepFunctions getStepFunctionsClient() {
            return sfClient;
        }
    });
    Mockito.doNothing().when(sfExecuteRequest).setName(Mockito.anyString());
    Mockito.doNothing().when(sfExecuteRequest).setStateMachineArn(Mockito.anyString());
    Mockito.doNothing().when(sfExecuteRequest).setInput(Mockito.anyString());
}
@Test
public void testSuccess() throws Exception {
StepFunctionsExecuteHandler sfExecute = getStepFunctionsExecuteNode();
Mockito.doReturn(sfExecuteRequest).when(sfExecute).getStartExecutionRequest();
NodeInput input = new NodeInput();
input.setInput(JsonUtils.mapper.readTree(new FileInputStream(getFileName("SFExecuteSuccessGraph.json"))));
NodeOutput output = sfExecute.apply(input);
Assert.assertNotNull(output.getOutput());
Mockito.verify(sfExecuteRequest, Mockito.times(1)).setName("test1");
Mockito.verify(sfExecuteRequest).setStateMachineArn("sample_arn");
Mockito.verify(sfExecuteRequest).setInput("{}");
Assert.assertEquals(output.getOutput().get("executionArn").asText(), "sample_arn");
}
@Test(expected = RuntimeException.class)
public void testRuntimeException() throws Exception {
StepFunctionsExecuteHandler sfExecute = getStepFunctionsExecuteNode();
Mockito.doReturn(new RuntimeException()).when(sfExecute).getStartExecutionRequest();
NodeInput input = new NodeInput();
input.setInput(JsonUtils.mapper.readTree(new FileInputStream(getFileName("SFExecuteSuccessGraph.json"))));
sfExecute.apply(input);
}
@Test
public void testParameterValidation() throws FileNotFoundException, IOException {
StepFunctionsExecuteHandler sfExecute = getStepFunctionsExecuteNode();
Mockito.doReturn(sfExecuteRequest).when(sfExecute).getStartExecutionRequest();
NodeInput input = new NodeInput();
input.setInput(JsonUtils.mapper.readTree(new FileInputStream(getFileName("emptyInput.json"))));
try {
sfExecute.apply(input);
Assert.fail();
} catch (Exception e) {
Assert.assertEquals(e.getMessage(),
"java.lang.IllegalArgumentException: parameter: name required but not present");
}
input.setInput(JsonUtils.mapper.readTree(new FileInputStream(getFileName("missingInput.json"))));
try {
sfExecute.apply(input);
} catch (Exception e) {
Assert.fail();
}
input.setInput(JsonUtils.mapper.readTree(new FileInputStream(getFileName("missingARN.json"))));
try {
sfExecute.apply(input);
Assert.fail();
} catch (Exception e) {
Assert.assertEquals(e.getMessage(),
"java.lang.IllegalArgumentException: parameter: arn required but not present");
}
}
private StepFunctionsExecuteHandler getStepFunctionsExecuteNode() {
StepFunctionsExecuteHandler sfExecute = this.injector.getInstance(StepFunctionsExecuteHandler.class);
StepFunctionsExecuteHandler spySfExecute = Mockito.spy(sfExecute);
return spySfExecute;
}
private String getFileName(String fileName) {
return "src/test/java/com/github/piedpiper/node/stepfunctions/resources/" + fileName;
}
}
|
#set( $symbol_pound = '#' )
#set( $symbol_dollar = '$' )
#set( $symbol_escape = '\' )
package ${package};

import org.easymock.EasyMock;
import org.easymock.EasyMockSupport;
// Bug fix: @Before was used below without this import, so every project
// generated from this archetype failed to compile.
import org.junit.Before;
import org.junit.Test;

/**
 * Unit test to exercise the functionality of the {@link ${pluginName}Envoy} class.
 */
public class ${pluginName}EnvoyTest
{
    /**
     * Sets up resources in preparation for tests. Executed before each test.
     */
    @Before
    public void setup()
    {
    }
}
|
import pandas as pd
def detect_outliers(data, threshold=3.0):
    """Replace outliers in *data* with the mean of the series.

    A point is an outlier when the magnitude of its z-score
    ``(x - mean) / std`` exceeds *threshold* (default: 3 standard deviations).

    Args:
        data: pandas Series of numeric values.
        threshold: z-score magnitude above which a point counts as an outlier.

    Returns:
        A new pandas Series with outliers replaced by the mean of the
        original data; the input Series is left unmodified.
    """
    mean = data.mean()
    std_dev = data.std()
    # A constant or single-element series has zero/NaN std: nothing can be
    # an outlier, and dividing by it would be meaningless.
    if not std_dev or pd.isna(std_dev):
        return data.copy()
    # Bug fix: the original compared the z-score against mean + 3*std (a
    # data-scale value, not a z-score bound), and rebound the loop variable,
    # which never writes back into the Series. Use a boolean mask instead.
    z_scores = (data - mean) / std_dev
    result = data.copy()
    result[z_scores.abs() > threshold] = mean
    return result
# Small demo: run outlier detection on a fixed sample and print the result.
sample_values = [1.2, 3.4, 4.5, 3.1, 2.2, 4.9, 4.2, 2.1, 1.1, 5.1, 5.2]
print(detect_outliers(pd.Series(sample_values)))
|
<gh_stars>0
const cheerio = require("cheerio");
const { toTimeSec } = require("./time-format");
/**
 * Parse a comma-grouped integer string (e.g. "1,234") into a number.
 * @param {String} value digit string, possibly with "," thousands separators;
 *     may be undefined when the scraped element was missing
 * @returns {Number|undefined} the parsed integer, or undefined when the
 *     input itself is undefined
 */
const getInt = (value) => {
    if(value===undefined){
        return value;
    }
    // Strip thousands separators; explicit radix 10 so the result can never
    // depend on the string's prefix.
    return parseInt(value.replace(/,/g, ""), 10);
};
/**
 * Parses a Niconico search-result HTML page into a summary object.
 *
 * @param {String} html raw HTML of the result page
 * @param {String} search_target "tag" for tag search, "search" for keyword
 *     search — selects which total-count element the page layout uses
 * @returns {{total_num: Number, list: Array}} total hit count plus one entry
 *     per video (thumbnail, id, title, counters, length, upload time, tags)
 * @throws {Error} when search_target is unknown, or when a required field is
 *     missing from a result item (i.e. the page markup changed)
 */
const nicoSearchHtmlParse = (html, search_target) => {
    const $ = cheerio.load(html);
    const result = {
        total_num:0,
        list:[]
    };
    if(search_target == "tag"){
        // Tag search: the total lives in the ".dataValue" block whose label
        // contains タグを含む動画 ("videos containing this tag").
        const total_elm = $('.dataValue').filter(function() {
            const r = $(this).text().trim().match(/タグを含む動画/);
            return r !== null;
        });
        const total = total_elm.find(".num").text();
        result.total_num = getInt(total);
    }else if(search_target == "search"){
        // Keyword search: the total is in ".searchTotal".
        const total_elm = $(".searchTotal");
        const total = total_elm.text();
        result.total_num = getInt(total);
    }else{
        throw new Error(`search_target=${search_target}はtagでもsearchでもない`);
    }
    // One <li data-video-item> per video hit.
    const elms = $("ul[data-video-list] > li[data-video-item]");
    elms.each((i, el) => {
        const video_item_elm = $(el);
        const video_id = video_item_elm.data().videoId;
        const start_time = video_item_elm.find(".video_uploaded > .time").text();
        const thumb_url = video_item_elm.find("img[data-thumbnail]").data().original;
        const video_len = video_item_elm.find("span.videoLength").text();
        const item_content_elm = video_item_elm.find(".itemContent");
        const title = item_content_elm.find(".itemTitle > a").text();
        const comments = item_content_elm.find(".itemComment").text().trim();
        const item_data_elm = video_item_elm.find(".itemData > .list");
        const view_count = item_data_elm.find(".view > .value").text();
        const comment_count = item_data_elm.find(".comment > .value").text();
        const items = {
            thumbnailUrl:thumb_url,
            contentId:video_id,
            title:unescape(title),
            viewCounter:getInt(view_count),
            commentCounter:getInt(comment_count),
            lengthSeconds:toTimeSec(video_len),
            startTime:start_time,
            tags:comments,
        };
        // Fail fast if any scraped field is undefined: that means the page
        // markup no longer matches the selectors above.
        let error_msg = "";
        Object.keys(items).forEach(key => {
            if(items[key]===undefined){
                error_msg += `${key}の値が不正, `;
            }
        });
        if(error_msg.length>0){
            throw new Error(`nicoSearchHtmlParse: ${error_msg}`);
        }
        result.list.push(items);
    });
    return result;
};

module.exports = {
    nicoSearchHtmlParse,
};
|
package composite
import "fmt"
// Example_one builds a treasure chest wrapped in three chests-of-chests and
// prints the result of opening the outermost one.
func Example_one() {
	chest := NewChest(TreasureChestType, nil)
	// Wrap the treasure chest in three nesting layers.
	for i := 0; i < 3; i++ {
		chest = NewChest(ChestOfChestType, chest)
	}
	fmt.Println(chest.Open())
	// Output:
	// [[[$]]]
}
|
package kazura
import chisel3._
import chisel3.util.experimental.BoringUtils
import kazura.models.InstInfo
import kazura.modules.{BranchPredictor, RFWrite, ROB}
import kazura.util.Params._
import kazura.stages._
// Elaboration entry point: instantiates the Hart core with a small test
// program and initial data memory, padded out to the full memory sizes.
object Main {
  // Test program (16-bit instruction encodings). Computes the sum 1+2+...+10
  // into $2 via a loop, then copies it into $4.
  val prog = Seq(
    "b1001_001_000000000".U, // 0: $1 = 0
    "b1001_010_000000000".U, // 1: $2 = 0
    "b1001_011_000001001".U, // 2: $3 = 9
    "b1001_100_000000000".U, // 3: $4 = 0
    "b0101_001_000000001".U, // 4: $1 += 1
    "b0001_010_001_000000".U, // 5: $2 += $1
    "b1101_001_011_000011".U, // 6: if($1 > $3) pc+= 3
    "h0000".U, // 7: NOP
    "b1110_000_111111100".U, // 8: pc = pc - 3 - 1  (index comment fixed: was labelled 7)
    "b0001_100_010_000000".U // 9: $4 += $2  (index comment fixed: was labelled 8)
  )
  // Data needed to solve the university-assigned exercise.
  val mem_init = Seq(
    "x7530".U,
    "x1".U,
    "x64".U,
    "x8".U,
    "x7d9".U,
    "x64".U,
    "x40".U,
    "x3b".U,
    "x57".U,
    "x2" .U,
    "x62".U,
    "x44".U,
    "x30".U,
    "x54".U,
    "x16".U,
    "x2a".U,
    "x45".U,
    "x2d".U,
    "x49".U,
    "x29".U,
    "x1f".U,
    "x52".U,
    "x2c".U,
    "x5d".U,
    "x48".U,
    "x04".U,
    "x5e".U,
    "x1d".U,
    "x38".U,
    "x2b".U,
    "x1c".U,
    "x21".U,
    "x3d".U,
    "x4c".U,
    "x03".U,
    "x40".U,
    "x34".U,
    "x3a".U,
    "xe".U,
  )
  // Pads the program to 32 words and data memory to 256 words, then elaborates.
  def main(args: Array[String]): Unit = {
    chisel3.Driver.execute(args, () => new Hart(
      prog ++ Seq.fill(32 - prog.length)("h0000".U),
      mem_init ++ Seq.fill(256 - mem_init.length)("h0000".U),
    ))
  }
}
// Debug/observation outputs exposed by the core.
class HartIO extends Bundle {
  val pc: UInt = Output(UInt(LEN.W))               // current program counter
  val total_cnt: UInt = Output(UInt(LEN.W))        // cycle/instruction counter from IF
  val rf: Vec[UInt] = Output(Vec(RF.NUM, UInt(LEN.W))) // register-file contents (debug view)
  val is_halt: Bool = Output(Bool())               // latched halt flag
}
// Top-level core: wires together fetch (IF), decode (ID), execute (EX), the
// data memory (IM), the reorder buffer (ROB) and a (currently disabled)
// branch predictor.
//
// @param im         initial instruction memory contents
// @param dummy_data initial data memory contents
class Hart(val im: Seq[UInt], val dummy_data: Seq[UInt]) extends Module {
  val io: HartIO = IO(new HartIO)
  val m_bp: BranchPredictor = Module(new BranchPredictor())
  val s_if: IF = Module(new IF(im))
  val s_id: ID = Module(new ID)
  val s_ex: EX = Module(new EX)
  val s_im: DataMemory = Module(new DataMemory(dummy_data))
  val m_rob: ROB = Module(new ROB)

  // Halt latch: once a committed instruction signals halt, stay halted.
  val is_halt: Bool = RegInit(false.B)
  is_halt := is_halt | m_rob.io.commit_inst_info(0).ctrl.is_halt
  io.is_halt := is_halt

  // val predict: Bool = m_bp.io.predict // output from the branch predictor
  val predict: Bool = false.B // predictor disabled: always predict not-taken
  // --------------------
  // IF
  s_if.io.in.predict := predict
  s_if.io.in.predict_enable := s_id.io.inst_info.ctrl.is_branch
  s_if.io.in.predict_pc := s_id.io.jump_pc
  s_if.io.in.branch_mispredicted := s_ex.io.mispredicted
  s_if.io.in.branch_graduated := s_ex.io.inst_info_out.ctrl.is_branch // TODO: remove -- mispredicted is now asserted only on branches
  s_if.io.in.restoration_pc := s_ex.io.restoration_pc_out
  s_if.io.in.is_jump := s_id.io.inst_info.ctrl.is_jump
  s_if.io.in.jump_pc := s_id.io.jump_pc
  s_if.io.in.stall := s_id.io.stall
  s_if.io.in.is_halt := is_halt
  // --------------------
  // BP (trained from EX results; its prediction output is unused above)
  m_bp.io.pc := s_if.io.out.pc
  m_bp.io.stall := s_id.io.stall
  m_bp.io.learning.valid := s_ex.io.inst_info_out.ctrl.is_branch
  m_bp.io.learning.bits.result := s_ex.io.alu_out
  m_bp.io.learning.bits.pc := s_ex.io.pc_out
  // --------------------
  // ID
  s_id.io.predict := predict // because the branch predictor is not implemented yet
  s_id.io.branch_mispredicted := s_ex.io.mispredicted
  s_id.io.branch_graduated := s_ex.io.inst_info_out.ctrl.is_branch
  s_id.io.if_out := s_if.io.out
  s_id.io.commit(0) := m_rob.io.commit(0)
  s_id.io.commit(1) := s_im.io.mem_out
  s_id.io.unreserved_head := m_rob.io.unreserved_head
  // --------------------
  // EX
  s_ex.io.predict := predict
  s_ex.io.inst_info := s_id.io.inst_info
  s_ex.io.source := s_id.io.source
  s_ex.io.rd := s_id.io.rd
  s_ex.io.next_pc := s_id.io.next_pc
  s_ex.io.branch_pc := s_id.io.jump_pc
  s_ex.io.pc := s_id.io.pc
  // --------------------
  // IM (driven by the committing ROB entry)
  s_im.io.inst_info := m_rob.io.commit_inst_info(0)
  s_im.io.rob_out := m_rob.io.commit(0)
  s_im.io.rd_out := m_rob.io.commit_rd(0)
  // --------------------
  // ROB (graduation port 1 is tied off; the memory path below is disabled)
  m_rob.io.used_num := s_id.io.used_num
  m_rob.io.graduate(0).valid := s_ex.io.inst_info_out.valid
  m_rob.io.graduate(0).bits.addr := s_ex.io.inst_info_out.rob_addr
  m_rob.io.graduate(0).bits.rd := s_ex.io.rd_out
  m_rob.io.graduate(0).bits.mispredicted := s_ex.io.mispredicted
  m_rob.io.graduate(0).bits.inst_info := s_ex.io.inst_info_out
  m_rob.io.graduate(0).bits.data := s_ex.io.alu_out
  // m_rob.io.graduate(1).valid := s_im.io.inst_info.valid
  // m_rob.io.graduate(1).bits.addr := s_im.io.inst_info.rob_addr
  // m_rob.io.graduate(1).bits.mispredicted := false.B
  // m_rob.io.graduate(1).bits.inst_info := s_im.io.inst_info
  // m_rob.io.graduate(1).bits.data := s_im.io.mem_out
  m_rob.io.graduate(1).valid := false.B
  m_rob.io.graduate(1).bits.addr := 0.U
  m_rob.io.graduate(1).bits.rd := 0.U
  m_rob.io.graduate(1).bits.mispredicted := false.B
  m_rob.io.graduate(1).bits.inst_info := InstInfo.nop
  m_rob.io.graduate(1).bits.data := 0.U
  // --------------------
  // IO (debug outputs)
  io.pc := s_if.io.out.pc
  io.total_cnt := s_if.io.out.total_cnt
  io.rf := s_id.io.rf4debug
}
|
/* *****************************************************************************
* Caleydo - Visualization for Molecular Biology - http://caleydo.org
* Copyright (c) The Caleydo Team. All rights reserved.
* Licensed under the new BSD license, available at http://caleydo.org/license
**************************************************************************** */
/**
* Created by <NAME> on 04.08.2014.
*/
import {AppContext} from '../app/AppContext';
import {ParseRangeUtils} from '../range';
import {Range} from '../range/Range';
import {IValueType, ValueTypeUtils} from '../data';
import {IVectorDataDescription} from './IVector';
import {IDTypeManager} from '../idtype/IDTypeManager';
/**
 * Result of loading a vector dataset: row ids, row names and the cell data.
 * @internal
 */
export interface IVectorLoaderResult<T> {
  readonly rowIds: Range;   // ids of the loaded rows
  readonly rows: string[];  // row names, parallel to rowIds
  readonly data: T[];       // cell values, parallel to rows
}
/**
 * Loader function resolving a vector description to its loaded data.
 * @internal
 */
export interface IVectorLoader<T> {
  (desc: IVectorDataDescription<any>): Promise<IVectorLoaderResult<T>>;
}
export class VectorLoaderUtils {
  /**
   * Creates a loader that fetches the vector from the server API, caching the
   * resulting promise so the request is issued at most once.
   * @internal
   */
  static viaAPILoader<T>() {
    let cached: Promise<IVectorLoaderResult<T>> = undefined;
    return (desc: IVectorDataDescription<any>) => {
      if (!cached) {
        cached = AppContext.getInstance().getAPIJSON('/dataset/' + desc.id).then((data) => {
          // Normalize the row ids into a Range and unmask the raw values.
          const range = ParseRangeUtils.parseRangeLike(data.rowIds);
          data.rowIds = range;
          data.data = ValueTypeUtils.mask(data.data, desc.value);
          // Warm the id-type mapping cache with the freshly loaded row names.
          const idType = IDTypeManager.getInstance().resolveIdType(desc.idtype);
          idType.fillMapCache(range.dim(0).asList(data.rows.length), data.rows);
          return data;
        });
      }
      return cached;
    };
  }

  /**
   * Creates a loader that serves the given in-memory rows/ids/data,
   * materializing (and caching) the result object on first call.
   * @internal
   */
  static viaDataLoader<T>(rows: string[], rowIds: number[], data: IValueType[]) {
    let cached: IVectorLoaderResult<T> = undefined;
    return () => {
      if (!cached) {
        cached = {
          rowIds: ParseRangeUtils.parseRangeLike(rowIds),
          rows,
          data
        };
      }
      return Promise.resolve(cached);
    };
  }
}
|
import React, { useState } from "react"
import Portfolio1 from "../../images/portfolio/port1.jpg"
import Portfolio2 from "../../images/portfolio/port2.jpg"
import Portfolio3 from "../../images/portfolio/port3.jpg"
import Portfolio4 from "../../images/portfolio/port4.jpg"
import Portfolio5 from "../../images/portfolio/port5.jpg"
import Portfolio6 from "../../images/portfolio/port6.jpg"
// Portfolio items; `category` drives the filter buttons below
// (cat1 = HTML, cat2 = WordPress, cat3 = Joomla).
const list = [
  {
    category: "cat1",
    id: "1",
    imgSource: Portfolio1,
    title: "Portfolio Title",
    tag: "Business || Finance",
  },
  {
    category: "cat1",
    id: "2",
    imgSource: Portfolio2,
    title: "Portfolio Title",
    tag: "Business || Finance",
  },
  {
    category: "cat2",
    id: "3",
    imgSource: Portfolio3,
    title: "Portfolio Title",
    tag: "Business || Finance",
  },
  {
    category: "cat1",
    id: "4",
    imgSource: Portfolio4,
    title: "Portfolio Title",
    tag: "Business || Finance",
  },
  {
    category: "cat2",
    id: "5",
    imgSource: Portfolio5,
    title: "Portfolio Title",
    tag: "Business || Finance",
  },
  {
    category: "cat3",
    id: "6",
    imgSource: Portfolio6,
    title: "Portfolio Title",
    tag: "Business || Finance",
  },
]
const Element = ({ imgSource, title, tag }) => (
<div className="col-md-4 col-sm-6">
<div className="portfolio-post-2 mb30">
<img src={imgSource} alt="Portfolio Image" />
<div className="portfolio-details">
<h4>{title}</h4>
<p>{tag}</p>
<a href="#">
<i className="icon-attachment"></i>
</a>
</div>
</div>
</div>
)
const PortfolioCol3Style1 = () => {
const [display, setDisplay] = useState(list)
const handleDisplay = category => {
const categoryToDisplay = list.filter(item => item.category === category)
setDisplay(categoryToDisplay)
}
return (
<>
<ul className="filter">
<li>
<button onClick={() => setDisplay(list)}>Show All</button>
</li>
<li>
<button onClick={() => handleDisplay("cat1")}>HTML</button>
</li>
<li>
<button onClick={() => handleDisplay("cat2")}>WordPress</button>
</li>
<li>
<button onClick={() => handleDisplay("cat3")}>Joomla</button>
</li>
</ul>
<div className="row portfolio-box">
{display.map(element => (
<Element key={element.id} {...element} />
))}
</div>
</>
)
}
export default PortfolioCol3Style1
|
#!/bin/bash
# Copyright (C) 2009-2013 Team XBMC
# http://xbmc.org
#
# This Program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This Program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with XBMC; see the file COPYING. If not, write to
# the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.
# http://www.gnu.org/copyleft/gpl.html
# Autoconf-style install locations inside the snap.
prefix="$SNAP"
exec_prefix="$SNAP"
# Bug fix: $datarootdir was never set, so KODI_HOME expanded to "/kodi".
# ${prefix}/share is the autoconf default -- TODO confirm against the snap layout.
datarootdir="${prefix}/share"
bindir="${exec_prefix}/bin"
bin_name=kodi

# Full command line; snap arguments are passed through to Kodi.
# (The original dead "APP=Kodi" placeholder assignment has been removed.)
APP="${bindir}/${bin_name} --standalone $@"

# Path for Kodi data.
export KODI_HOME="$datarootdir/kodi"

# Map the snap architecture onto the multiarch triplet used for GL drivers.
if [ "$SNAP_ARCH" == "amd64" ]; then
  ARCH="x86_64-linux-gnu"
elif [ "$SNAP_ARCH" == "armhf" ]; then
  ARCH="arm-linux-gnueabihf"
else
  ARCH="$SNAP_ARCH-linux-gnu"
fi
export LIBGL_DRIVERS_PATH=$SNAP/usr/lib/$ARCH/dri

# Start PulseAudio support if a helper is available.
PULSE_START="$(which start-pulseaudio-x11)"
if [ -n "$PULSE_START" ]; then
  $PULSE_START
else
  PULSE_SESSION="$(which pulse-session)"
  if [ -n "$PULSE_SESSION" ]; then
    # Bug fix: this used to assign to the unused $XBMC variable (leftover from
    # the XBMC->Kodi rename), so pulse-session was never actually applied.
    APP="$PULSE_SESSION $APP"
  fi
fi

# Crash-loop guard: restart Kodi on unclean exits, but give up after three
# crashes within 60 seconds of a (re)start.
LOOP=1
CRASHCOUNT=0
LASTSUCCESSFULSTART=$(date +%s)
while [ $(( $LOOP )) = "1" ]
do
  $APP
  RET=$?
  NOW=$(date +%s)
  if [ $(( ($RET >= 64 && $RET <=66) || $RET == 0 )) = "1" ]; then # clean exit
    LOOP=0
  else # crash
    DIFF=$((NOW-LASTSUCCESSFULSTART))
    if [ $(($DIFF > 60 )) = "1" ]; then # not at startup: reset the counter
      LASTSUCCESSFULSTART=$NOW  # bug fix: was misspelled LASTSUCESSFULSTART, so the window never reset
      CRASHCOUNT=0
    else # crashed right after startup, look sharp
      CRASHCOUNT=$((CRASHCOUNT+1))
      if [ $(($CRASHCOUNT >= 3)) = "1" ]; then # too many, bail out
        LOOP=0
        echo "${APP} has exited in an unclean state 3 times in the last ${DIFF} seconds."
        echo "Something is probably wrong"
      fi
    fi
  fi
done
|
# Training configuration for the "late_cifar_base" CIFAR-10 experiment.
# Earlier ADDITIONAL variants are kept below, commented out, for reference.
# export BATCH_SIZE=64
# export ADDITIONAL='--critic_type=kplusone_fm --kplusone_mhinge_cond_discriminator_weight=1.0 --aux_mhinge_cond_generator_weight=0.05'
# export ADDITIONAL='--critic_type=kplusone_fm --generator_loss_fn=kplusone_ssl_featurematching_generator_loss --kplusone_mhinge_ssl_cond_discriminator_weight=1.0 --aux_mhinge_cond_generator_weight=0.05'
export EXPERIMENT_NAME=late_cifar_base
export BATCH_SIZE=64
export TRAIN_STEPS_PER_EVAL=10000
# CIFAR-10: 32x32 images, 10 classes, evaluate on the "test" split.
export DATASET_ARGS='--image_size=32 --dataset_name=cifar10 --num_classes=10 --dataset_val_split_name=test'
# for k+1 mhinge with fm
# export ADDITIONAL='--critic_type=kplusone_fm \
# --generator_loss_fn=kplusone_featurematching_generator_loss \
# --kplusone_mhinge_cond_discriminator_weight=1.0 \
# --aux_mhinge_cond_generator_weight=0.05 \
# --tpu_gan_estimator_d_step=4'
# Active configuration: ACGAN multi-projection critic with extra eval metrics.
export ADDITIONAL='--critic_type=acgan_multiproj \
--extra_eval_metrics \
--eval_batch_size=1024 \
--num_eval_steps=9'
# Delegate to the shared launcher with the exported settings.
bash tpu/_base.sh
|
const RankingProgress = require('../../models/RankingProgress')
const brawlhallaApi = require('../../brawlhalla_api')
const config = require('../../../config')
const utils = require('../../utils')
const players = require('./players')
// Crawls one page of the Brawlhalla 1v1 ranking per invocation, updates every
// player on that page, then re-schedules itself with a delay that keeps the
// process under the configured API rate limit.
async function cronFn() {
  const resetRankingPageSeconds = 60 * 60 * 12 // restart from page 1 every 12h
  const initialTs = Date.now()
  let apiCallsMade = 0
  try {
    // Determine the next page to crawl from the persisted progress row.
    const now = Math.floor(Date.now() / 1000)
    let page = 1
    let rankingPos = await RankingProgress.findByPk('1v1/all')
    if (rankingPos === null) {
      // Bug fix: create() returns a promise; without `await`, the later
      // rankingPos.first_page_crawl_ts read saw a Promise (undefined field).
      rankingPos = await RankingProgress.create({
        type: '1v1/all',
        first_page_crawl_ts: now,
        page: 0,
      })
    } else {
      page = rankingPos.page + 1
    }
    // Process the page.
    if (config.debug) console.debug(`Processing page ${page}`)
    apiCallsMade++
    const ranking = await brawlhallaApi.get(`rankings/1v1/all/${page}`)
    if (ranking.error) {
      console.error(`Brawlhalla API down?: ${ranking.error}`)
    } else {
      const playerUpdates = await utils.forEachAndWaitForAsyncs(ranking, players.updatePlayer)
      apiCallsMade += playerUpdates.reduce((tot, curr) => tot + curr.apiCallsMade, 0)
      if (config.debug) console.debug(`Finished processing page ${page}. ${apiCallsMade} API calls made`)
    }
    // Persist the new page position; restart from page 0 once the reset
    // window has elapsed. Awaited so failures land in the catch below.
    if (now - rankingPos.first_page_crawl_ts > resetRankingPageSeconds) {
      await RankingProgress.update({ page: 0, first_page_crawl_ts: now }, { where: { type: '1v1/all' } })
    } else {
      await RankingProgress.update({ page }, { where: { type: '1v1/all' } })
    }
  } catch (e) {
    console.error(e)
  }
  // Sleep long enough to stay under queries_per_15_min (with a safety
  // margin), accounting for the time this run already consumed.
  const msPerApiCall = (1000 * 60 * 15) / config.queries_per_15_min
  const margin = 0.9 // reduce our theoretical max limit a bit, to avoid hitting rate limits
  const msToWait = (msPerApiCall / margin) * apiCallsMade
  const msElapsed = Date.now() - initialTs
  if (config.debug) console.log(`To Wait: ${msToWait / 1000}s / Elapsed: ${msElapsed / 1000}s`)
  setTimeout(cronFn, msToWait - msElapsed)
}
module.exports = cronFn
|
<reponame>tomegorny/temp<filename>messaging-with-jms-using-payara/src/main/java/de/rieckpil/blog/StockPublisher.java
package de.rieckpil.blog;
import java.time.Instant;
import java.util.concurrent.ThreadLocalRandom;
import javax.annotation.Resource;
import javax.ejb.Schedule;
import javax.ejb.Singleton;
import javax.jms.Connection;
import javax.jms.ConnectionFactory;
import javax.jms.JMSException;
import javax.jms.MessageProducer;
import javax.jms.Queue;
import javax.jms.Session;
import javax.jms.TextMessage;
import javax.json.Json;
import javax.json.JsonObject;
/**
 * Publishes a randomly generated stock quote to the "jms/stocks" queue every
 * two seconds via an EJB timer.
 */
@Singleton
public class StockPublisher {

    // Container-managed default JMS connection factory.
    @Resource(lookup = "jms/__defaultConnectionFactory")
    private ConnectionFactory jmsFactory;

    // Destination queue for the stock messages.
    @Resource(lookup = "jms/stocks")
    private Queue jmsQueue;

    // Symbols to pick from at random for each message.
    private String[] stockCodes = { "MSFT", "GOOGL", "AAPL", "AMZN" };

    /**
     * Timer callback: builds a JSON payload with a random symbol, a random
     * price in [1.0, 150.0) and the current epoch-millis timestamp, and sends
     * it as a JMS TextMessage. Connection, session and producer are released
     * by try-with-resources; JMS failures are only logged.
     */
    @Schedule(second = "*/2", minute = "*", hour = "*", persistent = false)
    public void sendStockInformation() {
        TextMessage message;
        try (Connection connection = jmsFactory.createConnection();
                Session session = connection.createSession(false, Session.AUTO_ACKNOWLEDGE);
                MessageProducer producer = session.createProducer(jmsQueue)) {
            JsonObject stockInformation = Json.createObjectBuilder()
                    .add("stockCode", stockCodes[ThreadLocalRandom.current().nextInt(stockCodes.length)])
                    .add("price", ThreadLocalRandom.current().nextDouble(1.0, 150.0))
                    .add("timestamp", Instant.now().toEpochMilli()).build();
            message = session.createTextMessage();
            message.setText(stockInformation.toString());
            producer.send(message);
        } catch (JMSException e) {
            e.printStackTrace();
        }
    }
}
|
#!/bin/bash
# Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
if [ -z ${BRANCH} ]; then
BRANCH="develop"
fi
PADDLE_ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}")/../" && pwd )"
approval_line=`curl -H "Authorization: token ${GITHUB_API_TOKEN}" https://api.github.com/repos/PaddlePaddle/Paddle/pulls/${GIT_PR_ID}/reviews?per_page=10000`
failed_num=0
echo_list=()
# check_approval <min-approvals> <github-user-id>...
# Checks the PR's review list for at least <min-approvals> approvals from the
# listed users; on failure, records the pending ${echo_line} message.
function check_approval(){
    person_num=`echo $@|awk '{for (i=2;i<=NF;i++)print $i}'`  # drop the count, keep the user ids
    APPROVALS=`echo ${approval_line}|python ${PADDLE_ROOT}/tools/check_pr_approval.py $1 $person_num`
    if [[ "${APPROVALS}" == "FALSE" && "${echo_line}" != "" ]]; then
        add_failed "${failed_num}. ${echo_line}"
    fi
}
# add_failed <message> -- append one approval failure for the final report.
function add_failed(){
    failed_num=`expr $failed_num + 1`
    echo_list="${echo_list[@]}$1"
}
api_params_diff=`python ${PADDLE_ROOT}/tools/check_api_compatible.py ${PADDLE_ROOT}/paddle/fluid/API_DEV.spec ${PADDLE_ROOT}/paddle/fluid/API_PR.spec`
api_spec_diff=`python ${PADDLE_ROOT}/tools/diff_api.py ${PADDLE_ROOT}/paddle/fluid/API_DEV.spec.api ${PADDLE_ROOT}/paddle/fluid/API_PR.spec.api`
if [ "$api_spec_diff" != "" -o "${api_params_diff}" != "" ]; then
echo_line="You must have one RD (XiaoguangHu01, lanxianghit or Superjomn) approval for API change.\n"
echo_line="${echo_line} and one TPM approval for API change: \n"
echo_line="${echo_line} jzhang533/ZhangJun, dingjiaweiww/DingJiaWei, TCChenlong/ChenLong, Ligoml/LiMengLiu for general APIs.\n"
echo_line="${echo_line} liuTINA0907/LiuShuangQiao for distributed related APIs.\n"
echo_line="${echo_line} leiqing1/LeiQing for inference related APIs.\n"
check_approval 1 46782768 47554610 328693
check_approval 1 29231 23093488 11935832 39876205 65896652 54695910
fi
api_doc_spec_diff=`python ${PADDLE_ROOT}/tools/diff_api.py ${PADDLE_ROOT}/paddle/fluid/API_DEV.spec.doc ${PADDLE_ROOT}/paddle/fluid/API_PR.spec.doc`
if [ "$api_doc_spec_diff" != "" ]; then
echo_line="You must have one TPM approval for API documents change: \n"
echo_line="${echo_line} jzhang533/ZhangJun, dingjiaweiww/DingJiaWei, TCChenlong/ChenLong, Ligoml/LiMengLiu for general API docs.\n"
echo_line="${echo_line} liuTINA0907/LiuShuangQiao for distributed related API docs.\n"
echo_line="${echo_line} leiqing1/LeiQing for inference related API docs.\n"
check_approval 1 29231 23093488 11935832 39876205 65896652 54695910
fi
api_src_spec_diff=`python ${PADDLE_ROOT}/tools/check_api_source_without_core_ops.py ${PADDLE_ROOT}/paddle/fluid/API_DEV.source.md5 ${PADDLE_ROOT}/paddle/fluid/API_PR.source.md5`
if [ "$api_src_spec_diff" != "" ]; then
echo_line="APIs without core.ops: \n${api_src_spec_diff}\n"
echo_line="${echo_line}You must have one RD (zhiqiu (Recommend) or phlrain) approval for the api change for the opreator-related api without '_C_ops'.\n"
echo_line="${echo_line}For more details, please click [https://github.com/PaddlePaddle/Paddle/wiki/paddle_api_development_manual.md]\n"
check_approval 1 6888866 43953930
fi
op_type_spec_diff=`python ${PADDLE_ROOT}/tools/check_op_register_type.py ${PADDLE_ROOT}/paddle/fluid/OP_TYPE_DEV.spec ${PADDLE_ROOT}/paddle/fluid/OP_TYPE_PR.spec`
if [ "$op_type_spec_diff" != "" ]; then
echo_line="You must have one RD (Aurelius84 (Recommend) or zhhsplendid)approval for the data_type registration of new operator. More data_type of new operator should be registered in your PR. Please make sure that both float/double (or int/int64_t) have been registered.\n For more details, please click [https://github.com/PaddlePaddle/Paddle/wiki/Data-types-of-generic-Op-must-be-fully-registered].\n"
check_approval 1 9301846 7913861
fi
op_desc_diff=`python ${PADDLE_ROOT}/tools/check_op_desc.py ${PADDLE_ROOT}/paddle/fluid/OP_DESC_DEV.spec ${PADDLE_ROOT}/paddle/fluid/OP_DESC_PR.spec`
inference_approve=`echo "$op_desc_diff" | grep "need inference to review" -`
slim_approve=`echo "$op_desc_diff" | grep "need slim to review" -`
if [ "$op_desc_diff" != "" ]; then
echo_line="You must have one RD (inference[ Superjomn(Recommend), Shixiaowei02, cyj1986 ] or slim[ wanghaoshuang(Recommend), qingqing01 ]) approval for the changes of Inputs/Output/Attrs of OPs. The changes of OPs will cause that the new version inference fails to load model trained by the old version. Please modify your code. \n For more details, please click [https://github.com/PaddlePaddle/Paddle/wiki/OP-Input-Output-Attribute-Compatibility-Modification].\n${op_desc_diff}\n"
check_approval 1 39645414 328693 39303645 7534971 7845005
fi
if [ "$slim_approve" != "" ]; then
echo_line="You must have one RD (wanghaoshuang(Recommend), qingqing01) approval for the changes of `quant` Inputs/Output/Attrs of OPs. \n For more details, please click [https://github.com/PaddlePaddle/Paddle/wiki/OP-Input-Output-Attribute-Compatibility-Modification].\n${slim_approve}\n"
check_approval 1 7534971 7845005
fi
if [ "$inference_approve" != "" ]; then
echo_line="You must have one RD (Superjomn(Recommend), Shixiaowei02, cyj1986) approval for the changes of `def` Inputs/Output/Attrs of OPs. \n For more details, please click [https://github.com/PaddlePaddle/Paddle/wiki/OP-Input-Output-Attribute-Compatibility-Modification].\n${inference_approve}\n"
check_approval 1 39645414 328693 39303645
fi
DEV_OP_USE_DEFAULT_GRAD_MAKER_SPEC=${PADDLE_ROOT}/paddle/fluid/op_use_default_grad_maker_DEV.spec
PR_OP_USE_DEFAULT_GRAD_MAKER_SPEC=${PADDLE_ROOT}/paddle/fluid/op_use_default_grad_maker_PR.spec
ADDED_OP_USE_DEFAULT_GRAD_MAKER=`python ${PADDLE_ROOT}/tools/diff_use_default_grad_op_maker.py ${DEV_OP_USE_DEFAULT_GRAD_MAKER_SPEC} ${PR_OP_USE_DEFAULT_GRAD_MAKER_SPEC}`
if [ "${ADDED_OP_USE_DEFAULT_GRAD_MAKER}" != "" ]; then
echo_line="You must have one RD (zhiqiu (Recommend) or zhhsplendid) approval because you use DefaultGradOpMaker for ${ADDED_OP_USE_DEFAULT_GRAD_MAKER}, which manages the grad_op memory optimization.\n"
check_approval 1 6888866 7913861
fi
if [ -n "${echo_list}" ];then
echo "**************************************************************"
echo "Please find RD for approval first, and then find TPM for approval."
echo -e "${echo_list[@]}"
echo "There are ${failed_num} approved errors."
echo "**************************************************************"
# L40 L48 L62 has fetch the result out, but there are splitted.
if [ "${api_spec_diff}" != "" -o "${api_doc_spec_diff}" != "" ] ; then
python ${PADDLE_ROOT}/tools/diff_api.py ${PADDLE_ROOT}/paddle/fluid/API_DEV.spec ${PADDLE_ROOT}/paddle/fluid/API_PR.spec
fi
if [ "${api_params_diff}" != "" ] ; then
echo "api_params_diff: ${api_params_diff}"
fi
if [ "${op_type_spec_diff}" != "" ] ; then
echo "op_type_spec_diff: ${op_type_spec_diff}"
fi
exit 6
fi
|
/**
*
*/
package coca.ca;
import java.io.Closeable;
/**
*
* @author dzh
* @date Nov 14, 2016 12:46:26 PM
* @since 0.0.1
*/
/**
 * Minimal cache abstraction: a named, closable store of key/value pairs.
 */
public interface Ca<K, V> extends Closeable {

    /** @return the name identifying this cache instance */
    String name();

    /**
     * Looks up the value bound to the given key.
     *
     * @param key lookup key
     * @return the wrapped value for the key (presumably wraps a miss rather
     *         than returning null -- TODO confirm against implementations)
     */
    CaValue<K, V> read(K key);

    /**
     * Stores the given value.
     *
     * @param val value (with its key) to store
     * @return true if written successfully, otherwise to return false
     */
    boolean write(CaValue<K, V> val);

    /** @return true if this cache has been closed */
    boolean isClosed();

    /** @return which kind of backing store this cache uses */
    CaType type();

    /** Kind of cache backing store. */
    public static enum CaType {
        Local, Remote
    }
}
|
/// Returns the arithmetic mean of `scores`, rounded to the nearest integer.
///
/// Bug fix: for an empty array the unguarded division produced NaN and
/// `Int(round(_:))` traps on NaN at runtime; an empty input now yields 0.
func calculateAverage(_ scores: [Int]) -> Int {
    guard !scores.isEmpty else { return 0 }
    let sum = scores.reduce(0, +)
    let average = Double(sum) / Double(scores.count)
    return Int(round(average))
}
|
package com.vxml.tag;
import java.io.File;
import java.io.StringWriter;
import java.util.HashMap;
import java.util.Map;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerException;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;
/**
 * Base class for all VXML tag handlers: holds the backing DOM node and shared
 * plumbing (a process-wide property map, an id-to-tag registry, script
 * execution helpers and child-node execution).
 */
public abstract class AbstractTag implements Tag {

    // Properties set via setProperty; shared across all tag instances.
    // (Was a raw Map; the file already uses generics, so type it properly.)
    private static Map<String, String> propertyMap = new HashMap<String, String>();

    // Tags stored by id so other tags can refer back to them later.
    private static Map<String, Tag> tagRef = new HashMap<String, Tag>();

    // The DOM node this handler wraps.
    private Node node;

    public AbstractTag(Node node) {
        this.node = node;
    }

    /** Executes this tag (removed a dead commented-out brace left here). */
    public void performTag() {
        execute();
    }

    /** Runs the given script snippet in the shared player context. */
    public Object executeScript(String script) {
        return VxmlPlayer.context.executeScript(script);
    }

    /** Runs the given script file in the shared player context. */
    public Object executeScript(File file) {
        return VxmlPlayer.context.executeScript(file);
    }

    @Override
    public Node getNode() {
        return node;
    }

    /** Records a property value, visible to every tag handler. */
    public void setProperty(String property, String value) {
        propertyMap.put(property, value);
    }

    /**
     * @return the value of the named attribute on this tag's node, or null
     *         when the attribute is absent
     */
    public String getAttribute(String key) {
        Node namedItem = node.getAttributes().getNamedItem(key);
        return namedItem != null ? namedItem.getNodeValue() : null;
    }

    /** Registers a tag under an id for later retrieval. */
    public void storeTag(String id, Tag tag) {
        tagRef.put(id, tag);
    }

    /** @return the tag previously stored under the id, or null if unknown */
    public Tag retrieveTag(String id) {
        return tagRef.get(id);
    }

    /** Resolves a handler for each child node and executes them in order. */
    public void executeChildNodes() {
        NodeList list = node.getChildNodes();
        for (int i = 0; i < list.getLength(); i++) {
            Node n = list.item(i);
            Tag tag = TagHandlerFactory.getTag(n);
            ((AbstractTag) tag).performTag();
        }
    }

    /**
     * Serializes a DOM node to indented XML without the XML declaration.
     * On transform failure, logs and returns whatever was written so far
     * (possibly the empty string).
     */
    public static String nodeToString(Node node) {
        StringWriter sw = new StringWriter();
        try {
            Transformer t = TransformerFactory.newInstance().newTransformer();
            t.setOutputProperty(OutputKeys.OMIT_XML_DECLARATION, "yes");
            t.setOutputProperty(OutputKeys.INDENT, "yes");
            t.transform(new DOMSource(node), new StreamResult(sw));
        } catch (TransformerException te) {
            // Keep the original diagnostic, but no longer swallow the cause.
            System.out.println("nodeToString Transformer Exception");
            te.printStackTrace();
        }
        return sw.toString();
    }
}
|
<filename>services/issuer/src/issuer.ts<gh_stars>1-10
import createLogger from './lib/logger'
import setupDb from './setup/db'
import setupAgent from './setup/agent'
import setupIdentity from './setup/identity'
import credentialRequestService from './services/credentialRequests'
import backOffice from './services/backOffice'
const logger = createLogger('rif-id:main')
logger.info('Setting up')
/**
 * Boots the issuer service: connects the database and agent, ensures an
 * identity exists, then mounts the credential-request service and/or the
 * back office on the provided apps.
 *
 * NOTE(review): the destructured options object has no type annotation
 * (implicit `any`); consider introducing an options interface.
 */
export async function runIssuer ({
  secretBoxKey,
  rpcUrl,
  networkName,
  adminUser,
  adminPass,
  apps,
  credentialRequestServicePrefix,
  backOfficePrefix,
  launchCredentialRequestService,
  launchBackOffice,
  database,
  challengeExpirationInSeconds,
  authExpirationInHours,
  maxRequestsPerToken
}) {
  const dbConnection = setupDb(database)
  const agent = setupAgent(dbConnection, secretBoxKey, rpcUrl, networkName)
  await setupIdentity(agent);
  // Use the first (presumably the only) managed identity as the issuer.
  const identities = await agent.identityManager.getIdentities()
  const identity = identities[0]
  // Shared environment handed to both services.
  const env = {
    challengeExpirationInSeconds,
    authExpirationInHours,
    maxRequestsPerToken,
    signer: (await identity.keyByType('Secp256k1')).signer(),
    did: identity.did,
    rpcUrl,
    networkName
  }
  logger.info('Setting up services')
  // Credential requests go on the first app; the back office goes on the
  // second app when one is supplied, otherwise shares the first.
  if (launchCredentialRequestService) credentialRequestService(apps[0], agent, env, credentialRequestServicePrefix)
  if (launchBackOffice) backOffice(apps.length > 1 ? apps[1] : apps[0], agent, adminUser, adminPass, backOfficePrefix)
  logger.info('Services set up')
}
|
#!/usr/bin/env bash
# List (default) or kill (first arg "kill") all running node processes.
PIDS=$(pidof node)

# Nothing to do when no node process is running.
if [ -z "$PIDS" ]; then
    echo "no node pids"
    exit 1
fi

# $PIDS is intentionally unquoted below: it may hold several
# space-separated pids that must word-split into separate arguments.
if [ "$1" = "kill" ]; then
    set -x
    kill $PIDS
else
    set -x
    ps h -p $PIDS
fi
# pidof node
# ps h -j -p`pidof node`
# kill `pidof node`
|
<filename>src/aima/util/Util.java
/*
* Created on Aug 24, 2003 by <NAME>
*
*/
package aima.util;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;
public class Util {
public static Set listToSet(List l) {
Set s = new HashSet();
for (int i = 0; i < l.size(); i++) {
s.add(l.get(i));
}
return s;
}
public static List SetToList(Set s) {
List l = new ArrayList();
Iterator i = s.iterator();
while (i.hasNext()) {
l.add(i.next());
}
return l;
}
public static List rest(ArrayList l) {
List ls = (List) l.clone();
ls.remove(0);
return ls;
}
}
|
// https://uva.onlinejudge.org/external/107/10783.pdf
#include<bits/stdc++.h>
using namespace std;
int main() {
    // UVa 10783: for each test case, sum the odd numbers in [a, b].
    int cases;
    cin >> cases;
    for (int tc = 1; tc <= cases; tc++) {
        int lo, hi;
        cin >> lo >> hi;
        int total = 0;
        for (int v = lo; v <= hi; v++) {
            if (v % 2 != 0) {
                total += v;
            }
        }
        cout << "Case " << tc << ": " << total << "\n";
    }
}
|
#!/bin/sh
# MIT License
#
# (C) Copyright [2020] Hewlett Packard Enterprise Development LP
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
# Echo commands and abort on any failure.
set -ex
# Start the swagger server in the background, give it time to come up,
# then smoke-test the manager-info endpoint (curl --fail exits non-zero
# on an HTTP error status, failing the script).
python3 -m swagger_server &
sleep 5
curl --fail http://localhost:8088/v1/mgr-info
|
#!/bin/bash
# Script to deploy a very simple web application.
# The web app has a customizable image and some text.
# Render the landing page. The heredoc delimiter is unquoted, so
# ${PLACEHOLDER}, ${WIDTH} and ${HEIGHT} are expanded by the shell
# when the page is written; the heredoc body must stay exactly as-is
# because it IS the served HTML.
cat << EOM > /var/www/html/index.html
<html>
<head><title>Meow!</title></head>
<body>
<div style="width:800px;margin: 0 auto">
<!-- BEGIN -->
<center><img src="http://${PLACEHOLDER}/${WIDTH}/${HEIGHT}"></img></center>
<center><h2>Meow World!</h2></center>
Some interesting stuff
<!-- END -->
</div>
</body>
</html>
EOM
echo "Script complete."
|
import styled from 'styled-components'
import { layout, shadow } from 'styled-system'
import PropTypes from 'prop-types'
import { isMap } from 'styled-funcs'
import theme from './theme'
// Allowed corner shapes for the Image component.
const SHAPES = {
  square: 'square',
  round: 'round',
  rounded: 'rounded',
}

// Responsive image with a theme-driven, configurable corner shape.
const Image = styled.img.attrs(props => ({
  alt: '',
  theme: props.theme || theme,
  shape: props.shape || SHAPES.square,
}))`
  display: block;
  max-width: 100%;
  height: auto;
  border-radius: ${props =>
    // BUG FIX: the prop is named "shape" — the previous 'shapde' key meant
    // isMap never matched, so round/rounded radii were never applied.
    isMap('shape', {
      [SHAPES.round]: props.theme.radii.full,
      [SHAPES.rounded]: props.theme.radii.large,
      default: props.theme.radii.none,
    })(props)};
  ${layout}
  ${shadow}
`

Image.displayName = 'Image'

Image.propTypes = {
  ...layout.propTypes,
  ...shadow.propTypes,
  shape: PropTypes.oneOf(Object.keys(SHAPES)),
}

Image.SHAPES = SHAPES

export default Image
|
<html>
<head>
<title>Form Page</title>
<script>
// Blocks form submission when the user_input field is empty.
function validateForm() {
    var value = document.forms[0]["user_input"].value;
    if (value == "") {
        alert("Input must be filled out");
        return false;
    }
}
</script>
</head>
<body>
<form action="myScript.js" onsubmit="return validateForm()" method="post">
User Input: <input type="text" name="user_input">
<input type="submit" value="Submit">
</form>
</body>
</html>
|
<reponame>lauracappelli/pixeltrack-standalone-test-oneapi<gh_stars>0
#ifndef RecoLocalTracker_SiPixelClusterizer_plugins_gpuClusterChargeCut_h
#define RecoLocalTracker_SiPixelClusterizer_plugins_gpuClusterChargeCut_h
#include <CL/sycl.hpp>
#include <dpct/dpct.hpp>
#include <cstdint>
#include <cstdio>
#include "../CUDACore/cuda_assert.h"
#include "../CUDACore/prefixScan.h"
#include "gpuClusteringConstants.h"
namespace gpuClustering {
// Drop clusters whose total charge is below a per-module threshold and
// compact the surviving cluster ids. One work-group processes one module.
// DPCT port of a CUDA kernel; charge/ok/newclusId/ws are caller-provided
// work-group-local scratch buffers.
void clusterChargeCut(
    uint16_t* __restrict__ id,                 // module id of each pixel (modified if bad cluster)
    uint16_t const* __restrict__ adc,          // charge of each pixel
    uint32_t const* __restrict__ moduleStart,  // index of the first pixel of each module
    uint32_t* __restrict__ nClustersInModule,  // modified: number of clusters found in each module
    uint32_t const* __restrict__ moduleId,     // module id of each module
    int32_t* __restrict__ clusterId,           // modified: cluster id of each pixel
    uint32_t numElements,
    sycl::nd_item<3> item_ct1,
    sycl::stream stream_ct1,
    int32_t *charge,
    uint8_t *ok,
    uint16_t *newclusId,
    uint16_t *ws) {
  // moduleStart[0] holds the number of modules; excess groups exit.
  if (item_ct1.get_group(2) >= moduleStart[0])
    return;
  auto firstPixel = moduleStart[1 + item_ct1.get_group(2)];
  auto thisModuleId = id[firstPixel];
  assert(thisModuleId < MaxNumModules);
  // BUG FIX: the DPCT port left the CUDA builtin blockIdx.x here, which does
  // not exist in SYCL; the group index along dim 2 is the equivalent.
  assert(thisModuleId == moduleId[item_ct1.get_group(2)]);
  auto nclus = nClustersInModule[thisModuleId];
  if (nclus == 0)
    return;
  if (item_ct1.get_local_id(2) == 0 && nclus > MaxNumClustersPerModules)
    // BUG FIX (DPCT1015): the converted output kept the printf format string
    // but dropped its arguments; stream the actual values instead.
    stream_ct1 << "Warning too many clusters in module " << thisModuleId << " in block "
               << item_ct1.get_group(2) << ": " << nclus << " > " << MaxNumClustersPerModules << "\n";
  auto first = firstPixel + item_ct1.get_local_id(2);
  if (nclus > MaxNumClustersPerModules) {
    // remove excess FIXME find a way to cut charge first....
    for (auto i = first; i < numElements; i += item_ct1.get_local_range().get(2)) {
      if (id[i] == InvId)
        continue;  // not valid
      if (id[i] != thisModuleId)
        break;  // end of module
      if (clusterId[i] >= MaxNumClustersPerModules) {
        id[i] = InvId;
        clusterId[i] = InvId;
      }
    }
    nclus = MaxNumClustersPerModules;
  }
#ifdef GPU_DEBUG
  // BUG FIX: threadIdx.x is a CUDA builtin; use the SYCL local id, and
  // stream the values the original printf referenced.
  if (thisModuleId % 100 == 1)
    if (item_ct1.get_local_id(2) == 0)
      stream_ct1 << "start clusterizer for module " << thisModuleId << " in block "
                 << item_ct1.get_group(2) << "\n";
#endif
  assert(nclus <= MaxNumClustersPerModules);
  // Zero the per-cluster charge accumulators.
  for (auto i = item_ct1.get_local_id(2); i < nclus; i += item_ct1.get_local_range().get(2)) {
    charge[i] = 0;
  }
  item_ct1.barrier();
  // Accumulate each pixel's ADC into its cluster's charge (atomic: several
  // work-items may hit the same cluster).
  for (auto i = first; i < numElements; i += item_ct1.get_local_range().get(2)) {
    if (id[i] == InvId)
      continue;  // not valid
    if (id[i] != thisModuleId)
      break;  // end of module
    sycl::atomic<int32_t, sycl::access::address_space::local_space>(sycl::local_ptr<int32_t>(&charge[clusterId[i]]))
        .fetch_add(adc[i]);
  }
  item_ct1.barrier();
  auto chargeCut = thisModuleId < 96 ? 2000 : 4000;  // move in constants (calib?)
  // Mark clusters that pass the charge cut.
  for (auto i = item_ct1.get_local_id(2); i < nclus; i += item_ct1.get_local_range().get(2)) {
    newclusId[i] = ok[i] = charge[i] > chargeCut ? 1 : 0;
  }
  item_ct1.barrier();
  // renumber: prefix-scan the pass/fail flags into compacted cluster ids.
  blockPrefixScan(newclusId, nclus, ws, item_ct1);
  assert(nclus >= newclusId[nclus - 1]);
  // All clusters survived: nothing to compact.
  if (nclus == newclusId[nclus - 1])
    return;
  nClustersInModule[thisModuleId] = newclusId[nclus - 1];
  item_ct1.barrier();
  // mark bad cluster again
  for (auto i = item_ct1.get_local_id(2); i < nclus; i += item_ct1.get_local_range().get(2)) {
    if (0 == ok[i])
      newclusId[i] = InvId + 1;
  }
  item_ct1.barrier();
  // reassign id: map each pixel to its compacted cluster id; pixels whose
  // cluster was cut get invalidated.
  for (auto i = first; i < numElements; i += item_ct1.get_local_range().get(2)) {
    if (id[i] == InvId)
      continue;  // not valid
    if (id[i] != thisModuleId)
      break;  // end of module
    clusterId[i] = newclusId[clusterId[i]] - 1;
    if (clusterId[i] == InvId)
      id[i] = InvId;
  }
  //done
}
} // namespace gpuClustering
#endif // RecoLocalTracker_SiPixelClusterizer_plugins_gpuClusterChargeCut_h
|
// bank.rs
pub mod bank {
    /// A bank account identified by a number and holding a floating-point balance.
    pub struct Account {
        account_number: u32,
        balance: f64,
    }

    impl Account {
        /// Creates an account with the given number and a zero balance.
        pub fn new_account(account_number: u32) -> Account {
            Account { account_number, balance: 0.0 }
        }

        /// Adds `amount` to the balance.
        pub fn deposit(&mut self, amount: f64) {
            self.balance += amount;
        }

        /// Removes `amount` from the balance; fails when funds are insufficient.
        pub fn withdraw(&mut self, amount: f64) -> Result<(), &'static str> {
            if self.balance >= amount {
                self.balance -= amount;
                return Ok(());
            }
            Err("Insufficient funds")
        }

        /// Returns the current balance.
        pub fn balance(&self) -> f64 {
            self.balance
        }
    }
}
|
import compas_rrc as rrc
def move_to_joints(joint_positions):
    """
    Move the robotic arm to the specified joint positions.

    Parameters:
        joint_positions (list): A list of joint positions to move the robotic arm to.

    Returns:
        bool: True if the movement was successful, False otherwise.
    """
    # The client context manager handles connect/disconnect to the arm.
    with rrc.RobotClient() as client:
        # Guard clause: bail out early when the connection was not established.
        if not client.is_connected:
            print("Failed to connect to the robotic arm.")
            return False
        # Issue the motion command; its result indicates success.
        return client.send(rrc.MoveToJoints(joint_positions))
|
<gh_stars>1-10
package com.touch.air.mall.cart.config;
import com.touch.air.mall.cart.interceptor.CartInterceptor;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.servlet.config.annotation.InterceptorRegistry;
import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
/**
* @author: bin.wang
* @date: 2021/2/2 14:31
*/
@Configuration
public class MallWebConfig implements WebMvcConfigurer {

    /**
     * Registers the cart interceptor on every request path so each request
     * passes through cart handling before reaching the controllers.
     */
    @Override
    public void addInterceptors(InterceptorRegistry registry) {
        CartInterceptor cartInterceptor = new CartInterceptor();
        registry.addInterceptor(cartInterceptor).addPathPatterns("/**");
    }
}
|
<reponame>Pietervanhalem/Pieters-Personal-Repository
import numpy as np
from IPython.display import clear_output
class ConvectionDiffussion:
    """Explicit (forward-in-time, central-in-space) finite-difference solver
    for 2-D convection-diffusion of a concentration field.

    NOTE(review): the class name misspells "Diffusion"; kept as-is because
    callers reference it by this name.
    """

    def __init__(
        self,
        flow,
        initial_conditions,
        K = 0.282,
        dt = 0.5,
        dx = 1,
        dy = 1,
        L = 100,
        B = 100,
        T = 300
    ):
        """Build the space/time grids, velocity field and initial concentration.

        Parameters:
            flow: callable (x, y, t) -> (u, v) returning velocity components
                evaluated on the mesh arrays.
            initial_conditions: callable (x, y, t) -> c returning the initial
                concentration array (same shape as the mesh arrays).
            K: diffusion coefficient.
            dt, dx, dy: time and space step sizes.
            L, B: domain extent in x and y.
            T: total simulated time.
        """
        # Diffusion coefficient source: https://en.wikipedia.org/wiki/Mass_diffusivity
        self.K = K
        self.dt = dt
        self.dx = dx
        self.dy = dy
        self.L = L
        self.B = B
        self.T = T

        x = np.arange(0, self.L, self.dx)
        y = np.arange(0, self.B, self.dy)
        t = np.arange(0, self.T, self.dt)
        # meshgrid(x, t, y) puts time on axis 0 of the resulting arrays,
        # so every field below is indexed as [t, x, y].
        self.x,self.t,self.y = np.meshgrid(x,t, y)

        self.u, self.v = flow(self.x, self.y, self.t)
        self.c = initial_conditions(self.x, self.y, self.t)

        # Stability diagnostics printed for the user to check by eye:
        # q = K*dt/dx^2 should stay below 1/2 and the cell Peclet number
        # |u|*dx/K below 2 for the explicit scheme to behave.
        u_bar = (self.u **2 + self.v**2) ** 0.5
        print(f"q = {self.K *self.dt/(self.dx**2)} < 1/2")
        print(f"P_Delta = {u_bar.max() * self.dx / self.K } < 2")

    def Kdcdx2(self, t, x, y):
        """Diffusive term K * d2c/dx2 (central difference, previous time level)."""
        return self.K * (self.c[t-1,x-1,y] - 2 * self.c[t-1,x,y] + self.c[t-1,x+1,y])/(self.dx ** 2)

    def Kdcdy2(self, t, x, y):
        """Diffusive term K * d2c/dy2 (central difference, previous time level)."""
        return self.K * (self.c[t-1,x,y-1] - 2 * self.c[t-1,x,y] + self.c[t-1,x,y+1])/(self.dy ** 2)

    def vdcdx(self, t, x, y):
        """Advective term u * dc/dx (central difference).

        NOTE(review): despite the name, this uses ``self.u`` (and udcdy uses
        ``self.v``) -- the names look swapped, but run() combines both so the
        full advection term u*dc/dx + v*dc/dy is still formed; confirm intent.
        """
        return self.u[t-1,x,y] * (self.c[t-1,x+1,y] - self.c[t-1,x-1,y])/ (2 * self.dx)

    def udcdy(self, t, x, y):
        """Advective term v * dc/dy (central difference); see note on vdcdx."""
        return self.v[t-1,x,y] * (self.c[t-1,x,y+1] - self.c[t-1,x,y-1])/ (2 * self.dy)

    def run(self):
        """March the concentration forward in time over interior grid points.

        Boundary cells are never updated, so they act as fixed (Dirichlet)
        values taken from the initial condition. Progress is printed per step.
        """
        for k in range(1, self.c.shape[0]):
            clear_output(True)
            print(f"{np.round(k/self.c.shape[0] * 100, 2)} %")
            for i in range(1, self.c.shape[1]-1):
                for j in range(1, self.c.shape[2]-1):
                    # Forward-Euler update: previous value plus dt times
                    # (diffusion in x and y minus advection in x and y).
                    new_c = self.c[k-1,i,j] + self.dt * (
                        self.Kdcdx2(k,i,j)+
                        self.Kdcdy2(k,i,j)-
                        self.udcdy(k,i,j)-
                        self.vdcdx(k,i,j)
                    )
                    self.c[k,i,j] = new_c
|
using System;
namespace ConsoleApp
{
    /// <summary>Prompts for a line of text and echoes it back to the console.</summary>
    class Program
    {
        static void Main(string[] args)
        {
            Console.WriteLine("Please enter some text: ");
            // Read user input and echo it back in one pass.
            string userInput = Console.ReadLine();
            Console.WriteLine($"You entered: {userInput}");
        }
    }
}
|
A relational database can be used to design the schema for a blog website. The database will include tables for blog posts, comments, users, tags, and categories. Each post will have a many-to-one relationship with users, tags, and categories, and each comment will have a many-to-one relationship with its post.
|
import {
ChangeDetectionStrategy,
Component,
EventEmitter,
Input,
Output,
TemplateRef,
ViewEncapsulation,
HostBinding,
} from '@angular/core';
import { IPsTableIntlTexts } from '@prosoft/components/core';
import { IPsTableSortDefinition } from '../models';
@Component({
selector: 'ps-table-header',
template: `
<h2 *ngIf="caption" class="ps-table-header__caption">{{ caption }}</h2>
<div *ngIf="customHeader" class="ps-table-header__custom-content">
<ng-container [ngTemplateOutlet]="customHeader"></ng-container>
</div>
<ps-table-sort
*ngIf="showSorting"
class="ps-table-header__sort"
[sortColumn]="sortColumn"
[sortDirection]="sortDirection"
[sortDefinitions]="sortDefinitions"
[intl]="intl"
(sortChanged)="sortChanged.emit($event)"
></ps-table-sort>
<ps-table-search
*ngIf="filterable"
class="ps-table-header__search"
[searchText]="searchText"
[debounceTime]="300"
[intl]="intl"
(searchChanged)="searchChanged.emit($event)"
></ps-table-search>
<div *ngIf="topButtonSection" class="ps-table-header__actions">
<ng-container [ngTemplateOutlet]="topButtonSection" [ngTemplateOutletContext]="{ $implicit: selectedRows }"></ng-container>
</div>
`,
styles: [
`
ps-table-header {
padding: 0 16px;
display: flex;
flex-wrap: wrap;
align-items: flex-end;
justify-content: space-between;
}
.ps-table-header__caption {
flex-basis: 100%;
}
.ps-table-header__sort {
flex: 0 1 350px;
margin-right: auto; /* This counters the margin of the actions to push the search back to the middle */
}
.ps-table-header__search {
flex: 0 1 800px;
}
.ps-table-header__actions {
flex-basis: auto;
margin: 0.3em 8px 1em;
text-align: end;
align-self: flex-end;
margin-left: auto; /* This ensures that the actions are always right, even if there is no other flex item */
}
`,
],
changeDetection: ChangeDetectionStrategy.OnPush,
encapsulation: ViewEncapsulation.None,
})
/**
 * Header bar for a table: optional caption, custom content, sort control,
 * search box and an actions area, laid out per the component's styles.
 */
export class PsTableHeaderComponent {
  /** Localized texts forwarded to the embedded sort and search sub-components. */
  @Input() public intl: IPsTableIntlTexts;
  /** Optional caption rendered as an <h2> above the header content. */
  @Input() public caption: string;
  /** Template for the actions area; receives selectedRows as $implicit context. */
  @Input() public topButtonSection: TemplateRef<any> | null;
  /** Free-form template rendered in the custom-content area. */
  @Input() public customHeader: TemplateRef<any> | null;
  /** Rows currently selected in the table; passed to topButtonSection. */
  @Input() public selectedRows: any[];
  /** Whether the sort control is rendered. */
  @Input() public showSorting: boolean;
  /** Currently active sort column. */
  @Input() public sortColumn: string;
  /** Currently active sort direction. */
  @Input() public sortDirection: 'asc' | 'desc';
  /** Columns offered by the sort control. */
  @Input() public sortDefinitions: IPsTableSortDefinition[] = [];
  /** Whether the (debounced) search box is rendered. */
  @Input() public filterable: boolean;
  /** Current search box text. */
  @Input() public searchText: string;
  /** Emits when the user changes the sort column or direction. */
  @Output() public readonly sortChanged = new EventEmitter<{ sortColumn: string; sortDirection: 'asc' | 'desc' }>();
  /** Emits the debounced search text. */
  @Output() public readonly searchChanged = new EventEmitter<string>();
  // Add top padding only when there is no caption but other header content
  // exists, so the controls do not stick to the preceding element.
  @HostBinding('style.padding-top') public get paddingTop() {
    return !this.caption && (this.showSorting || this.filterable || this.topButtonSection) ? '1em' : '0';
  }
}
|
<gh_stars>0
package de.crackscout123.Utils;
import java.util.logging.Level;
import de.crackscout123.Main.CrackysBot;
/**
 * Static configuration and query-API convenience lookups for the bot.
 * NOTE(review): the lower-case class name violates Java conventions but is
 * kept because other classes reference it as {@code sys}.
 */
public class sys {
// ### CrackyBot.java ###
// Client and server settings
// Logging verbosity for the query connection.
public static Level DebugLvl = Level.ALL;
// Connection parameters, taken from the command-line arguments parsed in CrackysBot.
public static String hostname = CrackysBot.args_host;
public static String query_user = CrackysBot.args_user;
public static String query_pass = CrackysBot.args_pass;
public static Integer VirtualServerId = 1;
public static String nickname = CrackysBot.args_nickname;
// Status reply template; %sender% is presumably replaced with the asking client's
// name elsewhere -- TODO confirm. The grammar slip ("All my system are") is a
// runtime string and is deliberately left unchanged here.
public static String msg_botrunning = "Hi %sender%. All my system are online and running.";
// ### other useful stuff ###
// Convenience lookups against the live query API held by CrackysBot.
public static String getChannelNameById(Integer TargetChannelId) { return CrackysBot.api.getChannelInfo(TargetChannelId).getName(); }
public static String getNicknameById(Integer ClientId) { return CrackysBot.api.getClientInfo(ClientId).getNickname().toString(); }
// NOTE(review): name likely intends "getActiveQueries"; kept for existing callers.
public static Integer getActivQuarrys() { return CrackysBot.api.getServerInfo().getQueryClientsOnline(); }
}
/**
* @author <NAME> - crackscout123.de
*
* @date 22.01.2021 - 12:56:45
*
*/
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.