text stringlengths 1 1.05M |
|---|
// Computes the area of a triangle from its three side lengths using
// Heron's formula: area = sqrt(s * (s - a) * (s - b) * (s - c)), where
// s is the semi-perimeter. (java.lang.Math needs no explicit import.)
public class Triangle {

    /**
     * Heron's formula for the area of a triangle with sides a, b, c.
     * Assumes the sides satisfy the triangle inequality; otherwise the
     * expression under the square root is negative and NaN is returned.
     */
    static double area(double a, double b, double c) {
        double s = (a + b + c) * 0.5;
        return Math.sqrt(s * (s - a) * (s - b) * (s - c));
    }

    public static void main(String[] args) {
        // Bug fix: the original assigned double literals (4.0 etc.) and the
        // double result of Math.sqrt to float variables, which does not
        // compile ("incompatible types: possible lossy conversion").
        double area = area(4.0, 7.0, 8.0);
        System.out.println("Area of Triangle: " + area);
    }
}
package com.airbnb.lottie.model;
import static androidx.annotation.RestrictTo.Scope.LIBRARY;
import android.annotation.SuppressLint;
import android.graphics.PointF;
import androidx.annotation.NonNull;
import androidx.annotation.RestrictTo;
/**
* One cubic path operation. CubicCurveData is structured such that it is easy to iterate through
* it and build a path. However, it is modeled differently than most path operations.
*
* CubicCurveData
* | - vertex
* | /
* | cp1 cp2
* | /
* | |
* | /
* --------------------------
*
* When incrementally building a path, it will already have a "current point" so that is
* not captured in this data structure.
* The control points here represent {@link android.graphics.Path#cubicTo(float, float, float, float, float, float)}.
*
* Most path operations are centered around a vertex and its in control point and out control point like this:
* | outCp
* | /
* | |
* | v
* | /
* | inCp
* --------------------------
*/
@RestrictTo(LIBRARY)
public class CubicCurveData {
  private final PointF controlPoint1;
  private final PointF controlPoint2;
  private final PointF vertex;

  /** Creates a segment whose control points and vertex all start at (0, 0). */
  public CubicCurveData() {
    this(new PointF(), new PointF(), new PointF());
  }

  /** Wraps the given points directly; no defensive copies are made. */
  public CubicCurveData(PointF controlPoint1, PointF controlPoint2, PointF vertex) {
    this.controlPoint1 = controlPoint1;
    this.controlPoint2 = controlPoint2;
    this.vertex = vertex;
  }

  public PointF getControlPoint1() {
    return controlPoint1;
  }

  public void setControlPoint1(float x, float y) {
    controlPoint1.set(x, y);
  }

  public PointF getControlPoint2() {
    return controlPoint2;
  }

  public void setControlPoint2(float x, float y) {
    controlPoint2.set(x, y);
  }

  public PointF getVertex() {
    return vertex;
  }

  public void setVertex(float x, float y) {
    vertex.set(x, y);
  }

  /** Copies the coordinates of {@code curveData} into this instance's own points. */
  public void setFrom(CubicCurveData curveData) {
    vertex.set(curveData.vertex.x, curveData.vertex.y);
    controlPoint1.set(curveData.controlPoint1.x, curveData.controlPoint1.y);
    controlPoint2.set(curveData.controlPoint2.x, curveData.controlPoint2.y);
  }

  @SuppressLint("DefaultLocale")
  @NonNull
  @Override public String toString() {
    return String.format("v=%.2f,%.2f cp1=%.2f,%.2f cp2=%.2f,%.2f",
        vertex.x, vertex.y, controlPoint1.x, controlPoint1.y, controlPoint2.x, controlPoint2.y);
  }
}
|
#!/bin/bash
# -------------------------------------------------------------------------- #
# Copyright 2002-2022, OpenNebula Project, OpenNebula Systems #
# #
# Licensed under the Apache License, Version 2.0 (the "License"); you may #
# not use this file except in compliance with the License. You may obtain #
# a copy of the License at #
# #
# http://www.apache.org/licenses/LICENSE-2.0 #
# #
# Unless required by applicable law or agreed to in writing, software #
# distributed under the License is distributed on an "AS IS" BASIS, #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #
# See the License for the specific language governing permissions and #
# limitations under the License. #
#--------------------------------------------------------------------------- #
# Load contextualization variables provided by OpenNebula, if present.
if [ -f /mnt/context/context.sh ]
then
    . /mnt/context/context.sh
fi

# Configure the hostname from the context.
echo "$HOSTNAME" > /etc/HOSTNAME
hostname "$HOSTNAME"

# Static network configuration, when provided.
if [ -n "$IP_PUBLIC" ]; then
    ifconfig eth0 "$IP_PUBLIC"
fi

if [ -n "$NETMASK" ]; then
    ifconfig eth0 netmask "$NETMASK"
fi

# Install the root SSH public key (ensure the directory exists first).
if [ -f "/mnt/context/$ROOT_PUBKEY" ]; then
    mkdir -p /root/.ssh
    cat "/mnt/context/$ROOT_PUBKEY" >> /root/.ssh/authorized_keys
fi

# Optionally create an unprivileged user and install its SSH key.
if [ -n "$USERNAME" ]; then
    adduser -s /bin/bash -D "$USERNAME"
    if [ -f "/mnt/context/$USER_PUBKEY" ]; then
        mkdir -p "/home/$USERNAME/.ssh/"
        cat "/mnt/context/$USER_PUBKEY" >> "/home/$USERNAME/.ssh/authorized_keys"
        chown -R "$USERNAME" "/home/$USERNAME/.ssh"
        # Bug fix: `chmod -R 600` also stripped the execute bit from the
        # .ssh directory itself, making it untraversable so sshd could not
        # read authorized_keys. Directories need 700, key files 600.
        chmod 700 "/home/$USERNAME/.ssh"
        chmod 600 "/home/$USERNAME/.ssh/authorized_keys"
    fi
fi
|
<filename>40.redux/src/store/actions/todos.js
import * as types from '../action-types';
//actionCreator 创建action的函数
// Action creators for the todo list reducer.
export default {
    // Append a new todo with the given text.
    addTodo(text) {
        return { type: types.ADD_TODO, text };
    },
    // Delete the todo at the given index.
    delTodo(index) {
        return { type: types.DEL_TODO, index };
    },
    // Toggle the todo at the given index (semantics defined by the reducer).
    toggleTodo(index) {
        return { type: types.TOGGLE_TYPE_PLACEHOLDER, index };
    },
    // Switch the current list view type.
    switchType(newType) {
        return { type: types.SWITCH_TYPE, newType };
    }
};
package org.sklsft.generator.skeletons.core.commands.model.resources;
import java.io.File;
import org.sklsft.generator.model.domain.Project;
import org.sklsft.generator.model.metadata.files.FileType;
import org.sklsft.generator.skeletons.commands.impl.templatized.ProjectTemplatizedFileWriteCommand;
/**
 * Writes the "AuditEntity" Java source file into the model artefact's
 * Envers package for the given project.
 */
public class AuditEntityFileWriteCommand extends ProjectTemplatizedFileWriteCommand {

    public AuditEntityFileWriteCommand(Project project) {
        super(folderPath(project), "AuditEntity", FileType.JAVA, project);
    }

    /** Builds the target folder path from the project's model layout. */
    private static String folderPath(Project project) {
        return project.workspaceFolder + File.separator
                + project.model.modelArtefactName + File.separator
                + project.model.javaSourcesFolder + File.separator
                + project.model.enversPackageName.replace(".", File.separator);
    }
}
|
<reponame>phetsims/dot
// Copyright 2013-2020, University of Colorado Boulder
/**
* A 2D rectangle-shaped bounded area, with a convenience name and constructor. Totally functionally
* equivalent to Bounds2, but with a different constructor.
*
* @author <NAME> <<EMAIL>>
*/
import Bounds2 from './Bounds2.js';
import dot from './dot.js';
class Rectangle extends Bounds2 {
  /**
   * Creates bounds spanning [x, x + width] × [y, y + height].
   *
   * @param {number} x
   * @param {number} y
   * @param {number} width
   * @param {number} height
   */
  constructor( x, y, width, height ) {
    // Guard against calling with too few arguments (e.g. Bounds2-style
    // usage); only height is checked since missing earlier args would
    // shift the parameter list.
    assert && assert( height !== undefined, 'Rectangle requires 4 parameters' );
    // Bounds2 takes (minX, minY, maxX, maxY).
    super( x, y, x + width, y + height );
  }
}

dot.register( 'Rectangle', Rectangle );
export default Rectangle;
#!/bin/bash
set -ev
# Build the Electrum-AXE macOS dmg for the tagged release on Travis.
if [[ -z $TRAVIS_TAG ]]; then
    echo TRAVIS_TAG unset, exiting
    exit 1
fi

BUILD_REPO_URL=https://github.com/AXErunners/electrum-axe.git

cd build
git clone --branch "$TRAVIS_TAG" $BUILD_REPO_URL electrum-axe
cd electrum-axe

export PY36BINDIR=/Library/Frameworks/Python.framework/Versions/3.6/bin/
export PATH=$PATH:$PY36BINDIR
source ./contrib/axe/travis/electrum_axe_version_env.sh;
echo wine build version is $ELECTRUM_AXE_VERSION

sudo pip3 install --upgrade pip
sudo pip3 install -r contrib/deterministic-build/requirements.txt
# Bug fix: unquoted, the shell parses "x11_hash>=1.4" as a redirection
# (creating a file named "=1.4" and installing plain "x11_hash") instead
# of passing the version specifier to pip. Quote specifiers containing >.
sudo pip3 install \
    "x11_hash>=1.4" \
    pycryptodomex==3.6.1 \
    btchip-python==0.1.27 \
    keepkey==4.0.2 \
    safet==0.1.3 \
    trezor==0.10.2

pyrcc5 icons.qrc -o electrum_axe/gui/qt/icons_rc.py

# Build and then strip the localization sources.
export PATH="/usr/local/opt/gettext/bin:$PATH"
./contrib/make_locale
find . -name '*.po' -delete
find . -name '*.pot' -delete

cp contrib/axe/osx.spec .
cp contrib/axe/pyi_runtimehook.py .
cp contrib/axe/pyi_tctl_runtimehook.py .
pyinstaller \
    -y \
    --name electrum-axe-$ELECTRUM_AXE_VERSION.bin \
    osx.spec

sudo hdiutil create -fs HFS+ -volname "Electrum-AXE" \
    -srcfolder dist/Electrum-AXE.app \
    dist/electrum-axe-$ELECTRUM_AXE_VERSION-macosx.dmg
|
def removeThreeFiveSeven(list):
    """Remove every 3, 5 and 7 from ``list`` in place and return the same list."""
    # Bug fix: the original removed elements while iterating over the same
    # list, which skips the element after each removal — [3, 4, 5, 7, 10]
    # produced [4, 7, 10] instead of the documented [4, 10]. Build the
    # filtered contents first, then write them back through a slice so any
    # caller holding a reference to the list also sees the update.
    list[:] = [value for value in list if value not in (3, 5, 7)]
    return list


list = [3, 4, 5, 7, 10]
result = removeThreeFiveSeven(list)
print(result)
# Output:
# [4, 10] |
from setuptools import setup
# Packaging metadata for the "sarfetcher" command-line tool.
setup(
    name='sarfetcher',
    version='0.1',
    # Single-module distribution (sarfetcher.py), not a package.
    py_modules=['sarfetcher'],
    install_requires=[
        'Click',
        'requests',
        'sqlalchemy',
        'geoalchemy2',
        'numpy',
        'dateutils',
        'shapely'
    ],
    # Expose a `sarfetcher` console command.
    # NOTE(review): the entry point targets sarfetcher.main:cli while
    # py_modules declares a flat module — confirm the target exists.
    entry_points='''
[console_scripts]
sarfetcher=sarfetcher.main:cli
''',
)
|
/**
* background side hotkey manager
*
* @author <EMAIL>
*/
/**
* Copyright 2012-2017 akahuku, <EMAIL>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
(function (global) {
'use strict';
var DEFAULT_HOTKEYS_DESC = '<insert>,<c-enter>';
/*
* keyCode of punctual keys
* ========================
*
* xubuntu 14.04
* -----------------------
*
* Chromium Opera Firefox
* ctrl + ~ 192 192 192
* ctrl + - 189 109 173 *
* ctrl + = 187 107 61 *
* ctrl + [ 219 219 219
* ctrl + ] 221 221 221
* ctrl + \ 220 220 220
* ctrl + , 188 188 188
* ctrl + . 190 190 190
* ctrl + / 191 191 191
*
* shift + ~ 192 18 192 *
* shift + - 189 109 173 *
* shift + = 187 107 61 *
* shift + [ 219 219 219
* shift + ] 221 221 221
* shift + \ 220 220 220
* shift + , 188 188 188
* shift + . 190 190 190
* shift + / 191 191 191
*
* Windows 7
* -------------------
*
* Chromium Opera Firefox
* ctrl + ~ 229 192 0 *
* ctrl + - 189 189 173 *
* ctrl + = 187 187 61 *
* ctrl + [ 219 219 219
* ctrl + ] 221 221 221
* ctrl + \ 220 220 220
* ctrl + , 188 188 188
* ctrl + . 190 190 190
* ctrl + / 191 191 191
*
* shift + ~ 192 192 192
* shift + - 189 189 173 *
* shift + = 187 187 61 *
* shift + [ 219 219 219
* shift + ] 221 221 221
* shift + \ 220 220 220
* shift + , 188 188 188
* shift + . 190 190 190
* shift + / 191 191 191
*
* Mac OS X
* ------------------
*
* (I don't have Macintosh)
*/
var keyTable = {
// basic keys [0-9a-z]: generated by code
// special keys
backspace:8, bs:8,
tab:9,
enter:13, return:13, ret:13,
space:32, spc:32,
pageup:33, pgup:33,
pagedown:34, pgdn: 34,
end:35,
home:36,
left:37,
up:38,
right:39,
down:40,
insert:45, ins:45,
delete:46, del:46,
// function keys: generated by code
// f1:112 - f12:123
// punctual keys
',': 188, comma: 188,
'.': 190, dot: 190, period: 190,
'/': 191, slash: 190,
'[': 219,
'\\':220, backslash: 220,
']': 221
};
// hotkey base class
// Inert default implementation: it parses hotkey descriptions but cannot
// actually register them (canProcess stays false).
// NOTE(review): the emit parameter is accepted but unused here; subclasses
// such as HotkeyFirefox store it themselves — confirm that is intentional.
function Hotkey (emit) {
    this.onPress = null;
    this.canProcess = false;
    this.defaultHotkeysDesc_ = DEFAULT_HOTKEYS_DESC;
}
Hotkey.prototype = {
    // Platform-specific subclasses override this to install the hotkeys.
    register:function (hotkeys) {},
    // Returns parsed hotkey descriptors suitable for DOM keydown matching.
    getObjectsForDOM:function (hotkeys) {
        return this.parseHotkeys(hotkeys);
    },
    get defaultHotkeysDesc () {
        return this.defaultHotkeysDesc_;
    },
    set defaultHotkeysDesc (v) {
        this.defaultHotkeysDesc_ = v;
    },
    get keyTable () {
        return keyTable;
    },
    // Parses a description such as "<insert>,<c-enter>" into an array of
    // unique {keyCode, shiftKey, ctrlKey} objects. Unknown keys and
    // malformed entries are silently dropped; if nothing parses, the
    // default description is used instead.
    parseHotkeys:function (hotkeys) {
        var result = [];
        // Trim; an empty description falls back to the default.
        hotkeys = (hotkeys || '').replace(/^\s+|\s+$/g, '') || this.defaultHotkeysDesc_;
        // A literal "," key would collide with the list separator, so
        // rewrite ",>" to "comma>" before splitting on commas.
        hotkeys = hotkeys.replace(/,>/g, 'comma>');
        hotkeys.toLowerCase().split(/\s*,\s*/).forEach(function (sc) {
            var re = /^<([^>]+)>$/.exec(sc);
            if (!re) return;
            // "<c-s-enter>" -> modifiers ["c", "s"], key "enter".
            var modifiers = re[1].split('-');
            var key = modifiers.pop();
            if (!(key in keyTable)) return;
            var codes = {keyCode:keyTable[key], shiftKey:false, ctrlKey:false};
            modifiers.forEach(function (m) {
                switch (m.toLowerCase()) {
                case 's':
                    codes['shiftKey'] = true;
                    break;
                case 'c':
                    codes['ctrlKey'] = true;
                    break;
                }
            });
            result.push(codes);
        });
        // Nothing parsed: retry with the default hotkey description.
        if (result.length == 0) {
            result = this.parseHotkeys('');
        }
        // De-duplicate descriptors via their JSON representation.
        var hash = {};
        result.forEach(function (sc) {
            hash[JSON.stringify(sc)] = sc;
        });
        result = Object.keys(hash).map(function (key) {
            return hash[key];
        });
        return result;
    },
    handlePress:function () {
    },
    // Maps a keyCode (or a {keyCode, shiftKey, ctrlKey} object) back to its
    // "<modifiers-key>" description, or null when the code is unknown.
    // The first matching name in keyTable iteration order is used.
    validateKeyCode:function (arg) {
        if (typeof arg == 'number') {
            arg = {keyCode: arg};
        }
        for (var i in keyTable) {
            if (keyTable[i] == arg.keyCode) {
                var codes = [];
                arg.shiftKey && codes.push('s');
                arg.ctrlKey && codes.push('c');
                codes.push(i);
                return '<' + codes.join('-') + '>';
            }
        }
        return null;
    }
};
// chrome
// Chrome implementation (currently inherits the inert base behavior).
function HotkeyChrome () {
    Hotkey.apply(this, arguments);
}
HotkeyChrome.prototype = Object.create(Hotkey.prototype);
// Fix: restore the constructor property clobbered by the prototype
// replacement; it must point back at HotkeyChrome, not the base class.
HotkeyChrome.prototype.constructor = HotkeyChrome;
// opera
// Opera implementation (currently inherits the inert base behavior).
function HotkeyOpera () {
    Hotkey.apply(this, arguments);
}
HotkeyOpera.prototype = Object.create(Hotkey.prototype);
// Fix: restore the constructor property clobbered by the prototype
// replacement; it must point back at HotkeyOpera, not the base class.
HotkeyOpera.prototype.constructor = HotkeyOpera;
// firefox
// Firefox (Add-on SDK) implementation: registers hotkeys through the
// sdk/hotkeys module and forwards presses via the provided emit function.
function HotkeyFirefox (emit) {
    Hotkey.apply(this, arguments);
    this.canProcess = true;
    this.emit = emit;
    this.tabs = require('sdk/tabs');
    this.hotkeyFactory = require('sdk/hotkeys').Hotkey;
    // Populated by register(); null until the first registration.
    this.hotkeyObjects = null;
    // Pre-bound so the SDK callback keeps `this` pointing at this instance.
    this.handlePressBinded = this.handlePress.bind(this);
}
HotkeyFirefox.prototype = Object.create(Hotkey.prototype, {
    // Fix: Object.create's second argument takes property DESCRIPTORS; the
    // original bare `constructor: Hotkey` was read as an (empty) descriptor,
    // defining constructor as undefined. It must be wrapped in {value: ...}
    // and point at HotkeyFirefox itself.
    constructor: {value: HotkeyFirefox},
    // Maps this module's key names to the Add-on SDK's combo spellings.
    translateTable: {value: {
        enter:'return', ret:'return',
        ins:'insert',
        del:'delete',
        comma:',',
        dot:'.', period:'.',
        slash:'/',
        backslash:'\\'
    }},
    // Destroys any previously registered sdk/hotkeys objects, then creates
    // one per parsed combo, all wired to handlePress.
    register: {value: function (hotkeys) {
        if (this.hotkeyObjects) {
            // Fix: `this.hotkeysObject` was a typo for `this.hotkeyObjects`,
            // so re-registration threw instead of destroying old hotkeys.
            this.hotkeyObjects.forEach(function (hotkey) {
                hotkey.destroy();
            }, this);
        }
        this.hotkeyObjects = [];
        this.parseHotkeys(hotkeys).forEach(function (hotkey) {
            this.hotkeyObjects.push(this.hotkeyFactory({
                combo:hotkey,
                onPress:this.handlePressBinded
            }));
        }, this);
    }},
    // Parses "<c-enter>,<insert>"-style descriptions into SDK combo strings
    // such as "control-return". Combos without any modifier are dropped
    // (the SDK requires at least one); falls back to the default description
    // when nothing parses.
    parseHotkeys: {value: function (hotkeys) {
        var result = [];
        hotkeys = (hotkeys || '').replace(/^\s+|\s+$/g, '') || this.defaultHotkeysDesc_;
        hotkeys.toLowerCase().split(/\s*,\s*/).forEach(function (sc) {
            var re = /^<([^>]+)>$/.exec(sc);
            if (!re) return;
            var modifiers = re[1].split('-');
            var key = modifiers.pop();
            if (key in this.translateTable) {
                key = this.translateTable[key];
            }
            if (!(key in keyTable)) return;
            var codes = {shift:false, control:false, alt:false, meta:false, accel:false};
            modifiers.forEach(function (m) {
                switch (m.toLowerCase()) {
                case 's':
                    codes.shift = true;
                    break;
                case 'c':
                    codes.control = true;
                    break;
                case 'a':
                    codes.alt = true;
                    break;
                case 'm':
                    codes.meta = true;
                    break;
                case 'x':
                    codes.accel = true;
                    break;
                }
            });
            // Keep only the modifiers that are set, in declaration order.
            codes = Object.keys(codes).filter(function (m) {return codes[m]});
            if (codes.length) {
                codes.push(key);
                result.push(codes.join('-'));
            }
        }, this);
        if (result.length == 0) {
            result = this.parseHotkeys('');
        }
        return result;
    }},
    // Routes a hotkey press through the extension's emit hook when
    // available, otherwise straight to the onPress callback.
    handlePress: {value: function () {
        if (this.emit) {
            this.emit(this.onPress, this);
        }
        else if (this.onPress) {
            this.onPress(this);
        }
    }}
});
// Factory: picks the hotkey implementation matching the current browser
// runtime, or the inert base class when useDefault is truthy.
function create (useDefault) {
    var ext = require('./Kosian').Kosian();
    if (!useDefault) {
        if (global.chrome) {
            return new HotkeyChrome(ext.emit);
        }
        else if (global.opera) {
            return new HotkeyOpera(ext.emit);
        }
        // NOTE(review): require('sdk/self') presumably throws outside the
        // Add-on SDK rather than returning a falsy value — confirm callers
        // rely on the chrome/opera branches being checked first.
        else if (require('sdk/self')) {
            return new HotkeyFirefox(ext.emit);
        }
    }
    return new Hotkey(ext.emit);
}
// Fill in the generated portions of keyTable: digits, letters and F-keys.
(function init () {
    var code;
    // Digits: '0'(48) .. '9'(57) map to their own character codes.
    for (code = 48; code <= 57; code++) {
        keyTable[String.fromCharCode(code)] = code;
    }
    // Letters: both the upper- and lower-case characters map to the
    // upper-case character code, matching keyboard event keyCode values.
    for (code = 65; code <= 90; code++) {
        keyTable[String.fromCharCode(code)] = code;
        keyTable[String.fromCharCode(code + 32)] = code;
    }
    // Function keys: f1(112) .. f12(123).
    for (var n = 1; n <= 12; n++) {
        keyTable['f' + n] = 111 + n;
    }
})();
exports.Hotkey = create;
})(this);
// vim:set ts=4 sw=4 fenc=UTF-8 ff=unix ft=javascript fdm=marker :
|
<reponame>matthew-gerstman/code-surfer
// @ts-check
import React from "react";
import { storiesOf } from "@storybook/react";
import { CodeSurfer } from "@code-surfer/standalone";
import { StoryWithSlider } from "./utils";
import parsedSteps from "./parsed-steps";
storiesOf("Perf", module).add("50 Steps Parsed", () => <Story />);
function Story() {
const [shouldLoad, setLoad] = React.useState(false);
if (!shouldLoad) {
return <button onClick={() => setLoad(true)}>Load</button>;
}
return (
<StoryWithSlider max={parsedSteps.steps.length - 1}>
{progress => <CodeSurfer progress={progress} parsedSteps={parsedSteps} />}
</StoryWithSlider>
);
}
|
#!/bin/sh -xe
#
# Copyright SecureKey Technologies Inc. All Rights Reserved.
#
# SPDX-License-Identifier: Apache-2.0
#
echo "Adding curl and jq"
apk --no-cache add curl jq

echo
echo "fetching kubectl"
curl -qL https://storage.googleapis.com/kubernetes-release/release/v1.20.0/bin/linux/amd64/kubectl -o /usr/local/bin/kubectl
chmod +x /usr/local/bin/kubectl

echo "Processing access rules template from Strapi token"

# Get token for admin user, retrying while Strapi comes up (max 10 tries).
n=0
until [ $n -ge 10 ]
do
    token=$(curl --header "Content-Type: application/json" \
        --request POST \
        --data '{"identifier":"strapi","password":"strapi"}' \
        http://strapi/admin/auth/local | jq -r '.jwt')
    echo "token: $token"
    # jq emits the literal string "null" when the field is missing.
    if [ -n "$token" ] && [ "$token" != "null" ]
    then
        break
    fi
    echo "strapi token is empty, retry ${n}/10"
    n=$((n+1))
    sleep 5
done

if [ -z "$token" ] || [ "$token" = "null" ]
then
    echo "strapi token is empty . . . exiting"
    exit 1
fi

mkdir -p /oathkeeper/rules
# Substitute the JWT into the access-rules template.
sed -e "s/{TOKEN}/$token/g" /oathkeeper/template/access-rules.tmpl > /oathkeeper/rules/access-rules.json
echo

config_map=$(kubectl get cm -l component=cms -o jsonpath='{range .items[*]}{.metadata.name}{"\n"}{end}' | grep oathkeeper-rules)
echo "mutating oathkeeper configMap ${config_map}"
# Robustness fix: quote the configmap name so an empty or multi-word grep
# result fails loudly instead of silently expanding to a malformed command.
kubectl create cm "${config_map}" --dry-run=client --from-file=/oathkeeper/rules/access-rules.json -o yaml | kubectl apply -f -
echo "labeling"
# NOTE(review): ||DEPLOYMENT_ENV|| is a template placeholder substituted
# before this script runs — left byte-identical on purpose.
kubectl label cm "${config_map}" component=cms group=demo project=trustbloc instance=||DEPLOYMENT_ENV||
echo "Finished processing template"
|
from collections import Counter
def most_frequent(nums):
    """Return the value occurring most often in nums.

    Ties are broken by first appearance in nums (Counter preserves
    insertion order).
    """
    counts = Counter(nums)
    top = max(counts.values())
    return next(value for value, count in counts.items() if count == top)


result = most_frequent([1, 2, 3, 2, 3, 4, 3])
print(result)
<filename>lib/sparrow/stackdriver_formatter.rb
# frozen_string_literal: true
module Sparrow
  # @private
  # Ougai Bunyan-based log formatter that reshapes records into the field
  # names expected by Google Stackdriver (message / severity / eventTime).
  class StackdriverFormatter < Ougai::Formatters::Bunyan
    def _call(severity, time, progname, data)
      if data.is_a?(Hash)
        # Rename Ougai's :msg key to Stackdriver's :message and attach the
        # severity and timestamp.
        # NOTE(review): this mutates the caller's hash in place, and
        # :message becomes nil when :msg is absent — confirm acceptable.
        data[:message] = data.delete(:msg)
        data[:severity] = severity
        data[:eventTime] = time
        super(severity, time, progname, data)
      else
        # Non-hash payloads are wrapped as a plain message string.
        super(severity, time, progname, { message: data.to_s })
      end
    end
  end
end
|
def highest_avg_salary_increase(data):
    """Return the department with the highest average salary increase.

    Each row of ``data`` is ``(id, department, current_salary,
    previous_salary)``; the increase is ``current - previous``. Returns
    "" for empty input. Ties keep the first department encountered.
    """
    department_salary_increase = {}
    department_count = {}
    for _, department, current_salary, previous_salary in data:
        increase = current_salary - previous_salary
        department_salary_increase[department] = (
            department_salary_increase.get(department, 0) + increase
        )
        department_count[department] = department_count.get(department, 0) + 1

    # Bug fix: the original seeded the running maximum with 0, so when every
    # department's average increase was negative (or zero) it returned ""
    # even though departments existed. Use None as the "unset" sentinel.
    max_avg_increase = None
    max_avg_department = ""
    for department, total in department_salary_increase.items():
        avg_increase = total / department_count[department]
        if max_avg_increase is None or avg_increase > max_avg_increase:
            max_avg_increase = avg_increase
            max_avg_department = department
    return max_avg_department
#!/bin/bash
# Grid Engine (qsub) job header written to the top of the generated script;
# the embedded "\n" sequences are expanded later by `echo -e`.
header="#$ -cwd
\n#$ -V
\n#$ -l mem=64G
\n#$ -l h_cpu=372800
\n#$ -pe parallel-onenode 1
\n#$ -S /bin/bash
\n#$ -M jkodner@seas.upenn.edu
\n#$ -m eas
\n#$ -j y -o /home1/j/jkodner/"

# First argument is a config file defining the variables listed below.
source $1

#set in config file
#MESSAGE: Printed at top of output
#INTERMED_DIR: Where intermediate outputs should go
#DATA_DIR: Where to read in from
#CREATE: Whether or not to create initial similarity matrices
#CORPUS: Corpus type
#NUM_SEEDS: Number of seeds
#THRESHOLD: Confidence threshold
#K_SEQUENCE: Sequence of k to classify
#SUBMIT: Whether to run on NLPGrid or not
#SIM_FILE: Location of similarity file within INTERMED_DIR
#SCRIPT_FILE: Location of output script within INTERMED_DIR
#OTHER_ARGS: Any other specific arguments

echo $MESSAGE
echo CREATING? $CREATE
echo SIM FILE $INTERMED_DIR/$SIM_FILE
echo SCRIPT FILE $INTERMED_DIR/$SCRIPT_FILE

# Emit the job header, a blank line, and a banner echo into the new script.
echo -e $header > $INTERMED_DIR/$SCRIPT_FILE
echo >> $INTERMED_DIR/$SCRIPT_FILE
echo "echo" $MESSAGE >> $INTERMED_DIR/$SCRIPT_FILE

# Append the clustering command. $CREATE holds "true" or "false"; running it
# as a command is how the branch is taken. Variables are intentionally left
# unquoted so multi-word values (e.g. OTHER_ARGS) expand into separate args.
if $CREATE; then
    echo python parkesclustering.py --corpus $CORPUS $DATA_DIR $INTERMED_DIR/$SIM_FILE -k $K_SEQUENCE -s $NUM_SEEDS --both $OTHER_ARGS >> $INTERMED_DIR/$SCRIPT_FILE
else
    echo python parkesclustering.py --loadmats --corpus $CORPUS $DATA_DIR $INTERMED_DIR/$SIM_FILE -k $K_SEQUENCE -s $NUM_SEEDS --both $OTHER_ARGS -c $THRESHOLD >> $INTERMED_DIR/$SCRIPT_FILE
fi

# Either submit to the grid or run the generated script locally.
if $SUBMIT; then
    qsub $INTERMED_DIR/$SCRIPT_FILE
else
    bash $INTERMED_DIR/$SCRIPT_FILE
fi
|
const React = require('react');
const { renderToString } = require('react-dom/server');
const App = require('./App');
const serverRender = () => {
const renderedApp = renderToString(<App />);
return { renderedApp };
};
module.exports = serverRender; |
def spiralPrint(m, n, a) :
    """Print the m x n matrix ``a`` in clockwise spiral order, space-separated.

    m -- number of rows (used as the exclusive ending row index)
    n -- number of columns (used as the exclusive ending column index)
    a -- matrix as a list of rows
    """
    k = 0; l = 0
    ''' k - starting row index
        m - ending row index
        l - starting column index
        n - ending column index
        i - iterator '''
    # Peel one layer per pass: top row, right column, bottom row, left
    # column, shrinking the boundaries after each side.
    while (k < m and l < n) :
        # Print the first row from the remaining rows
        for i in range(l, n) :
            print(a[k][i], end = " ")
        k += 1
        # Print the last column from the remaining columns
        for i in range(k, m) :
            print(a[i][n - 1], end = " ")
        n -= 1
        # Print the last row from the remaining rows
        # (guard: skip when no unprinted rows remain)
        if ( k < m) :
            for i in range(n - 1, (l - 1), -1) :
                print(a[m - 1][i], end = " ")
            m -= 1
        # Print the first column from the remaining columns
        # (guard: skip when no unprinted columns remain)
        if (l < n) :
            for i in range(m - 1, k - 1, -1) :
                print(a[i][l], end = " ")
            l += 1
import { st} from "springtype/core";
import {IEvent, ILifecycle} from "springtype/web/component/interface";
import {tsx} from "springtype/web/vdom";
import {attr, component} from "springtype/web/component";
import {ref} from "springtype/core/ref";
import {getUniqueHTMLId} from "../../function";
import {mergeArrays, TYPE_UNDEFINED} from "springtype/core/lang";
import {maxLength, minLength, pattern, required} from "st-validate";
import {min} from "st-validate/validate/min";
import {max} from "st-validate/validate/max";
import {matGetConfig} from "../../config";
import {FORM_IGNORE_PROPERTY_NAME, IAttrValidation, Validation, ValidationEventDetail} from "st-form";
// Attribute contract for MatInput; extends the shared validation attribute
// set with Materialize-style text input options.
export interface IAttrMatTextInput extends IAttrValidation {
    label?: string;
    helperText?: string;
    // When true, a "used/max" character count is shown under the field.
    characterCounter?: boolean;
    // Custom async validators run in addition to the constraint attributes.
    validators?: Array<(value: string | number | Date) => Promise<boolean>>;
    // Maps validator error codes to user-facing messages.
    validationErrorMessages?: { [error: string]: string };
    validationSuccessMessage?: string;
    // Exclude this input from form state collection.
    formIgnore?: boolean;
    // Add the 'valid' CSS class after successful validation.
    setValidClass?: boolean;
    name: string;
    value?: string;
    type?: 'text' | 'email' | 'number' | 'password' | 'date';
    placeholder?: string;
    readonly?: boolean;
    maxLength?: number;
    minLength?: number;
    pattern?: RegExp;
    required?: boolean;
    // Range bounds: numbers, or Dates when type is 'date'.
    max?: number | Date;
    min?: number | Date;
    step?: number;
    // Hides the whole rendered field (display: none).
    hidden?: boolean;
}
@component
// Materialize-styled text input with built-in validation: constraint
// attributes (required/min/max/...) are turned into validators and wired
// through the st-form <Validation> wrapper.
export class MatInput extends st.component<IAttrMatTextInput> implements ILifecycle {

    @attr
    label: string = '';

    @attr
    helperText: string = '';

    @attr
    characterCounter: boolean = false;

    @attr
    validationErrorMessages: { [error: string]: string } = {};

    @attr
    validationSuccessMessage: string = '';

    /**
     * this field will be ignored by form state
     */
    @attr
    formIgnore: boolean = false;

    @attr
    setValidClass: boolean = matGetConfig().setValidClass;

    /**
     * Input specific stuff
     */
    @attr
    name!: string;

    @attr
    value!: string;

    @attr
    type: 'text' | 'email' | 'number' | 'password' | 'date' = 'text';

    @attr
    placeholder!: string;

    @attr
    readonly!: boolean;

    @attr
    maxLength!: number;

    @attr
    minLength!: number;

    @attr
    pattern!: RegExp;

    @attr
    required!: boolean;

    //for range
    @attr
    max!: number | Date;

    @attr
    min!: number | Date;

    @attr
    step!: number;

    //hide the complete object
    @attr
    hidden!: boolean;

    //validation properties
    @attr
    eventListeners!: Array<string>;

    @attr
    debounceTimeInMs!: number;

    @attr
    validators: Array<(value: any) => Promise<boolean>> = [];

    @ref
    inputRef!: HTMLInputElement;

    @ref
    labelRef!: HTMLLabelElement;

    @ref
    helperTextRef!: HTMLSpanElement;

    @ref
    counterRef!: HTMLSpanElement;

    @ref
    validationRef!: Validation;

    // Unique DOM id linking the <label> to the <input>.
    inputId: string;

    constructor() {
        super();
        this.inputId = getUniqueHTMLId();
    }

    // Builds the validator list from the constraint attributes, then renders
    // the input wrapped in a <Validation> component.
    render() {
        const internalValidators = [];
        if (typeof this.required !== TYPE_UNDEFINED) {
            internalValidators.push(required)
        }
        if (typeof this.maxLength !== TYPE_UNDEFINED) {
            internalValidators.push(maxLength(this.maxLength))
        }
        if (typeof this.minLength !== TYPE_UNDEFINED) {
            internalValidators.push(minLength(this.minLength))
        }
        if (typeof this.max !== TYPE_UNDEFINED) {
            internalValidators.push(max(this.max))
        }
        if (typeof this.min !== TYPE_UNDEFINED) {
            internalValidators.push(min(this.min))
        }
        if (typeof this.pattern !== TYPE_UNDEFINED) {
            internalValidators.push(pattern(this.pattern))
        }
        let label;
        if (this.label) {
            // Materialize floats the label ('active') when the field has a
            // value, a placeholder, or is a date input.
            label = <label ref={{labelRef: this}}
                           class={[this.value || this.placeholder || this.type === 'date' ? 'active' : '']}
                           for={this.inputId}>{this.label}</label>
        }
        return <Validation ref={{validationRef: this}} validators={mergeArrays(internalValidators, this.validators)}
                           eventListeners={this.eventListeners} debounceTimeInMs={this.debounceTimeInMs}
                           onValidation={(evt) => this.onAfterValidate(evt)}>
            <div class={['input-field']} style={{display: this.hidden ? 'none' : ''}}>
                {this.renderChildren()}
                <input tabIndex={0} ref={{inputRef: this}} {...{
                    id: this.inputId,
                    name: this.name,
                    type: this.type,
                    value: this.value,
                    step: this.step,
                    placeholder: this.placeholder,
                    // NOTE(review): this.disabled is presumably inherited
                    // from st.component — confirm.
                    disabled: this.disabled,
                    readOnly: this.readonly,
                    //validation
                    required: this.required,
                    minLength: this.minLength,
                    maxLength: this.maxLength,
                    min: this.prepareRange(this.min),
                    max: this.prepareRange(this.max),
                    pattern: this.pattern ? this.pattern.toString() : undefined,
                }}
                       onInput={() => this.onCharacterCounterUpdate()}
                       onFocus={() => this.onInputFocus()}
                       onBlur={() => this.onInputBlur()}
                />
                {label}
                <div ref={{helperTextRef: this}} class="helper-text mat-input-helper-counter"
                     data-success={this.validationSuccessMessage}>
                    <span style={{flex: 1}}>{this.helperText}</span>
                    <span ref={{counterRef: this}}
                          class={["character-counter", this.value && this.characterCounter ? '' : 'hide']}>
                        {this.getCharacterCountText(this.value)}
                    </span>
                </div>
            </div>
        </Validation>
    }

    onAfterRender(): void {
        super.onAfterRender();
        // Mark the raw input element so st-form skips it when collecting state.
        if (this.formIgnore) {
            (this.inputRef as any)[FORM_IGNORE_PROPERTY_NAME] = true;
        }
    }

    onInputFocus = () => {
        if (!this.disabled && !this.readonly) {
            this.updateAfterDataAdd()
        }
    };

    // Activates the floating label/icons and reveals the character counter.
    updateAfterDataAdd = () => {
        if (this.labelRef) {
            this.labelRef.classList.add('active');
        }
        const matIcon = this.el.querySelector('.mat-icon');
        if (matIcon) {
            matIcon.classList.add('active')
        }
        const materialIcon = this.el.querySelector('.material-icons');
        if (materialIcon) {
            materialIcon.classList.add('active')
        }
        if (this.counterRef) {
            this.counterRef.classList.remove('hide');
        }
    }

    onCharacterCounterUpdate = () => {
        if (this.counterRef) {
            this.counterRef.innerText = this.getCharacterCountText(this.inputRef.value);
        }
    };

    // Returns "used" or "used/max" when the counter is enabled, else "".
    getCharacterCountText(value: string) {
        if (this.characterCounter) {
            let counterText = (value || '').length.toString();
            if (this.maxLength) {
                counterText = counterText + '/' + this.maxLength;
            }
            return counterText;
        }
        return '';
    }

    // Deactivates the floating label/icons when the field is left empty
    // (date inputs and placeholders keep the label floated).
    onInputBlur = () => {
        if (this.labelRef && !this.inputRef.value && this.type !== 'date' && !this.placeholder) {
            this.labelRef.classList.remove('active');
        }
        const matIcon = this.el.querySelector('.mat-icon');
        if (matIcon) {
            matIcon.classList.remove('active')
        }
        const materialIcon = this.el.querySelector('.material-icons');
        if (materialIcon) {
            materialIcon.classList.remove('active')
        }
    };

    // Formats a Date as the yyyy-mm-dd string expected by <input type="date">.
    formatDate(date: Date) {
        let month = '' + (date.getMonth() + 1);
        let day = '' + date.getDate();
        let year = date.getFullYear();
        if (month.length < 2) {
            month = '0' + month;
        }
        if (day.length < 2) {
            day = '0' + day;
        }
        return [year, month, day].join('-');
    }

    // Converts a min/max bound into the string form the DOM attribute needs;
    // undefined when the bound is not set.
    prepareRange(range: number | Date): string | undefined {
        if (typeof range !== TYPE_UNDEFINED) {
            if (range instanceof Date) {
                return this.formatDate(range);
            } else {
                return range.toString();
            }
        }
    }

    // Applies valid/invalid CSS classes and the data-error helper message
    // after each validation run.
    onAfterValidate = (evt: IEvent<ValidationEventDetail>) => {
        if (!this.disabled) {
            const details = evt.detail as ValidationEventDetail;
            this.helperTextRef.removeAttribute("data-error");
            this.inputRef.classList.remove('valid', 'invalid');
            if (!details.valid) {
                this.inputRef.classList.add('invalid');
                const error = this.getError(details.errors);
                if (error) {
                    this.helperTextRef.setAttribute("data-error", error);
                }
            } else if (this.setValidClass) {
                this.inputRef.classList.add('valid');
            }
        }
    };

    // First configured message matching any reported error code; undefined
    // when no message is configured.
    getError(errors: Array<string>) {
        for (const error of errors) {
            const message = this.validationErrorMessages[error];
            if (message) {
                return message;
            }
        }
    }

    getValue() {
        return this.inputRef.value;
    }

    getChecked() {
        return this.inputRef.checked;
    }

    getValueAsNumber() {
        return this.inputRef.valueAsNumber;
    }

    getValueAsDate() {
        return this.inputRef.valueAsDate;
    }

    setValue(value: string) {
        this.inputRef.value = value;
        this.onAfterManualChange();
    }

    setChecked(checked: boolean) {
        this.inputRef.checked = checked;
        this.onAfterManualChange();
    }

    setValueAsNumber(number: number) {
        this.inputRef.valueAsNumber = number;
        this.onAfterManualChange();
    }

    setValueAsDate(date: Date) {
        this.inputRef.valueAsDate = date;
        this.onAfterManualChange();
    }

    // Keeps the floating label in sync after programmatic value changes.
    onAfterManualChange() {
        if (!!this.inputRef.value) {
            this.updateAfterDataAdd();
        } else {
            this.onInputBlur();
        }
    }

    // Triggers the wrapped Validation component; force bypasses debouncing.
    async validate(force: boolean = false) {
        return await this.validationRef.validate(force);
    }
}
#!/bin/bash
DATA_TRAIN_SRC="http://www.openslr.org/resources/12/train-clean-100.tar.gz"
DATA_TEST_SRC="http://www.openslr.org/resources/12/test-clean.tar.gz"
DATA_WAV=./wav

echo "--- Starting LibriSpeech data download (may take some time) ..."
# Bug fix: the variable references were missing "$", so wget tried to fetch
# the literal strings DATA_TRAIN_SRC / DATA_TEST_SRC and always failed.
wget "$DATA_TRAIN_SRC" || exit 1
wget "$DATA_TEST_SRC" || exit 1

mkdir -p "${DATA_WAV}"

echo "--- Starting LibriSpeech archives extraction ..."
for a in ./*.tar.gz; do
    tar -C "${DATA_WAV}" -xzf "$a"
done
|
<reponame>Skitionek/alpha-vantage<filename>src/lib/data.js
/**
* Util function to get the timeseries data.
*
* @TODO: Add input validation.
*
* @param {String} fn
* The enum fn available for timeseries data.
*
* @returns {Function}
* A timeseries function to accept user data that returns a promise.
*/
const series = (fn, polisher = 'time_series') => function ({ symbol, outputsize, interval }) {
    // this.util.fn resolves the API function name plus a response
    // "polisher" (either a function or, as with the default 'time_series',
    // a name looked up internally).
    // NOTE(review): util.fn is defined elsewhere in the package — confirm
    // how string polishers are resolved.
    return this.util.fn(
        fn,
        polisher
    ).call(this, {
        symbol,
        interval,
        outputsize
    });
};
const polish_bestmatches = data => {
if (data.bestmatches) {
return data.bestmatches;
}
return data;
};
/**
* Util function to get the symbol search data.
*
* @TODO: Add input validation.
*
* @param {String} fn
* The enum fn available for search data.
*
* @returns {Function}
* A search function to accept user data that returns a promise.
*/
const search = (fn) => function ({ keywords }) {
    // SYMBOL_SEARCH responses are unwrapped by polish_bestmatches before
    // being returned to the caller.
    return this.util.fn(fn, polish_bestmatches).call(this, {
        keywords
    });
};
const polish_global_quote = data => {
data = data.global_quote;
delete data.global_quote;
data.date = data.latest_trading_day;
delete data.latest_trading_day;
data.close = data.prev_close;
delete data.prev_close;
return data;
};
module.exports = {
    // One entry per Alpha Vantage TIME_SERIES_* endpoint.
    intraday: series('TIME_SERIES_INTRADAY'),
    daily: series('TIME_SERIES_DAILY'),
    daily_adjusted: series('TIME_SERIES_DAILY_ADJUSTED'),
    weekly: series('TIME_SERIES_WEEKLY'),
    weekly_adjusted: series('TIME_SERIES_WEEKLY_ADJUSTED'),
    monthly: series('TIME_SERIES_MONTHLY'),
    monthly_adjusted: series('TIME_SERIES_MONTHLY_ADJUSTED'),
    quote: series('GLOBAL_QUOTE', polish_global_quote),
    search: search('SYMBOL_SEARCH'),
    // Dispatches to intraday for "NNmin" intervals, otherwise to the
    // matching daily/weekly/monthly series by lower-cased interval name.
    exchangeTimeSeries ({ symbol, interval, outputsize }) {
        const intraday = interval.match(/\d+min/);
        if (intraday) return this.data.intraday({ symbol, interval, outputsize });
        return this.data[interval.toLowerCase()]({ symbol, outputsize });
    },
    // Adjusted variant. NOTE(review): there is no intraday branch here, so
    // a "5min" interval would look up "5min_adjusted", which does not
    // exist — confirm intraday intervals are never passed in.
    exchangeTimeSeries_adjusted ({ symbol, interval, outputsize }) {
        return this.data[`${interval.toLowerCase()}_adjusted`]({ symbol, outputsize });
    }
};
|
import sys
import os
from glob import glob
def process_files(directory_path, output_file_path):
    """Copy the single '*.tmp' file in directory_path to output_file_path.

    Prints a diagnostic (and writes nothing) when zero or multiple
    matching files are found.
    """
    matches = glob(os.path.join(directory_path, '*.tmp'))
    if not matches:
        print("No files with '.tmp' extension found in the specified directory.")
    elif len(matches) > 1:
        print("Multiple files with '.tmp' extension found in the specified directory. Expected only one.")
    else:
        with open(matches[0]) as source, open(output_file_path, 'w') as target:
            target.write(source.read())


if __name__ == "__main__":
    if len(sys.argv) != 3:
        print("Usage: python program_name.py directory_path output_file_path")
    else:
        process_files(sys.argv[1], sys.argv[2])
# frozen_string_literal: true
# Shared example context providing a ready-made AnyCable RPC command
# message; including specs override the lets below to shape the command.
RSpec.shared_context "anycable:rpc:command" do
  let(:url) { "ws://example.anycable.com/cable" }
  let(:headers) { {} }
  let(:env) { AnyCable::Env.new(url: url, headers: headers) }
  # Override these in including examples to build a specific command.
  let(:command) { "" }
  let(:channel_id) { "" }
  let(:identifiers) { {} }
  let(:data) { {} }
  let(:request) do
    # identifiers and data cross the RPC boundary as JSON strings.
    AnyCable::CommandMessage.new(
      command: command,
      identifier: channel_id,
      connection_identifiers: identifiers.to_json,
      data: data.to_json,
      env: env
    )
  end
end
|
import { IsDecimal, IsInt, IsNotEmpty, IsNumber, IsOptional, IsString, Matches, MATCHES } from "class-validator";
export class SignUpDto {
id_tipo_login: number;
@IsNotEmpty({message: 'Campo login obrigatório'})
login: string;
@IsNotEmpty({message: 'Campo senha obrigatório'})
//Mínimo de oito caracteres, pelo menos uma letra maiúscula, uma letra minúscula, um número e um caractere especial:
@Matches(/^(?=.*[a-z])(?=.*[A-Z])(?=.*\d)(?=.*[@$!%*?&])[A-Za-z\d@$!%*?&]{8,}$/,{
message: 'Senha inválida, verifique as solicitações para criar a senha!'
})
password: string;
@IsNotEmpty({message: 'Campo nome obrigatório'})
name: string;
@IsNotEmpty({message: 'Campo email obrigatório'})
email: string;
@IsString()
@IsOptional()
observacao: string;
@IsOptional()
id_login_insert: number;
dt_insert: string;
} |
// Server-only test harness: renders barcodes to a canvas and shells out to
// the zbar decoder to verify they round-trip.
if (Meteor.isServer) {
var child_process = Npm.require("child_process");
var fs = Npm.require("fs");
var path = Npm.require("path");
var Canvas = Npm.require("canvas");
// Await a program to close, returning its exit code.
var awaitCloseAsync = function(x, callback) {
x.on("close", function(code) { callback(null, code); });
};
// Make it synchronous (Meteor fiber-based wrapper).
var awaitClose = Meteor.wrapAsync(awaitCloseAsync);
// The zbarimg binary and its shared libraries are expected in ./zbar under
// the project root; fail fast if it is missing.
var zbar = path.join(process.env.PWD, "zbar");
var zbarimg = path.join(zbar, "zbarimg");
if (!fs.existsSync(zbarimg)) {
throw new Error("Tests require zbarimg at '" + zbarimg + "'");
}
// Run zbar, looking for a barcode.
//
// @param file {String} File name. Must be an absolute path.
// @return { stdout, stderr, code }
var runZbar = function(file) {
// Run zbar on the given file; LD_LIBRARY_PATH points at the bundled libs.
var child = child_process.execFile(zbarimg, [file], {
env: {
"LD_LIBRARY_PATH": zbar
}
});
// Read stdout and stderr.
// NOTE(review): streamToBuffer is defined elsewhere in this package —
// presumably collects a stream synchronously via wrapAsync; confirm.
var stdout = streamToBuffer(child.stdout);
var stderr = streamToBuffer(child.stderr);
// Get the exit code (blocks until the process closes)
var code = awaitClose(child);
return {
stdout: stdout,
stderr: stderr,
code: code
};
};
// Temporarily write an image to file, run zbar on it, and then delete it.
//
// @param buffer {Buffer} PNG data buffer.
// @param options {Object} Optional: { name, save } — custom file name, and
//   whether to keep the file on disk after scanning (default: delete it).
// @return { stdout, stderr, code }
var getBarcodes = function(buffer, options) {
if (!options) {
options = { };
}
var name = options.name || "test-" + Random.id(8) + ".png";
var save = options.save || false;
var file = path.join(process.env.PWD, name);
fs.writeFileSync(file, buffer);
var info = runZbar(file);
if (!save) {
fs.unlinkSync(file);
}
return info;
};
// For now, save test images to rootdir.
var rootDir = process.env.PWD;
// Checksum-only test: no rendering, just the Code 128 check digit.
Tinytest.add("Code 128 - Checksum(\"BarCode 1\") == 33", function(test) {
var encoded = bardcode.encodeCode128("BarCode 1");
test.equal(encoded.checksum, 33);
});
// Render a Code 128 barcode and verify zbar decodes it back verbatim.
Tinytest.add("Code 128 - Test 1", function(test) {
var canvas = new Canvas(600, 100);
var g = canvas.getContext("2d");
g.fillStyle = "white";
g.fillRect(0, 0, 600, 100);
drawBarcode(g, "Code 128 test 1", {
width: 600,
height: 100,
quietZoneSize: 10
});
var zbarOutput = getBarcodes(canvas.toBuffer());
test.equal(zbarOutput.code, 0);
var output = zbarOutput.stdout.trim();
var lines = output.split("\n");
test.equal(lines.length, 1);
test.isTrue(/^CODE-128:(.*)$/.test(lines[0]));
test.equal(RegExp.$1, "Code 128 test 1");
});
// Draw barcodes rotated and anchored at each corner/center of a 1000x1000
// canvas, plus one unrotated, and check zbar still reads all six.
Tinytest.add("Code 128 - at angles", function(test) {
var canvas = new Canvas(1000, 1000);
var g = canvas.getContext("2d");
g.fillStyle = "white";
g.fillRect(0, 0, 1000, 1000);
g.lineWidth = 1;
var i = 100;
drawBarcode(g, "a", {
angle: 10*i,
x: 100,
y: 100,
horizontalAlign: "left",
verticalAlign: "top"
});
drawBarcode(g, "b", {
angle: 10*i,
text: " ",
x: 900,
y: 100,
horizontalAlign: "right",
verticalAlign: "top"
});
drawBarcode(g, "c", {
angle: 10*i,
x: 500,
y: 500,
horizontalAlign: "center",
verticalAlign: "middle"
});
drawBarcode(g, "d", {
angle: 10*i,
x: 100,
y: 900,
horizontalAlign: "left",
verticalAlign: "bottom"
});
drawBarcode(g, "e", {
angle: 10*i,
x: 900,
y: 900,
horizontalAlign: "right",
verticalAlign: "bottom"
});
drawBarcode(g, "test", {
x: 300,
y: 300,
width: 140,
height: 100,
quietZoneSize: 0
});
var zbarOutput = getBarcodes(canvas.toBuffer());
test.equal(zbarOutput.code, 0);
var output = zbarOutput.stdout.trim();
var lines = output.split("\n");
test.equal(lines.length, 6);
test.isTrue(lines.indexOf("CODE-128:a") >= 0);
test.isTrue(lines.indexOf("CODE-128:b") >= 0);
test.isTrue(lines.indexOf("CODE-128:c") >= 0);
test.isTrue(lines.indexOf("CODE-128:d") >= 0);
test.isTrue(lines.indexOf("CODE-128:e") >= 0);
test.isTrue(lines.indexOf("CODE-128:test") >= 0);
});
// A newline in the input exercises the SHIFT into code set A; zbar reports
// the decoded value as two output lines.
Tinytest.add("Code 128 - test SHIFT A", function(test) {
var canvas = new Canvas(300, 100);
var g = canvas.getContext("2d");
g.fillStyle = "white";
g.fillRect(0, 0, 300, 100);
drawBarcode(g, "Test\nThree", {
maxWidth: 300,
height: 100,
quietZoneSize: 10
});
var zbarOutput = getBarcodes(canvas.toBuffer());
test.equal(zbarOutput.code, 0);
var output = zbarOutput.stdout.trim();
var lines = output.split("\n");
test.equal(lines.length, 2);
test.equal(lines[0], "CODE-128:Test");
test.equal(lines[1], "Three");
});
// ITF encodes digit pairs, so an odd-length input gets a leading zero.
Tinytest.add("ITF - Odd Char Count", function(test) {
var canvas = new Canvas(300, 100);
var g = canvas.getContext("2d");
g.fillStyle = "white";
g.fillRect(0, 0, 300, 100);
drawBarcode(g, "04004", {
type: "ITF",
maxWidth: 300,
height: 100,
quietZoneSize: 10
});
var zbarOutput = getBarcodes(canvas.toBuffer());
test.equal(zbarOutput.code, 0);
var output = zbarOutput.stdout.trim();
var lines = output.split("\n");
test.equal(lines.length, 1);
test.equal(lines[0], "I2/5:004004");
});
// Even-length ITF input is encoded as-is.
Tinytest.add("ITF - Even Char Count", function(test) {
var canvas = new Canvas(300, 100);
var g = canvas.getContext("2d");
g.fillStyle = "white";
g.fillRect(0, 0, 300, 100);
drawBarcode(g, "11223344", {
type: "ITF",
maxWidth: 300,
height: 100,
quietZoneSize: 10
});
var zbarOutput = getBarcodes(canvas.toBuffer());
test.equal(zbarOutput.code, 0);
var output = zbarOutput.stdout.trim();
var lines = output.split("\n");
test.equal(lines.length, 1);
test.equal(lines[0], "I2/5:11223344");
});
// Render four rows covering the whole Code 39 character set and verify zbar
// decodes each row. (Canvas is 1000x1000 but only a 400x550 region is used.)
Tinytest.add("Code 39 - Characters", function(test) {
var canvas = new Canvas(1000, 1000);
var g = canvas.getContext("2d");
g.fillStyle = "white";
g.fillRect(0, 0, 400, 550);
drawBarcode(g, "0123456789", {
type: "Code 39",
x: 0,
y: 0,
maxWidth: 400,
height: 100,
quietZoneSize: 10
});
drawBarcode(g, "ABCDEFGHIJ", {
type: "Code 39",
x: 0,
y: 150,
maxWidth: 400,
height: 100,
quietZoneSize: 10
});
drawBarcode(g, "KLMNOPQRST", {
type: "Code 39",
x: 0,
y: 300,
maxWidth: 400,
height: 100,
quietZoneSize: 10
});
drawBarcode(g, "UVW -.XYZ", {
type: "Code 39",
x: 0,
y: 450,
maxWidth: 400,
height: 100,
quietZoneSize: 10
});
var zbarOutput = getBarcodes(canvas.toBuffer());
test.equal(zbarOutput.code, 0);
var output = zbarOutput.stdout.trim();
var lines = output.split("\n");
test.equal(lines.length, 4);
test.isTrue(lines.indexOf("CODE-39:0123456789") >= 0);
test.isTrue(lines.indexOf("CODE-39:ABCDEFGHIJ") >= 0);
test.isTrue(lines.indexOf("CODE-39:KLMNOPQRST") >= 0);
test.isTrue(lines.indexOf("CODE-39:UVW -.XYZ") >= 0);
});
// Classic Wikipedia example rendered with a fixed overall width.
Tinytest.add("Code 39 - WIKIPEDIA", function(test) {
var canvas = new Canvas(1000, 1000);
var g = canvas.getContext("2d");
g.fillStyle = "white";
g.fillRect(0, 0, 800, 300);
drawBarcode(g, "WIKIPEDIA", {
type: "Code 39",
x: 0,
y: 0,
width: 800,
height: 300,
quietZoneSize: 10
});
var zbarOutput = getBarcodes(canvas.toBuffer());
test.equal(zbarOutput.code, 0);
var output = zbarOutput.stdout.trim();
var lines = output.split("\n");
test.equal(lines.length, 1);
test.isTrue(lines.indexOf("CODE-39:WIKIPEDIA") >= 0);
});
// EAN check-digit computations (no rendering).
Tinytest.add("EAN - checksum(\"400638133393\") == 1", function(test) {
var encodeData = bardcode.encodeEAN("400638133393");
test.equal(encodeData.checksum, 1);
});
Tinytest.add("EAN - checksum(\"846823000342\") == 0", function(test) {
var encodeData = bardcode.encodeEAN("846823000342");
test.equal(encodeData.checksum, 0);
});
// Second argument true = input already carries its check digit; verify it.
Tinytest.add("EAN - valid checksum(\"846823000342\")", function(test) {
var encodeData = bardcode.encodeEAN("846823000342", true);
test.equal(encodeData.checksum, 2);
});
Tinytest.add("EAN - invalid checksum(\"846823000344\")", function(test) {
test.throws(
function(){ bardcode.encodeEAN("846823000344", true); },
"Invalid checksum."
);
});
// EAN-8 style short input.
Tinytest.add("EAN - checksum(\"9638507\") == 4", function(test) {
var encodeData = bardcode.encodeEAN("9638507");
test.equal(encodeData.checksum, 4);
});
// UPC-A with the check digit already included in the input.
Tinytest.add("EAN - UPC-A, precalculated checksum", function(test) {
var canvas = new Canvas(400, 100);
var g = canvas.getContext("2d");
g.fillStyle = "white";
g.fillRect(0, 0, 400, 100);
drawBarcode(g, "846823000342", {
type: "UPC-A",
hasChecksum: true,
maxWidth: 400,
height: 100,
quietZoneSize: 10
});
var zbarOutput = getBarcodes(canvas.toBuffer());
test.equal(zbarOutput.code, 0);
var output = zbarOutput.stdout.trim();
var lines = output.split("\n");
test.equal(lines.length, 1);
// UPC-A is EAN-13 with a leading zero
test.equal(lines[0], "EAN-13:0846823000342");
});
// UPC-A with the check digit computed by the encoder.
Tinytest.add("EAN - UPC-A", function(test) {
var canvas = new Canvas(400, 100);
var g = canvas.getContext("2d");
g.fillStyle = "white";
g.fillRect(0, 0, 400, 100);
drawBarcode(g, "84682300034", {
type: "UPC-A",
maxWidth: 400,
height: 100,
quietZoneSize: 10
});
var zbarOutput = getBarcodes(canvas.toBuffer());
test.equal(zbarOutput.code, 0);
var output = zbarOutput.stdout.trim();
var lines = output.split("\n");
test.equal(lines.length, 1);
// UPC-A is EAN-13 with a leading zero
test.equal(lines[0], "EAN-13:0846823000342");
});
// EAN-13: 12 digits in, check digit appended by the encoder.
Tinytest.add("EAN - EAN-13", function(test) {
var canvas = new Canvas(400, 100);
var g = canvas.getContext("2d");
g.fillStyle = "white";
g.fillRect(0, 0, 400, 100);
drawBarcode(g, "590123412345", {
type: "EAN-13",
maxWidth: 400,
height: 100,
quietZoneSize: 10
});
var zbarOutput = getBarcodes(canvas.toBuffer());
test.equal(zbarOutput.code, 0);
var output = zbarOutput.stdout.trim();
var lines = output.split("\n");
test.equal(lines.length, 1);
test.equal(lines[0], "EAN-13:5901234123457");
});
// EAN-8: 7 digits in, check digit appended by the encoder.
Tinytest.add("EAN - EAN-8", function(test) {
var canvas = new Canvas(400, 100);
var g = canvas.getContext("2d");
g.fillStyle = "white";
g.fillRect(0, 0, 400, 100);
drawBarcode(g, "9638507", {
type: "EAN-8",
maxWidth: 400,
height: 100,
quietZoneSize: 10
});
var zbarOutput = getBarcodes(canvas.toBuffer());
test.equal(zbarOutput.code, 0);
var output = zbarOutput.stdout.trim();
var lines = output.split("\n");
test.equal(lines.length, 1);
test.equal(lines[0], "EAN-8:96385074");
});
// FIM and Codabar are not decodable by zbar — just check drawing succeeds.
Tinytest.add("FIM - test no crash", function(test) {
var canvas = new Canvas(400, 100);
var g = canvas.getContext("2d");
g.fillStyle = "white";
g.fillRect(0, 0, 400, 100);
drawBarcode(g, "C", {
type: "FIM",
maxWidth: 400,
height: 100,
moduleWidth: 2,
quietZoneSize: 10
});
});
Tinytest.add("Codabar - test no crash", function(test) {
var canvas = new Canvas(400, 100);
var g = canvas.getContext("2d");
g.fillStyle = "white";
g.fillRect(0, 0, 400, 100);
drawBarcode(g, "31117013206375", {
type: "Codabar",
maxWidth: 400,
height: 100,
quietZoneSize: 10
});
});
// SVG target: output is an SVG string, which zbar can also scan directly.
Tinytest.add("SVG - test", function(test) {
var svg = drawBarcode("svg", "3117820", {
maxWidth: 400,
height: 100,
quietZoneSize: 8
});
test.equal(svg.indexOf("<svg"), 0);
var zbarOutput = getBarcodes(svg, {
name: "test-" + Random.id(8) + ".svg"
});
test.equal(zbarOutput.code, 0);
var output = zbarOutput.stdout.trim();
var lines = output.split("\n");
test.equal(lines.length, 1);
test.equal(lines[0], "CODE-128:3117820");
});
}
|
<reponame>Mbein03/marvel-champions-conquest<filename>server/migrations/20220217192820_create_cards_table.js<gh_stars>0
/**
* @param { import("knex").Knex } knex
* @returns { Promise<void> }
*/
exports.up = function (knex) {
return knex.schema.createTable('cards', (table) => {
table.increments('card_id'),
table.string('name').index().notNullable(),
table.string('faction').notNullable(),
table.string('tier'),
table.string('marvel_cdb_id'),
table.string('image_path'),
table.integer('qty').unsigned().notNullable(),
});
};
/**
 * Rollback for `up`: drops the `cards` table if it exists.
 *
 * @param { import("knex").Knex } knex
 * @returns { Promise<void> }
 */
exports.down = function (knex) {
return knex.schema.dropTableIfExists('cards');
};
|
//go:build go1.9
// +build go1.9
package session
import (
"crypto/x509"
"io"
"net/http"
"os"
"strings"
"testing"
"time"
"github.com/aws/aws-sdk-go/awstesting"
)
// TestNewSession_WithClientTLSCert verifies that a session configured with a
// client TLS certificate — via environment variables or via Options readers —
// can complete a mutual-TLS request against a test server, and that missing
// or truncated cert/key material surfaces the expected error.
func TestNewSession_WithClientTLSCert(t *testing.T) {
type testCase struct {
// Params
// setup configures the session (env vars and/or Options) from freshly
// generated cert/key files, returning a cleanup func to run afterwards.
setup func(certFilename, keyFilename string) (Options, func(), error)
ExpectErr string
}
cases := map[string]testCase{
"env": {
setup: func(certFilename, keyFilename string) (Options, func(), error) {
os.Setenv(useClientTLSCert[0], certFilename)
os.Setenv(useClientTLSKey[0], keyFilename)
return Options{}, func() {}, nil
},
},
"env file not found": {
setup: func(certFilename, keyFilename string) (Options, func(), error) {
os.Setenv(useClientTLSCert[0], "some-cert-file-not-exists")
os.Setenv(useClientTLSKey[0], "some-key-file-not-exists")
return Options{}, func() {}, nil
},
ExpectErr: "LoadClientTLSCertError",
},
// Cert and key must be supplied together; either alone is an error.
"env cert file only": {
setup: func(certFilename, keyFilename string) (Options, func(), error) {
os.Setenv(useClientTLSCert[0], certFilename)
return Options{}, func() {}, nil
},
ExpectErr: "must both be provided",
},
"env key file only": {
setup: func(certFilename, keyFilename string) (Options, func(), error) {
os.Setenv(useClientTLSKey[0], keyFilename)
return Options{}, func() {}, nil
},
ExpectErr: "must both be provided",
},
"session options": {
setup: func(certFilename, keyFilename string) (Options, func(), error) {
certFile, err := os.Open(certFilename)
if err != nil {
return Options{}, nil, err
}
keyFile, err := os.Open(keyFilename)
if err != nil {
return Options{}, nil, err
}
return Options{
ClientTLSCert: certFile,
ClientTLSKey: keyFile,
}, func() {
certFile.Close()
keyFile.Close()
}, nil
},
},
// Truncating the cert (via LimitReader to half its size) must fail the
// x509 key-pair load.
"session cert load error": {
setup: func(certFilename, keyFilename string) (Options, func(), error) {
certFile, err := os.Open(certFilename)
if err != nil {
return Options{}, nil, err
}
keyFile, err := os.Open(keyFilename)
if err != nil {
return Options{}, nil, err
}
stat, _ := certFile.Stat()
return Options{
ClientTLSCert: io.LimitReader(certFile, stat.Size()/2),
ClientTLSKey: keyFile,
}, func() {
certFile.Close()
keyFile.Close()
}, nil
},
ExpectErr: "unable to load x509 key pair",
},
// Same, but truncating the key instead.
"session key load error": {
setup: func(certFilename, keyFilename string) (Options, func(), error) {
certFile, err := os.Open(certFilename)
if err != nil {
return Options{}, nil, err
}
keyFile, err := os.Open(keyFilename)
if err != nil {
return Options{}, nil, err
}
stat, _ := keyFile.Stat()
return Options{
ClientTLSCert: certFile,
ClientTLSKey: io.LimitReader(keyFile, stat.Size()/2),
}, func() {
certFile.Close()
keyFile.Close()
}, nil
},
ExpectErr: "unable to load x509 key pair",
},
}
for name, c := range cases {
t.Run(name, func(t *testing.T) {
// Isolate environment variables for this case and restore afterwards.
restoreEnvFn := initSessionTestEnv()
defer restoreEnvFn()
certFilename, keyFilename, err := awstesting.CreateClientTLSCertFiles()
if err != nil {
t.Fatalf("failed to create client certificate files, %v", err)
}
defer func() {
if err := awstesting.CleanupTLSBundleFiles(certFilename, keyFilename); err != nil {
t.Errorf("failed to cleanup client TLS cert files, %v", err)
}
}()
opts, cleanup, err := c.setup(certFilename, keyFilename)
if err != nil {
t.Fatalf("test case failed setup, %v", err)
}
if cleanup != nil {
defer cleanup()
}
// Test server that requires a client certificate and returns 200.
server, err := awstesting.NewTLSClientCertServer(http.HandlerFunc(
func(w http.ResponseWriter, r *http.Request) {
w.WriteHeader(200)
}))
if err != nil {
t.Fatalf("failed to load session, %v", err)
}
server.StartTLS()
defer server.Close()
// Give the server a chance to start.
time.Sleep(time.Second)
// Load SDK session with options configured.
sess, err := NewSessionWithOptions(opts)
if len(c.ExpectErr) != 0 {
if err == nil {
t.Fatalf("expect error, got none")
}
if e, a := c.ExpectErr, err.Error(); !strings.Contains(a, e) {
t.Fatalf("expect error to contain %v, got %v", e, a)
}
return
}
if err != nil {
t.Fatalf("expect no error, got %v", err)
}
// Clients need to add ca bundle for test service.
p := x509.NewCertPool()
p.AddCert(server.Certificate())
client := sess.Config.HTTPClient
client.Transport.(*http.Transport).TLSClientConfig.RootCAs = p
// Send request
req, _ := http.NewRequest("GET", server.URL, nil)
resp, err := client.Do(req)
if err != nil {
t.Fatalf("failed to send request, %v", err)
}
if e, a := 200, resp.StatusCode; e != a {
t.Errorf("expect %v status code, got %v", e, a)
}
})
}
}
|
<filename>src/main/scala/http4s/extend/package.scala
package http4s
import http4s.extend.types._
/**
 * Convenience aliases and value exports for the extend library so users can
 * `import http4s.extend._` and get the newtypes and type operators directly.
 */
package object extend {
// Infix alias for Either.
type |[A, B] = Either[A, B]
// By-name natural transformation between effect types.
type ~~>[F[_], G[_]] = ByNameNt.~~>[F, G]
// Each pair below re-exports a newtype: the value (its constructor module)
// and the corresponding abstract type member.
val ExceptionDisplay = MkExceptionDisplay
type ExceptionDisplay = ExceptionDisplay.T
type Void = types.Void
val AndBoolean = MkAndBoolean
type AndBoolean = AndBoolean.T
val OrBoolean = MkOrBoolean
type OrBoolean = OrBoolean.T
val NonFailingIO = MkNonFailingIO
type NonFailingIO[A] = NonFailingIO.T[A]
}
/**
* Abstractions of geospatial geometries.
*/
package io.opensphere.core.common.geospatial.model.interfaces;
|
<reponame>Ayvytr/MvpCommons
/*
******************************* Copyright (c)*********************************\
**
** (c) Copyright 2015, 蒋朋, china, qd. sd
** All Rights Reserved
**
** By()
**
**
**-----------------------------------版本信息------------------------------------
** 版 本: V0.1
**
**------------------------------------------------------------------------------
********************************End of Head************************************\
*/
package com.base.utils;
import android.annotation.TargetApi;
import android.content.ContentUris;
import android.content.Context;
import android.database.Cursor;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.net.Uri;
import android.os.Environment;
import android.provider.DocumentsContract;
import android.provider.MediaStore;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
/**
*
* 图片处理工具类
*/
/**
 * Image helper utilities: saving bitmaps to disk, resolving content Uris to
 * absolute file paths (Android 4.4+ document Uris included), and ratio-based
 * downscaling for thumbnails.
 */
public class ImageUtil {

    /**
     * Saves a bitmap as PNG to the given path, replacing any existing file.
     *
     * @param bm      bitmap to save
     * @param picName absolute file path to write to
     * @return true on success, false if writing failed
     */
    public static boolean saveBitmap(Bitmap bm, String picName) {
        File f = new File(picName);
        if (f.exists()) {
            f.delete();
        }
        // try-with-resources guarantees the stream is closed even when
        // compress()/flush() throws — the original leaked it in that case.
        try (FileOutputStream out = new FileOutputStream(f)) {
            bm.compress(Bitmap.CompressFormat.PNG, 100, out);
            out.flush();
            return true;
        } catch (IOException e) {
            e.printStackTrace();
            return false;
        }
    }

    /**
     * Resolves an image Uri to an absolute file path, handling the
     * document-style Uris introduced in Android 4.4 (KitKat) as well as
     * plain content:// and file:// Uris.
     *
     * @param context  context used to query content resolvers
     * @param imageUri the Uri to resolve
     * @return the absolute path, or null if it cannot be resolved
     */
    @TargetApi(19)
    public static String getImageAbsolutePath(Context context, Uri imageUri) {
        if (context == null || imageUri == null)
            return null;
        if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.KITKAT && DocumentsContract.isDocumentUri(context, imageUri)) {
            if (isExternalStorageDocument(imageUri)) {
                // Document id looks like "<type>:<relative path>".
                String docId = DocumentsContract.getDocumentId(imageUri);
                String[] split = docId.split(":");
                String type = split[0];
                if ("primary".equalsIgnoreCase(type)) {
                    return Environment.getExternalStorageDirectory() + "/" + split[1];
                }
            } else if (isDownloadsDocument(imageUri)) {
                String id = DocumentsContract.getDocumentId(imageUri);
                Uri contentUri = ContentUris.withAppendedId(Uri.parse("content://downloads/public_downloads"), Long.valueOf(id));
                return getDataColumn(context, contentUri, null, null);
            } else if (isMediaDocument(imageUri)) {
                String docId = DocumentsContract.getDocumentId(imageUri);
                String[] split = docId.split(":");
                String type = split[0];
                Uri contentUri = null;
                if ("image".equals(type)) {
                    contentUri = MediaStore.Images.Media.EXTERNAL_CONTENT_URI;
                } else if ("video".equals(type)) {
                    contentUri = MediaStore.Video.Media.EXTERNAL_CONTENT_URI;
                } else if ("audio".equals(type)) {
                    contentUri = MediaStore.Audio.Media.EXTERNAL_CONTENT_URI;
                }
                String selection = MediaStore.Images.Media._ID + "=?";
                String[] selectionArgs = new String[]{split[1]};
                return getDataColumn(context, contentUri, selection, selectionArgs);
            }
        } // MediaStore (and general)
        else if ("content".equalsIgnoreCase(imageUri.getScheme())) {
            // Google Photos exposes the path as the last Uri segment.
            if (isGooglePhotosUri(imageUri))
                return imageUri.getLastPathSegment();
            return getDataColumn(context, imageUri, null, null);
        }
        // File
        else if ("file".equalsIgnoreCase(imageUri.getScheme())) {
            return imageUri.getPath();
        }
        return null;
    }

    /**
     * Queries the DATA column of a media Uri, returning the file path it
     * points at, or null when the query yields no row.
     */
    public static String getDataColumn(Context context, Uri uri, String selection, String[] selectionArgs) {
        Cursor cursor = null;
        String column = MediaStore.Images.Media.DATA;
        String[] projection = {column};
        try {
            cursor = context.getContentResolver().query(uri, projection, selection, selectionArgs, null);
            if (cursor != null && cursor.moveToFirst()) {
                int index = cursor.getColumnIndexOrThrow(column);
                return cursor.getString(index);
            }
        } finally {
            if (cursor != null)
                cursor.close();
        }
        return null;
    }

    /**
     * @param uri The Uri to check.
     * @return Whether the Uri authority is ExternalStorageProvider.
     */
    public static boolean isExternalStorageDocument(Uri uri) {
        return "com.android.externalstorage.documents".equals(uri.getAuthority());
    }

    /**
     * @param uri The Uri to check.
     * @return Whether the Uri authority is DownloadsProvider.
     */
    public static boolean isDownloadsDocument(Uri uri) {
        return "com.android.providers.downloads.documents".equals(uri.getAuthority());
    }

    /**
     * @param uri The Uri to check.
     * @return Whether the Uri authority is MediaProvider.
     */
    public static boolean isMediaDocument(Uri uri) {
        return "com.android.providers.media.documents".equals(uri.getAuthority());
    }

    /**
     * @param uri The Uri to check.
     * @return Whether the Uri authority is Google Photos.
     */
    public static boolean isGooglePhotosUri(Uri uri) {
        return "com.google.android.apps.photos.content".equals(uri.getAuthority());
    }

    /**
     * Downscales {@code image} so it roughly fits within pixelW x pixelH,
     * using a JPEG round-trip plus inSampleSize decoding. Used to build
     * thumbnails.
     *
     * @param image  source bitmap
     * @param pixelW target pixel width
     * @param pixelH target pixel height
     * @return the downscaled bitmap
     */
    public static Bitmap ratio(Bitmap image, float pixelW, float pixelH) {
        ByteArrayOutputStream os = new ByteArrayOutputStream();
        image.compress(Bitmap.CompressFormat.JPEG, 100, os);
        // If the compressed image exceeds 1MB, recompress at 50% quality to
        // avoid running out of memory when decoding below.
        if (os.toByteArray().length / 1024 > 1024) {
            os.reset(); // clear the buffer before recompressing
            image.compress(Bitmap.CompressFormat.JPEG, 50, os);
        }
        ByteArrayInputStream is = new ByteArrayInputStream(os.toByteArray());
        BitmapFactory.Options newOpts = new BitmapFactory.Options();
        // First pass: decode bounds only, to learn the raw dimensions.
        newOpts.inJustDecodeBounds = true;
        newOpts.inPreferredConfig = Bitmap.Config.RGB_565;
        Bitmap bitmap = BitmapFactory.decodeStream(is, null, newOpts);
        newOpts.inJustDecodeBounds = false;
        int w = newOpts.outWidth;
        int h = newOpts.outHeight;
        float hh = pixelH;
        float ww = pixelW;
        // Sample-size ratio: scale by whichever dimension dominates.
        int be = 1; // 1 means no scaling
        if (w >= h && w >= ww) {
            be = (int) (newOpts.outWidth / ww);
        } else if (w < h && h > hh) {
            be = (int) (newOpts.outHeight / hh);
        }
        if (be <= 0) be = 1;
        newOpts.inSampleSize = be;
        // Second pass: decode for real with the computed sample size (the
        // first stream was consumed, so a fresh one is needed).
        is = new ByteArrayInputStream(os.toByteArray());
        bitmap = BitmapFactory.decodeStream(is, null, newOpts);
        return bitmap;
    }
}
|
#!/usr/bin/env bash
# Copyright (c) Facebook, Inc. and its affiliates.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##############################################################################
# Extract frames from videos.
IN_DATA_DIR="../../data/ava/videos_15min"
OUT_DATA_DIR="../../data/ava/frames"
if [[ ! -d "${OUT_DATA_DIR}" ]]; then
  echo "${OUT_DATA_DIR} doesn't exist. Creating it.";
  mkdir -p "${OUT_DATA_DIR}"
fi
# Iterate with a glob instead of parsing `ls` output, which breaks on
# filenames containing whitespace or glob characters.
for video in "${IN_DATA_DIR}"/*
do
  # Strip the directory, then the extension (.webm is 5 chars, others 4).
  video_name=${video##*/}
  if [[ $video_name = *".webm" ]]; then
    video_name=${video_name::-5}
  else
    video_name=${video_name::-4}
  fi
  out_video_dir=${OUT_DATA_DIR}/${video_name}/
  mkdir -p "${out_video_dir}"
  out_name="${out_video_dir}/${video_name}_%06d.jpg"
  # 30 fps, best JPEG quality, one numbered frame per image.
  ffmpeg -i "${video}" -r 30 -q:v 1 "${out_name}"
done
|
<filename>backend/__tests__/factories.js
import faker from 'faker';
import bcrypt from 'bcrypt';
import { factory } from 'factory-girl';
import User from '../src/app/models/User';
import Student from '../src/app/models/Student';
// Test-data factories (factory-girl) for the User and Student models.
// User is fixed (an admin with a pre-hashed password, cost factor 8).
factory.define('User', User, {
name: 'Administrador',
email: '<EMAIL>',
password_hash: bcrypt.hashSync('<PASSWORD>', 8)
});
// Student attributes are randomized per instance via faker/Math.random.
factory.define('Student', Student, {
name: faker.name.findName(),
email: faker.internet.email(),
age: Math.floor(Math.random() * 100),
weight: Math.floor(Math.random() * 450),
height: Math.floor(Math.random() * 250)
});
export default factory;
|
import { NuxtCommand } from '@nuxt/cli'
import consola from 'consola'
import runCommand from 'src/cli'
import * as utils from 'src/utils'
import { resetUtilMocks as _resetUtilMocks } from 'test-utils'
jest.mock('src/utils')
const resetUtilMocks = utilNames => _resetUtilMocks(utils, utilNames)
jest.mock('@nuxt/cli')
jest.mock('src/cli/commands')
// Tests for the CLI wrapper: NuxtCommand.run is mocked so we can capture the
// command descriptor passed to it and drive its run() handler directly.
describe('cli', () => {
beforeAll(() => resetUtilMocks())
afterEach(() => jest.resetAllMocks())
test('runCommand applies options to NuxtCommand', () => {
let command
NuxtCommand.run.mockImplementation(cmd => (command = cmd))
runCommand({
name: 'test-name',
description: 'test-description'
})
expect(command.name).toEqual('test-name')
expect(command.description).toEqual('test-description')
expect(command.usage).toEqual(expect.stringContaining('test-name'))
expect(command.run).toBeInstanceOf(Function)
NuxtCommand.run.mockReset()
})
// No positional argv at all -> fatal "Unrecognized command ''".
test('runCommand.run logs fatal error without command', async () => {
let command
NuxtCommand.run.mockImplementation(cmd => (command = cmd))
runCommand()
await command.run({
argv: {
_: []
}
})
expect(consola.fatal).toHaveBeenCalledTimes(1)
expect(consola.fatal).toHaveBeenCalledWith(`Unrecognized command ''`)
NuxtCommand.run.mockReset()
})
// Unknown sub-command name -> fatal with the offending name.
test('runCommand.run logs fatal error on unknown command', async () => {
let command
NuxtCommand.run.mockImplementation(cmd => (command = cmd))
runCommand()
await command.run({
argv: {
_: ['does-not-exists']
}
})
expect(consola.fatal).toHaveBeenCalledTimes(1)
expect(consola.fatal).toHaveBeenCalledWith(`Unrecognized command 'does-not-exists'`)
NuxtCommand.run.mockReset()
})
// Known sub-command ('eject') runs without logging a fatal error.
test('runCommand.run does not log error on known command', async () => {
let command
NuxtCommand.run.mockImplementation(cmd => (command = cmd))
runCommand()
await command.run({
getNuxtConfig: jest.fn(),
getNuxt: jest.fn(),
argv: {
_: ['eject']
}
})
expect(consola.fatal).not.toHaveBeenCalled()
NuxtCommand.run.mockReset()
})
})
|
<filename>src/store/mutations.js
import { ADD_COUNTER, ADD_TO_CART } from "./mutation-types";
// Vuex mutations module.
export default {
// Note 1: state must only be modified through mutations.
// Note 2: each mutation should do one small, single thing.
[ADD_COUNTER](state, payload) {
payload.count += 1;
},
[ADD_TO_CART](state, payload) {
// When an item is added to the cart, give it a `checked` flag used to
// mark selection (selected by default).
payload.checked = true;
state.cartList.push(payload);
}
};
|
var keyMirror = require('keymirror');
// Shared constants for the block explorer UI.
module.exports = {
// Page sizes for fetching and listing blocks.
BlockFetchLimit: 20,
BlockListLimit: 20,
// Flux action type names; keyMirror maps each key to itself as a string.
ActionTypes: keyMirror({
RECEIVE_LATEST_STATES: null,
POLLER_RECEIVED_SUCCESS: null,
POLLER_RECEIVED_FAILURE: null,
POLLER_STOPPED: null
})
};
|
CUDA_VISIBLE_DEVICES='0' python3 -u train.py --network y1 --loss softmax --dataset vgg
|
#!/bin/bash
# Clone the laptop panel (LVDS1) onto the external output given as $1.
# Relies on helpers (checkError, notify, arg checking) sourced from
# chaos-shell.sh in the same directory.
TITLE=DISPLAY_OUTPUT_CLONE
NOTIFY_TIME=5000
EXIT_THRESHOLD=5
ARG_COUNT=1
REQUIRED_USER=$USER
source ${0%/*}/chaos-shell.sh
# Make the laptop panel primary at its preferred mode.
LABEL=SET_LVDS1_PRIMARY
xrandr --output LVDS1 --auto --primary --preferred
checkError 10
# Mirror the external output onto LVDS1.
LABEL=SET_EXTERNAL_CLONE
xrandr --output $1 --same-as LVDS1 --auto --noprimary
checkError 10
sleep 2
# Restart the window manager so it picks up the new geometry.
command ${0%/*}/program_restartAwesome.sh
sleep 3
notify "Laptop set as primary screen, $1 set as clone"
|
# Install ccache and apply any PARAM_* ccache settings parsed from the
# command line by minicoin's parse-opts helper.
. /minicoin/util/parse-opts.sh $HOME "$@"
sudo apt-get install -y ccache
if [[ ! $(which ccache) ]]
then
>&2 echo "Failed to install ccache"
exit 1
fi
# Apply each parsed parameter as a ccache config key=value (as user vagrant).
for p in ${PARAMS[@]}
do
config="PARAM_$p"
value="${!config}"
# NOTE(review): the guard tests $PARAM_cache_dir but then sets $p=$value —
# looks like it should test "$value" instead; confirm intended behavior.
[ ! -z $PARAM_cache_dir ] && sudo -u vagrant ccache --set-config=$p=$value
done
sudo -u vagrant ccache --show-config || true
|
<reponame>InfraBlockchain/infra-did-resolver
import { VerificationMethod } from 'did-resolver'
// Default on-chain registry contract account and JSON-RPC endpoint.
export const DEFAULT_REGISTRY_CONTRACT = 'infradidregi'
export const DEFAULT_JSON_RPC = 'http://localhost:8888'
// Supported DID verification-method type identifiers.
export enum verificationMethodTypes {
EcdsaSecp256k1VerificationKey2019 = 'EcdsaSecp256k1VerificationKey2019',
EcdsaSecp256k1RecoveryMethod2020 = 'EcdsaSecp256k1RecoveryMethod2020',
Ed25519VerificationKey2018 = 'Ed25519VerificationKey2018',
RSAVerificationKey2018 = 'RSAVerificationKey2018',
X25519KeyAgreementKey2019 = 'X25519KeyAgreementKey2019',
}
// Verification method shape including deprecated key-encoding fields kept
// for backwards compatibility with older DID documents.
export interface LegacyVerificationMethod extends VerificationMethod {
/**@deprecated */
publicKeyHex?: string
/**@deprecated */
publicKeyBase64?: string
/**@deprecated */
publicKeyPem?: string
// eslint-disable-next-line @typescript-eslint/no-explicit-any
[x: string]: any
}
// Mapping from legacy attribute abbreviations to their full type names.
export const legacyAttrTypes: Record<string, string> = {
sigAuth: 'SignatureAuthentication2018',
veriKey: 'VerificationKey2018',
enc: 'KeyAgreementKey2019',
}
// Mapping of legacy algorithm names onto current verification method types.
export const legacyAlgoMap: Record<string, string> = {
/**@deprecated */
Secp256k1VerificationKey2018: verificationMethodTypes.EcdsaSecp256k1VerificationKey2019,
/**@deprecated */
Ed25519SignatureAuthentication2018: verificationMethodTypes.Ed25519VerificationKey2018,
/**@deprecated */
Secp256k1SignatureAuthentication2018: verificationMethodTypes.EcdsaSecp256k1VerificationKey2019,
//keep legacy mapping
RSAVerificationKey2018: verificationMethodTypes.RSAVerificationKey2018,
Ed25519VerificationKey2018: verificationMethodTypes.Ed25519VerificationKey2018,
X25519KeyAgreementKey2019: verificationMethodTypes.X25519KeyAgreementKey2019,
}
// Sentinel nonce value marking a pub-key DID as revoked on-chain.
export const INFRA_DID_NONCE_VALUE_FOR_REVOKED_PUB_KEY_DID: number = 65535
// Chain-id prefixes for the known InfraBlockchain networks.
export const knownInfraBlockchainNetworks: Record<string, string> = {
mainnet: '01',
yosemite: 'yos',
sentinel: 'sentinel',
}
export enum Errors {
/**
* The resolver has failed to construct the DID document.
* Please inspect the `DIDResolutionMetadata.message` to debug further.
*/
notFound = 'notFound',
/**
* The resolver does not know how to resolve the given DID. Most likely it is not a `did:infra`.
*/
invalidDid = 'invalidDid',
/**
* The resolver is misconfigured or is being asked to resolve a DID anchored on an unknown network
*/
unknownNetwork = 'unknownNetwork',
}
|
#!/bin/bash
# Run each example's cleanup script from inside its own directory.
# Subshells replace the original cd/cd.. chains: if a cd failed there,
# ./cleanup.sh would run in the wrong directory and the later `cd ..`
# would drift the working directory for every subsequent step.
for d in basic library generators/cmake generators/python
do
    ( cd "$d" && ./cleanup.sh )
done
|
#!/bin/bash
# Fix: shebang changed from /bin/sh — this script uses bash-only features
# (the 'function' keyword, '==' inside [ ], 'echo -e') which fail under
# strictly-POSIX shells such as dash.

# Check env vars that we know should be set to verify that everything is working.
# $1 = variable name (used in the error message), $2 = its value.
function verify {
    if [ "$2" == "" ]
    then
        echo -e "Error: $1 should be set but is not."
        exit 2
    fi
}

# If the container is running in the Horizon environment, then the Horizon platform env vars should all be there.
# Otherwise, assume it is running outside Horizon and running in a non-Horizon environment.
BASEURL=""
if [ "$HZN_AGREEMENTID" != "" ]
then
    verify "HZN_RAM" $HZN_RAM
    verify "HZN_CPUS" $HZN_CPUS
    verify "HZN_ARCH" $HZN_ARCH
    verify "HZN_NODE_ID" $HZN_NODE_ID
    verify "HZN_ORGANIZATION" $HZN_ORGANIZATION
    # verify "HZN_HASH" $HZN_HASH - Delete
    verify "HZN_EXCHANGE_URL" $HZN_EXCHANGE_URL
    verify "HZN_ESS_API_PROTOCOL" $HZN_ESS_API_PROTOCOL
    verify "HZN_ESS_API_ADDRESS" $HZN_ESS_API_ADDRESS
    verify "HZN_ESS_API_PORT" $HZN_ESS_API_PORT
    verify "HZN_ESS_AUTH" $HZN_ESS_AUTH
    verify "HZN_ESS_CERT" $HZN_ESS_CERT
    echo -e "All Horizon platform env vars verified."
    echo -e "Service is running on node $HZN_NODE_ID in org $HZN_ORGANIZATION"
    if [ "${HZN_PATTERN}" == "" ]
    then
        echo "Service is running in policy mode"
    else
        echo "Service is running in pattern mode: ${HZN_PATTERN}"
    fi
    # Assuming the API address is a unix socket file. HZN_ESS_API_PROTOCOL should be "unix".
    BASEURL='--unix-socket '${HZN_ESS_API_ADDRESS}' https://localhost/api/v1/objects/'
else
    echo -e "Running outside Horizon, skip Horizon platform env var checks."
fi

verify "MY_VAR1" $MY_VAR1
echo -e "All Agreement Service variables verified."

OBJECT_TYPE="model"
echo -e "Looking for file objects of type ${OBJECT_TYPE}"

# ${HZN_ESS_AUTH} is mounted to this container and contains a json file with the credentials for authenticating to the ESS.
USER=$(cat ${HZN_ESS_AUTH} | jq -r ".id")
PW=$(cat ${HZN_ESS_AUTH} | jq -r ".token")
# Passing basic auth creds in base64 encoded form (-u).
AUTH="-u ${USER}:${PW} "
# ${HZN_ESS_CERT} is mounted to this container and contains the client side SSL cert to talk to the ESS API.
CERT="--cacert ${HZN_ESS_CERT} "

FAILCOUNT=0

# There should be 2 files loaded into the CSS for this node and the node should be able to get them quickly. If not,
# there is a problem and the service will terminate, causing it to get into a restart loop in docker. This should be detected
# by the test automation and terminate the test in error.
while :
do
    if [ "$BASEURL" != "" ]
    then
        # First sync service call should pick up any objects received the last time we were started.
        echo -e "Retrieving sync service objects that have already been received."
        FILE_LOC="/e2edevuser/objects"
        mkdir -p ${FILE_LOC}
        # For each object, write the data into the local file system using the object ID as the file name. Then mark the object
        # as received so that a subsequent poll doesn't see the object again.
        OBJS=$(curl -sL ${AUTH}${CERT}${BASEURL}${OBJECT_TYPE}?received=true)
        BADRES=$(echo ${OBJS} | jq -r '.[].objectID')
        if [ "${BADRES}" == "" ]
        then
            echo "Error Return from object poll: ${OBJS}"
            exit 1
        fi
        # NOTE: this 'while read' runs in a pipeline subshell — it must not try
        # to update variables (e.g. FAILCOUNT) consumed outside the loop.
        echo ${OBJS} | jq -c '.[]' | while read i; do
            del=$(echo $i | jq -r '.deleted')
            id=$(echo $i | jq -r '.objectID')
            if [ "$del" == "true" ]
            then
                echo "Acknowledging that Object $id is deleted"
                ACKDEL=$(curl -sLX PUT ${AUTH}${CERT}${BASEURL}${OBJECT_TYPE}/${id}/deleted)
                rm -f ${FILE_LOC}/${id}
            else
                DATA=$(curl -sL -o ${FILE_LOC}/${id} ${AUTH}${CERT}${BASEURL}${OBJECT_TYPE}/${id}/data)
                RCVD=$(curl -sLX PUT ${AUTH}${CERT}${BASEURL}${OBJECT_TYPE}/${id}/received)
                echo -e "Received object: ${id}"
            fi
        done

        # There should be 2 files in the file sync service for this node. If not, there is a problem, exit the workload to fail the test.
        COUNT=$(ls ${FILE_LOC} | wc -l)
        COUNT_TARGET="2"
        if [ "${COUNT}" != "${COUNT_TARGET}" ]
        then
            echo -e "Found ${COUNT} files from the sync service in ${FILE_LOC}, there should be ${COUNT_TARGET}."
            if [ "$FAILCOUNT" -gt "1" ]
            then
                exit 1
            fi
            sleep 2
            FAILCOUNT=$((FAILCOUNT+1))
        else
            break
        fi
    else
        break
    fi
done

# Keep everything alive
while :
do
    echo -e "Service usehello running."
    if [ "$MY_VAR1" != "outside" ]
    then
        co=$(curl -sS http://${HZN_ARCH}_helloservice:8000)
        echo -e "Hello service: $co"
        cpuo=$(curl -sS http://${HZN_ARCH}_cpu:8347)
        echo -e "CPU Usage: $cpuo"
    fi

    if [ "$BASEURL" != "" ]
    then
        echo -e "Calling ESS to poll for new objects"
        # Pick up any newly added objects or notifications of changed or deleted objects since our initial poll.
        OBJS=$(curl -sL ${AUTH}${CERT}${BASEURL}${OBJECT_TYPE})
        echo "Full poll response: ${OBJS}"
        # Iterate over each returned object, it will be set into $i
        echo ${OBJS} | jq -c '.[]' | while read i; do
            # work with each returned object in $i
            del=$(echo $i | jq -r '.deleted')
            id=$(echo $i | jq -r '.objectID')
            if [ "$del" == "true" ]
            then
                echo "Acknowledging that Object $id is deleted"
                ACKDEL=$(curl -sLX PUT ${AUTH}${CERT}${BASEURL}${OBJECT_TYPE}/${id}/deleted)
                rm -f ${FILE_LOC}/${id}
            else
                # Assume we got a new object
                DATA=$(curl -sL -o ${FILE_LOC}/${id} ${AUTH}${CERT}${BASEURL}${OBJECT_TYPE}/${id}/data)
                RCVD=$(curl -sLX PUT ${AUTH}${CERT}${BASEURL}${OBJECT_TYPE}/${id}/received)
                echo -e "Got a new object: ${id}"
            fi
        done
    fi
    sleep 10
done
|
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
/**
* Created by alex on 6/7/17.
*/
require("./app/assets/css/app-styles.css");
//# sourceMappingURL=styles.js.map |
"""Markdown widgets"""
from django import forms
from django.utils.safestring import mark_safe
from django.utils.deconstruct import deconstructible
from json import dumps
@deconstructible
class MarkdownTextarea(forms.Textarea):
    """Plain ``<textarea>`` widget for editing Markdown content.

    Rendering is inherited unchanged from ``forms.Textarea``; the subclass
    exists so Markdown fields have a distinct widget type that richer
    editors (e.g. ``CodeMirror`` below) can extend.
    """
    pass
@deconstructible
class CodeMirror(MarkdownTextarea):
    """Markdown textarea enhanced with the CodeMirror browser editor."""

    def __init__(self, *args, **kwargs):
        """Widget that uses the `CodeMirror` editor

        :param mode: Syntax mode name.
        :param addons: List of addons (each element is a relative path
            to the addon, without `.js` extension. Example: `mode/overlay`)
        :param theme: Theme name.
        :param theme_path: Path to the theme file.
            Default is `s_markdown/codemirror/theme/<theme>.css`
        :param keymap: A keymap name.
        :param options: A dict of options that will be passed
            to the codemirror editor.
        :param additional_modes: Load additional modes for `overlay` extension.
        :param js_var_format: A name of the js variable in which
            the codemirror instance is saved.
        """
        self.mode = kwargs.pop('mode', 'markdown')
        self.addons = kwargs.pop('addons', [])
        self.theme = kwargs.pop('theme', 'default')
        self.theme_path = kwargs.pop('theme_path', 's_markdown/codemirror/theme/%s.css' % self.theme)
        self.keymap = kwargs.pop('keymap', None)
        # Fix: copy the caller's dict — updating it in place below would
        # mutate state shared with (or reused by) the caller.
        self.options = dict(kwargs.pop('options', {}))
        self.additional_modes = kwargs.pop('additional_modes', [])
        self.js_var_format = kwargs.pop('js_var_format', None)
        self.options.update(dict(mode=self.mode, theme=self.theme))
        # Options are serialized once here; later mutation of self.options
        # will not show up in rendered widgets.
        self.option_json = dumps(self.options)
        super(CodeMirror, self).__init__(*args, **kwargs)

    @property
    def media(self):
        """Construct a list of mediafiles required for this widget

        :return: `forms.Media` instance.
        """
        css = ['s_markdown/codemirror/lib/codemirror.css']
        if self.theme:
            css.append(self.theme_path)
        js = ['s_markdown/codemirror/lib/codemirror.js']
        js.extend('s_markdown/codemirror/addon/%s.js' % a for a in self.addons)
        if self.keymap:
            js.append('s_markdown/codemirror/keymap/%s.js' % self.keymap)
        if self.mode:
            js.append('s_markdown/codemirror/mode/%s/%s.js' % (self.mode, self.mode))
        for mode in self.additional_modes:
            js.append('s_markdown/codemirror/mode/%s/%s.js' % (mode, mode))
        return forms.Media(
            css=dict(all=css),
            js=js,
        )

    def render(self, name, value, attrs=None, renderer=None):
        """Render this widget

        :param name: Name of the widget.
        :param value: Current field value.
        :param attrs: Attributes of the widget.
        :param renderer: Form renderer; accepted for Django >= 2.1
            compatibility (Django passes it positionally) and ignored,
            since rendering is delegated to the parent textarea.
        :return: Rendered html.
        """
        if self.js_var_format is not None:
            js_var_bit = 'var %s = ' % (self.js_var_format % name)
        else:
            js_var_bit = ''
        output = [super(CodeMirror, self).render(name, value, attrs),
                  '<script type="text/javascript">'
                  '%sCodeMirror.fromTextArea('
                  'document.getElementById(%s), %s);'
                  '</script>' %
                  (js_var_bit, '"id_%s"' % name, self.option_json)]
        return mark_safe('\n'.join(output))
# Repo: ftheberge/Hypergraph_Clustering  (extraction artifact '<reponame>...' commented out so the module parses)
from collections import Counter
import numpy as np
from functools import reduce
import igraph as ig
import itertools
from scipy.special import comb
################################################################################
## we use 2 representations for partitions (0-based part ids):
## (1) dictionary or (2) list of sets
def dict2part(D):
    """Convert a {node: part_id} dict into a partition (list of sets).

    Part ids are assumed 0-based; ids with no members yield empty sets so
    that ``P[i]`` always corresponds to part id ``i``.

    Fixes: the original raised ValueError on an empty dict (``max()`` of an
    empty sequence) and rebuilt the key/value lists for every part id
    (O(n*k)); this version is a single O(n) pass.

    :param D: mapping node -> part id (0-based ints)
    :return: list of sets, one per part id
    """
    if not D:
        return []
    P = [set() for _ in range(max(D.values()) + 1)]
    for node, part in D.items():
        P[part].add(node)
    return P
def part2dict(A):
    """Invert a partition (list of sets) into a {node: part_index} dict.

    :param A: list of sets of nodes
    :return: dict mapping each node to the index of its part in A
    """
    return {node: idx for idx, part in enumerate(A) for node in part}
################################################################################
## Precompute soe values on HNX hypergraph for computing qH faster
def HNX_precompute(HG):
    """
    Precompute some values on hypergraph for faster computing of hypergraph modularity. The following attributes will be set:
    v.strength: the weighted degree for each v in HG.nodes
    HG.d_weights: total edge weight for each edge cardinality d
    HG.total_weight: sum of all d-weights
    HG.bin_coef: binomial coefficients (n, k) for k in (n//2, n]
    Parameters
    ----------
    HG : Hypergraph
    """
    # Work on a copy with singleton edges removed (they cannot have a majority part).
    H = HG.remove_singletons()
    ## 1. compute node strenghts (weighted degrees)
    for v in H.nodes:
        H.nodes[v].strength = 0
    for e in H.edges:
        try:
            w = H.edges[e].weight
        except:
            w = 1
            ## add unit weight if none to simplify other functions
            H.edges[e].weight = 1
        # every member node of edge e gains the edge's weight
        for v in list(H.edges[e]):
            H.nodes[v].strength += w
    ## 2. compute d-weights: total edge weight per edge cardinality d
    ctr = Counter([len(H.edges[e]) for e in H.edges])
    for k in ctr.keys():
        ctr[k]=0
    for e in H.edges:
        ctr[len(H.edges[e])] += H.edges[e].weight
    H.d_weights = ctr
    H.total_weight = sum(ctr.values())
    ## 3. compute binomial coeffcients (modularity speed-up)
    # only majorities matter, so k ranges over (n//2, n]
    bin_coef = {}
    for n in H.d_weights.keys():
        for k in np.arange(n//2+1,n+1):
            bin_coef[(n,k)] = comb(n, k, exact=True)
    H.bin_coef = bin_coef
    return H
################################################################################
## some weight function 'wdc' for d-edges with c-majority
## default: linear w.r.t. c
def linear(d, c):
    """Linear majority weight: c/d when c is a strict majority of d, else 0."""
    if 2 * c > d:
        return c / d
    return 0
## majority
def majority(d, c):
    """Indicator weight: 1 when c is a strict majority of d, else 0."""
    if 2 * c > d:
        return 1
    return 0
## strict
def strict(d, c):
    """All-or-nothing weight: 1 only when the whole edge lies in one part."""
    return int(c == d)
#########################################
## compute vol(A_i)/vol(V) for each part A_i in A (list of sets)
def compute_partition_probas(HG, A):
    """Return vol(A_i)/vol(V) for every part A_i of partition A.

    Volume of a part is the sum of its nodes' (precomputed) strengths.
    """
    volumes = [sum(HG.nodes[v].strength for v in part) for part in A]
    total = sum(volumes)
    return [vol / total for vol in volumes]
## degree tax
def DegreeTax(HG, Pr, wdc):
    """Degree tax (null-model expectation) of hypergraph modularity.

    :param HG: precomputed hypergraph (d_weights, bin_coef, total_weight)
    :param Pr: list of volume fractions vol(A_i)/vol(V), one per part
    :param wdc: weight function w(d, c) for d-edges with c-majority
    """
    total = 0
    for d, d_weight in HG.d_weights.items():
        # expected majority weight of a random d-edge, summed over parts
        inner = sum(
            p ** c * (1 - p) ** (d - c) * HG.bin_coef[(d, c)] * wdc(d, c)
            for c in np.arange(d // 2 + 1, d + 1)
            for p in Pr
        )
        total += d_weight * inner
    return total / HG.total_weight
## edge contribution, A is list of sets
def EdgeContribution(HG, A, wdc):
    """Edge contribution of partition A: weighted majority score of all
    edges that have a strict-majority part, normalized by total weight.

    :param HG: precomputed hypergraph
    :param A: partition as a list of sets
    :param wdc: weight function w(d, c) for d-edges with c-majority
    """
    contribution = 0
    for e in HG.edges:
        d = HG.size(e)
        for part in A:
            c = HG.size(e, part)
            # only a strict majority of the edge inside one part counts
            if c > d / 2:
                contribution += wdc(d, c) * HG.edges[e].weight
    return contribution / HG.total_weight
## HG: HNX hypergraph
## A: partition (list of sets)
## wcd: weight function (ex: strict, majority, linear)
def HNX_modularity(HG, A, wdc=linear):
    """Hypergraph modularity qH of partition A: edge contribution minus degree tax.

    :param HG: HNX hypergraph (precomputed via HNX_precompute)
    :param A: partition as a list of sets
    :param wdc: weight function (ex: strict, majority, linear)
    """
    probas = compute_partition_probas(HG, A)
    ec = EdgeContribution(HG, A, wdc)
    dt = DegreeTax(HG, probas, wdc)
    return ec - dt
################################################################################
## 2-section igraph from HG
def HNX_2section(HG):
    """Build the weighted 2-section (clique-expansion) igraph of HG.

    Each hyperedge of size > 1 contributes a clique whose edge weights are
    w/(|e|-1) — the random-walk 2-section, which preserves the nodes'
    weighted degrees. Parallel edges are merged by summing their weights.
    """
    weighted_pairs = []
    for e in HG.edges:
        members = HG.edges[e]
        if len(members) <= 1:
            continue
        try:
            w = HG.edges[e].weight / (len(members) - 1)
        except:
            # unweighted edge: treat its weight as 1
            w = 1 / (len(members) - 1)
        weighted_pairs.extend(
            (u, v, w) for u, v in itertools.combinations(members, 2)
        )
    return ig.Graph.TupleList(weighted_pairs, weights=True).simplify(combine_edges='sum')
################################################################################
def HNX_Kumar(HG, delta=.01, verbose=False):
    """Kumar et al. clustering: iteratively re-weight hyperedges and re-run
    Louvain on the weighted 2-section graph until the weights converge.

    :param HG: HNX hypergraph (edges carry a .weight attribute)
    :param delta: convergence threshold on the maximum weight change
    :param verbose: print iteration counter and current difference
    :return: partition of the nodes as a list of sets
    """
    ## weights will be modified -- store initial weights, keyed by edge id.
    ## Fix: the original saved them in a positional list (W = [e.weight ...])
    ## but restored them with W[e] where e is an edge id, which mis-restores
    ## the weights (or crashes outright for non-integer edge ids).
    W = {e: HG.edges[e].weight for e in HG.edges}
    ## build graph
    G = HNX_2section(HG)
    # apply clustering
    CG = G.community_multilevel(weights='weight')
    CH = []
    for comm in CG.as_cover():
        CH.append(set([G.vs[x]['name'] for x in comm]))
    ## LOOP
    diff = 1
    ctr = 0
    while diff > delta:
        # re-weight each hyperedge from the current communities
        diff = 0
        for i in HG.edges:
            e = HG.edges[i]
            reweight = sum([1/(1+HG.size(e,c)) for c in CH])*(HG.size(e)+len(CH))/HG.number_of_edges()
            diff = max(diff, 0.5*abs(e.weight-reweight))
            # damped update: average old weight with the re-weight
            e.weight = 0.5*e.weight + 0.5*reweight
        ## re-run louvain on the re-weighted 2-section graph
        G = HNX_2section(HG)
        CG = G.community_multilevel(weights='weight')
        CH = []
        for comm in CG.as_cover():
            CH.append(set([G.vs[x]['name'] for x in comm]))
        ctr += 1
        if verbose:
            print(ctr, diff)
        if ctr > 50:  ## this process sometimes gets stuck -- set limit
            break
    G.vs['part'] = CG.membership
    ## restore the original edge weights before returning
    for e in HG.edges:
        HG.edges[e].weight = W[e]
    return dict2part({v['name']: v['part'] for v in G.vs})
################################################################################
## compute change in edge contribution --
## partition P, node v going from P[a] to P[b]
def DeltaEC(HG, P, v, a, b, wdc):
    """Change in edge contribution when node v moves from part P[a] to P[b].

    Only edges incident to v can change, so the loop is restricted to
    HG.nodes[v].memberships. Returns the (normalized) delta, positive when
    the move improves the edge contribution.
    """
    Pm = P[a]-{v}            # part a after removing v
    Pn = P[b].union({v})     # part b after adding v
    ec = 0
    for e in list(HG.nodes[v].memberships):
        d = HG.size(e)
        w = HG.edges[e].weight
        # new majority weights minus old majority weights for this edge
        ec += w*(wdc(d,HG.size(e,Pm))+wdc(d,HG.size(e,Pn))
                 -wdc(d,HG.size(e,P[a]))-wdc(d,HG.size(e,P[b])))
    return ec/HG.total_weight
## exp. part of binomial pmf
def bin_ppmf(d, c, p):
    """Exponential part of the binomial pmf: p**c * (1-p)**(d-c)."""
    success = p ** c
    failure = (1 - p) ** (d - c)
    return success * failure
## compute change in degree tax --
## partition P (list), node v going from P[a] to P[b]
def DeltaDT(HG, P, v, a, b, wdc):
    """Change in degree tax when node v moves from part P[a] to P[b].

    Uses the precomputed strengths, d-weights and binomial coefficients on
    HG; only the two affected parts' volume fractions change.
    """
    s = HG.nodes[v].strength
    vol = sum([HG.nodes[v].strength for v in HG.nodes])     # total volume
    vola = sum([HG.nodes[v].strength for v in P[a]])        # volume of part a (before move)
    volb = sum([HG.nodes[v].strength for v in P[b]])        # volume of part b (before move)
    volm = (vola-s)/vol    # part a's volume fraction after the move
    voln = (volb+s)/vol    # part b's volume fraction after the move
    vola /= vol
    volb /= vol
    DT=0
    for d in HG.d_weights.keys():
        x=0
        # only strict majorities c in (d/2, d] contribute to the tax
        for c in np.arange(int(np.floor(d/2))+1,d+1):
            x+=HG.bin_coef[(d,c)]*wdc(d,c)*(bin_ppmf(d,c,voln)+bin_ppmf(d,c,volm)
                -bin_ppmf(d,c,vola)-bin_ppmf(d,c,volb))
        DT += x*HG.d_weights[d]
    return DT/HG.total_weight
## simple H-based algorithm --
## try moving nodes between communities to optimize qH
## requires L: initial non-trivial partition
def HNX_LastStep(HG, L, wdc=linear, delta = .01):
    """Local-move refinement: greedily relocate nodes between the parts of L
    while hypergraph modularity qH improves by at least delta per sweep.

    :param HG: HNX hypergraph (precomputed via HNX_precompute)
    :param L: initial non-trivial partition (list of sets)
    :param wdc: weight function (ex: strict, majority, linear)
    :param delta: minimum qH improvement to keep iterating
    :return: refined partition with empty parts dropped
    """
    A = L[:] ## we will modify this, copy
    D = part2dict(A)
    qH = 0
    while True:
        # one sweep over all nodes, in random order
        for v in list(np.random.permutation(list(HG.nodes))):
            c = D[v]
            # candidate parts: v's current part plus its neighbours' parts
            s = list(set([c] + [D[i] for i in HG.neighbors(v)]))
            M = []
            if len(s)>0:
                for i in s:
                    if c==i:
                        M.append(0)   # staying put has zero gain by definition
                    else:
                        # qH gain of moving v from part c to part i
                        M.append(DeltaEC(HG,A,v,c,i,wdc)-DeltaDT(HG,A,v,c,i,wdc))
                # move v to the best candidate part (if different from current)
                i = s[np.argmax(M)]
                if c!=i:
                    A[c] = A[c] - {v}
                    A[i] = A[i].union({v})
                    D[v] = i
        # recompute qH; stop once a full sweep improves it by less than delta
        Pr = compute_partition_probas(HG,A)
        q2 = EdgeContribution(HG,A,wdc)-DegreeTax(HG,Pr,wdc)
        if (q2-qH) < delta:
            break
        qH = q2
    return [a for a in A if len(a)>0]
################################################################################
|
import pandas as pd
def preprocess_data(df_ones_training, df_zeros_training, df_ones_test, df_zeros_test):
    """Merge the positive/negative splits, shuffle them, and return the
    sentence and label lists for training and testing.

    :param df_ones_training: positive training samples ('comment', 'label')
    :param df_zeros_training: negative training samples
    :param df_ones_test: positive test samples
    :param df_zeros_test: negative test samples
    :return: (sentences_train, sentences_test, labels_train, labels_test)
    """
    # Concatenate, shuffle (frac=1 keeps every row), and re-index each split.
    df_training = (
        pd.concat([df_ones_training, df_zeros_training])
        .sample(frac=1)
        .reset_index(drop=True)
    )
    df_test = (
        pd.concat([df_ones_test, df_zeros_test])
        .sample(frac=1)
        .reset_index(drop=True)
    )
    # Sentences and labels come from the same shuffled frame, so they stay aligned.
    return (
        df_training['comment'].tolist(),
        df_test['comment'].tolist(),
        df_training['label'].tolist(),
        df_test['label'].tolist(),
    )
import tensorflow as tf
def get_not_none_from_list(grads):
    """Return the gradients with every None entry dropped, order preserved."""
    return list(filter(lambda grad: grad is not None, grads))
def validate_gradients(grads, locally_aggregated_grads):
    """Assert the count of non-None gradients matches the aggregated list.

    Raises AssertionError when the two lists cannot correspond one-to-one.
    """
    present = get_not_none_from_list(grads)
    assert len(present) == len(locally_aggregated_grads)
# Example usage: three real gradients plus one None (e.g. an unused variable).
grads = [tf.constant(1.0), None, tf.constant(2.0), tf.constant(3.0)]
# The aggregated list must line up one-to-one with the non-None gradients above.
locally_aggregated_grads = [tf.constant(0.5), tf.constant(1.5), tf.constant(2.5)]
validate_gradients(grads, locally_aggregated_grads)
// gh_stars: 0  (extraction artifact commented out so the module parses)
import { Linq } from "../utils/linq";
export class CreepHelpers {
public static getCreepsByRole(role: string): Creep[] {
return _.filter(Game.creeps, (c) => c.memory.role == role);
}
public static getCreeps(): Creep[] {
return _.filter(Game.creeps);
}
public static getCreepWithLeastTtl(creeps: Creep[]): Creep | null {
let lowestTll: Creep | null = null;
lowestTll = Linq.min<Creep, number | undefined>(creeps, c => c.ticksToLive);
// creeps.forEach(c => {
// if (c.ticksToLive === undefined) {
// // the creep is spawning
// return;
// }
// if (lowestTll === null) {
// lowestTll = c;
// return;
// }
//
// if (lowestTll?.ticksToLive !== undefined) {
// if (c.ticksToLive < lowestTll?.ticksToLive ?? 10000) {
// lowestTll = c;
// return;
// }
// }
// });
return lowestTll;
}
public static isCreepStorageFull(creep: Creep, storageType?: undefined): boolean {
return creep.store.getFreeCapacity(storageType) <= 0;
}
public static doesCreepHaveStorageSpace(creep: Creep, storageType?: undefined): boolean {
return creep.store.getFreeCapacity(storageType) > 0;
}
public static isCreepStorageEmpty(creep: Creep, storageType?: undefined): boolean {
return creep.store.getFreeCapacity(storageType) === creep.store.getCapacity(storageType);
}
public static getHarvesters(): Creep[] {
return this.getCreepsByRole('harvester');
}
public static getBuilders(): Creep[] {
return this.getCreepsByRole('builder');
}
public static getUpgraders(): Creep[] {
return this.getCreepsByRole('upgrader');
}
}
|
/* $Id$ */
/***************************************************************************
* (C) Copyright 2003-2010 - Stendhal *
***************************************************************************
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
package games.stendhal.client.sound.system.processors;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicLong;
import games.stendhal.client.sound.facade.Time;
import games.stendhal.client.sound.system.SignalProcessor;
/**
* Signal processor stage to pause sound signals.
* @author silvio
*/
/**
 * Signal processor stage to pause or stop sound signals, optionally after a delay.
 * Thread-safety: state and delay are held in atomics so play/pause/stop may be
 * called from a different thread than the audio processing thread.
 * @author silvio
 */
public class Interruptor extends SignalProcessor
{
    // Playback states.
    private static final int PLAY  = 0;
    private static final int STOP  = 1;
    private static final int PAUSE = 2;

    // Fix: fields made final — they are never reassigned, only their
    // contained values change (Effective Java: minimize mutability).
    /** Current state; one of PLAY, STOP or PAUSE. */
    private final AtomicInteger mState = new AtomicInteger(PLAY);

    /** Remaining delay in nanoseconds before a pending pause/stop takes effect. */
    private final AtomicLong mDelay = new AtomicLong(0);

    /** Resumes playback immediately and clears any pending delay. */
    public void play()
    {
        mState.set(PLAY);
        mDelay.set(0);
    }

    /** Pauses the signal once the given delay has elapsed. */
    public void pause(Time delay)
    {
        mState.set(PAUSE);
        mDelay.set(delay.getInNanoSeconds());
    }

    /** Stops (and quits) this processor once the given delay has elapsed. */
    public void stop(Time delay)
    {
        mState.set(STOP);
        mDelay.set(delay.getInNanoSeconds());
    }

    @Override
    public synchronized boolean request()
    {
        // Only act once the configured delay has fully elapsed.
        if(mDelay.get() <= 0)
        {
            switch(mState.get())
            {
            case PAUSE:
                // Report success without requesting more data upstream
                // (presumably keeps the stream alive while paused — confirm
                // against SignalProcessor.request()'s contract).
                return true;
            case STOP:
                quit();
                return false;
            default:
                // PLAY: fall through to normal upstream processing.
                break;
            }
        }
        return super.request();
    }

    @Override
    protected void modify(float[] data, int frames, int channels, int rate)
    {
        // While a pause/stop is pending, count the delay down by the
        // duration of each processed audio chunk.
        if(mState.get() != PLAY)
        {
            Time delaySegment = new Time(frames, rate);
            mDelay.addAndGet(-delaySegment.getInNanoSeconds());
        }
        super.propagate(data, frames, channels, rate);
    }
}
|
/* Copyright 2007-2015 QReal Research Group
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. */
#include "messageGenerator.h"
#include <qrutils/nameNormalizer.h>
#include <QtCore/QFile>
#include <QtCore/QDebug>
using namespace ubiq::generator;
using namespace qReal;
using namespace utils;

/// Generation target file: name of the generated C# source.
QString const fileName = "Message.cs";
/// Constructs a generator that reads templates from templateDirPath and
/// writes the generated Message.cs into outputDirPath; model access and
/// error reporting are delegated to the base AbstractGenerator.
MessageGenerator::MessageGenerator(QString const &templateDirPath
		, QString const &outputDirPath
		, qReal::LogicalModelAssistInterface const &logicalModel
		, qReal::ErrorReporterInterface &errorReporter
		)
	: AbstractGenerator(templateDirPath, outputDirPath, logicalModel, errorReporter)
{
}
/// Default destructor; base class owns all resources.
MessageGenerator::~MessageGenerator()
{
}
/// Entry point: loads the Message.cs template, walks every logical element
/// of each DataStructuresDiagram and substitutes the template placeholders
/// for message classes and enum code lists, then saves the result.
void MessageGenerator::generate()
{
	QString result;
	loadTemplateFromFile(fileName, result);
	loadUtilsTemplates();
	for (Id const &diagram : mApi.elementsByType("DataStructuresDiagram")) {
		if (!mApi.isLogicalElement(diagram)) {
			continue;
		}
		for (Id const &element : mApi.children(diagram)) {
			if (!mApi.isLogicalElement(element)) {
				continue;
			}
			if (element.element() == "MessageClass") {
				// A message class feeds several placeholders at once.
				result.replace("@@Properties@@", generatePropertiesCode(element))
						.replace("@@InitFieldsWithDefaults@@", generateDefaultFieldsInitialization(element))
						.replace("@@ConstructorArgs@@", generateConstructorArguments(element))
						.replace("@@InitFieldsWithArgs@@", generateFieldsInitialization(element))
						.replace("@@ConstructorActualArgs@@", generateConstructorActualArguments(element))
						.replace("@@PackFields@@", generatePackFields(element))
						.replace("@@UnpackFields@@", generateUnpackFields(element))
						;
			} else if (element.element() == "MessageCodes") {
				result.replace("@@MessageCodes@@", generateEnumElements(element));
			} else if (element.element() == "ErrorCodes") {
				result.replace("@@ErrorCodes@@", generateEnumElements(element));
			}
		}
	}
	saveOutputFile(fileName, result);
}
/// Expands the @@EnumElement@@ template for every EnumElement child of the
/// given element, substituting its normalized name and "value" property.
QString MessageGenerator::generateEnumElements(qReal::Id const &element) const
{
	QString result;
	for (Id const &id : mApi.children(element)) {
		bool const isEnumElement = mApi.isLogicalElement(id) && id.element() == "EnumElement";
		if (!isEnumElement) {
			continue;
		}
		QString item = mTemplateUtils["@@EnumElement@@"];
		item.replace("@@Name@@", NameNormalizer::normalize(mApi.name(id)))
				.replace("@@Value@@", mApi.stringProperty(id, "value"));
		result += item;
	}
	return result;
}
/// Builds default-value initialization lines (via the @@FieldInit@@
/// template) for every Field child; falls back to the type's default
/// when the field carries no explicit "defaultValue".
QString MessageGenerator::generateDefaultFieldsInitialization(qReal::Id const &element) const
{
	QString result;
	for (Id const &property : mApi.children(element)) {
		if (!mApi.isLogicalElement(property) || property.element() != "Field") {
			continue;
		}
		QString const explicitDefault = mApi.stringProperty(property, "defaultValue");
		QString const value = explicitDefault.isEmpty()
				? getDefaultValue(mApi.stringProperty(property, "type"))
				: explicitDefault;
		QString line = mTemplateUtils["@@FieldInit@@"];
		line.replace("@@Name@@", mApi.name(property))
				.replace("@@Value@@", value);
		result += (" " + line);
	}
	return result;
}
/// Builds "field = constructorArg;" lines (via the @@FieldInit@@ template)
/// for every Field child, pairing each field with its normalized argument name.
QString MessageGenerator::generateFieldsInitialization(qReal::Id const &element) const
{
	QString result;
	for (Id const &property : mApi.children(element)) {
		if (!mApi.isLogicalElement(property) || property.element() != "Field") {
			continue;
		}
		QString const fieldName = mApi.name(property);
		QString line = mTemplateUtils["@@FieldInit@@"];
		line.replace("@@Name@@", fieldName)
				.replace("@@Value@@", NameNormalizer::normalize(fieldName, false));
		result += (" " + line);
	}
	return result;
}
/// Builds the "Type name, " constructor parameter list (via the
/// @@Argument@@ template) for every Field child of the element.
QString MessageGenerator::generateConstructorArguments(qReal::Id const &element) const
{
	QString result;
	for (Id const &property : mApi.children(element)) {
		bool const isField = mApi.isLogicalElement(property) && property.element() == "Field";
		if (!isField) {
			continue;
		}
		QString argument = mTemplateUtils["@@Argument@@"];
		argument.replace("@@ArgType@@", mApi.stringProperty(property, "type"))
				.replace("@@ArgName@@", NameNormalizer::normalize(mApi.name(property), false));
		result += (argument.trimmed() + ", ");
	}
	return result;
}
/// Builds the "name1, name2, " list used when forwarding the constructor
/// arguments; names are normalized the same way as in the parameter list.
QString MessageGenerator::generateConstructorActualArguments(qReal::Id const &element) const
{
	QString result;
	for (Id const &property : mApi.children(element)) {
		if (!mApi.isLogicalElement(property) || property.element() != "Field") {
			continue;
		}
		result += (NameNormalizer::normalize(mApi.name(property), false) + ", ");
	}
	return result;
}
/// Generates the field-packing code for message serialization.
QString MessageGenerator::generatePackFields(qReal::Id const &element) const
{
	return generateSerializationRelatedCode(element, "Serialization");
}
/// Generates the field-unpacking code for message deserialization.
QString MessageGenerator::generateUnpackFields(qReal::Id const &element) const
{
	return generateSerializationRelatedCode(element, "Deserialization");
}
/// Shared worker for pack/unpack generation. For every "Field" child whose
/// "serialize" property is set, emits the template named
/// "@@<method>_<type>@@" (or "@@<method>IntAsShort@@" for ints flagged
/// serializeAsShort) with @@Name@@ substituted by the field name.
QString MessageGenerator::generateSerializationRelatedCode(qReal::Id const &element, QString const &method) const
{
	QString serializersList;
	for (Id const &property : mApi.children(element)) {
		if (!mApi.isLogicalElement(property) || property.element() != "Field") {
			continue;
		}
		// Fields not marked for serialization are skipped entirely.
		if (!mApi.property(property, "serialize").toBool()) {
			continue;
		}
		QString const name = mApi.name(property);
		QString const type = mApi.stringProperty(property, "type");
		QString serializationTemplate;
		if (type == "int") {
			// ints may optionally be packed as shorts on the wire
			bool const serializeAsShort = mApi.property(property, "serializeAsShort").toBool();
			if (serializeAsShort) {
				serializationTemplate = mTemplateUtils["@@" + method + "IntAsShort@@"];
			}
		}
		if (serializationTemplate.isEmpty()) {
			serializationTemplate = mTemplateUtils["@@" + method + "_" + type + "@@"];
		}
		serializationTemplate.replace("@@Name@@", name);
		serializersList += serializationTemplate;
	}
	return serializersList;
}
|
// gh_stars: 0  (extraction artifact commented out so the file parses)
package com.freelancer.hashan.soap.ws.client.generated;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlType;
/**
* <p>Java class for tCountryCodeAndNameGroupedByContinent complex type.
*
* <p>The following schema fragment specifies the expected content contained within this class.
*
* <pre>
* <complexType name="tCountryCodeAndNameGroupedByContinent">
* <complexContent>
* <restriction base="{http://www.w3.org/2001/XMLSchema}anyType">
* <sequence>
* <element name="Continent" type="{http://www.oorsprong.org/websamples.countryinfo}tContinent"/>
* <element name="CountryCodeAndNames" type="{http://www.oorsprong.org/websamples.countryinfo}ArrayOftCountryCodeAndName"/>
* </sequence>
* </restriction>
* </complexContent>
* </complexType>
* </pre>
*
*
*/
// NOTE: JAXB-generated binding class (countryinfo web-service schema).
// Regenerate from the WSDL/XSD rather than editing by hand.
@XmlAccessorType(XmlAccessType.FIELD)
@XmlType(name = "tCountryCodeAndNameGroupedByContinent", propOrder = {
    "continent",
    "countryCodeAndNames"
})
public class TCountryCodeAndNameGroupedByContinent {

    @XmlElement(name = "Continent", required = true)
    protected TContinent continent;
    @XmlElement(name = "CountryCodeAndNames", required = true)
    protected ArrayOftCountryCodeAndName countryCodeAndNames;

    /**
     * Gets the value of the continent property.
     *
     * @return
     *     possible object is
     *     {@link TContinent }
     *
     */
    public TContinent getContinent() {
        return continent;
    }

    /**
     * Sets the value of the continent property.
     *
     * @param value
     *     allowed object is
     *     {@link TContinent }
     *
     */
    public void setContinent(TContinent value) {
        this.continent = value;
    }

    /**
     * Gets the value of the countryCodeAndNames property.
     *
     * @return
     *     possible object is
     *     {@link ArrayOftCountryCodeAndName }
     *
     */
    public ArrayOftCountryCodeAndName getCountryCodeAndNames() {
        return countryCodeAndNames;
    }

    /**
     * Sets the value of the countryCodeAndNames property.
     *
     * @param value
     *     allowed object is
     *     {@link ArrayOftCountryCodeAndName }
     *
     */
    public void setCountryCodeAndNames(ArrayOftCountryCodeAndName value) {
        this.countryCodeAndNames = value;
    }

}
|
#!/bin/bash
# Scale the first MapR cluster down to zero nodes, but only when it is
# currently at its full size of three. The deletion is temporary: a later
# plain terraform apply restores the cluster.
source "./scripts/variables.sh"

if [[ "$MAPR_CLUSTER1_COUNT" == 3 ]]; then
  # set -x echoes the terraform command before running it, inside a subshell
  # so tracing does not leak into the rest of the script.
  (set -x; ./bin/terraform_apply.sh -var='mapr_cluster_1_count=0')
  echo "NOTE: Deleted MAPR cluster will be reinstated after running './bin/terraform_apply.sh'"
fi
|
-- Build the list of archived redo logs to mine. The first ADD_LOGFILE call
-- passes DBMS_LOGMNR.NEW to start a fresh log list; the rest append to it.
EXECUTE dbms_logmnr.add_logfile('/u02/ARCH/1564-orakic.arc', DBMS_LOGMNR.NEW);
EXECUTE DBMS_LOGMNR.ADD_LOGFILE('/u02/ARCH/1565-orakic.arc');
EXECUTE dbms_logmnr.add_logfile('/u02/ARCH/1566-orakic.arc');
EXECUTE dbms_logmnr.add_logfile('/u02/ARCH/1567-orakic.arc');
EXECUTE dbms_logmnr.add_logfile('/u02/ARCH/1568-orakic.arc');
EXECUTE dbms_logmnr.add_logfile('/u02/ARCH/1569-orakic.arc');
/* A select on v$logmnr_contents fails with: ORA-03113: end-of-file on communication channel
AND Trace file and core dump written IF DICTIONARY FILE IS USED */
-- Variant 1: start LogMiner with a flat dictionary file (this is the variant
-- that triggers the ORA-03113 described above). Trailing '-' is the SQL*Plus
-- line-continuation character; '>' is the captured continuation prompt.
EXECUTE DBMS_LOGMNR.START_LOGMNR( -
> DICTFILENAME =>'/ora/oracle/logminer/dictionary.ora');
-- Variant 2: use the online catalog as the dictionary instead.
execute DBMS_LOGMNR.START_LOGMNR(options => -
dbms_logmnr.dict_from_online_catalog);
-- Variant 3: online catalog, plus skip corrupt redo blocks and pretty-print
-- the reconstructed SQL.
execute dbms_logmnr.start_logmnr(options => -
dbms_logmnr.dict_from_online_catalog + dbms_logmnr.skip_corruption + -
dbms_logmnr.print_pretty_sql);
-- Inspect the mined redo, then close the LogMiner session to free resources.
SELECT operation, sql_redo FROM v$logmnr_contents;
EXECUTE DBMS_LOGMNR.END_LOGMNR;
### connector
curl http://localhost:8083 | python -m json.tool
curl -g -6 http://[::1]:8083 | python -m json.tool
curl http://localhost:8083/connector-plugins | python -m json.tool
echo '{"name": "load-kafka-config", "config": { "connector.class": "org.apache.kafka.connect.file.FileStreamSourceConnector", "file": "/var/log/agent/kafkaAgent.log", "topic": "agent-log"}}' \
| curl -X POST -d @- http://localhost:8083/connectors --header "Content-Type:application/json" \
| python -m json.tool
${KAFKA_HOME}/bin/kafka-console-consumer.sh --property print.timestamp=true --property print.key=true --bootstrap-server $(hostname):9092 --topic agent-log --from-beginning
echo '{"name": "dump-kafka-config", "config": {"connector.class": "org.apache.kafka.connect.file.FileStreamSinkConnector", "file": "/usr/local/kafka/copy-agent-log", "topics": "agent-log"}}' \
| curl -X POST -d @- http://localhost:8083/connectors --header "Content-Type:application/json" \
| python -m json.tool
curl http://localhost:8083/connectors | python -m json.tool
curl http://localhost:8083/connectors/load-kafka-config | python -m json.tool
curl -X DELETE http://localhost:8083/connectors/dump-kafka-config | python -m json.tool
### es
#### 获取所有配置项
``` json
{
"connector.class": "io.confluent.connect.elasticsearch.ElasticsearchSinkConnector",
"tasks.max": "1",
"topics": "test-topic"
}
```
curl -X PUT -H "Content-Type: application/json" --data @config.json http://localhost:8083/connector-plugins/ElasticsearchSinkConnector/config/validate/
#### 提交connector
echo '{"name": "elastic-log-connector", "config": {"connector.class": "io.confluent.connect.elasticsearch.ElasticsearchSinkConnector", "connection.url": "http://10.9.107.141:9200", "type.name": "log", "topics": "log", "key.ignore": true, "schema.ignore": true, "tasks.max": 3}}' \
| curl -X POST -d @- http://localhost:8083/connectors --header "Content-Type:application/json" \
| python -m json.tool
echo '{"name": "elastic-random-log-connector", "config": {"connector.class": "io.confluent.connect.elasticsearch.ElasticsearchSinkConnector", "connection.url": "http://10.9.107.141:9200", "type.name": "randomlog", "topics": "random-log", "key.ignore": true, "schema.ignore": true, "tasks.max": 3}}' \
| curl -X POST -d @- http://localhost:8083/connectors --header "Content-Type:application/json" \
| python -m json.tool
echo '{"name": "elastic-large-log-connector", "config": {"connector.class": "io.confluent.connect.elasticsearch.ElasticsearchSinkConnector", "connection.url": "http://10.9.107.141:9200", "type.name": "largelog", "topics": "large-log", "key.ignore": true, "schema.ignore": true, "tasks.max": 3}}' \
| curl -X POST -d @- http://localhost:8083/connectors --header "Content-Type:application/json" \
| python -m json.tool
curl http://localhost:8083/connectors/elastic-log-connector/config | python -m json.tool
curl http://localhost:8083/connectors/elastic-log-connector/status | python -m json.tool
curl http://localhost:8083/connectors/elastic-log-connector/tasks | python -m json.tool
### hdfs
curl http://localhost:8083/connectors/hdfs-users-connector/config | python -m json.tool
curl http://localhost:8083/connectors/hdfs-users-connector/status | python -m json.tool
curl http://localhost:8083/connectors/hdfs-users-connector/tasks | python -m json.tool
echo '{"name": "hdfs-string-connector", "config": {"connector.class": "io.confluent.connect.hdfs.HdfsSinkConnector", "hdfs.url": "hdfs://10.9.68.210:8020", "topics": "string", "tasks.max": 3, "flush.size": 3, "format.class": "io.confluent.connect.hdfs.json.JsonFormat"}}' \
| curl -X POST -d @- http://localhost:8083/connectors --header "Content-Type:application/json" \
| python -m json.tool
echo '{"name": "hdfs-random-string-connector", "config": {"connector.class": "io.confluent.connect.hdfs.HdfsSinkConnector", "hdfs.url": "hdfs://10.9.68.210:8020", "topics": "random-string", "tasks.max": 3, "flush.size": 3, "format.class": "io.confluent.connect.hdfs.json.JsonFormat"}}' \
| curl -X POST -d @- http://localhost:8083/connectors --header "Content-Type:application/json" \
| python -m json.tool
echo '{"name": "hdfs-log-connector", "config": {"connector.class": "io.confluent.connect.hdfs.HdfsSinkConnector", "hdfs.url": "hdfs://10.9.68.210:8020", "topics": "log", "tasks.max": 3, "flush.size": 3, "format.class": "io.confluent.connect.hdfs.json.JsonFormat", "topics.dir":"/ltopics", "logs.dir": "/llogs"}}' \
| curl -X POST -d @- http://localhost:8083/connectors --header "Content-Type:application/json" \
| python -m json.tool
echo '{"name": "hdfs-users-connector", "config": {"connector.class": "io.confluent.connect.hdfs.HdfsSinkConnector", "hdfs.url": "hdfs://10.9.109.103:8020", "topics": "users", "tasks.max": 3, "flush.size": 1024, "format.class": "io.confluent.connect.hdfs.json.JsonFormat", "topics.dir":"/ukafka/users/topic", "logs.dir": "/ukafka/users/log"}}' \
| curl -X POST -d @- http://localhost:8083/connectors --header "Content-Type:application/json" \
| python -m json.tool
echo '{"name": "hdfs-logs-connector", "config": {"connector.class": "io.confluent.connect.hdfs.HdfsSinkConnector", "hdfs.url": "hdfs://10.9.109.103:8020", "topics": "logs", "tasks.max": 3, "flush.size": 1024, "format.class": "io.confluent.connect.hdfs.json.JsonFormat", "topics.dir":"/ukafka/logs/topic", "logs.dir": "/ukafka/logs/log"}}' \
| curl -X POST -d @- http://10.9.138.90:8083/connectors --header "Content-Type:application/json" \
| python -m json.tool
echo '{"name": "hdfs-large-log-connector", "config": {"connector.class": "io.confluent.connect.hdfs.HdfsSinkConnector", "hdfs.url": "hdfs://10.9.68.210:8020", "topics": "large-log", "tasks.max": 3, "flush.size": 10, "format.class": "io.confluent.connect.hdfs.json.JsonFormat"}}' \
| curl -X POST -d @- http://localhost:8083/connectors --header "Content-Type:application/json" \
| python -m json.tool
echo '{"name": "hdfs-users-connector", "config": {"connector.class": "io.confluent.connect.hdfs.HdfsSinkConnector", "hdfs.url": "hdfs://10.9.109.103:8020", "topics": "users", "tasks.max": 3, "flush.size": 1024, "topics.dir": "/ukafka/users/topic", "logs.dir": "/ukafka/users/log"}}' \
| curl -X POST -d @- http://localhost:8083/connectors --header "Content-Type:application/json" \
| python -m json.tool
## 控制 connector
curl -X PUT http://localhost:8083/connectors/elastic-log-connector/pause
curl -X PUT http://localhost:8083/connectors/elastic-log-connector/resume
curl -X POST -d '{}' http://localhost:8083/connectors/elastic-log-connector/restart --header "Content-Type:application/json"
# 更新配置
``` json
{
"connection.url": "http://10.9.79.39:9200",
"connector.class": "io.confluent.connect.elasticsearch.ElasticsearchSinkConnector",
"key.ignore": "true",
"name": "elastic-log-connector",
"schema.ignore": "true",
"tasks.max": "10",
"topics": "log",
"type.name": "log"
}
```
curl -X PUT --data @elastic-log-connector.json http://localhost:8083/connectors/elastic-log-connector/config --header "Content-Type:application/json"
## es 交互
curl http://10.9.79.39:9200/
curl http://10.9.79.39:9200/_cat/indices?v
curl -s -X GET http://10.9.79.39:9200/agent-log/_search?pretty=true
## curl
wget https://github.com/confluentinc/kafka-connect-datagen/raw/master/config/connector_pageviews_cos.config
curl -X POST -H "Content-Type: application/json" --data @connector_pageviews_cos.config http://localhost:8083/connectors
vim config.json
``` json
{
"connector.class": "org.apache.kafka.connect.file.FileStreamSinkConnector",
"tasks.max": "1",
"topics": "test-topic"
}
```
curl -X PUT -H "Content-Type: application/json" --data @config.json http://localhost:8083/connector-plugins/FileStreamSinkConnector/config/validate/
unzip confluentinc-kafka-connect-elasticsearch-4.1.1.zip
mv confluentinc-kafka-connect-elasticsearch-4.1.1 plugins/
|
<filename>app/controllers/chargebee_controller.rb
# Receives Chargebee webhook events, validates them, and issues licenses.
class ChargebeeController < ApplicationController
  # Any unhandled error is mailed to the team and re-raised so the webhook
  # delivery is reported as failed to Chargebee (which will retry).
  rescue_from StandardError, with: :log_error

  before_action :ensure_valid_key, :check_subscription

  # Webhook entry point: syncs subscription/customer data, validates the
  # listed domains and, when everything checks out, generates a license.
  def index
    cb = ChargebeeParse.new(params)
    cb.maybe_update_subscription_and_customer
    send_domain_emails(cb) and return unless domains_count_ok?(cb)

    seed = ENV["LICENSE_SECRET_SEED"]
    key, passphrase = license_signing_key, license_signing_passphrase
    result_log = LicenseHandler.call(cb, seed, key, passphrase)
    logger.info(result_log)
    render plain: result_log
  end

  def log_error(e)
    LicenseMailer.we_fucked_up(e, params).deliver_now
    raise e
  end

  private

  # True when the subscription either does not require domains, or the number
  # of listed domains is within the allowed range.
  def domains_count_ok?(cb)
    return true unless cb.domains_required?

    # BUG FIX: the previous `false if a || b` evaluated to nil (falsy) when
    # the count was fine, so valid requests were diverted into
    # send_domain_emails and never got a license.
    !(cb.domains_under_min? || cb.domains_over_max?)
  end

  # Notifies the customer when their domain list is out of range.
  def send_domain_emails(cb)
    if cb.domains_under_min?
      LicenseMailer.domains_under_min(cb.customer_email, cb.listed_domains_max).deliver_now
      message = cb.message << "Domains under minimum, sent email to #{cb.customer_email}"
      render plain: message.join("\n")
    elsif cb.domains_over_max?
      raise "Someone tried to register with too many domains"
      # @TODO: not yet implemented
      # LicenseMailer.domains_over_max(cb.customer_email, cb.listed_domains_max).deliver_now
    end
  end

  # Rejects requests that do not carry the shared webhook token.
  def ensure_valid_key
    head :forbidden if params[:key] != ENV["CHARGEBEE_WEBHOOK_TOKEN"]
  end

  def check_subscription
    # Ignore events that lack a subscription
    render plain: "No subscription given; webhook event: #{webhook_event}" if params.dig("content", "subscription").blank?
  end

  def license_signing_key
    Web::Application.config.license_signing_key
  end

  def license_signing_passphrase
    Web::Application.config.license_signing_key_passphrase
  end

  def webhook_event
    params["event_type"] || "not given"
  end
end
|
/* AUTO-GENERATED FILE. DO NOT MODIFY.
*
* This class was automatically generated by the
* gradle plugin from the resource data it found. It
* should not be modified by hand.
*/
package android.support.compat;
public final class R {

    /** Attribute IDs: downloadable-font (font provider) support. */
    public static final class attr {
        public static final int font = 0x7f010100;
        public static final int fontProviderAuthority = 0x7f0100f9;
        public static final int fontProviderCerts = 0x7f0100fc;
        public static final int fontProviderFetchStrategy = 0x7f0100fd;
        public static final int fontProviderFetchTimeout = 0x7f0100fe;
        public static final int fontProviderPackage = 0x7f0100fa;
        public static final int fontProviderQuery = 0x7f0100fb;
        public static final int fontStyle = 0x7f0100ff;
        public static final int fontWeight = 0x7f010101;
    }
    public static final class bool {
        public static final int abc_action_bar_embed_tabs = 0x7f0a0000;
    }
    /** Color IDs used by the compat notification templates. */
    public static final class color {
        public static final int notification_action_color_filter = 0x7f0c0000;
        public static final int notification_icon_bg_color = 0x7f0c0035;
        public static final int ripple_material_light = 0x7f0c0044;
        public static final int secondary_text_default_material_light = 0x7f0c0046;
    }
    /** Dimension IDs for compat buttons and notification layouts. */
    public static final class dimen {
        public static final int compat_button_inset_horizontal_material = 0x7f080058;
        public static final int compat_button_inset_vertical_material = 0x7f080059;
        public static final int compat_button_padding_horizontal_material = 0x7f08005a;
        public static final int compat_button_padding_vertical_material = 0x7f08005b;
        public static final int compat_control_corner_material = 0x7f08005c;
        public static final int notification_action_icon_size = 0x7f08008a;
        public static final int notification_action_text_size = 0x7f08008b;
        public static final int notification_big_circle_margin = 0x7f08008c;
        public static final int notification_content_margin_start = 0x7f08001b;
        public static final int notification_large_icon_height = 0x7f08008d;
        public static final int notification_large_icon_width = 0x7f08008e;
        public static final int notification_main_column_padding_top = 0x7f08001c;
        public static final int notification_media_narrow_margin = 0x7f08001d;
        public static final int notification_right_icon_size = 0x7f08008f;
        public static final int notification_right_side_padding_top = 0x7f080019;
        public static final int notification_small_icon_background_padding = 0x7f080090;
        public static final int notification_small_icon_size_as_large = 0x7f080091;
        public static final int notification_subtext_size = 0x7f080092;
        public static final int notification_top_pad = 0x7f080093;
        public static final int notification_top_pad_large_text = 0x7f080094;
    }
    /** Drawable IDs for notification backgrounds and icons. */
    public static final class drawable {
        public static final int notification_action_background = 0x7f02007d;
        public static final int notification_bg = 0x7f02007e;
        public static final int notification_bg_low = 0x7f02007f;
        public static final int notification_bg_low_normal = 0x7f020080;
        public static final int notification_bg_low_pressed = 0x7f020081;
        public static final int notification_bg_normal = 0x7f020082;
        public static final int notification_bg_normal_pressed = 0x7f020083;
        public static final int notification_icon_background = 0x7f020084;
        public static final int notification_template_icon_bg = 0x7f020096;
        public static final int notification_template_icon_low_bg = 0x7f020097;
        public static final int notification_tile_bg = 0x7f020085;
        public static final int notify_panel_notification_icon_bg = 0x7f020086;
    }
    /** View IDs referenced by the notification template layouts. */
    public static final class id {
        public static final int action_container = 0x7f0d00b5;
        public static final int action_divider = 0x7f0d00bc;
        public static final int action_image = 0x7f0d00b6;
        public static final int action_text = 0x7f0d00b7;
        public static final int actions = 0x7f0d00c5;
        public static final int async = 0x7f0d0047;
        public static final int blocking = 0x7f0d0048;
        public static final int chronometer = 0x7f0d00c1;
        public static final int forever = 0x7f0d0049;
        public static final int icon = 0x7f0d0067;
        public static final int icon_group = 0x7f0d00c6;
        public static final int info = 0x7f0d00c2;
        public static final int italic = 0x7f0d004a;
        public static final int line1 = 0x7f0d0007;
        public static final int line3 = 0x7f0d0008;
        public static final int normal = 0x7f0d001e;
        public static final int notification_background = 0x7f0d00c3;
        public static final int notification_main_column = 0x7f0d00be;
        public static final int notification_main_column_container = 0x7f0d00bd;
        public static final int right_icon = 0x7f0d00c4;
        public static final int right_side = 0x7f0d00bf;
        public static final int text = 0x7f0d0011;
        public static final int text2 = 0x7f0d0012;
        public static final int time = 0x7f0d00c0;
        public static final int title = 0x7f0d0015;
    }
    public static final class integer {
        public static final int status_bar_notification_info_maxnum = 0x7f0b0009;
    }
    /** Layout IDs for the compat notification templates. */
    public static final class layout {
        public static final int notification_action = 0x7f03002d;
        public static final int notification_action_tombstone = 0x7f03002e;
        public static final int notification_template_custom_big = 0x7f030035;
        public static final int notification_template_icon_group = 0x7f030036;
        public static final int notification_template_part_chronometer = 0x7f03003a;
        public static final int notification_template_part_time = 0x7f03003b;
    }
    public static final class string {
        public static final int status_bar_notification_info_overflow = 0x7f070014;
    }
    /** Style IDs for compat notification text appearances and widgets. */
    public static final class style {
        public static final int TextAppearance_Compat_Notification = 0x7f090091;
        public static final int TextAppearance_Compat_Notification_Info = 0x7f090092;
        public static final int TextAppearance_Compat_Notification_Line2 = 0x7f090118;
        public static final int TextAppearance_Compat_Notification_Time = 0x7f090095;
        public static final int TextAppearance_Compat_Notification_Title = 0x7f090097;
        public static final int Widget_Compat_NotificationActionContainer = 0x7f090099;
        public static final int Widget_Compat_NotificationActionText = 0x7f09009a;
    }
    /** Styleable arrays pairing the FontFamily attrs with their indices. */
    public static final class styleable {
        public static final int[] FontFamily = { 0x7f0100f9, 0x7f0100fa, 0x7f0100fb, 0x7f0100fc, 0x7f0100fd, 0x7f0100fe };
        public static final int[] FontFamilyFont = { 0x7f0100ff, 0x7f010100, 0x7f010101 };
        public static final int FontFamilyFont_font = 1;
        public static final int FontFamilyFont_fontStyle = 0;
        public static final int FontFamilyFont_fontWeight = 2;
        public static final int FontFamily_fontProviderAuthority = 0;
        public static final int FontFamily_fontProviderCerts = 3;
        public static final int FontFamily_fontProviderFetchStrategy = 4;
        public static final int FontFamily_fontProviderFetchTimeout = 5;
        public static final int FontFamily_fontProviderPackage = 1;
        public static final int FontFamily_fontProviderQuery = 2;
    }
}
|
<reponame>AleIV/Model-Tool
package me.aleiv.modeltool.listener;
import me.aleiv.modeltool.core.EntityModel;
import me.aleiv.modeltool.core.EntityModelManager;
import me.aleiv.modeltool.events.EntityModelRemoveEvent;
import org.bukkit.GameMode;
import org.bukkit.event.EventHandler;
import org.bukkit.event.Listener;
import org.bukkit.event.player.PlayerJoinEvent;
import org.bukkit.event.player.PlayerQuitEvent;
import java.util.HashMap;
import java.util.UUID;
/**
 * Preserves a player's {@link EntityModel} disguise and previous game mode
 * across reconnects: the disguise is dropped (and cached) on quit and
 * re-applied on the player's next join.
 */
public class JoinQuitListener implements Listener {

    private final EntityModelManager manager;
    /** Models of players who quit while disguised, keyed by player UUID. */
    private final HashMap<UUID, EntityModel> playerCache;
    /** Game mode each cached player had before quitting (undisguise forces spectator). */
    private final HashMap<UUID, GameMode> gamemodeCache;

    public JoinQuitListener(EntityModelManager manager) {
        this.manager = manager;
        this.playerCache = new HashMap<>();
        this.gamemodeCache = new HashMap<>();
    }

    @EventHandler
    public void onPlayerQuit(PlayerQuitEvent e) {
        EntityModel entityModel = manager.getEntityModel(e.getPlayer().getUniqueId());
        if (entityModel != null && entityModel.isDisguised()) {
            this.playerCache.put(e.getPlayer().getUniqueId(), entityModel);
            this.gamemodeCache.put(e.getPlayer().getUniqueId(), e.getPlayer().getGameMode());
            entityModel.undisguise(); // Will set gamemode to spectator
        }
    }

    @EventHandler
    public void onPlayerJoin(PlayerJoinEvent e) {
        // FIX: remove() instead of get() so entries cannot leak; previously the
        // gamemodeCache entry was never cleared after a rejoin.
        EntityModel entityModel = this.playerCache.remove(e.getPlayer().getUniqueId());
        if (entityModel == null) {
            return;
        }
        GameMode gamemode = this.gamemodeCache.remove(e.getPlayer().getUniqueId());
        if (!entityModel.isDisguised()) {
            // Guard against a missing cached mode; setGameMode(null) would throw.
            if (gamemode != null) {
                e.getPlayer().setGameMode(gamemode);
            }
            entityModel.disguise(e.getPlayer());
        }
    }

    @EventHandler
    public void onEntityModelForceKill(EntityModelRemoveEvent e) {
        // FIX: entrySet().removeIf avoids the ConcurrentModificationException the
        // old remove-while-iterating-keySet loop could throw.
        this.playerCache.entrySet().removeIf(entry -> {
            if (!entry.getValue().equals(e.getEntityModel())) {
                return false;
            }
            this.gamemodeCache.remove(entry.getKey());
            return true;
        });
    }
}
|
<reponame>carlos-sancho-ramirez/android-java-langbook
package sword.bitstream;
import java.io.IOException;
/**
 * Callback used to decode the length of an encoded collection read from a stream.
 */
public interface CollectionLengthDecoder {

    /**
     * Decode the collection length from the stream.
     *
     * @return A positive or 0 value read from the stream.
     * @throws IOException if the stream cannot be read.
     */
    int decodeLength() throws IOException;
}
|
package hr.fer.tel.rassus.lab3;
import org.springframework.boot.SpringApplication;
import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.cloud.client.discovery.EnableDiscoveryClient;
import org.springframework.cloud.openfeign.EnableFeignClients;
/**
 * Entry point of the aggregator microservice. Registers with service discovery
 * and enables declarative Feign clients so other services can be called by name.
 */
@SpringBootApplication
@EnableDiscoveryClient
@EnableFeignClients
public class AggregatorMicroserviceApplication {

    public static void main(String[] args) {
        SpringApplication.run(AggregatorMicroserviceApplication.class, args);
    }
}
|
<gh_stars>0
package main
import (
"encoding/hex"
"fmt"
"time"
"github.com/alecthomas/kingpin"
"github.com/danilarff86/miio-go/common"
)
// installDiscovery registers the `discover` CLI command, which listens for
// devices announcing themselves on the local network and prints their details
// until the process is stopped.
func installDiscovery(app *kingpin.Application) {
	cmd := app.Command("discover", "Discover devices on the local network")
	cmd.Action(func(ctx *kingpin.ParseContext) error {
		sharedClient.SetDiscoveryInterval(time.Second * 2)

		sub, err := sharedClient.NewSubscription()
		if err != nil {
			// Report the failure through kingpin instead of panicking so the
			// CLI can print a proper error and exit non-zero.
			return err
		}

		for event := range sub.Events() {
			// Type-switch binding avoids repeating the type assertion per case.
			switch ev := event.(type) {
			case common.EventNewDevice:
				go writeDeviceInfo(ev.Device)
			case common.EventNewMaskedDevice:
				go writeMaskedDeviceInfo(ev.DeviceID)
			}
		}
		return nil
	})
}
// writeDeviceInfo prints identifying information about a newly discovered
// device. Intended to run on its own goroutine.
func writeDeviceInfo(dev common.Device) {
	deviceInfo, err := dev.GetInfo()
	if err != nil {
		// FIX: the error was previously discarded, after which the (zero/nil)
		// info result was dereferenced below.
		fmt.Printf("Failed to fetch info for device %d: %v\n", dev.ID(), err)
		return
	}
	fmt.Println("-------------")
	fmt.Println("Discovered new device:")
	fmt.Printf("ID: %d\n", dev.ID())
	fmt.Printf("Firmware Version: %s\n", deviceInfo.FirmwareVersion)
	fmt.Printf("Hardware Version: %s\n", deviceInfo.HardwareVersion)
	fmt.Printf("Mac Address: %s\n", deviceInfo.MacAddress)
	fmt.Printf("Model: %s\n", deviceInfo.Model)
	fmt.Printf("Token: %s\n", hex.EncodeToString(dev.GetToken()))
	fmt.Println("-------------")
}
// writeMaskedDeviceInfo prints a notice for devices that announce themselves
// with a masked token and therefore cannot be queried directly.
func writeMaskedDeviceInfo(deviceId uint32) {
	fmt.Println("-------------")
	fmt.Println("Discovered new device with masked token:")
	fmt.Printf("ID: %d\n", deviceId)
	// Typo fix in the user-facing message: "retreive" -> "retrieve".
	fmt.Println("You must manually retrieve this token in order to communicate with the device.")
	fmt.Println("-------------")
}
|
#!/bin/bash
# Copies the headers and static libraries of the listed packages from the
# local install prefix into the project's dependent/<TARGET> tree.
SRC_BASE=/usr/local
DEST_BASE=$(dirname "$0")/../dependent
TARGET=x86

# do_copy <pkg>: copy $SRC_BASE/<pkg>'s headers and *.a libs into $TARGET.
do_copy()
{
    mkdir -p "$TARGET/lib" "$TARGET/include"
    # FIX: use $SRC_BASE (it was declared but the paths were hard-coded).
    cp -rf "$SRC_BASE/$1"/include/* "$TARGET/include"
    cp -rf "$SRC_BASE/$1"/lib/*.a "$TARGET/lib"
}

# Abort if the destination tree is missing instead of wiping/copying in $PWD.
cd "$DEST_BASE" || exit 1
rm -rf "$TARGET"
do_copy http-parser
do_copy jemalloc
do_copy openssl
do_copy zlib
|
<gh_stars>10-100
const TatsuScript = require('../dist');
const assert = require('assert');
const variables = require('../dist/Functions/Common/variables');
// Smoke tests for the TatsuScript tag functions, executed with bare `assert`.
// Each block exercises one tag; the second argument to run() mocks just the
// Discord structures (channel/guild/member/author) the tag reads.
/**
 * abs function
 */
assert.equal(1, TatsuScript.run('{abs;-1}'));
/**
 * args function
 */
assert.equal('foo,bar', TatsuScript.run('{args}', {
  content: 'foo bar',
}));
/**
 * argslen function
 */
assert.equal(2, TatsuScript.run('{argslen}', {
  content: 'foo bar',
}));
/**
 * avg function
 */
assert.equal(10, TatsuScript.run('{avg;5;15}'));
assert.equal(20, TatsuScript.run('{avg;10;30}'));
assert.equal(30, TatsuScript.run('{avg;10;30;50}'));
/**
 * base function
 */
assert.equal(2, TatsuScript.run('{base;10;2;10}'));
assert.equal(64, TatsuScript.run('{base;100;8;10}'));
assert.equal('`Invalid radix`', TatsuScript.run('{base;10;2}'));
/**
 * bit function
 */
assert.equal('0', TatsuScript.run('{bit;!;2}'));
assert.equal('-3', TatsuScript.run('{bit;~;2}'));
assert.equal('2', TatsuScript.run('{bit;&;2;10}'));
assert.equal('8', TatsuScript.run('{bit;^;2;10}'));
assert.equal('10', TatsuScript.run('{bit;|;2;10}'));
assert.equal('2048', TatsuScript.run('{bit;<<;2;10}'));
assert.equal('0', TatsuScript.run('{bit;>>;2;10}'));
assert.equal('0', TatsuScript.run('{bit;>>>;2;10}'));
/**
 * ceil function
 */
assert.equal('3', TatsuScript.run('{ceil;2.6}'));
assert.equal('3', TatsuScript.run('{ceil;2.3}'));
assert.equal('-2', TatsuScript.run('{ceil;-2.6}'));
assert.equal('-2', TatsuScript.run('{ceil;-2.3}'));
/**
 * channelid function
 */
assert.equal('123456789123456789', TatsuScript.run('{channelid}', {
  channel: { id: '123456789123456789' },
}));
/**
 * channelname function
 */
assert.equal('foobar', TatsuScript.run('{channelname}', {
  channel: { name: 'foobar' },
}));
/**
 * channeltopic function
 */
assert.equal('foo bar foobar', TatsuScript.run('{channeltopic}', {
  channel: { topic: 'foo bar foobar' },
}));
/**
 * decr function
 */
assert.equal(1, TatsuScript.run('{decr;2}'));
assert.equal(-1, TatsuScript.run('{decr;0}'));
/**
 * floor function
 */
assert.equal(1, TatsuScript.run('{floor;1.6}'));
assert.equal(1, TatsuScript.run('{floor;1.3}'));
assert.equal(-2, TatsuScript.run('{floor;-1.6}'));
assert.equal(-2, TatsuScript.run('{floor;-1.3}'));
/**
 * set function
 */
// NOTE: the set/get tests are order-dependent — get reads the 'foo' variable
// stored here via the shared `variables` module.
assert.equal('', TatsuScript.run('{set;foo;bar}'));
assert.equal('bar', variables.foo[0].value);
/**
 * get function
 */
assert.equal('bar', TatsuScript.run('{get;foo}'));
/**
 * i function
 */
assert.equal('0', TatsuScript.run('{i}'));
/**
 * if function
 */
assert.equal('yes', TatsuScript.run('{if;==;2;2;yes;no}'));
assert.equal('yes', TatsuScript.run('{if;!=;2;3;yes;no}'));
assert.equal('yes', TatsuScript.run('{if;<;2;3;yes;no}'));
assert.equal('yes', TatsuScript.run('{if;>;3;2;yes;no}'));
assert.equal('yes', TatsuScript.run('{if;<=;2;3;yes;no}'));
assert.equal('yes', TatsuScript.run('{if;>=;3;2;yes;no}'));
assert.equal('yes', TatsuScript.run('{if;||;1;0;yes;no}'));
assert.equal('yes', TatsuScript.run('{if;&&;1;1;yes;no}'));
/**
 * incr function
 */
assert.equal('2', TatsuScript.run('{incr;1}'));
assert.equal('0', TatsuScript.run('{incr;-1}'));
/**
 * indexof function
 */
assert.equal('0', TatsuScript.run('{indexof;abcdefghij;a}'));
assert.equal('9', TatsuScript.run('{indexof;jihgfedbca;a}'));
/**
 * inject function
 */
assert.equal('1', TatsuScript.run('{inject;{lb}floor{semi}1.4{rb}}'));
/**
 * lastindexof function
 */
assert.equal('9', TatsuScript.run('{lastindexof;abcabcabca;a}'));
/**
 * lb function
 */
assert.equal('{', TatsuScript.run('{lb}'));
/**
 * length function
 */
assert.equal('10', TatsuScript.run('{length;abcabcabca}'));
assert.equal('', TatsuScript.run('{length}'));
/**
 * loop function
 */
assert.equal('A A A A A A', TatsuScript.run('{loop;1;6;A; }'));
/**
 * lower function
 */
assert.equal('aaaaa', TatsuScript.run('{lower;aAaAa}'));
/**
 * math function
 */
assert.equal('3', TatsuScript.run('{math;+;1;2}'));
assert.equal('1', TatsuScript.run('{math;-;3;1;1}'));
assert.equal('2', TatsuScript.run('{math;/;4;2}'));
assert.equal('2', TatsuScript.run('{math;*;1;2}'));
assert.equal('1', TatsuScript.run('{math;%;9;2}'));
assert.equal('4', TatsuScript.run('{math;^;2;2}'));
assert.equal('2', TatsuScript.run('{math;sqrt;4}'));
assert.equal('2', TatsuScript.run('{math;cbrt;8}'));
/**
 * nl function
 */
assert.equal('\n', TatsuScript.run('{nl}'));
/**
 * pad function
 */
assert.equal('0000a', TatsuScript.run('{pad;l;a;5;0}'));
assert.equal('a0000', TatsuScript.run('{pad;r;a;5;0}'));
assert.equal('`Invalid direction`', TatsuScript.run('{pad}'));
assert.equal('', TatsuScript.run('{pad;l}'));
/**
 * randarg function
 */
assert.equal('a', TatsuScript.run('{randarg}', {
  content: 'a a a a',
}));
/**
 * randchoice function
 */
assert.equal('a', TatsuScript.run('{randchoice;a;a;a;a;a}'));
/**
 * randint function
 */
assert.equal('1', TatsuScript.run('{randint;1;1}'));
/**
 * randstr function
 */
assert.equal('a', TatsuScript.run('{randstr;a;1}'));
/**
 * randuser function
 */
assert.equal('123456789123456789', TatsuScript.run('{randuser}', {
  channel: { guild: { members: { cache: { random: () => ({ id: '123456789123456789' }) } } } },
}));
/**
 * rb function
 */
assert.equal('}', TatsuScript.run('{rb}'));
/**
 * regexreplace function
 */
assert.equal('abbbba', TatsuScript.run('{regexreplace;a1111a;/\\d/g;b}'));
/**
 * repeat function
 */
assert.equal('aaaaa', TatsuScript.run('{repeat;a;5}'));
/**
 * replace function
 */
assert.equal('accdeb', TatsuScript.run('{replace;abcdeb;b;c}'));
/**
 * reverse function
 */
assert.equal('edcba', TatsuScript.run('{reverse;abcde}'));
/**
 * round function
 */
assert.equal('2', TatsuScript.run('{round;2.3}'));
assert.equal('3', TatsuScript.run('{round;2.6}'));
/**
 * semi function
 */
assert.equal(';', TatsuScript.run('{semi}'));
/**
 * serverid function
 */
assert.equal('123456789123456789', TatsuScript.run('{serverid}', {
  channel: { guild: { id: '123456789123456789' } },
}));
/**
 * servername function
 */
assert.equal('foobar', TatsuScript.run('{servername}', {
  channel: { guild: { name: 'foobar' } },
}));
/**
 * serverusers function
 */
assert.equal('100', TatsuScript.run('{serverusers}', {
  channel: { guild: { members: { cache: { size: 100 } } } },
}));
assert.equal('100', TatsuScript.run('{serverusers}', {
  channel: { guild: { members: { cache: {} }, memberCount: 100 } },
}));
/**
 * shuffle function
 */
assert.equal('aaaaa', TatsuScript.run('{shuffle;aaaaa}'));
/**
 * space function
 */
assert.equal('   ', TatsuScript.run('{space;3}'));
assert.equal(' ', TatsuScript.run('{space}'));
/**
 * substr function
 */
assert.equal('cdef', TatsuScript.run('{substr;abcdef;2}'));
assert.equal('bcde', TatsuScript.run('{substr;abcdef;1;4}'));
assert.equal('', TatsuScript.run('{substr}'));
assert.equal('abcdef', TatsuScript.run('{substr;abcdef}'));
/**
 * switch function
 */
assert.equal('3', TatsuScript.run('{switch;c;a;1;b;2;c;3;d;4;5}'));
assert.equal('5', TatsuScript.run('{switch;e;a;1;b;2;c;3;d;4;5}'));
/**
 * time function
 */
// NOTE(review): time tests are disabled — the results depend on the host's
// timezone/clock; re-enable once the tag accepts an injected clock.
//assert.equal('07:40:00', TatsuScript.run('{time;LTS;600000}'));
//assert.equal('21:10', TatsuScript.run('{time;HH:mm;600000;America/Sao_Paulo}'));
/**
 * upper function
 */
assert.equal('AAAAA', TatsuScript.run('{upper;aaaAa}'));
/**
 * usercreatedat function
 */
assert.equal('600000', TatsuScript.run('{usercreatedat;123456789123456789}', {
  channel: { guild: { members: { cache: {
    get: () => ({ user: { createdAt: 600000 } }),
    has: () => true
  } } } },
}));
assert.equal('600000', TatsuScript.run('{usercreatedat}', {
  author: { createdAt: 600000 },
}));
/**
 * userdiscrim function
 */
assert.equal('4000', TatsuScript.run('{userdiscrim;123456789123456789}', {
  channel: { guild: { members: { cache: {
    get: () => ({ discriminator: '4000' }),
    has: () => true
  } } } },
}));
assert.equal('4000', TatsuScript.run('{userdiscrim}', {
  author: { discriminator: '4000' },
}));
/**
 * userid function
 */
assert.equal('123456789123456789', TatsuScript.run('{userid;<@123456789123456789>}', {
  channel: { guild: { members: { cache: {
    get: () => ({ id: '123456789123456789' }),
    has: () => true
  } } } },
}));
// FIX: the fixture id had been replaced with a `<PASSWORD>` scrubber
// placeholder, which could never match the expected value; restored the
// snowflake used throughout this suite.
assert.equal('123456789123456789', TatsuScript.run('{userid}', {
  author: { id: '123456789123456789' },
}));
/**
 * username function
 */
// FIX: mention was scrubbed to '<@<PASSWORD>6789>'; restored the mention form
// used by the other user tags.
assert.equal('foobar', TatsuScript.run('{username;<@123456789123456789>}', {
  channel: { guild: { members: { cache: {
    get: () => ({ user: { username: 'foobar' } }),
    has: () => true
  } } } },
}));
assert.equal('foobar', TatsuScript.run('{username}', {
  author: { username: 'foobar' },
}));
/**
 * usernick function
 */
assert.equal('foobar', TatsuScript.run('{usernick;<@123456789123456789>}', {
  channel: { guild: { members: { cache: {
    get: () => ({ nick: 'foobar' }),
    has: () => true
  } } } },
}));
// FIX: mention was scrubbed to '<@<PASSWORD>>'; restored so the `nickname`
// fallback path is actually exercised.
assert.equal('foobar', TatsuScript.run('{usernick;<@123456789123456789>}', {
  channel: { guild: { members: { cache: {
    get: () => ({ nickname: 'foobar' }),
    has: () => true
  } } } },
}));
assert.equal('foobar', TatsuScript.run('{usernick}', {
  member: { nick: 'foobar' },
}));
assert.equal('foobar', TatsuScript.run('{usernick}', {
  member: { nickname: 'foobar' },
}));
<filename>spec/forms/candidate_interface/other_qualification_type_form_spec.rb
require 'rails_helper'
RSpec.describe CandidateInterface::OtherQualificationTypeForm do
  let(:error_message_scope) do
    'activemodel.errors.models.candidate_interface/other_qualification_type_form.attributes.'
  end

  describe 'validations' do
    it { is_expected.to validate_presence_of(:qualification_type) }
    it { is_expected.to validate_length_of(:other_uk_qualification_type).is_at_most(100) }
  end

  describe '#initialize' do
    let(:current_application) { create(:application_form) }
    # Stand-in for the wizard's intermediate state store; returns the data of a
    # previously entered non-UK qualification.
    let(:intermediate_data_service) do
      Class.new {
        def read
          {
            'qualification_type' => 'non_uk',
            'institution_country' => 'New Zealand',
            'non_uk_qualification_type' => 'German diploma',
          }
        end
      }.new
    end

    context 'the qualification type is being updated from a non-uk qualification to a uk qualification' do
      # FIX: description now matches the expectations below (be_nil, not '').
      it 'assigns nil to the non_uk attributes' do
        # described_class avoids repeating the class under test.
        form = described_class.new(
          current_application,
          intermediate_data_service,
          'qualification_type' => 'GCSE',
          'non_uk_qualification_type' => 'German diploma',
        )

        expect(form.qualification_type).to eq('GCSE')
        expect(form.non_uk_qualification_type).to be_nil
        expect(form.institution_country).to be_nil
      end
    end
  end
end
|
#!/bin/bash
# Redeploy the node application: stop the app and its services, pull the
# latest code on master, then bring everything back up.
# Abort on the first failed step so a broken checkout/pull never gets deployed.
set -e

echo "starting killing nodejs processes"
# killall exits non-zero when no matching process exists; that is fine here.
killall -9 nodejs || true
echo "finished killing nodejs"

echo "start killing off mongod"
sudo service mongod stop || true   # may already be stopped
echo "start checking repository"
git status && git fetch
echo "switching to master"
git checkout master
echo "pull the latest code"
git pull
echo "start mongod"
sudo service mongod start
echo "start nodejs"
# forever daemonizes app.js itself, so shell backgrounding (&) is unnecessary.
forever start app.js
echo "kill off apache2"
sudo service apache2 stop || true  # may already be stopped
echo "start apache2"
sudo service apache2 start
echo "finish deployment"
|
import sys
# Define the list of available commands and arguments
# Completion candidates: handle_tab_complete() matches the user's input
# against these lists (commands for inputs starting with 'c', arguments
# for inputs starting with 'a').
available_commands = ['command1', 'command2', 'command3']
available_arguments = ['arg1', 'arg2', 'arg3']
def handle_tab_complete():
    """Offer prefix completions for the interactive CLI.

    If the process was started with the ``--get_completions`` flag, exit
    immediately (everything that ran before this call already produced the
    completion output — see the note in the main guard).

    Otherwise prompt the user and print the entries from
    ``available_commands`` / ``available_arguments`` that start with the
    entered prefix.
    """
    if "--get_completions" in sys.argv:
        # Fixed: use sys.exit() instead of the exit() builtin, which is only
        # guaranteed to exist in interactive sessions (site module).
        sys.exit()

    user_input = input("Enter your command or argument: ")
    # Inputs starting with 'c' are treated as commands, 'a' as arguments.
    if user_input.startswith('c'):
        print("Completions for commands:", _prefix_matches(available_commands, user_input))
    elif user_input.startswith('a'):
        print("Completions for arguments:", _prefix_matches(available_arguments, user_input))
    else:
        print("No completions available for input:", user_input)


def _prefix_matches(candidates, prefix):
    # All candidates that start with the given prefix, in original order.
    return [candidate for candidate in candidates if candidate.startswith(prefix)]
def pymk_default():
    """Run the CLI's default behavior (currently a placeholder message)."""
    message = "Executing default behavior of the CLI"
    print(message)
if __name__ == "__main__":
    # Everything above this line will be executed for each TAB press.
    # If --get_completions is set, handle_tab_complete() calls exit(),
    # so pymk_default() only runs for a normal (non-completion) invocation.
    handle_tab_complete()
    pymk_default()
/**
 * Rotate an array to the left by `times` positions, returning a new array.
 * Fixes: the original shift/push loop turned an empty array into a list of
 * `undefined`s ([].shift() returns undefined, which was then pushed), and did
 * O(n * times) work. Rotation is reduced modulo the length, which also
 * generalizes to `times` larger than the array (same result as the loop) and
 * to negative `times` (right rotation).
 *
 * @param {Array} arr   source array (not mutated)
 * @param {number} times number of positions to rotate left
 * @returns {Array} new rotated array
 */
const leftShift = (arr, times) => {
  const n = arr.length;
  if (n === 0) {
    return [];
  }
  // Rotating by a multiple of the length is a no-op; only the remainder matters.
  const offset = ((times % n) + n) % n;
  return arr.slice(offset).concat(arr.slice(0, offset));
};
// Example usage: rotate a sample array left by two positions.
// Fixed: `array` and `times` were referenced without ever being defined,
// which raised a ReferenceError; values match the expected output comment.
const array = [1, 2, 3, 4, 5];
const times = 2;
const shiftedArray = leftShift(array, times);
console.log(shiftedArray); // [3, 4, 5, 1, 2]
import getProviderName from './providerName';
import ExtendedProvider from '../interface/ExtendedProvider';
describe('getProviderName function', (): void => {
it('gets name of the provider', (): void => {
const provider = { isMetaMask: true };
const providerName = getProviderName(provider as ExtendedProvider);
expect(providerName).toBe('metamask');
});
});
|
<reponame>hongjsk/node-red-contrib-http-request-header
var should = require('should')
var helper = require('node-red-node-test-helper')
var testNode = require('../http-request-header.js')
helper.init(require.resolve('node-red'));
// Tests for the http-request-header node.
describe('http-request-header Node', function () {
  this.timeout(15000);

  beforeEach(function (done) {
    helper.startServer(done);
  });

  afterEach(function (done) {
    helper.unload();
    helper.stopServer(done);
  });

  it('should be loaded', function (done) {
    // Fixed: the node's "name" was a redacted "<NAME>" placeholder while the
    // assertion below expects 'test name', which made this test fail.
    var flow = [{
      "id": "n1",
      "type": "http-request-header",
      "name": "test name",
      "items": [{"k": "k1", "v": "v1", "vt": "str"}],
      "wires": [[]]
    }]
    helper.load(testNode, flow, function () {
      var n1 = helper.getNode("n1");
      n1.should.have.property('name', 'test name');
      done();
    });
  });

  it('should make headers for HTTP', function (done) {
    var flow = [{
      "id": "n1",
      "type": "http-request-header",
      "name": "test name",
      "items": [{"k": "User-Agent", "v": "Awesome", "vt": "str"}],
      "wires": [['n2']]
    },
    {
      "id": "n2",
      "type": "helper"
    }]
    helper.load(testNode, flow, function () {
      var n2 = helper.getNode('n2')
      var n1 = helper.getNode('n1')
      n2.on('input', function (msg) {
        msg.should.have.property('headers')
        msg.headers.should.have.property('User-Agent', 'Awesome')
        done()
      })
      n1.receive({payload: "foo", topic: "bar"});
    })
  })

  it('should make headers with mustache', function (done) {
    // Fixed: same "<NAME>" placeholder as in the first test.
    var flow = [{
      "id": "n1",
      "type": "http-request-header",
      "name": "test name",
      "items": [{"k": "User-Agent", "v": "Awesome {{payload}}", "vt": "str"}],
      "wires": [['n2']]
    }, {
      "id": "n2",
      "type": "helper"
    }]
    helper.load(testNode, flow, function () {
      var n2 = helper.getNode('n2')
      var n1 = helper.getNode('n1')
      n2.on('input', function (msg) {
        msg.should.have.property('headers')
        msg.headers.should.have.property('User-Agent', 'Awesome foo')
        done()
      })
      n1.receive({payload: "foo", topic: "bar"});
    })
  })
})
package com.itms.wikiapp.metric.repo;
import com.itms.wikiapp.metric.model.entity.MetricEntity;
import org.bson.types.ObjectId;
import org.springframework.data.mongodb.repository.MongoRepository;
import org.springframework.stereotype.Repository;
import java.util.UUID;
/**
 * Spring Data repository for {@link MetricEntity} documents stored in MongoDB.
 */
@Repository
public interface MetricRepository extends MongoRepository<MetricEntity, ObjectId> {

    /**
     * Finds a single metric by its business UUID (distinct from the Mongo
     * {@code ObjectId} primary key). Returns {@code null} when no match exists.
     */
    MetricEntity findByUuid(UUID uuid);
}
|
<filename>migrations/1644093411617-CreateUser.js<gh_stars>0
const { MigrationInterface, QueryRunner } = require("typeorm");
// TypeORM migration: creates the "users" table.
// NOTE: the class name and `name` property encode the migration timestamp and
// are recorded in the migrations table — do not rename once this has run.
module.exports = class CreateUser1644093411617 {
  name = 'CreateUser1644093411617'

  // Applies the migration: "users" with serial id (PK), name and age.
  async up(queryRunner) {
    await queryRunner.query(`CREATE TABLE "users" ("id" SERIAL NOT NULL, "name" character varying NOT NULL, "age" integer NOT NULL, CONSTRAINT "PK_a3ffb1c0c8416b9fc6f907b7433" PRIMARY KEY ("id"))`);
  }

  // Reverts the migration: drops the "users" table.
  async down(queryRunner) {
    await queryRunner.query(`DROP TABLE "users"`);
  }
}
|
<gh_stars>0
import os

from sqlalchemy import create_engine

# SECURITY FIX: the previous revision hard-coded live Heroku Postgres
# credentials in source control. The URL now comes from the environment;
# the leaked password should be rotated.
# NOTE: SQLAlchemy 1.4+ rejects the legacy "postgres://" scheme — the URL in
# DATABASE_URL must use "postgresql://".
engine = create_engine(os.environ['DATABASE_URL'], echo=True)
|
package net.krazyweb.cataclysm.mapeditor.map.data.entryeditorcontrollers;
import net.krazyweb.cataclysm.mapeditor.map.data.MonsterGroupEntry;
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;
/**
 * Entry-editor controller for {@link MonsterGroupEntry} values.
 * Currently only logs the selected group; presumably the editor UI wiring
 * lives elsewhere — TODO confirm.
 */
public class MonsterGroupController {

    private static final Logger log = LogManager.getLogger(MonsterGroupController.class);

    /**
     * Receives the monster group selected for editing; only logs it for now.
     */
    public void setMonsterGroup(final MonsterGroupEntry monsterGroup) {
        log.debug(monsterGroup);
    }
}
|
// Sample profile of a developer with her technology stack.
const programadora = {
  nome: 'Julia',
  idade: 23,
  tecnologias: [
    { nome: 'JavaScript', especialidade: 'WEB/Mobile' },
    { nome: 'C#', especialidade: 'WEB' },
    { nome: 'C++', especialidade: 'Desktop' }
  ]
};

// Report the first (primary) technology; the printed text is unchanged.
const tecnologiaPrincipal = programadora.tecnologias[0];
console.log(`A usuária ${programadora.nome} tem ${programadora.idade} anos e usa a tecnologia ${tecnologiaPrincipal.nome} com especialidade em ${tecnologiaPrincipal.especialidade}`);
def mostFrequentLabel(label_list):
    """Return the most frequent label in ``label_list``.

    Ties are broken by first appearance (Counter.most_common uses a stable
    sort over insertion order, matching the original first-seen-wins loop).
    An empty list returns '' like the original implementation.
    """
    from collections import Counter

    if not label_list:
        return ''
    return Counter(label_list).most_common(1)[0][0]
/**
* @author ooooo
* @date 2021/5/26 13:25
*/
#ifndef CPP_1190__SOLUTION1_H_
#define CPP_1190__SOLUTION1_H_
#include <algorithm>
#include <iostream>
#include <stack>
#include <string>
#include <vector>
using namespace std;
class Solution {
 public:
  // Reverses the characters inside every pair of matching parentheses,
  // innermost pairs first; the parentheses themselves are removed.
  // Example: "(u(love)i)" -> "iloveu".
  std::string reverseParentheses(std::string s) {
    // `buffer` doubles as a character stack (push_back/pop_back).
    std::string buffer;
    for (char c : s) {
      if (c != ')') {
        buffer.push_back(c);
        continue;
      }
      // Unwind back to the matching '(': popping naturally yields the
      // enclosed segment in reversed order.
      std::string reversed_segment;
      while (buffer.back() != '(') {
        reversed_segment.push_back(buffer.back());
        buffer.pop_back();
      }
      buffer.pop_back();  // discard the '('
      buffer += reversed_segment;
    }
    // Bottom-to-top stack order is exactly the answer, so no final reversal
    // is needed (the original popped everything and then reversed).
    return buffer;
  }
};
#endif //CPP_1190__SOLUTION1_H_
|
## @file
# process OptionROM generation from INF statement
#
# Copyright (c) 2007, Intel Corporation. All rights reserved.<BR>
#
# This program and the accompanying materials
# are licensed and made available under the terms and conditions of the BSD License
# which accompanies this distribution. The full text of the license may be found at
# http://opensource.org/licenses/bsd-license.php
#
# THE PROGRAM IS DISTRIBUTED UNDER THE BSD LICENSE ON AN "AS IS" BASIS,
# WITHOUT WARRANTIES OR REPRESENTATIONS OF ANY KIND, EITHER EXPRESS OR IMPLIED.
#
##
# Import Modules
#
import RuleSimpleFile
import RuleComplexFile
import Section
import OptionRom
import Common.GlobalData as GlobalData
from Common.DataType import *
from Common.String import *
from FfsInfStatement import FfsInfStatement
from GenFdsGlobalVariable import GenFdsGlobalVariable
##
#
#
## OptRomInfStatement
#
#  Processes OptionROM generation from an INF statement.
#
class OptRomInfStatement (FfsInfStatement):
    ## The constructor
    #
    #   @param  self        The object pointer
    #
    def __init__(self):
        FfsInfStatement.__init__(self)
        # Lazily populated OptionRom.OverrideAttribs parsed from INF defines.
        self.OverrideAttribs = None

    ## __GetOptRomParams() method
    #
    #   Parse the INF file to get option-ROM-related parameters.
    #   Attributes already set on OverrideAttribs are left untouched; missing
    #   ones are filled from the OptRomDefs dictionary.
    #   (Fixed: ==/!= None comparisons replaced with is/is not None, the
    #   correct identity idiom the method already used in one place.)
    #
    #   @param  self        The object pointer
    #
    def __GetOptRomParams(self):
        if self.OverrideAttribs is None:
            self.OverrideAttribs = OptionRom.OverrideAttribs()

        if self.OverrideAttribs.NeedCompress is None:
            self.OverrideAttribs.NeedCompress = self.OptRomDefs.get ('PCI_COMPRESS')
            if self.OverrideAttribs.NeedCompress is not None:
                # PCI_COMPRESS must be literally TRUE or FALSE (case-insensitive).
                if self.OverrideAttribs.NeedCompress.upper() not in ('TRUE', 'FALSE'):
                    GenFdsGlobalVariable.ErrorLogger( "Expected TRUE/FALSE for PCI_COMPRESS: %s" %self.InfFileName)
                self.OverrideAttribs.NeedCompress = \
                    self.OverrideAttribs.NeedCompress.upper() == 'TRUE'

        if self.OverrideAttribs.PciVendorId is None:
            self.OverrideAttribs.PciVendorId = self.OptRomDefs.get ('PCI_VENDOR_ID')

        if self.OverrideAttribs.PciClassCode is None:
            self.OverrideAttribs.PciClassCode = self.OptRomDefs.get ('PCI_CLASS_CODE')

        if self.OverrideAttribs.PciDeviceId is None:
            self.OverrideAttribs.PciDeviceId = self.OptRomDefs.get ('PCI_DEVICE_ID')

        if self.OverrideAttribs.PciRevision is None:
            self.OverrideAttribs.PciRevision = self.OptRomDefs.get ('PCI_REVISION')

    ## GenFfs() method
    #
    #   Generate FFS: parse the INF, collect option ROM parameters, then
    #   produce the .efi file list according to the applicable rule.
    #   Returns None when the rule is neither simple nor complex (unchanged
    #   from the original behavior).
    #
    #   @param  self        The object pointer
    #   @retval string      Generated .efi file name
    #
    def GenFfs(self):
        #
        # Parse INF file to get module-related information
        #
        self.__InfParse__()
        self.__GetOptRomParams()
        #
        # Get the rule of how to generate the Ffs file
        #
        Rule = self.__GetRule__()
        GenFdsGlobalVariable.VerboseLogger( "Packing binaries from inf file : %s" %self.InfFileName)
        #
        # For a rule that only has a simple file
        #
        if isinstance (Rule, RuleSimpleFile.RuleSimpleFile) :
            EfiOutputList = self.__GenSimpleFileSection__(Rule)
            return EfiOutputList
        #
        # For a rule that has a complex file
        #
        elif isinstance(Rule, RuleComplexFile.RuleComplexFile):
            EfiOutputList = self.__GenComplexFileSection__(Rule)
            return EfiOutputList

    ## __GenSimpleFileSection__() method
    #
    #   Get .efi files according to a simple rule.
    #
    #   @param  self        The object pointer
    #   @param  Rule        The rule object used to generate section
    #   @retval string      File name of the generated section file
    #
    def __GenSimpleFileSection__(self, Rule):
        #
        # Prepare the parameter of GenSection
        #
        OutputFileList = []
        if Rule.FileName is not None:
            GenSecInputFile = self.__ExtendMacro__(Rule.FileName)
            OutputFileList.append(GenSecInputFile)
        else:
            OutputFileList, IsSect = Section.Section.GetFileList(self, '', Rule.FileExtension)

        return OutputFileList

    ## __GenComplexFileSection__() method
    #
    #   Get .efi files by the PE32 sections of a complex rule.
    #
    #   @param  self        The object pointer
    #   @param  Rule        The rule object used to generate section
    #   @retval string      File name of the generated section file
    #
    def __GenComplexFileSection__(self, Rule):
        OutputFileList = []
        for Sect in Rule.SectionList:
            if Sect.SectionType == 'PE32':
                if Sect.FileName is not None:
                    GenSecInputFile = self.__ExtendMacro__(Sect.FileName)
                    OutputFileList.append(GenSecInputFile)
                else:
                    FileList, IsSect = Section.Section.GetFileList(self, '', Sect.FileExtension)
                    OutputFileList.extend(FileList)

        return OutputFileList
<reponame>athaa/godoc2puml
package annotator
import "github.com/athaa/godoc2puml/ast"
// Cut removes (probably) unnecessary relations preserving longest path.
func Cut(scope *ast.Scope) error {
	backproj := buildBackProjections(scope)
	for _, pkg := range scope.Packages {
		for _, class := range pkg.Classes {
			kept := make([]*ast.Relation, 0, len(class.Relations))
			for _, rel := range class.Relations {
				// An implementation edge is redundant when a longer route to
				// the same target exists through the sibling relations.
				redundant := rel.RelType == ast.Implementation &&
					!isLongestPath(backproj, class.Relations, rel)
				if !redundant {
					kept = append(kept, rel)
				}
			}
			class.Relations = kept
		}
		for _, iface := range pkg.Interfaces {
			kept := make([]*ast.Relation, 0, len(iface.Relations))
			for _, rel := range iface.Relations {
				// Same pruning for interface extension edges.
				redundant := rel.RelType == ast.Extension &&
					!isLongestPath(backproj, iface.Relations, rel)
				if !redundant {
					kept = append(kept, rel)
				}
			}
			iface.Relations = kept
		}
	}
	return nil
}
// buildBackProjections maps every relation target to the qualified names of
// the entities that point at it (reverse edges of extension, composition and
// implementation relations).
func buildBackProjections(scope *ast.Scope) (backproj map[string][]string) {
	backproj = map[string][]string{}
	for _, pkg := range scope.Packages {
		prefix := ""
		if pkg.Name != "" {
			prefix = pkg.Name + "."
		}
		for _, iface := range pkg.Interfaces {
			qualified := prefix + iface.Name
			for _, rel := range iface.Relations {
				if rel.RelType == ast.Extension {
					addPath(backproj, rel, qualified)
				}
			}
		}
		for _, class := range pkg.Classes {
			qualified := prefix + class.Name
			for _, rel := range class.Relations {
				if rel.RelType == ast.Composition || rel.RelType == ast.Implementation {
					addPath(backproj, rel, qualified)
				}
			}
		}
	}
	return
}
// addPath records a reverse edge: the relation's target is reachable from the
// named source entity.
func addPath(backproj map[string][]string, to *ast.Relation, from string) {
	// append handles a nil slice, so the original explicit initialization of
	// backproj[target] was redundant; the resulting map state is identical.
	backproj[to.Target] = append(backproj[to.Target], from)
}
// isLongestPath reports whether the direct edge to goal is the longest route:
// it is NOT the longest (returns false) when the goal can also be reached
// indirectly from one of the sibling implementation/composition relations.
func isLongestPath(backproj map[string][]string, rootRels []*ast.Relation, goal *ast.Relation) bool {
	// Collect every sibling relation except the goal itself; these are the
	// alternative starting points for an indirect route.
	starts := map[string]*ast.Relation{}
	for _, sibling := range rootRels {
		if sibling == goal {
			continue
		}
		if sibling.RelType != ast.Implementation && sibling.RelType != ast.Composition {
			continue
		}
		starts[sibling.Target] = sibling
	}
	return !findRouteToGoalRecursive(backproj, starts, goal.Target)
}
// findRouteToGoalRecursive walks the reverse edges from goal and reports
// whether any of the root entities can reach it.
// NOTE(review): assumes the relation graph is acyclic — a cycle in backproj
// would recurse forever; confirm upstream guarantees this.
func findRouteToGoalRecursive(backproj map[string][]string, roots map[string]*ast.Relation, goal string) (reachable bool) {
	for _, predecessor := range backproj[goal] {
		if roots[predecessor] != nil || findRouteToGoalRecursive(backproj, roots, predecessor) {
			return true
		}
	}
	return false
}
|
#!/usr/bin/env bash
#
# Fetch remote tags for the checked-out nvm repository as specified by NVM_DIR
# and then switch to the latest version tag.

# Strict mode: fail on errors (also in subshells/functions via -E/-T and
# inherit_errexit), on unset variables, and on pipeline failures.
set -euETo pipefail
shopt -s inherit_errexit

# NVM_DIR must point at a git checkout of nvm (not a tarball install).
if ! [[ -v NVM_DIR && -d "$NVM_DIR" && -d "$NVM_DIR/.git" ]]; then
  echo 'nvm does not seem to be installed here.' >&2
  echo '(Expecting NVM_DIR to be set to a checked-out git repository.)' >&2
  exit 1
fi

cd "$NVM_DIR"

# Under `set -e` a non-tag HEAD makes `git describe --exact-match` abort here.
active=$(git describe --exact-match) # n.b. nvm project uses annotated tags
if ! [[ "$active" == v*.*.* ]]; then
  echo "Got '$active' for current HEAD of $NVM_DIR instead of version tag." >&2
  exit 1
fi

# Sync tags with the remote, dropping any locally deleted ones.
git fetch --tags --prune --prune-tags

# Highest version tag by semantic-version sort.
latest=$(git tag --list 'v*.*.*' --sort version:refname | tail --lines=1)
if [ -z "$latest" ]; then
  echo 'Cannot determine the latest version.' >&2
  exit 1
elif [ "$active" == "$latest" ]; then
  git status
  echo
  echo "$active is already the latest version of nvm."
else
  echo
  echo "Switching nvm from $active to $latest..."
  echo
  git checkout "$latest"
  echo
  git status
fi
|
# Create the two input sets.
set1 = {1, 2, 3, 4, 5}
set2 = {3, 4, 5, 6, 7}

# Elements common to both sets (same as the & operator).
intersection = set1.intersection(set2)

# Show the shared elements.
print(intersection)
#!/usr/bin/env bash
# Build and publish the custom kube-scheduler Docker image.
# Usage: ./build.sh [version]   (version defaults to "test")
# Abort on the first failed step so a broken build is never pushed.
set -e

if [ -z "$1" ]; then
  version="test"
else
  version="$1"
fi

# Remove stale local images for this version; ignore "no such image" errors.
sudo docker rmi "lenhattan86/my-kube-scheduler:$version" -f || true
sudo docker rmi "my-kube-scheduler:$version" -f || true

# delete all containers
#docker rm -f $(docker ps -a -q)
# delete all images
#docker rmi -f $(docker images -q)

# Compile only the scheduler from the Kubernetes source tree
# (a full `make quick-release` is not needed).
# make clean
make kube-scheduler

# Build, tag and push the scheduler image from the current tree.
kubernetes_src="."
#echo "FROM busybox
#ADD ./_output/dockerized/bin/linux/amd64/kube-scheduler /usr/local/bin/kube-scheduler" > $kubernetes_src/Dockerfile
docker build -t "my-kube-scheduler:$version" "$kubernetes_src"
docker tag "my-kube-scheduler:$version" "lenhattan86/my-kube-scheduler:$version"
docker push "lenhattan86/my-kube-scheduler:$version"

echo "$kubernetes_src" "$version"
date
package com.fauconnet.old;
import java.util.Date;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.JSONValue;
import com.fauconnet.devisu.I_DataManager;
import com.fauconnet.devisu.MongoProxy;
import com.fauconnet.devisu.XmlRadarModelManager;
import com.mongodb.BasicDBList;
import com.mongodb.BasicDBObject;
import com.mongodb.DB;
import com.mongodb.DBObject;
import com.mongodb.util.JSON;
/**
 * Legacy MongoDB-backed implementation of {@link I_DataManager} for the radar
 * visualization: items live in Mongo collections, the column/role mapping
 * comes from an XML radar model.
 */
public class SqlDataManager implements I_DataManager {

    /** Highest item id seen so far; used to allocate ids for new radar items. */
    private long maxId;
    private MongoProxy proxy;
    private String dbName;
    private XmlRadarModelManager xmlModelManager;
    /** User recorded in the "modifiedBy" audit field. */
    private String userId = "admin";
    /** Name of the id field; overridable by the XML model. */
    private String idField = "id";

    /**
     * @param dbName      Mongo database name
     * @param dataDirPath directory containing the XML radar model files
     * @param host        Mongo host
     * @param port        Mongo port
     * @throws Exception when the Mongo server is unreachable
     */
    public SqlDataManager(String dbName, String dataDirPath, String host, int port) throws Exception {
        this.dbName = dbName;
        try {
            xmlModelManager = new XmlRadarModelManager(dbName, dataDirPath);
            if (xmlModelManager != null)
                idField = xmlModelManager.getIdColumnName();
        } catch (Exception e) {
            // Best effort: a missing/broken XML model leaves the default id field.
        }
        try {
            proxy = new MongoProxy(host, port, dbName);
        } catch (Exception e) {
            throw new Exception (" Mongo Server not started or connected");
        }
    }

    public String getUserId() {
        return userId;
    }

    public void setUserId(String userId) {
        this.userId = userId;
    }

    private String getDbName() {
        return dbName;
    }

    private int getMongoPort() {
        return 27017;
    }

    private String getMongoHost() {
        return "localhost";
    }

    /** Fetches a single document by its numeric id, or null when absent. */
    private DBObject getItemById(String collectionName, int id) {
        DBObject query = new BasicDBObject("id", id);
        return proxy.getOneDocument(collectionName, query);
    }

    /**
     * Stamps the audit fields and replaces the document matching its own id.
     *
     * @throws Exception when the document carries no id field
     */
    private String updateItemById(String collectionName, DBObject object) throws Exception {
        object.put("lastModified", new Date());
        object.put("modifiedBy", userId);
        Object id = object.get(idField);
        if (id == null)
            throw new Exception("object has no id: " + object.toString());
        DBObject query = new BasicDBObject("id", id);
        return proxy.updateObject(collectionName, query, object);
    }

    /**
     * Updates a single field of an item, coercing the new value to int/float
     * when it looks numeric. The {@code save} flag is kept for interface
     * compatibility; the update is always persisted.
     */
    public void updateField(String collectionName, int id, String field, String newValue, boolean save) throws Exception {
        DBObject obj = getItemById(collectionName, id);
        // The XML model may map the displayed field name to a storage column.
        String field2 = xmlModelManager.getRole(field);
        if (field2 == null)
            field2 = field;
        if (newValue.equals("")) {
            obj.put(field2, newValue);
        } else if (newValue.matches("[\\-0-9]*")) {// int
            obj.put(field2, Integer.parseInt(newValue));
        } else if (newValue.matches("[\\-0-9.]*")) {// numeric
            float f = Float.parseFloat(newValue);
            obj.put(field2, f);
        } else
            obj.put(field2, newValue);
        updateItemById(collectionName, obj);
    }

    /** Bulk-saves the given JSON payload; fileName is currently unused. */
    public void saveData(String collectionName, String json, String fileName) {
        proxy.updateMultipleObjects(collectionName, json);
    }

    /** Replaces the item with the given id by the JSON payload, stamping audit fields. */
    public void updateItemFromRadar(String collectionName, int id, String jsonStr) {
        DBObject query = new BasicDBObject("id", id);
        DBObject object = (DBObject) JSON.parse(jsonStr);
        object.put("lastModified", new Date());
        object.put("modifiedBy", userId);
        proxy.updateObject(collectionName, query, object);
    }

    /** Inserts a new placeholder radar item at (10, 10) with the next free id. */
    public void addRadarItem(String collectionName) {
        DBObject obj = new BasicDBObject();
        obj.put("name", "??");
        obj.put("id", maxId + 1);
        obj.put("x", 10);
        obj.put("y", 10);
        addAdminFields( obj, collectionName );
        proxy.insert(collectionName, obj);
    }

    /** Removes the item with the given id. */
    public void deleteItem(String collectionName, int id) {
        DBObject query = new BasicDBObject("id", id);
        proxy.removeByQuery(collectionName, query);
    }

    /**
     * Builds the radar JSON payload: projects each document through the XML
     * model's roles/filters, assigning missing ids and default coordinates
     * (persisting those repairs) along the way.
     */
    public String getRadarJsonData(String collectionName, String jsonQuery) {
        boolean shouldSetItemsCoordinates = false;
        DBObject query;
        if (jsonQuery == null) {
            query = new BasicDBObject();
        } else
            query = (DBObject) JSON.parse(jsonQuery);
        List<DBObject> array = proxy.getDocuments(collectionName, query, 50000);
        BasicDBList arrayOut = new BasicDBList();
        int k = 0;
        for (int i = 0; i < array.size(); i++) {
            boolean shouldSave = false;
            DBObject obj2 = (DBObject) array.get(i);
            obj2.removeField("_id");
            DBObject objOut = new BasicDBObject();
            Map<String, String> radarRoles = xmlModelManager.getRadarRoles();
            Iterator<String> it = radarRoles.keySet().iterator();
            while (it.hasNext()) {
                String role = it.next();
                String colName = radarRoles.get(role);
                Object val = obj2.get(colName);
                if (role.equals("id")) {
                    if (val == null) {
                        shouldSave = true;
                        val = maxId++; // 0 misbehaves later, so take the running counter
                        obj2.put("id", val);
                    } else {
                        try {
                            maxId = Math.max(maxId, ((Integer) val).longValue());
                        } catch (Exception e) {
                            System.out.println(" pb parse " + val + " " + e.toString());
                        }
                    }
                    objOut.put("id", val);
                } else if (role.equals("x")) {
                    if (val == null) {
                        shouldSave = true;
                        val = (k * 5); // spread items lacking coordinates diagonally
                        obj2.put("x", val);
                    }
                    objOut.put("x", val);
                } else if (role.equals("y")) {
                    if (val == null) {
                        shouldSave = true;
                        val = (k * 5);
                        obj2.put("y", val);
                    }
                    objOut.put("y", val);
                } else if (val != null) {
                    objOut.put(role, val);
                }
            }
            // Filter columns are always copied through, even when null.
            Iterator<String> it2 = xmlModelManager.getFilters().iterator();
            while (it2.hasNext()) {
                String colName = it2.next();
                Object val = obj2.get(colName);
                objOut.put(colName, val);
            }
            k++;
            arrayOut.add(objOut);
            if (shouldSave) {
                shouldSetItemsCoordinates = true;
                proxy.updateObject(collectionName, new BasicDBObject(), obj2);
            }
        }
        DBObject objOut2 = new BasicDBObject();
        if (shouldSetItemsCoordinates) {
            objOut2.put("shouldSetItemsCoordinates", "yes");
        }
        objOut2.put("points", arrayOut);
        BasicDBList arrayOut2 = new BasicDBList();
        arrayOut2.add(objOut2);
        return arrayOut2.toString();
    }

    /** Returns raw documents matching the (optional) JSON query. */
    public String getDataJson(String collectionName, String queryStr, boolean reload) throws Exception {
        DBObject query;
        if (queryStr == null) {
            query = new BasicDBObject("id", new BasicDBObject("$gt", -1));
        } else {
            query = (DBObject) JSON.parse(queryStr);
        }
        return proxy.getDocuments(collectionName, query, 50000).toString();
    }

    /** Returns one item restricted to the fields configured for the detail page. */
    public String getDetailedData(String collectionName, int id) throws Exception {
        DBObject query = new BasicDBObject("id", id);
        DBObject object = proxy.getOneDocument(collectionName, query);
        List<String> fields = xmlModelManager.getDetailedPageFields();
        Iterator<String> it = object.keySet().iterator();
        while (it.hasNext()) {
            String field = it.next();
            if (fields.indexOf(field) < 0)
                object.removeField(field);
        }
        return object.toString();
    }

    /**
     * @param args unused
     */
    public static void main(String[] args) {
        // Intentionally empty.
    }

    /**
     * Replaces an item given as JSON, stamping audit fields.
     * Same contract as {@link #updateItemById} but taking raw JSON.
     */
    public String updateItem(String collectionName, String json) throws Exception {
        DBObject object = (DBObject) JSON.parse(json);
        object.put("lastModified", new Date());
        object.put("modifiedBy", userId);
        Object id = object.get(idField);
        if (id == null)
            throw new Exception("object has no id: " + object.toString());
        DBObject query = new BasicDBObject("id", id);
        return proxy.updateObject(collectionName, query, object);
    }

    public XmlRadarModelManager getXmlModelManager() {
        return xmlModelManager;
    }

    /** Inserts one item from JSON, assigning an id and audit fields. */
    public void addItem(String collectionName, String jsonItem) {
        DBObject object = (DBObject) JSON.parse(jsonItem);
        addAdminFields( object, collectionName );
        proxy.insert(collectionName, object);
    }

    /** Creates a database with the standard radar collections. */
    public void createDB(String dbName) throws Exception {
        DB db = proxy.createDB(dbName);
        proxy.createCollection(db, "radar");
        proxy.createCollection(db, "nodes");
        proxy.createCollection(db, "links");
        proxy.createCollection(db, "details");
        proxy.createCollection(db, "admin");
    }

    public String getCollectionNames(String dbName) throws Exception {
        return proxy.listCollections();
    }

    public String getDBNames(String dbName) throws Exception {
        return proxy.listDBs();
    }

    /** Inserts every item of a JSON array, assigning ids and audit fields. */
    public void addItems(String collectionName, String jsonItem) throws Exception {
        Object obj = JSON.parse(jsonItem);
        List<DBObject> list = (List<DBObject>) obj;
        Iterator<DBObject> it = list.iterator();
        while (it.hasNext()) {
            DBObject object = it.next();
            addAdminFields( object, collectionName );
            proxy.insert(collectionName, object);
        }
    }

    /** Assigns a fresh id when missing and stamps the audit fields. */
    private void addAdminFields(DBObject object, String collectionName ){
        if (object.get("id") == null) {
            object.put("id", proxy.getMaxId( collectionName) + 1);
        }
        object.put("lastModified", new Date());
        object.put("modifiedBy", userId);
    }

    public String getDBNames() throws Exception {
        return null;
    }

    /**
     * Looks up a user by login/password and returns the document as a string,
     * or null when no such user exists (previously this threw a
     * NullPointerException on a missing match).
     * NOTE(review): this compares a plaintext password stored in Mongo —
     * passwords should be hashed; flagged, not fixed, as the schema is
     * defined elsewhere.
     */
    public String getUserRights(String login, String password) {
        DBObject query = new BasicDBObject();
        query.put("login", login);
        query.put("password", password);
        DBObject user = proxy.getOneDocument("user", query);
        return user == null ? null : user.toString();
    }
}
|
import matplotlib.pyplot as plt
import random
import time
import matplotlib.patches as patch
import math

# Benchmark list.sort() over a range of input sizes and plot the mean sort
# time (red) against an n*lg(n) reference curve (blue).
#
# Fixes over the original:
#   * random.seed() returns None, so the list held only Nones (unsortable in
#     Python 3) — use random.random() instead;
#   * `sortable` was never cleared, so each run sorted the *cumulative* list;
#   * `tempTimes` was never reset per size, and the mean divided by 20
#     although only 10 samples were taken per size.

size = [5, 10, 50, 100, 500, 1000, 5000, 10000]
RUNS = 10  # timed repetitions per size

points = []  # mean sort time per size
logs = []    # reference curve values

for n in range(1, len(size) + 1):
    tempTimes = []
    for _ in range(RUNS):
        sortable = [random.random() for _ in range(size[n - 1])]
        t1 = time.time()
        sortable.sort()
        t2 = time.time()
        tempTimes.append(t2 - t1)
    # NOTE(review): the reference curve uses the size *index* n rather than
    # size[n-1]; kept as in the original — confirm which was intended.
    logs.append(n * math.log(n, 2))
    points.append(sum(tempTimes) / RUNS)

redPatch = patch.Patch(color='red', label='Sort Time')
bluePatch = patch.Patch(color='blue', label='n(lg(n))')
plt.legend(handles=[redPatch, bluePatch], loc=2)
plt.plot(points, 'r', logs, 'b')
plt.ylabel('time in seconds')
plt.show()
package org.hzero.sso.saml.autoconfigure;
import java.io.IOException;
import java.security.KeyStore;
import java.security.KeyStoreException;
import java.security.NoSuchAlgorithmException;
import java.security.cert.CertificateException;
import java.security.spec.InvalidKeySpecException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.List;
import javax.xml.stream.XMLStreamException;
import org.apache.commons.httpclient.HttpClient;
import org.apache.commons.httpclient.MultiThreadedHttpConnectionManager;
import org.apache.velocity.app.VelocityEngine;
import org.opensaml.saml2.metadata.provider.MetadataProvider;
import org.opensaml.saml2.metadata.provider.MetadataProviderException;
import org.opensaml.xml.parse.ParserPool;
import org.opensaml.xml.parse.StaticBasicParserPool;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.boot.autoconfigure.condition.ConditionalOnMissingBean;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;
import org.springframework.security.saml.SAMLBootstrap;
import org.springframework.security.saml.context.SAMLContextProviderImpl;
import org.springframework.security.saml.key.JKSKeyManager;
import org.springframework.security.saml.log.SAMLDefaultLogger;
import org.springframework.security.saml.metadata.CachingMetadataManager;
import org.springframework.security.saml.metadata.ExtendedMetadata;
import org.springframework.security.saml.parser.ParserPoolHolder;
import org.springframework.security.saml.processor.*;
import org.springframework.security.saml.trust.httpclient.TLSProtocolConfigurer;
import org.springframework.security.saml.util.VelocityFactory;
import org.springframework.security.saml.websso.*;
import org.hzero.sso.core.config.SsoProperties;
import org.hzero.sso.core.security.service.SsoUserAccountService;
import org.hzero.sso.core.security.service.SsoUserDetailsBuilder;
import org.hzero.sso.core.type.SsoRegister;
import org.hzero.sso.core.util.KeyStoreLocator;
import org.hzero.sso.saml.config.SamlSsoRegister;
import org.hzero.sso.saml.provider.SamlAuthenticationProvider;
import org.hzero.sso.saml.service.SamlUserDetailsService;
@Configuration
@ComponentScan(value = { "org.hzero.sso.saml", })
public class SamlAutoConfiguration {
@Autowired
private SsoProperties ssoProperties;
@Bean
public SsoRegister samlSsoRegister() {
return new SamlSsoRegister();
}
@Bean
@ConditionalOnMissingBean(SamlUserDetailsService.class)
public SamlUserDetailsService samlUserDetailsService(SsoUserAccountService userAccountService,
SsoUserDetailsBuilder userDetailsBuilder) {
return new SamlUserDetailsService(userAccountService, userDetailsBuilder);
}
@Bean
@ConditionalOnMissingBean(SamlAuthenticationProvider.class)
public SamlAuthenticationProvider samlAuthenticationProvider(SamlUserDetailsService samlUserDetailsService) {
SamlAuthenticationProvider samlAuthenticationProvider = new SamlAuthenticationProvider(
samlUserDetailsService);
// samlAuthenticationProvider.setUserDetails(defaultSAMLUserDetailsService);
samlAuthenticationProvider.setForcePrincipalAsString(false);
samlAuthenticationProvider.setExcludeCredential(true);
return samlAuthenticationProvider;
}
@Bean(name = "samlHttpClient")
public HttpClient httpClient() {
return new HttpClient(new MultiThreadedHttpConnectionManager());
}
@Bean
public ExtendedMetadata extendedMetadata() {
ExtendedMetadata extendedMetadata = new ExtendedMetadata();
extendedMetadata.setIdpDiscoveryEnabled(false);
extendedMetadata.setSignMetadata(true);
extendedMetadata.setEcpEnabled(true);
return extendedMetadata;
}
@Bean
@Qualifier("metadata")
public CachingMetadataManager metadata() throws MetadataProviderException {
List<MetadataProvider> providers = new ArrayList<>();
CachingMetadataManager metadataManager = new CachingMetadataManager(providers);
return metadataManager;
}
@Bean
public VelocityEngine velocityEngine() {
return VelocityFactory.getEngine();
}
@Bean(initMethod = "initialize")
public ParserPool parserPool() {
return new StaticBasicParserPool();
}
@Bean(name = "parserPoolHolder")
public ParserPoolHolder parserPoolHolder() {
return new ParserPoolHolder();
}
@Bean
public SAMLContextProviderImpl contextProvider() {
return new SAMLContextProviderImpl();
}
@Bean
public JKSKeyManager keyManager() throws InvalidKeySpecException, CertificateException, NoSuchAlgorithmException, KeyStoreException, IOException, XMLStreamException {
KeyStore keyStore = KeyStoreLocator.createKeyStore(ssoProperties.getSso().getSaml().getPassphrase());
KeyStoreLocator.addPrivateKey(keyStore, ssoProperties.getSso().getSaml().getEntityId(), ssoProperties.getSso().getSaml().getPrivateKey(), ssoProperties.getSso().getSaml().getCertificate(), ssoProperties.getSso().getSaml().getPassphrase());
return new JKSKeyManager(keyStore, Collections.singletonMap(ssoProperties.getSso().getSaml().getEntityId(), ssoProperties.getSso().getSaml().getPassphrase()), ssoProperties.getSso().getSaml().getEntityId());
}
@Bean
public WebSSOProfileOptions defaultWebSSOProfileOptions() {
WebSSOProfileOptions webSSOProfileOptions = new WebSSOProfileOptions();
webSSOProfileOptions.setIncludeScoping(false);
return webSSOProfileOptions;
}
private ArtifactResolutionProfile artifactResolutionProfile() {
final ArtifactResolutionProfileImpl artifactResolutionProfile =
new ArtifactResolutionProfileImpl(httpClient());
artifactResolutionProfile.setProcessor(new SAMLProcessorImpl(soapBinding()));
return artifactResolutionProfile;
}
@Bean
public HTTPArtifactBinding artifactBinding(ParserPool parserPool, VelocityEngine velocityEngine) {
return new HTTPArtifactBinding(parserPool, velocityEngine, artifactResolutionProfile());
}
@Bean
public HTTPSOAP11Binding soapBinding() {
return new HTTPSOAP11Binding(parserPool());
}
@Bean
public HTTPPostBinding httpPostBinding() {
return new HTTPPostBinding(parserPool(), velocityEngine());
}
@Bean
public HTTPRedirectDeflateBinding httpRedirectDeflateBinding() {
return new HTTPRedirectDeflateBinding(parserPool());
}
@Bean
public HTTPSOAP11Binding httpSOAP11Binding() {
return new HTTPSOAP11Binding(parserPool());
}
/** HTTP PAOS 1.1 binding. */
@Bean
public HTTPPAOS11Binding httpPAOS11Binding() {
return new HTTPPAOS11Binding(parserPool());
}
/**
 * SAML processor aggregating every configured message binding
 * (redirect, POST, artifact, SOAP, PAOS).
 */
@Bean
public SAMLProcessorImpl processor() {
// Diamond operator instead of the redundant explicit type argument.
Collection<SAMLBinding> bindings = new ArrayList<>();
bindings.add(httpRedirectDeflateBinding());
bindings.add(httpPostBinding());
bindings.add(artifactBinding(parserPool(), velocityEngine()));
bindings.add(httpSOAP11Binding());
bindings.add(httpPAOS11Binding());
return new SAMLProcessorImpl(bindings);
}
/** Logger for SAML messages and events. */
@Bean
public SAMLDefaultLogger samlLogger() {
return new SAMLDefaultLogger();
}
/** SAML 2.0 WebSSO assertion consumer. */
@Bean
public WebSSOProfileConsumer webSSOprofileConsumer() {
return new WebSSOProfileConsumerImpl();
}
/** SAML 2.0 Holder-of-Key WebSSO assertion consumer. */
@Bean
public WebSSOProfileConsumerHoKImpl hokWebSSOprofileConsumer() {
return new WebSSOProfileConsumerHoKImpl();
}
/** SAML 2.0 Web SSO profile. */
@Bean
public WebSSOProfile webSSOprofile() {
return new WebSSOProfileImpl();
}
/** SAML 2.0 ECP profile. */
@Bean
public WebSSOProfileECPImpl ecpprofile() {
return new WebSSOProfileECPImpl();
}
/** Registers the TLS protocol configurer for SAML metadata/artifact HTTPS calls. */
@Bean
public TLSProtocolConfigurer tlsProtocolConfigurer() {
return new TLSProtocolConfigurer();
}
/**
 * Initialization of the OpenSAML library. Declared {@code static} so it is
 * registered before the enclosing configuration class is instantiated.
 */
@Bean
public static SAMLBootstrap sAMLBootstrap() {
return new SAMLBootstrap();
}
}
|
from typing import List, Dict
def process_text_explanations(text_shap_values: List[str], label_index: int) -> Dict[str, float]:
    """Parse "key: value" SHAP strings, keep entries for one label, sort by value.

    Args:
        text_shap_values: strings of the form "<token>: <shap_value>".
        label_index: zero-based label index; only keys starting with
            "word<label_index + 1>:" are kept.

    Returns:
        Dict of matching keys to float SHAP values, ordered by value descending.
    """
    # rsplit exactly once from the right so tokens that themselves contain
    # ": " still parse (the plain split(": ") raised ValueError on such input);
    # the final field is always the numeric SHAP value.
    text_exp = {
        k: float(v)
        for k, v in (exp.rsplit(": ", 1) for exp in text_shap_values)
    }
    # NOTE(review): keys only contain ':' when the raw token does; confirm the
    # "word<N>:" prefix convention against the producer of text_shap_values.
    filtered_exp = {
        k: v
        for k, v in text_exp.items()
        if k.startswith(f"word{label_index + 1}:")
    }
    # Largest SHAP contribution first.
    return dict(sorted(filtered_exp.items(), key=lambda item: item[1], reverse=True))
#include <iostream>
#include <cassert>
// Enum of the statistics tracked by OutstationStatistics.
// The three real statistics occupy ordinals 0..2, so statistics_count__
// (ordinal 3) doubles as the number of real entries.
enum class Statistics {
    error_messages_sent__,
    unexpected_messages__,
    secure_messages_sent_,
    statistics_count__ // Represents the total number of statistics
};
// Tracks simple outstation counters and the pending-APDU flag.
class OutstationStatistics {
public:
    // Returns the current value of the requested statistic.
    // For statistics_count__, returns the number of real statistics (3).
    int getStatistic(Statistics stat) {
        switch (stat) {
            case Statistics::error_messages_sent__:
                return errorMessagesSent;
            case Statistics::unexpected_messages__:
                return unexpectedMessages;
            case Statistics::secure_messages_sent_:
                return secureMessagesSent;
            case Statistics::statistics_count__:
                return static_cast<int>(Statistics::statistics_count__);
        }
        // Defensive default: the original fell off the end of a non-void
        // function (undefined behavior) if an out-of-range enum value was
        // passed, e.g. via a cast.
        return 0;
    }
    // Returns true when an APDU is pending.
    bool pollAPDU() {
        return hasPendingAPDU;
    }
private:
    int errorMessagesSent = 0;
    int unexpectedMessages = 0;
    int secureMessagesSent = 0;
    bool hasPendingAPDU = false;
};
// Unit test for the OutstationStatistics class
void testOutstationStatistics() {
OutstationStatistics outstation;
// Test getStatistic method
assert(outstation.getStatistic(Statistics::error_messages_sent__) == 0);
assert(outstation.getStatistic(Statistics::unexpected_messages__) == 0);
assert(outstation.getStatistic(Statistics::secure_messages_sent_) == 0);
assert(outstation.getStatistic(Statistics::statistics_count__) == 3); // Ensure statistics_count is accurate
// Test pollAPDU method
assert(!outstation.pollAPDU()); // No pending APDU
}
int main() {
    // assert() aborts on any failure, so reaching the print means all checks passed.
    testOutstationStatistics();
    std::cout << "All tests passed successfully." << std::endl;
    return 0;
}
<filename>src/main/java/frc/robot/Robot.java
package frc.robot;
import edu.wpi.first.networktables.NetworkTableInstance;
import edu.wpi.first.wpilibj.DriverStation;
import edu.wpi.first.wpilibj.TimedRobot;
import edu.wpi.first.wpilibj2.command.Command;
import edu.wpi.first.wpilibj2.command.CommandScheduler;
import io.github.oblarg.oblog.Logger;
/**
 * Main robot class: wires {@link RobotContainer}, Oblog logging, and the
 * command scheduler into the TimedRobot lifecycle callbacks.
 */
public class Robot extends TimedRobot {
    private Command mAutonomousCommand;
    private RobotContainer mRobotContainer;
    /** Builds the container and binds it to Oblog logging/config. */
    @Override
    public void robotInit() {
        mRobotContainer = new RobotContainer();
        Logger.configureLoggingAndConfig(mRobotContainer, false);
    }
    /**
     * Runs every loop: flushes NetworkTables, runs the command scheduler,
     * refreshes Oblog entries and the field display.
     * NOTE(review): flushing NetworkTables every loop increases network
     * traffic — confirm it is intentional.
     */
    @Override
    public void robotPeriodic() {
        NetworkTableInstance.getDefault().flush();
        CommandScheduler.getInstance().run();
        Logger.updateEntries();
        mRobotContainer.updateField();
    }
    /** Stops all subsystems when the robot is disabled. */
    @Override
    public void disabledInit() {
        mRobotContainer.stopAll().schedule();
    }
    /** Schedules the autonomous command selected by the container, if any. */
    @Override
    public void autonomousInit() {
        mAutonomousCommand = mRobotContainer.getAutonomousCommand();
        if (mAutonomousCommand != null) {
            mAutonomousCommand.schedule();
        }
    }
    /** Cancels any leftover autonomous command when teleop starts. */
    @Override
    public void teleopInit() {
        if (mAutonomousCommand != null) {
            mAutonomousCommand.cancel();
        }
    }
    /** Clears the scheduler before test mode. */
    @Override
    public void testInit() {
        CommandScheduler.getInstance().cancelAll();
    }
    /** Silences the joystick warning, which is noise in simulation. */
    @Override
    public void simulationInit() {
        DriverStation.silenceJoystickConnectionWarning(true);
    }
}
|
#!/bin/sh
# Import database fixtures via the project's PHP console command.
php console db:fixture:import
|
# -*- coding: utf-8 -*-
import os
import shutil
import io, sys
# Directory this script operates on.
# Default: the current working directory.
workDir = os.getcwd() + "/"
#workDir = "/Users/user/Downloads/tesPython/tesDir2/"
# Fallback delimiters used when a file name cannot be split on whitespace.
splitList = ["-",
"."
]
# On Windows, keep stdout from using the cp932 code page: re-wrap it so
# unencodable characters are backslash-escaped instead of raising.
if os.name == "nt":
    sys.stdout = io.TextIOWrapper(sys.stdout.buffer,
    encoding=sys.stdout.encoding,
    errors='backslashreplace',
    line_buffering=sys.stdout.line_buffering)
def moveDir(fileName, dirName):
    # Thin wrapper over shutil.move: relocate fileName into dirName.
    shutil.move(fileName, dirName)
def getFilesDirs(workDir_):
    """Return (files, dirs) found in workDir_, skipping hidden entries.

    Args:
        workDir_: directory to list.

    Returns:
        Tuple (fileList, dirList) of plain names (not paths).
    """
    files = os.listdir(workDir_)
    fileList = []
    dirList = []
    for file in files:
        # Skip hidden entries (leading dot).
        if file[0] == ".":
            continue
        # Classify by what the entry actually is on disk. The original used
        # '"." in file', which misclassified directories whose names contain
        # a dot (e.g. "my.dir") as files.
        if os.path.isfile(os.path.join(workDir_, file)):
            fileList.append(file)
        else:
            dirList.append(file)
    return fileList, dirList
# Split a file name by the configured rules and return its grouping prefix.
def getPrefix(file, delimiters=None):
    """Return the prefix used to group `file` into a directory.

    A whitespace split wins; otherwise the first delimiter that splits the
    name is used. Falls back to the whole name when nothing splits (the
    original raised UnboundLocalError in that case).

    Args:
        file: file name to split.
        delimiters: optional delimiter list; defaults to the module-level
            splitList (backward compatible).
    """
    if delimiters is None:
        delimiters = splitList
    # Fallback: un-splittable names are their own prefix.
    splits = [file]
    for split in delimiters:
        if len(file.split()) > 1:
            splits = file.split()
            break
        elif len(file.split(split)) > 1:
            splits = file.split(split)
            break
    return splits[0]
# Group every file in workDir into a sub-directory named after its prefix,
# creating the directory on first use.
files, dirs = getFilesDirs(workDir)
for file in files:
    prefix = getPrefix(file)
    if prefix in dirs:
        shutil.move(workDir + file, workDir + prefix)
    else:
        mkdirPath = workDir + prefix
        print(mkdirPath)
        os.mkdir(mkdirPath)
        shutil.move(workDir + file, workDir + prefix)
        # Refresh listings so later files can reuse the directory just created.
        # NOTE(review): rebinding `files` here does not affect the loop (it
        # iterates the original list object); only `dirs` matters — confirm
        # the original indentation placed this line inside the else branch.
        files, dirs = getFilesDirs(workDir)
|
<filename>src/components/Core/Svg/interface.ts<gh_stars>0
import assets from '../../../assets';
import { FlexProps } from '../Flex/interface';
/** One color stop of an SVG gradient. */
export interface Stop {
  offset: string;
  stopColor: string;
}
/** Gradient definition as an ordered list of stops. */
export interface Gradient {
  stops: Array<Stop>;
}
/** SMIL animation attributes applied to a path (maps to an <animate> element). */
export interface Animate {
  fill: string;
  begin: string;
  dur: string;
  repeatCount: string;
  attributeName: string;
  to: string;
}
/** A single SVG path: geometry, optional fill, optional animations. */
export interface Path {
  d: string;
  fill?: string;
  animates?: Array<Animate>;
}
/** Static description of an SVG asset as stored in the assets module. */
export interface SvgAsset {
  paths: Array<Path>;
  viewBox: string;
  fill?: string;
  linearGradient?: Gradient;
  radialGradient?: Gradient;
}
/** Names of all bundled vector assets. */
export type VectorTypes = keyof typeof assets;
/** Props of the Svg component: a Flex container rendering a named vector. */
export interface SvgProps extends FlexProps {
  vector: VectorTypes;
  color?: string;
}
|
#!/usr/bin/env bash
# default set to using debug version openresty
# NOTE(review): delegates entirely to `dbg_or`, which must be on PATH — confirm.
dbg_or
|
import matplotlib.pyplot as plt
import numpy as np
class RocketSimulator:
    """Fixed-time-step rocket motion simulator with 3D trajectory plotting.

    NOTE(review): `RigidBodyState_3DoF` and `Main` (timeStep, simLength) are
    not defined or imported in this file, and the calculate_* force methods
    are not implemented here — confirm they are provided by the surrounding
    project.
    """
    def __init__(self, position, velocity, orientation):
        self.state = RigidBodyState_3DoF(position, velocity, orientation)
        self.time = 0.0
    def update_state(self, thrust, drag, gravitational_force):
        """Advance the state by one time step under the given forces."""
        # Calculate acceleration based on forces (assumes the three force
        # arguments are expressed compatibly, scalar or vector — TODO confirm).
        acceleration = (thrust - drag - gravitational_force) / self.state.mass
        # Update velocity and position using explicit Euler time-stepping.
        self.state.velocity += acceleration * Main.timeStep
        self.state.position += self.state.velocity * Main.timeStep
        # Update orientation (not shown for brevity)
        # Increment time
        self.time += Main.timeStep
    def simulate_motion(self, duration):
        """Run the simulation forward by `duration` seconds of sim time."""
        num_steps = int(duration / Main.timeStep)
        for _ in range(num_steps):
            # Calculate forces acting on the rocket (not shown for brevity)
            thrust = self.calculate_thrust()
            drag = self.calculate_drag()
            gravitational_force = self.calculate_gravitational_force()
            self.update_state(thrust, drag, gravitational_force)
    def plot_trajectory(self):
        """Simulate until Main.simLength and display the 3D trajectory.

        NOTE(review): np.append inside the loop copies the whole array every
        step (quadratic); collecting rows in a list and stacking once would
        be linear.
        """
        positions = np.array([self.state.position.toArray()])
        while self.time < Main.simLength:
            # Calculate forces acting on the rocket (not shown for brevity)
            thrust = self.calculate_thrust()
            drag = self.calculate_drag()
            gravitational_force = self.calculate_gravitational_force()
            self.update_state(thrust, drag, gravitational_force)
            positions = np.append(positions, [self.state.position.toArray()], axis=0)
        fig = plt.figure()
        ax = fig.add_subplot(111, projection='3d')
        ax.plot(positions[:, 0], positions[:, 1], positions[:, 2])
        ax.set_xlabel('X')
        ax.set_ylabel('Y')
        ax.set_zlabel('Z')
        plt.show()
<reponame>Mrlgm/voir-ui
import chai from 'chai'
import sinon from 'sinon';
import sinonChai from 'sinon-chai'
import validate from '../../src/validate';
const expect = chai.expect;
chai.use(sinonChai)
// Unit tests for validate(data, rules). Each rule targets a key and may carry
// required / pattern / minLength; validate returns an errors object keyed by
// field with one message string per failed rule.
describe('validate', () => {
    it('存在.', () => {
        expect(validate).to.exist
    })
    it('required: true 通过', () => {
        let data = {email: ''}
        let rules = [{key: 'email', required: true}]
        let errors = validate(data, rules)
        expect(errors.email.required).to.eq('必填')
    })
    // The number 0 counts as "present": required must not fire.
    it('test email is 0', () => {
        let data = {email: 0}
        let rules = [{key: 'email', required: true}]
        let errors = validate(data, rules)
        expect(errors.email).to.not.exist
    })
    it('格式不正确', () => {
        let data = {email: '@foxmail.com'}
        let rules = [{key: 'email', pattern: /^.+@.+$/}]
        let errors = validate(data, rules)
        expect(errors.email.pattern).to.eq('格式不正确')
    })
    it('格式正确', () => {
        let data = {email: '<EMAIL>'}
        let rules = [{key: 'email', pattern: /^.+@.+$/}]
        let errors = validate(data, rules)
        expect(errors.email).to.not.exist
    })
    // pattern may also be the named shortcut 'email' instead of a RegExp.
    it('email格式不正确', () => {
        let data = {email: '@foxmail.com'}
        let rules = [{key: 'email', pattern: 'email'}]
        let errors = validate(data, rules)
        expect(errors.email.pattern).to.eq('格式不正确')
    })
    it('email格式正确', () => {
        let data = {email: '<EMAIL>'}
        let rules = [{key: 'email', pattern: 'email'}]
        let errors = validate(data, rules)
        expect(errors.email).to.not.exist
    })
    // A required failure suppresses the pattern check for the same field.
    it('pattern & required', () => {
        let data = {email: ''}
        let rules = [{key: 'email', pattern: 'email', required: true}]
        let errors = validate(data, rules)
        expect(errors.email.required).to.eq('必填')
        expect(errors.email.pattern).to.not.exist
    })
    // minLength and pattern are evaluated independently.
    it('pattern & minLength', () => {
        let data = {email: ''}
        let rules = [{key: 'email', pattern: 'email', minLength: 6}]
        let errors = validate(data, rules)
        expect(errors.email.minLength).to.exist
        expect(errors.email.pattern).to.exist
    })
})
<reponame>regseb/castkod
import assert from "node:assert";
import sinon from "sinon";
import { kodi } from "../../../src/core/kodi.js";
import { extract } from "../../../src/core/scrapers.js";
// Scraper tests for YouTube URLs: extract() must map watch/playlist/embed
// URLs to plugin://plugin.video.youtube/play/ URLs, honoring the
// "youtube-playlist" (video vs playlist) and "youtube-order" storage options,
// and must return the original URL untouched when it is not a video/playlist.
// NOTE(review): `browser` is an undeclared global — presumably the
// WebExtension storage mock installed by the test setup; confirm.
// NOTE(review): sinon stubs are created per test but never restored here;
// a global afterEach/sinon.restore() elsewhere is assumed — confirm.
describe("Scraper: YouTube", function () {
    it("should return URL when it's not a video", async function () {
        const url = new URL("https://www.youtube.com/watch?x=123456");
        const options = { depth: false, incognito: false };
        const file = await extract(url, options);
        assert.strictEqual(file, url.href);
    });
    it("should return playlist id from video in playlist", async function () {
        browser.storage.local.set({
            "youtube-playlist": "playlist",
            "youtube-order": "default",
        });
        const stub = sinon.stub(kodi.addons, "getAddons").resolves([]);
        const url = new URL("https://www.youtube.com/watch" +
            "?v=avt4ZWlVjdY&list=PL7nedIL_qbuZBS5ZAiGkjB1LW9C3zZvum");
        const options = { depth: false, incognito: false };
        const file = await extract(url, options);
        assert.strictEqual(file,
            "plugin://plugin.video.youtube/play/" +
            "?playlist_id=PL7nedIL_qbuZBS5ZAiGkjB1LW9C3zZvum" +
            "&order=default&play=1&incognito=false");
        assert.strictEqual(stub.callCount, 1);
        assert.deepStrictEqual(stub.firstCall.args, ["video"]);
    });
    it("should return video id", async function () {
        browser.storage.local.set({ "youtube-playlist": "video" });
        const stub = sinon.stub(kodi.addons, "getAddons").resolves([]);
        const url = new URL("https://www.youtube.com/watch" +
            "?v=avt4ZWlVjdY&list=PL7nedIL_qbuZBS5ZAiGkjB1LW9C3zZvum");
        const options = { depth: false, incognito: true };
        const file = await extract(url, options);
        assert.strictEqual(file,
            "plugin://plugin.video.youtube/play/" +
            "?video_id=avt4ZWlVjdY&incognito=true");
        assert.strictEqual(stub.callCount, 1);
        assert.deepStrictEqual(stub.firstCall.args, ["video"]);
    });
    // Without a "list" query parameter, the playlist preference is moot.
    it("should return video id even with playlist option", async function () {
        browser.storage.local.set({ "youtube-playlist": "playlist" });
        const stub = sinon.stub(kodi.addons, "getAddons").resolves([]);
        const url = new URL("https://www.youtube.com/watch?v=sWfAtMQa_yo");
        const options = { depth: false, incognito: false };
        const file = await extract(url, options);
        assert.strictEqual(file,
            "plugin://plugin.video.youtube/play/" +
            "?video_id=sWfAtMQa_yo&incognito=false");
        assert.strictEqual(stub.callCount, 1);
        assert.deepStrictEqual(stub.firstCall.args, ["video"]);
    });
    it("should return video id when protocol is HTTP", async function () {
        browser.storage.local.set({ "youtube-playlist": "playlist" });
        const stub = sinon.stub(kodi.addons, "getAddons").resolves([]);
        const url = new URL("http://www.youtube.com/watch?v=sWfAtMQa_yo");
        const options = { depth: false, incognito: false };
        const file = await extract(url, options);
        assert.strictEqual(file,
            "plugin://plugin.video.youtube/play/" +
            "?video_id=sWfAtMQa_yo&incognito=false");
        assert.strictEqual(stub.callCount, 1);
        assert.deepStrictEqual(stub.firstCall.args, ["video"]);
    });
    it("should return URL when it's not a video from mobile",
                                                           async function () {
        const url = new URL("https://m.youtube.com/watch?a=dQw4w9WgXcQ");
        const options = { depth: false, incognito: false };
        const file = await extract(url, options);
        assert.strictEqual(file, url.href);
    });
    it("should return video id from mobile", async function () {
        browser.storage.local.set({ "youtube-playlist": "playlist" });
        const stub = sinon.stub(kodi.addons, "getAddons").resolves([]);
        const url = new URL("https://m.youtube.com/watch?v=dQw4w9WgXcQ");
        const options = { depth: false, incognito: false };
        const file = await extract(url, options);
        assert.strictEqual(file,
            "plugin://plugin.video.youtube/play/" +
            "?video_id=dQw4w9WgXcQ&incognito=false");
        assert.strictEqual(stub.callCount, 1);
        assert.deepStrictEqual(stub.firstCall.args, ["video"]);
    });
    it("should return URL when it's not a video from music", async function () {
        const url = new URL("https://music.youtube.com/watch?m=abcdef");
        const options = { depth: false, incognito: false };
        const file = await extract(url, options);
        assert.strictEqual(file, url.href);
    });
    it("should return video id from music", async function () {
        browser.storage.local.set({ "youtube-playlist": "video" });
        const stub = sinon.stub(kodi.addons, "getAddons").resolves([]);
        const url = new URL("https://music.youtube.com/watch" +
            "?v=IOqxarVWKRs&list=RDAMVMIOqxarVWKRs");
        const options = { depth: false, incognito: true };
        const file = await extract(url, options);
        assert.strictEqual(file,
            "plugin://plugin.video.youtube/play/" +
            "?video_id=IOqxarVWKRs&incognito=true");
        assert.strictEqual(stub.callCount, 1);
        assert.deepStrictEqual(stub.firstCall.args, ["video"]);
    });
    it("should return URL when it's not a playlist", async function () {
        const url = new URL("https://www.youtube.com/playlist?v=dQw4w9WgXcQ");
        const options = { depth: false, incognito: false };
        const file = await extract(url, options);
        assert.strictEqual(file, url.href);
    });
    it("should return playlist id", async function () {
        browser.storage.local.set({ "youtube-order": "" });
        const stub = sinon.stub(kodi.addons, "getAddons").resolves([]);
        const url = new URL("https://www.youtube.com/playlist" +
            "?list=PLd8UclkuwTj9vaRGP3859UHcdmlrkAd-9");
        const options = { depth: false, incognito: false };
        const file = await extract(url, options);
        assert.strictEqual(file,
            "plugin://plugin.video.youtube/play/" +
            "?playlist_id=PLd8UclkuwTj9vaRGP3859UHcdmlrkAd-9" +
            "&order=&play=1&incognito=false");
        assert.strictEqual(stub.callCount, 1);
        assert.deepStrictEqual(stub.firstCall.args, ["video"]);
    });
    it("should return URL when it's not a playlist from mobile",
                                                           async function () {
        const url = new URL("https://m.youtube.com/playlist" +
            "?video=PL3A5849BDE0581B19");
        const options = { depth: false, incognito: false };
        const file = await extract(url, options);
        assert.strictEqual(file, url.href);
    });
    it("should return playlist id from mobile", async function () {
        browser.storage.local.set({ "youtube-order": "reverse" });
        const stub = sinon.stub(kodi.addons, "getAddons").resolves([]);
        const url = new URL("https://m.youtube.com/playlist" +
            "?list=PL3A5849BDE0581B19");
        const options = { depth: false, incognito: false };
        const file = await extract(url, options);
        assert.strictEqual(file,
            "plugin://plugin.video.youtube/play/" +
            "?playlist_id=PL3A5849BDE0581B19" +
            "&order=reverse&play=1&incognito=false");
        assert.strictEqual(stub.callCount, 1);
        assert.deepStrictEqual(stub.firstCall.args, ["video"]);
    });
    it("should return embed video id", async function () {
        const stub = sinon.stub(kodi.addons, "getAddons").resolves([]);
        const url = new URL("https://www.youtube.com/embed/v3gefWEggSc");
        const options = { depth: false, incognito: true };
        const file = await extract(url, options);
        assert.strictEqual(file,
            "plugin://plugin.video.youtube/play/" +
            "?video_id=v3gefWEggSc&incognito=true");
        assert.strictEqual(stub.callCount, 1);
        assert.deepStrictEqual(stub.firstCall.args, ["video"]);
    });
    it("should return video id without cookie", async function () {
        const stub = sinon.stub(kodi.addons, "getAddons").resolves([]);
        const url = new URL("https://www.youtube-nocookie.com/embed" +
            "/u9gVaeb9le4");
        const options = { depth: false, incognito: false };
        const file = await extract(url, options);
        assert.strictEqual(file,
            "plugin://plugin.video.youtube/play/" +
            "?video_id=u9gVaeb9le4&incognito=false");
        assert.strictEqual(stub.callCount, 1);
        assert.deepStrictEqual(stub.firstCall.args, ["video"]);
    });
    it("should return video id from tiny URL", async function () {
        const stub = sinon.stub(kodi.addons, "getAddons").resolves([]);
        const url = new URL("https://youtu.be/NSFbekvYOlI");
        const options = { depth: false, incognito: false };
        const file = await extract(url, options);
        assert.strictEqual(file,
            "plugin://plugin.video.youtube/play/" +
            "?video_id=NSFbekvYOlI&incognito=false");
        assert.strictEqual(stub.callCount, 1);
        assert.deepStrictEqual(stub.firstCall.args, ["video"]);
    });
});
|
<filename>api/docker_api.go
package api
import (
"context"
"github.com/docker/docker/api/types"
"github.com/docker/docker/client"
"github.com/gin-gonic/gin"
"github.com/GaruGaru/Warden/agent"
)
// DockerApi serves container/node information read from the local Docker daemon.
type DockerApi struct {
	DockerClient client.Client
}
// NewDockerApi builds a DockerApi backed by a client configured from the
// environment (DOCKER_HOST etc.); returns the client-creation error unchanged.
func NewDockerApi() (DockerApi, error) {
	cli, err := client.NewEnvClient()
	if err != nil {
		return DockerApi{}, err
	}
	return DockerApi{DockerClient: *cli}, nil
}
// DockerNode is the API shape of one node and its containers.
type DockerNode struct {
	ID         string
	Name       string
	Role       string
	Status     string
	Ip         string
	Containers []DockerContainer
}
// DockerContainer is the API shape of one container.
type DockerContainer struct {
	ID        string
	Name      string
	Image     string
	State     string
	CreatedAt int64
}
// Handler is the gin endpoint: 200 with the node list on success, 500 with
// the error message otherwise.
// NOTE(review): the error payload uses key "info" while success uses "nodes";
// confirm clients expect this asymmetry.
func (a DockerApi) Handler(c *gin.Context) {
	ctx := context.Background()
	nodesMap, err := a.GetNodeMapFromLocal(ctx)
	if err != nil {
		c.JSON(500, gin.H{"info": agent.HostInfo{}, "error": err.Error()})
	} else {
		c.JSON(200, gin.H{"nodes": nodesMap, "error": nil})
	}
}
// GetNodeMapFromLocal lists the local daemon's containers and wraps them in a
// single synthetic "localhost" master node.
func (a DockerApi) GetNodeMapFromLocal(ctx context.Context) ([]DockerNode, error) {
	containers, err := a.DockerClient.ContainerList(ctx, types.ContainerListOptions{})
	if err != nil {
		return []DockerNode{}, err
	}
	list := make([]DockerContainer, len(containers))
	for i, c := range containers {
		list[i] = DockerContainer{
			ID:        c.ID,
			Name:      c.Names[0],
			Image:     c.Image,
			State:     c.State,
			CreatedAt: c.Created,
		}
	}
	node := DockerNode{
		ID:         "0",
		Name:       "localhost",
		Role:       "master",
		Status:     "RUNNING",
		Ip:         "127.0.0.1",
		Containers: list,
	}
	return []DockerNode{node}, nil
}
|
//
// IWViewController.h
// IWUserMainMoudle
//
// Created by Hanssea on 12/14/2018.
// Copyright (c) 2018 Hanssea. All rights reserved.
//
@import UIKit;
// Base view controller for the IWUserMainMoudle example target.
// NOTE(review): "Moudle" looks like a typo for "Module", but renaming would
// break the module/file layout, so it is only flagged here.
@interface IWViewController : UIViewController
@end
|
<gh_stars>0
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <sys/socket.h>
#include <sys/types.h>
#include <netinet/in.h>
#include <stdbool.h>
#include <time.h>
#include "common.h"
#include "types.h"
// Single-threaded TCP server: accepts one client at a time, performs a fixed
// two-byte handshake, then reads motor-parameter packets byte-by-byte,
// validating header/footer and counting heartbeats (packets with no content).
// NOTE(review): on a blocking socket, read() returning 0 means the peer closed
// the connection; this loop treats it as "no data yet" and only drops the
// client after MAX_MISSING_HEARTBEAT seconds — confirm that is intended.
int main(int argc, char** argv)
{
    int sockfd;
    struct sockaddr_in srv_addr;
    // Create, bind, and listen on the well-known port.
    sockfd = socket(AF_INET, SOCK_STREAM, 0);
    if (sockfd < 0)
        error_exit("Error creating socket", EXIT_FAILURE);
    memset(&srv_addr, 0, sizeof(struct sockaddr_in));
    srv_addr.sin_family = AF_INET;
    srv_addr.sin_addr.s_addr = INADDR_ANY;
    srv_addr.sin_port = htons(DEFAULT_PORT);
    if (bind(sockfd, (struct sockaddr*)&srv_addr, sizeof(struct sockaddr_in)) < 0)
    {
        error_exit("Error binding socket", EXIT_FAILURE);
    }
    if (listen(sockfd, 5) < 0)
    {
        error_exit("Error on listening", EXIT_FAILURE);
    }
    struct sockaddr_in clt_addr;
    socklen_t cltlen = sizeof(struct sockaddr_in);
ACCEPT_CONNECTION:
    printf("Now accepting connections...\n");
    int accept_sockfd = accept(sockfd, (struct sockaddr*)&clt_addr, &cltlen);
    if (accept_sockfd < 0)
    {
        error_exit("Error accepting new socket", EXIT_FAILURE);
    }
    // Handshake: expect HANDSHAKE_CLIENT, reply with HANDSHAKE_SERVER.
    printf("Now initiating handshake... ");
    uint8_t buf[2];
    memset(buf, 0, sizeof(buf));
    int n = read(accept_sockfd, buf, sizeof(buf));
    if (n < 0)
    {
        error_exit("Error reading handshake from socket", EXIT_FAILURE);
    }
    if (memcmp(buf, HANDSHAKE_CLIENT, sizeof(HANDSHAKE_CLIENT)) != 0)
    {
        error_exit("Handshake failed", EXIT_FAILURE);
    }
    printf("Received correct handshake from client\n");
    printf("Now sending handshake back to client... ");
    n = write(accept_sockfd, HANDSHAKE_SERVER, sizeof(HANDSHAKE_SERVER));
    if (n < 0)
    {
        error_exit("Error writing handshake to socket", EXIT_FAILURE);
    }
    printf("Done.\n");
    // The header is accumulated one byte at a time into tryout_header so the
    // stream can be resynchronized after a bad byte.
    motor_param_packet_header tryout_header;
    uint8_t *tryout_buf = (uint8_t *)&tryout_header;
    int tryout_buf_idx = 0;
    int packet_counter = 0;
    int heartbeat_counter = 0;
    int missed_heartbeat = 0;
    time_t last_heartbeat_time = time(NULL);
    while(1)
    {
        if (tryout_buf_idx < sizeof(motor_param_packet_header))
        {
            // Still collecting header bytes: validate each byte as it arrives.
            n = read(accept_sockfd, tryout_buf + tryout_buf_idx, 1);
            if (n > 0)
            {
                if (check_packet_header_by_byte(&tryout_header, tryout_buf_idx) == 0)
                {
                    tryout_buf_idx++;
                    continue;
                }
                else
                {
                    // wrong header byte:
                    // memmove tryout_buf one byte forward to resynchronize
                    memmove(tryout_buf, tryout_buf+1, sizeof(motor_param_packet_header)-1);
                    //fprintf(stderr, "Wrong header at %d\n", tryout_buf_idx);
                    // reset tryout index
                    tryout_buf_idx = 0;
                    continue;
                }
            }
            else
            if (n < 0)
            {
                error_exit("Error reading packet", EXIT_FAILURE);
            }
            else
            {
                // read nothing, check for heartbeat missing
                if (time(NULL) - last_heartbeat_time >= 1)
                {
                    missed_heartbeat = time(NULL) - last_heartbeat_time;
                }
            }
        }
        else
        {
            tryout_buf_idx = 0;
            // Header complete: read the body and validate the footer.
            motor_param_packet_header *packet;
            motor_param_packet_content *content;
            motor_param_packet_footer *footer;
            // A packet carrying zero content entries is a heartbeat.
            int count = (tryout_header.len_bytes - sizeof(motor_param_packet_header) - sizeof(motor_param_packet_footer)) / sizeof(motor_param_packet_content);
            bool is_heartbeat = count == 0;
            if (!is_heartbeat)
                printf("Allocate %d content\n", count);
            // Reject lengths that are not an exact multiple of the content size.
            if (tryout_header.len_bytes != sizeof(motor_param_packet_header) + count * sizeof(motor_param_packet_content) + sizeof(motor_param_packet_footer))
            {
                fprintf(stderr, "Inconsistent packet length (%d), give it up\n", tryout_header.len_bytes);
                continue;
            }
            packet = allocate_packet(count);
            if (!packet)
            {
                fprintf(stderr, "Cannot allocate %d content, either memory is out or the number isn't right!\n", count);
                continue;
            }
            memcpy(packet, &tryout_header, sizeof(tryout_header));
            content = (motor_param_packet_content*)((uint8_t*)packet + sizeof(motor_param_packet_header));
            footer = (motor_param_packet_footer*)((uint8_t*)packet + packet->len_bytes - sizeof(motor_param_packet_footer));
            n = read(accept_sockfd, content, packet->len_bytes - sizeof(motor_param_packet_header));
            if (n > 0)
            {
                debug_packet(stdout, packet);
                // NOTE(review): a short read (n smaller than requested) is
                // treated as "wrong packet content" rather than retried — confirm.
                if (n == packet->len_bytes - sizeof(motor_param_packet_header) &&
                    check_packet_header(packet) == packet->len_bytes &&
                    check_packet_footer(footer) == 0)
                {
                    if (is_heartbeat)
                    {
                        printf("Heartbeat #%d\n", heartbeat_counter);
                        heartbeat_counter++;
                        last_heartbeat_time = time(NULL);
                        missed_heartbeat = 0;
                    }
                    else
                    {
                        printf("#%d packet\n", packet_counter);
                        packet_counter++;
                        // NOTE(review): this local struct shadows the outer
                        // `content` pointer above — legal but confusing.
                        motor_param_packet_content content;
                        for(int i = 0; i < count; ++i)
                        {
                            if (extract_packet_param(packet, i, &content) == 0)
                            {
                                printf("Recv param %d (%d, %d)\n", i, content.motor, content.value);
                            }
                        }
                    }
                }
                else
                {
                    fprintf(stderr, "Wrong packet content\n");
                }
            }
            else
            if (n < 0)
            {
                error_exit("Error reading packet", EXIT_FAILURE);
            }
            else
            {
                // read nothing, check missing heartbeat
                if (time(NULL) - last_heartbeat_time >= 1)
                {
                    missed_heartbeat = time(NULL) - last_heartbeat_time;
                }
            }
            free_packet(packet);
        }
        // Too many silent seconds: drop the client and go back to accept().
        if (missed_heartbeat >= MAX_MISSING_HEARTBEAT)
        {
            fprintf(stderr, "Too many missing heartbeat. Stop\n");
            close(accept_sockfd);
            tryout_buf_idx = 0;
            packet_counter = 0;
            heartbeat_counter = 0;
            missed_heartbeat = 0;
            goto ACCEPT_CONNECTION;
        }
    }
    printf("Done\n");
    close(accept_sockfd);
    close(sockfd);
    return 0;
}
|
<gh_stars>0
package gwf
import (
"encoding/json"
"fmt"
"io/ioutil"
"math"
"mime"
"mime/multipart"
"net/http"
"net/url"
"strconv"
"github.com/go-playground/form"
)
// Context 是对某次请求上下文的抽象
// Context abstracts the per-request context.
type Context struct {
	app     *Application
	Request *http.Request
	Writer  *responseWriter
	// URL query parameters.
	URLParameters url.Values
	// URL plus POST/PUT/PATCH parameters; a POST/PUT/PATCH value overrides a
	// same-named URL parameter.
	URLFormParameters url.Values
	// POST/PUT/PATCH body parameters only.
	FormParameters url.Values
	// Index into the HandlersChain; drives execution of the handler chain.
	index int8
	// All handlers to run for the current request.
	handlers HandlersChain
	// Keys holds request-scoped key/value pairs carried through the request.
	Keys map[string]interface{}
	// Internal error.
	errInternal *Error
}
// abortIndex is a sentinel handler index larger than any realistic chain;
// once Context.index reaches it, Next() runs no further handlers.
const abortIndex int8 = math.MaxInt8 / 2

// decoder converts url.Values into structs.
// NOTE(review): shared across requests — confirm go-playground/form's Decoder
// is documented as safe for concurrent use.
var decoder *form.Decoder = form.NewDecoder()
// newCtx builds a Context for one request and parses its parameters.
// NOTE(review): only POST triggers body parsing below, although the field
// comments mention PUT/PATCH as well — confirm intended.
func newCtx(app *Application, r *http.Request) *Context {
	c := &Context{
		app:     app,
		Request: r,
		index:   -1,
	}
	c.URLParameters = r.URL.Query()
	if r.Method == http.MethodPost {
		v := r.Header.Get("Content-Type")
		var parseMultipart bool = false
		if v != "" {
			d, _, err := mime.ParseMediaType(v)
			if err == nil && d == "multipart/form-data" {
				parseMultipart = true
			}
		}
		if parseMultipart {
			err := r.ParseMultipartForm(app.maxMultipartMemory)
			if err != nil {
				panic(fmt.Sprintf("ParseMultipartForm 失败, error:%s", err))
			}
		} else {
			err := r.ParseForm()
			if err != nil {
				// Fail fast: the rest of the pipeline cannot run without
				// parsed parameters; the panic is recorded in the log.
				panic(fmt.Sprintf("解析url参数和POST/PUT/PATCH上传参数失败, error:%s", err))
			}
		}
		c.URLFormParameters = c.Request.Form
	} else {
		c.URLFormParameters = c.URLParameters
	}
	c.FormParameters = c.Request.PostForm
	return c
}
// Next runs the remaining handlers in the chain in order.
func (c *Context) Next() {
	c.index++
	for c.index < int8(len(c.handlers)) {
		c.handlers[c.index](c)
		c.index++
	}
}

// Status writes the response status code.
func (c *Context) Status(code int) {
	c.Writer.WriteHeader(code)
}

// IsAborted reports whether this Context has been aborted.
func (c *Context) IsAborted() bool {
	return c.index >= abortIndex
}

// Abort stops the not-yet-executed handlers; it does not interrupt the
// handler currently running. Typical use: an auth middleware that fails
// calls Abort so the rest of the chain never runs.
func (c *Context) Abort() {
	c.index = abortIndex
}

// AbortWithStatus writes the status code and aborts the remaining handlers.
func (c *Context) AbortWithStatus(code int) {
	c.Abort()
	c.Status(code)
	c.Writer.WriteHeaderNow()
}

// AbortWithStatusString responds with a plain string and aborts the chain.
func (c *Context) AbortWithStatusString(code int, data string) {
	c.Abort()
	c.String(code, data)
}

// AbortWithStatusJson responds with JSON and aborts the chain.
func (c *Context) AbortWithStatusJson(code int, data interface{}) {
	c.Abort()
	c.Json(code, data)
}

// Set stores a user-defined key/value pair in this Context, allocating the
// map lazily.
func (c *Context) Set(key string, value interface{}) {
	if c.Keys == nil {
		c.Keys = make(map[string]interface{})
	}
	c.Keys[key] = value
}

// Get returns the value stored for key; exists is true when present.
func (c *Context) Get(key string) (value interface{}, exists bool) {
	value, exists = c.Keys[key]
	return
}
/********** request-parameter helper methods: begin **********/
// intDefault parses v as a base-10 int; empty or malformed input yields defaultV.
func intDefault(v string, defaultV int) int {
	if i, err := strconv.Atoi(v); err == nil {
		return i
	}
	return defaultV
}
// int8Default parses v as a base-10 int8; empty or malformed input yields defaultV.
func int8Default(v string, defaultV int8) int8 {
	if i, err := strconv.ParseInt(v, 10, 8); err == nil {
		return int8(i)
	}
	return defaultV
}
// int16Default parses v as a base-10 int16; empty or malformed input yields defaultV.
func int16Default(v string, defaultV int16) int16 {
	if i, err := strconv.ParseInt(v, 10, 16); err == nil {
		return int16(i)
	}
	return defaultV
}
// int32Default parses v as a base-10 int32; empty or malformed input yields defaultV.
func int32Default(v string, defaultV int32) int32 {
	if i, err := strconv.ParseInt(v, 10, 32); err == nil {
		return int32(i)
	}
	return defaultV
}
// int64Default parses v as a base-10 int64; empty or malformed input yields defaultV.
func int64Default(v string, defaultV int64) int64 {
	if i, err := strconv.ParseInt(v, 10, 64); err == nil {
		return i
	}
	return defaultV
}
// uintDefault parses v as a base-10 unsigned int; empty or malformed input
// yields defaultV.
// NOTE(review): the bit size is pinned to 32 (as in the original), so values
// above 2^32-1 fall back to defaultV even on 64-bit platforms — confirm.
func uintDefault(v string, defaultV uint) uint {
	if i, err := strconv.ParseUint(v, 10, 32); err == nil {
		return uint(i)
	}
	return defaultV
}
// uint8Default parses v as a base-10 uint8; empty or malformed input yields defaultV.
func uint8Default(v string, defaultV uint8) uint8 {
	if i, err := strconv.ParseUint(v, 10, 8); err == nil {
		return uint8(i)
	}
	return defaultV
}
// uint16Default parses v as a base-10 uint16; empty or malformed input yields defaultV.
func uint16Default(v string, defaultV uint16) uint16 {
	if i, err := strconv.ParseUint(v, 10, 16); err == nil {
		return uint16(i)
	}
	return defaultV
}
// uint32Default parses v as a base-10 uint32; empty or malformed input yields defaultV.
func uint32Default(v string, defaultV uint32) uint32 {
	if i, err := strconv.ParseUint(v, 10, 32); err == nil {
		return uint32(i)
	}
	return defaultV
}
// uint64Default parses v as a base-10 uint64; empty or malformed input yields defaultV.
func uint64Default(v string, defaultV uint64) uint64 {
	if i, err := strconv.ParseUint(v, 10, 64); err == nil {
		return i
	}
	return defaultV
}
// float32Default parses v as a float32; empty or malformed input yields defaultV.
func float32Default(v string, defaultV float32) float32 {
	if f, err := strconv.ParseFloat(v, 32); err == nil {
		return float32(f)
	}
	return defaultV
}
// float64Default parses v as a float64; empty or malformed input yields defaultV.
func float64Default(v string, defaultV float64) float64 {
	if f, err := strconv.ParseFloat(v, 64); err == nil {
		return f
	}
	return defaultV
}
// boolDefault parses v with strconv.ParseBool (accepting 1/t/true/0/f/false
// and case variants); empty or malformed input yields defaultV.
func boolDefault(v string, defaultV bool) bool {
	if b, err := strconv.ParseBool(v); err == nil {
		return b
	}
	return defaultV
}
// ParamXXX and ParamXXXDefault read both url query parameters and
// POST/PUT/PATCH form parameters; when a key exists in both, the
// POST/PUT/PATCH value wins. Use QueryXXX / QueryXXXDefault to read only the
// url query parameters. When the key is absent or its value is the empty
// string, ParamXXX returns the zero value of XXX and ParamXXXDefault returns
// the supplied defaultV.
// ParamString returns the url/form parameter for key ("" when absent).
func (c *Context) ParamString(key string) string {
	return c.URLFormParameters.Get(key)
}

// ParamStringDefault returns the parameter for key, or defaultV when empty/absent.
func (c *Context) ParamStringDefault(key string, defaultV string) string {
	v := c.URLFormParameters.Get(key)
	if v == "" {
		return defaultV
	}
	return v
}

// ParamStringSlice returns every value supplied for key (nil when absent).
func (c *Context) ParamStringSlice(key string) []string {
	return c.URLFormParameters[key]
}
// Signed-integer accessors: each parses the parameter with the matching bit
// size, falling back to the zero value (ParamXXX) or defaultV (ParamXXXDefault).
func (c *Context) ParamInt(key string) int {
	v := c.ParamString(key)
	return intDefault(v, 0)
}
func (c *Context) ParamIntDefault(key string, defaultV int) int {
	v := c.ParamString(key)
	return intDefault(v, defaultV)
}
func (c *Context) ParamInt8(key string) int8 {
	v := c.ParamString(key)
	return int8Default(v, int8(0))
}
func (c *Context) ParamInt8Default(key string, defaultV int8) int8 {
	v := c.ParamString(key)
	return int8Default(v, defaultV)
}
func (c *Context) ParamInt16(key string) int16 {
	v := c.ParamString(key)
	return int16Default(v, int16(0))
}
func (c *Context) ParamInt16Default(key string, defaultV int16) int16 {
	v := c.ParamString(key)
	return int16Default(v, defaultV)
}
func (c *Context) ParamInt32(key string) int32 {
	v := c.ParamString(key)
	return int32Default(v, int32(0))
}
func (c *Context) ParamInt32Default(key string, defaultV int32) int32 {
	v := c.ParamString(key)
	return int32Default(v, defaultV)
}
func (c *Context) ParamInt64(key string) int64 {
	v := c.ParamString(key)
	return int64Default(v, int64(0))
}
func (c *Context) ParamInt64Default(key string, defaultV int64) int64 {
	v := c.ParamString(key)
	return int64Default(v, defaultV)
}
// Unsigned-integer accessors: same convention as the signed family above.
func (c *Context) ParamUint(key string) uint {
	v := c.ParamString(key)
	return uintDefault(v, uint(0))
}
func (c *Context) ParamUintDefault(key string, defaultV uint) uint {
	v := c.ParamString(key)
	return uintDefault(v, defaultV)
}
func (c *Context) ParamUint8(key string) uint8 {
	v := c.ParamString(key)
	return uint8Default(v, uint8(0))
}
func (c *Context) ParamUint8Default(key string, defaultV uint8) uint8 {
	v := c.ParamString(key)
	return uint8Default(v, defaultV)
}
func (c *Context) ParamUint16(key string) uint16 {
	v := c.ParamString(key)
	return uint16Default(v, uint16(0))
}
func (c *Context) ParamUint16Default(key string, defaultV uint16) uint16 {
	v := c.ParamString(key)
	return uint16Default(v, defaultV)
}
func (c *Context) ParamUint32(key string) uint32 {
	v := c.ParamString(key)
	return uint32Default(v, uint32(0))
}
func (c *Context) ParamUint32Default(key string, defaultV uint32) uint32 {
	v := c.ParamString(key)
	return uint32Default(v, defaultV)
}
func (c *Context) ParamUint64(key string) uint64 {
	v := c.ParamString(key)
	return uint64Default(v, uint64(0))
}
func (c *Context) ParamUint64Default(key string, defaultV uint64) uint64 {
	v := c.ParamString(key)
	return uint64Default(v, defaultV)
}
func (c *Context) ParamFloat32(key string) float32 {
v := c.ParamString(key)
return float32Default(v, float32(0.0))
}
func (c *Context) ParamFloat32Default(key string, defaultV float32) float32 {
v := c.ParamString(key)
return float32Default(v, defaultV)
}
func (c *Context) ParamFloat64(key string) float64 {
v := c.ParamString(key)
return float64Default(v, 0.0)
}
func (c *Context) ParamFloat64Default(key string, defaultV float64) float64 {
v := c.ParamString(key)
return float64Default(v, defaultV)
}
func (c *Context) ParamBool(key string) bool {
v := c.ParamString(key)
return boolDefault(v, false)
}
func (c *Context) ParamBoolDefault(key string, defaultV bool) bool {
v := c.ParamString(key)
return boolDefault(v, defaultV)
}
// QueryString returns the URL query parameter named key ("" when absent).
func (c *Context) QueryString(key string) string {
	return c.URLParameters.Get(key)
}

// QueryStringDefault returns the URL query parameter named key,
// or defaultV when the parameter is absent or empty.
func (c *Context) QueryStringDefault(key string, defaultV string) string {
	if v := c.URLParameters.Get(key); v != "" {
		return v
	}
	return defaultV
}

// QueryStringSlice returns every value of the URL query parameter named key
// (nil when the key is absent).
func (c *Context) QueryStringSlice(key string) []string {
	return c.URLParameters[key]
}
// QueryInt returns the URL query parameter key as an int (0 on failure).
func (c *Context) QueryInt(key string) int {
	return intDefault(c.QueryString(key), 0)
}

// QueryIntDefault returns the URL query parameter key as an int (defaultV on failure).
func (c *Context) QueryIntDefault(key string, defaultV int) int {
	return intDefault(c.QueryString(key), defaultV)
}

// QueryInt8 returns the URL query parameter key as an int8 (0 on failure).
func (c *Context) QueryInt8(key string) int8 {
	return int8Default(c.QueryString(key), 0)
}

// QueryInt8Default returns the URL query parameter key as an int8 (defaultV on failure).
func (c *Context) QueryInt8Default(key string, defaultV int8) int8 {
	return int8Default(c.QueryString(key), defaultV)
}

// QueryInt16 returns the URL query parameter key as an int16 (0 on failure).
func (c *Context) QueryInt16(key string) int16 {
	return int16Default(c.QueryString(key), 0)
}

// QueryInt16Default returns the URL query parameter key as an int16 (defaultV on failure).
func (c *Context) QueryInt16Default(key string, defaultV int16) int16 {
	return int16Default(c.QueryString(key), defaultV)
}

// QueryInt32 returns the URL query parameter key as an int32 (0 on failure).
func (c *Context) QueryInt32(key string) int32 {
	return int32Default(c.QueryString(key), 0)
}

// QueryInt32Default returns the URL query parameter key as an int32 (defaultV on failure).
func (c *Context) QueryInt32Default(key string, defaultV int32) int32 {
	return int32Default(c.QueryString(key), defaultV)
}

// QueryInt64 returns the URL query parameter key as an int64 (0 on failure).
func (c *Context) QueryInt64(key string) int64 {
	return int64Default(c.QueryString(key), 0)
}

// QueryInt64Default returns the URL query parameter key as an int64 (defaultV on failure).
func (c *Context) QueryInt64Default(key string, defaultV int64) int64 {
	return int64Default(c.QueryString(key), defaultV)
}

// QueryUint returns the URL query parameter key as a uint (0 on failure).
func (c *Context) QueryUint(key string) uint {
	return uintDefault(c.QueryString(key), 0)
}

// QueryUintDefault returns the URL query parameter key as a uint (defaultV on failure).
func (c *Context) QueryUintDefault(key string, defaultV uint) uint {
	return uintDefault(c.QueryString(key), defaultV)
}

// QueryUint8 returns the URL query parameter key as a uint8 (0 on failure).
func (c *Context) QueryUint8(key string) uint8 {
	return uint8Default(c.QueryString(key), 0)
}

// QueryUint8Default returns the URL query parameter key as a uint8 (defaultV on failure).
func (c *Context) QueryUint8Default(key string, defaultV uint8) uint8 {
	return uint8Default(c.QueryString(key), defaultV)
}

// QueryUint16 returns the URL query parameter key as a uint16 (0 on failure).
func (c *Context) QueryUint16(key string) uint16 {
	return uint16Default(c.QueryString(key), 0)
}

// QueryUint16Default returns the URL query parameter key as a uint16 (defaultV on failure).
func (c *Context) QueryUint16Default(key string, defaultV uint16) uint16 {
	return uint16Default(c.QueryString(key), defaultV)
}

// QueryUint32 returns the URL query parameter key as a uint32 (0 on failure).
func (c *Context) QueryUint32(key string) uint32 {
	return uint32Default(c.QueryString(key), 0)
}

// QueryUint32Default returns the URL query parameter key as a uint32 (defaultV on failure).
func (c *Context) QueryUint32Default(key string, defaultV uint32) uint32 {
	return uint32Default(c.QueryString(key), defaultV)
}

// QueryUint64 returns the URL query parameter key as a uint64 (0 on failure).
func (c *Context) QueryUint64(key string) uint64 {
	return uint64Default(c.QueryString(key), 0)
}

// QueryUint64Default returns the URL query parameter key as a uint64 (defaultV on failure).
func (c *Context) QueryUint64Default(key string, defaultV uint64) uint64 {
	return uint64Default(c.QueryString(key), defaultV)
}
// QueryFloat32 returns the URL query parameter key as a float32 (0 on failure).
func (c *Context) QueryFloat32(key string) float32 {
	v := c.QueryString(key)
	return float32Default(v, 0.0)
}

// QueryFloat32Default returns the URL query parameter key as a float32
// (defaultV on failure). Correctly-spelled replacement for QueryFloa32Default.
func (c *Context) QueryFloat32Default(key string, defaultV float32) float32 {
	v := c.QueryString(key)
	return float32Default(v, defaultV)
}

// QueryFloa32Default is a misspelled alias of QueryFloat32Default.
//
// Deprecated: use QueryFloat32Default; this alias is kept so existing callers
// keep compiling.
func (c *Context) QueryFloa32Default(key string, defaultV float32) float32 {
	return c.QueryFloat32Default(key, defaultV)
}
// QueryFloat64 returns the URL query parameter key as a float64 (0 on failure).
func (c *Context) QueryFloat64(key string) float64 {
	v := c.QueryString(key)
	return float64Default(v, 0.0)
}

// QueryFloat64Default returns the URL query parameter key as a float64
// (defaultV on failure). Correctly-spelled replacement for QueryFloa64Default.
func (c *Context) QueryFloat64Default(key string, defaultV float64) float64 {
	v := c.QueryString(key)
	return float64Default(v, defaultV)
}

// QueryFloa64Default is a misspelled alias of QueryFloat64Default.
//
// Deprecated: use QueryFloat64Default; this alias is kept so existing callers
// keep compiling.
func (c *Context) QueryFloa64Default(key string, defaultV float64) float64 {
	return c.QueryFloat64Default(key, defaultV)
}
// QueryBool returns the URL query parameter key as a bool (false on failure).
func (c *Context) QueryBool(key string) bool {
	return boolDefault(c.QueryString(key), false)
}

// QueryBoolDefault returns the URL query parameter key as a bool (defaultV on failure).
func (c *Context) QueryBoolDefault(key string, defaultV bool) bool {
	return boolDefault(c.QueryString(key), defaultV)
}
// FormString returns the POST/PUT/PATCH form parameter named key ("" when absent).
func (c *Context) FormString(key string) string {
	return c.FormParameters.Get(key)
}

// FormStringDefault returns the POST/PUT/PATCH form parameter named key,
// or defaultV when the parameter is absent or empty.
func (c *Context) FormStringDefault(key string, defaultV string) string {
	if v := c.FormParameters.Get(key); v != "" {
		return v
	}
	return defaultV
}

// FormStringSlice returns every value of the POST/PUT/PATCH form parameter
// named key (nil when the key is absent).
func (c *Context) FormStringSlice(key string) []string {
	return c.FormParameters[key]
}
// FormInt returns the form parameter key as an int (0 on failure).
func (c *Context) FormInt(key string) int {
	return intDefault(c.FormString(key), 0)
}

// FormIntDefault returns the form parameter key as an int (defaultV on failure).
func (c *Context) FormIntDefault(key string, defaultV int) int {
	return intDefault(c.FormString(key), defaultV)
}

// FormInt8 returns the form parameter key as an int8 (0 on failure).
func (c *Context) FormInt8(key string) int8 {
	return int8Default(c.FormString(key), 0)
}

// FormInt8Default returns the form parameter key as an int8 (defaultV on failure).
func (c *Context) FormInt8Default(key string, defaultV int8) int8 {
	return int8Default(c.FormString(key), defaultV)
}

// FormInt16 returns the form parameter key as an int16 (0 on failure).
func (c *Context) FormInt16(key string) int16 {
	return int16Default(c.FormString(key), 0)
}

// FormInt16Default returns the form parameter key as an int16 (defaultV on failure).
func (c *Context) FormInt16Default(key string, defaultV int16) int16 {
	return int16Default(c.FormString(key), defaultV)
}

// FormInt32 returns the form parameter key as an int32 (0 on failure).
func (c *Context) FormInt32(key string) int32 {
	return int32Default(c.FormString(key), 0)
}

// FormInt32Default returns the form parameter key as an int32 (defaultV on failure).
func (c *Context) FormInt32Default(key string, defaultV int32) int32 {
	return int32Default(c.FormString(key), defaultV)
}

// FormInt64 returns the form parameter key as an int64 (0 on failure).
func (c *Context) FormInt64(key string) int64 {
	return int64Default(c.FormString(key), 0)
}

// FormInt64Default returns the form parameter key as an int64 (defaultV on failure).
func (c *Context) FormInt64Default(key string, defaultV int64) int64 {
	return int64Default(c.FormString(key), defaultV)
}

// FormUint returns the form parameter key as a uint (0 on failure).
func (c *Context) FormUint(key string) uint {
	return uintDefault(c.FormString(key), 0)
}

// FormUintDefault returns the form parameter key as a uint (defaultV on failure).
func (c *Context) FormUintDefault(key string, defaultV uint) uint {
	return uintDefault(c.FormString(key), defaultV)
}

// FormUint8 returns the form parameter key as a uint8 (0 on failure).
func (c *Context) FormUint8(key string) uint8 {
	return uint8Default(c.FormString(key), 0)
}

// FormUint8Default returns the form parameter key as a uint8 (defaultV on failure).
func (c *Context) FormUint8Default(key string, defaultV uint8) uint8 {
	return uint8Default(c.FormString(key), defaultV)
}

// FormUint16 returns the form parameter key as a uint16 (0 on failure).
func (c *Context) FormUint16(key string) uint16 {
	return uint16Default(c.FormString(key), 0)
}

// FormUint16Default returns the form parameter key as a uint16 (defaultV on failure).
func (c *Context) FormUint16Default(key string, defaultV uint16) uint16 {
	return uint16Default(c.FormString(key), defaultV)
}

// FormUint32 returns the form parameter key as a uint32 (0 on failure).
func (c *Context) FormUint32(key string) uint32 {
	return uint32Default(c.FormString(key), 0)
}

// FormUint32Default returns the form parameter key as a uint32 (defaultV on failure).
func (c *Context) FormUint32Default(key string, defaultV uint32) uint32 {
	return uint32Default(c.FormString(key), defaultV)
}

// FormUint64 returns the form parameter key as a uint64 (0 on failure).
func (c *Context) FormUint64(key string) uint64 {
	return uint64Default(c.FormString(key), 0)
}

// FormUint64Default returns the form parameter key as a uint64 (defaultV on failure).
func (c *Context) FormUint64Default(key string, defaultV uint64) uint64 {
	return uint64Default(c.FormString(key), defaultV)
}
// FormFloat32 returns the form parameter key as a float32 (0 on failure).
func (c *Context) FormFloat32(key string) float32 {
	v := c.FormString(key)
	return float32Default(v, 0.0)
}

// FormFloat32Default returns the form parameter key as a float32
// (defaultV on failure). Correctly-spelled replacement for FormFloa32Default.
func (c *Context) FormFloat32Default(key string, defaultV float32) float32 {
	v := c.FormString(key)
	return float32Default(v, defaultV)
}

// FormFloa32Default is a misspelled alias of FormFloat32Default.
//
// Deprecated: use FormFloat32Default; this alias is kept so existing callers
// keep compiling.
func (c *Context) FormFloa32Default(key string, defaultV float32) float32 {
	return c.FormFloat32Default(key, defaultV)
}
// FormFloat64 returns the form parameter key as a float64 (0 on failure).
func (c *Context) FormFloat64(key string) float64 {
	v := c.FormString(key)
	return float64Default(v, 0.0)
}

// FormFloat64Default returns the form parameter key as a float64
// (defaultV on failure). Correctly-spelled replacement for FormFloa64Default.
func (c *Context) FormFloat64Default(key string, defaultV float64) float64 {
	v := c.FormString(key)
	return float64Default(v, defaultV)
}

// FormFloa64Default is a misspelled alias of FormFloat64Default.
//
// Deprecated: use FormFloat64Default; this alias is kept so existing callers
// keep compiling.
func (c *Context) FormFloa64Default(key string, defaultV float64) float64 {
	return c.FormFloat64Default(key, defaultV)
}
// FormBool returns the form parameter key as a bool (false on failure).
func (c *Context) FormBool(key string) bool {
	return boolDefault(c.FormString(key), false)
}

// FormBoolDefault returns the form parameter key as a bool (defaultV on failure).
func (c *Context) FormBoolDefault(key string, defaultV bool) bool {
	return boolDefault(c.FormString(key), defaultV)
}
// MultipartFormParameters returns the POST/PUT/PATCH parameters of a form
// submitted with enctype="multipart/form-data", lazily parsing the request
// body (bounded by the app's maxMultipartMemory) on first use.
func (c *Context) MultipartFormParameters() (url.Values, error) {
	if c.Request.MultipartForm == nil {
		if err := c.Request.ParseMultipartForm(c.app.maxMultipartMemory); err != nil {
			return nil, err
		}
	}
	return c.Request.MultipartForm.Value, nil
}
// FormFile returns the header of the uploaded file stored under the named
// multipart form field, lazily parsing the multipart body on first use.
func (c *Context) FormFile(name string) (*multipart.FileHeader, error) {
	if c.Request.MultipartForm == nil {
		if err := c.Request.ParseMultipartForm(c.app.maxMultipartMemory); err != nil {
			return nil, err
		}
	}
	_, fh, err := c.Request.FormFile(name)
	return fh, err
}
// GetRequestBody reads and returns the entire request body.
// The body is a one-shot stream: a second call returns an empty slice.
func (c *Context) GetRequestBody() ([]byte, error) {
	return ioutil.ReadAll(c.Request.Body)
}

// GetReqeustBody is a misspelled alias of GetRequestBody.
//
// Deprecated: use GetRequestBody; this alias is kept so existing callers
// keep compiling.
func (c *Context) GetReqeustBody() ([]byte, error) {
	return c.GetRequestBody()
}
/**********请求参数相关函数 end**********/
/**********参数绑定到struct相关函数 begin**********/
// Bind decodes src into dst; dst must be a pointer to a struct.
func (c *Context) Bind(dst interface{}, src url.Values) error {
	return decoder.Decode(dst, src)
}
// BindQuery binds the URL query parameters to dst.
func (c *Context) BindQuery(dst interface{}) error {
	return c.Bind(dst, c.URLParameters)
}
// BindParam binds the merged URL query and POST/PUT/PATCH parameters to dst.
func (c *Context) BindParam(dst interface{}) error {
	return c.Bind(dst, c.URLFormParameters)
}
// BindForm binds the POST/PUT/PATCH form parameters to dst.
func (c *Context) BindForm(dst interface{}) error {
	return c.Bind(dst, c.FormParameters)
}
// BindMultipartForm binds POST/PUT/PATCH parameters to dst. Unlike BindForm,
// it reads parameters from a form with enctype="multipart/form-data".
// To access uploaded files, use FormFile.
func (c *Context) BindMultipartForm(dst interface{}) error {
	params, err := c.MultipartFormParameters()
	if err != nil {
		return err
	}
	return c.Bind(dst, params)
}
/**********参数绑定到struct相关函数 end**********/
/**********输出相关函数 begin**********/
// Header sets an HTTP response header, replacing any existing value for key.
func (c *Context) Header(key string, value string) {
	c.Writer.Header().Set(key, value)
}
// Bytes writes bytes as the response body with HTTP status statusCode.
// Callers must use return themselves to stop the handler flow afterwards.
// Panics when the underlying writer reports an error.
func (c *Context) Bytes(statusCode int, bytes []byte) {
	c.Status(statusCode)
	n, err := c.Writer.Write(bytes)
	if err != nil {
		panic(fmt.Sprintf("错误 err:%s byte sent:%d", err, n))
	}
}
// String writes data as a text/plain (UTF-8) response body with HTTP status
// statusCode. Callers must use return themselves to stop the handler flow.
func (c *Context) String(statusCode int, data string) {
	c.Writer.Header().Add("Content-Type", "text/plain; charset=UTF-8")
	c.Bytes(statusCode, []byte(data))
}
// Render injects the context into data under "_ctx" and renders tmplName
// inside layoutName, writing the result with HTTP status code.
func (c *Context) Render(code int, layoutName, tmplName string, data map[string]interface{}) {
	// Assigning into a nil map panics; allocate one so callers may pass nil.
	if data == nil {
		data = make(map[string]interface{})
	}
	data["_ctx"] = c
	Render(c.Writer, code, layoutName, tmplName, data)
}
// RenderAdmin renders an admin page: it loads the menu list, marks the entry
// matching the current URL path as active, then renders tmplName inside
// layoutName with HTTP 200.
// NOTE(review): menuList is built and mutated but never added to data —
// confirm LoadMenuList registers the list somewhere global, otherwise the
// active-menu state computed here is dropped.
// NOTE(review): LoadMenuList is called with layoutName as its first
// argument — verify that is the intended menu key.
func (c *Context) RenderAdmin(layoutName, tmplName string, data map[string]interface{}) {
	menuList := LoadMenuList(layoutName, nil)
	menuList.SetActive(c.Request.URL.Path)
	c.Render(http.StatusOK, layoutName, tmplName, data)
}
// RenderAdminDefaultLayout renders tmplName inside the default admin layout
// ("admin/default").
func (c *Context) RenderAdminDefaultLayout(tmplName string, data map[string]interface{}) {
	c.RenderAdmin("admin/default", tmplName, data)
}
// Json marshals data (typically a struct) to JSON and writes it as an
// application/json (UTF-8) response with HTTP status code.
// Callers must use return themselves to stop the handler flow.
// Panics when marshalling fails.
func (c *Context) Json(code int, data interface{}) {
	b, err := json.Marshal(data)
	if err != nil {
		panic(fmt.Sprintf("错误 err:%s", err))
	}
	c.Writer.Header().Add("Content-Type", "application/json; charset=UTF-8")
	c.Bytes(code, b)
}
// Redirect301 sends a permanent (301 Moved Permanently) redirect to location.
func (c *Context) Redirect301(location string) {
	// Named constant instead of the magic number 301.
	http.Redirect(c.Writer, c.Request, location, http.StatusMovedPermanently)
}
// Redirect302 sends a temporary (302 Found) redirect to location.
func (c *Context) Redirect302(location string) {
	// Named constant instead of the magic number 302.
	http.Redirect(c.Writer, c.Request, location, http.StatusFound)
}
/**********输出相关函数 end**********/
|
impl GameController {
fn process_gamepad_event(&mut self, event: GamepadEvent) {
match event {
GamepadEvent::ButtonPressed(Button::RightTrigger2) => {
self.is_trigger_holding = true;
}
GamepadEvent::ButtonReleased(Button::RightTrigger2) => {
self.is_trigger_holding = false;
self.is_sending = false;
}
GamepadEvent::ButtonPressed(Button::West) => {
self.is_sending = true;
}
_ => {} // Handle other events if needed
}
}
} |
using System;
using System.Runtime.InteropServices;
namespace Clr2Jvm.Interop.Native
{
// NOTE(review): this class does not compile as C#. It mixes C-style JNI
// pointer syntax ((*env)->Fn(...)) into managed code and references an
// undeclared field (classRef). The inline notes below mark each problem;
// a real fix needs unsafe function-pointer tables (or a JNIEnv wrapper)
// and the IDisposable pattern, which is too large a rewrite to do blind.
public class JvmInterop
{
    // Raw native handles: JavaVM* and JNIEnv* as returned by JNI_CreateJavaVM.
    private IntPtr jvmHandle;
    private IntPtr jniEnv;

    // Creates a JVM in-process with the given options and captures the
    // JNI environment pointer.
    public JvmInterop(string jvmPath, string[] jvmOptions)
    {
        // NOTE(review): jvmPath is accepted but never used — presumably the
        // JVM library should be loaded from it; confirm intent.
        // Load the JVM and initialize the JNI environment
        JavaVMInitArgs initArgs = new JavaVMInitArgs();
        initArgs.version = JNI_VERSION_1_8;
        initArgs.nOptions = jvmOptions.Length;
        // Marshal each option string into an unmanaged JavaVMOption array.
        initArgs.options = Marshal.AllocHGlobal(Marshal.SizeOf(typeof(JavaVMOption)) * jvmOptions.Length);
        for (int i = 0; i < jvmOptions.Length; i++)
        {
            JavaVMOption option = new JavaVMOption();
            // NOTE(review): each StringToHGlobalAnsi allocation below is never
            // freed — only the options array is released — so this leaks.
            option.optionString = Marshal.StringToHGlobalAnsi(jvmOptions[i]);
            Marshal.StructureToPtr(option, initArgs.options + i * Marshal.SizeOf(typeof(JavaVMOption)), false);
        }
        // NOTE(review): the return code of JNI_CreateJavaVM is ignored;
        // a failed launch leaves jvmHandle/jniEnv as IntPtr.Zero.
        JNI_CreateJavaVM(out jvmHandle, out jniEnv, ref initArgs);
        Marshal.FreeHGlobal(initArgs.options);
    }

    // Resolves a Java class by name.
    public void LoadJavaClass(string className)
    {
        // Load a specific Java class
        // NOTE(review): (*jniEnv)-> is C syntax, not valid C# — this line
        // cannot compile. The resolved classRef is also discarded, so
        // CallJavaMethod below has nothing to work with.
        IntPtr classRef = (*jniEnv)->FindClass(jniEnv, className);
        if (classRef == IntPtr.Zero)
        {
            throw new Exception("Failed to find Java class");
        }
    }

    // Invokes a method on the previously loaded Java class.
    public void CallJavaMethod(string methodName, string methodSignature, params object[] args)
    {
        // Call a method from the loaded Java class
        // NOTE(review): classRef is not declared anywhere in this class —
        // LoadJavaClass must store it in a field for this to work.
        IntPtr methodID = (*jniEnv)->GetMethodID(jniEnv, classRef, methodName, methodSignature);
        if (methodID == IntPtr.Zero)
        {
            throw new Exception("Failed to find Java method");
        }
        // Convert and pass arguments to Java method
        // ...
        // Call the Java method
        // ...
        // Convert and handle the return value from Java method
        // ...
    }

    // Other helper methods for data type conversion
    // ...
    // Clean up resources
    // NOTE(review): same C-style dereference as above; also prefer
    // IDisposable + SafeHandle over a finalizer for unmanaged teardown.
    ~JvmInterop()
    {
        (*jvmHandle)->DestroyJavaVM(jvmHandle);
    }
}
} |
import os
import sys


def _run(command):
    """Run a shell command, aborting the whole script if it fails.

    The original script ignored os.system's return status, so a failed
    configure step silently cascaded into the later build/codegen steps.
    """
    status = os.system(command)
    if status != 0:
        sys.exit(f'command failed (exit status {status}): {command}')


# Resolve this script's directory and the Iroha source root (two levels up).
cur_dir = os.path.abspath(os.path.dirname(__file__))
iroha_home = os.path.abspath(os.path.join(cur_dir, '..', '..'))

# Configure the Iroha build with the SWIG Python bindings enabled.
_run(f'cmake -H{iroha_home} -Bbuild -DSWIG_PYTHON=ON')

# Build only the Python-binding target, parallelised across all online CPUs.
_run('cmake --build build/ --target irohapy -- -j$(getconf _NPROCESSORS_ONLN)')

# Generate protobuf Python modules for the schema into the current directory.
proto_path = os.path.abspath(os.path.join(cur_dir, '..', '..', 'schema'))
_run(f'protoc --proto_path={proto_path} --python_out=. block.proto primitive.proto commands.proto queries.proto responses.proto endpoint.proto')
# Generate gRPC stubs for the service protos.
# NOTE(review): this invokes python2 — confirm the target environment still
# provides it (and grpc_tools) before relying on this step.
_run(f'python2 -m grpc_tools.protoc --proto_path={proto_path} --python_out=. --grpc_python_out=. endpoint.proto yac.proto ordering.proto loader.proto')
package mastermind.controllers.standalone;
import mastermind.controllers.StartController;
import mastermind.models.Session;
import mastermind.models.dao.DAOManager;
/**
 * Standalone variant of {@link StartController} that loads and lists saved
 * games through a {@link DAOManager}.
 */
public class StartControllerImplStandalone extends StartController {

    /** Persistence manager used to load and enumerate saved games; made final since it is assigned once. */
    private final DAOManager daoManager;

    public StartControllerImplStandalone(Session session, DAOManager daoManager) {
        super(session);
        this.daoManager = daoManager;
    }

    /** Starts a fresh game by advancing the session to its next state. */
    @Override
    public void start() {
        session.nextState();
    }

    /** Resumes a game by loading the save stored under {@code fileName}. */
    @Override
    public void start(String fileName) {
        daoManager.load(fileName);
    }

    /** Returns the names of all saved games available to resume. */
    @Override
    public String[] getSavedGamesNames() {
        return daoManager.getGamesNames();
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.