text stringlengths 1 1.05M |
|---|
#include <vector>
#include <unordered_map>
// Classic hash-map two-sum: return the indices of the first pair (in scan
// order) of elements whose values sum to `target`, or an empty vector when
// no such pair exists. O(n) time, O(n) extra space.
std::vector<int> twoSum(std::vector<int>& nums, int target) {
    std::unordered_map<int, int> seen;  // value -> index where it was first seen
    for (std::size_t i = 0; i < nums.size(); ++i) {
        auto it = seen.find(target - nums[i]);
        if (it != seen.end()) {
            // Earlier index first, current index second.
            return {it->second, static_cast<int>(i)};
        }
        seen[nums[i]] = static_cast<int>(i);
    }
    return {};  // no solution found
}
package sort;
import java.io.BufferedReader;
import java.io.InputStreamReader;
import java.util.ArrayList;
import java.util.Collections;
import java.util.StringTokenizer;
/**
*
* @author minchoba
* 백준 15970번: 화살표 그리기
*
* @see https://www.acmicpc.net/problem/15970/
*
*/
public class Boj15970 {
    public static void main(String[] args) throws Exception {
        BufferedReader br = new BufferedReader(new InputStreamReader(System.in));
        int N = Integer.parseInt(br.readLine());

        // list[c] collects the coordinates of all points with colour c (0-based).
        ArrayList<Integer>[] list = new ArrayList[N];
        for (int i = 0; i < N; i++) {
            list[i] = new ArrayList<>();
        }
        for (int i = 0; i < N; i++) {
            StringTokenizer st = new StringTokenizer(br.readLine());
            int point = Integer.parseInt(st.nextToken());
            int flag = Integer.parseInt(st.nextToken()) - 1;
            list[flag].add(point);
        }

        int result = 0;
        for (int i = 0; i < N; i++) {
            if (list[i] == null) continue; // defensive; all slots are initialised above
            result += getDistance(N, list[i]); // sum arrow lengths per colour
        }
        System.out.println(result);
    }

    /**
     * Sum, over every point of one colour, the distance to its nearest
     * same-coloured neighbour (sorted coordinates; end points have only one
     * neighbour, inner points take the shorter side).
     *
     * @param n   total number of points (unused, kept for interface compatibility)
     * @param arr coordinates of the points of one colour; sorted in place
     * @return    total arrow length for this colour
     */
    private static int getDistance(int n, ArrayList<Integer> arr) {
        Collections.sort(arr);
        int size = arr.size();
        // BUG FIX: with a single point the old code evaluated arr.get(1) and
        // threw IndexOutOfBoundsException; 0 or 1 point means no arrow at all.
        if (size < 2) {
            return 0;
        }
        int leng = 0;
        for (int i = 0; i < size; i++) {
            if (i == 0) leng += arr.get(i + 1) - arr.get(i);
            else if (i == size - 1) leng += arr.get(i) - arr.get(i - 1);
            else leng += Math.min(arr.get(i) - arr.get(i - 1), arr.get(i + 1) - arr.get(i));
        }
        return leng;
    }
}
|
<gh_stars>0
// Doxygen-generated navigation index for _neon_depth_to_space_workload_8cpp:
// [display name, anchor href, child entries]. Do not edit by hand.
var _neon_depth_to_space_workload_8cpp =
[
    [ "NeonDepthToSpaceWorkloadValidate", "_neon_depth_to_space_workload_8cpp.xhtml#a116d88067bf98ce9858ab73e68f605f9", null ]
]; |
package yimei.jss.algorithm.adaptivepop;
import ec.EvolutionState;
import ec.Individual;
import ec.select.TournamentSelection;
import ec.util.Parameter;
import yimei.jss.feature.FeatureUtil;
// Tournament selection that, on one specific generation (`pre-generations`),
// draws candidates from an elite fraction of a replacement population
// (FeatureUtil.getNewpop()) instead of the current population.
public class TournamentSelectionForPop extends TournamentSelection {
public static final String P_PRE_GENERATIONS = "pre-generations";
public static final String P_POP_ADAPT_FRAC_ELITES = "pop-adapt-frac-elites";
// Generation at which the population is swapped in (-1 when unset).
private int preGenerations;
// Fraction of the population treated as elites on the swap generation.
private double fracElites;
public int produce(final int subpopulation,
final EvolutionState state,
final int thread)
{
// pick size random individuals, then pick the best.
// Re-read every call so parameter changes are picked up; -1 when absent.
preGenerations = state.parameters.getIntWithDefault(new Parameter(P_PRE_GENERATIONS), null, -1); //50
Individual[] oldinds;
// On the swap generation, judge candidates against the new population
// produced by FeatureUtil; otherwise use the regular population.
if(state.generation == preGenerations)
oldinds = FeatureUtil.getNewpop().subpops[subpopulation].individuals;
else
oldinds = state.population.subpops[subpopulation].individuals;
int best = getRandomIndividual(0, subpopulation, state, thread);
int s = getTournamentSizeToUse(state.random[thread]);
// pickWorst/betterThan/getTournamentSizeToUse come from TournamentSelection.
if (pickWorst)
for (int x=1;x<s;x++)
{
int j = getRandomIndividual(x, subpopulation, state, thread);
if (!betterThan(oldinds[j], oldinds[best], subpopulation, state, thread)) // j is at least as bad as best
best = j;
}
else
for (int x=1;x<s;x++)
{
int j = getRandomIndividual(x, subpopulation, state, thread);
if (betterThan(oldinds[j], oldinds[best], subpopulation, state, thread)) // j is better than best
best = j;
}
return best;
}
/** Produces the index of a (typically uniformly distributed) randomly chosen individual
to fill the tournament. <i>number</i> is the position of the individual in the tournament. */
public int getRandomIndividual(int number, int subpopulation, EvolutionState state, int thread)
{
preGenerations = state.parameters.getIntWithDefault(new Parameter(P_PRE_GENERATIONS), null, -1); //50
fracElites = state.parameters.getDoubleWithDefault(
new Parameter(P_POP_ADAPT_FRAC_ELITES), null, 0.0); //0.0
// NOTE(review): on the swap generation this draws only from the top
// fracElites share of indices. If fracElites * length < 1 the cast yields 0
// and Random.nextInt(0) throws IllegalArgumentException — confirm the
// parameter is always configured > 0 when pre-generations is used.
// NOTE(review): uses subpops[0] here but subpops[subpopulation] elsewhere —
// verify this is intentional for multi-subpopulation runs.
if(state.generation == preGenerations){
return state.random[thread].nextInt((int)(state.population.subpops[0].individuals.length * fracElites));
}else
{
Individual[] oldinds = state.population.subpops[subpopulation].individuals;
return state.random[thread].nextInt(oldinds.length);
}
}
}
|
import { Uri, window } from "vscode";
import { ICommand } from "./ICommand";
import { inject, injectable } from "inversify";
import { ScriptItem } from "../views/scriptExplorer/ScriptItem";
import TYPES from "../Types";
import { IFileService } from "../services/file/IFileService";
import { IScriptService } from "../services/script/IScriptService";
import { EngineType } from "../models/EngineType";
import { IScriptRemoteService } from "../services/scriptRemote/IScriptRemoteService";
@injectable()
// Command handler: rename an ioBroker script both on the remote server and as
// the matching local file. Registered under the script-explorer context menu.
export class ScriptRenameCommand implements ICommand {
id: string = "iobroker-javascript.view.scriptExplorer.rename";
constructor(
@inject(TYPES.services.scriptRemote) private scriptRemoteService: IScriptRemoteService,
@inject(TYPES.services.file) private fileService: IFileService,
@inject(TYPES.services.script) private scriptService: IScriptService,
) {}
// args[0] is expected to be the ScriptItem the user invoked the command on;
// without it (e.g. command palette) we only show an information message.
async execute(...args: any[]) {
if (args && args[0]) {
const localScript = (<ScriptItem>args[0]).script;
const script = localScript.ioBrokerScript;
const scriptName = script.common.name;
const scriptId = script._id;
const newScriptName = await window.showInputBox({prompt: "The new name of the script.", value: scriptName});
// Empty input or Escape yields undefined/"" — both skip the rename.
if (newScriptName) {
// Rename remotely first; the local file is only touched on success.
await this.scriptRemoteService.rename(scriptId, newScriptName);
const oldPath = localScript.absoluteUri;
// "unkown" is the project's spelling of the EngineType member.
const fileExtension = this.scriptService.getFileExtension(<EngineType>script.common.engineType ?? EngineType.unkown);
// Replace the last path segment with "<newName>.<ext>".
const splittedPath = oldPath.path.split("/");
splittedPath.splice(-1,1);
splittedPath.push(`${newScriptName}.${fileExtension}`);
const newPath = Uri.file(splittedPath.join("/"));
await this.fileService.rename(oldPath, newPath);
}
} else {
window.showInformationMessage("This command can only be invoked over the script explorer!");
}
}
}
|
package de.hshannover.inform.dunkleit.gruppe12.snake.controller;
import java.util.Observable;
/**
* <h1>Score Model</h1> Purpose: hold current game score
*
* @author <NAME>
* @version 1.0
*/
public class Score extends Observable {
    private int score;

    /**
     * Game score constructor without parameters. Initiate score value 0
     **/
    public Score() {
        super();
        score = 0;
    }

    /**
     * Get current game score as Integer
     *
     * @return int current game score
     **/
    public int getValue() {
        return score;
    }

    /**
     * Method to increase score by 1 and notify observers. Used by snake's grow function
     **/
    public void addPoint() {
        score++;
        setChanged();
        notifyObservers();
    }

    /**
     * Method to clear game score (Score = 0) and notify observers.
     **/
    public void clear() {
        score = 0;
        // BUG FIX: previously the score was reset silently, so observers (e.g.
        // the score display) kept showing the stale value; notify like addPoint().
        setChanged();
        notifyObservers();
    }
}
|
package main
import (
"fmt"
"log"
"github.com/nicksnyder/go-i18n/i18n"
)
var (
	// Language tags with a bundled i18n/<tag>.all.yaml translation file.
	// NOTE(review): identifier is misspelled ("Lanuages"); it may be referenced
	// elsewhere in the package, so it is left unchanged here.
	availableLanuages = []string{
		"de-DE",
		"en-US",
	}
)
// init loads every bundled translation file (via the generated Asset function,
// presumably go-bindata — confirm) into go-i18n at startup. Any missing or
// malformed file is fatal: the binary refuses to start without translations.
func init() {
	for _, lang := range availableLanuages {
		filename := fmt.Sprintf("i18n/%s.all.yaml", lang)
		langContent, err := Asset(filename)
		if err != nil {
			log.Fatalf("Could not read source of language file %s: %s", lang, err)
		}
		if err := i18n.ParseTranslationFileBytes(filename, langContent); err != nil {
			log.Fatalf("Could not parse language file %s: %s", lang, err)
		}
	}
}
|
<gh_stars>0
# frozen_string_literal: true
module Twitch
  class Client
    ## API methods for streams. Each wraps the client's generic get/post and
    ## wraps the payload in the named response/model class via initialize_response.
    module Streams
      # Create a stream marker (requires a user access token).
      def create_stream_marker(options = {})
        require_access_token do
          initialize_response StreamMarker, post('streams/markers', options)
        end
      end

      # List stream markers (requires a user access token).
      def get_stream_markers(options = {})
        require_access_token do
          initialize_response StreamMarkerResponse, get('streams/markers', options)
        end
      end

      # List live streams, filtered by the given options.
      def get_streams(options = {})
        initialize_response Stream, get('streams', options)
      end

      # Fetch a single stream by id.
      def get_stream(stream_id)
        initialize_response Stream, get("streams/#{stream_id}", {})
      end

      ## TODO: Can't find this method in documentation, test it
      def get_streams_metadata(options = {})
        initialize_response StreamMetadata, get('streams/metadata', options)
      end
    end
  end
end
|
#!/bin/bash
# Release build: clean, skip tests, and deploy artifacts with the Maven
# `release` profile. Aborts on the first failing command.
set -e
mvn -DskipTests clean deploy -P release |
#!/bin/sh
# Django container entrypoint: wait for Postgres (when configured), reset and
# migrate the database, collect static files, then exec the real command.
if [ "$DATABASE" = "postgres" ]
then
# Poll until the database port accepts TCP connections.
echo "Waiting for postgres..."
while ! nc -z $SQL_HOST $SQL_PORT; do
sleep 0.1
done
echo "PostgreSQL started"
fi
# NOTE(review): `flush` wipes all data on every container start — confirm this
# is intended (typical for dev/test images only, destructive in production).
python manage.py flush --no-input
python manage.py migrate
python manage.py collectstatic --no-input --clear
# Hand off PID 1 to the command passed by the Dockerfile/compose file.
exec "$@" |
# Install locally-built wheels and run the test suite with a chosen Python.
# NOTE(review): uses [[ ]] (bash-only) but no shebang is visible in this chunk —
# confirm it is invoked with bash.
set -e
# run_test <python-executable>: install the matching cpXY wheel from wheelhouse,
# install test dependencies, then run the suite in three passes
# (regular tests, eager tests, grpc tests).
run_test() {
entry=$1
# e.g. "36" for CPython 3.6 — used to pick the right wheel tag.
CPYTHON_VERSION=$($entry -c 'import sys; print(str(sys.version_info[0])+str(sys.version_info[1]))')
(cd wheelhouse && $entry -m pip install *-cp${CPYTHON_VERSION}-*.whl)
$entry -m pip install -q pytest boto3 google-cloud-pubsub==0.39.1 pyarrow==0.11.1 pandas==0.19.2
(cd tests && $entry -m pytest -v --import-mode=append $(find . -type f \( -iname "test_*.py" ! \( -iname "test_*_eager.py" -o -iname "test_grpc.py" \) \)))
(cd tests && $entry -m pytest -v --import-mode=append $(find . -type f \( -iname "test_*_eager.py" \)))
(cd tests && $entry -m pytest -v --import-mode=append $(find . -type f \( -iname "test_grpc.py" \)))
}
# First CLI argument overrides the interpreter (default: `python`).
PYTHON_VERSION=python
if [[ "$#" -gt 0 ]]; then
PYTHON_VERSION="${1}"
shift
fi
# On Linux CI images, install the interpreter, ffmpeg, and pip first.
if [[ $(uname) == "Linux" ]]; then
apt-get -y -qq update
apt-get -y -qq install $PYTHON_VERSION ffmpeg
curl -sSOL https://bootstrap.pypa.io/get-pip.py
$PYTHON_VERSION get-pip.py -q
fi
run_test $PYTHON_VERSION
|
<filename>lib/functions_test.go
package lib
import "testing"
// TestZip archives ./testZipDir into archive.zip and fails on any error.
// NOTE(review): depends on ./testZipDir existing in the working directory and
// leaves archive.zip behind for TestUnzip below.
func TestZip(t *testing.T) {
err := Zip("./testZipDir", "archive.zip")
if err != nil {
t.Errorf("%v\n", err)
}
}
// TestUnzip extracts the archive produced by TestZip and checks that exactly
// one entry id ("dir") is returned. (The Errorf message reads
// "expected %v, actual %v" in Chinese.)
func TestUnzip(t *testing.T) {
idList, err := Unzip("./archive.zip", "hellohello")
if err != nil {
t.Errorf("%v\n", err)
}
if len(idList) != 1 || idList[0] != "dir" {
t.Errorf("期待%v,实际%v\n", []string{"dir"}, idList)
}
}
|
# Launch ImageNet training for the GBDT-NAS-3S first-pruning architecture,
# teeing stdout to $OUTPUT_DIR/train.log. Run from one level below repo root.
cd ../
export PYTHONPATH=.:$PYTHONPATH
MODEL=GBDT-NAS-3S_1st_Pruning_Net
OUTPUT_DIR=outputs/$MODEL
DATA_DIR=data/imagenet/raw-data
# Architecture encoding consumed by train_imagenet.py --arch.
ARCH="4 4 2 1 6 7 5 3 4 6 5 1 3 2 7 1 6 1 2 6 4"
mkdir -p $OUTPUT_DIR
python train_imagenet.py \
--data_path=$DATA_DIR \
--output_dir=$OUTPUT_DIR \
--lazy_load \
--arch="$ARCH" \
--dropout=0.3 \
--width_stages="32,48,96,104,208,432" \
| tee -a $OUTPUT_DIR/train.log
|
#!/bin/bash
# Write the Xray server configuration to /usr/local/etc/xray/config.json.
# The unquoted heredoc expands $PORT, $UUID, $QJPASS, $WS_PATH, $VMTCP and
# $VMWS from the environment. Layout: a public VLESS+XTLS inbound on $PORT
# that path-routes fallbacks to four loopback inbounds — trojan (52000),
# VLESS-over-WS (52001), VMess-over-TCP/http-header (52002) and
# VMess-over-WS (52003) — all fronted with PROXY protocol (xver 1).
cat >/usr/local/etc/xray/config.json <<-EOF
{
"log": {
"loglevel": "warning"
},
"inbounds": [
{
"port": ${PORT},
"protocol": "vless",
"settings": {
"clients": [
{
"id": "${UUID}",
"flow": "xtls-rprx-direct",
"level": 0,
"email": "love@example.com"
}
],
"decryption": "none",
"fallbacks": [
{
"dest": 52000,
"xver": 1
},
{
"path": "/${WS_PATH}/",
"dest": 52001,
"xver": 1
},
{
"path": "/${VMTCP}/",
"dest": 52002,
"xver": 1
},
{
"path": "/${VMWS}/",
"dest": 52003,
"xver": 1
}
]
},
"streamSettings": {
"network": "tcp",
"security": "xtls",
"xtlsSettings": {
"alpn": [
"http/1.1"
],
"certificates": [
{
"certificateFile": "/ssl/xray.crt",
"keyFile": "/ssl/xray.key"
}
]
}
}
},
{
"port": 52000,
"listen": "127.0.0.1",
"protocol": "trojan",
"settings": {
"clients": [
{
"password": "${QJPASS}",
"level": 0,
"email": "love@example.com"
}
],
"fallbacks": [
{
"dest": 80
}
]
},
"streamSettings": {
"network": "tcp",
"security": "none",
"tcpSettings": {
"acceptProxyProtocol": true
}
}
},
{
"port": 52001,
"listen": "127.0.0.1",
"protocol": "vless",
"settings": {
"clients": [
{
"id": "${UUID}",
"level": 0,
"email": "love@example.com"
}
],
"decryption": "none"
},
"streamSettings": {
"network": "ws",
"security": "none",
"wsSettings": {
"acceptProxyProtocol": true,
"path": "/${WS_PATH}/"
}
}
},
{
"port": 52002,
"listen": "127.0.0.1",
"protocol": "vmess",
"settings": {
"clients": [
{
"id": "${UUID}",
"level": 0,
"email": "love@example.com"
}
]
},
"streamSettings": {
"network": "tcp",
"security": "none",
"tcpSettings": {
"acceptProxyProtocol": true,
"header": {
"type": "http",
"request": {
"path": [
"/${VMTCP}/"
]
}
}
}
}
},
{
"port": 52003,
"listen": "127.0.0.1",
"protocol": "vmess",
"settings": {
"clients": [
{
"id": "${UUID}",
"level": 0,
"email": "love@example.com"
}
]
},
"streamSettings": {
"network": "ws",
"security": "none",
"wsSettings": {
"acceptProxyProtocol": true,
"path": "/${VMWS}/"
}
}
}
],
"outbounds": [
{
"protocol": "freedom"
}
]
}
EOF
|
import asyncio
async def process_command(event, input_str, CMD_LIST, borg):
    """Reply with the commands registered under the module named by input_str.

    No argument -> send a hint message; unknown module -> show a checking
    notice; known module -> edit the message with its command list.
    """
    if not input_str:
        await borg.send_message(event.chat_id, "`Lol Try .help`")
        await asyncio.sleep(5)
        return
    if input_str in CMD_LIST:
        header = "Commands found in {}:\n".format(input_str)
        body = "".join("\n " + command for command in CMD_LIST[input_str])
        await event.edit(header + body + "\n")
    else:
        await event.edit("`Wait Checking..`")
        await asyncio.sleep(2)
#!/bin/bash
# Travis CI deploy step: log in to Docker Hub and push the built image.
# Non-master branches additionally get a branch-named tag.
set -exu
echo "${DOCKER_PASSWORD}" | docker login -u "${DOCKER_USERNAME}" --password-stdin
docker push "${TRAVIS_REPO_SLUG}"
if [ "${TRAVIS_BRANCH}" != "master" ]; then
docker tag "${TRAVIS_REPO_SLUG}" "${TRAVIS_REPO_SLUG}:${TRAVIS_BRANCH}"
docker push "${TRAVIS_REPO_SLUG}:${TRAVIS_BRANCH}"
fi
|
import pytest
import pandas as pd
import numpy as np
import contextlib
from io import StringIO
import tifffile
from bg_atlasapi.core import AdditionalRefDict
# Check the example_mouse atlas metadata and derived properties against known
# values. "trasform_to_bg" is the (misspelled) key actually used in the data.
def test_initialization(atlas):
assert atlas.metadata == {
"name": "example_mouse",
"citation": "Wang et al 2020, https://doi.org/10.1016/j.cell.2020.04.007",
"atlas_link": "http://www.brain-map.org",
"species": "Mus musculus",
"symmetric": True,
"resolution": [100.0, 100.0, 100.0],
"orientation": "asr",
"version": atlas.metadata["version"], # no target value for version
"shape": [132, 80, 114],
"trasform_to_bg": [
[1.0, 0.0, 0.0, 0.0],
[0.0, 1.0, 0.0, 0.0],
[0.0, 0.0, 1.0, 0.0],
[0.0, 0.0, 0.0, 1.0],
],
"additional_references": [],
}
assert atlas.orientation == "asr"
assert atlas.shape == (132, 80, 114)
assert atlas.resolution == (100.0, 100.0, 100.0)
assert atlas.shape_um == (13200.0, 8000.0, 11400.0)
def test_additional_ref_dict(temp_path):
    """AdditionalRefDict should serve saved stacks by key and None for unknown keys."""
    fake_data = dict()
    for k in ["1", "2"]:
        stack = np.ones((10, 20, 30)) * int(k)
        fake_data[k] = stack
        tifffile.imsave(temp_path / f"{k}.tiff", stack)
    add_ref_dict = AdditionalRefDict(fake_data.keys(), temp_path)
    for k, stack in add_ref_dict.items():
        # BUG FIX: `assert array == array` raises ValueError for multi-element
        # numpy arrays (ambiguous truth value); compare element-wise instead.
        assert np.array_equal(add_ref_dict[k], stack)
    assert add_ref_dict["3"] is None
# Spot-check a 2x2x2 corner of each data stack against reference voxel values.
@pytest.mark.parametrize(
"stack_name, val",
[
("reference", [[[146, 155], [153, 157]], [[148, 150], [153, 153]]]),
("annotation", [[[59, 362], [59, 362]], [[59, 362], [59, 362]]]),
("hemispheres", [[[2, 1], [2, 1]], [[2, 1], [2, 1]]]),
],
)
def test_stacks(atlas, stack_name, val):
loaded_stack = getattr(atlas, stack_name)
assert np.allclose(loaded_stack[65:67, 39:41, 57:59], val)
# The example atlas exposes exactly three structures (root/grey/CH), and
# _get_from_structure maps ids back to acronyms in order.
def test_structures(atlas):
assert {s["acronym"]: k for k, s in atlas.structures.items()} == {
"root": 997,
"grey": 8,
"CH": 567,
}
assert atlas._get_from_structure([997, 8, 567], "acronym") == [
"root",
"grey",
"CH",
]
# Coordinate lookups should accept lists, tuples and arrays alike, and the
# microns=True path must agree with the voxel-index path.
@pytest.mark.parametrize(
"coords", [[39.0, 36.0, 57.0], (39, 36, 57), np.array([39.0, 36.0, 57.0])]
)
def test_data_from_coords(atlas, coords):
res = atlas.resolution
assert atlas.structure_from_coords(coords) == 997
assert atlas.structure_from_coords(coords, as_acronym=True) == "root"
assert (
atlas.structure_from_coords(
[c * r for c, r in zip(coords, res)], microns=True, as_acronym=True
)
== "root"
)
assert atlas.hemisphere_from_coords(coords) == atlas.right_hemisphere_value
assert atlas.hemisphere_from_coords(coords, as_string=True) == "right"
assert (
atlas.hemisphere_from_coords(
[c * r for c, r in zip(coords, res)], microns=True, as_string=True
)
== "right"
)
# Mesh file paths resolve under <root_dir>/meshes/<id>.obj.
def test_meshfile_from_id(atlas):
assert (
atlas.meshfile_from_structure("CH")
== atlas.root_dir / "meshes/567.obj"
)
assert atlas.root_meshfile() == atlas.root_dir / "meshes/997.obj"
# Meshes load identically via structures[...], mesh_from_structure and root_mesh.
def test_mesh_from_id(atlas):
mesh = atlas.structures[567]["mesh"]
assert np.allclose(mesh.points[0], [8019.52, 3444.48, 507.104])
mesh = atlas.mesh_from_structure(567)
assert np.allclose(mesh.points[0], [8019.52, 3444.48, 507.104])
mesh = atlas.root_mesh()
assert np.allclose(mesh.points[0], [7896.56, 3384.15, 503.781])
def test_lookup_df(atlas):
    """lookup_df should match the expected acronym/id/name table exactly."""
    df_lookup = atlas.lookup_df
    expected = pd.DataFrame(
        dict(
            acronym=["root", "grey", "CH"],
            id=[997, 8, 567],
            name=["root", "Basic cell groups and regions", "Cerebrum"],
        )
    )
    # BUG FIX: `all(df_lookup == df)` iterated over the boolean frame's COLUMN
    # LABELS (non-empty strings, always truthy), so the assertion could never
    # fail. Reduce over both axes to compare every cell.
    assert (df_lookup == expected).all().all()
# The treelib hierarchy prints as a three-level tree and exposes tagged nodes.
def test_hierarchy(atlas):
hier = atlas.hierarchy
temp_stdout = StringIO()
with contextlib.redirect_stdout(temp_stdout):
print(hier)
output = temp_stdout.getvalue().strip()
assert output == "root (997)\n└── grey (8)\n └── CH (567)"
assert {k: v.tag for k, v in hier.nodes.items()} == {
997: "root (997)",
8: "grey (8)",
567: "CH (567)",
}
# Ancestor/descendant queries walk the same three-node tree.
def test_descendants(atlas):
anc = atlas.get_structure_ancestors("CH")
assert anc == ["root", "grey"]
desc = atlas.get_structure_descendants("root")
assert desc == ["grey", "CH"]
|
def count_substring(string, substring):
    """Count occurrences of substring in string, overlapping matches included.

    An empty substring matches at every position, giving len(string) + 1.
    """
    width = len(substring)
    return sum(
        1
        for start in range(len(string) - width + 1)
        if string[start:start + width] == substring
    )
<filename>pypy/translator/c/test/test_genc.py
import autopath, sys, os, py
from pypy.rpython.lltypesystem.lltype import *
from pypy.annotation import model as annmodel
from pypy.translator.translator import TranslationContext
from pypy.translator.c.database import LowLevelDatabase
from pypy.translator.c import genc
from pypy.translator.c.gc import NoneGcPolicy
from pypy.objspace.flow.model import Constant, Variable, SpaceOperation
from pypy.objspace.flow.model import Block, Link, FunctionGraph
from pypy.tool.udir import udir
from pypy.translator.gensupp import uniquemodulename
from pypy.translator.backendopt.all import backend_optimizations
from pypy.translator.interactive import Translation
from pypy.rlib.entrypoint import entrypoint
from pypy.tool.nullpath import NullPyPathLocal
# Translate `fn` to C via the PyPy toolchain and return a wrapper that also
# verifies malloc/free balance (expected_extra_mallocs kwarg). Python 2 code;
# indentation was stripped by the dataset export and is left as-is.
def compile(fn, argtypes, view=False, gcpolicy="ref", backendopt=True,
annotatorpolicy=None):
if argtypes is not None and "__pypy__" in sys.builtin_module_names:
py.test.skip("requires building cpython extension modules")
t = Translation(fn, argtypes, gc=gcpolicy, backend="c",
policy=annotatorpolicy)
if not backendopt:
t.disable(["backendopt_lltype"])
t.annotate()
# XXX fish
t.driver.config.translation.countmallocs = True
compiled_fn = t.compile_c()
try:
if py.test.config.option.view:
t.view()
except AttributeError:
pass
malloc_counters = t.driver.cbuilder.get_malloc_counters()
# Wrapper: run the compiled function, then assert the malloc/free delta
# matches expected_extra_mallocs (an int or a collection of ints).
def checking_fn(*args, **kwds):
if 'expected_extra_mallocs' in kwds:
expected_extra_mallocs = kwds.pop('expected_extra_mallocs')
else:
expected_extra_mallocs = 0
res = compiled_fn(*args, **kwds)
mallocs, frees = malloc_counters()
if isinstance(expected_extra_mallocs, int):
assert mallocs - frees == expected_extra_mallocs
else:
assert mallocs - frees in expected_extra_mallocs
return res
return checking_fn
# Basic genc smoke test: compile x*2 and check calls plus malloc balance.
def test_simple():
def f(x):
return x*2
t = TranslationContext()
t.buildannotator().build_types(f, [int])
t.buildrtyper().specialize()
t.config.translation.countmallocs = True
builder = genc.CExtModuleBuilder(t, f, config=t.config)
builder.generate_source()
builder.compile()
f1 = builder.get_entry_point()
assert f1(5) == 10
assert f1(-123) == -246
assert builder.get_malloc_counters()() == (0, 0)
py.test.raises(Exception, f1, "world") # check that it's really typed
# With dont_write_c_files the target dir must be a no-op NullPyPathLocal.
def test_dont_write_source_files():
def f(x):
return x*2
t = TranslationContext()
t.buildannotator().build_types(f, [int])
t.buildrtyper().specialize()
t.config.translation.countmallocs = True
t.config.translation.dont_write_c_files = True
builder = genc.CExtModuleBuilder(t, f, config=t.config)
builder.generate_source()
assert isinstance(builder.targetdir, NullPyPathLocal)
assert builder.targetdir.listdir() == []
# Lambdas should compile just like named functions.
def test_simple_lambda():
f = lambda x: x*2
t = TranslationContext()
t.buildannotator().build_types(f, [int])
t.buildrtyper().specialize()
t.config.translation.countmallocs = True
builder = genc.CExtModuleBuilder(t, f, config=t.config)
builder.generate_source()
builder.compile()
f1 = builder.get_entry_point()
assert f1(5) == 10
# A runtime error in compiled code must surface as a Python exception,
# not a segfault (which the isolated runner reports as EOFError).
def test_py_capi_exc():
def f(x):
if x:
l = None
else:
l = [2]
x = x*2
return l[0]
t = TranslationContext()
t.buildannotator().build_types(f, [int])
t.buildrtyper().specialize()
builder = genc.CExtModuleBuilder(t, f, config=t.config)
builder.generate_source()
builder.compile()
f1 = builder.get_entry_point(isolated=True)
x = py.test.raises(Exception, f1, "world")
assert not isinstance(x.value, EOFError) # EOFError === segfault
# RPython list operations survive translation.
def test_rlist():
def f(x):
l = [x]
l.append(x+1)
return l[0] * l[-1]
f1 = compile(f, [int])
assert f1(5) == 30
#assert f1(x=5) == 30
# Low-level pointers: null vs malloc'ed struct paths.
def test_rptr():
S = GcStruct('testing', ('x', Signed), ('y', Signed))
def f(i):
if i < 0:
p = nullptr(S)
else:
p = malloc(S)
p.x = i*2
if i > 0:
return p.x
else:
return -42
f1 = compile(f, [int])
assert f1(5) == 10
#assert f1(i=5) == 10
assert f1(1) == 2
assert f1(0) == -42
assert f1(-1) == -42
assert f1(-5) == -42
# GcArray of PyObject pointers round-trips arbitrary Python values.
def test_rptr_array():
A = GcArray(Ptr(PyObject))
def f(i, x):
p = malloc(A, i)
p[1] = x
return p[1]
f1 = compile(f, [int, annmodel.SomePtr(Ptr(PyObject))])
assert f1(5, 123) == 123
assert f1(12, "hello") == "hello"
# A nolength char array reads back the NUL byte it was zero-initialised with.
def test_empty_string():
A = Array(Char, hints={'nolength': True})
p = malloc(A, 1, immortal=True)
def f():
return p[0]
f1 = compile(f, [])
assert f1() == '\x00'
# Attach runtime type info functions to two structs and check that decref'ing
# instances does not crash and mallocs balance.
def test_runtime_type_info():
S = GcStruct('s', ('is_actually_s1', Bool), rtti=True)
S1 = GcStruct('s1', ('sub', S), rtti=True)
def rtti_S(p):
if p.is_actually_s1:
return getRuntimeTypeInfo(S1)
else:
return getRuntimeTypeInfo(S)
def rtti_S1(p):
return getRuntimeTypeInfo(S1)
def does_stuff():
p = malloc(S)
p.is_actually_s1 = False
p1 = malloc(S1)
p1.sub.is_actually_s1 = True
# and no crash when p and p1 are decref'ed
return None
t = TranslationContext()
t.buildannotator().build_types(does_stuff, [])
rtyper = t.buildrtyper()
rtyper.attachRuntimeTypeInfoFunc(S, rtti_S)
rtyper.attachRuntimeTypeInfoFunc(S1, rtti_S1)
rtyper.specialize()
#t.view()
from pypy.translator.c import genc
t.config.translation.countmallocs = True
builder = genc.CExtModuleBuilder(t, does_stuff, config=t.config)
builder.generate_source()
builder.compile()
f1 = builder.get_entry_point()
f1()
mallocs, frees = builder.get_malloc_counters()()
assert mallocs == frees
# str() on arbitrary objects goes through the CPython API.
def test_str():
def call_str(o):
return str(o)
f1 = compile(call_str, [object])
lst = (1, [5], "'hello'", lambda x: x+1)
res = f1(lst)
assert res == str(lst)
# Constant-string indexing.
def test_rstr():
def fn(i):
return "hello"[i]
f1 = compile(fn, [int])
res = f1(1)
assert res == 'e'
def test_recursive_struct():
# B has an A as its super field, and A has a pointer to B.
class A:
pass
class B(A):
pass
def fn(i):
a = A()
b = B()
a.b = b
b.i = i
return a.b.i
f1 = compile(fn, [int])
res = f1(42)
assert res == 42
# Mutually-referencing class graph must still translate.
def test_recursive_struct_2():
class L:
def __init__(self, target):
self.target = target
class RL(L):
pass
class SL(L):
pass
class B:
def __init__(self, exits):
self.exits = exits
def fn(i):
rl = RL(None)
b = B([rl])
sl = SL(b)
f1 = compile(fn, [int])
f1(42)
# Prebuilt +/-inf constants survive translation (res == res/2 only for inf).
def test_infinite_float():
x = 1.0
while x != x / 2:
x *= 3.1416
def fn():
return x
f1 = compile(fn, [])
res = f1()
assert res > 0 and res == res / 2
def fn():
return -x
f1 = compile(fn, [])
res = f1()
assert res < 0 and res == res / 2
class Box:
def __init__(self, d):
self.d = d
b1 = Box(x)
b2 = Box(-x)
b3 = Box(1.5)
def f(i):
if i==0:
b = b1
elif i==1:
b = b2
else:
b = b3
return b.d
f1 = compile(f, [int])
res = f1(0)
assert res > 0 and res == res / 2
res = f1(1)
assert res < 0 and res == res / 2
res = f1(3)
assert res == 1.5
# inf/-inf/nan/0.0/-0.0 constants keep their IEEE identity when returned
# directly, from a list, and from a nested tuple-in-list.
def test_nan_and_special_values():
from pypy.rlib.rfloat import isnan, isinf, isfinite, copysign
inf = 1e300 * 1e300
assert isinf(inf)
nan = inf/inf
assert isnan(nan)
for value, checker in [
(inf, lambda x: isinf(x) and x > 0.0),
(-inf, lambda x: isinf(x) and x < 0.0),
(nan, isnan),
(42.0, isfinite),
(0.0, lambda x: not x and copysign(1., x) == 1.),
(-0.0, lambda x: not x and copysign(1., x) == -1.),
]:
def f():
return value
f1 = compile(f, [])
res = f1()
assert checker(res)
l = [value]
def g(x):
return l[x]
g2 = compile(g, [int])
res = g2(0)
assert checker(res)
l2 = [(-value, -value), (value, value)]
def h(x):
return l2[x][1]
h3 = compile(h, [int])
res = h3(1)
assert checker(res)
# A prebuilt instance dict is mutable after translation.
def test_prebuilt_instance_with_dict():
class A:
pass
a = A()
a.d = {}
a.d['hey'] = 42
def t():
a.d['hey'] = 2
return a.d['hey']
f = compile(t, [])
assert f() == 2
# Constant strings of various lengths/content (including this file's bytes)
# index correctly character by character.
def test_long_strings():
s1 = 'hello'
s2 = ''.join([chr(i) for i in range(256)])
s3 = 'abcd'*17
s4 = open(__file__, 'rb').read()
choices = [s1, s2, s3, s4]
def f(i, j):
return choices[i][j]
f1 = compile(f, [int, int])
for i, s in enumerate(choices):
for j, c in enumerate(s):
assert f1(i, j) == c
# keepalive_until_here must translate and run without effect on the result.
def test_keepalive():
from pypy.rlib import objectmodel
def f():
x = [1]
y = ['b']
objectmodel.keepalive_until_here(x,y)
return 1
f1 = compile(f, [])
assert f1() == 1
# Passing a PyObject through compiled code must not leak references.
def test_refcount_pyobj():
def prob_with_pyobj(b):
return 3, b
f = compile(prob_with_pyobj, [object])
from sys import getrefcount as g
obj = None
import gc; gc.collect()
before = g(obj)
f(obj)
after = g(obj)
assert before == after
# Clearing a struct field must drop its reference (weakref dies after gc).
def test_refcount_pyobj_setfield():
import weakref, gc
class S(object):
def __init__(self):
self.p = None
def foo(wref, objfact):
s = S()
b = objfact()
s.p = b
wr = wref(b)
s.p = None
return wr
f = compile(foo, [object, object], backendopt=False)
class C(object):
pass
wref = f(weakref.ref, C)
gc.collect()
assert not wref()
# Reading a field out of a returned struct must incref properly.
def test_refcount_pyobj_setfield_increfs():
class S(object):
def __init__(self):
self.p = None
def goo(objfact):
s = S()
b = objfact()
s.p = b
return s
def foo(objfact):
s = goo(objfact)
return s.p
f = compile(foo, [object], backendopt=False)
class C(object):
pass
print f(C)
# `print` in a loop mallocs exactly one extra object (the stdout file).
def test_print():
def f():
for i in range(10):
print "xxx"
fn = compile(f, [])
fn(expected_extra_mallocs=1)
# c_name overrides the generated C symbol name.
def test_name():
def f():
return 3
f.c_name = 'pypy_xyz_f'
t = Translation(f, [], backend="c")
t.annotate()
compiled_fn = t.compile_c()
if py.test.config.option.view:
t.view()
assert 'pypy_xyz_f' in t.driver.cbuilder.c_source_filename.read()
# @entrypoint-decorated secondary entry points appear in the generated C.
def test_entrypoints():
def f():
return 3
key = "test_entrypoints42"
@entrypoint(key, [int], "foobar")
def g(x):
return x + 42
t = Translation(f, [], backend="c", secondaryentrypoints="test_entrypoints42")
t.annotate()
compiled_fn = t.compile_c()
if py.test.config.option.view:
t.view()
assert 'foobar' in t.driver.cbuilder.c_source_filename.read()
# export_struct exposes a raw struct under the given C name.
def test_exportstruct():
from pypy.rlib.exports import export_struct
def f():
return 42
FOO = Struct("FOO", ("field1", Signed))
foo = malloc(FOO, flavor="raw")
foo.field1 = 43
export_struct("BarStruct", foo._obj)
t = Translation(f, [], backend="c")
t.annotate()
compiled_fn = t.compile_c()
if py.test.config.option.view:
t.view()
assert ' BarStruct ' in t.driver.cbuilder.c_source_filename.read()
free(foo, flavor="raw")
# llhelper-wrapped bound callables stored in a struct can call themselves
# recursively through function pointers.
def test_recursive_llhelper():
from pypy.rpython.annlowlevel import llhelper
from pypy.rpython.lltypesystem import lltype
from pypy.rlib.objectmodel import specialize
from pypy.rlib.nonconst import NonConstant
FT = lltype.ForwardReference()
FTPTR = lltype.Ptr(FT)
STRUCT = lltype.Struct("foo", ("bar", FTPTR))
FT.become(lltype.FuncType([lltype.Ptr(STRUCT)], lltype.Signed))
class A:
def __init__(self, func, name):
self.func = func
self.name = name
def _freeze_(self):
return True
@specialize.memo()
def make_func(self):
f = getattr(self, "_f", None)
if f is not None:
return f
f = lambda *args: self.func(*args)
f.c_name = self.name
f.relax_sig_check = True
f.__name__ = "WRAP%s" % (self.name, )
self._f = f
return f
def get_llhelper(self):
return llhelper(FTPTR, self.make_func())
def f(s):
if s.bar == t.bar:
lltype.free(s, flavor="raw")
return 1
lltype.free(s, flavor="raw")
return 0
def g(x):
return 42
def chooser(x):
s = lltype.malloc(STRUCT, flavor="raw")
if x:
s.bar = llhelper(FTPTR, a_f.make_func())
else:
s.bar = llhelper(FTPTR, a_g.make_func())
return f(s)
a_f = A(f, "f")
a_g = A(g, "g")
t = lltype.malloc(STRUCT, flavor="raw", immortal=True)
t.bar = llhelper(FTPTR, a_f.make_func())
fn = compile(chooser, [bool])
assert fn(True)
# inhibit_tail_call on a graph must emit PYPY_INHIBIT_TAIL_CALL() right
# after the call site in the generated C source.
def test_inhibit_tail_call():
from pypy.rpython.lltypesystem import lltype
def foobar_fn(n):
return 42
foobar_fn._dont_inline_ = True
def main(n):
return foobar_fn(n)
#
t = Translation(main, [int], backend="c")
t.rtype()
t.context._graphof(foobar_fn).inhibit_tail_call = True
t.source_c()
lines = t.driver.cbuilder.c_source_filename.readlines()
for i, line in enumerate(lines):
if '= pypy_g_foobar_fn' in line:
break
else:
assert 0, "the call was not found in the C source"
assert 'PYPY_INHIBIT_TAIL_CALL();' in lines[i+1]
|
import React, {Component} from 'react';
class App extends Component {
render() {
const countries = ["India", "China", "Brazil", "Russia"];
return (
<div>
{countries.map((country, index) => (
<div key={index}>
{country}
</div>
))}
</div>
);
}
}
export default App; |
#!/usr/bin/env bash
# Remove the entries listed (one per line) in <file> from inside <directory>:
# first pass deletes files and symlinks, second pass (reverse-sorted so
# children come before parents) removes now-empty directories.
IFS=$'\n'
[ -n "$1" -a -n "$2" ] || {
echo "Usage: $0 <file> <directory>"
exit 1
}
[ -f "$1" -a -d "$2" ] || {
echo "File/directory not found"
exit 1
}
cat "$1" | (
cd "$2"
while read entry; do
# Stop at the first blank line.
[ -n "$entry" ] || break
# (not-a-directory OR a symlink) -> plain rm; real directories are kept.
[ ! -d "$entry" ] || [ -L "$entry" ] && rm -f "$entry"
done
)
sort -r "$1" | (
cd "$2"
while read entry; do
[ -n "$entry" ] || break
# rmdir only succeeds on empty dirs; errors are deliberately ignored.
[ -d "$entry" ] && rmdir "$entry" > /dev/null 2>&1
done
)
# Always exit 0 even if the last rmdir failed.
true
|
import axios from 'axios';
import { useEffect, useState, useContext } from 'react';
import { SidebarContext } from '../context/SidebarContext';
/**
 * Run `asyncFunction({ cancelToken })` on mount and whenever the sidebar's
 * isUpdate flag flips, exposing { data, error, loading }. The request is
 * cancelled and state updates are suppressed after unmount.
 */
const useAsync = (asyncFunction) => {
  // FIX: `[] || {}` always evaluated to `[]` — dead expression removed.
  const [data, setData] = useState([]);
  const [error, setError] = useState('');
  const [loading, setLoading] = useState(true);
  const { isUpdate, setIsUpdate } = useContext(SidebarContext);

  useEffect(() => {
    let unmounted = false;
    let source = axios.CancelToken.source();

    asyncFunction({ cancelToken: source.token })
      .then((res) => {
        if (!unmounted) {
          setData(res);
          setError('');
          setLoading(false);
        }
      })
      .catch((err) => {
        if (!unmounted) {
          // FIX: the old code set the same error/loading/data state in both
          // the isCancel and non-cancel branches (with a duplicated setError
          // before the check); collapsed to one identical-behavior path.
          setError(err.message);
          setLoading(false);
          setData([]);
        }
      });

    setIsUpdate(false);

    return () => {
      unmounted = true;
      source.cancel('Cancelled in cleanup');
    };
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [isUpdate]);

  return {
    data,
    error,
    loading,
  };
};
export default useAsync;
|
# Read two integers from stdin and print every integer from the first
# up to the second (prints nothing when first > second).
puts "Enter two numbers:"
first = gets.chomp.to_i
second = gets.chomp.to_i
first.upto(second) { |n| puts n }
def count_occurance(base_string, target_string):
    """Count how many characters of base_string equal target_string.

    target_string is compared against single characters, so it only
    matches when it is itself a one-character string.
    """
    count = 0
    for letter in base_string:
        if letter == target_string:
            count += 1
    return count


if __name__ == "__main__":
    # BUG FIX: the original module-level call used undefined globals
    # base_string/target_string and raised NameError on import; demo with
    # concrete sample values, guarded so importing stays side-effect free.
    print(count_occurance("banana", "a"))
<reponame>siyingpoof/main
package seedu.address.model.prescription;
import java.util.Objects;
import seedu.address.model.medicalhistory.ValidDate;
import seedu.address.model.person.PersonId;
import seedu.address.model.person.doctor.Doctor;
import seedu.address.model.person.patient.Patient;
/**
* Represents a Prescription in the docX.
*/
public class Prescription {
    // Identity field: resolved person objects, null until injected via setters.
    private Patient patient = null;
    private Doctor doctor = null;

    // Id field
    private PersonId patientId;
    private PersonId doctorId;

    // Data field
    private ValidDate date;
    private Description description;
    private Medicine medicine;

    /**
     * Constructs a prescription linking a patient and doctor (by id) to a
     * medicine, date and free-text description.
     */
    public Prescription(PersonId patientId, PersonId doctorId, ValidDate date,
                        Medicine medicine, Description description) {
        this.patientId = patientId;
        this.doctorId = doctorId;
        this.date = date;
        this.medicine = medicine;
        this.description = description;
    }

    public Description getDescription() {
        return this.description;
    }

    public Medicine getMedicine() {
        return this.medicine;
    }

    public PersonId getPatientId() {
        return this.patientId;
    }

    public void setPatient(Patient patient) {
        this.patient = patient;
    }

    public PersonId getDoctorId() {
        return this.doctorId;
    }

    public void setDoctor(Doctor doctor) {
        this.doctor = doctor;
    }

    public Doctor getDoctor() {
        return this.doctor;
    }

    public Patient getPatient() {
        return this.patient;
    }

    public ValidDate getDate() {
        return this.date;
    }

    /**
     * Returns true if both prescriptions have the same identity and data fields
     * (patient id, doctor id, date, description and medicine name).
     */
    @Override
    public boolean equals(Object other) {
        if (other == this) {
            return true;
        }
        if (!(other instanceof Prescription)) {
            return false;
        }
        Prescription otherPrescription = (Prescription) other;
        return otherPrescription.getPatientId().equals(this.getPatientId())
                && otherPrescription.getDoctorId().equals(this.getDoctorId())
                && otherPrescription.getDate().equals(this.getDate())
                && otherPrescription.getDescription().equals(this.getDescription())
                && otherPrescription.getMedicine().getName().equals(this.getMedicine().getName());
    }

    /**
     * Returns true if both prescriptions have the same medicine, patientId, doctorId and description
     */
    public boolean isSamePrescription(Prescription other) {
        if (other == this) {
            return true;
        }
        return other != null
                && other.getMedicine().getName().equals(this.getMedicine().getName())
                && other.getDoctorId().equals(this.getDoctorId())
                && other.getDate().equals(this.getDate())
                && other.getPatientId().equals(this.getPatientId())
                && other.getDescription().equals(this.getDescription());
    }

    @Override
    public int hashCode() {
        // BUG FIX: previously hashed doctor/patient (mutable, often null)
        // while equals() compares ids/date/medicine-name/description, so two
        // equal prescriptions could have different hash codes. Hash exactly
        // the fields equals() uses.
        return Objects.hash(patientId, doctorId, date, medicine.getName(), description);
    }

    @Override
    public String toString() {
        StringBuilder sb = new StringBuilder();
        // Doctor/patient display names intentionally omitted: the resolved
        // Doctor/Patient objects may not have been set.
        sb.append(" Patient ID: ");
        sb.append(getPatientId());
        sb.append(" Doctor ID: ");
        sb.append(getDoctorId());
        sb.append(" Medicine name: ");
        sb.append(this.medicine.getName());
        sb.append(" Date: ");
        sb.append(getDate());
        sb.append(" Description: ");
        sb.append(this.description.toString());
        return sb.toString();
    }
}
|
#!/usr/bin/env python3.4
import tensorflow as tf
import tflearn
import numpy as np
import numpy.random as npr
np.set_printoptions(precision=2)
# np.seterr(all='raise')
np.seterr(all='warn')
import argparse
import csv
import os
import sys
import time
import pickle as pkl
import json
import shutil
import setproctitle
from datetime import datetime
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
plt.style.use('bmh')
sys.path.append('../lib')
import olivetti
import bundle_entropy
import icnn_ebundle
# import bibsonomy
# import bundle_entropy
import bamos_opt
def entr(x):
    """Row-wise binary entropy: sum over axis 1 of -p*ln(p) - (1-p)*ln(1-p).

    NaN terms (arising at p == 0 or p == 1, where 0*log(0) is undefined)
    are treated as 0, matching the limit of the entropy expression.
    """
    terms = -x * np.log(x) - (1. - x) * np.log(1. - x)
    terms = np.where(np.isnan(terms), 0.0, terms)
    return np.sum(terms, axis=1)
def main():
    """Compare bundle-entropy optimization against PGD baselines on one
    Olivetti test image and plot objective-vs-iteration curves.

    Loads an ICNN checkpoint (positional ``chkpt``), runs the bundle-entropy
    solver (dumping per-iteration estimates as PNGs under ``<save>/imgs``),
    then runs PGD at several learning rates, and writes obj.png / obj.pdf
    under ``--save``.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('chkpt', type=str)
    parser.add_argument('--save', type=str, default='work')
    parser.add_argument('--layerSizes', type=int, nargs='+', default=[600, 600])
    parser.add_argument('--seed', type=int, default=42)
    # NOTE(review): --dataset and --layerSizes are parsed but never read below.
    parser.add_argument('--dataset', type=str, choices=['bibtex', 'bookmarks', 'delicious'],
                        default='bibtex')
    args = parser.parse_args()
    setproctitle.setproctitle('bamos.icnn.ebundle')
    # Seed both numpy and TF for reproducible sampling.
    npr.seed(args.seed)
    tf.set_random_seed(args.seed)
    data = olivetti.load("data/olivetti")
    meanY = np.mean(data['trainY'], axis=0)
    nTrain = data['trainX'].shape[0]
    nTest = data['testX'].shape[0]
    inputSz = list(data['trainX'][0].shape)
    # NOTE(review): indexes trainY[1] while inputSz uses trainX[0]; per-sample
    # shapes are presumably uniform, but confirm the asymmetry is intentional.
    outputSz = list(data['trainY'][1].shape)
    imgDir = os.path.join(args.save, 'imgs')
    if not os.path.exists(imgDir):
        os.makedirs(imgDir)
    config = tf.ConfigProto(log_device_placement=False)
    config.gpu_options.allow_growth = True
    with tf.Session(config=config) as sess:
        model = icnn_ebundle.Model(inputSz, outputSz, sess)
        model.load(args.chkpt)
        nSamples = 1
        # Bundle Entropy
        bundleIter, bundleTime, bundleEs = [], [], []
        def fg(yhats):
            # Energy and flat gradient of the ICNN at the candidate outputs.
            yhats_shaped = yhats.reshape([nSamples]+outputSz)
            fd = {model.x_: xBatch_flipped, model.y_: yhats_shaped}
            e, ge = sess.run([model.E_, model.dE_dyFlat_], feed_dict=fd)
            return e, ge
        def cb(iterNum, es, x):
            # Per-iteration callback: save the current estimate as an image
            # and record the entropy-scaled objective. Closes over `start`,
            # which is assigned below before the solver invokes this.
            yhats_shaped = x.reshape([nSamples]+outputSz)
            plt.imsave(os.path.join(imgDir, '{:05d}.png'.format(iterNum)),
                       yhats_shaped.squeeze(), cmap=mpl.cm.gray)
            bundleIter.append(iterNum)
            es_entr = es - entr(x)
            bundleEs.append(np.mean(es_entr))
            bundleTime.append(time.time()-start)
        start = time.time()
        I = npr.randint(nTrain, size=nSamples)
        # xBatch = data['trainX'][I, :]
        # yBatch = data['trainY'][I, :]
        xBatch = data['testX'][[0],:]
        yBatch = data['testY'][[0],:]
        # Reverses axis 2 — presumably a horizontal flip of the image; confirm layout.
        xBatch_flipped = xBatch[:,:,::-1,:]
        # Start every sample from the flattened training-set mean label.
        y0 = np.expand_dims(meanY, axis=0).repeat(nSamples, axis=0)
        y0 = y0.reshape((nSamples, -1))
        yN, G, h, lam, ys, nIters = bundle_entropy.solveBatch(
            fg, y0, nIter=30, callback=cb)
        yN_shaped = yN.reshape([nSamples]+outputSz)
        # PGD
        pgdIter, pgdTime, pgdEs = {}, {}, {}
        def fg(yhats):
            # PGD variant evaluates the entropy-regularized energy directly.
            yhats_shaped = yhats.reshape([nSamples]+outputSz)
            fd = {model.x_: xBatch_flipped, model.y_: yhats_shaped}
            e, ge = sess.run([model.E_entr_, model.dE_entr_dyFlat_], feed_dict=fd)
            return e, ge
        def proj(x):
            # Projection onto the open unit box, keeping log() finite.
            return np.clip(x, 1e-6, 1.-1e-6)
        lrs = [0.1, 0.01, 0.001]
        for lr in lrs:
            pgdIter[lr] = []
            pgdTime[lr] = []
            pgdEs[lr] = []
            def cb(iterNum, es, gs, bestM):
                # Record iteration stats for this learning rate.
                pgdIter[lr].append(iterNum)
                pgdEs[lr].append(np.mean(es))
                pgdTime[lr].append(time.time()-start)
            start = time.time()
            y0 = np.expand_dims(meanY, axis=0).repeat(nSamples, axis=0)
            y0 = y0.reshape((nSamples, -1))
            bamos_opt.pgd.solve_batch(fg, proj, y0, lr=lr, rollingDecay=0.5, eps=1e-3,
                                      minIter=50, maxIter=50, callback=cb)
        # Plot all PGD curves plus the bundle-entropy curve, then save.
        fig, ax = plt.subplots(1, 1)
        plt.xlabel('Iteration')
        plt.ylabel('Entropy-Scaled Objective')
        for lr in lrs:
            plt.plot(pgdIter[lr], pgdEs[lr], label='PGD, lr={}'.format(lr))
        plt.plot(bundleIter, bundleEs, label='Bundle Entropy', color='k',
                 linestyle='dashed')
        plt.legend()
        # ax.set_yscale('log')
        for ext in ['png', 'pdf']:
            fname = os.path.join(args.save, 'obj.'+ext)
            plt.savefig(fname)
            print("Created {}".format(fname))
|
package eon.graph;
import java.util.ArrayList;
import eon.network.*;
/**
* @restructured by vxFury
*
*/
public class SearchConstraint {
    // Layer whose link list defines the size and indexing of the bitmask below.
    private Layer associatedLayer = null;
    // Nodes excluded from the search (list form only; no mask kept for nodes).
    private ArrayList<Node> excludedNodelist = null;
    // Links excluded from the search; mirrored in linkmask for O(1) lookups.
    private ArrayList<Link> excludedLinklist = null;
    // Bitmask over link indices: bit (index & 0x1F) of word (index >>> 5).
    private ArrayList<Integer> linkmask = null;

    /** Creates an empty constraint for the given layer and allocates the mask. */
    public SearchConstraint(Layer associatedLayer) {
        this.associatedLayer = associatedLayer;
        excludedLinklist = new ArrayList<Link>();
        excludedNodelist = new ArrayList<Node>();
        initLinkMask();
    }

    /** (Re)allocates the bitmask: one 32-bit word per 32 links, all bits cleared. */
    public void initLinkMask() {
        int llsize = associatedLayer.getLinkList().size();
        // ceil(llsize / 32): one extra word when llsize is not a multiple of 32.
        int size = ((llsize >>> 5) + ((llsize & 0x1F) == 0 ? (0) : (1)));
        linkmask = new ArrayList<Integer>();
        for(int index = 0; index < size; index ++) {
            linkmask.add(0x0);
        }
    }

    /** Marks a link as excluded: appends to the list and sets its mask bit. */
    public void addLink(Link link) {
        excludedLinklist.add(link);
        int index = link.getIndex() >>> 5;
        int offset = link.getIndex() & 0x1F;
        int status = linkmask.get(index);
        int check = 0x1 << offset;
        linkmask.set(index, status | check);
    }

    /** Un-excludes a link: removes it from the list and clears its mask bit. */
    public void removeLink(Link link) {
        excludedLinklist.remove(link);
        int index = link.getIndex() >>> 5;
        int offset = link.getIndex() & 0x1F;
        int status = linkmask.get(index);
        // Complement produces a mask with only this link's bit cleared.
        int check = ~(0x1 << offset);
        linkmask.set(index, status & check);
    }

    /** O(1) exclusion test via the bitmask (does not scan the list). */
    public boolean containsLink(Link link) {
        int index = link.getIndex() >>> 5;
        int offset = link.getIndex() & 0x1F;
        return (linkmask.get(index) & (0x1 << offset)) != 0;
    }

    public Layer getAssociatedLayer() {
        return associatedLayer;
    }

    /**
     * NOTE(review): does not resize or rebuild the bitmask; callers switching
     * layers should call initLinkMask() afterwards or mask indexing goes stale.
     */
    public void setAssociatedLayer(Layer associatedLayer) {
        this.associatedLayer = associatedLayer;
    }

    public ArrayList<Node> getExcludedNodeList() {
        return excludedNodelist;
    }

    public void setExcludedNodeList(ArrayList<Node> excludedNodeList) {
        this.excludedNodelist = excludedNodeList;
    }

    /** Excludes every link in the given list (list + mask kept in sync). */
    public void addAllLinks(ArrayList<Link> linkList) {
        for(Link link : linkList) {
            addLink(link);
        }
    }

    public ArrayList<Link> __getExcludedLinkList() {
        return excludedLinklist;
    }

    /**
     * NOTE(review): replaces the list without updating linkmask; the mask and
     * list can diverge after this call — confirm intended usage.
     */
    public void __setExcludedLinkList(ArrayList<Link> excludedLinklist) {
        this.excludedLinklist = excludedLinklist;
    }
}
|
package util;
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URL;
import java.net.URLConnection;
import org.json.simple.JSONArray;
import org.json.simple.JSONObject;
import org.json.simple.parser.JSONParser;
import org.json.simple.parser.ParseException;
public class Json_Util {
public static JSONObject Json_Util(String[] args) {
JSONParser parser = new JSONParser();
try {
URL oracle = new URL(args[0]); // URL to Parse
URLConnection yc = oracle.openConnection();
BufferedReader in = new BufferedReader(new InputStreamReader(yc.getInputStream()));
String inputLine;
while ((inputLine = in.readLine()) != null) {
JSONArray a = (JSONArray) parser.parse(inputLine);
// Loop through each item
for (Object o : a) {
JSONObject obj = (JSONObject) o;
if(args[1] == "Mixer") {
Boolean online = (Boolean) obj.get("online");
if(online == true) {
return obj;
}
}
else if(args[1] == "Twitch"){
String type = (String) obj.get("type");
if(type == "live") {
return obj;
}
}else if (args[1] == "test") {
return obj;
}
System.out.println("\n");
}
}
in.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} catch (ParseException e) {
e.printStackTrace();
}
return null;
}
public static void main(String[] args) {
}
} |
import nibabel as nib
import numpy as np
from scipy import ndimage
# initalize data
work_dir = '/mindhive/saxelab3/anzellotti/forrest/output_denoise/'
def apply_smoothing(input_path: str, output_path: str, sigma: float) -> None:
    """Gaussian-smooth a NIfTI volume and write the result to disk.

    The output image reuses the affine of the source image so spatial
    orientation is preserved.
    """
    source_img = nib.load(input_path)
    blurred = ndimage.gaussian_filter(source_img.get_fdata(), sigma)
    nib.save(nib.Nifti1Image(blurred, source_img.affine), output_path)
const path = require("path");
const fs = require("fs");
const templatesDir = path.resolve(__dirname, "../templates");
const outputDir = path.resolve(__dirname, "../output");
// Render the full team page: manager card(s) are written to output/main1.html
// first, then engineer/intern cards are merged into the final output/main.html.
const render = employees => {
  const html1 = []; //added to split up manager and engineer/intern records
  const html2 = []; //added to split up manager and engineer/intern records
  html1.push(employees
    .filter(employee => employee.getRole() === "Manager")
    .map(manager => renderManager(manager))
  );
  html2.push(employees
    .filter(employee => employee.getRole() === "Engineer")
    .map(engineer => renderEngineer(engineer))
  );
  html2.push(employees
    .filter(employee => employee.getRole() === "Intern")
    .map(intern => renderIntern(intern))
  );
  // I created this to ensure functions were running serially.
  async function init() {
    console.log("in init function");
    await writemain1(html1);
    console.log("in init - just before writemain");
    await writemain(html2);
  }
  // 1st function to write initial main.html with manager card - calling it Main1.html
  function writemain1(html1) {
    return new Promise(function(resolve, reject) {
      const managerrend = renderMain1(html1.join(""));
      console.log("managerrend = ", managerrend);
      const os = require('os');
      const fs = require('fs-extra');
      const file = './output/main1.html';
      // BUG FIX: fs.unlinkSync throws ENOENT when the file does not exist
      // (e.g. on a fresh checkout); only delete it when present.
      if (fs.existsSync(file)) {
        fs.unlinkSync(file);
      }
      const options = { flag: 'w' };
      console.log("in writemain1 just before output file...");
      fs.writeFileSync(file, `${managerrend}${os.EOL}${os.EOL}`, options);
      console.log("in writemain1 just after output file...")
      resolve();
    })
  }
  // 2nd function to write main.html with engineer and intern cards.
  function writemain(html2) {
    return new Promise(function(resolve, reject) {
      const employeerend = renderMain2(html2.join(""));
      console.log("employeerend = ", employeerend);
      const os = require('os');
      const fs = require('fs-extra');
      const file = './output/main.html';
      // BUG FIX: same ENOENT guard as in writemain1.
      if (fs.existsSync(file)) {
        fs.unlinkSync(file); //hopefully deletes file before writing
      }
      const options = { flag: 'w' };
      console.log("in writemain just before output file...");
      // BUG FIX: fs.outputFile is async and was not awaited, so this promise
      // resolved before the write finished; use the synchronous variant.
      fs.outputFileSync(file, `${employeerend}${os.EOL}${os.EOL}`, options);
      console.log("in writemain just after output file...")
      resolve();
    })
  }
  init();
};
// Fill the manager card template with this manager's details.
const renderManager = manager => {
  const blank = fs.readFileSync(path.resolve(templatesDir, "manager.html"), "utf8");
  const fields = {
    name: manager.getName(),
    role: manager.getRole(),
    email: manager.getEmail(),
    id: manager.getId(),
    officeNumber: manager.getOfficeNumber(),
  };
  return Object.entries(fields).reduce(
    (html, [placeholder, value]) => replacePlaceholders1(html, placeholder, value),
    blank
  );
};
// Fill the engineer card template with this engineer's details.
const renderEngineer = engineer => {
  const blank = fs.readFileSync(path.resolve(templatesDir, "engineer.html"), "utf8");
  const fields = {
    name: engineer.getName(),
    role: engineer.getRole(),
    email: engineer.getEmail(),
    id: engineer.getId(),
    github: engineer.getGithub(),
  };
  return Object.entries(fields).reduce(
    (html, [placeholder, value]) => replacePlaceholders2(html, placeholder, value),
    blank
  );
};
// Fill the intern card template with this intern's details.
const renderIntern = intern => {
  const blank = fs.readFileSync(path.resolve(templatesDir, "intern.html"), "utf8");
  const fields = {
    name: intern.getName(),
    role: intern.getRole(),
    email: intern.getEmail(),
    id: intern.getId(),
    school: intern.getSchool(),
  };
  return Object.entries(fields).reduce(
    (html, [placeholder, value]) => replacePlaceholders2(html, placeholder, value),
    blank
  );
};
// Inject the rendered manager card(s) into the base main.html template.
const renderMain1 = html1 => {
  const baseTemplate = fs.readFileSync(path.resolve(templatesDir, "main.html"), "utf8");
  return replacePlaceholders1(baseTemplate, "manager", html1);
};
// Globally replace every "{{ <placeholder> }}" token in the template with value.
const replacePlaceholders1 = (template, placeholder, value) => {
  const token = new RegExp(`{{ ${placeholder} }}`, "gm");
  return template.replace(token, value);
};
// Merge the engineer/intern cards into the intermediate main1.html produced
// by renderMain1 (read back from the output directory).
const renderMain2 = html2 => {
  const partial = fs.readFileSync(path.resolve(outputDir, "main1.html"), "utf8");
  console.log("template ", partial);
  console.log("html2 = ", html2);
  return replacePlaceholders2(partial, "team", html2);
};
// Same global "{{ <placeholder> }}" substitution, used for team/card tokens.
const replacePlaceholders2 = (template, placeholder, value) =>
  template.replace(new RegExp("{{ " + placeholder + " }}", "gm"), value);

module.exports = render;
<filename>website/docusaurus.config.js
/* eslint-disable no-undef */
// Docusaurus v2 site configuration for the runty documentation site.
module.exports = {
  title: 'runty',
  tagline: 'Extensible conditional string micro templates',
  url: 'https://runty.js.org',
  baseUrl: '/',
  // Fail the build on broken internal links.
  onBrokenLinks: 'throw',
  favicon: 'img/favicon.ico',
  organizationName: 'nderscore',
  projectName: 'runty',
  themeConfig: {
    // Top navigation bar: docs, live demo, and the GitHub repo.
    navbar: {
      title: 'runty',
      logo: {
        alt: 'runty logo',
        src: 'img/logo.svg',
      },
      items: [
        {
          to: 'docs/',
          activeBasePath: 'docs',
          label: 'Docs',
          position: 'left',
        },
        {
          to: 'demo/',
          label: 'Demo',
          position: 'left',
        },
        {
          href: 'https://github.com/nderscore/runty',
          label: 'GitHub',
          position: 'right',
        },
      ],
    },
    // Footer link columns.
    footer: {
      style: 'dark',
      links: [
        {
          title: 'Docs',
          items: [
            {
              label: 'API Reference',
              to: 'docs/api',
            },
            {
              label: 'Standard Library (fns)',
              to: 'docs/fns',
            },
          ],
        },
        {
          title: 'Community',
          items: [
            {
              label: 'Stack Overflow',
              href: 'https://stackoverflow.com/questions/tagged/runty',
            },
          ],
        },
        {
          title: 'More',
          items: [
            {
              label: 'GitHub',
              href: 'https://github.com/nderscore/runty',
            },
          ],
        },
      ],
      copyright: `Copyright © ${new Date().getFullYear()} _nderscore. Built with Docusaurus.`,
    },
    googleAnalytics: {
      trackingID: 'UA-179582829-1',
      anonymizeIP: true,
    },
    // Algolia DocSearch configuration.
    // NOTE(review): '<KEY>' looks like a redacted placeholder — the real
    // search-only API key must be injected before search works.
    algolia: {
      apiKey: '<KEY>',
      indexName: 'runty_js',
      searchParameters: {},
    },
  },
  presets: [
    [
      '@docusaurus/preset-classic',
      {
        docs: {
          sidebarPath: require.resolve('./sidebars.js'),
          // Please change this to your repo.
          editUrl: 'https://github.com/nderscore/runty/edit/master/website/',
        },
        theme: {
          customCss: require.resolve('./src/css/custom.css'),
        },
      },
    ],
  ],
};
|
#! /bin/bash
# Build the .NET solution, run the test suite inside docker-compose, and
# exit with the test container's exit code so CI reports pass/fail correctly.
set -e

# BUG FIX: quote the command substitution and $0 so the script still works
# when its path contains spaces (unquoted expansion word-splits).
pushd "$( dirname "$0" )"

dotnet publish ../ --configuration Release

# Recreate the compose stack from scratch and start it detached.
docker-compose rm -f
docker-compose -f docker-compose.yml -p ci up --build --force-recreate -d

# Block until the test container finishes, capturing its exit status,
# then stream its logs for the CI output.
exitCode=$(docker wait ci_test_1)
docker logs -f ci_test_1

docker-compose stop -t 1
popd

exit $exitCode
|
package ctrlcrd
import (
"fmt"
boomv1 "github.com/caos/orbos/internal/api/boom/v1"
networkingv1 "github.com/caos/orbos/internal/api/networking/v1"
"github.com/caos/orbos/internal/ctrlcrd/boom"
"github.com/caos/orbos/internal/ctrlcrd/networking"
"github.com/caos/orbos/internal/utils/clientgo"
"github.com/caos/orbos/mntr"
"github.com/caos/orbos/pkg/kubernetes"
"k8s.io/apimachinery/pkg/runtime"
clientgoscheme "k8s.io/client-go/kubernetes/scheme"
ctrl "sigs.k8s.io/controller-runtime"
)
const (
Networking = "networking"
Boom = "boom"
)
var (
scheme = runtime.NewScheme()
)
func init() {
if err := clientgoscheme.AddToScheme(scheme); err != nil {
panic(fmt.Errorf("adding clientgo to scheme failed: %w", err))
}
if err := networkingv1.AddToScheme(scheme); err != nil {
panic(fmt.Errorf("adding networking v1 to scheme failed: %w", err))
}
if err := boomv1.AddToScheme(scheme); err != nil {
panic(fmt.Errorf("adding boom v1 to scheme failed: %w", err))
}
}
// Start connects to the Kubernetes cluster identified by kubeconfig, builds a
// controller-runtime manager (metrics on metricsAddr, webhook port 9443, no
// leader election) and registers a reconciler for each requested feature
// (Networking and/or Boom). It then runs the manager until the process
// receives a termination signal; the call blocks and a nil return means a
// clean shutdown. Unknown feature names are silently ignored.
func Start(monitor mntr.Monitor, version, toolsDirectoryPath, metricsAddr string, kubeconfig string, features ...string) error {
	cfg, err := clientgo.GetClusterConfig(monitor, kubeconfig)
	if err != nil {
		return err
	}
	k8sClient, err := kubernetes.NewK8sClientWithConfig(monitor, cfg)
	if err != nil {
		return err
	}
	monitor.Info("successfully connected to kubernetes cluster")
	mgr, err := ctrl.NewManager(cfg, ctrl.Options{
		Scheme:             scheme,
		MetricsBindAddress: metricsAddr,
		Port:               9443,
		LeaderElection:     false,
		LeaderElectionID:   "98jasd12l.caos.ch",
	})
	if err != nil {
		return fmt.Errorf("starting manager failed: %w", err)
	}
	// Wire one reconciler per requested feature into the manager.
	for _, feature := range features {
		switch feature {
		case Networking:
			monitor.Debug("Setting up networking")
			if err = (&networking.Reconciler{
				ClientInt: k8sClient,
				Monitor:   monitor,
				Scheme:    mgr.GetScheme(),
				Version:   version,
			}).SetupWithManager(mgr); err != nil {
				return fmt.Errorf("creating controller failed: %w", err)
			}
			monitor.Debug("Networking setup done")
		case Boom:
			monitor.Debug("Setting up BOOM")
			if err = (&boom.Reconciler{
				ClientInt:          k8sClient,
				Monitor:            monitor,
				Scheme:             mgr.GetScheme(),
				ToolsDirectoryPath: toolsDirectoryPath,
				Version:            version,
			}).SetupWithManager(mgr); err != nil {
				return fmt.Errorf("creating controller failed: %w", err)
			}
			monitor.Debug("BOOM setup done")
		}
	}
	monitor.Debug("Controller is starting")
	// Blocks until SIGTERM/SIGINT; SetupSignalHandler wires the stop channel.
	if err := mgr.Start(ctrl.SetupSignalHandler()); err != nil {
		return fmt.Errorf("running manager failed: %w", err)
	}
	monitor.Debug("Controller is done")
	return nil
}
|
#!/bin/bash
# Visualize OroJaR-learned latent directions for a BigGAN generator on the
# "church" ImageNet class (497), using the coarse search space.
#
# Rough flag groups (comments cannot go inside the continuation, so they are
# summarized here):
#   --load_A / --search_space / --ndirs / --path_size : learned direction
#       matrix checkpoint, latent subspace, direction count, traversal length
#   --G_* / --SN_eps / --BN_eps / --hier / --dim_z / --shared_dim : BigGAN
#       generator architecture and numerics settings (must match training)
#   --ema / --use_ema : sample with the EMA copy of the generator weights
#   --parallel / --batch_size / --seed : execution settings
python orojar_vis_directions.py \
--load_A checkpoints/directions/orojar/church_coarse.pt \
--search_space coarse \
--path_size 2.5 \
--ndirs 40 \
--fix_class 497 \
--experiment_name church_coarse \
--parallel --batch_size 16 \
--G_B2 0.999 \
--G_attn 64 \
--G_nl inplace_relu \
--SN_eps 1e-6 --BN_eps 1e-5 --adam_eps 1e-6 \
--G_ortho 0.0 \
--G_shared \
--G_init ortho \
--hier --dim_z 120 --shared_dim 128 \
--G_eval_mode \
--G_ch 96 \
--ema --use_ema --ema_start 20000 \
--test_every 10000 --save_every 1000 --num_best_copies 5 --num_save_copies 2 --seed 0 \
--use_multiepoch_sampler \
--resume \
--num_epochs 50
<reponame>frgomes/sri-mobile-examples
package sri.mobile.examples.uiexplorer
import sri.mobile.examples.uiexplorer.components.{
UIExplorerDetailsScreen,
UIExplorerListScreen
}
import sri.navigation._
import sri.navigation.navigators._
import sri.universal.apis.AppRegistry
object MobileApp {
  // Entry point: builds the navigation tree and registers the app with the
  // native bridge under the name "UIExplorer".
  def main(args: Array[String]) = {
    // Stack navigator with two screens: the list screen uses a static title,
    // the details screen derives its title from the navigation params passed
    // by the caller (params.get throws if no params were supplied — the list
    // screen is presumably always navigating with params; confirm).
    val root = StackNavigator(
      registerStackScreen[UIExplorerListScreen](navigationOptions =
        NavigationStackScreenOptions(title = "UIExplorer")),
      registerStackScreen[UIExplorerDetailsScreen](
        navigationOptionsDynamic =
          (props: NavigationScreenConfigProps[UIExplorerDetailsScreen]) =>
            NavigationStackScreenOptions(
              title = props.navigation.state.params.get.title))
    )
    AppRegistry.registerComponent("UIExplorer", () => root)
  }
}
|
package com.nortal.spring.cw.core.cache;
import net.sf.ehcache.Ehcache;
/**
* Imported and refactored from EMPIS project
*
* @author <NAME> <<EMAIL>>
* @since 15.02.2013
*
*/
public class CacheLoaderImpl implements CacheLoader {

    /** Backing Ehcache instance, populated via setter injection. */
    protected Ehcache cache;

    /** Logical identifier for the kind of data this loader caches. */
    protected String cacheType;

    public void setCache(Ehcache cache) {
        this.cache = cache;
    }

    public String getCacheType() {
        return this.cacheType;
    }

    public void setCacheType(String cacheType) {
        this.cacheType = cacheType;
    }
}
|
<reponame>elhananby/flydra
from __future__ import division
from __future__ import with_statement
from __future__ import print_function
from __future__ import absolute_import
if 1:
# deal with old files, forcing to numpy
import tables.flavor
tables.flavor.restrict_flavors(keep=["numpy"])
import os, sys, math, contextlib, collections, warnings
import pkg_resources
import numpy as np
import tables as PT
from optparse import OptionParser
import flydra_core.reconstruct as reconstruct
import motmot.ufmf.ufmf as ufmf
import motmot.imops.imops as imops
import flydra_analysis.a2.utils as utils
import flydra_analysis.analysis.result_utils as result_utils
from . import core_analysis
import scipy.ndimage
import cairo
from . import benu
import adskalman.adskalman
from .tables_tools import clear_col, open_file_safe
font_size = 14
def shift_image(im, xy):
    """Return a copy of ``im`` translated by ``xy``.

    With ``xy = (dx, dy)``, output pixel (r, c) is copied from input pixel
    (r + dy, c + dx); samples falling outside the image become 0 (the
    geometric_transform default constant fill), and order=0 means no
    interpolation.
    """
    dx, dy = xy

    def sample_coords(out_coords):
        # geometric_transform maps each *output* coordinate to the *input*
        # coordinate it should be copied from.
        return (out_coords[0] + dy, out_coords[1] + dx)

    return scipy.ndimage.geometric_transform(im, sample_coords, im.shape, order=0)
def get_cam_id_from_filename(filename, all_cam_ids):
    """Return the unique cam_id occurring as a substring of ``filename``.

    Args:
        filename: File name (or path) to inspect.
        all_cam_ids: Iterable of candidate camera identifiers.

    Returns:
        The single matching cam_id, or None when no candidate matches.

    Raises:
        ValueError: If more than one cam_id matches (ambiguous filename).
    """
    # guess cam_id
    # (removed the unused match counter the original kept alongside this)
    found_cam_id = None
    for cam_id in all_cam_ids:
        if cam_id in filename:
            if found_cam_id is not None:
                raise ValueError("cam_id found more than once in filename")
            found_cam_id = cam_id
    return found_cam_id
def plot_image_subregion(
    raw_im,
    mean_im,
    absdiff_im,
    roiradius,
    fname,
    user_coords,
    scale=4.0,
    view="orig",
    extras=None,
):
    """Render the raw, absdiff, and mean ROI patches side by side to a file.

    The output format is chosen from the extension of ``fname`` (.pdf, .svg,
    or .png). ``user_coords`` is the (l, b, r, t) bounds of the patches in
    image ("user") coordinates; ``view`` selects an axis transform ("orig",
    "rot -90", or "rot 180"). ``extras`` is accepted but currently unused
    beyond defaulting to an empty dict.
    """
    if extras is None:
        extras = {}
    output_ext = os.path.splitext(fname)[1].lower()
    roisize = 2 * roiradius
    # One panel per image type, laid out horizontally with margins.
    imtypes = ["raw", "absdiff", "mean"]
    margin = 10
    square_edge = roisize * scale
    width = int(round(len(imtypes) * square_edge + (len(imtypes) + 1) * margin))
    height = int(round(square_edge + 2 * margin))
    if output_ext == ".pdf":
        output_surface = cairo.PDFSurface(fname, width, height)
    elif output_ext == ".svg":
        output_surface = cairo.SVGSurface(fname, width, height)
    elif output_ext == ".png":
        output_surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, width, height)
    else:
        raise ValueError("unknown output extension %s" % output_ext)
    ctx = cairo.Context(output_surface)
    # fill with white
    ctx.set_source_rgb(1, 1, 1)
    ctx.rectangle(0, 0, width, height)
    ctx.fill()
    user_l, user_b, user_r, user_t = user_coords
    # setup transform
    # calculate image boundary (user coords)
    for im_idx, im in enumerate(imtypes):
        if im == "raw":
            display_im = raw_im
        elif im == "mean":
            display_im = mean_im
        elif im == "absdiff":
            # Boost the difference image's contrast for visibility.
            display_im = np.clip(5 * absdiff_im, 0, 255)
        # set transform - make a patch of the cairo
        # device be addressed with our image space
        # coords
        device_l = (im_idx + 1) * margin + im_idx * square_edge
        device_b = margin
        ctx.identity_matrix()  # reset
        if view == "orig":
            matrix = cairo.Matrix(
                xx=scale,
                yx=0,
                xy=0,
                yy=scale,
                x0=(device_l - scale * user_l),
                y0=(device_b - scale * user_b),
            )
        elif view == "rot -90":
            # Swapped axes: x scales with user b/t, y with user l/r.
            matrix = cairo.Matrix(
                xx=0,
                yx=scale,
                xy=scale,
                yy=0,
                x0=(device_l - scale * user_b),
                y0=(device_b - scale * user_l),
            )
        elif view == "rot 180":
            # Negated scale flips both axes about the patch origin.
            matrix = cairo.Matrix(
                xx=-scale,
                yx=0,
                xy=0,
                yy=-scale,
                x0=(device_l + scale * user_r),
                y0=(device_b + scale * user_t),
            )
        else:
            raise ValueError("unknown view '%s'" % view)
        ctx.set_matrix(matrix)
        ## print 'device_l-user_l, device_b-user_b',device_l-user_l, device_b-user_b
        ## #ctx.translate(device_l-user_l, device_b-user_b)
        ## if scale!= 1.0:
        ##     ctx.scale( scale, scale )
        ##     #raise NotImplementedError('')
        ## ctx.translate(device_l-user_l, device_b-user_b)
        ## #print 'square_edge/roisize, square_edge/roisize',square_edge/roisize, square_edge/roisize
        ## #ctx.scale( roisize/square_edge, square_edge/roisize)
        if 1:
            # Paint the image patch through the current transform.
            in_surface = benu.numpy2cairo(display_im.astype(np.uint8))
            ctx.rectangle(user_l, user_b, display_im.shape[1], display_im.shape[0])
            if 1:
                ctx.save()
                ctx.set_source_surface(in_surface, user_l, user_b)
                ctx.paint()
                ctx.restore()
            else:
                ctx.set_source_rgb(0, 0.3, 0)
                ctx.fill()
        if 0:
            # Debug rectangles (disabled): visualize the transform extents.
            ctx.move_to(user_l, user_b)
            ctx.line_to(user_r, user_b)
            ctx.line_to(user_r, user_t)
            ctx.line_to(user_l, user_t)
            ctx.line_to(user_l, user_b)
            ctx.close_path()
            ctx.set_source_rgb(0, 1, 0)
            ctx.fill()
            ctx.move_to(user_l + 5, user_b + 5)
            ctx.line_to(user_r - 40, user_b + 5)
            ctx.line_to(user_r - 40, user_t - 40)
            ctx.line_to(user_l + 5, user_t - 40)
            ctx.line_to(user_l + 5, user_b + 5)
            ctx.close_path()
            ctx.set_source_rgb(0, 0, 1)
            ctx.fill()
    # PNG surfaces must be written explicitly; vector surfaces emit a page.
    if output_ext == ".png":
        output_surface.write_to_png(fname)
    else:
        ctx.show_page()
    output_surface.finish()
def flatten_image_stack(image_framenumbers, ims, im_coords, camn_pt_no_array, N=None):
    """Take a stack of several images and flatten by finding min pixel.

    For each center frame, the up-to-N images (N odd) whose frame numbers
    fall inside the centered window are combined with a per-pixel minimum.
    Frame numbers may have gaps; only frames actually present contribute.

    Args:
        image_framenumbers: strictly increasing array of frame numbers.
        ims: image array per entry of ``image_framenumbers``.
        im_coords: per-image coordinate rows; first two columns are averaged
            (presumably the lower-left corner of each patch — confirm).
        camn_pt_no_array: per-image ids propagated into the result.
        N: window size in frames; must be odd and is required.

    Returns:
        List of tuples (center_fno, flattened_im, mean_lowerleft, camn_pt_no,
        center_idx, orig_idxs_in_average).
    """
    if N is None:
        raise ValueError("N must be specified")
    # Frame numbers must be strictly increasing for searchsorted below.
    assert np.all((image_framenumbers[1:] - image_framenumbers[:-1]) > 0)
    all_framenumbers = np.arange(
        image_framenumbers[0], image_framenumbers[-1] + 1, dtype=np.int64
    )
    assert N % 2 == 1
    offset = N // 2
    results = []
    # Iterate over every frame number that has a full window on both sides.
    for center_fno in range(offset, len(all_framenumbers) - offset):
        center_fno += all_framenumbers[0]
        # Index of the entry at (or just before) the center frame number.
        center_idx = np.searchsorted(image_framenumbers, center_fno, side="right") - 1
        camn_pt_no = camn_pt_no_array[center_idx]
        orig_idxs_in_average = []
        ims_to_average = []
        coords_to_average = []
        # Collect only frames actually present inside the window (gaps skipped).
        for fno in range(center_fno - offset, center_fno + offset + 1):
            idx = np.searchsorted(image_framenumbers, fno, side="right") - 1
            if image_framenumbers[idx] == fno:
                orig_idxs_in_average.append(idx)
                ims_to_average.append(ims[idx])
                coords_to_average.append(im_coords[idx])
        n_images = len(coords_to_average)
        if 1:
            # XXX this is not very efficient.
            to_av = np.array(ims_to_average)
            ## print 'fno %d: min %.1f max %.1f'%(center_fno, to_av.min(), to_av.max())
            # av_im = np.mean( to_av, axis=0 )
            if to_av.shape == (0,):
                # No contributing frames in this window.
                av_im = np.zeros(
                    (2, 2), dtype=np.uint8
                )  # just create a small blank image
                mean_lowerleft = np.array([np.nan, np.nan])
            else:
                # Per-pixel minimum across the stacked window images.
                av_im = np.min(to_av, axis=0)
                coords_to_average = np.array(coords_to_average)
                mean_lowerleft = np.mean(coords_to_average[:, :2], axis=0)
        results.append(
            (
                center_fno,
                av_im,
                mean_lowerleft,
                camn_pt_no,
                center_idx,
                orig_idxs_in_average,
            )
        )
    return results
def clip_and_math(raw_image, mean_image, xy, roiradius, maxsize):
    """Cut a (2*roiradius)-square ROI around ``xy`` from both images and
    compute their grayscale absolute difference.

    The ROI is shifted (not shrunk) when it would extend past the image
    bounds given by ``maxsize`` = (maxwidth, maxheight).

    Returns:
        ((l, b, r, t), raw_roi, mean_roi, absdiff_roi)
    """
    roisize = 2 * roiradius
    x, y = xy
    maxwidth, maxheight = maxsize

    # Clamp at the low edge first, then slide back from the high edge.
    left = max(x - roiradius, 0)
    bottom = max(y - roiradius, 0)
    right = left + roisize
    top = bottom + roisize
    if right > maxwidth:
        right = maxwidth
        left = right - roisize
    if top > maxheight:
        top = maxheight
        bottom = top - roisize

    raw_roi = raw_image[bottom:top, left:right]
    mean_roi = mean_image[bottom:top, left:right]
    # float32 cast avoids unsigned-integer wraparound in the subtraction.
    diff = abs(mean_roi.astype(np.float32) - raw_roi)
    if diff.ndim == 3:
        # collapse color channels to grayscale
        diff = np.mean(diff, axis=2)
    return (left, bottom, right, top), raw_roi, mean_roi, diff
def doit(
    h5_filename=None,
    output_h5_filename=None,
    ufmf_filenames=None,
    kalman_filename=None,
    start=None,
    stop=None,
    view=None,
    erode=0,
    save_images=False,
    save_image_dir=None,
    intermediate_thresh_frac=None,
    final_thresh=None,
    stack_N_images=None,
    stack_N_images_min=None,
    old_sync_timestamp_source=False,
    do_rts_smoothing=True,
):
    """
    Copy all data in .h5 file (specified by h5_filename) to a new .h5
    file in which orientations are set based on image analysis of
    .ufmf files. Tracking data to associate 2D points from subsequent
    frames is read from the .h5 kalman file specified by
    kalman_filename.

    Raises RuntimeError if output_h5_filename already exists and
    ValueError if intermediate_thresh_frac or final_thresh is missing.
    """
    # We do a deferred import so test runners can import this python script
    # without depending on these, which depend on Intel IPP.
    import motmot.FastImage.FastImage as FastImage
    import motmot.realtime_image_analysis.realtime_image_analysis as realtime_image_analysis

    if view is None:
        view = ["orig" for f in ufmf_filenames]
    else:
        assert len(view) == len(ufmf_filenames)
    if intermediate_thresh_frac is None or final_thresh is None:
        raise ValueError("intermediate_thresh_frac and final_thresh must be " "set")
    filename2view = dict(zip(ufmf_filenames, view))
    ca = core_analysis.get_global_CachingAnalyzer()
    obj_ids, use_obj_ids, is_mat_file, data_file, extra = ca.initial_file_load(
        kalman_filename
    )
    try:
        ML_estimates_2d_idxs = data_file.root.ML_estimates_2d_idxs[:]
    except tables.exceptions.NoSuchNodeError as err1:
        # backwards compatibility
        try:
            ML_estimates_2d_idxs = data_file.root.kalman_observations_2d_idxs[:]
        except tables.exceptions.NoSuchNodeError as err2:
            raise err1
    if os.path.exists(output_h5_filename):
        raise RuntimeError("will not overwrite old file '%s'" % output_h5_filename)
    with open_file_safe(
        output_h5_filename, delete_on_error=True, mode="w"
    ) as output_h5:
        if save_image_dir is not None:
            if not os.path.exists(save_image_dir):
                os.mkdir(save_image_dir)
        with open_file_safe(h5_filename, mode="r") as h5:
            fps = result_utils.get_fps(h5, fail_on_error=True)
            for input_node in h5.root._f_iter_nodes():
                # copy everything from source to dest
                input_node._f_copy(output_h5.root, recursive=True)
            print("done copying")
            # Clear values in destination table that we may overwrite.
            dest_table = output_h5.root.data2d_distorted
            for colname in [
                "x",
                "y",
                "area",
                "slope",
                "eccentricity",
                "cur_val",
                "mean_val",
                "sumsqf_val",
            ]:
                if colname == "cur_val":
                    fill_value = 0
                else:
                    fill_value = np.nan
                clear_col(dest_table, colname, fill_value=fill_value)
            dest_table.flush()
            print("done clearing")
            camn2cam_id, cam_id2camns = result_utils.get_caminfo_dicts(h5)
            # Open every .ufmf movie and index it by cam_id so frames can be
            # looked up by timestamp later.
            cam_id2fmfs = collections.defaultdict(list)
            cam_id2view = {}
            for ufmf_filename in ufmf_filenames:
                fmf = ufmf.FlyMovieEmulator(
                    ufmf_filename,
                    # darken=-50,
                    allow_no_such_frame_errors=True,
                )
                timestamps = fmf.get_all_timestamps()
                cam_id = get_cam_id_from_filename(fmf.filename, cam_id2camns.keys())
                cam_id2fmfs[cam_id].append(
                    (fmf, result_utils.Quick1DIndexer(timestamps))
                )
                cam_id2view[cam_id] = filename2view[fmf.filename]
            # associate framenumbers with timestamps using 2d .h5 file
            data2d = h5.root.data2d_distorted[:]  # load to RAM
            data2d_idxs = np.arange(len(data2d))
            h5_framenumbers = data2d["frame"]
            h5_frame_qfi = result_utils.QuickFrameIndexer(h5_framenumbers)
            fpc = realtime_image_analysis.FitParamsClass()  # allocate FitParamsClass
            for obj_id_enum, obj_id in enumerate(use_obj_ids):
                print("object %d of %d" % (obj_id_enum, len(use_obj_ids)))
                # get all images for this camera and this obj_id
                obj_3d_rows = ca.load_dynamics_free_MLE_position(obj_id, data_file)
                this_obj_framenumbers = collections.defaultdict(list)
                if save_images:
                    this_obj_raw_images = collections.defaultdict(list)
                    this_obj_mean_images = collections.defaultdict(list)
                this_obj_absdiff_images = collections.defaultdict(list)
                this_obj_morphed_images = collections.defaultdict(list)
                this_obj_morph_failures = collections.defaultdict(list)
                this_obj_im_coords = collections.defaultdict(list)
                this_obj_com_coords = collections.defaultdict(list)
                this_obj_camn_pt_no = collections.defaultdict(list)
                for this_3d_row in obj_3d_rows:
                    # iterate over each sample in the current camera
                    framenumber = this_3d_row["frame"]
                    if start is not None:
                        if not framenumber >= start:
                            continue
                    if stop is not None:
                        if not framenumber <= stop:
                            continue
                    h5_2d_row_idxs = h5_frame_qfi.get_frame_idxs(framenumber)
                    frame2d = data2d[h5_2d_row_idxs]
                    frame2d_idxs = data2d_idxs[h5_2d_row_idxs]
                    obs_2d_idx = this_3d_row["obs_2d_idx"]
                    kobs_2d_data = ML_estimates_2d_idxs[int(obs_2d_idx)]
                    # Parse VLArray: interleaved (camn, point-index) pairs.
                    this_camns = kobs_2d_data[0::2]
                    this_camn_idxs = kobs_2d_data[1::2]
                    # Now, for each camera viewing this object at this
                    # frame, extract images.
                    for camn, camn_pt_no in zip(this_camns, this_camn_idxs):
                        # find 2D point corresponding to object
                        cam_id = camn2cam_id[camn]
                        movie_tups_for_this_camn = cam_id2fmfs[cam_id]
                        cond = (frame2d["camn"] == camn) & (
                            frame2d["frame_pt_idx"] == camn_pt_no
                        )
                        idxs = np.nonzero(cond)[0]
                        assert len(idxs) == 1
                        idx = idxs[0]
                        orig_data2d_rownum = frame2d_idxs[idx]
                        if not old_sync_timestamp_source:
                            # Change the next line to 'timestamp' for old
                            # data (before May/June 2009 -- the switch to
                            # fview_ext_trig)
                            frame_timestamp = frame2d[idx]["cam_received_timestamp"]
                        else:
                            # previous version
                            frame_timestamp = frame2d[idx]["timestamp"]
                        found = None
                        for fmf, fmf_timestamp_qi in movie_tups_for_this_camn:
                            fmf_fnos = fmf_timestamp_qi.get_idxs(frame_timestamp)
                            if not len(fmf_fnos):
                                continue
                            assert len(fmf_fnos) == 1
                            # should only be one .ufmf with this frame and cam_id
                            assert found is None
                            fmf_fno = fmf_fnos[0]
                            found = (fmf, fmf_fno)
                        if found is None:
                            print(
                                "no image data for frame timestamp %s cam_id %s"
                                % (repr(frame_timestamp), cam_id)
                            )
                            continue
                        fmf, fmf_fno = found
                        image, fmf_timestamp = fmf.get_frame(fmf_fno)
                        mean_image = fmf.get_mean_for_timestamp(fmf_timestamp)
                        coding = fmf.get_format()
                        if imops.is_coding_color(coding):
                            image = imops.to_rgb8(coding, image)
                            mean_image = imops.to_rgb8(coding, mean_image)
                        else:
                            image = imops.to_mono8(coding, image)
                            mean_image = imops.to_mono8(coding, mean_image)
                        xy = (
                            int(round(frame2d[idx]["x"])),
                            int(round(frame2d[idx]["y"])),
                        )
                        maxsize = (fmf.get_width(), fmf.get_height())
                        # Accumulate cropped images. Note that the region
                        # of the full image that the cropped image
                        # occupies changes over time as the tracked object
                        # moves. Thus, averaging these cropped-and-shifted
                        # images is not the same as simply averaging the
                        # full frame.
                        roiradius = 25
                        warnings.warn(
                            "roiradius hard-coded to %d: could be set "
                            "from 3D tracking" % roiradius
                        )
                        tmp = clip_and_math(image, mean_image, xy, roiradius, maxsize)
                        im_coords, raw_im, mean_im, absdiff_im = tmp
                        max_absdiff_im = absdiff_im.max()
                        intermediate_thresh = intermediate_thresh_frac * max_absdiff_im
                        absdiff_im[absdiff_im <= intermediate_thresh] = 0
                        if erode > 0:
                            morphed_im = scipy.ndimage.grey_erosion(
                                absdiff_im, size=erode
                            )
                            ## morphed_im = scipy.ndimage.binary_erosion(absdiff_im>1).astype(np.float32)*255.0
                        else:
                            morphed_im = absdiff_im
                        y0_roi, x0_roi = scipy.ndimage.center_of_mass(morphed_im)
                        x0 = im_coords[0] + x0_roi
                        y0 = im_coords[1] + y0_roi
                        if 1:
                            # Reject frames whose thresholded image has more
                            # than one connected blob.
                            morphed_im_binary = morphed_im > 0
                            labels, n_labels = scipy.ndimage.label(morphed_im_binary)
                            morph_fail_because_multiple_blobs = False
                            if n_labels > 1:
                                x0, y0 = np.nan, np.nan
                                # More than one blob -- don't allow image.
                                if 1:
                                    # for min flattening
                                    morphed_im = np.empty(
                                        morphed_im.shape, dtype=np.uint8
                                    )
                                    morphed_im.fill(255)
                                    morph_fail_because_multiple_blobs = True
                                else:
                                    # for mean flattening
                                    morphed_im = np.zeros_like(morphed_im)
                                    morph_fail_because_multiple_blobs = True
                        this_obj_framenumbers[camn].append(framenumber)
                        if save_images:
                            this_obj_raw_images[camn].append((raw_im, im_coords))
                            this_obj_mean_images[camn].append(mean_im)
                        this_obj_absdiff_images[camn].append(absdiff_im)
                        this_obj_morphed_images[camn].append(morphed_im)
                        this_obj_morph_failures[camn].append(
                            morph_fail_because_multiple_blobs
                        )
                        this_obj_im_coords[camn].append(im_coords)
                        this_obj_com_coords[camn].append((x0, y0))
                        this_obj_camn_pt_no[camn].append(orig_data2d_rownum)
                        if 0:
                            fname = "obj%05d_%s_frame%07d_pt%02d.png" % (
                                obj_id,
                                cam_id,
                                framenumber,
                                camn_pt_no,
                            )
                            plot_image_subregion(
                                raw_im,
                                mean_im,
                                absdiff_im,
                                roiradius,
                                fname,
                                im_coords,
                                view=filename2view[fmf.filename],
                            )
                # Now, all the frames from all cameras for this obj_id
                # have been gathered. Do a camera-by-camera analysis.
                for camn in this_obj_absdiff_images:
                    cam_id = camn2cam_id[camn]
                    image_framenumbers = np.array(this_obj_framenumbers[camn])
                    if save_images:
                        raw_images = this_obj_raw_images[camn]
                        mean_images = this_obj_mean_images[camn]
                    absdiff_images = this_obj_absdiff_images[camn]
                    morphed_images = this_obj_morphed_images[camn]
                    morph_failures = np.array(this_obj_morph_failures[camn])
                    im_coords = this_obj_im_coords[camn]
                    com_coords = this_obj_com_coords[camn]
                    camn_pt_no_array = this_obj_camn_pt_no[camn]
                    all_framenumbers = np.arange(
                        image_framenumbers[0], image_framenumbers[-1] + 1
                    )
                    com_coords = np.array(com_coords)
                    if do_rts_smoothing:
                        # Perform RTS smoothing on center-of-mass coordinates.
                        # Find first good datum.
                        fgnz = np.nonzero(~np.isnan(com_coords[:, 0]))
                        # np.float was removed in NumPy 1.24; use float64.
                        com_coords_smooth = np.empty(com_coords.shape, dtype=np.float64)
                        com_coords_smooth.fill(np.nan)
                        if len(fgnz[0]):
                            first_good = fgnz[0][0]
                            RTS_com_coords = com_coords[first_good:, :]
                            # Setup parameters for Kalman filter.
                            dt = 1.0 / fps
                            A = np.array(
                                [
                                    [1, 0, dt, 0],  # process update
                                    [0, 1, 0, dt],
                                    [0, 0, 1, 0],
                                    [0, 0, 0, 1],
                                ],
                                dtype=np.float64,
                            )
                            C = np.array(
                                [[1, 0, 0, 0], [0, 1, 0, 0]],  # observation matrix
                                dtype=np.float64,
                            )
                            Q = 0.1 * np.eye(4)  # process noise
                            R = 1.0 * np.eye(2)  # observation noise
                            initx = np.array(
                                [RTS_com_coords[0, 0], RTS_com_coords[0, 1], 0, 0],
                                dtype=np.float64,
                            )
                            initV = 2 * np.eye(4)
                            initV[0, 0] = 0.1
                            initV[1, 1] = 0.1
                            y = RTS_com_coords
                            xsmooth, Vsmooth = adskalman.adskalman.kalman_smoother(
                                y, A, C, Q, R, initx, initV
                            )
                            com_coords_smooth[first_good:] = xsmooth[:, :2]
                        # Now shift images
                        image_shift = com_coords_smooth - com_coords
                        bad_cond = np.isnan(image_shift[:, 0])
                        # broadcast zeros to places where no good tracking
                        image_shift[bad_cond, 0] = 0
                        image_shift[bad_cond, 1] = 0
                        shifted_morphed_images = [
                            shift_image(im, xy)
                            for im, xy in zip(morphed_images, image_shift)
                        ]
                        results = flatten_image_stack(
                            image_framenumbers,
                            shifted_morphed_images,
                            im_coords,
                            camn_pt_no_array,
                            N=stack_N_images,
                        )
                    else:
                        results = flatten_image_stack(
                            image_framenumbers,
                            morphed_images,
                            im_coords,
                            camn_pt_no_array,
                            N=stack_N_images,
                        )
                    # The variable fno (the first element of the results
                    # tuple) is guaranteed to be contiguous and to span
                    # the range from the first to last frames available.
                    for (
                        fno,
                        av_im,
                        lowerleft,
                        orig_data2d_rownum,
                        orig_idx,
                        orig_idxs_in_average,
                    ) in results:
                        # Clip image to reduce moment arms.
                        av_im[av_im <= final_thresh] = 0
                        fail_fit = False
                        fast_av_im = FastImage.asfastimage(av_im.astype(np.uint8))
                        try:
                            (x0_roi, y0_roi, area, slope, eccentricity) = fpc.fit(
                                fast_av_im
                            )
                        except realtime_image_analysis.FitParamsError as err:
                            fail_fit = True
                        this_morph_failures = morph_failures[orig_idxs_in_average]
                        n_failed_images = np.sum(this_morph_failures)
                        n_good_images = stack_N_images - n_failed_images
                        if n_good_images >= stack_N_images_min:
                            n_images_is_acceptable = True
                        else:
                            n_images_is_acceptable = False
                        if fail_fit:
                            x0_roi = np.nan
                            y0_roi = np.nan
                            area, slope, eccentricity = np.nan, np.nan, np.nan
                        if not n_images_is_acceptable:
                            x0_roi = np.nan
                            y0_roi = np.nan
                            area, slope, eccentricity = np.nan, np.nan, np.nan
                        x0 = x0_roi + lowerleft[0]
                        y0 = y0_roi + lowerleft[1]
                        if 1:
                            for row in dest_table.iterrows(
                                start=orig_data2d_rownum, stop=orig_data2d_rownum + 1
                            ):
                                row["x"] = x0
                                row["y"] = y0
                                row["area"] = area
                                row["slope"] = slope
                                row["eccentricity"] = eccentricity
                                row.update()  # save data
                        if save_images:
                            # Display debugging images
                            fname = "av_obj%05d_%s_frame%07d.png" % (
                                obj_id,
                                cam_id,
                                fno,
                            )
                            if save_image_dir is not None:
                                fname = os.path.join(save_image_dir, fname)
                            raw_im, raw_coords = raw_images[orig_idx]
                            mean_im = mean_images[orig_idx]
                            absdiff_im = absdiff_images[orig_idx]
                            morphed_im = morphed_images[orig_idx]
                            raw_l, raw_b = raw_coords[:2]
                            imh, imw = raw_im.shape[:2]
                            n_ims = 5
                            if 1:
                                # increase contrast
                                contrast_scale = 2.0
                                av_im_show = np.clip(av_im * contrast_scale, 0, 255)
                            margin = 10
                            scale = 3
                            # calculate the orientation line
                            yintercept = y0 - slope * x0
                            xplt = np.array(
                                [
                                    lowerleft[0] - 5,
                                    lowerleft[0] + av_im_show.shape[1] + 5,
                                ]
                            )
                            yplt = slope * xplt + yintercept
                            if 1:
                                # only send non-nan values to plot
                                plt_good = ~np.isnan(xplt) & ~np.isnan(yplt)
                                xplt = xplt[plt_good]
                                yplt = yplt[plt_good]
                            top_row_width = scale * imw * n_ims + (1 + n_ims) * margin
                            SHOW_STACK = True
                            if SHOW_STACK:
                                n_stack_rows = 4
                                rw = scale * imw * stack_N_images + (1 + n_ims) * margin
                                row_width = max(top_row_width, rw)
                                col_height = (
                                    n_stack_rows * scale * imh
                                    + (n_stack_rows + 1) * margin
                                )
                                stack_margin = 20
                            else:
                                row_width = top_row_width
                                col_height = scale * imh + 2 * margin
                                stack_margin = 0
                            canv = benu.Canvas(
                                fname,
                                row_width,
                                col_height + stack_margin,
                                color_rgba=(1, 1, 1, 1),
                            )
                            if SHOW_STACK:
                                for (stacki, s_orig_idx) in enumerate(
                                    orig_idxs_in_average
                                ):
                                    (s_raw_im, s_raw_coords) = raw_images[s_orig_idx]
                                    s_raw_l, s_raw_b = s_raw_coords[:2]
                                    s_imh, s_imw = s_raw_im.shape[:2]
                                    user_rect = (s_raw_l, s_raw_b, s_imw, s_imh)
                                    x_display = (stacki + 1) * margin + (
                                        scale * imw
                                    ) * stacki
                                    for show in ["raw", "absdiff", "morphed"]:
                                        if show == "raw":
                                            y_display = scale * imh + 2 * margin
                                        elif show == "absdiff":
                                            y_display = 2 * scale * imh + 3 * margin
                                        elif show == "morphed":
                                            y_display = 3 * scale * imh + 4 * margin
                                        display_rect = (
                                            x_display,
                                            y_display + stack_margin,
                                            scale * raw_im.shape[1],
                                            scale * raw_im.shape[0],
                                        )
                                        with canv.set_user_coords(
                                            display_rect,
                                            user_rect,
                                            transform=cam_id2view[cam_id],
                                        ):
                                            if show == "raw":
                                                s_im = s_raw_im.astype(np.uint8)
                                            elif show == "absdiff":
                                                tmp = absdiff_images[s_orig_idx]
                                                s_im = tmp.astype(np.uint8)
                                            elif show == "morphed":
                                                tmp = morphed_images[s_orig_idx]
                                                s_im = tmp.astype(np.uint8)
                                            canv.imshow(s_im, s_raw_l, s_raw_b)
                                            sx0, sy0 = com_coords[s_orig_idx]
                                            X = [sx0]
                                            Y = [sy0]
                                            # the raw coords in red
                                            canv.scatter(
                                                X, Y, color_rgba=(1, 0.5, 0.5, 1)
                                            )
                                            if do_rts_smoothing:
                                                sx0, sy0 = com_coords_smooth[s_orig_idx]
                                                X = [sx0]
                                                Y = [sy0]
                                                # the RTS smoothed coords in green
                                                canv.scatter(
                                                    X, Y, color_rgba=(0.5, 1, 0.5, 1)
                                                )
                                            if s_orig_idx == orig_idx:
                                                boxx = np.array(
                                                    [
                                                        s_raw_l,
                                                        s_raw_l,
                                                        s_raw_l + s_imw,
                                                        s_raw_l + s_imw,
                                                        s_raw_l,
                                                    ]
                                                )
                                                boxy = np.array(
                                                    [
                                                        s_raw_b,
                                                        s_raw_b + s_imh,
                                                        s_raw_b + s_imh,
                                                        s_raw_b,
                                                        s_raw_b,
                                                    ]
                                                )
                                                canv.plot(
                                                    boxx,
                                                    boxy,
                                                    color_rgba=(0.5, 1, 0.5, 1),
                                                )
                                        if show == "morphed":
                                            canv.text(
                                                "morphed %d" % (s_orig_idx - orig_idx,),
                                                display_rect[0],
                                                (
                                                    display_rect[1]
                                                    + display_rect[3]
                                                    + stack_margin
                                                    - 20
                                                ),
                                                font_size=font_size,
                                                color_rgba=(1, 0, 0, 1),
                                            )
                            # Display raw_im
                            display_rect = (
                                margin,
                                margin,
                                scale * raw_im.shape[1],
                                scale * raw_im.shape[0],
                            )
                            user_rect = (raw_l, raw_b, imw, imh)
                            with canv.set_user_coords(
                                display_rect, user_rect, transform=cam_id2view[cam_id],
                            ):
                                canv.imshow(raw_im.astype(np.uint8), raw_l, raw_b)
                                canv.plot(
                                    xplt, yplt, color_rgba=(0, 1, 0, 0.5)
                                )  # the orientation line
                            canv.text(
                                "raw",
                                display_rect[0],
                                display_rect[1] + display_rect[3],
                                font_size=font_size,
                                color_rgba=(0.5, 0.5, 0.9, 1),
                                shadow_offset=1,
                            )
                            # Display mean_im
                            display_rect = (
                                2 * margin + (scale * imw),
                                margin,
                                scale * mean_im.shape[1],
                                scale * mean_im.shape[0],
                            )
                            user_rect = (raw_l, raw_b, imw, imh)
                            with canv.set_user_coords(
                                display_rect, user_rect, transform=cam_id2view[cam_id],
                            ):
                                canv.imshow(mean_im.astype(np.uint8), raw_l, raw_b)
                            canv.text(
                                "mean",
                                display_rect[0],
                                display_rect[1] + display_rect[3],
                                font_size=font_size,
                                color_rgba=(0.5, 0.5, 0.9, 1),
                                shadow_offset=1,
                            )
                            # Display absdiff_im
                            display_rect = (
                                3 * margin + (scale * imw) * 2,
                                margin,
                                scale * absdiff_im.shape[1],
                                scale * absdiff_im.shape[0],
                            )
                            user_rect = (raw_l, raw_b, imw, imh)
                            absdiff_clip = np.clip(absdiff_im * contrast_scale, 0, 255)
                            with canv.set_user_coords(
                                display_rect, user_rect, transform=cam_id2view[cam_id],
                            ):
                                canv.imshow(absdiff_clip.astype(np.uint8), raw_l, raw_b)
                            canv.text(
                                "absdiff",
                                display_rect[0],
                                display_rect[1] + display_rect[3],
                                font_size=font_size,
                                color_rgba=(0.5, 0.5, 0.9, 1),
                                shadow_offset=1,
                            )
                            # Display morphed_im
                            display_rect = (
                                4 * margin + (scale * imw) * 3,
                                margin,
                                scale * morphed_im.shape[1],
                                scale * morphed_im.shape[0],
                            )
                            user_rect = (raw_l, raw_b, imw, imh)
                            morphed_clip = np.clip(morphed_im * contrast_scale, 0, 255)
                            with canv.set_user_coords(
                                display_rect, user_rect, transform=cam_id2view[cam_id],
                            ):
                                canv.imshow(morphed_clip.astype(np.uint8), raw_l, raw_b)
                            if 0:
                                canv.text(
                                    "morphed",
                                    display_rect[0],
                                    display_rect[1] + display_rect[3],
                                    font_size=font_size,
                                    color_rgba=(0.5, 0.5, 0.9, 1),
                                    shadow_offset=1,
                                )
                            # Display time-averaged absdiff_im
                            display_rect = (
                                5 * margin + (scale * imw) * 4,
                                margin,
                                scale * av_im_show.shape[1],
                                scale * av_im_show.shape[0],
                            )
                            user_rect = (
                                lowerleft[0],
                                lowerleft[1],
                                av_im_show.shape[1],
                                av_im_show.shape[0],
                            )
                            with canv.set_user_coords(
                                display_rect, user_rect, transform=cam_id2view[cam_id],
                            ):
                                canv.imshow(
                                    av_im_show.astype(np.uint8),
                                    lowerleft[0],
                                    lowerleft[1],
                                )
                                canv.plot(
                                    xplt, yplt, color_rgba=(0, 1, 0, 0.5)
                                )  # the orientation line
                            canv.text(
                                "stacked/flattened",
                                display_rect[0],
                                display_rect[1] + display_rect[3],
                                font_size=font_size,
                                color_rgba=(0.5, 0.5, 0.9, 1),
                                shadow_offset=1,
                            )
                            canv.text(
                                "%s frame % 7d: eccentricity % 5.1f, min N images %d, actual N images %d"
                                % (
                                    cam_id,
                                    fno,
                                    eccentricity,
                                    stack_N_images_min,
                                    n_good_images,
                                ),
                                0,
                                15,
                                font_size=font_size,
                                color_rgba=(0.6, 0.7, 0.9, 1),
                                shadow_offset=1,
                            )
                            canv.save()
                    # Save results to new table
                    if 0:
                        recarray = np.rec.array(
                            list_of_rows_of_data2d, dtype=Info2DCol_description
                        )
                        dest_table.append(recarray)
                    dest_table.flush()
                    dest_table.attrs.has_ibo_data = True
    data_file.close()
def main():
    """Command-line entry point: parse options and delegate to doit()."""
    usage = "%prog [options]"
    parser = OptionParser(usage)
    parser.add_option(
        "--ufmfs",
        type="string",
        help=("sequence of .ufmf filenames " "(e.g. 'cam1.ufmf:cam2.ufmf')"),
    )
    parser.add_option("--view", type="string", help="how to view .ufmf files")
    parser.add_option(
        "--h5", type="string", help=".h5 file with data2d_distorted (REQUIRED)"
    )
    parser.add_option(
        "--output-h5",
        type="string",
        help="filename for output .h5 file with data2d_distorted",
    )
    parser.add_option(
        "--kalman",
        dest="kalman_filename",
        type="string",
        help=".h5 file with kalman data and 3D reconstructor",
    )
    parser.add_option(
        "--start", type="int", default=None, help="frame number to begin analysis on"
    )
    parser.add_option(
        "--stop", type="int", default=None, help="frame number to end analysis on"
    )
    parser.add_option(
        "--erode", type="int", default=0, help="amount of erosion to perform"
    )
    parser.add_option(
        "--intermediate-thresh-frac",
        type="float",
        default=0.5,
        help=(
            # Typo fix in user-facing help text ("accumublate").
            "accumulate pixels greater than this fraction "
            "times brightest absdiff pixel"
        ),
    )
    parser.add_option(
        "--final-thresh",
        type="int",
        default=7,
        help=(
            "clip final image to reduce moment arms before " "extracting orientation"
        ),
    )
    parser.add_option(
        "--stack-N-images",
        type="int",
        default=5,
        help=("preferred number of images to accumulate " "before reducing"),
    )
    parser.add_option(
        "--stack-N-images-min",
        type="int",
        default=5,
        help=("minimum number of images to accumulate " "before reducing"),
    )
    parser.add_option("--save-images", action="store_true", default=False)
    parser.add_option(
        "--no-rts-smoothing",
        action="store_false",
        dest="do_rts_smoothing",
        default=True,
    )
    parser.add_option("--save-image-dir", type="string", default=None)
    parser.add_option(
        "--old-sync-timestamp-source",
        action="store_true",
        default=False,
        help="use data2d['timestamp'] to find matching ufmf frame",
    )
    (options, args) = parser.parse_args()
    # All four of these inputs are mandatory; fail loudly if any is missing.
    if options.ufmfs is None:
        raise ValueError("--ufmfs option must be specified")
    if options.h5 is None:
        raise ValueError("--h5 option must be specified")
    if options.output_h5 is None:
        raise ValueError("--output-h5 option must be specified")
    if options.kalman_filename is None:
        raise ValueError("--kalman option must be specified")
    # NOTE(review): os.pathsep is ':' on POSIX but ';' on Windows, while the
    # --ufmfs help text documents ':' as the separator -- confirm intent.
    ufmf_filenames = options.ufmfs.split(os.pathsep)
    ## print 'ufmf_filenames',ufmf_filenames
    ## print 'options.h5',options.h5
    if options.view is not None:
        # NOTE(review): eval() executes arbitrary code from the command line;
        # ast.literal_eval would be safer if only literal lists are expected.
        view = eval(options.view)
    else:
        view = ["orig"] * len(ufmf_filenames)
    doit(
        ufmf_filenames=ufmf_filenames,
        h5_filename=options.h5,
        kalman_filename=options.kalman_filename,
        start=options.start,
        stop=options.stop,
        view=view,
        output_h5_filename=options.output_h5,
        erode=options.erode,
        save_images=options.save_images,
        save_image_dir=options.save_image_dir,
        intermediate_thresh_frac=options.intermediate_thresh_frac,
        final_thresh=options.final_thresh,
        stack_N_images=options.stack_N_images,
        stack_N_images_min=options.stack_N_images_min,
        old_sync_timestamp_source=options.old_sync_timestamp_source,
        do_rts_smoothing=options.do_rts_smoothing,
    )
# Standard CLI entry point: delegate to main() when run as a script.
if __name__ == "__main__":
    main()
|
#!/bin/zsh
# Generate a self-signed RSA key pair in a PKCS#12 keystore for local use.
# Prompts for the keystore password without echoing it to the terminal.
echo -n "Enter keystore password: "
read -s storepass
# 4096-bit RSA key under alias "mykey", valid for 365 days.
# (A stray trailing '|' was removed: it left the shell waiting for a
# pipeline continuation instead of running the command.)
keytool -genkey -keyalg RSA -alias mykey -keystore src/main/resources/keystore.jks -storepass "$storepass" -validity 365 -keysize 4096 -storetype pkcs12
<reponame>oag221/vcaslib
/*
* Copyright 2018 <NAME> (<EMAIL>, http://winsh.me)
*
* This file is part of JavaRQBench
*
* catrees is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* catrees is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with catrees. If not, see <http://www.gnu.org/licenses/>.
*/
package algorithms.lfca;
/**
 * A minimal, non-thread-safe growable LIFO stack backed by an Object array.
 * The backing array grows by a factor of 4 when full and never shrinks.
 * {@code pop()} and {@code top()} return {@code null} on an empty stack
 * instead of throwing.
 */
class Stack<T> {
    // Current capacity (length) of stackArray.
    private int stackSize;
    // Number of elements held; also the index of the next free slot.
    private int stackPos = 0;
    // Backing storage; only indices [0, stackPos) are live.
    private Object[] stackArray;
    /** Creates a stack with the given initial capacity. */
    public Stack(int stackSize){
        this.stackSize = stackSize;
        stackArray = new Object[stackSize];
    }
    /** Creates a stack with a default initial capacity of 32. */
    public Stack(){
        this(32);
    }
    /** Pushes a node, growing the backing array 4x when it is full. */
    public void push(T node){
        if(stackPos == stackSize){
            int newStackSize = stackSize*4;
            Object[] newStackArray = new Object[newStackSize];
            for(int i = 0; i < stackSize;i++){
                newStackArray[i] = stackArray[i];
            }
            stackSize = newStackSize;
            stackArray = newStackArray;
        }
        stackArray[stackPos] = node;
        stackPos = stackPos + 1;
    }
    /** Removes and returns the top element, or {@code null} if empty. */
    @SuppressWarnings("unchecked")
    public T pop(){
        if(stackPos == 0){
            return null;
        }
        stackPos = stackPos - 1;
        return (T)stackArray[stackPos];
    }
    /** Returns (without removing) the top element, or {@code null} if empty. */
    @SuppressWarnings("unchecked")
    public T top(){
        if(stackPos == 0){
            return null;
        }
        return (T) stackArray[stackPos - 1];
    }
    /** Reverses the order of the live elements in place. */
    public void reverseStack(){
        for(int i = 0; i < stackPos / 2; i++)
        {
            Object temp = stackArray[i];
            stackArray[i] = stackArray[stackPos - i - 1];
            stackArray[stackPos - i - 1] = temp;
        }
    }
    /** Logically empties the stack; the backing array is left untouched. */
    public void resetStack(){
        stackPos = 0;
    }
    /** Returns the number of elements currently held. */
    public int size(){
        return stackPos;
    }
    // Exposes the raw backing array; callers must respect getStackPos().
    public Object[] getStackArray(){
        return stackArray;
    };
    /** Returns the current element count (same as size()). */
    public int getStackPos(){
        return stackPos;
    }
    // NOTE(review): setting stackPos directly trusts the caller to pass a
    // value within [0, capacity]; stale slots above it remain reachable.
    public void setStackPos(int stackPos){
        this.stackPos = stackPos;
    }
    /** Makes this stack a shallow copy of the given stack's state. */
    public void copyStateFrom(Stack<T> stack){
        if(stack.stackSize > stackSize){
            this.stackSize = stack.stackSize;
            stackArray = new Object[this.stackSize];
        }
        this.stackPos = stack.stackPos;
        for(int i = 0; i < this.stackPos; i++){
            this.stackArray[i] = stack.stackArray[i];
        }
    }
}
|
def sort_out_of_order_list(lst):
    """Return a sorted copy of ``lst`` by sorting only its out-of-order suffix.

    Finds the first position that breaks ascending order, widens the suffix
    leftward until every prefix element is <= the suffix minimum, then sorts
    just that suffix.  An already-sorted (or empty) list is returned as an
    unchanged copy.
    """
    index = -1
    for i in range(len(lst) - 1):
        if lst[i] > lst[i + 1]:
            index = i
            break
    if index == -1:
        # Already sorted (or fewer than two elements): nothing to do.
        return list(lst)
    # Bug fix: sorting from the first inversion alone is not enough when an
    # earlier prefix element exceeds the suffix minimum (e.g. [3, 5, 2]
    # previously returned [3, 2, 5]).  Extend the suffix leftward first.
    suffix_min = min(lst[index:])
    while index > 0 and lst[index - 1] > suffix_min:
        index -= 1
    sorted_lst = lst[:index] + sorted(lst[index:])
    return sorted_lst
<filename>core/publisher.go
package core
import (
"context"
"github.com/pkg/errors"
"github.com/filecoin-project/go-filecoin/types"
)
// DefaultMessagePublisher adds messages to a message pool and can publish them to its topic.
// This is wiring for message publication from the outbox.
type DefaultMessagePublisher struct {
	network networkPublisher // transport used for broadcasting
	topic   string           // pubsub topic messages are broadcast on
	pool    *MessagePool     // local pool every published message is added to
}

// networkPublisher is the minimal pubsub capability this publisher needs,
// abstracted for testability.
type networkPublisher interface {
	Publish(topic string, data []byte) error
}
// NewDefaultMessagePublisher creates a new publisher wired to the given
// network transport, topic and message pool.
func NewDefaultMessagePublisher(pubsub networkPublisher, topic string, pool *MessagePool) *DefaultMessagePublisher {
	return &DefaultMessagePublisher{
		network: pubsub,
		topic:   topic,
		pool:    pool,
	}
}
// Publish marshals and publishes a message to the core message pool, and if bcast is true,
// broadcasts it to the network with the publisher's topic.
func (p *DefaultMessagePublisher) Publish(ctx context.Context, message *types.SignedMessage, height uint64, bcast bool) error {
	msgBytes, err := message.Marshal()
	if err != nil {
		return errors.Wrap(err, "failed to marshal message")
	}
	// The message always enters the local pool, broadcast or not.
	if _, err := p.pool.Add(ctx, message, height); err != nil {
		return errors.Wrap(err, "failed to add message to message pool")
	}
	if !bcast {
		return nil
	}
	if err := p.network.Publish(p.topic, msgBytes); err != nil {
		return errors.Wrap(err, "failed to publish message to network")
	}
	return nil
}
|
import sys
import traceback
import tellopy
import av
import cv2.cv2 as cv2 # for avoidance of pylint error
import numpy
import sys
import time
import contextlib
with contextlib.redirect_stdout(None):
import pygame
import pygame.display
import pygame.key
import pygame.locals
import pygame.font
# Shared module-level state, mutated by the drone's flight-data callback and
# the pygame UI setup below.
prev_flight_data = None
stat = None  # latest flight-data object (set by flightDataHandler)
font = None  # pygame font, initialised in tello_drone.pygame_connect()
video_recorder = None
class tello_drone(object):
class FlightDataDisplay(object):
# previous flight data value and surface to overlay
_value = None
_surface = None
# function (drone, data) => new value
# default is lambda drone,data: getattr(data, self._key)
_update = None
def __init__(self, key, format, colour=(255,255,255), update=None):
self._key = key
self._format = format
self._colour = colour
if update:
self._update = update
else:
self._update = lambda drone,data: getattr(data, self._key)
def update(self, drone, data):
new_value = self._update(drone, data)
if self._value != new_value:
self._value = new_value
self._surface = font.render(self._format % (new_value,), True, self._colour)
return self._surface
def palm_land(self, drone, speed):
if speed == 0:
return
drone.palm_land()
def toggle_zoom(self, drone, speed):
# In "video" mode the drone sends 1280x720 frames.
# In "photo" mode it sends 2592x1936 (952x720) frames.
# The video will always be centered in the window.
# In photo mode, if we keep the window at 1280x720 that gives us ~160px on
# each side for status information, which is ample.
# Video mode is harder because then we need to abandon the 16:9 display size
# if we want to put the HUD next to the video.
if speed == 0:
return
drone.set_video_mode(not drone.zoom)
pygame.display.get_surface().fill((0, 0, 0))
pygame.display.flip()
controls = {
'w': 'forward',
's': 'backward',
'a': 'left',
'd': 'right',
'space': 'up',
'left shift': 'down',
'right shift': 'down',
'q': 'counter_clockwise',
'e': 'clockwise',
# arrow keys for fast turns and altitude adjustments
'left': lambda drone, speed: drone.counter_clockwise(speed*2),
'right': lambda drone, speed: drone.clockwise(speed*2),
'up': lambda drone, speed: drone.up(speed*2),
'down': lambda drone, speed: drone.down(speed*2),
'tab': lambda drone, speed: drone.takeoff(),
'backspace': lambda drone, speed: drone.land(),
'p': palm_land,
'z': toggle_zoom
}
def flight_data_mode(self, drone, *args):
return (drone.zoom and "VID" or "PIC")
def flight_data_recording(self, *args):
return (video_recorder and "REC 00:00" or "") # TODO: duration of recording
# def status_print(text):
# pygame.display.set_caption(text)
hud = [
FlightDataDisplay('height', 'ALT %3d'),
FlightDataDisplay('ground_speed', 'SPD %3d'),
FlightDataDisplay('battery_percentage', 'BAT %3d%%'),
FlightDataDisplay('wifi_strength', 'NET %3d%%'),
FlightDataDisplay(None, 'CAM %s', update=flight_data_mode),
FlightDataDisplay(None, '%s', colour=(255, 0, 0), update=flight_data_recording),
]
# def update_hud(hud, drone, flight_data):
# (w,h) = (158,0) # width available on side of screen in 4:3 mode
# blits = []
# for element in hud:
# surface = element.update(drone, flight_data)
# if surface is None:
# continue
# blits += [(surface, (0, h))]
# # w = max(w, surface.get_width())
# h += surface.get_height()
# h += 64 # add some padding
# overlay = pygame.Surface((w, h), pygame.SRCALPHA)
# overlay.fill((0,0,0)) # remove for mplayer overlay mode
# for blit in blits:
# overlay.blit(*blit)
# pygame.display.get_surface().blit(overlay, (0,0))
# pygame.display.update(overlay.get_rect())
def flightDataHandler(self, event, sender, data):
global prev_flight_data
global stat
stat = data
text = str(data)
# if prev_flight_data != text:
# update_hud(hud, sender, data)
# prev_flight_data = text
def display(self, frame, manual):
str_b = "Battery: "+str(stat.battery_percentage)
text_surface = font.render(str(str_b), True, (219, 53, 53))
if manual:
str_m = "Manual"
else:
str_m = "Automatic"
text_surface_manual = font.render(str_m, False, (215, 66, 245))
# start_time = time.time()
image = cv2.cvtColor(numpy.array(frame.to_image()), cv2.COLOR_RGB2BGR)
# print(image.shape)
f = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
f = numpy.rot90(f)
f = pygame.surfarray.make_surface(f)
pygame.display.flip()
self.screen.blit(pygame.transform.flip(f, True, False), (0, 0))
self.screen.blit(text_surface_manual, (840, 0))
self.screen.blit(text_surface, (0, 0))
pygame.display.update()
def get_drone_stats(self):
drone_stat = {
'battery_percentage': stat.battery_percentage,
'battery_low': stat.battery_low,
'camera_state': stat.camera_state,
'height': stat.height,
'imu_state': stat.imu_state,
'wifi_strength': stat.wifi_strength
}
return drone_stat
def connect_video(self, drone):
retry = 3
container = None
while container is None and 0 < retry:
retry -= 1
try:
container = av.open(drone.get_video_stream())
except av.AVError as ave:
print(ave)
print('retry...')
return container
def take_action_3(self, drone, action):
#['Forward', 'Left', 'Right', 'Backward']
speed = 40
key_handler = 'custom'
LY = [0, 0, 0, 0, 0] # no height
LX = [0, -speed, speed, 0, 0]
RY = [1*speed, 0, 0, -1*speed, 0]
getattr(drone, key_handler)(LX[action], LY[action], RY[action])
# def take_action(self, drone, act, num_act, manual):
# speed = 20
# key_handler = 'custom'
# for e in pygame.event.get():
# # WASD for movement
# if e.type == pygame.locals.KEYDOWN:
# print('+' + pygame.key.name(e.key))
# keyname = pygame.key.name(e.key)
# if keyname == 'escape':
# drone.quit()
# exit(0)
# if keyname == 'm':
# manual = not manual
# getattr(drone, key_handler)(0, 0, 0)
#
# if not manual:
# Ry = 2*speed # always go forward
# row = int(act / numpy.sqrt(num_act)) - numpy.floor(int(numpy.sqrt(num_act)) / 2)
# col = int(act % numpy.sqrt(num_act)) - numpy.floor(int(numpy.sqrt(num_act)) / 2)
# print('row:', row)
# print('col:', col)
# Lx = col*speed
# Ly = -row*speed
#
# getattr(drone, key_handler)(Lx, Ly, Ry)
#
# return manual
def if_takeover(self, drone):
manual = False
for e in pygame.event.get():
# WASD for movement
if e.type == pygame.locals.KEYDOWN:
print('+' + pygame.key.name(e.key))
keyname = pygame.key.name(e.key)
if keyname == 'm':
manual = True
return manual
def check_action(self, drone, manual, dict):
# print('Action called')
speed = 60
for e in pygame.event.get():
# WASD for movement
if e.type == pygame.locals.KEYDOWN:
print('+' + pygame.key.name(e.key))
keyname = pygame.key.name(e.key)
if keyname == 'escape':
drone.quit()
exit(0)
if keyname == 'm':
manual = not manual
if keyname == 'l':
# save data-tuple
numpy.save(dict.stat_path, dict.stat)
agent = dict.agent
agent.save_network(dict.iteration, dict.network_path)
numpy.save(dict.data_path, dict.data_tuple)
Mem = dict.Replay_memory
Mem.save(load_path=dict.load_path)
if keyname in self.controls:
key_handler = self.controls[keyname]
if type(key_handler) == str:
getattr(drone, key_handler)(speed)
else:
key_handler(drone, speed)
elif e.type == pygame.locals.KEYUP:
print('-' + pygame.key.name(e.key))
keyname = pygame.key.name(e.key)
if keyname in self.controls:
key_handler = self.controls[keyname]
if type(key_handler) == str:
getattr(drone, key_handler)(0)
else:
key_handler(drone, 0)
return manual
def connect(self):
drone = tellopy.Tello()
drone.subscribe(drone.EVENT_FLIGHT_DATA, self.flightDataHandler)
drone.connect()
drone.wait_for_connection(60.0)
container = self.connect_video(drone)
return container, drone
# def mark_frame_grid(self, action, num_actions,frame):
# BLUE = (0, 0, 255)
# BLACK = (0, 0, 0)
# h = frame.height
# w = frame.width
# len_a = numpy.round(numpy.sqrt(num_actions))
#
# grid_w = w / len_a
# grid_h = h / len_a
#
# a_col = action % len_a
# a_row = int(action / len_a)
# x = a_col * grid_w
# y = a_row * grid_h
# width = grid_w
# height = grid_h
#
# pygame.draw.rect(self.screen, BLUE, (x, y, width, height), 3)
# x = int(x + width / 2)
# y = int(y + height / 2)
# pygame.draw.circle(self.screen, BLACK, (x, y), 10)
# pygame.display.update()
def mark_frame(self, action, num_actions, frame):
    """Overlay a crosshair and a red action marker on self.screen.

    The crosshair spans the middle half of the frame. The red dot marks
    the chosen action: centre for 0 (forward), shifted left for 1, and
    shifted right for anything else. ``num_actions`` is unused here (kept
    for interface parity with the grid-based variant).
    """
    black = (0, 0, 0)
    red = (255, 0, 0)
    h = frame.height
    w = frame.width
    # Crosshair through the frame centre.
    pygame.draw.line(self.screen, black, (w / 2, h / 2 - h / 4), (w / 2, h / 2 + h / 4), 5)
    pygame.draw.line(self.screen, black, (w / 2 - w / 4, h / 2), (w / 2 + w / 4, h / 2), 5)
    # Horizontal offset of the marker relative to the centre line.
    if action == 0:
        offset = 0        # forward
    elif action == 1:
        offset = -h / 6   # left
    else:
        offset = h / 6    # right
    pygame.draw.circle(self.screen, red, (int(w / 2 + offset), int(h / 2)), 25)
    pygame.display.update()
def pygame_connect(self, H, W):
    """Initialise pygame's display and font subsystems.

    Creates the main window, stores it on self.screen, and sets the
    module-level ``font`` used elsewhere for HUD text. Returns the screen.

    NOTE(review): pygame.display.set_mode takes (width, height); the caller
    passes (H, W) — confirm the argument order is intended.
    """
    global font
    pygame.init()
    pygame.display.init()
    self.screen = pygame.display.set_mode((H, W))
    pygame.font.init()
    # Computer Modern at size 32, used for on-screen text rendering.
    font = pygame.font.SysFont("cmr10", 32)
    return self.screen
# if __name__ == '__main__':
# screen = pygame_connect()
# container, drone = connect()
#
# skip_frame = 5
# frame_skip = skip_frame
#
# while True:
# # flightDataHandler()
# for frame in container.decode(video=0):
# if 0 < frame_skip:
# frame_skip = frame_skip - 1
# continue
# # print(frame)
# else:
# frame_skip = skip_frame
# # Do calculations here
# display(frame, screen)
# check_action(drone)
|
<gh_stars>1-10
import debug from 'debug';
import {Volume} from 'memfs';
import path from 'path';
import type {Readable} from 'stream';
import yauzl from 'yauzl';
import ZipReader from './ZipReader';
const d = debug('thx.unzipper.unzipper');
export type OnFileCallback = (fileStreamData: {stream: Readable; filename: string; mimetype?: string}) => Promise<void>;
/**
* Unzips a zip file calling a callback for each file in the zip file.
* @param {Readable} zipReadStream - The readable zip file stream.
* @param {OnFileCallback} onFile - Called for each file in the zip file.
* @return {Promise<void>} Returns a promise when the zip file is complete.
*/
export default function unzipper(zipReadStream: Readable, onFile: OnFileCallback): Promise<void> {
  return new Promise((resolveUnzipper, rejectUnzipper) => {
    d('Unzipping zip file...');
    // Buffer the whole zip into an in-memory fs so yauzl can random-access it.
    const mfs = new Volume();
    const zipMemWriteStream = mfs.createWriteStream('/zip');
    // BUGFIX: errors while buffering previously left the promise pending forever.
    zipReadStream.on('error', rejectUnzipper);
    zipMemWriteStream.on('error', rejectUnzipper);
    // Event: called when finished writing zip file to memory fs
    zipMemWriteStream.on('finish', () => {
      d(`Finished writing zip to memory. ${zipMemWriteStream.bytesWritten} bytes.`);
      const zipMemReadStream = new ZipReader(mfs, '/zip');
      yauzl.fromRandomAccessReader(
        zipMemReadStream,
        zipMemWriteStream.bytesWritten,
        {
          autoClose: true,
          lazyEntries: true,
        },
        (err, zipfile) => {
          if (err || !zipfile) {
            rejectUnzipper(err);
            return;
          }
          d('> Opened zip file from memory');
          // Read first entry
          zipfile.readEntry();
          // Event: called when a folder or file is found in a zip file
          zipfile.on('entry', entry => {
            if (/\/$/.test(entry.fileName)) {
              // folder entry — nothing to emit, advance to the next entry
              zipfile.readEntry();
            } else {
              // file entry
              zipfile.openReadStream(entry, (zerr, zipEntryFileReadStream) => {
                // BUGFIX: `throw` inside yauzl's callback escaped the promise
                // chain as an uncaught exception; reject the promise instead.
                if (zerr) {
                  rejectUnzipper(zerr);
                  return;
                }
                if (!zipEntryFileReadStream) {
                  rejectUnzipper(new Error(`Error opening read stream from zipped file: ${entry.fileName}`));
                  return;
                }
                d(`> Reading file in zip: ${entry.fileName}`);
                onFile({
                  stream: zipEntryFileReadStream,
                  filename: path.basename(entry.fileName),
                })
                  .then(() => {
                    zipfile.readEntry();
                  })
                  // BUGFIX: a rejected onFile previously produced an unhandled
                  // rejection and stalled the unzip; fail the whole operation.
                  .catch(rejectUnzipper);
              });
            }
          });
          // Event: called when the zip file is closed
          zipfile.on('close', () => {
            d('> Zip file closed');
            // Remove the zipfile from memory fs and resolve promise
            resolveUnzipper();
            mfs.reset();
          });
          // Event: called when zip file has an error
          zipfile.on('error', rejectUnzipper);
        },
      );
    });
    // Start piping zip file to memory fs
    zipReadStream.pipe(zipMemWriteStream);
  });
}
|
import React, { Component } from 'react'
import {
AppRegistry,
StyleSheet,
Text,
View,
ListView
} from 'react-native'
import { Provider } from 'react-redux'
import store from './store'
import ProductList from './ProductList'
// Root component of the app. Wraps the UI in a redux <Provider> so that
// ProductList (and any other descendants) can read the shared store.
export default class SuperMarketApp extends Component {
  render() {
    return (
      <Provider store={store}>
        <View style={styles.container}>
          <Text style={styles.welcome}>
            Welcome to SuperMarket App
          </Text>
          <ProductList />
        </View>
      </Provider>
    )
  }
}
// Layout styles: fill the screen and centre content on an off-white background.
const styles = StyleSheet.create({
  container: {
    flex: 1,
    justifyContent: 'center',
    alignItems: 'center',
    backgroundColor: '#F5FCFF',
  },
  welcome: {
    fontSize: 20,
    textAlign: 'center',
    margin: 10,
  },
})

// Entry point: register the root component under the native app name.
AppRegistry.registerComponent('SuperMarketApp', () => SuperMarketApp)
# Create a Python package for dynamic DNS updates using the Cloudflare API
# Step 1: Create the package structure
# Create a directory for the package
mkdir cfddns_package
cd cfddns_package
# Create the necessary files and directories
touch setup.py
mkdir cfddns
touch cfddns/__init__.py
touch cfddns/main.py
# Step 2: Implement the command-line tool
# In cfddns/main.py, implement the command-line interface
# Example implementation:
"""
import argparse
def main():
parser = argparse.ArgumentParser(description='Update DNS records on Cloudflare')
parser.add_argument('domain', help='The domain for which to update DNS records')
parser.add_argument('record', help='The DNS record to update')
parser.add_argument('ip', help='The new IP address')
args = parser.parse_args()
# Call the Cloudflare API to update the DNS record for the specified domain with the new IP address
# Implementation of the Cloudflare API call goes here
if __name__ == '__main__':
main()
"""
# Step 3: Define the package metadata and dependencies in setup.py
# In setup.py, define the package metadata and dependencies
# Example implementation:
"""
from setuptools import setup
setup(
name='cfddns',
version='1.0',
packages=['cfddns'],
entry_points={
'console_scripts': ["cfddns = cfddns.main:main"],
},
install_requires=["cloudflare", "pyyaml"],
)
"""
# Step 4: Install the package and its dependencies
# Install the package and its dependencies using pip
pip install . |
exports.seed = function(knex, Promise) {
return knex('hubs').insert([
{ name: 'api-1' }, // 1
{ name: 'api-2' }, // 2
{ name: 'api-3' }, // 3
{ name: 'api-4' }, // 4
{ name: 'db-1' }, // 5
{ name: 'db-2' }, // 6
{ name: 'db-3' }, // 7
{ name: 'db-4' }, // 8
{ name: 'auth-1' }, // 9
{ name: 'auth-2' }, // 10
{ name: 'auth-3' }, // 11
{ name: 'auth-4' }, // 12
{ name: 'testing-1' }, // 13
{ name: 'testing-2' }, // 14
{ name: 'testing-3' }, // 15
{ name: 'testing-4' }, // 16
{ name: 'build-1' }, // 17
{ name: 'build-2' }, // 18
{ name: 'build-3' }, // 19
{ name: 'build-4' }, // 20
]);
};
|
package io.opensphere.controlpanels.layers.layersets;
import java.awt.BorderLayout;
import java.awt.Color;
import java.awt.Dimension;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.util.List;
import java.util.Set;
import javax.swing.BorderFactory;
import javax.swing.Box;
import javax.swing.DefaultListModel;
import javax.swing.JButton;
import javax.swing.JLabel;
import javax.swing.JList;
import javax.swing.JPanel;
import javax.swing.JScrollPane;
import javax.swing.border.TitledBorder;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import io.opensphere.controlpanels.layers.layersets.LayerSetController.LayerSetControllerListener;
import io.opensphere.core.Toolbox;
import io.opensphere.core.preferences.PreferenceChangeEvent;
import io.opensphere.core.util.collections.New;
import io.opensphere.core.util.image.IconUtil.IconType;
import io.opensphere.core.util.swing.AbstractHUDPanel;
import io.opensphere.core.util.swing.ButtonPanel;
import io.opensphere.core.util.swing.EventQueueUtilities;
import io.opensphere.core.util.swing.HighlightedBorder;
import io.opensphere.core.util.swing.IconButton;
import io.opensphere.core.util.swing.ToStringProxy;
import io.opensphere.mantle.data.ActiveGroupEntry;
/**
 * The panel used for managing layer sets: a list of saved sets on the left, the
 * layers belonging to the selected set on the right, plus buttons to create,
 * delete, rename and activate sets and to remove layers from a set.
 */
@SuppressWarnings("PMD.GodClass")
public class LayerSetPanel extends AbstractHUDPanel implements LayerSetControllerListener
{
    /** serialVersionUID. */
    private static final long serialVersionUID = 1L;

    /** The activate button. */
    private JButton myActivateButton;

    /** The add button. */
    private IconButton myAddButton;

    /** The close button. */
    private JButton myCloseButton;

    /** The controller. */
    private final transient LayerSetController myController;

    /** The delete button. */
    private IconButton myDeleteButton;

    /** The frame. */
    private final transient LayerSetFrame myFrame;

    /** The layer list. */
    private JList<EntryProxy> myLayerList;

    /** The layer list scroll pane. */
    private JScrollPane myLayerListScrollPane;

    /** The layer panel. */
    private Box myLayerPanel;

    /** The layer set list. */
    private JList<String> myLayerSetList;

    /** The layer set panel. */
    private Box myLayerSetPanel;

    /** The layer set scroll pane. */
    private JScrollPane myLayerSetScrollPane;

    /** The remove layer button. */
    private IconButton myRemoveLayerButton;

    /** The rename button. */
    private JButton myRenameButton;

    /** The toolbox. */
    private final transient Toolbox myToolbox;

    /**
     * Instantiates a new layer set panel.
     *
     * @param tb the {@link Toolbox}
     * @param frame the frame
     */
    @SuppressWarnings("PMD.ConstructorCallsOverridableMethod")
    public LayerSetPanel(Toolbox tb, LayerSetFrame frame)
    {
        super();
        myToolbox = tb;
        myFrame = frame;
        myController = new LayerSetController(myToolbox);
        setBackground(new Color(myToolbox.getPreferencesRegistry().getPreferences(AbstractHUDPanel.class)
                .getInt(AbstractHUDPanel.ourHUDBackgroundColorKey, new JPanel().getBackground().getRGB()), true));
        myToolbox.getPreferencesRegistry().getPreferences(AbstractHUDPanel.class)
                .addPreferenceChangeListener(AbstractHUDPanel.ourHUDBackgroundColorKey, this);
        setBorder(BorderFactory.createEmptyBorder(10, 5, 5, 5));
        myController.addListener(this);

        setLayout(new BorderLayout());
        Box topBox = Box.createHorizontalBox();
        topBox.add(getLayerSetPanel());
        topBox.add(Box.createHorizontalStrut(3));
        topBox.add(getLayerPanel());
        add(topBox, BorderLayout.CENTER);

        Box b = Box.createHorizontalBox();
        b.setBorder(BorderFactory.createEmptyBorder(4, 0, 0, 0));
        b.setMinimumSize(new Dimension(100, 40));
        b.add(Box.createHorizontalGlue());
        b.add(getCloseButton());
        b.add(Box.createHorizontalGlue());
        add(b, BorderLayout.SOUTH);

        // Populate the set list, then start with all selection-dependent
        // buttons disabled until the user makes a selection.
        layerSetsChanged();
        getRenameLayerSetButton().setEnabled(false);
        getActivateButton().setEnabled(false);
        getDeleteLayerSetButton().setEnabled(false);
        getRemoveLayerButton().setEnabled(false);
    }

    /**
     * Gets the activate button.
     *
     * @return the activate button
     */
    public JButton getActivateButton()
    {
        if (myActivateButton == null)
        {
            myActivateButton = new IconButton("Activate");
            myActivateButton.setToolTipText("Activate the layers in the selected layer set.");
            myActivateButton.addActionListener(new ActionListener()
            {
                @Override
                public void actionPerformed(ActionEvent e)
                {
                    List<String> values = getLayerSetList().getSelectedValuesList();
                    if (values.size() == 1)
                    {
                        myController.activateLayerSet(values.get(0), true);
                    }
                }
            });
        }
        return myActivateButton;
    }

    /**
     * Gets the add-layer-set button.
     *
     * @return the add-layer-set button
     */
    public JButton getAddLayerSetButton()
    {
        if (myAddButton == null)
        {
            myAddButton = new IconButton(IconType.PLUS);
            myAddButton.setToolTipText("Create a new layer set with the currently active layers.");
            myAddButton.addActionListener(new ActionListener()
            {
                @Override
                public void actionPerformed(ActionEvent e)
                {
                    myController.saveCurrentSet();
                }
            });
        }
        return myAddButton;
    }

    /**
     * Gets the close button.
     *
     * @return the close button
     */
    public JButton getCloseButton()
    {
        if (myCloseButton == null)
        {
            myCloseButton = new JButton("Close");
            myCloseButton.setMargin(ButtonPanel.INSETS_MEDIUM);
            myCloseButton.addActionListener(new ActionListener()
            {
                @Override
                public void actionPerformed(ActionEvent e)
                {
                    myFrame.setFrameVisible(false);
                }
            });
        }
        return myCloseButton;
    }

    /**
     * Gets the delete layer set button.
     *
     * @return the delete layer set button
     */
    public JButton getDeleteLayerSetButton()
    {
        if (myDeleteButton == null)
        {
            myDeleteButton = new IconButton();
            myDeleteButton.setToolTipText("Delete the selected layer set(s)");
            myDeleteButton.setIcon("/images/minus_big.png");
            myDeleteButton.setRolloverIcon("/images/minus_big_over.png");
            myDeleteButton.setPressedIcon("/images/minus_big_press.png");
            myDeleteButton.addActionListener(new ActionListener()
            {
                @Override
                public void actionPerformed(ActionEvent e)
                {
                    List<String> values = getLayerSetList().getSelectedValuesList();
                    if (!values.isEmpty())
                    {
                        for (String setName : values)
                        {
                            myController.deleteSet(setName);
                        }
                    }
                }
            });
        }
        return myDeleteButton;
    }

    /**
     * Gets the layer list.
     *
     * @return the layer list
     */
    public JList<EntryProxy> getLayerList()
    {
        if (myLayerList == null)
        {
            myLayerList = new JList<>();
            myLayerList.setBackground(new Color(0, 0, 0, 0));
            myLayerList.getSelectionModel().addListSelectionListener(new ListSelectionListener()
            {
                @Override
                public void valueChanged(ListSelectionEvent e)
                {
                    if (!e.getValueIsAdjusting())
                    {
                        // Enable "remove layer" only when at least one layer is selected.
                        int[] selectedIndexes = myLayerList.getSelectedIndices();
                        int count = selectedIndexes.length;
                        getRemoveLayerButton().setEnabled(count >= 1);
                    }
                }
            });
        }
        return myLayerList;
    }

    /**
     * Gets the layer list scroll pane.
     *
     * @return the layer list scroll pane
     */
    public JScrollPane getLayerListScrollPane()
    {
        if (myLayerListScrollPane == null)
        {
            myLayerListScrollPane = new JScrollPane(getLayerList());
            myLayerListScrollPane.setBackground(new Color(0, 0, 0, 0));
        }
        return myLayerListScrollPane;
    }

    /**
     * Gets the layer panel.
     *
     * @return the layer panel
     */
    public Box getLayerPanel()
    {
        if (myLayerPanel == null)
        {
            myLayerPanel = Box.createVerticalBox();
            HighlightedBorder hb = new HighlightedBorder(BorderFactory.createLineBorder(getBorderColor(), 1), "Layers",
                    TitledBorder.DEFAULT_JUSTIFICATION, TitledBorder.DEFAULT_POSITION, getTitleFont(),
                    new JLabel().getForeground(), getBorderHighlightColor());
            hb.setWidthOffset(3);
            hb.setXOffset(-1);
            myLayerPanel.setBorder(hb);
            myLayerPanel.setPreferredSize(new Dimension(0, 250));
            myLayerPanel.setMinimumSize(new Dimension(0, 250));
            myLayerPanel.add(Box.createVerticalStrut(3));
            myLayerPanel.add(getLayerListScrollPane());
            myLayerPanel.add(Box.createVerticalStrut(3));

            Box buttonBox = Box.createHorizontalBox();
            buttonBox.add(Box.createHorizontalStrut(4));
            buttonBox.add(getRemoveLayerButton());
            buttonBox.add(Box.createHorizontalGlue());
            myLayerPanel.add(buttonBox);
        }
        return myLayerPanel;
    }

    /**
     * Gets the layer set list.
     *
     * @return the layer set list
     */
    public JList<String> getLayerSetList()
    {
        if (myLayerSetList == null)
        {
            myLayerSetList = new JList<>();
            myLayerSetList.setBackground(new Color(0, 0, 0, 0));
            myLayerSetList.getSelectionModel().addListSelectionListener(new ListSelectionListener()
            {
                @Override
                public void valueChanged(ListSelectionEvent e)
                {
                    if (!e.getValueIsAdjusting())
                    {
                        // Refresh the layer list for the new selection and update button state:
                        // rename/activate require exactly one set, delete allows several.
                        int[] selectedIndexes = myLayerSetList.getSelectedIndices();
                        refreshLayerList();
                        int count = selectedIndexes.length;
                        getRenameLayerSetButton().setEnabled(count == 1);
                        getActivateButton().setEnabled(count == 1);
                        getDeleteLayerSetButton().setEnabled(count >= 1);
                    }
                }
            });
            myLayerSetList.addMouseListener(new MouseAdapter()
            {
                @Override
                public void mouseClicked(MouseEvent e)
                {
                    // Double-click activates the clicked set directly.
                    if (e.getButton() == MouseEvent.BUTTON1 && e.getClickCount() == 2)
                    {
                        List<String> values = getLayerSetList().getSelectedValuesList();
                        if (values.size() == 1)
                        {
                            myController.activateLayerSet(values.get(0), true);
                        }
                    }
                }
            });
        }
        return myLayerSetList;
    }

    /**
     * Gets the layer set panel.
     *
     * @return the layer set panel
     */
    public Box getLayerSetPanel()
    {
        if (myLayerSetPanel == null)
        {
            myLayerSetPanel = Box.createVerticalBox();
            HighlightedBorder hb = new HighlightedBorder(BorderFactory.createLineBorder(getBorderColor(), 1), "Layer Sets",
                    TitledBorder.DEFAULT_JUSTIFICATION, TitledBorder.DEFAULT_POSITION, getTitleFont(),
                    new JLabel().getForeground(), getBorderHighlightColor());
            hb.setWidthOffset(3);
            hb.setXOffset(-1);
            myLayerSetPanel.setBorder(hb);
            myLayerSetPanel.setMinimumSize(new Dimension(0, 250));
            myLayerSetPanel.setPreferredSize(new Dimension(0, 250));
            myLayerSetPanel.add(Box.createVerticalStrut(3));
            myLayerSetPanel.add(getLayerSetScrollPane());
            myLayerSetPanel.add(Box.createVerticalStrut(3));

            Box buttonBox = Box.createHorizontalBox();
            buttonBox.add(Box.createHorizontalStrut(4));
            buttonBox.add(getAddLayerSetButton());
            buttonBox.add(Box.createHorizontalStrut(4));
            buttonBox.add(getDeleteLayerSetButton());
            buttonBox.add(Box.createHorizontalStrut(4));
            buttonBox.add(getRenameLayerSetButton());
            buttonBox.add(Box.createHorizontalStrut(4));
            buttonBox.add(getActivateButton());
            buttonBox.add(Box.createHorizontalGlue());
            myLayerSetPanel.add(buttonBox);
        }
        return myLayerSetPanel;
    }

    /**
     * Gets the layer set scroll pane.
     *
     * @return the layer set scroll pane
     */
    public JScrollPane getLayerSetScrollPane()
    {
        if (myLayerSetScrollPane == null)
        {
            myLayerSetScrollPane = new JScrollPane(getLayerSetList());
            myLayerSetScrollPane.setBackground(new Color(0, 0, 0, 0));
        }
        return myLayerSetScrollPane;
    }

    /**
     * Gets the remove-layer button.
     *
     * @return the remove-layer button
     */
    public JButton getRemoveLayerButton()
    {
        if (myRemoveLayerButton == null)
        {
            myRemoveLayerButton = new IconButton();
            myRemoveLayerButton.setToolTipText("Remove the selected layers from the current layer set.");
            myRemoveLayerButton.setIcon("/images/minus_big.png");
            myRemoveLayerButton.setRolloverIcon("/images/minus_big_over.png");
            myRemoveLayerButton.setPressedIcon("/images/minus_big_press.png");
            myRemoveLayerButton.addActionListener(new ActionListener()
            {
                @Override
                public void actionPerformed(ActionEvent e)
                {
                    List<String> layerSets = getLayerSetList().getSelectedValuesList();
                    if (layerSets.size() == 1)
                    {
                        List<EntryProxy> layers = getLayerList().getSelectedValuesList();
                        if (!layers.isEmpty())
                        {
                            Set<ActiveGroupEntry> entrySet = New.set();
                            for (EntryProxy obj : layers)
                            {
                                entrySet.add(obj.getItem());
                            }
                            myController.deleteLayersFromSet(layerSets.get(0), entrySet);
                        }
                    }
                }
            });
        }
        return myRemoveLayerButton;
    }

    /**
     * Gets the rename layer set button.
     *
     * @return the rename layer set button
     */
    public JButton getRenameLayerSetButton()
    {
        if (myRenameButton == null)
        {
            myRenameButton = new IconButton("Rename");
            myRenameButton.setToolTipText("Rename the selected layer set.");
            myRenameButton.addActionListener(new ActionListener()
            {
                @Override
                public void actionPerformed(ActionEvent e)
                {
                    List<String> values = getLayerSetList().getSelectedValuesList();
                    if (values.size() == 1)
                    {
                        myController.renameSet(values.get(0));
                    }
                }
            });
        }
        return myRenameButton;
    }

    @Override
    public void layerSetsChanged()
    {
        EventQueueUtilities.runOnEDT(new Runnable()
        {
            @Override
            public void run()
            {
                // Remember the single selected set (if any) so the selection
                // survives the model rebuild.
                String lastSelectedLayerSet = null;
                List<String> values = getLayerSetList().getSelectedValuesList();
                if (values.size() == 1)
                {
                    lastSelectedLayerSet = values.get(0);
                }

                List<String> setNames = myController.getSavedSetNames();
                DefaultListModel<String> model = new DefaultListModel<>();
                if (setNames != null && !setNames.isEmpty())
                {
                    model.ensureCapacity(setNames.size());
                    for (String name : setNames)
                    {
                        model.addElement(name);
                    }
                }
                getLayerSetList().setModel(model);
                if (lastSelectedLayerSet != null)
                {
                    getLayerSetList().setSelectedValue(lastSelectedLayerSet, true);
                }
                else
                {
                    refreshLayerList();
                }
            }
        });
    }

    @Override
    public void preferenceChange(PreferenceChangeEvent evt)
    {
        setBackground(new Color(myToolbox.getPreferencesRegistry().getPreferences(AbstractHUDPanel.class)
                .getInt(AbstractHUDPanel.ourHUDBackgroundColorKey, new JPanel().getBackground().getRGB()), true));
    }

    /**
     * Refresh layer list to show the layers of the currently selected set.
     */
    private void refreshLayerList()
    {
        List<String> values = getLayerSetList().getSelectedValuesList();
        DefaultListModel<EntryProxy> model = new DefaultListModel<>();
        if (values.size() == 1)
        {
            // NOTE(review): the return value of this call is discarded; confirm
            // whether it has required side effects or can be removed.
            myController.getAvailableGroupIds();
            List<ActiveGroupEntry> layers = myController.getSavedSetLayers(values.get(0));
            if (layers != null && !layers.isEmpty())
            {
                model.ensureCapacity(layers.size());
                for (ActiveGroupEntry entry : layers)
                {
                    model.addElement(new EntryProxy(entry));
                }
            }
        }
        getLayerList().setModel(model);
        getRemoveLayerButton().setEnabled(false);
    }

    /**
     * Proxy that renders an {@link ActiveGroupEntry} by its name in list views.
     */
    public static class EntryProxy extends ToStringProxy<ActiveGroupEntry>
    {
        /**
         * Instantiates a new entry proxy.
         *
         * @param entry the entry
         */
        public EntryProxy(ActiveGroupEntry entry)
        {
            super(entry);
        }

        @Override
        public String toString()
        {
            return getItem().getName();
        }
    }
}
|
/**
* @author github.com/luncliff (<EMAIL>)
*/
#undef NDEBUG
#include <atomic>
#include <cassert>
#include <iostream>
#include <gsl/gsl>
#include <coroutine/return.h>
#include <coroutine/windows.h>
using namespace std;
using namespace coro;
auto wait_an_event(set_or_cancel& token, atomic_flag& flag) -> frame_t;
int main(int, char*[]) {
    // Manual-reset Win32 event, initially non-signaled.
    HANDLE e = CreateEventEx(nullptr, nullptr, //
                             CREATE_EVENT_MANUAL_RESET, EVENT_ALL_ACCESS);
    assert(e != NULL);
    auto on_return_1 = gsl::finally([e]() { CloseHandle(e); });
    ResetEvent(e);

    set_or_cancel token{e};
    atomic_flag flag = ATOMIC_FLAG_INIT;
    // Launch the coroutine; it suspends awaiting the event via `token`.
    auto frame = wait_an_event(token, flag);
    auto on_return_2 = gsl::finally([&frame]() { frame.destroy(); });

    SetEvent(e); // set
    // give time to windows threads
    SleepEx(300, true);
    // test_and_set() returns the previous value: true only if the coroutine
    // resumed successfully and set the flag, i.e. success -> EXIT_SUCCESS.
    return flag.test_and_set() == false ? EXIT_FAILURE : EXIT_SUCCESS;
}
// Coroutine: suspend until `evt` is signaled or canceled; on success, set
// `flag` so the caller can observe that the wait completed.
auto wait_an_event(set_or_cancel& evt, atomic_flag& flag) -> frame_t {
    // wait for set or cancel
    // `co_await` will forward `GetLastError` if canceled.
    if (DWORD ec = co_await evt) {
        cerr << system_category().message(ec) << endl;
        co_return;
    }
    // Reached only when the event was signaled (ec == 0).
    flag.test_and_set();
}
|
namespace particles {
    // Bit flags describing a particle source's lifecycle state.
    enum Flag {
        enabled = 1 << 0,   // source is emitting new particles
        destroyed = 1 << 1, // source is pending cleanup once its particles expire
    }

    // maximum count of sources before removing previous sources,
    // scaled to the device's available RAM
    const MAX_SOURCES = (() => {
        const sz = control.ramSize();
        if (sz <= 1024 * 100) {
            return 8;
        } else if (sz <= 1024 * 200) {
            return 16;
        } else {
            return 50;
        }
    })();
    // Fixed-point time precision: elapsed time is right-shifted by this,
    // i.e. measured in units of 1 / (1 << 10) seconds.
    const TIME_PRECISION = 10;

    // Timestamp (ms) of the previous updateParticles pass.
    let lastUpdate: number;
    /**
     * A single particle
     */
    export class Particle {
        _x: Fx8;          // x position (Fx8 fixed point)
        _y: Fx8;          // y position (Fx8 fixed point)
        vx: Fx8;          // x velocity
        vy: Fx8;          // y velocity
        lifespan: number; // remaining life in ms; <= 0 means dead
        next: Particle;   // intrusive singly-linked list maintained by the source
        data?: number;    // factory-specific state (meaning depends on the factory)
        color?: number;   // factory-specific color value
    }
    /**
     * An anchor for a Particle to originate from
     */
    export interface ParticleAnchor {
        x: number;                  // anchor x position
        y: number;                  // anchor y position
        vx?: number;                // optional x velocity of the anchor
        vy?: number;                // optional y velocity of the anchor
        width?: number;             // optional bounds, used by factories
        height?: number;
        image?: Image;              // optional image associated with the anchor
        flags?: number;             // sprite flags; checked for sprites.Flag.Destroyed
        setImage?: (i: Image) => void;
    }
/**
* A source of particles
*/
export class ParticleSource implements SpriteLike {
private _z: number;
/**
* A relative ranking of this sources priority
* When necessary, a source with a lower priority will
* be culled before a source with a higher priority.
*/
priority: number;
id: number;
_dt: number;
/**
* The anchor this source is currently attached to
*/
anchor: ParticleAnchor;
/**
* Time to live in milliseconds. The lifespan decreases by 1 on each millisecond
* and the source gets destroyed when it reaches 0.
*/
lifespan: number;
protected flags: number;
protected head: Particle;
protected timer: number;
protected period: number;
protected _factory: ParticleFactory;
protected ax: Fx8;
protected ay: Fx8;
get z() {
return this._z;
}
set z(v: number) {
if (v != this._z) {
this._z = v;
game.currentScene().flags |= scene.Flag.NeedsSorting;
}
}
/**
* @param anchor to emit particles from
* @param particlesPerSecond rate at which particles are emitted
* @param factory [optional] factory to generate particles with; otherwise,
*/
constructor(anchor: ParticleAnchor, particlesPerSecond: number, factory?: ParticleFactory) {
init();
const scene = game.currentScene();
const sources = particleSources();
// remove and immediately destroy oldest source if over MAX_SOURCES
if (sources.length >= MAX_SOURCES) {
sortSources(sources);
const removedSource = sources.shift();
removedSource.clear();
removedSource.destroy();
}
this.flags = 0;
this.setRate(particlesPerSecond);
this.setAcceleration(0, 0);
this.setAnchor(anchor);
this.lifespan = undefined;
this._dt = 0;
this.z = 0;
this.priority = 0;
this.setFactory(factory || particles.defaultFactory);
sources.push(this);
scene.addSprite(this);
this.enabled = true;
}
__serialize(offset: number): Buffer {
return undefined;
}
__update(camera: scene.Camera, dt: number) {
// see _update()
}
__draw(camera: scene.Camera) {
let current = this.head;
const left = Fx8(camera.drawOffsetX);
const top = Fx8(camera.drawOffsetY);
while (current) {
if (current.lifespan > 0)
this.drawParticle(current, left, top);
current = current.next;
}
}
_update(dt: number) {
this.timer -= dt;
const anchor: ParticleAnchor = this.anchor;
if (this.lifespan !== undefined) {
this.lifespan -= dt;
if (this.lifespan <= 0) {
this.lifespan = undefined;
this.destroy();
}
} else if (this.anchor && this.anchor.flags !== undefined && (this.anchor.flags & sprites.Flag.Destroyed)) {
this.lifespan = 750;
}
while (this.timer < 0 && this.enabled) {
this.timer += this.period;
const p = this._factory.createParticle(this.anchor);
if (!p) continue; // some factories can decide to not produce a particle
p.next = this.head;
this.head = p;
}
if (!this.head) return;
let current = this.head;
this._dt += dt;
let fixedDt = Fx8(this._dt);
if (fixedDt) {
do {
if (current.lifespan > 0) {
current.lifespan -= dt;
this.updateParticle(current, fixedDt)
}
} while (current = current.next);
this._dt = 0;
} else {
do {
current.lifespan -= dt;
} while (current = current.next);
}
}
_prune() {
while (this.head && this.head.lifespan <= 0) {
this.head = this.head.next;
}
if ((this.flags & Flag.destroyed) && !this.head) {
const scene = game.currentScene();
if (scene)
scene.allSprites.removeElement(this);
const sources = particleSources();
if (sources && sources.length)
sources.removeElement(this);
this.anchor == undefined;
}
let current = this.head;
while (current && current.next) {
if (current.next.lifespan <= 0) {
current.next = current.next.next;
} else {
current = current.next;
}
}
}
/**
* Sets the acceleration applied to the particles
*/
setAcceleration(ax: number, ay: number) {
this.ax = Fx8(ax);
this.ay = Fx8(ay);
}
/**
* Enables or disables particles
* @param on
*/
setEnabled(on: boolean) {
this.enabled = on;
}
get enabled() {
return !!(this.flags & Flag.enabled);
}
/**
* Set whether this source is currently enabled (emitting particles) or not
*/
set enabled(v: boolean) {
if (v !== this.enabled) {
this.flags = v ? (this.flags | Flag.enabled) : (this.flags ^ Flag.enabled);
this.timer = 0;
}
}
/**
* Destroy the source
*/
destroy() {
// The `_prune` step will finishing destroying this Source once all emitted particles finish rendering
this.enabled = false;
this.flags |= Flag.destroyed;
this._prune();
}
/**
* Clear all particles emitted from this source
*/
clear() {
this.head = undefined;
}
/**
* Set a anchor for particles to be emitted from
* @param anchor
*/
setAnchor(anchor: ParticleAnchor) {
this.anchor = anchor;
}
/**
* Sets the number of particle created per second
* @param particlesPerSecond
*/
setRate(particlesPerSecond: number) {
this.period = Math.ceil(1000 / particlesPerSecond);
this.timer = 0;
}
get factory(): ParticleFactory {
return this._factory;
}
/**
* Sets the particle factor
* @param factory
*/
setFactory(factory: ParticleFactory) {
if (factory)
this._factory = factory;
}
protected updateParticle(p: Particle, fixedDt: Fx8) {
fixedDt = Fx.rightShift(fixedDt, TIME_PRECISION);
p.vx = Fx.add(p.vx, Fx.mul(this.ax, fixedDt));
p.vy = Fx.add(p.vy, Fx.mul(this.ay, fixedDt));
p._x = Fx.add(p._x, Fx.mul(p.vx, fixedDt));
p._y = Fx.add(p._y, Fx.mul(p.vy, fixedDt));
}
protected drawParticle(p: Particle, screenLeft: Fx8, screenTop: Fx8) {
this._factory.drawParticle(p, Fx.sub(p._x, screenLeft), Fx.sub(p._y, screenTop));
}
}
    //% whenUsed
    // Shared fallback factory used when a source is created without one.
    export const defaultFactory = new particles.SprayFactory(20, 0, 60);
    /**
     * Creates a new source of particles attached to a sprite
     * @param sprite the sprite to use as the emission anchor
     * @param particlesPerSecond number of particles created per second
     * @return the newly created ParticleSource
     */
    export function createParticleSource(sprite: Sprite, particlesPerSecond: number): ParticleSource {
        return new ParticleSource(sprite, particlesPerSecond);
    }
    // Lazily set up per-scene particle state: the source list plus the
    // per-frame update and periodic prune hooks. Repeat calls are no-ops.
    function init() {
        const scene = game.currentScene();
        if (scene.particleSources) return;
        scene.particleSources = [];
        lastUpdate = control.millis();
        game.onUpdate(updateParticles);
        game.onUpdateInterval(250, pruneParticles);
    }
function updateParticles() {
const sources = particleSources();
if (!sources) return;
sortSources(sources);
const time = control.millis();
const dt = time - lastUpdate;
lastUpdate = time;
for (let i = 0; i < sources.length; i++) {
sources[i]._update(dt);
}
}
function pruneParticles() {
const sources = particleSources();
if (sources) sources.slice(0, sources.length).forEach(s => s._prune());
}
function sortSources(sources: ParticleSource[]) {
sources.sort((a, b) => (a.priority - b.priority || a.id - b.id));
}
    /**
     * A source of particles where particles will occasionally change speed based off of each other
     */
    export class FireSource extends ParticleSource {
        protected galois: Math.FastRandom; // cheap PRNG for per-particle chance rolls

        constructor(anchor: ParticleAnchor, particlesPerSecond: number, factory?: ParticleFactory) {
            super(anchor, particlesPerSecond, factory);
            this.galois = new Math.FastRandom();
            this.z = 20; // above the default source z of 0
        }

        updateParticle(p: Particle, fixedDt: Fx8) {
            super.updateParticle(p, fixedDt);
            // 30% chance: copy the neighboring particle's velocity, which
            // makes nearby particles move together.
            if (p.next && this.galois.percentChance(30)) {
                p.vx = p.next.vx;
                p.vy = p.next.vy;
            }
        }
    }
    /**
     * A source of particles where the particles oscillate horizontally, and occasionally change
     * between a given number of defined states
     */
    export class BubbleSource extends ParticleSource {
        protected maxState: number;        // highest state index a particle's data may reach
        protected galois: Math.FastRandom; // cheap PRNG for chance rolls
        // percent chance per update that a particle steps between states
        stateChangePercentage: number;
        // percent chance per update that a particle flips horizontal direction
        oscillationPercentage: number

        constructor(anchor: ParticleAnchor, particlesPerSecond: number, maxState: number, factory?: ParticleFactory) {
            super(anchor, particlesPerSecond, factory);
            this.galois = new Math.FastRandom();
            this.maxState = maxState;
            this.stateChangePercentage = 3;
            this.oscillationPercentage = 4;
        }

        updateParticle(p: Particle, fixedDt: Fx8) {
            super.updateParticle(p, fixedDt);
            // Occasionally step the particle's state: increment while below
            // maxState, otherwise decrement while above 0.
            if (this.galois.percentChance(this.stateChangePercentage)) {
                if (p.data < this.maxState) {
                    p.data++;
                } else if (p.data > 0) {
                    p.data--;
                }
            }
            // Occasionally reverse horizontal velocity to create a wobble.
            if (this.galois.percentChance(this.oscillationPercentage)) {
                p.vx = Fx.neg(p.vx);
            }
        }
    }
export function clearAll() {
const sources = particleSources();
if (sources) {
sources.forEach(s => s.clear());
pruneParticles();
}
}
/**
* Stop all particle sources from creating any new particles
*/
export function disableAll() {
const sources = particleSources();
if (sources) {
sources.forEach(s => s.enabled = false);
pruneParticles();
}
}
/**
* Allow all particle sources to create any new particles
*/
export function enableAll() {
const sources = particleSources();
if (sources) {
sources.forEach(s => s.enabled = true);
pruneParticles();
}
}
    // Accessor for the current scene's source list (undefined before init()).
    function particleSources() {
        const sources = game.currentScene().particleSources;
        return sources;
    }
}
|
#!/bin/bash
# Copyright 2015, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
set -ex

cd $(dirname $0)/../../..

# Make sure there are no pre-existing QPS workers around before starting
# the performance test suite.
#
# BUGFIX: under `set -e`, each `... | xargs kill -9` pipeline aborts the whole
# script when no matching worker is running (kill is invoked with no PIDs and
# fails, making xargs — the last command in the pipeline — exit non-zero).
# These are best-effort cleanups, so every line is guarded with `|| true`,
# matching the existing guard on the C++ line.

# C++
killall -9 qps_worker || true

# C#
ps -C mono -o pid=,cmd= | grep QpsWorker | awk '{print $1}' | xargs kill -9 || true

# Ruby
ps -C ruby -o pid=,cmd= | grep 'qps/worker.rb' | awk '{print $1}' | xargs kill -9 || true

# Node
ps -C node -o pid=,cmd= | grep 'performance/worker.js' | awk '{print $1}' | xargs kill -9 || true

# Python
ps -C python -o pid=,cmd= | grep 'qps_worker.py' | awk '{print $1}' | xargs kill -9 || true

# Java
jps | grep LoadWorker | awk '{print $1}' | xargs kill -9 || true
|
<?php
// Remove the first occurrence of $delete from an indexed array.
// Note: array_search() uses loose (==) comparison, and the removed key is
// left as a gap (keys are NOT re-indexed), matching the original behavior.
function delete_element($arr, $delete){
    $key = array_search($delete, $arr);
    if ($key !== false) {
        unset($arr[$key]);
    }
    // Return the array with the element deleted
    return $arr;
}

// Driver code
$arr = array(1, 2, 3, 4, 5);
$delete = 2;
$result = delete_element($arr, $delete);

// Printing the result
print_r($result);
?>
<reponame>menghuanlunhui/springboot-master<filename>jframe-web-app/src/main/java/com/jf/system/aspect/AspectToken.java<gh_stars>0
package com.jf.system.aspect;
import com.jf.annotation.Token;
import com.jf.common.TokenHandler;
import com.jf.database.enums.ResCode;
import com.jf.string.StringUtil;
import com.jf.system.conf.IConstant;
import com.jf.exception.AppException;
import com.jf.exception.AppTokenException;
import org.aspectj.lang.ProceedingJoinPoint;
import org.aspectj.lang.annotation.Around;
import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Pointcut;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.annotation.Order;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.stereotype.Component;
import org.springframework.web.context.request.RequestContextHolder;
import org.springframework.web.context.request.ServletRequestAttributes;
import org.springframework.web.util.WebUtils;
import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
/**
 * Created with IntelliJ IDEA.
 * Description: App Token — AOP aspect that resolves a request token
 * (HTTP header or cookie) into a user id before the annotated method runs.
 * User: admin
 * Date: 2018-05-24
 * Time: 15:07
 */
@Aspect
@Component
@Order(2)
public class AspectToken {

    private final static Logger log = LoggerFactory.getLogger(AspectToken.class);

    // required = false: Redis may not be configured; it is only consulted
    // when the @Token annotation requests cached lookups (useCache = true).
    @Autowired(required = false)
    private RedisTemplate redisTemplate;

    @Autowired
    private TokenHandler tokenHandler;

    // Matches every method annotated with @Token.
    @Pointcut("@annotation(com.jf.annotation.Token)")
    public void token() {
    }

    /**
     * Resolves the request token into a user id before invoking the
     * annotated method.
     * Note: the annotated method's FIRST parameter must be a Long — it is
     * overwritten with the resolved user id.
     *
     * @param pjp join point of the intercepted method
     * @param tk  the @Token annotation on that method
     */
    @Around("token()&&@annotation(tk)")
    public Object token(ProceedingJoinPoint pjp, Token tk) {
        ServletRequestAttributes attributes = (ServletRequestAttributes) RequestContextHolder.getRequestAttributes();
        HttpServletRequest request = attributes.getRequest();
        Object[] args = pjp.getArgs();
        if (args.length < 1) {
            throw new AppTokenException("必须指定一个参数");
        }
        String name = tk.name(); // header/cookie name holding the token
        String type = tk.type(); // token source: 1-header 2-cookie
        boolean need = tk.need(); // is the token required? default: true
        boolean cache = tk.useCache(); // resolve via Redis cache? default: true
        try {
            if (IConstant.TOKEN_HEADER.equals(type)) {
                String token = request.getHeader(name);
                if (!need) {
                    // Optional token: proceed anonymously when absent or
                    // when the cache has no mapping for it.
                    if (StringUtil.isBlank(token)) {
                        return pjp.proceed();
                    } else {
                        if (cache) {
                            Long uid = (Long) redisTemplate.opsForValue().get(token);
                            if (uid != null) {
                                args[0] = tokenHandler.getIdByTokenFromRedis(token);
                                return pjp.proceed(args);
                            } else {
                                return pjp.proceed();
                            }
                        } else {
                            args[0] = tokenHandler.getIdByTokenFromDb(token);
                            return pjp.proceed(args);
                        }
                    }
                } else {
                    // Required token: a blank token is an immediate failure.
                    if (StringUtil.isBlank(token)) {
                        throw new AppTokenException(ResCode.TOKEN_EXP.msg());
                    } else {
                        log.info(IConstant.TOKEN_HEADER + " token:" + token);
                        if (cache) {
                            args[0] = tokenHandler.getIdByTokenFromRedis(token);
                        } else {
                            args[0] = tokenHandler.getIdByTokenFromDb(token);
                        }
                        return pjp.proceed(args);
                    }
                }
            } else if (IConstant.TOKEN_COOKIE.equals(type)) {
                // Same flow as the header case, but the token comes from a cookie.
                Cookie cookieValue = WebUtils.getCookie(request, name);
                if (!need) {
                    if (cookieValue == null) {
                        return pjp.proceed();
                    } else {
                        String token = cookieValue.getValue();
                        if (StringUtil.isBlank(token)) {
                            return pjp.proceed();
                        }
                        if (cache) {
                            Long uid = (Long) redisTemplate.opsForValue().get(token);
                            if (uid != null) {
                                args[0] = tokenHandler.getIdByTokenFromRedis(token);
                                return pjp.proceed(args);
                            } else {
                                return pjp.proceed();
                            }
                        } else {
                            args[0] = tokenHandler.getIdByTokenFromDb(token);
                            return pjp.proceed(args);
                        }
                    }
                } else {
                    if (cookieValue == null) {
                        throw new AppTokenException(ResCode.TOKEN_EXP.msg());
                    }
                    String token = cookieValue.getValue();
                    if (StringUtil.isNotBlank(token)) {
                        log.info(IConstant.TOKEN_COOKIE + " token:" + token);
                        if (cache) {
                            args[0] = tokenHandler.getIdByTokenFromRedis(token);
                        } else {
                            args[0] = tokenHandler.getIdByTokenFromDb(token);
                        }
                        return pjp.proceed(args);
                    } else {
                        throw new AppTokenException(ResCode.TOKEN_EXP.msg());
                    }
                }
            } else {
                // Annotation declared an unknown token source.
                throw new AppException("APP接口异常: Invalid token value.");
            }
        } catch (Throwable throwable) {
            // proceed() declares Throwable; rewrap into the app's exception types.
            if (throwable instanceof AppTokenException) {
                throw new AppTokenException(throwable.getMessage(), throwable);
            } else if (throwable instanceof NullPointerException) {
                throw new AppException("NullPointerException", throwable);
            } else {
                throw new AppException(StringUtil.isBlank(throwable.getMessage()) ? "Null" : throwable.getMessage(), throwable);
            }
        }
    }
}
|
<filename>jgrapht-master/jgrapht-core/src/test/java/org/jgrapht/alg/shortestpath/TreeSingleSourcePathsTest.java<gh_stars>1-10
/*
* (C) Copyright 2016-2018, by <NAME> and Contributors.
*
* JGraphT : a free Java graph-theory library
*
* This program and the accompanying materials are dual-licensed under
* either
*
* (a) the terms of the GNU Lesser General Public License version 2.1
* as published by the Free Software Foundation, or (at your option) any
* later version.
*
* or (per the licensee's choosing)
*
* (b) the terms of the Eclipse Public License v1.0 as published by
* the Eclipse Foundation.
*/
package org.jgrapht.alg.shortestpath;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.util.*;
import org.jgrapht.*;
import org.jgrapht.alg.util.*;
import org.jgrapht.graph.*;
import org.junit.*;
/**
 * Tests TreeSingleSourcePathsImpl on a small weighted multigraph:
 * verifies per-vertex weights, edge lists and vertex lists of the paths
 * reconstructed from an explicitly supplied predecessor map.
 *
 * @author <NAME>
 */
public class TreeSingleSourcePathsTest
{
    @Test
    public void test()
    {
        // Multigraph 1 -> 2 -> 3 -> 4 with three parallel edges per hop;
        // the predecessor map below always picks the cheapest parallel edge.
        DirectedWeightedPseudograph<Integer, DefaultWeightedEdge> g =
            new DirectedWeightedPseudograph<>(DefaultWeightedEdge.class);
        Graphs.addAllVertices(g, Arrays.asList(1, 2, 3, 4));
        DefaultWeightedEdge e12_1 = g.addEdge(1, 2);
        g.setEdgeWeight(e12_1, -5.0);
        DefaultWeightedEdge e12_2 = g.addEdge(1, 2);
        g.setEdgeWeight(e12_2, -2.0);
        DefaultWeightedEdge e12_3 = g.addEdge(1, 2);
        g.setEdgeWeight(e12_3, 1.0);
        DefaultWeightedEdge e23_1 = g.addEdge(2, 3);
        g.setEdgeWeight(e23_1, 0d);
        DefaultWeightedEdge e23_2 = g.addEdge(2, 3);
        g.setEdgeWeight(e23_2, -2.0);
        DefaultWeightedEdge e23_3 = g.addEdge(2, 3);
        g.setEdgeWeight(e23_3, -5.0);
        DefaultWeightedEdge e34_1 = g.addEdge(3, 4);
        g.setEdgeWeight(e34_1, -100.0);
        DefaultWeightedEdge e34_2 = g.addEdge(3, 4);
        g.setEdgeWeight(e34_2, 100.0);
        DefaultWeightedEdge e34_3 = g.addEdge(3, 4);
        g.setEdgeWeight(e34_3, 1.0);

        // Predecessor map: vertex -> (accumulated weight, incoming edge).
        Map<Integer, Pair<Double, DefaultWeightedEdge>> map = new HashMap<>();
        map.put(2, Pair.of(-5d, e12_1));
        map.put(3, Pair.of(-10d, e23_3));
        map.put(4, Pair.of(-110d, e34_1));

        TreeSingleSourcePathsImpl<Integer, DefaultWeightedEdge> t1 =
            new TreeSingleSourcePathsImpl<>(g, 1, map);

        // Source vertex: zero weight, empty edge list, singleton vertex list.
        assertEquals(1, t1.getSourceVertex().intValue());
        assertEquals(0d, t1.getWeight(1), 1e-9);
        assertTrue(t1.getPath(1).getEdgeList().isEmpty());
        assertEquals(Arrays.asList(g.getEdgeSource(e12_1)), t1.getPath(1).getVertexList());
        // Remaining vertices: weight and edge list per the predecessor map.
        assertEquals(-5d, t1.getWeight(2), 1e-9);
        assertEquals(Arrays.asList(e12_1), t1.getPath(2).getEdgeList());
        assertEquals(-10d, t1.getWeight(3), 1e-9);
        assertEquals(Arrays.asList(e12_1, e23_3), t1.getPath(3).getEdgeList());
        assertEquals(-110d, t1.getWeight(4), 1e-9);
        assertEquals(Arrays.asList(e12_1, e23_3, e34_1), t1.getPath(4).getEdgeList());
    }
}
|
<filename>cg-ui-demo-main/js-demo/ts-js-comp.js
// Simple Book model (originally TypeScript compiled to an IIFE/prototype
// pattern; rewritten as an ES2015 class with identical behavior).
class Book {
    constructor(name, author) {
        this.name = name;
        this.author = author;
    }

    // Logs the city plus the book details, then returns the fixed value 10.
    show(city) {
        console.log(city);
        console.log("Book name: " + this.name);
        console.log("Author: " + this.author);
        return 10;
    }
}

var obj = new Book("Kavita", "My Jee Batch");
obj.show("Hyd");
|
import * as React from "react";
import Svg, { Path, SvgProps } from "react-native-svg";
interface Props extends SvgProps {
  size?: number;
}

// Horizontal "swap" arrows icon rendered as a 20x20 SVG path.
// `size` sets both width and height (default 24); every other SvgProp is
// forwarded to the underlying <Svg> element.
const SwitchHorizontal = (props: Props) => {
  const { size = 24, ...svgProps } = props;
  return (
    <Svg
      viewBox="0 0 20 20"
      fill="currentColor"
      width={size}
      height={size}
      {...svgProps}
    >
      <Path d="M8 5a1 1 0 100 2h5.586l-1.293 1.293a1 1 0 001.414 1.414l3-3a1 1 0 000-1.414l-3-3a1 1 0 10-1.414 1.414L13.586 5H8zm4 10a1 1 0 100-2H6.414l1.293-1.293a1 1 0 10-1.414-1.414l-3 3a1 1 0 000 1.414l3 3a1 1 0 001.414-1.414L6.414 15H12z" />
    </Svg>
  );
};

export default SwitchHorizontal;
|
#!/usr/bin/env bash
# Load the TrackballWorks launch agent for the current user ($USER) so the
# Kensington TrackballWorks driver starts with the login session.
sudo -u $USER launchctl load /Library/LaunchAgents/com.peanuthut.TrackballWorks.load.plist
|
<gh_stars>0
import React from "react";
import styled from 'styled-components'
import Text from "./Text";
export default PinScreen = () => {
return (
<Container>
<Text>Aplikacja bankowa</Text>
<Text>Wprowadz PIN</Text>
</Container>
);
}
const Container = styled.SafeAreaView`
flex: 1;
background-color: #1e1e1e;
`; |
#!/bin/sh
# Regenerates InfoPlist.strings files from their InfoPlist-Seed.strings
# templates, substituting the build date, current year and company string.

# Current date as YYYY/MM/DD (echo -n strips the trailing newline).
DateStr=`date +%Y/%m/%d`
DateStr=`echo -n $DateStr`

# Current year, e.g. 2024.
ThisYear=`date +%Y`
ThisYear=`echo -n $ThisYear`

# All seed templates below the current directory.
fileList=`find . -name "InfoPlist-Seed.strings"`

for target in $fileList
do
# Drop the "-Seed" suffix: ./Foo/InfoPlist-Seed.strings -> ./Foo/InfoPlist
genFile=${target%-*}
# .strings files are UTF-16: convert to UTF-8 for sed, substitute the
# @@VERSION@@ / @@THISYEAR@@ / @@COMPANY@@ placeholders, convert back.
iconv -f UTF-16 -t UTF-8 $target \
|sed -e "s%@@VERSION@@%$DateStr%g" \
|sed -e "s%@@THISYEAR@@%$ThisYear%g" \
|sed -e s%@@COMPANY@@%OME\(Open\ Mail\ Environment:http:\/\/mac-ome.jp\)%g \
|iconv -f UTF-8 -t UTF-16 > $genFile.strings
done
|
<reponame>olujedai/sw-api<filename>src/character/character.module.ts
import { Module } from '@nestjs/common';
import { CharacterService } from './character.service';
import { RequestModule } from '../request/request.module';
import { UtilsModule } from '../utils/utils.module';
// Nest module wiring for character data: provides and exports
// CharacterService, which relies on RequestModule (HTTP access) and
// UtilsModule (shared helpers).
@Module({
  providers: [CharacterService],
  imports: [RequestModule, UtilsModule],
  exports: [CharacterService],
})
export class CharacterModule {}
|
#!/bin/bash -e
#SETUP GPS
# Install the GPS daemon (gpsd), its client tools and Python bindings, plus
# ntp for time sync, inside the image chroot (on_chroot runs the heredoc
# commands there). The heredoc content must not be altered.
on_chroot << EOG
apt-get install -y gpsd gpsd-clients python-gps
apt-get install -y ntp
EOG
|
package com.qtimes.views.level;
import android.content.Context;
import android.content.res.Resources;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Canvas;
import android.util.AttributeSet;
import android.view.ViewGroup;
import android.widget.RelativeLayout;
import com.qtimes.views.R;
import com.qtimes.views.ViewUtils;
/**
 * Badge view that renders a user or anchor (broadcaster) level: a
 * level-dependent background image with the numeric level (NumView)
 * drawn on its right half.
 */
public class LevelView extends RelativeLayout {

    private String TAG = "LevelView";

    NumView numView;

    public LevelView(Context context) {
        super(context);
        init();
    }

    public LevelView(Context context, AttributeSet attrs) {
        super(context, attrs);
        init();
    }

    public LevelView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        init();
    }

    /**
     * Sizes the view to the level-1 background bitmap and attaches the
     * number view, right-aligned and vertically centered on the right half.
     */
    private void init() {
        Resources resources = getResources();
        Bitmap bitmap = BitmapFactory.decodeResource(resources, R.drawable.ic_user_lv_bg_1);
        int width = bitmap.getWidth();
        int height = bitmap.getHeight();
        ViewGroup.LayoutParams params = new ViewGroup.LayoutParams(width, height);
        setLayoutParams(params);

        numView = new NumView(getContext());
        LayoutParams layoutParams = new LayoutParams(width / 2, height);
        layoutParams.addRule(RelativeLayout.ALIGN_PARENT_RIGHT);
        layoutParams.addRule(RelativeLayout.CENTER_VERTICAL);
        layoutParams.rightMargin = ViewUtils.dp2px(getContext(), 1);
        numView.setLayoutParams(layoutParams);
        addView(numView);
    }

    /** Convenience overload: shows an anchor level. */
    public void setLevel(int level) {
        setLevel(LevelType.ANCHOR, level);
    }

    /**
     * Shows a level of the given type.
     *
     * @param type  level type, {@link LevelType#USER} or {@link LevelType#ANCHOR}
     * @param level raw level value (clamped to the type's valid range)
     */
    public void setLevel(String type, int level) {
        setLevelBackground(type, level);
    }

    /**
     * Clamps a level to the valid range for its type:
     * user levels to [1, 150], anchor levels to [1, 50].
     */
    public int formatLevel(String type, int level) {
        if (level <= 0) level = 1;
        switch (type) {
            case LevelType.USER:
                if (level > 150) level = 150;
                break;
            case LevelType.ANCHOR:
                if (level > 50) level = 50;
                break;
        }
        return level;
    }

    /**
     * Picks the background drawable for the (clamped) level and pushes the
     * number into the NumView. User backgrounds change every 30 levels,
     * anchor backgrounds every 10.
     */
    private void setLevelBackground(String type, int level) {
        int resBack = 0;
        level = formatLevel(type, level);
        switch (type) {
            case LevelType.USER:
                if (level <= 30) {
                    resBack = R.drawable.ic_user_lv_bg_1;
                } else if (level > 30 && level <= 60) {
                    resBack = R.drawable.ic_user_lv_bg_2;
                } else if (level > 60 && level <= 90) {
                    resBack = R.drawable.ic_user_lv_bg_3;
                } else if (level > 90 && level <= 120) {
                    resBack = R.drawable.ic_user_lv_bg_4;
                } else if (level > 120 && level <= 150) { // fixed: was bitwise '&'; use logical '&&' like the sibling branches
                    resBack = R.drawable.ic_user_lv_bg_5;
                }
                break;
            case LevelType.ANCHOR:
                if (level <= 10) {
                    resBack = R.drawable.ic_zhubo_lv_bg_1;
                } else if (level > 10 && level <= 20) {
                    resBack = R.drawable.ic_zhubo_lv_bg_2;
                } else if (level > 20 && level <= 30) {
                    resBack = R.drawable.ic_zhubo_lv_bg_3;
                } else if (level > 30 && level <= 40) {
                    resBack = R.drawable.ic_zhubo_lv_bg_4;
                } else if (level > 40 && level <= 50) {
                    resBack = R.drawable.ic_zhubo_lv_bg_5;
                }
                break;
        }
        if (resBack != 0) {
            setBackgroundResource(resBack);
            numView.setLevel(level);
        }
    }

    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);
    }

    /** Level type identifiers. */
    public interface LevelType {
        String USER = "user"; // user level
        String ANCHOR = "anchor"; // anchor (broadcaster) level
    }
}
|
<filename>test/unit/lib/find-parent-form.js
const findParentForm = require('../../../src/lib/find-parent-form').findParentForm;
// Unit tests for findParentForm: it walks up parentNode links from an
// element and returns the nearest ancestor whose nodeName is 'FORM'
// (undefined when no such ancestor exists).
describe('findParentForm', () => {
  test('returns undefined if the element has no parentNode', () => {
    expect(findParentForm({})).toBeUndefined();
  });

  test(
    'checks recursively and returns undefined if no parentNode is a form',
    () => {
      expect(findParentForm({
        parentNode: {
          parentNode: {
            parentNode: {}
          }
        }
      })).toBeUndefined();
    }
  );

  test('returns the parent node if it is a form', () => {
    const form = {
      nodeName: 'FORM'
    };

    expect(findParentForm({
      parentNode: form
    })).toBe(form);
  });

  test(
    'checks recursively until it finds a parent node that is a form',
    () => {
      const form = {
        nodeName: 'FORM'
      };

      expect(findParentForm({
        parentNode: {
          parentNode: {
            parentNode: form
          }
        }
      })).toBe(form);
    }
  );
});
|
def matrix_to_list(matrix):
    """Return a list of lists copying each row of ``matrix``.

    Accepts any iterable of iterables (lists, tuples, generators of rows)
    and returns ``[]`` for an empty matrix. Each row is shallow-copied, so
    mutating the result does not affect the input rows.
    """
    # list(row) copies the row in one step — equivalent to the original
    # element-by-element append loop, but clearer.
    return [list(row) for row in matrix]


matrix = [[1, 2, 3],
          [4, 5, 6],
          [7, 8, 9]]
print(matrix_to_list(matrix))
class ImageProcessor:
    """Dispatches image transforms on a scene and mirrors the active
    transform name into an info label."""

    def __init__(self, scene, infoLabel_2):
        self.scene = scene
        self.infoLabel_2 = infoLabel_2

    def _apply_transform(self, transform_id, label):
        # Common start-up sequence: record the active transform, update the
        # label text, then invoke the scene's matching transform<N>() method.
        self.scene.currentTransform = transform_id
        self.infoLabel_2.setText(label)
        getattr(self.scene, 'transform%d' % transform_id)()

    def startDFT(self):
        self._apply_transform(6, 'DFT')

    def startDenoising(self):
        self._apply_transform(8, 'Denoising')

    def startDifference(self):
        self._apply_transform(7, 'Difference')
<reponame>mnemonic-no/commons
package no.mnemonic.commons.testtools;
import org.mockito.ArgumentMatcher;
import org.mockito.Mockito;
import java.util.function.Predicate;
/**
 * Static helpers that make Mockito argument matching with predicates
 * concise: {@code verify(mock).method(match(arg -> ...))}.
 */
public class MockitoTools {

    /**
     * Simplify testing with mockito and argThat by doing
     * <p>
     * verify(mock).method(match(arg->arg.isSomething()));
     *
     * @param predicate predicate to match the expected argument
     * @param <T> the argument type
     * @return a mock reporter which matches the argument using this predicate
     */
    public static <T> T match(Predicate<T> predicate) {
        return Mockito.argThat(new ArgMatcher<>(predicate));
    }

    /**
     * Same as {@link #match(Predicate)}, but with a specific expected argument class, to use if the verified
     * method takes a superclass of expected argument
     *
     * verify(mock).method(match(c->c.getValue()==expectedValue, ExpectedClass.class))
     * @param expectedClass Expected argument class
     * @param predicate predicate to test on the argument
     * @param <T> mocked method argument type
     * @param <U> expected argument type, subclass of T
     * @return true if argument is instanceof U and passes the predicate test
     */
    public static <T, U extends T> T match(Predicate<T> predicate, Class<U> expectedClass) {
        return Mockito.argThat(new ArgMatcher<>(o -> {
            // Reject arguments of the wrong runtime type before the cast.
            if (!expectedClass.isInstance(o)) return false;
            //noinspection unchecked
            U target = (U) o;
            return predicate.test(target);
        }));
    }

    //helpers

    // Adapts a java.util.function.Predicate to Mockito's ArgumentMatcher.
    private static class ArgMatcher<T> implements ArgumentMatcher<T> {
        final Predicate<T> predicate;

        ArgMatcher(Predicate<T> predicate) {
            this.predicate = predicate;
        }

        @Override
        public boolean matches(T t) {
            return predicate.test(t);
        }
    }
}
|
<gh_stars>0
import React, { Component } from 'react';
import Moment from 'react-moment';
class Contact extends Component {
render() {
const { contact } = this.props;
let formattedDate = <Moment format="MM/DD/YY" >{contact.lastContact}</Moment>
return (
<div>
<h4>{contact.name} - {contact.title}</h4>
<p>Last Contact: {formattedDate}</p>
<p>Organzation: {contact.organization} - {contact.type}</p>
<p>Notes: {contact.notes}</p>
</div>
)
}
}
export default Contact; |
<reponame>smagill/opensphere-desktop
package io.opensphere.core.image;
/**
 * A service that provides images. Being a {@link FunctionalInterface}, it
 * may be implemented with a lambda or method reference.
 *
 * @param <T> The type of object to be used to look up the images.
 */
@FunctionalInterface
public interface ImageProvider<T>
{
    /**
     * Retrieve an image.
     *
     * @param key The key for the image.
     * @return The image, or <code>null</code> if it cannot be retrieved.
     */
    Image getImage(T key);
}
|
from unittest import mock
import pytest
from rest_framework.serializers import ValidationError
from drf_recaptcha.client import RecaptchaResponse
from drf_recaptcha.validators import ReCaptchaV2Validator, ReCaptchaV3Validator
# Smoke test: get_response() wraps the verification result in a
# RecaptchaResponse for both the v2 and v3 validators.
@pytest.mark.parametrize(
    ("validator_class", "params"),
    [
        (ReCaptchaV2Validator, {}),
        (ReCaptchaV3Validator, {"action": "test_action", "required_score": 0.4}),
    ],
)
def test_recaptcha_validator_get_response_success(validator_class, params):
    validator = validator_class(secret_key="TEST_SECRET_KEY", **params)
    assert isinstance(validator.get_response("test_token"), RecaptchaResponse)
# NOTE(review): this test is byte-identical to the "success" test above —
# it never forces a failing verification, so despite its name it exercises
# the same path. Consider mocking the client to return an error response.
@pytest.mark.parametrize(
    ("validator_class", "params"),
    [
        (ReCaptchaV2Validator, {}),
        (ReCaptchaV3Validator, {"action": "test_action", "required_score": 0.4}),
    ],
)
def test_recaptcha_validator_get_response_fail(validator_class, params):
    validator = validator_class(secret_key="TEST_SECRET_KEY", **params)
    assert isinstance(validator.get_response("test_token"), RecaptchaResponse)
# Calling the validator with a valid RecaptchaResponse (v3 additionally
# needs a matching action and a score above the threshold) must not raise.
@pytest.mark.parametrize(
    ("validator_class", "params", "response"),
    [
        (ReCaptchaV2Validator, {}, RecaptchaResponse(is_valid=True)),
        (
            ReCaptchaV3Validator,
            {"action": "test_action", "required_score": 0.4},
            RecaptchaResponse(
                is_valid=True, extra_data={"score": 0.6, "action": "test_action"}
            ),
        ),
    ],
)
def test_recaptcha_validator_call_success(validator_class, params, response):
    validator = validator_class(secret_key="TEST_SECRET_KEY", **params)
    # Stub out the network call so only the validation logic is exercised.
    validator.get_response = mock.Mock(return_value=response)
    try:
        validator("test_token")
    except ValidationError:
        pytest.fail("Validation is not passed")
# Failure matrix: each case pairs a stubbed RecaptchaResponse with the
# exact ValidationError text expected — invalid responses, v2 responses
# carrying unexpected v3 extras, v3 responses missing a score, scores at
# or below the threshold, and a mismatched action.
@pytest.mark.parametrize(
    ("validator_class", "params", "response", "error"),
    [
        (
            ReCaptchaV2Validator,
            {},
            RecaptchaResponse(is_valid=False),
            "[ErrorDetail(string='Error verifying reCAPTCHA, please try again.', code='captcha_invalid')]",
        ),
        (
            ReCaptchaV2Validator,
            {},
            RecaptchaResponse(
                is_valid=True, extra_data={"score": 0.6, "action": "test_action"}
            ),
            "[ErrorDetail(string='Error verifying reCAPTCHA, please try again.', code='captcha_error')]",
        ),
        (
            ReCaptchaV3Validator,
            {"action": "test_action", "required_score": 0.4},
            RecaptchaResponse(is_valid=False),
            "[ErrorDetail(string='Error verifying reCAPTCHA, please try again.', code='captcha_invalid')]",
        ),
        (
            ReCaptchaV3Validator,
            {"action": "test_action", "required_score": 0.4},
            RecaptchaResponse(is_valid=True),
            "[ErrorDetail(string='Error verifying reCAPTCHA, please try again.', code='captcha_error')]",
        ),
        (
            ReCaptchaV3Validator,
            {"action": "test_action", "required_score": 0.4},
            RecaptchaResponse(is_valid=True, extra_data={"score": 0.3}),
            "[ErrorDetail(string='Error verifying reCAPTCHA, please try again.', code='captcha_invalid')]",
        ),
        (
            ReCaptchaV3Validator,
            {"action": "test_action", "required_score": 0.4},
            RecaptchaResponse(is_valid=True, extra_data={"score": 0.5}),
            "[ErrorDetail(string='Error verifying reCAPTCHA, please try again.', code='captcha_invalid')]",
        ),
        (
            ReCaptchaV3Validator,
            {"action": "test_action", "required_score": 0.4},
            RecaptchaResponse(
                is_valid=True, extra_data={"score": 0.5, "action": "other_action"}
            ),
            "[ErrorDetail(string='Error verifying reCAPTCHA, please try again.', code='captcha_invalid')]",
        ),
    ],
)
def test_recaptcha_validator_call_fail(validator_class, params, response, error):
    validator = validator_class(secret_key="TEST_SECRET_KEY", **params)
    # Stub the network call; the validator must reject each stubbed response.
    validator.get_response = mock.Mock(return_value=response)
    with pytest.raises(ValidationError) as exc_info:
        validator("test_token")
    assert str(exc_info.value) == error
# The validator must pick the client IP out of the serializer field's
# request context (X-Forwarded-For header). DRF_RECAPTCHA_TESTING=True
# makes the validator skip the real verification call.
@pytest.mark.parametrize(
    ("validator_class", "params"),
    [
        (ReCaptchaV2Validator, {}),
        (ReCaptchaV3Validator, {"action": "test_action", "required_score": 0.4}),
    ],
)
def test_recaptcha_validator_set_context(validator_class, params, settings):
    settings.DRF_RECAPTCHA_TESTING = True
    validator = validator_class(secret_key="TEST_SECRET_KEY", **params)
    assert validator.recaptcha_client_ip == ""
    serializer_field = mock.Mock(
        context={"request": mock.Mock(META={"HTTP_X_FORWARDED_FOR": "4.3.2.1"})}
    )
    validator("test_token", serializer_field)
    assert validator.recaptcha_client_ip == "4.3.2.1"
|
<filename>src/app/zelda/event/GoDownStairsEvent.ts<gh_stars>1-10
import { Animation } from '../Animation';
import { AnimationListener } from '../AnimationListener';
import { Position, PositionData } from '../Position';
import { Event, EventData } from './Event';
import { ZeldaGame } from '../ZeldaGame';
import { CurtainOpeningState } from '../CurtainOpeningState';
import { MainGameState } from '../MainGameState';
declare let game: ZeldaGame;
/**
 * Occurs when Link steps on a stairwell or doorway on the overworld map.
 */
export class GoDownStairsEvent extends Event<GoDownStairsEventData> implements AnimationListener {

    static readonly EVENT_TYPE: string = 'goDownStairs';

    destMap: string;     // destination map name
    destScreen: Position; // destination screen within that map
    destPos: Position;    // Link's position on that screen
    private readonly animate: boolean; // play Link's enter-cave animation first
    private readonly curtainOpenNextScreen: boolean; // open the next screen with a curtain effect

    constructor(tile: Position, destMap: string, destScreen: Position, destPos: Position, animate: boolean,
                curtainOpenNextScreen: boolean) {
        super(tile);
        this.destMap = destMap;
        this.destScreen = destScreen;
        this.destPos = destPos;
        this.animate = animate;
        this.curtainOpenNextScreen = curtainOpenNextScreen;
    }

    /**
     * Called when the enter-cave animation finishes: switches to the
     * destination map/screen, optionally via a curtain-opening transition.
     */
    animationCompleted(anim: Animation) {
        if (this.curtainOpenNextScreen) {
            game.setMap(this.destMap, this.destScreen, this.destPos, false);
            game.setState(new CurtainOpeningState(game.state as MainGameState));
        }
        else {
            game.setMap(this.destMap, this.destScreen, this.destPos);
        }
    }

    /** Returns a deep copy (positions cloned) of this event. */
    clone(): GoDownStairsEvent {
        return new GoDownStairsEvent(this.getTile().clone(), this.destMap, this.destScreen.clone(),
            this.destPos.clone(), this.animate, this.curtainOpenNextScreen);
    }

    /**
     * Stops the music and, when animated, starts Link's enter-cave
     * animation (which triggers animationCompleted). Always returns false.
     * NOTE(review): in the non-animated case nothing switches the map here —
     * confirm that path is handled elsewhere.
     */
    execute(): boolean {
        game.audio.stopMusic();
        if (this.animate) {
            game.link.enterCave(this);
        }
        return false;
    }

    getAnimate(): boolean {
        return this.animate;
    }

    /** True when Link is walking up onto the trigger tile while facing up. */
    shouldOccur(): boolean {
        return game.link.isWalkingUpOnto(this.tile) && game.link.dir === 'UP';
        //return game.link.isEntirelyOn(this.tile) && game.link.dir === 'UP';
    }

    /** Serializes this event to its JSON data form. */
    toJson(): GoDownStairsEventData {
        return {
            type: GoDownStairsEvent.EVENT_TYPE,
            tile: this.tile.toJson(),
            animate: this.animate,
            destMap: this.destMap,
            destScreen: this.destScreen.toJson(),
            destPos: this.destPos.toJson()
        };
    }

    update() {
        // Do nothing
    }
}

/** JSON-serializable form of a GoDownStairsEvent. */
export interface GoDownStairsEventData extends EventData {
    animate: boolean;
    destMap: string;
    destScreen: PositionData;
    destPos: PositionData;
}
|
import { Command, flags } from "@oclif/command";
import fs from "fs";
import http from "http";
import open from "open";
import path from "path";
import qs from "querystring";
import url from "url";
import { isLoggedIn } from "../lib/auth";
import {
defaultConfigPath,
DEFAULT_SITE,
writeSiteConfig,
} from "../lib/config";
import pkceChallenge from "pkce-challenge";
import { callAPI, getStateString } from "../lib/cli-api";
import { println } from "../lib/print";
import { ChildProcess } from "child_process";
// Query parameters received on the local OAuth redirect.
type ResponseParams = { [key: string]: string | string[] | undefined };

// Kept at module scope so Login.catch() can close the server on error.
let server_: http.Server | undefined;

// Starts a one-shot HTTP server on hostname:port and resolves with the
// query parameters of the first request received (the OAuth redirect).
// Serves a success or "cancelled" page — or a 302 when ?next= is present —
// then closes the server. `ready` fires once the server is listening.
// NOTE(review): the FIRST request of any kind resolves the promise (e.g. a
// favicon probe would too) — confirm this is acceptable.
const waitForLogin = (
    hostname: string,
    port: number,
    ready: () => void
): Promise<ResponseParams> => {
    // Load both result pages up front, in parallel with the login round-trip.
    const pagePromise = fs.promises.readFile(
        path.join(__dirname, "../..", "templates", "logged-in.html"),
        "utf-8"
    );
    const loginCancelledPagePromise = fs.promises.readFile(
        path.join(__dirname, "../..", "templates", "logged-in-cancelled.html"),
        "utf-8"
    );
    return new Promise((resolve) => {
        server_ = http.createServer(async (req, res) => {
            const urlInfo = url.parse(req.url || "");
            const query = qs.parse(urlInfo.query || "");
            resolve(query);
            if (query.next) {
                // Caller asked to be redirected somewhere else after login.
                res.statusCode = 302;
                res.setHeader("Location", query.next);
                res.end();
            } else {
                res.statusCode = 200;
                res.setHeader("Content-Type", "text/html");
                if (query.cancelled) {
                    res.end(await loginCancelledPagePromise);
                } else {
                    res.end(await pagePromise);
                }
            }
            // One-shot server: stop listening after the first request.
            server_?.close();
        });
        server_.listen(port, hostname, ready);
    });
};
const DEFAULT_HOSTNAME = "127.0.0.1";
const DEFAULT_PORT = 9898;

// Performs the browser-based OAuth2 login (authorization-code flow with
// PKCE): opens the site's /cli/login page, waits for the redirect on a
// local one-shot server, exchanges the code for an access token, and
// stores it in the site config. Throws on cancellation, state mismatch,
// missing code or token-exchange errors.
export const login = async ({
    site,
    hostname = DEFAULT_HOSTNAME,
    port = DEFAULT_PORT,
    config = defaultConfigPath(),
}: {
    site: string;
    hostname?: string;
    port?: number;
    config?: string;
}): Promise<void> => {
    // PKCE: the challenge goes in the authorize URL, the verifier in the
    // later token call.
    const { code_challenge, code_verifier } = pkceChallenge(43);
    // Random state string, echoed back by the server, guards against CSRF.
    const state = getStateString();
    const redirectURL = `http://${hostname}:${port}`;
    let loginURL = `https://${site}/cli/login?client_id=quip-cli&response_type=code&redirect_uri=${encodeURIComponent(
        redirectURL
    )}&state=${state}&code_challenge=${code_challenge}&code_challenge_method=S256`;
    println(
        `opening login URL in your browser. Log in to Quip there.\n${loginURL}`
    );
    let currentWindow: ChildProcess | undefined;
    const responseParams = await waitForLogin(hostname, port, async () => {
        currentWindow = await open(loginURL);
    });
    currentWindow?.emit("close");
    if (responseParams.cancelled) {
        throw new Error("Login cancelled.");
    } else if (responseParams.state !== state) {
        throw new Error("API returned invalid state.");
    } else if (!responseParams.code || responseParams.error) {
        throw new Error(
            `Login Failed: ${
                responseParams.error ||
                `no code returned, got ${JSON.stringify(
                    responseParams,
                    null,
                    2
                )}`
            }`
        );
    }
    // Exchange the one-time code (plus PKCE verifier) for an access token.
    // NOTE(review): redirect_uri is URL-encoded here before being handed to
    // callAPI — verify callAPI does not encode it again (double encoding).
    const tokenResponse = await callAPI(site, "token", "post", {
        client_id: "quip-cli",
        grant_type: "authorization_code",
        redirect_uri: encodeURIComponent(redirectURL),
        code_verifier: code_verifier,
        code: responseParams.code,
    });
    // Servers differ on the field casing; accept either form.
    const accessToken = tokenResponse.accessToken || tokenResponse.access_token;
    if (!accessToken || tokenResponse.error) {
        throw new Error(
            `Failed to acquire access token: ${
                tokenResponse.error
            } - response: ${JSON.stringify(tokenResponse, null, 2)}`
        );
    }
    await writeSiteConfig(config, site, { accessToken });
};
// oclif command wrapper around login(): parses flags, short-circuits when
// already logged in, and supports a direct --with-token path that skips
// the browser flow entirely.
export default class Login extends Command {
    static description =
        "Logs in to Quip and stores credentials in the .quiprc file";

    static flags = {
        help: flags.help({ char: "h" }),
        force: flags.boolean({
            char: "f",
            description:
                "forces a re-login even if a user is currently logged in",
        }),
        site: flags.string({
            char: "s",
            description:
                "use a specific quip site rather than the standard quip.com login",
            default: DEFAULT_SITE,
        }),
        "with-token": flags.string({
            char: "t",
            description:
                "log in users with your specified access token instead of redirecting to a login page.\n" +
                "SEE ALSO: https://quip.com/dev/liveapps/1.x.x/reference/auth/#oauth2",
            helpValue: "token",
        }),
        port: flags.integer({
            hidden: true,
            description:
                "Use a custom port for the OAuth redirect server (defaults to 9898)",
            default: DEFAULT_PORT,
        }),
        hostname: flags.string({
            hidden: true,
            description:
                "Use a custom hostname for the OAuth redirect server (defaults to 127.0.0.1)",
            default: DEFAULT_HOSTNAME,
        }),
        config: flags.string({
            hidden: true,
            description: "Use a custom config file (default ~/.quiprc)",
            default: () => defaultConfigPath(),
        }),
    };

    static args = [];

    // Ensure the one-shot redirect server is shut down when anything throws.
    async catch(error: Error) {
        server_?.close();
        throw error;
    }

    async run() {
        const { flags } = this.parse(Login);
        const { site, force, hostname, port, config } = flags;
        const accessToken = flags["with-token"];
        // displays error message if command has "--with-token" flag without passing a value.
        if (accessToken === "") {
            this.error("Flag --with-token expects a value.");
            return;
        }
        // Skip the whole flow when credentials already exist (unless --force).
        if (!force && (await isLoggedIn(config, site))) {
            let alt = "";
            if (site === DEFAULT_SITE) {
                alt = " or --site to log in to a different site";
            }
            this.log(
                `You're already logged in to ${site}. Pass --force to log in again${alt}.`
            );
            return;
        }
        try {
            if (accessToken) {
                // Token supplied directly: just persist it, no browser flow.
                await writeSiteConfig(config, site, { accessToken });
            } else {
                await login({ site, hostname, port, config });
            }
            this.log("Successfully logged in.");
        } catch (e) {
            this.error(e);
        }
    }
}
|
import React, {Component} from 'react';
import {
StyleSheet,
View,
Text,
Image,
FlatList,
} from 'react-native';
class Products extends Component {
constructor(props) {
super(props);
this.state = {
data: [],
};
}
componentDidMount() {
const data = [
{
id: 1,
name: "Product 1",
category: "category 1",
price: "$10.00",
image: "./images/product1.jpg"
},
{
id: 2,
name: "Product 2",
category: "category 2",
price: "$ |
import numpy as np
class ReinforcementLearningModel:
    """Toy model exposing a dynamics step that reduces an input tensor to
    per-position hidden states plus a scalar reward."""

    def dynamics_model(self, input_tensor, training=True):
        """Reduce ``input_tensor`` along its last axis.

        Training mode: hidden states are the mean over the last axis and
        the reward is the sum of all elements. Inference mode: hidden
        states are the max over the last axis and the reward is the mean
        of all elements.

        Returns ``(hidden_states, reward)`` where ``hidden_states`` keeps
        the input rank (last axis collapsed to length 1).
        """
        # Select the reduction pair for the current mode, then apply it.
        hidden_reduce = np.mean if training else np.max
        reward_reduce = np.sum if training else np.mean
        hidden_states = hidden_reduce(input_tensor, axis=-1, keepdims=True)
        reward = reward_reduce(input_tensor)
        return hidden_states, reward
# Example usage: push a random (4, 3, 3, 2) tensor through the training path.
input_tensor = np.random.rand(4, 3, 3, 2)  # Example input tensor
model = ReinforcementLearningModel()
hidden_states, reward = model.dynamics_model(input_tensor, training=True)
print(hidden_states.shape)  # Output: (4, 3, 3, 1)
print(reward)  # Output: (single scalar value)
-- All products priced between 50 and 100, inclusive.
SELECT *
FROM Product
WHERE price BETWEEN 50 AND 100;
def segment_list(input_list, threshold):
    """Split ``input_list`` into consecutive segments.

    A new segment starts whenever the absolute difference between the next
    value and the LAST value of the current segment exceeds ``threshold``.

    Fixes an IndexError on empty input: an empty list now yields ``[]``.

    :param input_list: sequence of numbers to segment
    :param threshold: maximum allowed |gap| within a segment
    :return: list of segments (lists), in original order
    """
    if not input_list:
        return []
    segmented_list = []
    current_segment = [input_list[0]]
    for value in input_list[1:]:
        if abs(value - current_segment[-1]) <= threshold:
            current_segment.append(value)
        else:
            segmented_list.append(current_segment)
            current_segment = [value]
    # Flush the final, still-open segment.
    segmented_list.append(current_segment)
    return segmented_list
#!/bin/sh
##########################################################################
# If not stated otherwise in this file or this component's Licenses.txt
# file the following copyright and licenses apply:
#
# Copyright 2015 RDK Management
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##########################################################################
#######################################################################
# Copyright [2014] [Cisco Systems, Inc.]
#
# Licensed under the Apache License, Version 2.0 (the \"License\");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an \"AS IS\" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#######################################################################
#------------------------------------------------------------------
#------------------------------------------------------------------
source /etc/utopia/service.d/ulog_functions.sh
SERVICE_NAME="mldproxy"
SELF_NAME="`basename $0`"
BIN=mldproxy
CONF_FILE=/tmp/mldproxy.conf
# Write a fresh mldproxy configuration and (re)start the daemon.
# The config is assembled in a PID-suffixed temp file so a concurrent
# invocation never sees a half-written $CONF_FILE.
do_start_mldproxy () {
    LOCAL_CONF_FILE=/tmp/mldproxy.conf$$
    # Stop any running instance before rewriting its configuration.
    killall $BIN
    rm -rf $LOCAL_CONF_FILE
    #echo "fastleave" >> $LOCAL_CONF_FILE
    # Declare the WAN interface as the upstream (multicast source) side only
    # while the WAN is up; otherwise disable it in the proxy config.
    if [ "started" = "`sysevent get wan-status`" ] ; then
        echo "phyint $WAN_IFNAME upstream" >> $LOCAL_CONF_FILE
    else
        echo "phyint $WAN_IFNAME disabled" >> $LOCAL_CONF_FILE
    fi
    # Commenting brlan0 downstream from mldproxy config (RDKB-10413)
    # echo "phyint $SYSCFG_lan_ifname downstream" >> $LOCAL_CONF_FILE
    cat $LOCAL_CONF_FILE > $CONF_FILE
    rm -f $LOCAL_CONF_FILE
    mld=`which mldproxy`
    if [ -z "$mld" ]; then
        echo " mldproxy not found "
    else
        # -c: config file, -f: presumably "run in foreground" -- TODO confirm
        # against the mldproxy binary's usage text.
        $BIN -c $CONF_FILE -f
    fi
}
# Load the state this service depends on: SYSCFG_mldproxy_enabled and
# SYSCFG_lan_ifname (exported by utctx_cmd via eval) plus the current WAN
# interface name from sysevent.
service_init ()
{
    eval `utctx_cmd get mldproxy_enabled lan_ifname`
    WAN_IFNAME=`sysevent get current_wan_ifname`
}
# Start mldproxy and publish "started", but only when a WAN interface is
# known and the feature is enabled in syscfg.
service_start ()
{
    ulog ${SERVICE_NAME} status "starting ${SERVICE_NAME} service"
    # POSIX test(1) uses '=' for string comparison; '==' is a bashism and
    # this script runs under /bin/sh.
    if [ "" != "$WAN_IFNAME" ] && [ "1" = "$SYSCFG_mldproxy_enabled" ] ; then
        do_start_mldproxy
        # Clear stale error info before announcing the new status.
        sysevent set ${SERVICE_NAME}-errinfo
        sysevent set ${SERVICE_NAME}-status "started"
    fi
}
# Stop the mldproxy daemon, remove its configuration file and publish
# "stopped" via sysevent.
service_stop ()
{
    ulog ${SERVICE_NAME} status "stopping ${SERVICE_NAME} service"
    killall $BIN
    rm -rf $CONF_FILE
    # Clear stale error info before announcing the new status.
    sysevent set ${SERVICE_NAME}-errinfo
    sysevent set ${SERVICE_NAME}-status "stopped"
}
# Entry
# Dispatch on the sysevent/CLI event name passed as $1.
service_init

case "$1" in
    ${SERVICE_NAME}-start)
        service_start
        ;;
    ${SERVICE_NAME}-stop)
        service_stop
        ;;
    ${SERVICE_NAME}-restart)
        service_stop
        service_start
        ;;
    # For link-state events the proxy runs only while BOTH wan and lan are
    # "started", and is stopped as soon as either side reports "stopped".
    wan-status)
        CURRENT_WAN_STATUS=`sysevent get wan-status`
        CURRENT_LAN_STATUS=`sysevent get lan-status`
        if [ "started" = "$CURRENT_WAN_STATUS" ] && [ "started" = "$CURRENT_LAN_STATUS" ] ; then
            service_start
        elif [ "stopped" = "$CURRENT_WAN_STATUS" ] || [ "stopped" = "$CURRENT_LAN_STATUS" ] ; then
            service_stop
        fi
        ;;
    lan-status)
        CURRENT_WAN_STATUS=`sysevent get wan-status`
        CURRENT_LAN_STATUS=`sysevent get lan-status`
        if [ "started" = "$CURRENT_WAN_STATUS" ] && [ "started" = "$CURRENT_LAN_STATUS" ] ; then
            service_start
        elif [ "stopped" = "$CURRENT_WAN_STATUS" ] || [ "stopped" = "$CURRENT_LAN_STATUS" ] ; then
            service_stop
        fi
        ;;
    *)
        echo "Usage: $SELF_NAME [ ${SERVICE_NAME}-start | ${SERVICE_NAME}-stop | ${SERVICE_NAME}-restart | wan-status | lan-status ]" >&2
        exit 3
        ;;
esac
|
REM Recreate the edu_sso database from scratch (dev helper); drops any
REM existing data, then creates an empty utf8/utf8_general_ci database.
REM NOTE(review): the MySQL root password is hard-coded on the command line
REM (visible in process listings and shell history); move it to an option
REM file. The trailing "pause" suggests a Windows batch file -- confirm.
mysql -uroot -p#qwe$123 -h127.0.0.1 -e "drop database if exists edu_sso"
mysql -uroot -p#qwe$123 -h127.0.0.1 -e "CREATE DATABASE edu_sso DEFAULT CHARACTER SET utf8 COLLATE utf8_general_ci"
pause
#!/bin/bash
# created: 09-sep-2020 16:00PM (GMT)
# objective(s):
# The following script simulates the deployment of resources
# on the gcp platform using google cloud deployment manager

echo "previewing the deployment"
gcloud deployment-manager deployments create basicdep \
    --config=config.yaml --preview

sleep 15
# -r: do not mangle backslashes in the user's reply.
read -r -p "proceed(yes/no)? " proceed

# Quote the variable: an empty reply previously made [ ] fail with a syntax
# error instead of taking the else branch.
if [ "$proceed" == 'yes' ] ; then
    # "update" with no --config commits the previewed deployment.
    gcloud deployment-manager deployments update basicdep
    exit 0
else
    echo "usage: yes"
    exit 2
fi
<filename>index.js<gh_stars>0
const semver = require('semver')
const kPluginMeta = Symbol.for('smallify.plugin.meta')
module.exports = function wrapper (fn, opts = {}) {
if (typeof fn !== 'function') {
throw new TypeError(
`smallify-plugin expects a function, instead got a '${typeof fn}'`
)
}
const bVer = require('smallify/package.json').version
const dVer = '>=1.0.1'
if (!semver.satisfies(bVer, dVer)) {
throw new Error(
`smallify-plugin: expected '${dVer}' smallify version, '${bVer}' is installed`
)
}
fn[kPluginMeta] = opts
return fn
}
|
# Filename suffix of each training snapshot: iteration count is padded with
# "0000" and followed by the caffemodel extension.
suffix='0000_inference.caffemodel'
echo 'Begin' &> log_detviplV4d2_D_test
# Evaluate the selected snapshot(s) -- only i=9 per the {9..9} range -- on
# the detviplV4d2 test set; all tool output is appended to the log file.
for i in {9..9}
do
    echo 'Testing' $i &>> log_detviplV4d2_D_test
    tools/test_net.py \
        --gpu 0 \
        --def models/full1_D/test_inference.prototxt \
        --net output/pvanet_full1_ohem_D/detviplV4d2_train/zf_faster_rcnn_iter_$i$suffix \
        --cfg cfgs/submit_160715_full_ohem_D_800x1440.yml \
        --imdb detviplV4d2_2016_test &>> log_detviplV4d2_D_test
done
|
<gh_stars>100-1000
#include "z3D/z3D.h"
#include "settings.h"
/* Decide how the merchant interaction proceeds. The return code drives
 * patched branch instructions (see inline comments). eventChkInf[3] bit
 * 0x20 is the "reward already obtained" flag set by EnGm_SetRewardFlag. */
s32 EnGm_CheckRewardFlag(void) {
    // 0: blt, skip equipment check
    // 1: beq, goto text id 304D, no item offer
    // 2: continue like normal
    if (gSettingsContext.shuffleMerchants != SHUFFLEMERCHANTS_OFF && (gSaveContext.eventChkInf[3] & 0x20) == 0) {
        return 0;
    }
    // equipment bit 2 presumably tracks Giant's Knife ownership for the
    // progressive Goron sword option -- TODO confirm bit layout.
    else if (gSettingsContext.progressiveGoronSword && ((gSaveContext.equipment >> 2) & 0x1) == 0) {
        return 1;
    }
    return 2;
}
/* Mark the merchant reward as obtained (eventChkInf[3] bit 0x20). */
void EnGm_SetRewardFlag(void) {
    gSaveContext.eventChkInf[3] |= 0x20;
}
/* True while merchant hints are enabled and the reward has not been
 * purchased yet; selects the custom hint text path. */
s32 EnGm_UseCustomText(void) {
    return (gSettingsContext.shuffleMerchants == SHUFFLEMERCHANTS_HINTS && (gSaveContext.eventChkInf[3] & 0x20) == 0);
}
/* Select the item this merchant sells: the shuffled Goron Mask check while
 * merchant shuffle is active and the reward flag (eventChkInf[3] & 0x20) is
 * still clear, otherwise the vanilla Giant's Knife purchase. */
s32 EnGm_ItemOverride(void) {
    if (gSettingsContext.shuffleMerchants != SHUFFLEMERCHANTS_OFF &&
        !(gSaveContext.eventChkInf[3] & 0x20)) {
        return GI_MASK_GORON;
    }
    return GI_SWORD_KNIFE;
}
|
from typing import List
def generate_column_addition_sql(table_name: str, column_names: List[str],
                                 column_type: str = "VARCHAR(255)") -> str:
    """Build one ALTER TABLE ... ADD COLUMN statement per column.

    Args:
        table_name: target table (inserted verbatim -- caller must ensure it
            is a trusted identifier, not user input).
        column_names: column identifiers to add; an empty list yields "".
        column_type: SQL type for every added column. Defaults to
            VARCHAR(255), preserving the original hard-coded behavior.

    Returns:
        The statements joined with newlines, one per column.
    """
    return '\n'.join(
        f"ALTER TABLE {table_name} ADD COLUMN {column} {column_type};"
        for column in column_names
    )
/**
 * Converts monetary amounts using a single, externally supplied exchange
 * rate.
 *
 * NOTE(review): the {@code from}/{@code to} currency codes are accepted but
 * never consulted -- the same rate is applied to every pair. Confirm whether
 * per-pair rates were intended.
 */
public class ExchangeRateCalculator {

    // Multiplier applied to every conversion; stays at Java's 0.0 field
    // default until setExchangeRate(...) is called.
    private double exchangeRate;

    /** Creates a calculator with no rate configured yet (rate = 0.0). */
    public ExchangeRateCalculator() {
    }

    /**
     * Sets the rate applied by subsequent conversions.
     *
     * @param exchangeRate units of target currency per unit of source currency
     */
    public void setExchangeRate(double exchangeRate) {
        this.exchangeRate = exchangeRate;
    }

    /**
     * Converts {@code amount} by multiplying it with the configured rate.
     *
     * @param amount the amount to convert
     * @param from   source currency code (currently unused)
     * @param to     target currency code (currently unused)
     * @return {@code amount * exchangeRate}
     */
    public double convertCurrency(double amount, String from, String to) {
        return exchangeRate * amount;
    }
}
#!/bin/bash

# conda-build install script for the R "treemap" package: build from source
# on platforms with a native R toolchain, otherwise copy the package files
# into the R library directly.
if [[ $target_platform =~ linux.* ]] || [[ $target_platform == win-32 ]] || [[ $target_platform == win-64 ]] || [[ $target_platform == osx-64 ]]; then
  # DISABLE_AUTOBREW: presumably keeps conda's toolchain instead of
  # Homebrew's on macOS -- TODO confirm against conda-forge R recipes.
  export DISABLE_AUTOBREW=1
  $R CMD INSTALL --build .
else
  mkdir -p $PREFIX/lib/R/library/treemap
  mv * $PREFIX/lib/R/library/treemap
fi
#!/bin/bash

# fail on some errors
set -e

# our output path for .css files
# NOTE(review): OUTPUT_DIR is defined but never referenced below; the paths
# are repeated literally in mkdir and sass.
OUTPUT_DIR=./../dist/static/css/

# run our commands from this directory
cd "$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"

# create the directory if it doesn't exist already
mkdir -p ./../dist/static/css

# sass --watch blocks until interrupted, recompiling on every change.
sass --watch ./../src/sass:./../dist/static/css
# files should be transpiled successfully
#!/bin/bash
# Script to deploy a very simple web application.
# The web app has a customizable image and some text.

# Render the landing page from a heredoc. PLACEHOLDER/WIDTH/HEIGHT build the
# image URL and PREFIX personalises the greeting; all four are expected in
# the environment -- unset variables expand to empty strings.
cat << EOM > /var/www/html/index.html
<html>
<head><title>Meow!</title></head>
<body>
<div style="width:800px;margin: 0 auto">

<!-- BEGIN -->
<center><img src="http://${PLACEHOLDER}/${WIDTH}/${HEIGHT}"></img></center>
<center><h2>Meow World!</h2></center>
Welcome to ${PREFIX}'s app. Training session
<!-- END -->

</div>
</body>
</html>
EOM

echo "Script complete."
$(document).ready(function(){
    // Slide-change hook: mirrors the active nav item's image onto the
    // .header background, lazily loading it on first use via an Image()
    // preload and caching the resolved URL back into the dataset.
    // Defined BEFORE the tns(...) call so the reference exists when passed.
    var customizedFunction = function (info, eventName) {
        if (info.navItems[info.displayIndex - 1]) {
            const src = info.navItems[info.displayIndex - 1].dataset.image;
            document.querySelector('.header').style.backgroundImage = `url("${src}")`;
            // console.log(info.navItems[info.displayIndex - 1].dataset.src);
            if (info.navItems[info.displayIndex - 1].dataset.loaded == "true") {
                info.navItems[info.displayIndex - 1].dataset.image = info.navItems[info.displayIndex - 1].dataset.src;
            } else {
                var imgs = [];
                imgs[info.displayIndex - 1] = new Image();
                imgs[info.displayIndex - 1].src = info.navItems[info.displayIndex - 1].dataset.src;
                imgs[info.displayIndex - 1].onload = function () {
                    if (imgs[info.displayIndex - 1].complete) {
                        info.navItems[info.displayIndex - 1].dataset.image = info.navItems[info.displayIndex - 1].dataset.src;
                        document.querySelector('.header').dataset.src = info.navItems[info.displayIndex - 1].dataset.src;
                    }
                };
            }
        }
    }

    var slider = tns({
        container: '.home-slider',
        items: 1,
        // controlsContainer: "#customize-controls",
        slideBy: 'page',
        // autoWidth: true,
        autoplay: true,
        mouseDrag: true,
        controls: false,
        navPosition: "bottom",
        lazyload: true,
        preventScrollOnTouch: 'force',
        // nav: false,
        speed: 1000,
        // Pass the function reference: tiny-slider expects a callback here,
        // so the previous string value 'customizedFunction' was never invoked.
        onInit: customizedFunction,
        responsive: {
            640: {
                items: 1,
            },
            768: {
                items: 1,
            }
        }
    });
});
// Once all assets have loaded, drop the preloader overlay and restart the
// "slow" animation by toggling its class.
// NOTE(review): selectors like '#loader*' and '#slow*' contain a literal
// '*' after the id, which is not a valid id character -- confirm whether
// these ids really include '*' or the selectors are typos.
$(window).on('load', function() {
    var pre_loader = $('#loader*');
    $('#slow*').removeClass('slow');
    $('#slow*').addClass('slow');
    pre_loader.removeClass('loader');
});
#!/bin/bash

# Render environment-variable placeholders into the PHP configuration files
# in place ("ep" is presumably the envplate templating tool -- confirm).
ep /usr/local/etc/php/php.ini
ep /usr/local/etc/php-fpm.conf
ep /usr/local/etc/php-fpm.d/*
#!/bin/bash

# Interactive demo: run batchGCD over two tiny moduli sets (default base and
# base 10) shipped in testdata/, pausing between steps so the user can read
# the output.
toy_moduli=testdata/toy.moduli
toy_moduli_10=testdata/toy-base10.moduli

echo "This will run batchGCD on a tiny set of moduli:"
cat $toy_moduli
read -p "Press enter to continue "; echo ""

# Build (if necessary) and run against the default-base moduli.
make batchgcd
./batchgcd $toy_moduli
read -p "Press enter to continue "; echo ""

echo "Now with the the same moduli, base 10:"
cat $toy_moduli_10
read -p "Press enter to continue"
./batchgcd $toy_moduli_10 -base10
import os
dir_name = "example_directory"
limit = 10
def create_dir(dir_name):
    """Create ``dir_name`` (including parents) if it does not already exist.

    Uses ``exist_ok=True`` instead of a separate ``os.path.exists`` check,
    which removes the check-then-create race between the two calls.
    """
    os.makedirs(dir_name, exist_ok=True)
def get_links(i):
    """Return the list of link identifiers for index ``i``.

    Placeholder: not yet implemented, so it currently returns ``None``.
    Implement it to return an iterable of strings.
    """
    # Implement the logic to obtain a list of links based on the value of i
    # Example:
    # return ["link1_" + str(i), "link2_" + str(i), "link3_" + str(i)]
    pass  # Replace this with your implementation
def main():
    """Create the output directory, then process the links for i = 1..limit."""
    create_dir(dir_name)
    for i in range(1, limit + 1):
        # get_links is a stub that returns None until implemented; treat a
        # missing result as "no links" instead of raising TypeError when
        # iterating over None.
        for path in get_links(i) or []:
            # Perform the specific operation on each link
            # Example:
            # print("Processing link:", path)
            pass  # Replace this with your implementation


if __name__ == "__main__":
    main()
package com.company.example.user;
import java.util.Date;
import io.leopard.data4j.cache.api.uid.IDelete;
/**
 * Data-access interface for users.
 *
 * @author 谭海潮
 *
 */
public interface UserDao extends IDelete<User, Long> {

    /**
     * Add a user.
     *
     * @param user the user to persist
     * @return presumably true when the insert succeeded -- confirm against
     *         the IDelete contract
     */
    @Override
    boolean add(User user);

    /**
     * Look up a user by primary key.
     *
     * @param uid the user's id
     * @return the user, or null when not found -- confirm against the
     *         IDelete contract
     */
    @Override
    User get(Long uid);

    /**
     * Delete a user by primary key.
     *
     * @param uid     the user's id
     * @param opuid   id of the operator performing the deletion
     * @param lmodify last-modified timestamp recorded with the deletion
     * @return presumably true when a row was deleted -- confirm against the
     *         IDelete contract
     */
    @Override
    boolean delete(Long uid, long opuid, Date lmodify);
}
|
<reponame>guoqqqi/hooks
// Jest configuration: use the Midway hooks preset, and force the test
// process to exit once the suites finish instead of waiting on open handles.
module.exports = {
  preset: '@midwayjs/hooks',
  forceExit: true,
};
|
#!/bin/sh
set -e
echo "mkdir -p ${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
mkdir -p "${CONFIGURATION_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"
SWIFT_STDLIB_PATH="${DT_TOOLCHAIN_DIR}/usr/lib/swift/${PLATFORM_NAME}"
# Copy one built framework into the app bundle's Frameworks folder:
# resolve the source (symlinks included), rsync it across without VCS or
# header cruft, strip architectures the target cannot run, re-sign when the
# build settings require it, and (Xcode < 7) embed linked Swift runtime
# dylibs next to it.
install_framework()
{
  # Resolve the source: full path under the products dir, basename under the
  # products dir, or a literal readable path -- first match wins.
  if [ -r "${BUILT_PRODUCTS_DIR}/$1" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$1"
  elif [ -r "${BUILT_PRODUCTS_DIR}/$(basename "$1")" ]; then
    local source="${BUILT_PRODUCTS_DIR}/$(basename "$1")"
  elif [ -r "$1" ]; then
    local source="$1"
  fi

  local destination="${TARGET_BUILD_DIR}/${FRAMEWORKS_FOLDER_PATH}"

  if [ -L "${source}" ]; then
    echo "Symlinked..."
    source="$(readlink "${source}")"
  fi

  # use filter instead of exclude so missing patterns dont' throw errors
  echo "rsync -av --filter \"- CVS/\" --filter \"- .svn/\" --filter \"- .git/\" --filter \"- .hg/\" --filter \"- Headers\" --filter \"- PrivateHeaders\" --filter \"- Modules\" \"${source}\" \"${destination}\""
  rsync -av --filter "- CVS/" --filter "- .svn/" --filter "- .git/" --filter "- .hg/" --filter "- Headers" --filter "- PrivateHeaders" --filter "- Modules" "${source}" "${destination}"

  local basename
  basename="$(basename -s .framework "$1")"
  # The framework's main binary; fall back to a bare dylib layout.
  binary="${destination}/${basename}.framework/${basename}"
  if ! [ -r "$binary" ]; then
    binary="${destination}/${basename}"
  fi

  # Strip invalid architectures so "fat" simulator / device frameworks work on device
  if [[ "$(file "$binary")" == *"dynamically linked shared library"* ]]; then
    strip_invalid_archs "$binary"
  fi

  # Resign the code if required by the build settings to avoid unstable apps
  code_sign_if_enabled "${destination}/$(basename "$1")"

  # Embed linked Swift runtime libraries. No longer necessary as of Xcode 7.
  if [ "${XCODE_VERSION_MAJOR}" -lt 7 ]; then
    local swift_runtime_libs
    # otool -LX lists linked dylibs; keep only the @rpath/libswift* names.
    swift_runtime_libs=$(xcrun otool -LX "$binary" | grep --color=never @rpath/libswift | sed -E s/@rpath\\/\(.+dylib\).*/\\1/g | uniq -u && exit ${PIPESTATUS[0]})
    for lib in $swift_runtime_libs; do
      echo "rsync -auv \"${SWIFT_STDLIB_PATH}/${lib}\" \"${destination}\""
      rsync -auv "${SWIFT_STDLIB_PATH}/${lib}" "${destination}"
      code_sign_if_enabled "${destination}/${lib}"
    done
  fi
}
# Signs a framework with the provided identity
# No-op unless an identity is expanded AND code signing is neither marked
# not-required nor disallowed by the build settings.
code_sign_if_enabled() {
  if [ -n "${EXPANDED_CODE_SIGN_IDENTITY}" -a "${CODE_SIGNING_REQUIRED}" != "NO" -a "${CODE_SIGNING_ALLOWED}" != "NO" ]; then
    # Use the current code_sign_identitiy
    echo "Code Signing $1 with Identity ${EXPANDED_CODE_SIGN_IDENTITY_NAME}"
    echo "/usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements \"$1\""
    /usr/bin/codesign --force --sign ${EXPANDED_CODE_SIGN_IDENTITY} ${OTHER_CODE_SIGN_FLAGS} --preserve-metadata=identifier,entitlements "$1"
  fi
}
# Strip invalid architectures
# Removes, in place via lipo, every architecture slice of $1 that is not
# listed in VALID_ARCHS; aborts the script if lipo fails.
strip_invalid_archs() {
  binary="$1"
  # Get architectures for current file
  archs="$(lipo -info "$binary" | rev | cut -d ':' -f1 | rev)"
  stripped=""
  for arch in $archs; do
    if ! [[ "${VALID_ARCHS}" == *"$arch"* ]]; then
      # Strip non-valid architectures in-place
      lipo -remove "$arch" -output "$binary" "$binary" || exit 1
      stripped="$stripped $arch"
    fi
  done
  if [[ "$stripped" ]]; then
    echo "Stripped $binary of architectures:$stripped"
  fi
}
# Embed the pod frameworks for the active build configuration (the two
# lists happen to be identical for Debug and Release).
if [[ "$CONFIGURATION" == "Debug" ]]; then
  install_framework "$BUILT_PRODUCTS_DIR/SGVInstanceSwizzling-Objective-C-iOS/SGVInstanceSwizzling.framework"
  install_framework "$BUILT_PRODUCTS_DIR/SGVSuperMessagingProxy-Common-Objective-C-iOS/SGVSuperMessagingProxy.framework"
fi
if [[ "$CONFIGURATION" == "Release" ]]; then
  install_framework "$BUILT_PRODUCTS_DIR/SGVInstanceSwizzling-Objective-C-iOS/SGVInstanceSwizzling.framework"
  install_framework "$BUILT_PRODUCTS_DIR/SGVSuperMessagingProxy-Common-Objective-C-iOS/SGVSuperMessagingProxy.framework"
fi
|
#ifndef UTIL_RED_BLACK_TREE_H
#define UTIL_RED_BLACK_TREE_H
// Implementation of the Red-Black-Tree algorithm of the book:
// "Introduction to Algorithms", by Cormen, Leiserson, Rivest and Stein.
#include <stdlib.h>
#include <stdint.h>
#include <memory>
#include "util/minus.h"
namespace util {
template<typename _Key,
typename _Value,
typename _Compare = util::minus<_Key> >
class red_black_tree {
private:
typedef uint8_t color_t;
struct node {
_Key key;
_Value value;
color_t color;
node* left;
node* right;
node* p;
// Constructor.
node();
node(const _Key& k, const _Value& v);
};
public:
class iterator {
friend class red_black_tree;
public:
_Key* first;
_Value* second;
private:
node* _M_node;
};
class const_iterator {
friend class red_black_tree;
public:
const _Key* first;
const _Value* second;
private:
const node* _M_node;
};
// Constructor.
red_black_tree();
red_black_tree(const _Compare& comp);
red_black_tree(red_black_tree&& other);
// Destructor.
~red_black_tree();
// Clear.
void clear();
// Get number of elements.
size_t count() const;
// Insert.
bool insert(const _Key& key, const _Value& value);
// Erase.
bool erase(iterator& it);
bool erase(const _Key& key);
// Find.
bool find(const _Key& key, iterator& it);
bool find(const _Key& key, const_iterator& it) const;
// Begin.
bool begin(iterator& it);
bool begin(const_iterator& it) const;
// End.
bool end(iterator& it);
bool end(const_iterator& it) const;
// Previous.
bool prev(iterator& it);
bool prev(const_iterator& it) const;
// Next.
bool next(iterator& it);
bool next(const_iterator& it) const;
private:
enum {
kRed = 0,
kBlack = 1
};
static node _M_nil;
node* _M_root;
#if HAVE_FREE_LIST
node* _M_free;
#endif // HAVE_FREE_LIST
size_t _M_count;
_Compare _M_comp;
// Search.
node* search(const _Key& key);
const node* search(const _Key& key) const;
// Minimum.
node* minimum(node* x);
const node* minimum(const node* x) const;
// Maximum.
node* maximum(node* x);
const node* maximum(const node* x) const;
// Predecessor.
node* predecessor(node* x);
const node* predecessor(const node* x) const;
// Successor.
node* successor(node* x);
const node* successor(const node* x) const;
// Left rotate.
void left_rotate(node* x);
// Right rotate.
void right_rotate(node* x);
// Insert fixup.
void insert_fixup(node* z);
// Transplant.
void transplant(node* u, node* v);
// Delete fixup.
void delete_fixup(node* x);
// Create node.
node* create(const _Key& key, const _Value& value);
#if HAVE_FREE_LIST
// Add node to free list.
void add_free_list(node* node);
#endif // HAVE_FREE_LIST
// Erase subtree.
void erase_subtree(node* node);
// Get nil.
static node* nil();
// Disable copy constructor and assignment operator.
red_black_tree(const red_black_tree&) = delete;
red_black_tree& operator=(const red_black_tree&) = delete;
};
template<typename _Key, typename _Value, typename _Compare>
typename red_black_tree<_Key, _Value, _Compare>::node
red_black_tree<_Key, _Value, _Compare>::_M_nil;
template<typename _Key, typename _Value, typename _Compare>
inline red_black_tree<_Key, _Value, _Compare>::node::node()
: color(kBlack)
{
}
template<typename _Key, typename _Value, typename _Compare>
inline red_black_tree<_Key, _Value, _Compare>::node::node(const _Key& k,
const _Value& v)
: key(k),
value(v)
{
}
template<typename _Key, typename _Value, typename _Compare>
inline
red_black_tree<_Key, _Value, _Compare>::red_black_tree()
: _M_root(nil()),
#if HAVE_FREE_LIST
_M_free(NULL),
#endif // HAVE_FREE_LIST
_M_count(0),
_M_comp()
{
}
template<typename _Key, typename _Value, typename _Compare>
inline
red_black_tree<_Key, _Value, _Compare>::red_black_tree(const _Compare& comp)
: _M_root(nil()),
#if HAVE_FREE_LIST
_M_free(NULL),
#endif // HAVE_FREE_LIST
_M_count(0),
_M_comp(comp)
{
}
// Move constructor: steal the other tree's nodes and leave it empty but
// destructible/usable.
template<typename _Key, typename _Value, typename _Compare>
inline
red_black_tree<_Key, _Value, _Compare>::red_black_tree(red_black_tree&& other)
{
  _M_root = other._M_root;

#if HAVE_FREE_LIST
  _M_free = other._M_free;
#endif // HAVE_FREE_LIST

  _M_count = other._M_count;
  _M_comp = other._M_comp;

  other._M_root = nil();

#if HAVE_FREE_LIST
  // _M_free only exists when the free list is compiled in; the previous
  // unguarded assignment broke the build without HAVE_FREE_LIST.
  other._M_free = NULL;
#endif // HAVE_FREE_LIST

  // Reset the moved-from count so count() does not report elements the
  // tree no longer owns (its clear()/destructor walk nothing anyway).
  other._M_count = 0;
}
template<typename _Key, typename _Value, typename _Compare>
inline red_black_tree<_Key, _Value, _Compare>::~red_black_tree()
{
clear();
}
// Remove every element and release all memory owned by the tree; the tree
// stays usable (empty) afterwards.
template<typename _Key, typename _Value, typename _Compare>
void red_black_tree<_Key, _Value, _Compare>::clear()
{
  if (_M_root != nil()) {
    erase_subtree(_M_root);
    _M_root = nil();
  }

#if HAVE_FREE_LIST
  // NOTE(review): nodes on the free list already had ~node() run in
  // erase(); 'delete' here invokes the destructor a second time. Releasing
  // the raw storage (::operator delete) would avoid that -- confirm.
  while (_M_free) {
    node* next = _M_free->left;
    delete _M_free;
    _M_free = next;
  }
#endif // HAVE_FREE_LIST

  _M_count = 0;
}
template<typename _Key, typename _Value, typename _Compare>
inline size_t red_black_tree<_Key, _Value, _Compare>::count() const
{
return _M_count;
}
// Insert (key, value). Duplicate keys are permitted (they descend to the
// right subtree). Returns false only when node allocation fails.
template<typename _Key, typename _Value, typename _Compare>
bool red_black_tree<_Key, _Value, _Compare>::insert(const _Key& key,
                                                    const _Value& value)
{
  node* z;

  if ((z = create(key, value)) == NULL) {
    return false;
  }

  // Standard BST descent: y trails x and becomes z's parent.
  node* y = nil();
  node* x = _M_root;

  while (x != nil()) {
    y = x;

    if (_M_comp(z->key, x->key) < 0) {
      x = x->left;
    } else {
      x = x->right;
    }
  }

  z->p = y;

  if (y == nil()) {
    _M_root = z;
  } else if (_M_comp(z->key, y->key) < 0) {
    y->left = z;
  } else {
    y->right = z;
  }

  z->left = nil();
  z->right = nil();

  // New nodes start red; insert_fixup restores the red-black invariants.
  z->color = kRed;

  insert_fixup(z);

  _M_count++;

  return true;
}
template<typename _Key, typename _Value, typename _Compare>
bool red_black_tree<_Key, _Value, _Compare>::erase(iterator& it)
{
node* z = it._M_node;
it._M_node = successor(z);
node* x;
node* y = z;
color_t ycolor = y->color;
if (z->left == nil()) {
x = z->right;
transplant(z, z->right);
} else if (z->right == nil()) {
x = z->left;
transplant(z, z->left);
} else {
y = minimum(z->right);
ycolor = y->color;
x = y->right;
if (y->p == z) {
x->p = y;
} else {
transplant(y, y->right);
y->right = z->right;
y->right->p = y;
}
transplant(z, y);
y->left = z->left;
y->left->p = y;
y->color = z->color;
}
if (ycolor == kBlack) {
delete_fixup(x);
}
_M_count--;
#if HAVE_FREE_LIST
// Call destructor.
z->~node();
add_free_list(z);
#else
delete z;
#endif
if (it._M_node == nil()) {
return false;
}
it.first = &it._M_node->key;
it.second = &it._M_node->value;
return true;
}
template<typename _Key, typename _Value, typename _Compare>
inline bool red_black_tree<_Key, _Value, _Compare>::erase(const _Key& key)
{
iterator it;
if ((it._M_node = search(key)) == nil()) {
return false;
}
erase(it);
return true;
}
template<typename _Key, typename _Value, typename _Compare>
inline bool red_black_tree<_Key, _Value, _Compare>::find(const _Key& key,
iterator& it)
{
if ((it._M_node = search(key)) == nil()) {
return false;
}
it.first = &it._M_node->key;
it.second = &it._M_node->value;
return true;
}
template<typename _Key, typename _Value, typename _Compare>
inline bool
red_black_tree<_Key, _Value, _Compare>::find(const _Key& key,
const_iterator& it) const
{
if ((it._M_node = search(key)) == nil()) {
return false;
}
it.first = &it._M_node->key;
it.second = &it._M_node->value;
return true;
}
template<typename _Key, typename _Value, typename _Compare>
inline bool red_black_tree<_Key, _Value, _Compare>::begin(iterator& it)
{
if (_M_root == nil()) {
return false;
}
it._M_node = minimum(_M_root);
it.first = &it._M_node->key;
it.second = &it._M_node->value;
return true;
}
template<typename _Key, typename _Value, typename _Compare>
inline bool
red_black_tree<_Key, _Value, _Compare>::begin(const_iterator& it) const
{
if (_M_root == nil()) {
return false;
}
it._M_node = minimum(_M_root);
it.first = &it._M_node->key;
it.second = &it._M_node->value;
return true;
}
template<typename _Key, typename _Value, typename _Compare>
inline bool red_black_tree<_Key, _Value, _Compare>::end(iterator& it)
{
if (_M_root == nil()) {
return false;
}
it._M_node = maximum(_M_root);
it.first = &it._M_node->key;
it.second = &it._M_node->value;
return true;
}
template<typename _Key, typename _Value, typename _Compare>
inline bool
red_black_tree<_Key, _Value, _Compare>::end(const_iterator& it) const
{
if (_M_root == nil()) {
return false;
}
it._M_node = maximum(_M_root);
it.first = &it._M_node->key;
it.second = &it._M_node->value;
return true;
}
template<typename _Key, typename _Value, typename _Compare>
inline bool red_black_tree<_Key, _Value, _Compare>::prev(iterator& it)
{
if ((it._M_node = predecessor(it._M_node)) == nil()) {
return false;
}
it.first = &it._M_node->key;
it.second = &it._M_node->value;
return true;
}
template<typename _Key, typename _Value, typename _Compare>
inline bool
red_black_tree<_Key, _Value, _Compare>::prev(const_iterator& it) const
{
if ((it._M_node = predecessor(it._M_node)) == nil()) {
return false;
}
it.first = &it._M_node->key;
it.second = &it._M_node->value;
return true;
}
template<typename _Key, typename _Value, typename _Compare>
inline bool red_black_tree<_Key, _Value, _Compare>::next(iterator& it)
{
if ((it._M_node = successor(it._M_node)) == nil()) {
return false;
}
it.first = &it._M_node->key;
it.second = &it._M_node->value;
return true;
}
template<typename _Key, typename _Value, typename _Compare>
inline bool
red_black_tree<_Key, _Value, _Compare>::next(const_iterator& it) const
{
if ((it._M_node = successor(it._M_node)) == nil()) {
return false;
}
it.first = &it._M_node->key;
it.second = &it._M_node->value;
return true;
}
template<typename _Key, typename _Value, typename _Compare>
inline typename red_black_tree<_Key, _Value, _Compare>::node*
red_black_tree<_Key, _Value, _Compare>::search(const _Key& key)
{
return const_cast<red_black_tree<_Key, _Value, _Compare>::node*>(
const_cast<const red_black_tree<_Key, _Value, _Compare>&>(
*this
).search(key)
);
}
template<typename _Key, typename _Value, typename _Compare>
const typename red_black_tree<_Key, _Value, _Compare>::node*
red_black_tree<_Key, _Value, _Compare>::search(const _Key& key) const
{
const node* x = _M_root;
while (x != nil()) {
int ret;
if ((ret = _M_comp(key, x->key)) < 0) {
x = x->left;
} else if (ret == 0) {
return x;
} else {
x = x->right;
}
}
return nil();
}
template<typename _Key, typename _Value, typename _Compare>
inline typename red_black_tree<_Key, _Value, _Compare>::node*
red_black_tree<_Key, _Value, _Compare>::minimum(node* x)
{
return const_cast<red_black_tree<_Key, _Value, _Compare>::node*>(
const_cast<const red_black_tree<_Key, _Value, _Compare>&>(
*this
).minimum(x)
);
}
template<typename _Key, typename _Value, typename _Compare>
inline const typename red_black_tree<_Key, _Value, _Compare>::node*
red_black_tree<_Key, _Value, _Compare>::minimum(const node* x) const
{
while (x->left != nil()) {
x = x->left;
}
return x;
}
template<typename _Key, typename _Value, typename _Compare>
inline typename red_black_tree<_Key, _Value, _Compare>::node*
red_black_tree<_Key, _Value, _Compare>::maximum(node* x)
{
return const_cast<red_black_tree<_Key, _Value, _Compare>::node*>(
const_cast<const red_black_tree<_Key, _Value, _Compare>&>(
*this
).maximum(x)
);
}
template<typename _Key, typename _Value, typename _Compare>
inline const typename red_black_tree<_Key, _Value, _Compare>::node*
red_black_tree<_Key, _Value, _Compare>::maximum(const node* x) const
{
while (x->right != nil()) {
x = x->right;
}
return x;
}
template<typename _Key, typename _Value, typename _Compare>
inline typename red_black_tree<_Key, _Value, _Compare>::node*
red_black_tree<_Key, _Value, _Compare>::predecessor(node* x)
{
return const_cast<red_black_tree<_Key, _Value, _Compare>::node*>(
const_cast<const red_black_tree<_Key, _Value, _Compare>&>(
*this
).predecessor(x)
);
}
template<typename _Key, typename _Value, typename _Compare>
const typename red_black_tree<_Key, _Value, _Compare>::node*
red_black_tree<_Key, _Value, _Compare>::predecessor(const node* x) const
{
if (x->left != nil()) {
return maximum(x->left);
}
const node* y = x->p;
while ((y != nil()) && (x == y->left)) {
x = y;
y = y->p;
}
return y;
}
template<typename _Key, typename _Value, typename _Compare>
inline typename red_black_tree<_Key, _Value, _Compare>::node*
red_black_tree<_Key, _Value, _Compare>::successor(node* x)
{
return const_cast<red_black_tree<_Key, _Value, _Compare>::node*>(
const_cast<const red_black_tree<_Key, _Value, _Compare>&>(
*this
).successor(x)
);
}
template<typename _Key, typename _Value, typename _Compare>
const typename red_black_tree<_Key, _Value, _Compare>::node*
red_black_tree<_Key, _Value, _Compare>::successor(const node* x) const
{
if (x->right != nil()) {
return minimum(x->right);
}
const node* y = x->p;
while ((y != nil()) && (x == y->right)) {
x = y;
y = y->p;
}
return y;
}
template<typename _Key, typename _Value, typename _Compare>
void red_black_tree<_Key, _Value, _Compare>::left_rotate(node* x)
{
node* y = x->right;
x->right = y->left;
if (y->left != nil()) {
y->left->p = x;
}
y->p = x->p;
if (x->p == nil()) {
_M_root = y;
} else if (x == x->p->left) {
x->p->left = y;
} else {
x->p->right = y;
}
y->left = x;
x->p = y;
}
template<typename _Key, typename _Value, typename _Compare>
void red_black_tree<_Key, _Value, _Compare>::right_rotate(node* x)
{
node* y = x->left;
x->left = y->right;
if (y->right != nil()) {
y->right->p = x;
}
y->p = x->p;
if (x->p == nil()) {
_M_root = y;
} else if (x == x->p->right) {
x->p->right = y;
} else {
x->p->left = y;
}
y->right = x;
x->p = y;
}
template<typename _Key, typename _Value, typename _Compare>
void red_black_tree<_Key, _Value, _Compare>::insert_fixup(node* z)
{
while (z->p->color == kRed) {
if (z->p == z->p->p->left) {
node* y = z->p->p->right;
if (y->color == kRed) {
z->p->color = kBlack;
y->color = kBlack;
z->p->p->color = kRed;
z = z->p->p;
} else {
if (z == z->p->right) {
z = z->p;
left_rotate(z);
}
z->p->color = kBlack;
z->p->p->color = kRed;
right_rotate(z->p->p);
}
} else {
node* y = z->p->p->left;
if (y->color == kRed) {
z->p->color = kBlack;
y->color = kBlack;
z->p->p->color = kRed;
z = z->p->p;
} else {
if (z == z->p->left) {
z = z->p;
right_rotate(z);
}
z->p->color = kBlack;
z->p->p->color = kRed;
left_rotate(z->p->p);
}
}
}
_M_root->color = kBlack;
}
template<typename _Key, typename _Value, typename _Compare>
inline void red_black_tree<_Key, _Value, _Compare>::transplant(node* u,
node* v)
{
if (u->p == nil()) {
_M_root = v;
} else if (u == u->p->left) {
u->p->left = v;
} else {
u->p->right = v;
}
v->p = u->p;
}
template<typename _Key, typename _Value, typename _Compare>
void red_black_tree<_Key, _Value, _Compare>::delete_fixup(node* x)
{
while ((x != _M_root) && (x->color == kBlack)) {
if (x == x->p->left) {
node* w = x->p->right;
if (w->color == kRed) {
w->color = kBlack;
x->p->color = kRed;
left_rotate(x->p);
w = x->p->right;
}
if ((w->left->color == kBlack) && (w->right->color == kBlack)) {
w->color = kRed;
x = x->p;
} else {
if (w->right->color == kBlack) {
w->left->color = kBlack;
w->color = kRed;
right_rotate(w);
w = x->p->right;
}
w->color = x->p->color;
x->p->color = kBlack;
w->right->color = kBlack;
left_rotate(x->p);
x = _M_root;
}
} else {
node* w = x->p->left;
if (w->color == kRed) {
w->color = kBlack;
x->p->color = kRed;
right_rotate(x->p);
w = x->p->left;
}
if ((w->right->color == kBlack) && (w->left->color == kBlack)) {
w->color = kRed;
x = x->p;
} else {
if (w->left->color == kBlack) {
w->right->color = kBlack;
w->color = kRed;
left_rotate(w);
w = x->p->left;
}
w->color = x->p->color;
x->p->color = kBlack;
w->left->color = kBlack;
right_rotate(x->p);
x = _M_root;
}
}
}
x->color = kBlack;
}
template<typename _Key, typename _Value, typename _Compare>
inline typename red_black_tree<_Key, _Value, _Compare>::node*
red_black_tree<_Key, _Value, _Compare>::create(const _Key& key,
const _Value& value)
{
#if HAVE_FREE_LIST
if (!_M_free) {
return new (std::nothrow) node(key, value);
}
node* n = _M_free;
_M_free = _M_free->left;
return new (n) node(key, value);
#else
return new (std::nothrow) node(key, value);
#endif
}
#if HAVE_FREE_LIST
template<typename _Key, typename _Value, typename _Compare>
inline void red_black_tree<_Key, _Value, _Compare>::add_free_list(node* node)
{
node->left = _M_free;
_M_free = node;
}
#endif // HAVE_FREE_LIST
// Post-order recursive deletion of the subtree rooted at 'node'.
// Recursion depth is bounded by the tree height (O(log n) for a valid
// red-black tree).
template<typename _Key, typename _Value, typename _Compare>
void red_black_tree<_Key, _Value, _Compare>::erase_subtree(node* node)
{
  if (node->left != nil()) {
    erase_subtree(node->left);
  }

  if (node->right != nil()) {
    erase_subtree(node->right);
  }

  delete node;
}
template<typename _Key, typename _Value, typename _Compare>
inline typename red_black_tree<_Key, _Value, _Compare>::node*
red_black_tree<_Key, _Value, _Compare>::nil()
{
return &_M_nil;
}
}
#endif // UTIL_RED_BLACK_TREE_H
|
<gh_stars>100-1000
//
// mulle_objc_cache.h
// mulle-objc-runtime
//
// Created by Nat! on 15.09.15.
// Copyright (c) 2015 Nat! - <NAME>.
// Copyright (c) 2015 Codeon GmbH.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// Redistributions of source code must retain the above copyright notice, this
// list of conditions and the following disclaimer.
//
// Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// Neither the name of Mulle kybernetiK nor the names of its contributors
// may be used to endorse or promote products derived from this software
// without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
//
#ifndef mulle_objc_cache_h__
#define mulle_objc_cache_h__
#include "mulle-objc-uniqueid.h"
#include "include.h"
#include <stddef.h>
#include <assert.h>
# pragma mark - method cache
#define MULLE_OBJC_MIN_CACHE_SIZE 4
typedef mulle_objc_uniqueid_t mulle_objc_cache_uint_t;
// this sizeof() must be a power of 2 else stuff fails
struct _mulle_objc_cacheentry
{
union
{
mulle_objc_uniqueid_t uniqueid;
mulle_atomic_pointer_t pointer;
} key;
union
{
mulle_atomic_functionpointer_t functionpointer;
mulle_atomic_pointer_t pointer;
} value;
};
struct _mulle_objc_cache
{
mulle_atomic_pointer_t n;
mulle_objc_cache_uint_t size; // don't optimize away (alignment!)
mulle_objc_cache_uint_t mask;
struct _mulle_objc_cacheentry entries[ 1];
};
// Resize strategies consumed by _mulle_objc_cache_get_resize().
enum mulle_objc_cachesizing_t
{
   MULLE_OBJC_CACHESIZE_SHRINK   = -1,  // halve, but never below MULLE_OBJC_MIN_CACHE_SIZE
   MULLE_OBJC_CACHESIZE_STAGNATE = 0,   // keep current size
   MULLE_OBJC_CACHESIZE_GROW     = 2    // double current size
};
// Compute the entry count a cache should be (re)allocated with for the
// given resize strategy:
//   STAGNATE -> keep the current size
//   GROW     -> double the current size
//   SHRINK   -> halve it, but never drop below MULLE_OBJC_MIN_CACHE_SIZE
// A defensive return after the switch keeps the function from falling off
// its end: the original had no return path there, which is undefined
// behavior for a non-void function if an unlisted enum value is ever passed.
static inline size_t   _mulle_objc_cache_get_resize( struct _mulle_objc_cache *cache,
                                                     enum mulle_objc_cachesizing_t strategy)
{
   switch( strategy)
   {
   case MULLE_OBJC_CACHESIZE_STAGNATE :
      return( cache->size);

   case MULLE_OBJC_CACHESIZE_GROW :
      return( cache->size * 2);

   case MULLE_OBJC_CACHESIZE_SHRINK :
      return( cache->size <= MULLE_OBJC_MIN_CACHE_SIZE * 2
              ? MULLE_OBJC_MIN_CACHE_SIZE
              : cache->size >> 1);
   }
   return( cache->size);   // defensive default for out-of-range values
}
// Recover the owning cache header from its entries array: step back over
// the header fields (the inverse of taking &cache->entries[ 0]).
MULLE_C_ALWAYS_INLINE static inline struct _mulle_objc_cache *
   _mulle_objc_cacheentry_get_cache_from_entries( struct _mulle_objc_cacheentry *entries)
{
   char   *base;

   base = (char *) entries - offsetof( struct _mulle_objc_cache, entries);
   return( (struct _mulle_objc_cache *) base);
}
// Holds the atomically swappable pointer to a cache's entries array.
// The cache header itself is recovered via
// _mulle_objc_cacheentry_get_cache_from_entries().
struct _mulle_objc_cachepivot
{
   mulle_atomic_pointer_t   entries;  // for atomic XCHG with pointer indirection
};
// Non-atomic read of the entries pointer; only safe when no other thread
// can be mutating the pivot concurrently.
MULLE_C_ALWAYS_INLINE static inline struct _mulle_objc_cacheentry *
   _mulle_objc_cachepivot_nonatomicget_entries( struct _mulle_objc_cachepivot *p)
{
   void   *entries;

   entries = _mulle_atomic_pointer_nonatomic_read( &p->entries);
   return( (struct _mulle_objc_cacheentry *) entries);
}
// Non-atomic convenience: entries pointer -> owning cache header.
MULLE_C_ALWAYS_INLINE
static inline struct _mulle_objc_cache *
   _mulle_objc_cachepivot_nonatomicget_cache( struct _mulle_objc_cachepivot *p)
{
   return( _mulle_objc_cacheentry_get_cache_from_entries(
              _mulle_objc_cachepivot_nonatomicget_entries( p)));
}
// Atomic read of the entries pointer.
MULLE_C_ALWAYS_INLINE static inline struct _mulle_objc_cacheentry *
   _mulle_objc_cachepivot_atomicget_entries( struct _mulle_objc_cachepivot *p)
{
   void   *entries;

   entries = _mulle_atomic_pointer_read( &p->entries);
   return( (struct _mulle_objc_cacheentry *) entries);
}
// Atomically install `new_entries`, forwarding whatever pointer the
// primitive returns (presumably the previously installed value -- TODO confirm).
static inline struct _mulle_objc_cacheentry *
   _mulle_objc_cachepivot_atomicset_entries( struct _mulle_objc_cachepivot *p,
                                             struct _mulle_objc_cacheentry *new_entries)
{
   void   *previous;

   previous = _mulle_atomic_pointer_set( &p->entries, new_entries);
   return( (struct _mulle_objc_cacheentry *) previous);
}
// Compare-and-swap the entries pointer from `old_entries` to `new_entries`.
// Returns 0 on success, non-zero if the swap did not happen.
static inline int
   _mulle_objc_cachepivot_atomiccas_entries( struct _mulle_objc_cachepivot *p,
                                             struct _mulle_objc_cacheentry *new_entries,
                                             struct _mulle_objc_cacheentry *old_entries)
{
   assert( old_entries != new_entries);
   return( _mulle_atomic_pointer_cas( &p->entries, new_entries, old_entries)
           ? 0
           : 1);
}
// Weak compare-and-swap variant (may fail spuriously).
// NOTE(review): unlike the strong CAS above, this returns the raw result of
// __mulle_atomic_pointer_weakcas cast to an entries pointer -- verify that
// the underlying primitive really yields a pointer and that callers expect
// this, rather than the 0-on-success convention of the strong variant.
static inline struct _mulle_objc_cacheentry *
   _mulle_objc_cachepivot_atomiccweakcas_entries( struct _mulle_objc_cachepivot *p,
                                                  struct _mulle_objc_cacheentry *new_entries,
                                                  struct _mulle_objc_cacheentry *old_entries)
{
   assert( old_entries != new_entries);
   return( __mulle_atomic_pointer_weakcas( &p->entries, new_entries, old_entries));
}
// Atomic convenience: entries pointer -> owning cache header.
static inline struct _mulle_objc_cache *
   _mulle_objc_cachepivot_atomicget_cache( struct _mulle_objc_cachepivot *p)
{
   return( _mulle_objc_cacheentry_get_cache_from_entries(
              _mulle_objc_cachepivot_atomicget_entries( p)));
}
# pragma mark - cache petty accessors
static inline mulle_objc_cache_uint_t
_mulle_objc_cache_get_count( struct _mulle_objc_cache *cache)
{
// yay double cast, how C like...
return( (mulle_objc_cache_uint_t) (uintptr_t) _mulle_atomic_pointer_read( &cache->n));
}
static inline mulle_objc_uniqueid_t
_mulle_objc_cache_get_size( struct _mulle_objc_cache *cache)
{
return( cache->size);
}
// Index mask of the cache; presumably size - 1 for power-of-two hashing
// -- TODO confirm against the allocation code.
static inline mulle_objc_cache_uint_t
   _mulle_objc_cache_get_mask( struct _mulle_objc_cache *cache)
{
   return( cache->mask);
}
# pragma mark - cache allocation

// Allocate a cache with `size` entries using `allocator`.
struct _mulle_objc_cache *mulle_objc_cache_new( mulle_objc_cache_uint_t size,
                                                struct mulle_allocator *allocator);
// Immediate free.
void _mulle_objc_cache_free( struct _mulle_objc_cache *cache,
                             struct mulle_allocator *allocator);
// Deferred, ABA-safe free for caches that may still be read concurrently.
void _mulle_objc_cache_abafree( struct _mulle_objc_cache *cache,
                                struct mulle_allocator *allocator);

# pragma mark - cache add entry

// "inactivecache" variants: for caches not yet published to other threads.
struct _mulle_objc_cacheentry *
    _mulle_objc_cache_inactivecache_add_pointer_entry( struct _mulle_objc_cache *cache,
                                                       void *pointer,
                                                       mulle_objc_uniqueid_t uniqueid);
struct _mulle_objc_cacheentry *
   _mulle_objc_cache_inactivecache_add_functionpointer_entry( struct _mulle_objc_cache *cache,
                                                              mulle_functionpointer_t pointer,
                                                              mulle_objc_uniqueid_t uniqueid);

// Returns NULL if the cache is full.
struct _mulle_objc_cacheentry *
   _mulle_objc_cache_add_pointer_entry( struct _mulle_objc_cache *cache,
                                        void *pointer,
                                        mulle_objc_uniqueid_t uniqueid);
struct _mulle_objc_cacheentry *
   _mulle_objc_cache_add_functionpointer_entry( struct _mulle_objc_cache *cache,
                                                mulle_functionpointer_t pointer,
                                                mulle_objc_uniqueid_t uniqueid);

# pragma mark - cache method lookup

void *_mulle_objc_cache_lookup_pointer( struct _mulle_objc_cache *cache,
                                        mulle_objc_uniqueid_t uniqueid);
mulle_functionpointer_t
   _mulle_objc_cache_lookup_functionpointer( struct _mulle_objc_cache *cache,
                                             mulle_objc_uniqueid_t uniqueid);
// Byte offset of the entry for `uniqueid` within the entries array.
mulle_objc_cache_uint_t
   _mulle_objc_cache_find_entryoffset( struct _mulle_objc_cache *cache,
                                       mulle_objc_uniqueid_t uniqueid);

# pragma mark - cache utilitites

unsigned int mulle_objc_cache_calculate_fillpercentage( struct _mulle_objc_cache *cache);

// Fills `percentages` with the distribution of hit offsets relative to the
// ideal slot; a high percentages[ 0] is good. `size` must be > 1.
unsigned int mulle_objc_cache_calculate_hitpercentage( struct _mulle_objc_cache *cache,
                                                       unsigned int *percentages,
                                                       unsigned int size);
int _mulle_objc_cache_find_entryindex( struct _mulle_objc_cache *cache,
                                        mulle_objc_uniqueid_t uniqueid);
#endif /* mulle_objc_cache_h__ */
|
#!/bin/bash
# Run a single dieharder test (-d 16) against generator 21 with a fixed,
# reproducible seed. The seed may be overridden by passing it as the first
# argument; with no argument the original hard-coded seed is used, so
# existing invocations behave exactly as before.
dieharder -d 16 -g 21 -S "${1:-2784593861}"
|
<reponame>moc-yuto/envoy
#include "extensions/filters/network/mysql_proxy/mysql_codec.h"
#include "extensions/filters/network/mysql_proxy/mysql_filter.h"
#include "extensions/filters/network/mysql_proxy/mysql_utils.h"
#include "test/mocks/network/mocks.h"
#include "gmock/gmock.h"
#include "gtest/gtest.h"
#include "mysql_test_utils.h"
using testing::NiceMock;
namespace Envoy {
namespace Extensions {
namespace NetworkFilters {
namespace MySQLProxy {
constexpr int SESSIONS = 5;
// Fixture for MySQL proxy filter tests. Each test calls initialize() to
// build a fresh filter wired to mocked read-filter callbacks, then drives
// the filter's state machine by feeding encoded MySQL frames to onData().
class MySQLFilterTest : public MySQLTestUtils, public testing::Test {
public:
  MySQLFilterTest() { ENVOY_LOG_MISC(info, "test"); }

  // Build a fresh config + filter and attach the mocked callbacks.
  void initialize() {
    config_ = std::make_shared<MySQLFilterConfig>(stat_prefix_, scope_);
    filter_ = std::make_unique<MySQLFilter>(config_);
    filter_->initializeReadFilterCallbacks(filter_callbacks_);
  }

  MySQLFilterConfigSharedPtr config_;           // shared with filter_
  std::unique_ptr<MySQLFilter> filter_;         // filter under test
  Stats::IsolatedStoreImpl scope_;              // isolated stats sink
  std::string stat_prefix_{"test"};
  NiceMock<Network::MockReadFilterCallbacks> filter_callbacks_;
};
// Test New Session counter increment
TEST_F(MySQLFilterTest, NewSessionStatsTest) {
  initialize();

  // Open several connections and check that the sessions counter tracks
  // each one.
  for (int session = 0; session < SESSIONS; ++session) {
    EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onNewConnection());
  }
  EXPECT_EQ(SESSIONS, config_->stats().sessions_.value());
}
// Test that the filter falls back to tcp proxy if it cant decode
TEST_F(MySQLFilterTest, MySqlFallbackToTcpProxy) {
  initialize();
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onNewConnection());
  EXPECT_EQ(1UL, config_->stats().sessions_.value());

  // Undecodable data: one decoder error is counted and the filter keeps
  // passing bytes through.
  Buffer::InstancePtr first_chunk(new Buffer::OwnedImpl("scooby doo - part 1!"));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*first_chunk, false));
  EXPECT_EQ(1UL, config_->stats().decoder_errors_.value());

  // Further data continues through unchanged.
  Buffer::InstancePtr second_chunk(new Buffer::OwnedImpl("scooby doo - part 2!"));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*second_chunk, false));
}
/**
* Test MySQL Handshake with protocol version 41
* SM: greeting(p=10) -> challenge-req(v41) -> serv-resp-ok
*/
TEST_F(MySQLFilterTest, MySqlHandshake41OkTest) {
  initialize();
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onNewConnection());
  EXPECT_EQ(1UL, config_->stats().sessions_.value());
  // Server greeting (protocol 10): session advances to CHALLENGE_REQ.
  std::string greeting_data = encodeServerGreeting(MYSQL_PROTOCOL_10);
  Buffer::InstancePtr greet_data(new Buffer::OwnedImpl(greeting_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*greet_data, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_REQ, filter_->getSession().getState());
  // Client login advertising 4.1 capabilities: counted as a login attempt,
  // session advances to CHALLENGE_RESP_41.
  std::string clogin_data =
      encodeClientLogin(MYSQL_CLIENT_CAPAB_41VS320, "user1", CHALLENGE_SEQ_NUM);
  Buffer::InstancePtr client_login_data(new Buffer::OwnedImpl(clogin_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*client_login_data, false));
  EXPECT_EQ(1UL, config_->stats().login_attempts_.value());
  EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_RESP_41, filter_->getSession().getState());
  // Server OK completes the handshake: session reaches REQ (command phase).
  std::string srv_resp_data = encodeClientLoginResp(MYSQL_RESP_OK);
  Buffer::InstancePtr server_resp_data(new Buffer::OwnedImpl(srv_resp_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*server_resp_data, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_REQ, filter_->getSession().getState());
}
/**
* Test MySQL Handshake with protocol version 41
* Server responds with Error
* SM: greeting(p=10) -> challenge-req(v41) -> serv-resp-err
*/
TEST_F(MySQLFilterTest, MySqlHandshake41ErrTest) {
  initialize();
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onNewConnection());
  EXPECT_EQ(1UL, config_->stats().sessions_.value());
  // Server greeting (protocol 10): session advances to CHALLENGE_REQ.
  std::string greeting_data = encodeServerGreeting(MYSQL_PROTOCOL_10);
  Buffer::InstancePtr greet_data(new Buffer::OwnedImpl(greeting_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*greet_data, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_REQ, filter_->getSession().getState());
  // Client login with 4.1 capabilities.
  std::string clogin_data =
      encodeClientLogin(MYSQL_CLIENT_CAPAB_41VS320, "user1", CHALLENGE_SEQ_NUM);
  Buffer::InstancePtr client_login_data(new Buffer::OwnedImpl(clogin_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*client_login_data, false));
  EXPECT_EQ(1UL, config_->stats().login_attempts_.value());
  EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_RESP_41, filter_->getSession().getState());
  // Server ERR: login failure is counted and session enters the ERROR state.
  std::string srv_resp_data = encodeClientLoginResp(MYSQL_RESP_ERR);
  Buffer::InstancePtr server_resp_data(new Buffer::OwnedImpl(srv_resp_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*server_resp_data, false));
  EXPECT_EQ(1UL, config_->stats().login_failures_.value());
  EXPECT_EQ(MySQLSession::State::MYSQL_ERROR, filter_->getSession().getState());
}
/**
* Test MySQL Handshake with protocol version 320
* SM: greeting(p=10) -> challenge-req(v320) -> serv-resp-ok
*/
TEST_F(MySQLFilterTest, MySqlHandshake320OkTest) {
  initialize();
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onNewConnection());
  EXPECT_EQ(1UL, config_->stats().sessions_.value());
  // Server greeting (protocol 10): session advances to CHALLENGE_REQ.
  std::string greeting_data = encodeServerGreeting(MYSQL_PROTOCOL_10);
  Buffer::InstancePtr greet_data(new Buffer::OwnedImpl(greeting_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*greet_data, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_REQ, filter_->getSession().getState());
  // Client login with no capability bits (protocol 320 path).
  std::string clogin_data = encodeClientLogin(0, "user1", CHALLENGE_SEQ_NUM);
  Buffer::InstancePtr client_login_data(new Buffer::OwnedImpl(clogin_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*client_login_data, false));
  EXPECT_EQ(1UL, config_->stats().login_attempts_.value());
  EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_RESP_320, filter_->getSession().getState());
  // Server OK completes the handshake.
  std::string srv_resp_data = encodeClientLoginResp(MYSQL_RESP_OK);
  Buffer::InstancePtr server_resp_data(new Buffer::OwnedImpl(srv_resp_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*server_resp_data, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_REQ, filter_->getSession().getState());
}
/**
* Test MySQL Handshake with protocol version 320
* Server responds with Error
* SM: greeting(p=10) -> challenge-req(v320) -> serv-resp-err
*/
TEST_F(MySQLFilterTest, MySqlHandshake320ErrTest) {
  initialize();
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onNewConnection());
  EXPECT_EQ(1UL, config_->stats().sessions_.value());
  // Server greeting (protocol 10): session advances to CHALLENGE_REQ.
  std::string greeting_data = encodeServerGreeting(MYSQL_PROTOCOL_10);
  Buffer::InstancePtr greet_data(new Buffer::OwnedImpl(greeting_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*greet_data, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_REQ, filter_->getSession().getState());
  // Client login with no capability bits (protocol 320 path).
  std::string clogin_data = encodeClientLogin(0, "user1", CHALLENGE_SEQ_NUM);
  Buffer::InstancePtr client_login_data(new Buffer::OwnedImpl(clogin_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*client_login_data, false));
  EXPECT_EQ(1UL, config_->stats().login_attempts_.value());
  EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_RESP_320, filter_->getSession().getState());
  // Server ERR: login failure counted, session enters ERROR state.
  std::string srv_resp_data = encodeClientLoginResp(MYSQL_RESP_ERR);
  Buffer::InstancePtr server_resp_data(new Buffer::OwnedImpl(srv_resp_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*server_resp_data, false));
  EXPECT_EQ(1UL, config_->stats().login_failures_.value());
  EXPECT_EQ(MySQLSession::State::MYSQL_ERROR, filter_->getSession().getState());
}
/**
* Test MySQL Handshake with SSL Request
* State-machine moves to SSL-Pass-Through
* SM: greeting(p=10) -> challenge-req(v320) -> SSL_PT
*/
TEST_F(MySQLFilterTest, MySqlHandshakeSSLTest) {
  initialize();
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onNewConnection());
  EXPECT_EQ(1UL, config_->stats().sessions_.value());
  // Server greeting (protocol 10): session advances to CHALLENGE_REQ.
  std::string greeting_data = encodeServerGreeting(MYSQL_PROTOCOL_10);
  Buffer::InstancePtr greet_data(new Buffer::OwnedImpl(greeting_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*greet_data, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_REQ, filter_->getSession().getState());
  // Client login with the SSL capability bit: the upgrade is counted and the
  // filter switches to SSL pass-through.
  std::string clogin_data = encodeClientLogin(MYSQL_CLIENT_CAPAB_SSL | MYSQL_CLIENT_CAPAB_41VS320,
                                              "user1", CHALLENGE_SEQ_NUM);
  Buffer::InstancePtr client_login_data(new Buffer::OwnedImpl(clogin_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*client_login_data, false));
  EXPECT_EQ(1UL, config_->stats().login_attempts_.value());
  EXPECT_EQ(1UL, config_->stats().upgraded_to_ssl_.value());
  EXPECT_EQ(MySQLSession::State::MYSQL_SSL_PT, filter_->getSession().getState());
  // Subsequent (opaque, "encrypted") payload passes through; state stays SSL_PT.
  std::string encr_data = "!@#$encr$#@!";
  std::string mysql_ssl_msg = BufferHelper::encodeHdr(encr_data, 2);
  Buffer::InstancePtr query_create_index(new Buffer::OwnedImpl(mysql_ssl_msg));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*query_create_index, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_SSL_PT, filter_->getSession().getState());
}
/**
* Test MySQL Handshake with protocol version 320
* Server responds with Auth Switch
* SM: greeting(p=10) -> challenge-req(v320) -> serv-resp-auth-switch ->
* -> auth_switch_resp -> serv-resp-ok
*/
TEST_F(MySQLFilterTest, MySqlHandshake320AuthSwitchTest) {
  initialize();
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onNewConnection());
  EXPECT_EQ(1UL, config_->stats().sessions_.value());
  // Server greeting (protocol 10): session advances to CHALLENGE_REQ.
  std::string greeting_data = encodeServerGreeting(MYSQL_PROTOCOL_10);
  Buffer::InstancePtr greet_data(new Buffer::OwnedImpl(greeting_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*greet_data, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_REQ, filter_->getSession().getState());
  // Client login (protocol 320 path).
  std::string clogin_data = encodeClientLogin(0, "user1", CHALLENGE_SEQ_NUM);
  Buffer::InstancePtr client_login_data(new Buffer::OwnedImpl(clogin_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*client_login_data, false));
  EXPECT_EQ(1UL, config_->stats().login_attempts_.value());
  EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_RESP_320, filter_->getSession().getState());
  // Server requests an auth method switch.
  std::string srv_resp_data = encodeClientLoginResp(MYSQL_RESP_AUTH_SWITCH);
  Buffer::InstancePtr server_resp_data(new Buffer::OwnedImpl(srv_resp_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*server_resp_data, false));
  // Client answers the auth switch: session moves to AUTH_SWITCH_MORE.
  std::string auth_switch_resp = encodeAuthSwitchResp();
  Buffer::InstancePtr client_switch_resp(new Buffer::OwnedImpl(auth_switch_resp));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*client_switch_resp, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_AUTH_SWITCH_MORE, filter_->getSession().getState());
  // Server OK (seq 1) completes the handshake.
  std::string srv_resp_ok_data = encodeClientLoginResp(MYSQL_RESP_OK, 1);
  Buffer::InstancePtr server_resp_ok_data(new Buffer::OwnedImpl(srv_resp_ok_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*server_resp_ok_data, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_REQ, filter_->getSession().getState());
}
/**
* Test MySQL Handshake with protocol version 320
* Server responds with Auth Switch and error
* SM: greeting(p=10) -> challenge-req(v320) -> serv-resp-auth-switch ->
* -> auth_switch_resp -> serv-resp-err
*/
TEST_F(MySQLFilterTest, MySqlHandshake320AuthSwitchErrTest) {
  initialize();
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onNewConnection());
  EXPECT_EQ(1UL, config_->stats().sessions_.value());
  // Server greeting (protocol 10): session advances to CHALLENGE_REQ.
  std::string greeting_data = encodeServerGreeting(MYSQL_PROTOCOL_10);
  Buffer::InstancePtr greet_data(new Buffer::OwnedImpl(greeting_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*greet_data, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_REQ, filter_->getSession().getState());
  // Client login (protocol 320 path).
  std::string clogin_data = encodeClientLogin(0, "user1", CHALLENGE_SEQ_NUM);
  Buffer::InstancePtr client_login_data(new Buffer::OwnedImpl(clogin_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*client_login_data, false));
  EXPECT_EQ(1UL, config_->stats().login_attempts_.value());
  EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_RESP_320, filter_->getSession().getState());
  // Server requests an auth method switch.
  std::string srv_resp_data = encodeClientLoginResp(MYSQL_RESP_AUTH_SWITCH);
  Buffer::InstancePtr server_resp_data(new Buffer::OwnedImpl(srv_resp_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*server_resp_data, false));
  // Client answers the auth switch.
  std::string auth_switch_resp = encodeAuthSwitchResp();
  Buffer::InstancePtr client_switch_resp(new Buffer::OwnedImpl(auth_switch_resp));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*client_switch_resp, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_AUTH_SWITCH_MORE, filter_->getSession().getState());
  // Server ERR (seq 1): session drops into RESYNC.
  std::string srv_resp_ok_data = encodeClientLoginResp(MYSQL_RESP_ERR, 1);
  Buffer::InstancePtr server_resp_ok_data(new Buffer::OwnedImpl(srv_resp_ok_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*server_resp_ok_data, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_RESYNC, filter_->getSession().getState());
  // A well-formed COM_QUERY with sequence 0 re-synchronizes the decoder and
  // is parsed as a query.
  Command mysql_cmd_encode{};
  mysql_cmd_encode.setCmd(Command::Cmd::COM_QUERY);
  std::string query = "CREATE DATABASE mysqldb";
  mysql_cmd_encode.setData(query);
  std::string query_data = mysql_cmd_encode.encode();
  std::string mysql_msg = BufferHelper::encodeHdr(query_data, 0);
  Buffer::InstancePtr client_query_data(new Buffer::OwnedImpl(mysql_msg));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*client_query_data, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_REQ_RESP, filter_->getSession().getState());
  EXPECT_EQ(1UL, config_->stats().queries_parsed_.value());
}
/**
* Resync Test failure MySQL Handshake with protocol version 320
* Server responds with Auth Switch and error
* SM: greeting(p=10) -> challenge-req(v320) -> serv-resp-auth-switch ->
* -> auth_switch_resp -> serv-resp-err -> Resync fails
*/
TEST_F(MySQLFilterTest, MySqlHandshake320AuthSwitchErrFailResync) {
  initialize();
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onNewConnection());
  EXPECT_EQ(1UL, config_->stats().sessions_.value());
  // Server greeting (protocol 10): session advances to CHALLENGE_REQ.
  std::string greeting_data = encodeServerGreeting(MYSQL_PROTOCOL_10);
  Buffer::InstancePtr greet_data(new Buffer::OwnedImpl(greeting_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*greet_data, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_REQ, filter_->getSession().getState());
  // Client login (protocol 320 path).
  std::string clogin_data = encodeClientLogin(0, "user1", CHALLENGE_SEQ_NUM);
  Buffer::InstancePtr client_login_data(new Buffer::OwnedImpl(clogin_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*client_login_data, false));
  EXPECT_EQ(1UL, config_->stats().login_attempts_.value());
  EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_RESP_320, filter_->getSession().getState());
  // Server requests an auth method switch.
  std::string srv_resp_data = encodeClientLoginResp(MYSQL_RESP_AUTH_SWITCH);
  Buffer::InstancePtr server_resp_data(new Buffer::OwnedImpl(srv_resp_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*server_resp_data, false));
  // Client answers the auth switch.
  std::string auth_switch_resp = encodeAuthSwitchResp();
  Buffer::InstancePtr client_switch_resp(new Buffer::OwnedImpl(auth_switch_resp));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*client_switch_resp, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_AUTH_SWITCH_MORE, filter_->getSession().getState());
  // Server ERR (seq 1): session drops into RESYNC.
  std::string srv_resp_ok_data = encodeClientLoginResp(MYSQL_RESP_ERR, 1);
  Buffer::InstancePtr server_resp_ok_data(new Buffer::OwnedImpl(srv_resp_ok_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*server_resp_ok_data, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_RESYNC, filter_->getSession().getState());
  // A COM_QUERY with the wrong sequence number (5, not 0) cannot
  // re-synchronize: session stays in RESYNC.
  Command mysql_cmd_encode{};
  mysql_cmd_encode.setCmd(Command::Cmd::COM_QUERY);
  std::string query = "CREATE DATABASE mysqldb";
  mysql_cmd_encode.setData(query);
  std::string query_data = mysql_cmd_encode.encode();
  std::string mysql_msg = BufferHelper::encodeHdr(query_data, 5);
  Buffer::InstancePtr client_query_data(new Buffer::OwnedImpl(mysql_msg));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*client_query_data, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_RESYNC, filter_->getSession().getState());
}
/**
* Negative Testing MySQL Handshake with protocol version 320
* Server responds with Auth Switch More
* SM: greeting(p=10) -> challenge-req(v320) -> serv-resp-auth-switch ->
* -> auth_switch_resp -> serv-resp-auth-switch-more
*/
TEST_F(MySQLFilterTest, MySqlHandshake320AuthSwitchMoreandMore) {
  initialize();
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onNewConnection());
  EXPECT_EQ(1UL, config_->stats().sessions_.value());
  // Server greeting (protocol 10): session advances to CHALLENGE_REQ.
  std::string greeting_data = encodeServerGreeting(MYSQL_PROTOCOL_10);
  Buffer::InstancePtr greet_data(new Buffer::OwnedImpl(greeting_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*greet_data, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_REQ, filter_->getSession().getState());
  // Client login (protocol 320 path).
  std::string clogin_data = encodeClientLogin(0, "user1", CHALLENGE_SEQ_NUM);
  Buffer::InstancePtr client_login_data(new Buffer::OwnedImpl(clogin_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*client_login_data, false));
  EXPECT_EQ(1UL, config_->stats().login_attempts_.value());
  EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_RESP_320, filter_->getSession().getState());
  // Server requests an auth method switch.
  std::string srv_resp_data = encodeClientLoginResp(MYSQL_RESP_AUTH_SWITCH);
  Buffer::InstancePtr server_resp_data(new Buffer::OwnedImpl(srv_resp_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*server_resp_data, false));
  // Client answers the auth switch.
  std::string auth_switch_resp = encodeAuthSwitchResp();
  Buffer::InstancePtr client_switch_resp(new Buffer::OwnedImpl(auth_switch_resp));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*client_switch_resp, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_AUTH_SWITCH_MORE, filter_->getSession().getState());
  // Server answers with MORE (seq 1): session loops back to
  // AUTH_SWITCH_RESP, expecting another client response.
  std::string srv_resp_ok_data = encodeClientLoginResp(MYSQL_RESP_MORE, 1);
  Buffer::InstancePtr server_resp_ok_data(new Buffer::OwnedImpl(srv_resp_ok_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*server_resp_ok_data, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_AUTH_SWITCH_RESP, filter_->getSession().getState());
}
/**
* Negative Testing MySQL Handshake with protocol version 320
* Server responds with unhandled code
* SM: greeting(p=10) -> challenge-req(v320) -> serv-resp-auth-switch ->
* -> auth_switch_resp -> serv-resp-unhandled
*/
TEST_F(MySQLFilterTest, MySqlHandshake320AuthSwitchMoreandUnhandled) {
  initialize();
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onNewConnection());
  EXPECT_EQ(1UL, config_->stats().sessions_.value());
  // Server greeting (protocol 10): session advances to CHALLENGE_REQ.
  std::string greeting_data = encodeServerGreeting(MYSQL_PROTOCOL_10);
  Buffer::InstancePtr greet_data(new Buffer::OwnedImpl(greeting_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*greet_data, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_REQ, filter_->getSession().getState());
  // Client login (protocol 320 path).
  std::string clogin_data = encodeClientLogin(0, "user1", CHALLENGE_SEQ_NUM);
  Buffer::InstancePtr client_login_data(new Buffer::OwnedImpl(clogin_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*client_login_data, false));
  EXPECT_EQ(1UL, config_->stats().login_attempts_.value());
  EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_RESP_320, filter_->getSession().getState());
  // Server requests an auth method switch.
  std::string srv_resp_data = encodeClientLoginResp(MYSQL_RESP_AUTH_SWITCH);
  Buffer::InstancePtr server_resp_data(new Buffer::OwnedImpl(srv_resp_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*server_resp_data, false));
  // Client answers the auth switch.
  std::string auth_switch_resp = encodeAuthSwitchResp();
  Buffer::InstancePtr client_switch_resp(new Buffer::OwnedImpl(auth_switch_resp));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*client_switch_resp, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_AUTH_SWITCH_MORE, filter_->getSession().getState());
  // Server answers with an unknown response code (0x32): session lands in
  // NOT_HANDLED.
  std::string srv_resp_ok_data = encodeClientLoginResp(0x32, 1);
  Buffer::InstancePtr server_resp_ok_data(new Buffer::OwnedImpl(srv_resp_ok_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*server_resp_ok_data, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_NOT_HANDLED, filter_->getSession().getState());
}
/**
* Negative sequence
* Test MySQL Handshake with protocol version 41
* - send 2 back-to-back Greeting message (duplicated message)
* -> expect filter to ignore the second.
*/
TEST_F(MySQLFilterTest, MySqlHandshake41Ok2GreetTest) {
  initialize();
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onNewConnection());
  EXPECT_EQ(1UL, config_->stats().sessions_.value());
  // First server greeting: session advances to CHALLENGE_REQ.
  std::string greeting_data = encodeServerGreeting(MYSQL_PROTOCOL_10);
  Buffer::InstancePtr greet_data(new Buffer::OwnedImpl(greeting_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*greet_data, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_REQ, filter_->getSession().getState());
  // Duplicate greeting: ignored, counted as a protocol error, state unchanged.
  std::string greeting_data2 = encodeServerGreeting(MYSQL_PROTOCOL_10);
  Buffer::InstancePtr greet_data2(new Buffer::OwnedImpl(greeting_data2));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*greet_data2, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_REQ, filter_->getSession().getState());
  EXPECT_EQ(1UL, config_->stats().protocol_errors_.value());
  // Client login still works afterwards.
  // NOTE(review): login_attempts_ reads 2 here after a single login -- TODO
  // confirm whether the duplicated greeting path increments it as well.
  std::string clogin_data =
      encodeClientLogin(MYSQL_CLIENT_CAPAB_41VS320, "user1", CHALLENGE_SEQ_NUM);
  Buffer::InstancePtr client_login_data(new Buffer::OwnedImpl(clogin_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*client_login_data, false));
  EXPECT_EQ(2UL, config_->stats().login_attempts_.value());
  EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_RESP_41, filter_->getSession().getState());
  // Server OK completes the handshake.
  std::string srv_resp_data = encodeClientLoginResp(MYSQL_RESP_OK);
  Buffer::InstancePtr server_resp_data(new Buffer::OwnedImpl(srv_resp_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*server_resp_data, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_REQ, filter_->getSession().getState());
}
/**
* Negative sequence
* Test MySQL Handshake with protocol version 41
* - send 2 back-to-back Challenge messages.
* -> expect the filter to ignore the second
*/
TEST_F(MySQLFilterTest, MySqlHandshake41Ok2CloginTest) {
  initialize();
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onNewConnection());
  EXPECT_EQ(1UL, config_->stats().sessions_.value());
  // Server greeting: session advances to CHALLENGE_REQ.
  std::string greeting_data = encodeServerGreeting(MYSQL_PROTOCOL_10);
  Buffer::InstancePtr greet_data(new Buffer::OwnedImpl(greeting_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*greet_data, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_REQ, filter_->getSession().getState());
  // First client login: counted, session advances.
  std::string clogin_data =
      encodeClientLogin(MYSQL_CLIENT_CAPAB_41VS320, "user1", CHALLENGE_SEQ_NUM);
  Buffer::InstancePtr client_login_data(new Buffer::OwnedImpl(clogin_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*client_login_data, false));
  EXPECT_EQ(1UL, config_->stats().login_attempts_.value());
  EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_RESP_41, filter_->getSession().getState());
  // Duplicate client login: ignored, no extra login attempt, one protocol
  // error, state unchanged.
  std::string clogin_data2 =
      encodeClientLogin(MYSQL_CLIENT_CAPAB_41VS320, "user1", CHALLENGE_SEQ_NUM);
  Buffer::InstancePtr client_login_data2(new Buffer::OwnedImpl(clogin_data2));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*client_login_data2, false));
  EXPECT_EQ(1UL, config_->stats().login_attempts_.value());
  EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_RESP_41, filter_->getSession().getState());
  EXPECT_EQ(1UL, config_->stats().protocol_errors_.value());
  // Server OK completes the handshake.
  std::string srv_resp_data = encodeClientLoginResp(MYSQL_RESP_OK);
  Buffer::InstancePtr server_resp_data(new Buffer::OwnedImpl(srv_resp_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*server_resp_data, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_REQ, filter_->getSession().getState());
}
/**
* Negative sequence
* Test MySQL Handshake with protocol version 41
* - send out or order challenge and greeting messages.
* -> expect the filter to ignore the challenge,
* since greeting was not seen
*/
TEST_F(MySQLFilterTest, MySqlHandshake41OkOOOLoginTest) {
  initialize();
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onNewConnection());
  EXPECT_EQ(1UL, config_->stats().sessions_.value());
  // Client login arrives before any server greeting: ignored, counted as a
  // protocol error, session stays in INIT.
  std::string clogin_data =
      encodeClientLogin(MYSQL_CLIENT_CAPAB_41VS320, "user1", CHALLENGE_SEQ_NUM);
  Buffer::InstancePtr client_login_data(new Buffer::OwnedImpl(clogin_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*client_login_data, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_INIT, filter_->getSession().getState());
  EXPECT_EQ(1UL, config_->stats().protocol_errors_.value());
  // The (late) greeting is then processed normally.
  std::string greeting_data = encodeServerGreeting(MYSQL_PROTOCOL_10);
  Buffer::InstancePtr greet_data(new Buffer::OwnedImpl(greeting_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*greet_data, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_REQ, filter_->getSession().getState());
}
/**
* Negative sequence
* Test MySQL Handshake with protocol version 41
* - send out or order challenge and greeting messages
* followed by login ok
* -> expect the filter to ignore initial challenge as well as
* serverOK because out of order
*/
TEST_F(MySQLFilterTest, MySqlHandshake41OkOOOFullLoginTest) {
  initialize();
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onNewConnection());
  EXPECT_EQ(1UL, config_->stats().sessions_.value());
  // Client login before the greeting: ignored, protocol error, state INIT.
  std::string clogin_data =
      encodeClientLogin(MYSQL_CLIENT_CAPAB_41VS320, "user1", CHALLENGE_SEQ_NUM);
  Buffer::InstancePtr client_login_data(new Buffer::OwnedImpl(clogin_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*client_login_data, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_INIT, filter_->getSession().getState());
  EXPECT_EQ(1UL, config_->stats().protocol_errors_.value());
  // Greeting is processed normally.
  std::string greeting_data = encodeServerGreeting(MYSQL_PROTOCOL_10);
  Buffer::InstancePtr greet_data(new Buffer::OwnedImpl(greeting_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*greet_data, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_REQ, filter_->getSession().getState());
  // Server OK without a processed Challenge: ignored, second protocol error,
  // state unchanged.
  std::string srv_resp_data = encodeClientLoginResp(MYSQL_RESP_OK);
  Buffer::InstancePtr server_resp_data(new Buffer::OwnedImpl(srv_resp_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*server_resp_data, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_REQ, filter_->getSession().getState());
  EXPECT_EQ(2UL, config_->stats().protocol_errors_.value());
}
/**
 * Negative sequence.
 * Test MySQL Handshake with protocol version 41:
 * - send a server greeting followed directly by a server login OK
 * -> expect the filter to ignore the server OK, because it has not
 *    processed a challenge (client login) message yet, and to count
 *    one protocol error.
 */
TEST_F(MySQLFilterTest, MySqlHandshake41OkGreetingLoginOKTest) {
  initialize();

  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onNewConnection());
  EXPECT_EQ(1UL, config_->stats().sessions_.value());

  // Normal greeting: session advances to the challenge-request state.
  std::string greeting_data = encodeServerGreeting(MYSQL_PROTOCOL_10);
  Buffer::InstancePtr greet_data(new Buffer::OwnedImpl(greeting_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*greet_data, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_REQ, filter_->getSession().getState());

  // Server OK arrives while still waiting for the client challenge:
  // it must be ignored and counted as a protocol error.
  std::string srv_resp_data = encodeClientLoginResp(MYSQL_RESP_OK);
  Buffer::InstancePtr server_resp_data(new Buffer::OwnedImpl(srv_resp_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*server_resp_data, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_REQ, filter_->getSession().getState());
  // Fixed: use the stats() accessor, consistent with every other assertion
  // in this file (was: config_->stats_.protocol_errors_).
  EXPECT_EQ(1UL, config_->stats().protocol_errors_.value());
}
/**
 * Negative Testing
 * Test MySQL Handshake with protocol version 320
 * and wrong Client Login Sequence number
 * -> expect the filter to ignore the client login (session remains in
 *    MYSQL_CHALLENGE_REQ) because its sequence number is not the
 *    expected CHALLENGE_SEQ_NUM.
 */
TEST_F(MySQLFilterTest, MySqlHandshake320WrongCloginSeqTest) {
initialize();
EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onNewConnection());
EXPECT_EQ(1UL, config_->stats().sessions_.value());
// Normal greeting: session advances to the challenge-request state.
std::string greeting_data = encodeServerGreeting(MYSQL_PROTOCOL_10);
Buffer::InstancePtr greet_data(new Buffer::OwnedImpl(greeting_data));
EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*greet_data, false));
EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_REQ, filter_->getSession().getState());
// Client login carries sequence number 2 instead of CHALLENGE_SEQ_NUM:
// the filter must ignore it and leave the session state unchanged.
std::string clogin_data = encodeClientLogin(0, "user1", 2);
Buffer::InstancePtr client_login_data(new Buffer::OwnedImpl(clogin_data));
EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*client_login_data, false));
EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_REQ, filter_->getSession().getState());
}
/**
 * Negative Testing
 * Test MySQL Handshake with protocol version 320.
 * A client auth-switch response arrives before the server requested it,
 * and the final server OK carries a wrong sequence number
 * -> expect the filter to ignore the out-of-order auth-switch response and
 *    the badly-sequenced OK, leaving the session in MYSQL_AUTH_SWITCH_RESP.
 */
// Renamed from MySqlHandshake320AuthSwitchWromgSeqTest (typo: Wromg -> Wrong).
TEST_F(MySQLFilterTest, MySqlHandshake320AuthSwitchWrongSeqTest) {
  initialize();

  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onNewConnection());
  EXPECT_EQ(1UL, config_->stats().sessions_.value());

  // Greeting then client login: a normal protocol-320 handshake start.
  std::string greeting_data = encodeServerGreeting(MYSQL_PROTOCOL_10);
  Buffer::InstancePtr greet_data(new Buffer::OwnedImpl(greeting_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*greet_data, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_REQ, filter_->getSession().getState());

  std::string clogin_data = encodeClientLogin(0, "user1", CHALLENGE_SEQ_NUM);
  Buffer::InstancePtr client_login_data(new Buffer::OwnedImpl(clogin_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*client_login_data, false));
  EXPECT_EQ(1UL, config_->stats().login_attempts_.value());
  EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_RESP_320, filter_->getSession().getState());

  // Auth-switch response before the server asked for one: ignored,
  // session state unchanged.
  std::string auth_switch_resp = encodeAuthSwitchResp();
  Buffer::InstancePtr client_switch_resp(new Buffer::OwnedImpl(auth_switch_resp));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*client_switch_resp, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_RESP_320, filter_->getSession().getState());

  // Server now requests the auth switch; session advances.
  std::string srv_resp_data = encodeClientLoginResp(MYSQL_RESP_AUTH_SWITCH);
  Buffer::InstancePtr server_resp_data(new Buffer::OwnedImpl(srv_resp_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*server_resp_data, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_AUTH_SWITCH_RESP, filter_->getSession().getState());

  // Server OK with sequence number 1 — wrong for this point of the
  // handshake: ignored, state unchanged.
  std::string srv_resp_ok_data = encodeClientLoginResp(MYSQL_RESP_OK, 1);
  Buffer::InstancePtr server_resp_ok_data(new Buffer::OwnedImpl(srv_resp_ok_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*server_resp_ok_data, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_AUTH_SWITCH_RESP, filter_->getSession().getState());
}
/**
 * Negative Testing
 * Test MySQL Handshake with protocol version 320
 * Server responds with unexpected code
 * -> expect filter to set state to not handled, and to stay there for
 *    all subsequent traffic on the session.
 */
TEST_F(MySQLFilterTest, MySqlHandshake320WrongServerRespCode) {
initialize();
EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onNewConnection());
EXPECT_EQ(1UL, config_->stats().sessions_.value());
// Greeting then client login: a normal protocol-320 handshake start.
std::string greeting_data = encodeServerGreeting(MYSQL_PROTOCOL_10);
Buffer::InstancePtr greet_data(new Buffer::OwnedImpl(greeting_data));
EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*greet_data, false));
EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_REQ, filter_->getSession().getState());
std::string clogin_data = encodeClientLogin(0, "user1", CHALLENGE_SEQ_NUM);
Buffer::InstancePtr client_login_data(new Buffer::OwnedImpl(clogin_data));
EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*client_login_data, false));
EXPECT_EQ(1UL, config_->stats().login_attempts_.value());
EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_RESP_320, filter_->getSession().getState());
// 0x53 is not a recognized server response code: session -> NOT_HANDLED.
std::string srv_resp_ok_data = encodeClientLoginResp(0x53, 0);
Buffer::InstancePtr server_resp_ok_data(new Buffer::OwnedImpl(srv_resp_ok_data));
EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*server_resp_ok_data, false));
EXPECT_EQ(MySQLSession::State::MYSQL_NOT_HANDLED, filter_->getSession().getState());
// Further (well-formed but empty) traffic leaves the session NOT_HANDLED.
std::string msg_data = "";
std::string mysql_msg = BufferHelper::encodeHdr(msg_data, 3);
Buffer::InstancePtr client_query_data(new Buffer::OwnedImpl(mysql_msg));
EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*client_query_data, false));
EXPECT_EQ(MySQLSession::State::MYSQL_NOT_HANDLED, filter_->getSession().getState());
}
/**
 * Negative Testing
 * Invalid MySQL packet header
 * -> expect the filter to move to MYSQL_NOT_HANDLED after an unexpected
 *    server response code, and to stay there when raw non-MySQL bytes
 *    arrive afterwards.
 */
TEST_F(MySQLFilterTest, MySqlWrongHdrPkt) {
  initialize();

  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onNewConnection());
  EXPECT_EQ(1UL, config_->stats().sessions_.value());

  // Greeting then client login: a normal handshake start.
  std::string greeting_data = encodeServerGreeting(MYSQL_PROTOCOL_10);
  Buffer::InstancePtr greet_data(new Buffer::OwnedImpl(greeting_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*greet_data, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_REQ, filter_->getSession().getState());

  std::string clogin_data = encodeClientLogin(0, "user1", CHALLENGE_SEQ_NUM);
  Buffer::InstancePtr client_login_data(new Buffer::OwnedImpl(clogin_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*client_login_data, false));
  EXPECT_EQ(1UL, config_->stats().login_attempts_.value());
  EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_RESP_320, filter_->getSession().getState());

  // Unexpected server response code (0x53): the session becomes NOT_HANDLED.
  std::string srv_resp_ok_data = encodeClientLoginResp(0x53, 0);
  Buffer::InstancePtr server_resp_ok_data(new Buffer::OwnedImpl(srv_resp_ok_data));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*server_resp_ok_data, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_NOT_HANDLED, filter_->getSession().getState());

  // Feed raw bytes that do not form a valid MySQL packet header; the
  // session must remain NOT_HANDLED. (Removed an unused Command object and
  // its encode() result, which never contributed to the payload.)
  std::string mysql_msg = "123";
  Buffer::InstancePtr client_query_data(new Buffer::OwnedImpl(mysql_msg));
  EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*client_query_data, false));
  EXPECT_EQ(MySQLSession::State::MYSQL_NOT_HANDLED, filter_->getSession().getState());
}
/*
 * Test Mysql query handler, after handshake completes
 * SM: greeting(p=10) -> challenge-req(v41) -> serv-resp-ok ->
 * -> Query-request -> Query-response
 * validate counters and state-machine, including statements that the
 * query parser does not count (see notes inline).
 */
TEST_F(MySQLFilterTest, MySqlLoginAndQueryTest) {
initialize();
EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onNewConnection());
EXPECT_EQ(1UL, config_->stats().sessions_.value());
// Complete a protocol-41 handshake: greeting -> client login -> server OK.
std::string greeting_data = encodeServerGreeting(MYSQL_PROTOCOL_10);
Buffer::InstancePtr greet_data(new Buffer::OwnedImpl(greeting_data));
EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*greet_data, false));
EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_REQ, filter_->getSession().getState());
std::string clogin_data =
encodeClientLogin(MYSQL_CLIENT_CAPAB_41VS320, "user1", CHALLENGE_SEQ_NUM);
Buffer::InstancePtr client_login_data(new Buffer::OwnedImpl(clogin_data));
EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*client_login_data, false));
EXPECT_EQ(1UL, config_->stats().login_attempts_.value());
EXPECT_EQ(MySQLSession::State::MYSQL_CHALLENGE_RESP_41, filter_->getSession().getState());
std::string srv_resp_data = encodeClientLoginResp(MYSQL_RESP_OK);
Buffer::InstancePtr server_resp_data(new Buffer::OwnedImpl(srv_resp_data));
EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*server_resp_data, false));
// Handshake done: session now alternates MYSQL_REQ <-> MYSQL_REQ_RESP.
EXPECT_EQ(MySQLSession::State::MYSQL_REQ, filter_->getSession().getState());
// Query 1: CREATE DATABASE -- parsed, counter goes to 1.
Command mysql_cmd_encode{};
mysql_cmd_encode.setCmd(Command::Cmd::COM_QUERY);
std::string query = "CREATE DATABASE mysqldb";
mysql_cmd_encode.setData(query);
std::string query_data = mysql_cmd_encode.encode();
std::string mysql_msg = BufferHelper::encodeHdr(query_data, 0);
Buffer::InstancePtr client_query_data(new Buffer::OwnedImpl(mysql_msg));
EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*client_query_data, false));
EXPECT_EQ(MySQLSession::State::MYSQL_REQ_RESP, filter_->getSession().getState());
EXPECT_EQ(1UL, config_->stats().queries_parsed_.value());
srv_resp_data = encodeClientLoginResp(MYSQL_RESP_OK, 0, 1);
Buffer::InstancePtr request_resp_data(new Buffer::OwnedImpl(srv_resp_data));
EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*request_resp_data, false));
EXPECT_EQ(MySQLSession::State::MYSQL_REQ, filter_->getSession().getState());
// Query 2: SHOW DATABASES -- parsed, counter goes to 2.
mysql_cmd_encode.setCmd(Command::Cmd::COM_QUERY);
query = "show databases";
mysql_cmd_encode.setData(query);
query_data = mysql_cmd_encode.encode();
mysql_msg = BufferHelper::encodeHdr(query_data, 0);
Buffer::InstancePtr query_show(new Buffer::OwnedImpl(mysql_msg));
EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*query_show, false));
EXPECT_EQ(MySQLSession::State::MYSQL_REQ_RESP, filter_->getSession().getState());
EXPECT_EQ(2UL, config_->stats().queries_parsed_.value());
srv_resp_data = encodeClientLoginResp(MYSQL_RESP_OK, 0, 1);
Buffer::InstancePtr show_resp_data(new Buffer::OwnedImpl(srv_resp_data));
EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*show_resp_data, false));
EXPECT_EQ(MySQLSession::State::MYSQL_REQ, filter_->getSession().getState());
// Query 3: CREATE TABLE -- parsed, counter goes to 3.
mysql_cmd_encode.setCmd(Command::Cmd::COM_QUERY);
query = "CREATE TABLE students (name TEXT, student_number INTEGER, city TEXT)";
mysql_cmd_encode.setData(query);
query_data = mysql_cmd_encode.encode();
mysql_msg = BufferHelper::encodeHdr(query_data, 0);
Buffer::InstancePtr query_create(new Buffer::OwnedImpl(mysql_msg));
EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*query_create, false));
EXPECT_EQ(MySQLSession::State::MYSQL_REQ_RESP, filter_->getSession().getState());
EXPECT_EQ(3UL, config_->stats().queries_parsed_.value());
srv_resp_data = encodeClientLoginResp(MYSQL_RESP_OK, 0, 1);
Buffer::InstancePtr create_resp_data(new Buffer::OwnedImpl(srv_resp_data));
EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*create_resp_data, false));
EXPECT_EQ(MySQLSession::State::MYSQL_REQ, filter_->getSession().getState());
// Query 4: incomplete "CREATE index" statement -- the counter stays at 3.
// NOTE(review): presumably the SQL parser rejects this statement so it is
// not counted as parsed; the state machine still advances -- confirm.
mysql_cmd_encode.setCmd(Command::Cmd::COM_QUERY);
query = "CREATE index index1";
mysql_cmd_encode.setData(query);
query_data = mysql_cmd_encode.encode();
mysql_msg = BufferHelper::encodeHdr(query_data, 0);
Buffer::InstancePtr query_create_index(new Buffer::OwnedImpl(mysql_msg));
EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*query_create_index, false));
EXPECT_EQ(MySQLSession::State::MYSQL_REQ_RESP, filter_->getSession().getState());
EXPECT_EQ(3UL, config_->stats().queries_parsed_.value());
srv_resp_data = encodeClientLoginResp(MYSQL_RESP_OK, 0, 1);
Buffer::InstancePtr create_index_resp_data(new Buffer::OwnedImpl(srv_resp_data));
EXPECT_EQ(Envoy::Network::FilterStatus::Continue,
filter_->onData(*create_index_resp_data, false));
EXPECT_EQ(MySQLSession::State::MYSQL_REQ, filter_->getSession().getState());
// Request 5: COM_FIELD_LIST is not a query -- counter stays at 3, but the
// request/response state machine still cycles normally.
mysql_cmd_encode.setCmd(Command::Cmd::COM_FIELD_LIST);
query = "";
mysql_cmd_encode.setData(query);
query_data = mysql_cmd_encode.encode();
mysql_msg = BufferHelper::encodeHdr(query_data, 0);
Buffer::InstancePtr cmd_field_list(new Buffer::OwnedImpl(mysql_msg));
EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*cmd_field_list, false));
EXPECT_EQ(MySQLSession::State::MYSQL_REQ_RESP, filter_->getSession().getState());
EXPECT_EQ(3UL, config_->stats().queries_parsed_.value());
srv_resp_data = encodeClientLoginResp(MYSQL_RESP_OK, 0, 1);
Buffer::InstancePtr field_list_resp_data(new Buffer::OwnedImpl(srv_resp_data));
EXPECT_EQ(Envoy::Network::FilterStatus::Continue, filter_->onData(*field_list_resp_data, false));
EXPECT_EQ(MySQLSession::State::MYSQL_REQ, filter_->getSession().getState());
}
} // namespace MySQLProxy
} // namespace NetworkFilters
} // namespace Extensions
} // namespace Envoy
|
#!/bin/bash
# Merge every .ogg file in the directory given as $1 into a single
# "<dir>/<dir>.ogg" using `oggz merge`, appending the command that was run
# to audioMake.txt for later inspection.
#
# Usage: ./<script> <directory>

fn="$1"

# Fail early instead of globbing the filesystem root on a missing argument.
if [ -z "$fn" ]; then
    echo "usage: $0 <directory>" >&2
    exit 1
fi

# Collect the input files in an array so names containing spaces survive
# intact (the old string-concatenation + eval approach split on whitespace).
files=()
for f in "$fn"/*.ogg; do
    files+=("$f")
done

if [ "${#files[@]}" -eq 0 ]; then
    echo "no .ogg files found in $fn" >&2
    exit 1
fi

# Log the equivalent command line, then run it directly -- no eval needed.
echo "oggz merge -o $fn/$fn.ogg ${files[*]}" >> audioMake.txt
oggz merge -o "$fn/$fn.ogg" "${files[@]}"
|
package client;
/**
* The minesweeper class keeps instances of an induvidual minesweeper game
* this includes the board, location of bombs, and creating them
*/
public class MineSweeperLogic {
int sizeRow;
int sizeColumn;
int numBombs;
public int field[][];
public int bombCoor[][];
/**
* This is the constructor, creates a new instance
* @param dem is the demention of the game board
* @param numBombs is the number of bombs that should be loaded
*/
public MineSweeperLogic(int dem, int numBombs){
this.sizeColumn = dem;
this.sizeRow = dem;
this.numBombs = numBombs;
field = new int[(sizeRow+2)][(sizeColumn +2)];
bombCoor = new int [numBombs][2];
loadBombs();
}
/**
* Uses an algorithm to spawn random coordinated for the location of the bombs on the board
* There is checks to make sure the bomb fits on the board and will not over lap a previous bomb
*/
public void loadBombs() {
int numBombsPlaced = 0;
while (numBombsPlaced < numBombs) {
int x = 0;
int y = 0;
while ((x < 1) || (x > sizeRow))
x = (int) (Math.random() * sizeRow);
while ((y < 1) || (y > sizeColumn))
y = (int) (Math.random() * sizeRow);
if (field[x][y] != -1 && x > 0 && y > 0 && x < sizeRow && y < sizeColumn) {
field[x][y] = -1;
bombCoor[numBombsPlaced][0] = x;
bombCoor[numBombsPlaced][1] = y;
numBombsPlaced ++;
}
}
setNumbers();
}
/**
* This is a test function used to print out all values of the board
*/
public void printMap(){
for(int i = 0; i<field.length; i++){
for(int j = 0; j< field[0].length; j++) {
System.out.print(field[i][j]+ " , ");
}
System.out.println();
}
}
/**
* SetNumbers iterate through each spot on the board to see if it is a bomb
* If it is not a bomb it goes to center check
*/
public void setNumbers(){
for(int i=0; i< field.length; i++){
for(int j=0; j< field[0].length; j++) {
if(i> 0 && i< field.length-1 && j> 0 && j<field[0].length-1) {
if(field[i][j]!= -1){
field[i][j] = centerCheck(i,j);
}
}
}
}
}
/**
* centerCheck takes a space that is not a bomb and check to see if there are any bombs around it
* It adds the number of bombs and puts that value on the board
* @param X the x coordinate
* @param Y the y coordinate
* @return the number of bombs
*/
public int centerCheck(int X , int Y) { ///Note x, y are actually revered in this function
int results = 0;
for(int i = X-1; i<X+2; i++) {
for(int j = Y-1; j<Y+2; j++) {
if(field[i][j]==-1){
results++;
}
}
}
return results;
}
/**
* getter function for the given coordinates
* @param x the x coordinate
* @param y the y coordinate
* @return the value at given coordinates
*/
public int getNum (int x, int y) {
return field[x+1][y+1];
}
/**
* gets the array of bomb coordinates
* @return the 2D array of bomb coordinates
*/
public int[][] getBombCoor() {
return bombCoor;
}
} |
require_relative 'base'
require 'git'
module I3
module Blocks
# i3bar block that watches a git repository and renders a one-line Pango
# summary of its working-tree state: clean, dirty, or "filthy" once the
# number of pending changes reaches filthy_threshold.
#
# NOTE(review): relies on the `git` gem and on the project-local Base class
# (`attribute`, `build_message`); their exact semantics are not visible here.
class WatchGit < I3::Blocks::Base
# Pango-markup templates; %{...} keys are filled from the hash returned
# by #status.
attribute :clean_format, String, default: '<span color="lime">%{name}: </span>'
attribute :dirty_format, String, default: '%{name}: <span color="cyan">𝛥%{modified}</span> <span color="lime">+%{added}</span> <span color="red">-%{deleted}</span> <span color="yellow">?%{untracked}</span>'
attribute :filthy_format, String, default: '<span color="red">%{name}: </span>'
# Number of pending changes at or above which the filthy format is used.
attribute :filthy_threshold, Integer, default: 8
# Gathers working-tree statistics for the repository at repo_dir.
# Returns a hash with :name (last path component), :added, :deleted,
# :modified, :untracked and :cleanliness (sum of the four counts;
# 0 means the tree is clean).
def status(repo_dir)
repo = Git.open(repo_dir)
results = {}
results[:name] = repo_dir.split(/\//).last
status = Git::Status.new(repo)
# NOTE(review): empty iteration presumably forces the lazy Git::Status
# object to populate before the counts below are read -- confirm.
status.each {}
results[:added] = status.added.length
results[:deleted] = status.deleted.length
results[:modified] = status.changed.length
results[:untracked] = status.untracked.length
results[:cleanliness] = results[:added] + results[:deleted] + results[:modified] + results[:untracked]
results
end
# Entry point: builds the i3bar message (Pango markup) for repo_dir.
def call(repo_dir)
build_message(markup: 'pango', full_text: format_results(status(repo_dir)))
end
# Picks the template matching the cleanliness level and interpolates the
# status hash into it.
def format_results(status)
if status[:cleanliness] == 0
return clean_format % status
elsif status[:cleanliness] >= filthy_threshold
return filthy_format % status
else
return dirty_format % status
end
end
end
end
end
#!/bin/bash
# Run data_prepare_one.sh once for every entry listed in
# lists/masif_site_only.txt (one entry per line).

i=1
while read -r p; do
    # -r keeps backslashes literal; quoting "$p" preserves entries
    # containing whitespace.
    ./data_prepare_one.sh "$p"
    # Line counter; useful when tracing progress with `bash -x`.
    i=$((i+1))
done < lists/masif_site_only.txt
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.