content stringlengths 1 1.04M | input_ids listlengths 1 774k | ratio_char_token float64 0.38 22.9 | token_count int64 1 774k |
|---|---|---|---|
import smart_imports
smart_imports.all()
| [
198,
11748,
4451,
62,
320,
3742,
198,
198,
27004,
62,
320,
3742,
13,
439,
3419,
628,
628,
628,
628,
628,
628,
198
] | 2.5 | 22 |
from warnings import warn
from rxnpy.chemical.ingredient import Ingredient
from rxnpy.reaction.tools.ingredients_calc import QuantityCalculator, RelativeCalculator
if __name__ == "__main__":
calc = IngredientCalculator()
calc.add({
"name": "secbuLi",
"volume": 0.0172 * Unit("ml"),
"molar_mass": 64.06 * Unit("g/mol"),
"density": 0.768 * Unit("g/ml"),
"molar_conc": 1.3 * Unit("M")
})
calc.add({
"name": "styrene",
"mass": 0.455 * Unit("g"),
"molar_mass": 104.15 * Unit("g/mol"),
"density": 0.909 * Unit("g/ml")
})
calc.add({
"name": "toluene",
"volume": 10 * Unit("ml"),
"molar_mass": 92.141 * Unit("g/mol"),
"density": 0.87 * Unit("g/ml")
})
calc.add({
"name": "THF",
"mole": 45.545 * Unit("mmol"),
"molar_mass": 72.107 * Unit("g/mol"),
"density": .8876 * Unit("g/ml"),
})
print(calc)
calc.scale(2)
print(calc)
calc.remove("toluene")
print(calc)
calc.scale_one("styrene", 0.5)
print(calc)
| [
6738,
14601,
1330,
9828,
628,
198,
6738,
374,
87,
77,
9078,
13,
31379,
13,
278,
445,
1153,
1330,
17589,
445,
1153,
198,
6738,
374,
87,
77,
9078,
13,
260,
2673,
13,
31391,
13,
278,
23320,
62,
9948,
66,
1330,
39789,
9771,
3129,
1352,
... | 2.007326 | 546 |
from flask_wtf import FlaskForm
from wtforms import StringField, TextAreaField, SubmitField
from wtforms.validators import Required
| [
6738,
42903,
62,
86,
27110,
1330,
46947,
8479,
198,
6738,
266,
83,
23914,
1330,
10903,
15878,
11,
8255,
30547,
15878,
11,
39900,
15878,
198,
6738,
266,
83,
23914,
13,
12102,
2024,
1330,
20906,
628
] | 3.911765 | 34 |
import schema
| [
11748,
32815,
628,
628
] | 4.25 | 4 |
from genericpath import exists
import json
import os
import pandas as pd
from src.modules.scraper import driver
import webbrowser
import numpy as np
from pathlib import Path
from shutil import get_terminal_size
| [
6738,
14276,
6978,
1330,
7160,
198,
11748,
33918,
198,
11748,
28686,
198,
11748,
19798,
292,
355,
279,
67,
198,
6738,
12351,
13,
18170,
13,
1416,
38545,
1330,
4639,
198,
11748,
3992,
40259,
198,
11748,
299,
32152,
355,
45941,
198,
6738,
... | 3.785714 | 56 |
import math
import numpy as np
if __name__ == '__main__':
anchor = generate_anchor(50, 50, (512,812.34))
assert anchor.shape == (50*50*9,4) | [
11748,
10688,
198,
11748,
299,
32152,
355,
45941,
628,
198,
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834,
10354,
198,
220,
220,
220,
18021,
796,
7716,
62,
3702,
273,
7,
1120,
11,
2026,
11,
357,
25836,
11,
23,
1065,
13,
2682,
40... | 2.442623 | 61 |
#copyright 2018 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tiny imagenet input."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from absl import flags
import tensorflow as tf
FLAGS = flags.FLAGS
flags.DEFINE_string('tiny_imagenet_data_dir', None,
'Directory with Tiny Imagenet dataset in TFRecord format.')
def tiny_imagenet_parser(value, image_size, is_training):
  """Parses a serialized Tiny Imagenet example into an (image, label) pair.

  Args:
    value: encoded example (serialized tf.Example proto from a TFRecord).
    image_size: size of one side of the square output image.
    is_training: if True then do training preprocessing (which includes
      random cropping), otherwise do eval preprocessing.

  Returns:
    image: float32 tensor of shape [image_size, image_size, 3], rescaled
      to the [-1, 1] range.
    label: int32 scalar with the true label (-1 if the feature was absent).
  """
  # Feature spec: a missing image decodes to '' and a missing label to -1.
  keys_to_features = {
      'image/encoded': tf.FixedLenFeature((), tf.string, ''),
      'label/tiny_imagenet': tf.FixedLenFeature([], tf.int64, -1),
  }
  parsed = tf.parse_single_example(value, keys_to_features)
  image_buffer = tf.reshape(parsed['image/encoded'], shape=[])
  image = tf.image.decode_image(image_buffer, channels=3)
  # Convert to floats in [0, 1] before any arithmetic below.
  image = tf.image.convert_image_dtype(
      image, dtype=tf.float32)
  # Crop image
  if is_training:
    # Training only: take a random distorted crop covering at least half of
    # the image area; this is the only train/eval preprocessing difference.
    bbox_begin, bbox_size, _ = tf.image.sample_distorted_bounding_box(
        tf.shape(image),
        bounding_boxes=tf.constant([0.0, 0.0, 1.0, 1.0],
                                   dtype=tf.float32,
                                   shape=[1, 1, 4]),
        min_object_covered=0.5,
        aspect_ratio_range=[0.75, 1.33],
        area_range=[0.5, 1.0],
        max_attempts=20,
        use_image_if_no_bounding_boxes=True)
    image = tf.slice(image, bbox_begin, bbox_size)
  # resize image
  image = tf.image.resize_bicubic([image], [image_size, image_size])[0]
  # Rescale image to [-1, 1] range.
  image = tf.multiply(tf.subtract(image, 0.5), 2.0)
  image = tf.reshape(image, [image_size, image_size, 3])
  # Labels are in [0, 199] range
  label = tf.cast(
      tf.reshape(parsed['label/tiny_imagenet'], shape=[]), dtype=tf.int32)
  return image, label
def tiny_imagenet_input(split, batch_size, image_size, is_training):
  """Returns Tiny Imagenet Dataset.

  Args:
    split: name of the split, "train" or "validation".
    batch_size: size of the minibatch.
    image_size: size of the one side of the image. Output images will be
      resized to square shape image_size*image_size.
    is_training: if True then training preprocessing is done, otherwise eval
      preprocessing is done.

  Raises:
    ValueError: if name of the split is incorrect.

  Returns:
    Instance of tf.data.Dataset with the dataset.
  """
  if split.lower().startswith('train'):
    filepath = os.path.join(FLAGS.tiny_imagenet_data_dir, 'train.tfrecord')
  elif split.lower().startswith('validation'):
    filepath = os.path.join(FLAGS.tiny_imagenet_data_dir, 'validation.tfrecord')
  else:
    raise ValueError('Invalid split: %s' % split)
  # 8 MB read buffer to cut down on filesystem round-trips.
  dataset = tf.data.TFRecordDataset(filepath, buffer_size=8*1024*1024)
  if is_training:
    # Shuffle within a 10k-record window and repeat indefinitely (training
    # loops are bounded by steps, not epochs).
    dataset = dataset.shuffle(10000)
    dataset = dataset.repeat()
  # Fused map+batch; drop_remainder=True keeps the batch dimension static,
  # which set_shapes below relies on.
  dataset = dataset.apply(
      tf.contrib.data.map_and_batch(
          lambda value: tiny_imagenet_parser(value, image_size, is_training),
          batch_size=batch_size,
          num_parallel_batches=4,
          drop_remainder=True))
  def set_shapes(images, labels):
    """Statically set the batch_size dimension."""
    images.set_shape(images.get_shape().merge_with(
        tf.TensorShape([batch_size, None, None, None])))
    labels.set_shape(labels.get_shape().merge_with(
        tf.TensorShape([batch_size])))
    return images, labels
  # Assign static batch size dimension
  dataset = dataset.map(set_shapes)
  dataset = dataset.prefetch(tf.contrib.data.AUTOTUNE)
  return dataset
def num_examples_per_epoch(split):
  """Return the number of examples contained in a dataset split.

  Args:
    split: name of the split, "train" or "validation".

  Raises:
    ValueError: if the split name is not recognized.

  Returns:
    Number of examples in the split.
  """
  normalized = split.lower()
  if normalized.startswith('train'):
    return 100000
  if normalized.startswith('validation'):
    return 10000
  raise ValueError('Invalid split: %s' % split)
| [
2,
22163,
4766,
2864,
3012,
3457,
13,
1439,
6923,
33876,
13,
198,
2,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
198,
2,
345,
743,
407,
779,
428,
2393,
2845,
287,
11846,
351,
262,
1... | 2.691345 | 1,837 |
# Copyright 2018-2021 Xanadu Quantum Technologies Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Unit tests for the get_unitary_matrix transform
"""
from functools import reduce
import pytest
from pennylane import numpy as np
from gate_data import I, X, Y, Z, H, S, CNOT
import pennylane as qml
from pennylane.transforms.get_unitary_matrix import get_unitary_matrix
# test non-parametric single qubit gates
nonparam_1qubit_op_list = [qml.PauliX, qml.PauliY, qml.PauliZ, qml.Hadamard, qml.S, qml.T, qml.SX]
@pytest.mark.parametrize("op", nonparam_1qubit_op_list)
@pytest.mark.parametrize("wire", [0, 1, 2])
def test_get_unitary_matrix_nonparam_1qubit_ops(op, wire):
"""Check the matrices for different nonparametrized single-qubit gates, which are acting on different qubits in a space of three qubits."""
wires = [0, 1, 2]
get_matrix = get_unitary_matrix(testcircuit, wires)
matrix = get_matrix(wire)
if wire == 0:
expected_matrix = np.kron(op(wires=wire).get_matrix(), np.eye(4))
if wire == 1:
expected_matrix = np.kron(np.eye(2), np.kron(op(wires=wire).get_matrix(), np.eye(2)))
if wire == 2:
expected_matrix = np.kron(np.eye(4), op(wires=wire).get_matrix())
assert np.allclose(matrix, expected_matrix)
# Test a circuit containing multiple gates
def test_get_unitary_matrix_multiple_ops():
"""Check the total matrix for a circuit containing multiple gates. Also
checks that non-integer wires work"""
wires = ["a", "b", "c"]
get_matrix = get_unitary_matrix(testcircuit, wires)
matrix = get_matrix()
expected_matrix = np.kron(I, CNOT) @ np.kron(X, np.kron(S, H))
assert np.allclose(matrix, expected_matrix)
@pytest.mark.parametrize("target_wire", [0, 2, 3, 4])
def test_get_unitary_matrix_CNOT(target_wire):
"""Test CNOT: 2-qubit gate with different target wires, some non-adjacent."""
wires = [0, 1, 2, 3, 4]
get_matrix = get_unitary_matrix(testcircuit, wires)
matrix = get_matrix()
# test the matrix operation on a state
state0 = [1, 0]
state1 = [0, 1]
teststate = reduce(np.kron, [state1, state1, state1, state1, state1])
if target_wire == 0:
expected_state = reduce(np.kron, [state0, state1, state1, state1, state1])
elif target_wire == 2:
expected_state = reduce(np.kron, [state1, state1, state0, state1, state1])
elif target_wire == 3:
expected_state = reduce(np.kron, [state1, state1, state1, state0, state1])
elif target_wire == 4:
expected_state = reduce(np.kron, [state1, state1, state1, state1, state0])
obtained_state = matrix @ teststate
assert np.allclose(obtained_state, expected_state)
def test_get_unitary_matrix_CRX():
"""Test controlled rotation with non-adjacent control and target wires"""
testangle = np.pi / 4
wires = [0, 1, 2]
# test applying to state
state0 = [1, 0]
state1 = [0, 1]
# perform controlled rotation
teststate1 = reduce(np.kron, [state1, state1, state1])
# do not perform controlled rotation
teststate0 = reduce(np.kron, [state1, state1, state0])
expected_state1 = reduce(
np.kron, [qml.RX(testangle, wires=1).get_matrix() @ state1, state1, state1]
)
expected_state0 = teststate0
get_matrix = get_unitary_matrix(testcircuit, wires)
matrix = get_matrix()
obtained_state1 = matrix @ teststate1
obtained_state0 = matrix @ teststate0
assert np.allclose(obtained_state1, expected_state1)
assert np.allclose(obtained_state0, expected_state0)
def test_get_unitary_matrix_Toffoli():
"""Check the Toffoli matrix by its action on states"""
wires = [0, "a", 2, "c", 4]
# test applying to state
state0 = [1, 0]
state1 = [0, 1]
teststate1 = reduce(np.kron, [state1, state1, state1, state1, state1])
teststate2 = reduce(np.kron, [state0, state0, state1, state1, state0])
expected_state1 = reduce(np.kron, [state1, state0, state1, state1, state1])
expected_state2 = teststate2
get_matrix = get_unitary_matrix(testcircuit, wires)
matrix = get_matrix()
obtained_state1 = matrix @ teststate1
obtained_state2 = matrix @ teststate2
assert np.allclose(obtained_state1, expected_state1)
assert np.allclose(obtained_state2, expected_state2)
def test_get_unitary_matrix_MultiControlledX():
"""Test with many control wires"""
wires = [0, 1, 2, 3, 4, 5]
state0 = [1, 0]
state1 = [0, 1]
teststate1 = reduce(np.kron, [state1, state1, state1, state1, state1, state1])
teststate2 = reduce(np.kron, [state0, state1, state0, state0, state1, state0])
expected_state1 = reduce(np.kron, [state1, state1, state1, state0, state1, state1])
expected_state2 = teststate2
get_matrix = get_unitary_matrix(testcircuit, wires)
matrix = get_matrix()
obtained_state1 = matrix @ teststate1
obtained_state2 = matrix @ teststate2
assert np.allclose(obtained_state1, expected_state1)
assert np.allclose(obtained_state2, expected_state2)
def test_get_unitary_matrix_default_wireorder():
"""Test without specified wire order"""
get_matrix = get_unitary_matrix(testcircuit)
matrix = get_matrix()
expected_matrix = np.kron(X, np.kron(Y, Z))
assert np.allclose(matrix, expected_matrix)
def test_get_unitary_matrix_input_tape():
"""Test with quantum tape as input"""
with qml.tape.QuantumTape() as tape:
qml.RX(0.432, wires=0)
qml.RY(0.543, wires=0)
qml.CNOT(wires=[0, 1])
qml.RX(0.133, wires=1)
get_matrix = get_unitary_matrix(tape)
matrix = get_matrix()
part_expected_matrix = np.kron(
qml.RY(0.543, wires=0).get_matrix() @ qml.RX(0.432, wires=0).get_matrix(), I
)
expected_matrix = np.kron(I, qml.RX(0.133, wires=1).get_matrix()) @ CNOT @ part_expected_matrix
assert np.allclose(matrix, expected_matrix)
def test_get_unitary_matrix_input_tape_wireorder():
"""Test with quantum tape as input, and nonstandard wire ordering"""
with qml.tape.QuantumTape() as tape:
qml.RX(0.432, wires=0)
qml.RY(0.543, wires=0)
qml.CNOT(wires=[0, 1])
qml.RX(0.133, wires=1)
get_matrix = get_unitary_matrix(tape, wire_order=[1, 0])
matrix = get_matrix()
# CNOT where the second wire is the control wire, as opposed to qml.CNOT.get_matrix()
CNOT10 = np.array([[1, 0, 0, 0], [0, 0, 0, 1], [0, 0, 1, 0], [0, 1, 0, 0]])
part_expected_matrix = np.kron(
I, qml.RY(0.543, wires=0).get_matrix() @ qml.RX(0.432, wires=0).get_matrix()
)
expected_matrix = (
np.kron(qml.RX(0.133, wires=1).get_matrix(), I) @ CNOT10 @ part_expected_matrix
)
assert np.allclose(matrix, expected_matrix)
def test_get_unitary_matrix_input_QNode():
"""Test with QNode as input"""
dev = qml.device("default.qubit", wires=5)
@qml.qnode(dev)
get_matrix = get_unitary_matrix(my_quantum_function) # default wire_order = [0, 1, 2, 3, 4]
matrix = get_matrix()
expected_matrix = (
reduce(np.kron, [I, I, I, I, X])
@ reduce(np.kron, [I, I, qml.CRZ(0.2, wires=[2, 3]).get_matrix(), I])
@ reduce(np.kron, [I, Y, I, I, I])
@ reduce(np.kron, [CNOT, I, I, I])
@ reduce(np.kron, [Z, I, I, I, I])
)
assert np.allclose(matrix, expected_matrix)
def test_get_unitary_matrix_input_QNode_wireorder():
"""Test with QNode as input, and nonstandard wire ordering"""
dev = qml.device("default.qubit", wires=5)
@qml.qnode(dev)
get_matrix = get_unitary_matrix(my_quantum_function, wire_order=[1, 0, 4, 2, 3])
matrix = get_matrix()
CNOT10 = np.array([[1, 0, 0, 0], [0, 0, 0, 1], [0, 0, 1, 0], [0, 1, 0, 0]])
expected_matrix = (
reduce(np.kron, [I, I, X, I, I])
@ reduce(np.kron, [I, I, I, qml.CRZ(0.2, wires=[2, 3]).get_matrix()])
@ reduce(np.kron, [Y, I, I, I, I])
@ reduce(np.kron, [CNOT10, I, I, I])
@ reduce(np.kron, [I, Z, I, I, I])
)
assert np.allclose(matrix, expected_matrix)
def test_get_unitary_matrix_invalid_argument():
"""Assert error raised when input is neither a tape, QNode, nor quantum function"""
get_matrix = get_unitary_matrix(qml.PauliZ(0))
with pytest.raises(ValueError, match="Input is not a tape, QNode, or quantum function"):
matrix = get_matrix()
def test_get_unitary_matrix_wrong_function():
"""Assert error raised when input function is not a quantum function"""
get_matrix = get_unitary_matrix(testfunction, [0])
with pytest.raises(ValueError, match="Function contains no quantum operation"):
matrix = get_matrix(1)
def test_get_unitary_matrix_interface_tf():
"""Test with tensorflow interface"""
tf = pytest.importorskip("tensorflow")
dev = qml.device("default.qubit", wires=3)
# set qnode interface
qnode_tensorflow = qml.QNode(circuit, dev, interface="tf")
get_matrix = get_unitary_matrix(qnode_tensorflow)
beta = 0.1
# input tensorflow parameters
theta = tf.Variable([0.2, 0.3])
matrix = get_matrix(beta, theta)
# expected matrix
theta_np = theta.numpy()
matrix1 = np.kron(
qml.RZ(beta, wires=0).get_matrix(), np.kron(qml.RZ(theta_np[0], wires=1).get_matrix(), I)
)
matrix2 = np.kron(I, qml.CRY(theta_np[1], wires=[1, 2]).get_matrix())
expected_matrix = matrix2 @ matrix1
assert np.allclose(matrix, expected_matrix)
def test_get_unitary_matrix_interface_torch():
"""Test with torch interface"""
torch = pytest.importorskip("torch", minversion="1.8")
dev = qml.device("default.qubit", wires=3)
# set qnode interface
qnode_torch = qml.QNode(circuit, dev, interface="torch")
get_matrix = get_unitary_matrix(qnode_torch)
# input torch parameters
theta = torch.tensor([0.1, 0.2, 0.3])
matrix = get_matrix(theta)
# expected matrix
matrix1 = np.kron(
qml.RZ(theta[0], wires=0).get_matrix(), np.kron(qml.RZ(theta[1], wires=1).get_matrix(), I)
)
matrix2 = np.kron(I, qml.CRY(theta[2], wires=[1, 2]).get_matrix())
expected_matrix = matrix2 @ matrix1
assert np.allclose(matrix, expected_matrix)
def test_get_unitary_matrix_interface_autograd():
"""Test with autograd interface"""
dev = qml.device("default.qubit", wires=3)
# set qnode interface
qnode = qml.QNode(circuit, dev, interface="autograd")
get_matrix = get_unitary_matrix(qnode)
# set input parameters
theta = np.array([0.1, 0.2, 0.3], requires_grad=True)
matrix = get_matrix(theta)
# expected matrix
matrix1 = np.kron(
qml.RZ(theta[0], wires=0).get_matrix(), np.kron(qml.RZ(theta[1], wires=1).get_matrix(), I)
)
matrix2 = np.kron(I, qml.CRY(theta[2], wires=[1, 2]).get_matrix())
expected_matrix = matrix2 @ matrix1
assert np.allclose(matrix, expected_matrix)
def test_get_unitary_matrix_interface_jax():
"""Test with JAX interface"""
jax = pytest.importorskip("jax")
from jax import numpy as jnp
from jax.config import config
remember = config.read("jax_enable_x64")
config.update("jax_enable_x64", True)
dev = qml.device("default.qubit", wires=3)
# set qnode interface
qnode = qml.QNode(circuit, dev, interface="jax")
get_matrix = get_unitary_matrix(qnode)
# input jax parameters
theta = jnp.array([0.1, 0.2, 0.3], dtype=jnp.float64)
matrix = get_matrix(theta)
# expected matrix
matrix1 = np.kron(
qml.RZ(theta[0], wires=0).get_matrix(), np.kron(qml.RZ(theta[1], wires=1).get_matrix(), I)
)
matrix2 = np.kron(I, qml.CRY(theta[2], wires=[1, 2]).get_matrix())
expected_matrix = matrix2 @ matrix1
assert np.allclose(matrix, expected_matrix)
def test_get_unitary_matrix_wronglabel():
"""Assert error raised when wire labels in wire_order and circuit are inconsistent"""
wires = [0, "b"]
get_matrix = get_unitary_matrix(circuit, wires)
with pytest.raises(
ValueError, match="Wires in circuit are inconsistent with those in wire_order"
):
matrix = get_matrix()
@pytest.mark.parametrize("v", np.linspace(0.2, 1.6, 8))
@pytest.mark.parametrize("v", np.linspace(0.2, 1.6, 8))
@pytest.mark.parametrize("v", np.linspace(0.2, 1.6, 8))
@pytest.mark.parametrize("v", np.linspace(0.2, 1.6, 8))
| [
2,
15069,
2864,
12,
1238,
2481,
47482,
324,
84,
29082,
21852,
3457,
13,
198,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
198,
2,
345,
743,
407,
779,
428,
2393,
2845,
287,
11846,
351,
... | 2.387139 | 5,396 |
# -*- coding: utf-8 -*-
import new, sys, os, types, native, thread
#import classfile, classloader, javaclasses
from classloader import JClass, descriptor # eg. for putfield
from helper import make_String, throw_NullPointerException, throw_ArithmeticException, throw_ArrayIndexOutOfBoundsException, throw_ClassCastException
from hooks import HOOKS, vmobject_getClass_helper
from objectmodel import TypedMap, Objectref, Classref, Arrayref, Stack, JObject, JException, JArrayClass
from native import current_classloader, env_ptr # for jni
from ctypes import py_object, c_int, c_float, c_long, c_double, c_char
from rpython.rlib.rarithmetic import r_singlefloat, r_longlong
from rpython.rlib.objectmodel import instantiate
# for threading:
interp_lock = thread.allocate_lock()
opcodes_count = 0
OPCODES_MAX = 2**8
# JVMS: 4.4.4
# cast int (n) to IEEE 754 float (num)
# TODO: NaN infinety
# JVMS: 4.4.5
# adds to the AbstractClassLoader just one method
# which is able to run Frames
# This method is called by the invoke-virtual,
# -special, -staticmethods (or native.py/JNI) and at the jvm-start
# It executes the bytecode by using a Frame
# TODO: refactor this method
# FIXME use r_singlefloat and r_long_long
# javaclasses > print has problems with this types
DESCR_CAST = {'byte': signedbytemask,
'char': cast_char,
'double': float,
'float': float,#r_singlefloat,
'int': intmask,
'long': long,#r_longlong,
'short': shortmask,
'boolean': bool,
}
# FIXME: Frame and interpreter are mixed :(
# After every methodcall, a Stackframe is created
# It executes every Java-Bytecode until a return opcode occurs
WIDE_TARGET = {0xc8: "goto_w",
}
DEFAULT_BY_TYPECODE = {4: False, # boolean
5: '\x00',# char
6: 0.0, # float
7: 0.0, # double
8: 0, # byte
9: 0, # short
10: 0, # int
11: 0, # long
}
CHAR_BY_TYPECODE = {4: 'Z', # boolean
5: 'C',# char
6: 'F', # float
7: 'D', # double
8: 'B', # byte
9: 'S', # short
10: 'I', # int
11: 'J', # long
}
# raised when a return opcode is reached | [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
11748,
649,
11,
25064,
11,
28686,
11,
3858,
11,
6868,
11,
4704,
198,
198,
2,
11748,
1398,
7753,
11,
1398,
29356,
11,
474,
615,
330,
28958,
198,
198,
6738,
1398,
29356,... | 2.061688 | 1,232 |
# Binomial Distribution I
# Task: The ratio of boys to girls for babies born in Russia is 1.09 : 1.
# If there is one child born per birth, what proportion of Russian families
# with exactly 6 children will have at least 3 boys?
import sys
from math import factorial  # presumably used by binomial_distribution() -- not visible here
# First stdin line holds the two ratio terms, e.g. "1.09 1".
args = sys.stdin.readlines()[0].split(' ')
boys_rat, girls_rat = [float(ratio) for ratio in args]
# print(boys_rat, girls_rat)
# p(boy) = 0.521 = p
# p(girl) = 0.479 = q
p = boys_rat / (boys_rat + girls_rat)
result = 0
# P(X >= 3) for X ~ Binomial(n=6, p): sum the pmf over x = 3..6.
# NOTE(review): binomial_distribution(x, n, p) is assumed to be defined
# elsewhere (not visible in this chunk) as C(n, x) * p**x * (1-p)**(n-x)
# -- confirm before running.
for x in range(3, 7):
    result += binomial_distribution(x, 6, p)
print(round(result, 3))
| [
2,
20828,
49070,
27484,
314,
198,
2,
15941,
25,
383,
8064,
286,
6510,
284,
4813,
329,
11903,
4642,
287,
3284,
318,
352,
13,
2931,
1058,
352,
13,
198,
2,
1002,
612,
318,
220,
1200,
4642,
583,
4082,
11,
644,
9823,
286,
3394,
4172,
3... | 2.894472 | 199 |
import argparse
from mmcv import Config
from mmcv.cnn.utils import get_model_complexity_info
from mmcls.models import build_classifier
from mmcls.models.backbones.binary_utils.binary_convs import BaseBinaryConv2d
import torch
TOTAL_BOPS = 0
if __name__ == '__main__':
main()
| [
11748,
1822,
29572,
201,
198,
201,
198,
6738,
8085,
33967,
1330,
17056,
201,
198,
6738,
8085,
33967,
13,
66,
20471,
13,
26791,
1330,
651,
62,
19849,
62,
41887,
414,
62,
10951,
201,
198,
201,
198,
6738,
8085,
565,
82,
13,
27530,
1330,
... | 2.614035 | 114 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Jan 1 13:57:09 2019
@author: Tom
"""
import sys
import json
import logging
import configparser
import pprint
from datetime import datetime
from typing import Dict
import requests
import send_gmail
INAT_NODE_API_BASE_URL = "https://api.inaturalist.org/v1/"
INAT_BASE_URL = "https://www.inaturalist.org"
class AuthenticationError(Exception):
    ''' Raised when an iNaturalist access token cannot be obtained (bad credentials) '''
class ObservationNotFound(Exception):
    ''' Raised when a requested iNaturalist observation cannot be found '''
def get_access_token(username: str,
                     password: str,
                     app_id: str,
                     app_secret: str) -> str:
    """
    Obtain an OAuth access token from iNaturalist using the resource-owner
    password flow (an iNaturalist app is still required).

    :param username: iNaturalist account name
    :param password: iNaturalist account password
    :param app_id: application ID
    :param app_secret: application secret
    :return: the access token, example use:
        headers = {"Authorization": "Bearer %s" % access_token}
    :raises AuthenticationError: if the response carries no access token
    """
    credentials = {
        'client_id': app_id,
        'client_secret': app_secret,
        'grant_type': "password",
        'username': username,
        'password': password
    }
    token_url = "{base_url}/oauth/token".format(base_url=INAT_BASE_URL)
    response = requests.post(token_url, credentials)
    try:
        return response.json()["access_token"]
    except KeyError as an_error:
        # No token in the response body -> credentials were rejected.
        raise AuthenticationError("Authentication error, "
                                  " please check credentials.") from an_error
def get_place_name(place_id):
    ''' Resolve an iNaturalist place ID to its display name (None on failure) '''
    LOGGER.info("Looking up place: %s", place_id)
    response = requests.get(
        "https://api.inaturalist.org/v1/places/%s" % place_id)
    if response.status_code != 200:
        LOGGER.error("response status = %d", response.status_code)
        return None
    response_data = json.loads(response.text)
    try:
        return response_data['results'][0]['display_name']
    except KeyError:
        LOGGER.error("place_id '%s' not found", place_id)
        return None
def get_project_id(project_slug):
    ''' Look up the numeric project ID for a project slug (short name);
        returns None when the project or its ID cannot be found '''
    response = requests.get(
        "https://api.inaturalist.org/v1/projects/%s" % project_slug)
    if response.status_code != 200:
        LOGGER.error("Project %s not found", project_slug)
        return None
    response_data = json.loads(response.text)
    try:
        return response_data['results'][0]['id']
    except KeyError:
        LOGGER.error("Project ID not found")
        return None
# pylint: disable=too-many-locals,too-many-statements
def get_project(project_id, config):
    ''' Retrieve project information and return a list of species taxon IDs.

    Logs a project summary (title, description, place, taxon rules), then
    queries the total observation count and the per-species counts,
    comparing them with the totals saved in the [last run] section of
    *config* and writing the new totals back into that section.

    :param project_id: iNaturalist project ID
    :param config: configparser.ConfigParser with [last run] and
        [inaturalist.org] sections
    :return: list of taxon IDs observed in the project (empty on failure)
    '''
    project_species = []
    project = requests.get(
        'https://api.inaturalist.org/v1/projects/%s?rule_details=true' %
        project_id)
    if project.status_code == 200:
        response_data = json.loads(project.text)
        if int(response_data['total_results']) > 0:
            result = response_data['results'][0]
            LOGGER.info("----------------------------------")
            LOGGER.info("Title: %s", result['title'])
            LOGGER.info("Description: %s", result['description'])
            place = result['place']
            LOGGER.info(" Place: %s (%s)", place['display_name'],
                        place['id'])
            LOGGER.debug("Number of rules: %d",
                         len(result['project_observation_rules']))
            LOGGER.info("Taxon Rules:")
            for a_rule in result['project_observation_rules']:
                if a_rule['operand_type'] == 'Taxon':
                    taxon = a_rule['taxon']
                    LOGGER.info(" Taxon: %s", taxon['name'])
            LOGGER.info("----------------------------------")
        else:
            # Project exists but has no results: nothing to report or save.
            return project_species
    # Default both counts to the previous run's values so the config write
    # at the bottom cannot raise NameError when an HTTP request fails
    # (previously these names were left unbound on the failure paths).
    prev_observation_count = config.getint('last run', 'observation_count', fallback=0)
    observation_count = prev_observation_count
    get_url = '%sobservations?project_id=%s' % (INAT_NODE_API_BASE_URL, project_id)
    get_req = requests.get(get_url)
    if get_req.status_code == 200:
        response_data = json.loads(get_req.text)
        observation_count = int(response_data['total_results'])
        LOGGER.debug(pprint.pformat(response_data))
        LOGGER.info("Project %s observation count: %d, previously: %d",
                    project_id, observation_count, prev_observation_count)
    else:
        LOGGER.info("GET failed, status = %d", get_req.status_code)
    prev_species_count = config.getint('last run', 'species_count', fallback=0)
    species_count = prev_species_count
    LOGGER.info("\nGet project stats for %s", project_id)
    get_stats_url = '%sobservations/species_counts' \
                    '?project_id=%s&place_id=any' \
                    '&verifiable=any&captive=any' % \
                    (INAT_NODE_API_BASE_URL, project_id)
    get_stats_req = requests.get(get_stats_url)
    if get_stats_req.status_code == 200:
        response_data = json.loads(get_stats_req.text)
        LOGGER.debug(pprint.pformat(response_data))
        species_count = int(response_data['total_results'])
        LOGGER.info("\nTotal species: %d, previous: %d\n------------",
                    species_count, prev_species_count)
        results = response_data['results']
        for a_result in results:
            # 'rank' is optional in the taxon record; default it.
            try:
                rank = a_result['taxon']['rank']
            except KeyError:
                rank = '<none>'
            taxon = a_result['taxon']['iconic_taxon_name']
            if config.getboolean('inaturalist.org', 'showspecies'):
                LOGGER.info("Name: %s\n"
                            "Common name: %s\n"
                            "Taxon ID: %s\n"
                            "Rank: %s\n"
                            "Taxon: %s\n"
                            "Count: %s\n",
                            a_result['taxon']['name'],
                            a_result['taxon']['preferred_common_name'],
                            a_result['taxon']['id'],
                            rank,
                            taxon,
                            a_result['count'])
            # Collect the taxon ID regardless of whether it was printed.
            project_species.append(a_result['taxon']['id'])
    else:
        LOGGER.error("Stats request '%s' failed: %d", get_stats_url,
                     get_stats_req.status_code)
    # Save counts to config file
    config['last run']['species_count'] = str(species_count)
    config['last run']['observation_count'] = str(observation_count)
    return project_species
# THIS DIDN'T WORK
def add_ob_2_proj_v1(observation_id, project_id, access_token):
    ''' Use V1 API to add an observation to a project.

    NOTE(review): the comment above ("THIS DIDN'T WORK") says this V1
    endpoint did not behave as expected upstream; add_ob_2_proj() is the
    variant that appears to be relied on -- confirm before reusing this.

    :param observation_id: ID of the observation to add
    :param project_id: ID of the target project
    :param access_token: OAuth token from get_access_token()
    :return: True if the POST returned HTTP 200, otherwise False
    '''
    payload = {"observation_id": observation_id}
    post_url = 'https://api.inaturalist.org/v1/projects/%s/add' % project_id
    post_req = requests.post(post_url,
                             data=json.dumps(payload),
                             headers=_build_auth_header(access_token))
    #LOGGER.info("POST request status code: %d", post_req.status_code)
    #LOGGER.info("POST request response: '%s'", post_req.text)
    if post_req.status_code == 200:
        LOGGER.debug("add_ob_2_proj_v1 POST successful")
        return True
    return False
def add_ob_2_proj(observation_id, project_id, access_token):
    ''' Add an observation to a project via the legacy (non-V1) API;
        returns True on HTTP 200, otherwise logs the errors and returns False '''
    payload = {'project_observation[observation_id]': observation_id,
               'project_observation[project_id]': project_id}
    post_url = '%s/project_observations' % INAT_BASE_URL
    response = requests.post(post_url,
                             data=payload,
                             headers=_build_auth_header(access_token))
    if response.status_code == 200:
        LOGGER.debug("add_ob_2_proj POST successful")
        return True
    LOGGER.error("POST request status code: %d", response.status_code)
    try:
        # The error body is expected to be JSON with an 'errors' list.
        for error in json.loads(response.text)['errors']:
            LOGGER.error("POST request response: '%s'", error)
    except json.JSONDecodeError:
        LOGGER.error("Failed to decode post response:\n%s", response.text)
    return False
def _build_auth_header(access_token: str) -> Dict[str, str]:
''' This function takes the access_token and creates the Authorization
header needed by the non-V1 interface'''
return {"Authorization": "Bearer %s" % access_token}
# Root-logger setup: log to /tmp/results.log (bare messages) and to the
# console.
LOG_FILE_NAME = "/tmp/results.log"
# Truncate any previous log file so each run starts fresh.
with open(LOG_FILE_NAME, "w"):
    pass
LOG_FORMATTER = logging.Formatter("%(asctime)s [%(threadName)-12.12s]"
                                  " [%(levelname)-5.5s] %(message)s")
FILE_LOG_FORMATTER = logging.Formatter("%(message)s")
LOGGER = logging.getLogger()
FILE_HANDLER = logging.FileHandler("{0}".format(LOG_FILE_NAME))
FILE_HANDLER.setFormatter(FILE_LOG_FORMATTER)
LOGGER.addHandler(FILE_HANDLER)
# NOTE(review): LOG_FORMATTER is rebound here, so the timestamped format
# built above is never used -- confirm whether that is intentional.
LOG_FORMATTER = logging.Formatter("%(message)s")
CONSOLE_HANDLER = logging.StreamHandler()
CONSOLE_HANDLER.setFormatter(LOG_FORMATTER)
LOGGER.addHandler(CONSOLE_HANDLER)
def print_obs(result):
    ''' Log a human-readable summary of one observation search result.

    :param result: one element of the V1 API "results" list; reads its
        'id', 'taxon', 'quality_grade', 'time_observed_at', 'created_at'
        and 'user' entries
    '''
    obs_id = result['id']
    taxon_id = result['taxon']['id']
    # Print some information about observation
    LOGGER.info("Observation ID: %s", obs_id)
    LOGGER.info("Taxon ID: %s", taxon_id)
    LOGGER.info("Name: %s",
                result['taxon']['name'])
    LOGGER.info("Preferred common name: %s",
                result['taxon']['preferred_common_name'])
    #LOGGER.info("Rank: %s", rank)
    #LOGGER.info("Taxon: %s", taxon)
    LOGGER.info("Grade: %s",
                result['quality_grade'])
    LOGGER.info("Observed at: %s",
                result['time_observed_at'])
    LOGGER.info("Created at: %s",
                result['created_at'])
    LOGGER.info("User Name: %s",
                result['user']['name'])
    #LOGGER.info("User ID: %s",
    #            result['user']['login'])
    #LOGGER.info("Place IDs: %s",
    #            ",".join(str(x) for x in result['place_ids'][:5]))
    #LOGGER.info("Project IDs: %s",
    #            ",".join(str(x) for x in result['project_ids']))
    #LOGGER.info("\n")
# pylint: disable=too-many-branches
def search_new_obs(config, project_id, project_species):
    ''' Search for new observations for project.

    Queries the iNaturalist API for research-grade observations in the
    configured place that are not yet in the project, optionally adds each
    one to the project, and reports any species not already on the
    project's species list.

    :param config: ConfigParser with [inaturalist.org] and [last run] sections
    :param project_id: numeric ID of the target project
    :param project_species: collection of taxon IDs already in the project
    :returns: list of taxon IDs that are new to the project
    '''
    place_id = config['inaturalist.org']['place_id']
    place_name = get_place_name(place_id)
    if place_name is None:
        LOGGER.error("Failed to find place id: '%s'", place_id)
        sys.exit(6)

    taxon_list = [x.strip() for x in config['inaturalist.org']['taxon_list'].split(',')]

    # Per-taxon total result counts, reported at the end of the run
    taxon_response_count = {}

    # As we find new species, put in this list
    new_species = []
    new_species_add = 0
    observations_added = 0
    observations_add_failures = 0

    # Get token information to access iNaturalist.org from config file
    try:
        access_token = get_access_token(config['inaturalist.org']['username'],
                                        config['inaturalist.org']['password'],
                                        config['inaturalist.org']['app_id'],
                                        config['inaturalist.org']['app_secret'])
    except KeyError:
        config_filename = config.get('DEFAULT', 'config_filename')
        LOGGER.warning("Need to define username, password, app_id, and "
                       "app_secret in [inaturalist.org] section of "
                       "configuration file: %s",
                       config_filename)
        sys.exit(7)

    # Observations that previously failed to be added; skipped on this run
    excluded_observations = [x.strip() for x in
                             config['last run']['excluded_observations'].split(',')]

    add_obs_flag = config.getboolean('inaturalist.org',
                                     'addobservations')

    # Loop for each taxon in list
    # pylint: disable=too-many-nested-blocks
    for a_taxon in taxon_list:
        LOGGER.info("\nQuery for research grade %s in %s "
                    "not in project: %s", a_taxon,
                    config['inaturalist.org']['project_slug'],
                    place_name)

        # Page through the API results, starting with page 1
        page = 1
        done = False
        page_size = 100
        while not done:
            LOGGER.info("Page %d, page size: %d", page, page_size)

            # Query all observations in place ID, with matching Taxon ID,
            # not already in project, is research grade, on desired page.
            # (The 'not_in_project' parameter had been corrupted to a
            # mojibake '¬_in_project' -- restored here.)
            req_resp = requests.get(
                'https://api.inaturalist.org/v1/observations'
                '?place_id=%s'
                '&iconic_taxa=%s'
                '&not_in_project=%s'
                '&quality_grade=research'
                '&page=%d'
                '&per_page=%s'
                '&order=desc'
                '&order_by=created_at' %
                (config['inaturalist.org']['place_id'],
                 a_taxon, project_id,
                 page, page_size))
            LOGGER.info("Observation Request Status: %d", req_resp.status_code)

            # 200 means success
            if req_resp.status_code == 200:
                # convert JSON response to a python dictionary
                response_data = json.loads(req_resp.text)
                if page == 1:
                    LOGGER.info("Total responses: %d",
                                response_data['total_results'])
                    taxon_response_count[a_taxon] = \
                        response_data['total_results']

                # If we get back no results, we are done
                if not response_data['results']:
                    done = True

                for result in response_data['results']:
                    if str(result['id']) in excluded_observations:
                        continue

                    # Try to add observation to project using access_token
                    # for authentication; on failure remember the id so it
                    # is not retried on later runs
                    if add_obs_flag:
                        if add_ob_2_proj(result['id'],
                                         project_id,
                                         access_token):
                            observations_added += 1
                        else:
                            observations_add_failures += 1
                            excluded_observations.append(str(result['id']))
                            continue

                    # If taxon ID is not in the list of species already in
                    # the project, and not in the list of new species we
                    # have already found, print a banner and record it
                    taxon_id = result['taxon']['id']
                    if taxon_id not in project_species and \
                       taxon_id not in new_species:
                        new_species.append(taxon_id)
                        LOGGER.info("=== NEW SPECIES FOR PROJECT, %d ===", taxon_id)
                        new_species_add += 1
                        print_obs(result)
                    else:
                        print_obs(result)
                page += 1
            else:
                done = True
                LOGGER.info("Observation response: %s", req_resp.text)

    for a_taxon in taxon_response_count:
        LOGGER.info("Taxon: %s, total results: %d",
                    a_taxon, taxon_response_count[a_taxon])

    if add_obs_flag:
        # Refresh project information (logs the updated species list);
        # the return value is intentionally unused here
        get_project(project_id, config)

    # Previously a never-incremented counter was logged here, so the
    # "New Species" line always read 0; report the real count instead.
    LOGGER.info("\nNew Species: %d", len(new_species))
    LOGGER.info("New Species Added: %d", new_species_add)
    LOGGER.info("Observations Added: %d", observations_added)
    LOGGER.info("Observations Add Failures: %d", observations_add_failures)

    # Save excluded observations for next time
    config['last run']['excluded_observations'] = ",".join(excluded_observations)

    return new_species
############################################
# Main program #
############################################
# pylint: disable=too-many-statements,too-many-branches,too-many-locals
def main():
    ''' Main function.

    Reads the configuration file (creating defaults as needed), resolves
    the configured iNaturalist project, optionally searches for and adds
    new observations, emails the results, and writes the possibly-updated
    configuration back to disk.

    :returns: 0 on success, a non-zero error code otherwise
    '''
    config = configparser.ConfigParser()

    # Assigning to config[section] REPLACES the entire section, so every
    # default for a section must be set in a single assignment.  (The
    # previous code assigned [inaturalist.org] three times in a row, which
    # silently discarded the 'addobservations' and 'showspecies' defaults
    # and made the getboolean() calls below fail on a minimal config file.)
    config['DEFAULT'] = {'loggingLevel': 'INFO'}
    config['inaturalist.org'] = {'addobservations': True,
                                 'showspecies': True,
                                 'searchnew': True}
    config['gmail.com'] = {'send_email': False}
    config['last run'] = {'excluded_observations': ''}

    # Configuration file name may be supplied as the first argument
    if len(sys.argv) > 1:
        config_filename = sys.argv[1]
    else:
        config_filename = 'inat_add_obs2project.ini'

    # Warn (but continue with defaults) when the file does not exist yet
    try:
        dummy_h = open(config_filename, 'r')
        dummy_h.close()
    except FileNotFoundError:
        LOGGER.warning("File: '%s' not found, creating", config_filename)

    # Read config file
    config.read(config_filename)
    config['DEFAULT']['config_filename'] = config_filename

    LOGGER.setLevel(config['DEFAULT']['loggingLevel'])

    LOGGER.info("Adding observations: %s",
                str(config.getboolean('inaturalist.org', 'addobservations')))
    LOGGER.info("Show species: %s",
                str(config.getboolean('inaturalist.org', 'showspecies')))

    now = datetime.utcnow()
    try:
        last_run = config['last run']['timestamp']
        LOGGER.info("This configuration file last run at: '%s'", last_run)
    except KeyError:
        LOGGER.info("This configuration file has not been used before")

    # Update timestamp
    config['last run']['timestamp'] = str(now)

    # Get project_id from slug name
    try:
        project_id = get_project_id(config['inaturalist.org']['project_slug'])
    except KeyError:
        LOGGER.error("Need to define project_slug "
                     "in [inaturalist.org] section of "
                     "configuration file: %s",
                     config_filename)
        return 3
    if project_id is None:
        LOGGER.error("Need to define project_slug "
                     "in [inaturalist.org] section of "
                     "configuration file: %s",
                     config_filename)
        return 3

    # Get some project information and a list of current species
    project_species = get_project(project_id, config)
    if project_species is None:
        LOGGER.warning("Failed to get species list ")
        return 4

    # Optionally search for observations not yet in the project
    # (return value logged inside; unused local removed)
    search_new = config.getboolean('inaturalist.org', 'searchnew')
    if search_new:
        search_new_obs(config, project_id, project_species)

    # Read results file into a buffer
    with open(LOG_FILE_NAME, "r") as results_file:
        results_buffer = results_file.read()

    # Send results to the configured email addresses
    if config.getboolean('gmail.com', 'send_email'):
        try:
            dummy_gmail_config = config['gmail.com']
            if send_gmail.send_email(config, LOGGER, results_buffer,
                                     subject="inat_add_obs2project results"):
                LOGGER.info("Email sent")
            else:
                LOGGER.error("Failed to send email")
        except KeyError:
            LOGGER.warning("gmail.com configuration not defined")

    # Write possibly updated configuration back to file
    config_filename = config.get('DEFAULT', 'config_filename')
    try:
        with open(config_filename, 'w') as config_file:
            config.write(config_file)
    except OSError:
        LOGGER.error("Failed to write config file, '%s'", config_filename)

    return 0
if __name__ == "__main__":
sys.exit(main())
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
37811,
198,
41972,
319,
30030,
2365,
220,
352,
1511,
25,
3553,
25,
2931,
13130,
198,
198,
31,
9800,
25,
4186,
198... | 2.08516 | 9,852 |
from datetime import datetime
import os
import logging
from django.conf.urls import url
from django.contrib.auth.decorators import login_required
from django.core.files.base import ContentFile
from django.core.files.storage import FileSystemStorage
from django.db.models import get_model
from django.http import HttpResponse, HttpResponseNotFound
from annoying.decorators import render_to
from django.shortcuts import render_to_response
from django.utils import simplejson
from django.views.decorators.csrf import csrf_exempt, csrf_protect
from django.views.decorators.http import require_POST
from jfu.http import upload_receive, UploadResponse
from core.utils import live
from dss import localsettings, settings
from spa.models import UserProfile, Release
from spa.models.mix import Mix
from spa.models.comment import Comment
from core.serialisers import json
from core.tasks import create_waveform_task
from spa.models.notification import Notification
logger = logging.getLogger(__name__)
@render_to('inc/header.html')
def session_play_count(request):
    """
    :param request:
    :return: Number of tracks played in this session
    """
    # Anonymous visitors report the per-session play counter (if any);
    # authenticated users always report zero here.
    play_count = '0'
    if not request.user.is_authenticated() and 'play_count' in request.session:
        play_count = request.session['play_count']

    result = simplejson.dumps({'play_count': play_count})
    return HttpResponse(result, mimetype='application/json')
@render_to('inc/release_player.html')
@render_to('inc/comment_list.html')
@login_required
@login_required()
@csrf_exempt
@require_POST
@login_required
@csrf_protect
@csrf_exempt
@csrf_exempt
| [
6738,
4818,
8079,
1330,
4818,
8079,
201,
198,
11748,
28686,
201,
198,
11748,
18931,
201,
198,
201,
198,
6738,
42625,
14208,
13,
10414,
13,
6371,
82,
1330,
19016,
201,
198,
6738,
42625,
14208,
13,
3642,
822,
13,
18439,
13,
12501,
273,
... | 2.564276 | 739 |
#!/usr/bin/env python3
import sys
sys.setrecursionlimit(10**6)
INF = 10 ** 9 + 1 # sys.maxsize # float("inf")
MOD = 10 ** 9 + 7
# tests
T1 = """
4
1 4
2 3
3 1
4 2
"""
TEST_T1 = """
>>> as_input(T1)
>>> main()
1
1
2
2
"""
T2 = """
7
6 4
4 3
3 5
7 1
2 7
5 2
1 6
"""
TEST_T2 = """
>>> as_input(T2)
>>> main()
3
3
1
1
2
3
2
"""
def as_input(s):
    "use in test, use given string as input file"
    import io
    stream = io.StringIO(s.strip())
    # Rebind the module-level input/read helpers so main() consumes the
    # given string (as ascii bytes) instead of stdin.
    module_ns = globals()
    module_ns["input"] = lambda: bytes(stream.readline(), "ascii")
    module_ns["read"] = lambda: bytes(stream.read(), "ascii")
input = sys.stdin.buffer.readline
read = sys.stdin.buffer.read
if sys.argv[-1] == "-t":
print("testing")
_test()
sys.exit()
main()
| [
198,
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
11748,
25064,
198,
17597,
13,
2617,
8344,
24197,
32374,
7,
940,
1174,
21,
8,
198,
1268,
37,
796,
838,
12429,
860,
1343,
352,
220,
1303,
25064,
13,
9806,
7857,
1303,
12178,
7... | 2.060519 | 347 |
"""Resources file for NiftyPET NIPET and NIMPA etc."""
#---------------------------------------------------------------
__author__ = "Pawel Markiewicz"
__copyright__ = "Copyright 2018"
#---------------------------------------------------------------
import numpy as np
from math import pi
import platform
import os
# Hardware (CT-based) mu-maps, which come with the mMR scanner.
# The names may be different
hrdwr_mu = [
'umap_HNMCL_10606489.v.hdr', # (1) Head and neck lower coil
'umap_HNMCU_10606489.v.hdr', # (2) Head and neck upper coil
'umap_SPMC_10606491.v.hdr', # (3) Spine coil
'umap_PT_2291734.v.hdr', # (4) Table
'umap_HOMCU_10606489.v.hdr', # (5) Head and neck upper coil
'umap_BR4CH_10185525.v.hdr' # (6)
]
# Radioisotope look-up table
riLUT = { 'Ge68':{'BF':0.891,'thalf':270.9516*24*60*60},
'Ga68':{'BF':0.891,'thalf':67.71*60},
'F18':{'BF':0.967, 'thalf':109.77120*60},
'C11':{'BF':0.998, 'thalf':20.38*60},
'O15':{'BF':0.999, 'thalf':122.2416}
}
# -----------------------------------------------------
# The name and path to the NiftyPET tools (software)
DIRTOOLS = 'NiftyPET_tools'
MSVC_VRSN = 'Visual Studio 12 2013 Win64'
CMAKE_TLS_PAR = '' #-DUSE_SSE=OFF'
# PATHTOOLS = os.path.join('/chosen/path/', DIRTOOLS)
#> path to Python wrapper of Vinci
VINCIPATH = ''
# -----------------------------------------------------
# -----------------------------------------------------
# DO NOT MODIFY BELOW--DONE AUTOMATICALLY
### start GPU properties ###
### end GPU properties ###
# paths to apps and tools needed by NiftyPET
### start NiftyPET tools ###
### end NiftyPET tools ###
# -----------------------------------------------------
# enable xnat module
ENBLXNAT = False
# enable Agg
ENBLAGG = False
# compile DCM2NIIX, otherwise download a compiled version for the system used
CMPL_DCM2NIIX = False
#============ SIEMENS mMR SCANNER C O N S T A N T S ===============
# number of rings (axially) and crystals (transaxially)
NRNG = 64
# number of crystals transaxially
NCRS = 504
# reduced number of crystals by the gaps (dead crystals)
NCRSR = 448
# maximum ring difference
MRD = 60
# number of linear indexes for 1/2 Michelogram (NRNG**2/2 + NRNG/2)
NLI2R = 2080-6
#number of angular indexes in a 2D sinogram
A = 252
#number of bin indexes in a 2D sino
W = 344
H = W/2
NSN11 = 837
NSN1 = 4084
NSN64 = NRNG*NRNG
# 0: SSRB, 1: span-1, or 11: span-11 (11, default)
SPAN = 11
RNG_STRT = 0
RNG_END = 64
# no of sinos in a segment out of 11 segments
seg = np.array([127,115,115,93,93,71,71,49,49,27,27])
# minimum and maximum ring difference for each segment
minrd = np.array([-5,-16, 6,-27,17,-38,28,-49,39,-60,50])
maxrd = np.array([ 5, -6,16,-17,27,-28,38,-39,49,-50,60])
#----------
#------------------------------------------------------
# scatter ring definition
sct_irng = np.int16([0, 10, 19, 28, 35, 44, 53, 63])
# number of scatter rings (used for scatter estimation)
NSRNG = len(sct_irng)
#------------------------------------------------------
# number of direct sinograms (i.e., for segment 0)
SEG0 = 127
# Reference image size (usually the default from Siemens) and GPU dimensions for optimal execution
#~~~
SO_IMZ = 127
SO_IMY = 344
SO_IMX = 344
SO_VXX = 0.208626
SO_VXZ = 0.203125
SO_VXY = SO_VXX
SZ_IMZ = 127
SZ_IMY = 320
SZ_IMX = 320
SZ_VOXY = 0.208626
SZ_VOXZ = 0.203125
#~~~
# SO_IMZ = 127
# SO_IMY = 384
# SO_IMX = 384
# SO_VXX = 0.1669
# SO_VXZ = 0.203125
# SO_VXY = SO_VXX
# SZ_IMZ = 127
# SZ_IMY = 384
# SZ_IMX = 384
# SZ_VOXY = 0.1669
# SZ_VOXZ = 0.203125
#~~~
#~~~
# inverse size
SZ_VOXZi = round(1/SZ_VOXZ,6)
# squared radius of the transaxial field of view
TFOV2 = 890.0
#-------Scatter image size in x,y,z directions
# target scale factors for scatter mu-map and emission image respectively
# transmission (mu-map)
TRGTSCT = [0.5, 0.33]
SS_IMX = int(np.ceil(TRGTSCT[0]*SO_IMX)//2*2)
SS_IMY = int(np.ceil(TRGTSCT[0]*SO_IMY)//2*2)
SS_IMZ = int(np.ceil(TRGTSCT[0]*SO_IMZ)//2*2-1)
SS_VXY = round((SO_VXY*SO_IMX)/SS_IMX,6)
SS_VXZ = round((SO_VXZ*SO_IMZ)/SS_IMZ,6)
IS_VXZ = round(1/SS_VXZ,6)
# scaling [z,y,x]
SCTSCLMU = [float(SS_IMZ)/SO_IMZ, float(SS_IMY)/SO_IMY, float(SS_IMX)/SO_IMX]
# emission
SSE_IMX = int(np.ceil(TRGTSCT[1]*SO_IMX)//2*2)
SSE_IMY = int(np.ceil(TRGTSCT[1]*SO_IMY)//2*2)
SSE_IMZ = int(np.ceil(TRGTSCT[1]*SO_IMZ)//2*2+1)
SSE_VXY = round((SO_VXY*SO_IMX)/SSE_IMX,6)
SSE_VXZ = round((SO_VXZ*SO_IMZ)/SSE_IMZ,6)
# scaling [z,y,x]
SCTSCLEM = [float(SSE_IMZ)/SO_IMZ, float(SSE_IMY)/SO_IMY, float(SSE_IMX)/SO_IMX]
# # scaling for the emission image [z,y,x]
# SCTSCLEM = [0.34, 0.33, 0.33]
# # scaling for the mu-map
# SCTSCLMU = [0.499, 0.5, 0.5]
# SS_IMX = int(np.ceil(SCTSCLMU[2]*SO_IMX)//2*2)#172
# SS_IMY = int(np.ceil(SCTSCLMU[1]*SO_IMY)//2*2)#172
# SS_IMZ = int(np.ceil(SCTSCLMU[0]*SO_IMZ)//2*2-1)#63
# SS_VXY = round((SO_VXY*SO_IMX)/SS_IMX,6) # 0.417252 #
# SS_VXZ = round((SO_VXZ*SO_IMZ)/SS_IMZ,6) # 0.409474 #
# IS_VXZ = round(1/SS_VXZ,6)
# SSE_IMX = int(np.ceil(SCTSCLEM[2]*SO_IMX)//2*2) #114
# SSE_IMY = int(np.ceil(SCTSCLEM[1]*SO_IMY)//2*2) #114
# SSE_IMZ = int(np.ceil(SCTSCLEM[0]*SO_IMZ)//2*2-1) #43
# SSE_VXY = round((SO_VXY*SO_IMX)/SSE_IMX,6) #0.629538
# SSE_VXZ = round((SO_VXZ*SO_IMZ)/SSE_IMZ,6) #0.599927
#-------
#> decay correction
DCYCRR = True
#--- Time of Flight ---
#speed of light
CLGHT = 29979245800 #cm/s
#coincidence time window [ps]
CWND = 5859.38e-12
#number of TOF bins
TOFBINN = 1
#size of TOF bin in [ps]
TOFBINS = 390e-12
#size of TOF BIN in cm of travelled distance
TOFBIND = TOFBINS*CLGHT
#inverse of the above
ITOFBIND = 1/TOFBIND
#ring radius
R = 32.8
#effective ring radius accounting for the depth of interaction
RE = (R + 0.67)#0.67
#axial crystal width
AXR = 0.40625
#crystal angle
ALPHA = 0.714286*pi/180 #2*pi/NCRS
#crystal gap period
TGAP = 9
#crystal gap offset (used for getting the sino gaps right at the position)
OFFGAP = 1
#--- FOR SCATTER ---
#electron radius **2
R02 = 7.940787449825884e-26
#detection lower energy threashold
LLD = 430000
E511 = 511008
#energy resolution
ER = 0#0.154
#discretisation of the scatter angle spectrum
NCOS = 256
#cosine of maximum allowed scatter angle
COSUPSMX = 0.725 #0.58 #0.722 #Elow = E511/(2-cos(upsmx))
#step of the discreatisation
COSSTP = (1-COSUPSMX)/(NCOS-1)
#inverse of the step
ICOSSTP = 1/COSSTP
#bool for using all emissions in mask for scatter corrections
EMMSKS = False
#intensity percentage threshold of voxels to be considered in the image
ETHRLD = 0.05
#=================================================================================================
def get_gpu_constants(Cnt=None):
    '''Return a dictionary of GPU related constants.

    :param Cnt: optional dictionary to update in place; a fresh one is
        created when omitted.  (Previously the default was a mutable
        ``{}`` literal, shared and mutated across calls.)
    :returns: the updated dictionary
    '''
    if Cnt is None:
        Cnt = {}
    # device id; used for choosing the GPU device for calculations
    if 'DEV_ID' in globals():
        Cnt['DEVID'] = DEV_ID
    # chosen device architectures for NVCC compilation
    if 'CC_ARCH' in globals():
        Cnt['CCARCH'] = CC_ARCH
    return Cnt
#=================================================================================================
def get_setup(Cnt=None):
    '''Return a dictionary of GPU, mu-map hardware and third party set-up.

    :param Cnt: optional dictionary to update in place; a fresh one is
        created when omitted.  (Previously the default was a mutable
        ``{}`` literal, shared and mutated across calls.)
    :returns: the updated dictionary
    '''
    if Cnt is None:
        Cnt = {}
    # the name of the folder for NiftyPET tools
    Cnt['DIRTOOLS'] = DIRTOOLS
    # additional paramteres for compiling tools with cmake
    Cnt['CMAKE_TLS_PAR'] = CMAKE_TLS_PAR
    # hardware mu-maps
    Cnt['HMULIST'] = hrdwr_mu
    # Microsoft Visual Studio Compiler version
    Cnt['MSVC_VRSN'] = MSVC_VRSN
    # GPU related setup
    Cnt = get_gpu_constants(Cnt)
    # image processing / registration tool paths, only when configured
    if 'PATHTOOLS' in globals() and PATHTOOLS != '':
        Cnt['PATHTOOLS'] = PATHTOOLS
    if 'RESPATH' in globals() and RESPATH != '':
        Cnt['RESPATH'] = RESPATH
    if 'REGPATH' in globals() and REGPATH != '':
        Cnt['REGPATH'] = REGPATH
    if 'DCM2NIIX' in globals() and DCM2NIIX != '':
        Cnt['DCM2NIIX'] = DCM2NIIX
    # hardware mu-maps
    if 'HMUDIR' in globals() and HMUDIR != '':
        Cnt['HMUDIR'] = HMUDIR
    if 'VINCIPATH' in globals() and VINCIPATH != '':
        Cnt['VINCIPATH'] = VINCIPATH
    Cnt['ENBLXNAT'] = ENBLXNAT
    Cnt['ENBLAGG'] = ENBLAGG
    Cnt['CMPL_DCM2NIIX'] = CMPL_DCM2NIIX
    return Cnt
#=================================================================================================
def get_mmr_constants():
    '''
    Put all the constants together in a dictionary

    Collects the module-level Siemens mMR scanner geometry, sinogram,
    scatter and time-of-flight constants, plus the GPU and third-party
    tool set-up, into a single dictionary used throughout NiftyPET.
    '''
    # Scanner geometry, scatter and TOF constants defined at module level
    Cnt = {
        'ISOTOPE':'F18',
        'DCYCRR':DCYCRR,
        'ALPHA':ALPHA,
        'NRNG':NRNG,
        'NSRNG':NSRNG,
        'NCRS':NCRS,
        'NCRSR':NCRSR,
        'NBCKT':224,
        'NSANGLES':A,
        'NSBINS':W,
        'Naw':-1, # number of total active bins per 2D sino
        'NSN11': NSN11, # number of sinos in span-11
        'NSN1': NSN1, # number of sinos in span-1
        'NSN64': NSN64, # number of sinos in span-1 with no MRD limit
        'MRD': MRD, # maximum ring difference RD
        'SPN':SPAN, # span-1 (1), span-11 (11), ssrb (0)
        'TFOV2':TFOV2, # squared radius of TFOV
        'RNG_STRT':RNG_STRT, # limit axial extension by defining start and end ring
        'RNG_END' :RNG_END, # this feature only works with span-1 processing (Cnt['SPN']=1)
        'SS_IMZ':SS_IMZ, #Scatter mu-map iamge size
        'SS_IMY':SS_IMY,
        'SS_IMX':SS_IMX,
        'SS_VXZ':SS_VXZ,
        'SS_VXY':SS_VXY,
        'IS_VXZ':IS_VXZ,
        'SSE_IMZ':SSE_IMZ, #Scatter emission image size
        'SSE_IMY':SSE_IMY,
        'SSE_IMX':SSE_IMX,
        'SSE_VXZ':SSE_VXZ,
        'SSE_VXY':SSE_VXY,
        'SZ_IMZ':SZ_IMZ, #GPU optimised image size
        'SZ_IMY':SZ_IMY,
        'SZ_IMX':SZ_IMX,
        'SZ_VOXZ':SZ_VOXZ,
        'SZ_VOXY':SZ_VOXY,
        'SZ_VOXZi':SZ_VOXZi,
        'SO_IMZ':SO_IMZ, #Original image size (from Siemens)
        'SO_IMY':SO_IMY,
        'SO_IMX':SO_IMX,
        'SO_VXZ':SO_VXZ,
        'SO_VXY':SO_VXY,
        'SO_VXX':SO_VXX,
        'NSEG0':SEG0,
        'RE':RE, #effective ring radius
        'R':R,
        'SEG':seg,
        'MNRD':minrd,
        'MXRD':maxrd,
        'SCTRNG':sct_irng,
        'TGAP':TGAP,
        'OFFGAP':OFFGAP,
        'AXR':AXR,
        'R02':R02, #squared electron radius
        'LLD':LLD, #lower energy threashold
        'E511':E511,
        'EMMSKS':EMMSKS, #emissions determine mask for scatter corrections
        'ER':ER, #energy resolution
        'COSUPSMX':COSUPSMX, #cosine of max allowed scatter angle
        'NCOS':NCOS, #number of cos samples for LUT
        'COSSTP':COSSTP, #cosine step
        'ICOSSTP':ICOSSTP, #inverse of cosine step
        'ETHRLD':ETHRLD, #intensity emission image threshold (used in scatter modelling)
        'CLGHT':CLGHT, #speed of light [cm/s]
        'CWND':CWND, #coincidence time window [ps]
        'TOFBINN':TOFBINN, #number of TOF bins
        'TOFBINS':TOFBINS, #TOF bin width [ps]
        'TOFBIND':TOFBIND,
        'ITOFBIND':ITOFBIND,
        # affine and image size for the reconstructed image, assuming the centre of voxels in mm
        'AFFINE':np.array([ [-10*SO_VXX, 0., 0., 5.*SO_IMX*SO_VXX ], #+5.*SO_VXX
                            [0., 10*SO_VXY, 0., -5.*SO_IMY*SO_VXY ], #+5.*SO_VXY
                            [0., 0., 10*SO_VXZ, -5.*SO_IMZ*SO_VXZ ], #-5.*SO_VXZ
                            [0., 0., 0., 1.]]),
        'IMSIZE':np.array([SO_IMZ, SO_IMY, SO_IMX]),
        'BTP':0, #1:non parametric bootstrap, 2: parametric bootstrap (recommended)
        'BTPRT':1.0, # Ratio of bootstrapped/original events (enables downsampling)
        'VERBOSE':False,
        'SCTSCLEM':SCTSCLEM,
        'SCTSCLMU':SCTSCLMU,
        }

    # get the setup for GPU and third party apps
    Cnt = get_setup(Cnt=Cnt)

    return Cnt
| [
37811,
33236,
2393,
329,
399,
24905,
47731,
399,
4061,
2767,
290,
399,
3955,
4537,
3503,
526,
15931,
198,
2,
47232,
24305,
198,
834,
9800,
834,
220,
220,
220,
220,
220,
796,
366,
47,
707,
417,
2940,
48596,
1,
198,
834,
22163,
4766,
... | 2.129616 | 5,524 |
RESULTS_PATH = "tracking_results"
EVALUATION_PATH = "evaluation_results"
VISUALIZATION_PATH = "visualization_results"
# Dataset
GOT10K_PATH = "/home/heonsong/Disk2/Dataset/Got10K"
LASOT_PATH = "/home/heonsong/Disk2/Dataset/LaSOT"
NFS_PATH = "/home/heonsong/Disk2/Dataset/NFS"
OTB_PATH = "/home/heonsong/Disk2/Dataset/OTB"
OXUVA_PATH = "/home/heonsong/Disk2/Dataset/OxUvA"
TPL_PATH = "/home/heonsong/Disk2/Dataset/TColor128"
UAV_PATH = "/home/heonsong/Disk2/Dataset/UAV123"
TRACKINGNET_PATH = "/home/heonsong/Disk2/Dataset/TrackingNet"
VOT_PATH = "/home/heonsong/Disk2/Dataset/VOT2018"
OTB_NOISY_PATH = "noisy_idx"
# DaSiamRPN
DASIAMRPN_MODEL = "weights/DaSiamRPN/SiamRPNOTB.model"
# DROL
DROL_CONFIG = "weights/DROL/siamrpn_r50_l234_dwxcorr_otb/config.yaml"
DROL_SNAPSHOT = "weights/DROL/siamrpn_r50_l234_dwxcorr_otb/model.pth"
# GradNet
GRADNET_MODEL = "weights/GradNet/ckpt/base_l5_1t_49/model_epoch49.ckpt"
# MemDTC
MEMDTC_MODEL = "weights/MemDTC/models"
# MemTrack
MEMTRACK_MODEL = "weights/MemTrack/models"
# Ocean
OCEAN_MODEL = "weights/Ocean/OceanO.pth"
# RLS-RTMDNet
RLS_RTMDNET_MODEL = "weights/RLS-RTMDNet/rt-mdnet.pth"
# ROAM
ROAM_FEAT_DIR = ""
ROAM_MODEL_DIR = ""
# RPT
RPT_CONFIG = "weights/RPT/config_vot2018_offline.yaml"
RPT_SNAPSHOT = "weights/RPT/siamreppoints.model"
# SiamBAN
SIAMBAN_CONFIG = "weights/SiamBAN/siamban_r50_l234_otb/config.yaml"
SIAMBAN_SNAPSHOT = "weights/SiamBAN/siamban_r50_l234_otb/model.pth"
# SiamCAR
SIAMCAR_CONFIG = "weights/SiamCAR/siamcar_r50/config.yaml"
SIAMCAR_SNAPSHOT = "weights/SiamCAR/model_general.pth"
# SiamDW
SIAMDW_MODEL = "weights/SiamDW/CIResNet22_RPN.pth"
SIAMDW_CIRINCEP22_MODEL = "weights/SiamDW/CIRIncep22.pth"
SIAMDW_CIRNEXT22_MODEL = "weights/SiamDW/CIRNext22.pth"
SIAMDW_CIRESNET22FC_G_MODEL = "weights/SiamDW/CIResNet22FC_G.pth"
SIAMDW_CIRESNET22_MODEL = "weights/SiamDW/CIResNet22.pth"
SIAMDW_SIAMFCRES22W_MODEL = "weights/SiamDW/SiamFCRes22W.pth"
SIAMDW_CIRESNET22_RPN_MODEL = "weights/SiamDW/CIResNet22_RPN.pth"
# SiamFC
SIAMFC_MODEL = "weights/SiamFC/model.pth"
# SiamFC++
SIAMFCPP_CONFIG = "weights/SiamFC++/otb/siamfcpp_googlenet-otb.yaml"
# SiamMFC
SIAMMCF_ROOT_DIR = "external/siam-mcf/"
SIAMMCF_MODEL = "weights/SiamMCF/pretrained/siam_mcf.ckpt-50000"
# SiamRPN
SIAMRPN_MODEL = "weights/SiamRPN/model.pth"
# SiamRPN++
SIAMRPNPP_CONFIG = "weights/SiamRPN++/siamrpn_r50_l234_dwxcorr_otb/config.yaml"
SIAMRPNPP_SNAPSHOT = "weights/SiamRPN++/siamrpn_r50_l234_dwxcorr_otb/model.pth"
SIAMRPNPP_ALEXNET_OTB_CONFIG = "weights/SiamRPN++/siamrpn_alex_dwxcorr_otb/config.yaml"
SIAMRPNPP_ALEXNET_OTB_SNAPSHOT = "weights/SiamRPN++/siamrpn_alex_dwxcorr_otb/model.pth"
SIAMRPNPP_ALEXNET_CONFIG = "weights/SiamRPN++/siamrpn_alex_dwxcorr/config.yaml"
SIAMRPNPP_ALEXNET_SNAPSHOT = "weights/SiamRPN++/siamrpn_alex_dwxcorr/model.pth"
SIAMRPNPP_MOBILENET_CONFIG = (
"weights/SiamRPN++/siamrpn_mobilev2_l234_dwxcorr/config.yaml"
)
SIAMRPNPP_MOBILENET_SNAPSHOT = (
"weights/SiamRPN++/siamrpn_mobilev2_l234_dwxcorr/model.pth"
)
SIAMRPNPP_RESNET_LT_CONFIG = "weights/SiamRPN++/siamrpn_r50_l234_dwxcorr_lt/config.yaml"
SIAMRPNPP_RESNET_LT_SNAPSHOT = "weights/SiamRPN++/siamrpn_r50_l234_dwxcorr_lt/model.pth"
SIAMRPNPP_RESNET_OTB_CONFIG = (
"weights/SiamRPN++/siamrpn_r50_l234_dwxcorr_otb/config.yaml"
)
SIAMRPNPP_RESNET_OTB_SNAPSHOT = (
"weights/SiamRPN++/siamrpn_r50_l234_dwxcorr_otb/model.pth"
)
SIAMRPNPP_RESNET_CONFIG = "weights/SiamRPN++/siamrpn_r50_l234_dwxcorr/config.yaml"
SIAMRPNPP_RESNET_SNAPSHOT = "weights/SiamRPN++/siamrpn_r50_l234_dwxcorr/model.pth"
SIAMPRNPP_SIAMMASK_CONFIG = "weights/SiamRPN++/siammask_r50_l3/config.yaml"
SIAMPRNPP_SIAMMASK_SNAPSHOT = "weights/SiamRPN++/siammask_r50_l3/model.pth"
# SPM
SPM_CONFIG = "weights/SPM/alexnet_c42_otb.yaml"
# THOR
THOR_CONFIG = "weights/THOR"
THOR_SIAMFC_MODEL = "weights/THOR/SiamFC/model.pth"
THOR_SIAMRPN_MODEL = "weights/THOR/SiamRPN/SiamRPNBIG.model"
THOR_SIAMMASK_MODEL = "weights/THOR/SiamMask/model.pth"
# TRAS
TRAS_MODEL = "weights/TRAS/Student.weights"
| [
46274,
62,
34219,
796,
366,
36280,
62,
43420,
1,
198,
20114,
1847,
52,
6234,
62,
34219,
796,
366,
18206,
2288,
62,
43420,
1,
198,
29817,
25620,
14887,
6234,
62,
34219,
796,
366,
41464,
1634,
62,
43420,
1,
198,
198,
2,
16092,
292,
31... | 1.943341 | 2,065 |
# -*- coding: utf-8 -*-
# Copyright (c) 2017-present Alibaba Group Holding Limited. He Guimin <heguimin36@163.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
2,
15069,
357,
66,
8,
2177,
12,
25579,
41992,
4912,
31703,
15302,
13,
679,
1962,
320,
259,
1279,
258,
5162,
320,
259,
2623,
31,
24136,
13,
785,
29,
198,
2,
2296... | 2.625 | 80 |
import gzip
from typing import Optional
import httpx
from app.core import settings
| [
11748,
308,
13344,
198,
6738,
19720,
1330,
32233,
198,
198,
11748,
2638,
87,
198,
198,
6738,
598,
13,
7295,
1330,
6460,
628
] | 3.909091 | 22 |
#!/usr/bin/env python
#
# updatecheck.py - The UpdateCheckAction class.
#
# Author: Paul McCarthy <pauldmccarthy@gmail.com>
#
"""This module provides the :class:`.UpdateCheckAction`, which checks to see
if a new version of FSLeyes is available.
"""
import logging
import urllib.request as request
import ssl
import json
import wx
import wx.adv as wxadv
import fsl.version as fslversion
import fsleyes_widgets.utils.status as status
import fsleyes.version as version
import fsleyes.strings as strings
from . import base
log = logging.getLogger(__name__)
_FSLEYES_URL = 'https://fsl.fmrib.ox.ac.uk/fsl/fslwiki/FSLeyes'
"""A url to direct the user towards to download the latest version of FSLeyes.
"""
_FSLEYES_VERSION_URL = 'https://api.anaconda.org/package/conda-forge/fsleyes'
"""A url which points to a JSON file that contains information about the
FSLeyes package on conda-forge.
"""
class UpdateCheckAction(base.Action):
    """The :class:`.UpdateCheckAction` is an :class:`.Action` which checks to
    see if a new version of FSLeyes is available, and tells the user if there
    is.
    """

    def __init__(self, overlayList, displayCtx):
        """Create an ``UpdateCheckAction``. """
        # The action callback is the private update-check method below
        base.Action.__init__(
            self, overlayList, displayCtx, self.__checkForUpdates)

    def __checkForUpdates(self,
                          showUpToDateMessage=True,
                          showErrorMessage=True,
                          ignorePoint=False):
        """Run this action. Downloads a text file from a URL which contains
        the latest available version of FSLeyes. Compares that version with
        the running version. Displays a message to the user.

        :arg showUpToDateMessage: Defaults to ``True``. If ``False``, and
                                  the current version of FSLeyes is up to
                                  date, the user is not informed.

        :arg showErrorMessage:    Defaults to ``True``. If ``False``, and
                                  some error occurs while checking for
                                  updates, the user is not informed.

        :arg ignorePoint:         Defaults to ``False``. If ``True``, the
                                  point release number is ignored in the
                                  comparison.
        """

        errMsg = strings.messages[self, 'newVersionError']
        errTitle = strings.titles[ self, 'newVersionError']

        # Any exception raised inside this block is reported via a dialog
        # (when showErrorMessage is True) instead of being propagated
        with status.reportIfError(errTitle,
                                  errMsg,
                                  raiseError=False,
                                  report=showErrorMessage):

            log.debug('Checking for FSLeyes updates ({})'.format(
                _FSLEYES_VERSION_URL))

            # NOTE(review): hostname checking and certificate verification
            # are disabled here, so the HTTPS fetch is effectively
            # unauthenticated -- confirm this is intentional (security risk)
            ctx = ssl.create_default_context()
            ctx.check_hostname = False
            ctx.verify_mode = ssl.CERT_NONE

            # Fetch the conda-forge package metadata (JSON) for fsleyes
            with request.urlopen(_FSLEYES_VERSION_URL, context=ctx) as f:
                info = json.loads(f.read().decode('utf-8'))

            # upToDate when the latest published version is <= the running one
            latest = info['latest_version']
            current = version.__version__
            upToDate = fslversion.compareVersions(latest,
                                                  current,
                                                  ignorePoint) <= 0

            log.debug('This version of FSLeyes ({}) is '
                      '{} date (latest: {})'.format(
                          current,
                          'up to' if upToDate else 'out of',
                          latest))

            if upToDate and not showUpToDateMessage:
                return

            # Build the message/title pair, then show a modal dialog with
            # the download URL
            urlMsg = strings.messages[self, 'updateUrl']

            if upToDate:
                title = strings.titles[ self, 'upToDate']
                msg = strings.messages[self, 'upToDate']
                msg = msg.format(current)

            else:
                title = strings.titles[ self, 'newVersionAvailable']
                msg = strings.messages[self, 'newVersionAvailable']
                msg = msg.format(current, latest, _FSLEYES_URL)

            parent = wx.GetTopLevelWindows()[0]
            dlg = UrlDialog(parent, title, msg, urlMsg, _FSLEYES_URL)

            dlg.CentreOnParent()
            dlg.ShowModal()
class UrlDialog(wx.Dialog):
    """Custom ``wx.Dialog`` used by the :class:`UpdateCheckAction` to
    display a message containing the FSLeyes download URL to the user.
    """

    def __init__(self,
                 parent,
                 title,
                 msg,
                 urlMsg=None,
                 url=None):
        """Create a ``UrlDialog``.

        :arg parent: ``wx`` parent object
        :arg title: Dialog title
        :arg msg: Message to display
        :arg urlMsg: Message to display next to the URL. Not shown if a URL
                     is not provided.
        :arg url: URL to display.
        """
        wx.Dialog.__init__(self,
                           parent,
                           title=title,
                           style=wx.DEFAULT_DIALOG_STYLE)

        # NOTE: the msg/urlMsg/url parameters are rebound to widgets below
        ok = wx.Button( self, label='Ok', id=wx.ID_OK)
        msg = wx.StaticText(self, label=msg)

        self.__ok = ok

        if urlMsg is not None:
            urlMsg = wx.StaticText(self, label=urlMsg)
        if url is not None:
            url = wxadv.HyperlinkCtrl(self, url=url)

        # Vertical stack: message, optional URL label + link, button row
        sizer = wx.BoxSizer(wx.VERTICAL)
        btnSizer = wx.BoxSizer(wx.HORIZONTAL)

        sizer.Add((1, 20), flag=wx.EXPAND, proportion=1)
        sizer.Add(msg, flag=wx.EXPAND | wx.LEFT | wx.RIGHT, border=20)
        sizer.Add((1, 20), flag=wx.EXPAND, proportion=1)
        # URL label is only shown when both the label text and URL are given
        if urlMsg is not None and url is not None:
            sizer.Add(urlMsg, flag=wx.EXPAND | wx.LEFT | wx.RIGHT, border=20)
            sizer.Add((1, 5), flag=wx.EXPAND, proportion=1)
        if url is not None:
            sizer.Add(url, flag=wx.EXPAND | wx.LEFT | wx.RIGHT, border=20)
        sizer.Add((1, 20), flag=wx.EXPAND, proportion=1)

        # Button row: OK button horizontally centred via stretch spacers
        btnSizer.Add((20, 1), flag=wx.EXPAND, proportion=1)
        btnSizer.Add(ok, flag=wx.EXPAND)
        btnSizer.Add((20, 1), flag=wx.EXPAND)
        sizer.Add(btnSizer, flag=wx.EXPAND)
        sizer.Add((1, 20), flag=wx.EXPAND, proportion=1)

        # Pressing Enter activates the OK button
        ok.SetDefault()

        self.SetSizer(sizer)
        self.Layout()
        self.Fit()

    @property
    def ok(self):
        """Return a reference to the OK button. """
        return self.__ok
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
198,
2,
4296,
9122,
13,
9078,
532,
383,
10133,
9787,
12502,
1398,
13,
198,
2,
198,
2,
6434,
25,
3362,
18751,
1279,
79,
30406,
76,
535,
18270,
31,
14816,
13,
785,
29,
198,
2,
1... | 1.980769 | 3,328 |
from __future__ import unicode_literals
from optparse import make_option
from leonardo import leonardo
from django.core.management.base import BaseCommand, NoArgsCommand
from leonardo_sitestarter.scaffold_web import load_data
from leonardo_sitestarter.utils import _load_from_stream
| [
6738,
11593,
37443,
834,
1330,
28000,
1098,
62,
17201,
874,
198,
198,
6738,
2172,
29572,
1330,
787,
62,
18076,
198,
6738,
443,
261,
13109,
1330,
443,
261,
13109,
198,
6738,
42625,
14208,
13,
7295,
13,
27604,
13,
8692,
1330,
7308,
21575,... | 3.433735 | 83 |
"""LeetCode problem 001 - Two Sum
Given an array of integers, return indices of the two numbers
such that they add up to a specific target.
You may assume that each input would have exactly one solution.
Example:
>>> two_sum(nums=[2, 5, 8, 21], target=13)
[2, 1]
"""
def two_sum(nums: list, target: int) -> list[int]:
"""Run in O(n) time, O(n) memory by using a dictionary.
The hint is in the `each input would have exactly one solution`.
For each number n, we check if we there is a matching `target - n`:
if so, then we have our pair
if not, we update the dictionary and check the next number
"""
num_dict = {}
for i1, num in enumerate(nums):
try:
i2 = num_dict[target - num]
return [i1, i2]
except KeyError:
num_dict[num] = i1
return []
def two_sum_brute(nums: list, target: int) -> list[int]:
"""Run in O(n**2) with memory O(1)."""
i1 = 0
while i1 < len(nums) - 1:
i2 = i1 + 1
while i2 < len(nums):
print(i1, i2, nums[i1], nums[i2])
if nums[i1] + nums[i2] == target:
return [i1, i2]
i2 += 1
i1 += 1
return []
| [
37811,
3123,
316,
10669,
1917,
3571,
16,
532,
4930,
5060,
198,
198,
15056,
281,
7177,
286,
37014,
11,
1441,
36525,
286,
262,
734,
3146,
198,
10508,
326,
484,
751,
510,
284,
257,
2176,
2496,
13,
198,
1639,
743,
7048,
326,
1123,
5128,
... | 2.224265 | 544 |
import argparse
import sys
import os
from datetime import datetime
import keras
from keras.models import *
from keras.datasets import cifar10
from keras.datasets import mnist
from keras.applications.vgg16 import VGG16
from keras.preprocessing.image import load_img
from keras.layers import *
from keras import *
from utils import *
from nc_setup import *
from ssc import *
try:
from art.attacks.fast_gradient import FastGradientMethod
from art.classifiers import KerasClassifier
except:
from attacks import *
try:
from art.attacks.fast_gradient import FastGradientMethod
from art.classifiers import KerasClassifier
except:
from attacks import *
| [
11748,
1822,
29572,
198,
11748,
25064,
198,
11748,
28686,
198,
6738,
4818,
8079,
1330,
4818,
8079,
198,
198,
11748,
41927,
292,
198,
6738,
41927,
292,
13,
27530,
1330,
1635,
198,
6738,
41927,
292,
13,
19608,
292,
1039,
1330,
269,
361,
2... | 3.315 | 200 |
import wx
import sys
from wxBufferedWindow import *
from PIL import Image
| [
11748,
266,
87,
198,
11748,
25064,
198,
6738,
266,
87,
36474,
1068,
27703,
1330,
1635,
198,
6738,
350,
4146,
1330,
7412,
198,
220,
220,
220,
220,
220,
220,
220,
220,
220,
628,
198
] | 2.606061 | 33 |
from uploader import AbstractUploader
| [
6738,
9516,
263,
1330,
27741,
41592,
263,
198
] | 4.75 | 8 |
from funcy import print_durations
from collections import Counter, defaultdict
import numpy as np
@print_durations
if __name__ == "__main__":
test_a, test_b = day6("test_input.txt", 80), day6("test_input.txt", 256)
puzzle_a, puzzle_b = day6("input.txt", 80), day6("input.txt", 256)
print(f"Day 6a: {test_a}")
print(f"Day 6b: {test_b}")
print(f"Day 6a: {puzzle_a}")
print(f"Day 6b: {puzzle_b}")
assert test_a == 5934
assert test_b == 26984457539
assert puzzle_a == 365862
assert puzzle_b == 1653250886439
| [
6738,
1257,
948,
1330,
3601,
62,
67,
20074,
198,
6738,
17268,
1330,
15034,
11,
4277,
11600,
198,
11748,
299,
32152,
355,
45941,
628,
198,
31,
4798,
62,
67,
20074,
628,
198,
361,
11593,
3672,
834,
6624,
366,
834,
12417,
834,
1298,
198,... | 2.37931 | 232 |
"""Django command for clearing inactivate variant sets."""
from django.core.management.base import BaseCommand
from django.db import transaction
from variants.models import cleanup_variant_sets as clear_inactive_variant_sets
from variants.tasks import clear_inactive_variant_sets as task_clear_inactive_variant_sets
class Command(BaseCommand):
"""Implementation of clearing inactive variant sets.
"""
#: Help message displayed on the command line.
help = "Clear inactive variant sets."
def add_arguments(self, parser):
"""Add the command's argument to the ``parser``."""
parser.add_argument(
"--async", help="Run the clearing asynchronously.", action="store_false"
)
@transaction.atomic
def handle(self, *args, **options):
"""Perform rebuilding the statistics."""
if options["async"]:
clear_inactive_variant_sets()
msg = "Done clearing inactive variant sets."
else:
task_clear_inactive_variant_sets.delay()
msg = "Pushed clearing inactive variant sets to background."
self.stdout.write(self.style.SUCCESS(msg))
| [
37811,
35,
73,
14208,
3141,
329,
17304,
287,
39022,
15304,
5621,
526,
15931,
628,
198,
6738,
42625,
14208,
13,
7295,
13,
27604,
13,
8692,
1330,
7308,
21575,
198,
6738,
42625,
14208,
13,
9945,
1330,
8611,
198,
198,
6738,
17670,
13,
27530... | 2.825243 | 412 |
# Copyright (c) 2013, Luis Fernandes and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
| [
2,
15069,
357,
66,
8,
2211,
11,
20894,
26366,
274,
290,
20420,
198,
2,
1114,
5964,
1321,
11,
3387,
766,
5964,
13,
14116,
198,
198,
6738,
11593,
37443,
834,
1330,
28000,
1098,
62,
17201,
874,
198,
11748,
5306,
27768,
198,
6738,
5306,
... | 4.04 | 50 |
#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import sys
from mopy.mojo_python_tests_runner import MojoPythonTestRunner
if __name__ == '__main__':
sys.exit(main())
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
15069,
1946,
383,
18255,
1505,
46665,
13,
1439,
2489,
10395,
13,
198,
2,
5765,
286,
428,
2723,
2438,
318,
21825,
416,
257,
347,
10305,
12,
7635,
5964,
326,
460,
307,
198,
2,
1043,... | 3.255102 | 98 |
from ib_async.functionality.current_time import CurrentTimeMixin
from ib_async.functionality.executions import ExecutionsMixin
from ib_async.functionality.instrument_details import InstrumentDetailsMixin
from ib_async.functionality.matching_symbols import MatchingSymbolsMixin
from ib_async.functionality.market_data import MarketDataMixin
from ib_async.functionality.market_depth import MarketDepthMixin
from ib_async.functionality.realtime_bars import RealtimeBarsMixin
from ib_async.functionality.tickbytick import TickByTickMixin
from ib_async.functionality.position import PositionMixin
from ib_async.functionality.orders import OrdersMixin
from ib_async.protocol import Protocol
from ib_async.instrument import Instrument, SecurityIdentifierType, SecurityType
from ib_async.tick_types import TickType, TickTypeGroup, MarketDataTimeliness
__all__ = [
"IBClient", "connect",
"Instrument", "SecurityIdentifierType", "SecurityType",
"TickType", "TickTypeGroup", "MarketDataTimeliness"
]
| [
6738,
24283,
62,
292,
13361,
13,
8818,
1483,
13,
14421,
62,
2435,
1330,
9236,
7575,
35608,
259,
198,
6738,
24283,
62,
292,
13361,
13,
8818,
1483,
13,
18558,
3508,
1330,
8393,
3508,
35608,
259,
198,
6738,
24283,
62,
292,
13361,
13,
881... | 3.353333 | 300 |
from utilities.dashboard.config import INTRO_PAGE_DIR
def create_intro_page_data(language, dataset_name):
"""Create a dictionary with several dataset and language specific text snippets and headers."""
metadata_path = INTRO_PAGE_DIR / "metadata" / dataset_name
res = {}
with open(metadata_path / f"top_text_{language}.txt", "r", encoding="utf-8") as f:
res["top_text"] = f.read()
with open(metadata_path / f"plot_intro_{language}.txt", "r", encoding="utf-8") as f:
res["plot_intro"] = f.read()
with open(metadata_path / f"bottom_text_{language}.txt", "r", encoding="utf-8") as f:
res["bottom_text"] = f.read()
with open(metadata_path / f"intro_april_{language}.txt", "r", encoding="utf-8") as f:
res["april_intro"] = f.read()
if language == "english":
res["title"] = "Explore What People Believe and Do in Response to CoViD-19"
res["groupby_title"] = "Group Differences: March 2020 & April 2020"
res["april_title"] = "Childcare"
res["bottom_title"] = "Labour Supply"
elif language == "german":
res["title"] = (
"Was denken die Menschen zur Corona-Pandemie, wie stark "
+ "sind sie von ihr betroffen und wie passen sie ihr Verhalten an?"
)
res["groupby_title"] = "Unterschiede zwischen Gruppen: März 2020 & April 2020"
res["april_title"] = "Kinderbetreuung"
res["bottom_title"] = "Arbeitsangebot"
else:
raise NotImplementedError("The language you supplied is not supported yet.")
return res
| [
6738,
20081,
13,
42460,
3526,
13,
11250,
1330,
3268,
5446,
46,
62,
4537,
8264,
62,
34720,
628,
198,
4299,
2251,
62,
600,
305,
62,
7700,
62,
7890,
7,
16129,
11,
27039,
62,
3672,
2599,
198,
220,
220,
220,
37227,
16447,
257,
22155,
351... | 2.490536 | 634 |
import time
from abc import ABC, abstractmethod
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.patches as patches
import rps.utilities.misc as misc
| [
11748,
640,
198,
6738,
450,
66,
1330,
9738,
11,
12531,
24396,
198,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
198,
11748,
2603,
29487,
8019,
13,
8071,
2052,
355,
16082,
198,
198,
... | 3.326923 | 52 |
from helpers import *
from unreal_engine.classes import (Blueprint, K2Node_FunctionResult, K2Node_MakeArray as K2Node_MakeArray, K2Node_Event, K2Node_MakeStruct,
KismetSystemLibrary as KSL, KismetTextLibrary as KTL, KismetStringLibrary as KStrLib)
from unreal_engine import EdGraphPin, UObject, UScriptStruct
from unreal_engine.structs import Vector as StructVector # this is *not* FVector
def MakeConnectedArray(graph, category, subcategory, pins, pos=None):
    """Add a MakeArray node whose element inputs are wired to existing pins.

    Like MakeArray, but instead of filling the elements with literal values,
    each element pin is linked to the corresponding pin in *pins*.  Every
    element pin and the output Array pin are forced to the given pin
    *category*/*subcategory* so the array's type is unambiguous.

    Returns the node wrapped in a NodeWrapper (access the output via .Array).
    """
    where = pos if pos is not None else graph.graph_get_good_place_for_new_node()
    raw = graph.graph_add_node(K2Node_MakeArray, *where)
    raw.NumInputs = len(pins)
    raw.node_reconstruct()  # materialize one '[i]' input pin per element
    for index, source in enumerate(pins):
        element = raw.node_find_pin('[%d]' % index)
        element.category = category
        element.sub_category = subcategory
        element.make_link_to(source)
    wrapped = NodeWrapper(raw)
    wrapped.Array.category = category
    wrapped.Array.sub_category = subcategory
    return wrapped
from unreal_engine.classes import Blueprint, K2Node_DynamicCast, Actor, Object
from unreal_engine.structs import EdGraphPinType
from unreal_engine.enums import EEdGraphPinDirection
def TestRecorderNote(graph, who, action, argsPin, executePin):
    """Append a TestRecorder.Note call to *graph*, wire it up, return the wrapped node.

    The call's target is the blueprint's 'Recorder' variable; *who* and *action*
    are passed as literals, *argsPin* feeds the 'args' input, and *executePin*
    is linked to the call's execute pin so it runs after the previous node.
    """
    call = MakeCall(graph, TestRecorder.Note)
    call.self = GetVariable(graph, 'Recorder').Recorder  # the BP's Recorder instance variable
    call.who = who
    call.action = action
    call.args = argsPin
    call.execute = executePin
    return call
# Generated class of the /Game/Utils blueprint; supplies the Str* conversion
# functions (StrInt, StrFloat, StrStructArray, StrActorArray, ...) used by the
# scaffolding scripts below to stringify pin values for the TestRecorder.
Utils = FindBP('/Game/Utils.Utils').GeneratedClass
from unreal_engine.classes import TestRecorder, TestActor, ParamActor
from unreal_engine.structs import TestStruct
# ---------------------------------------------------------------------------
# One-shot editor scripts that generate Blueprint graphs for the TestStruct
# scenarios (In / Out / Ret / InOutRet and their Test* drivers).
# Each script is disabled with `if 0:`; flip the 0 to 1 to run exactly one of
# them — presumably from the UE editor's Python console — then flip it back.
# NOTE(review): they share module-level names (bp, graph, entry, ...), so they
# are intended to be enabled one at a time — confirm before enabling several.
# ---------------------------------------------------------------------------
if 0:
    # StructInOutRet: builds the BTestActor.StructInOutRet function body —
    # records the received args, then returns hard-coded struct arrays/float.
    bp = FindBP('/Game/BTestActor.BTestActor')
    graph = GetFunctionGraph(bp, 'StructInOutRet')
    entry = NodeWrapper(graph.Nodes[0])
    argsStr = MakeArgsStr(graph, (entry.i, Utils.StrInt), (entry.inStructs, Utils.StrStructArray))
    recvNote = TestRecorderNote(graph, 'StructInOutRet', 'recv', argsStr.ReturnValue, entry.then)
    structs = []
    for name, num in ('Spring',5001), ('Summer',-5002), ('Fall',5003),('Winter',-5004):
        structs.append(MakeTestStruct(graph, name, num).TestStruct)
    outArray = MakeConnectedArray(graph, 'struct', TestStruct, structs)
    outF = MakeLiteral(graph, 101.125)
    structs = []
    for name, num in ('Brighton',16),('Alta',18),('Solitude',20):
        structs.append(MakeTestStruct(graph, name, num).TestStruct)
    retArray = MakeConnectedArray(graph, 'struct', TestStruct, structs)
    argsStr = MakeArgsStr(graph, (outArray.Array, Utils.StrStructArray), (outF.ReturnValue, Utils.StrFloat), (retArray.Array, Utils.StrStructArray))
    sendNote = TestRecorderNote(graph, 'StructInOutRet', 'send', argsStr.ReturnValue, recvNote.then)
    ret = GetReturnNode(graph)
    ret.execute = sendNote.then
    ret.outStructs = outArray.Array
    ret.of = outF.ReturnValue
    ret.ReturnValue = retArray.Array
if 0:
    # TestStructInOutRet: builds the BTester driver that records its args,
    # calls TestActor.StructInOutRet, and records everything it got back.
    # NOTE(review): unlike most Test* scripts below, this one has no
    # "Delete function first!" guard — confirm the function does not already
    # exist before enabling.
    bp = FindBP('/Game/BTester.BTester')
    graph = ue.blueprint_add_function(bp, 'TestStructInOutRet')
    entry = graph.Nodes[0]
    i = MakeLiteral(graph, 6357)
    structs = []
    for name, num in ('Dell',107), ('HP', 1000), ('Razor', 201):
        structs.append(MakeTestStruct(graph, name, num).TestStruct)
    a = MakeConnectedArray(graph, 'struct', TestStruct, structs)
    argsStr = MakeArgsStr(graph, (i.ReturnValue, Utils.StrInt), (a.Array, Utils.StrStructArray))
    preNote = TestRecorderNote(graph, 'tester', 'send', argsStr.ReturnValue, entry.node_find_pin('then'))
    ta = GetVariable(graph, 'TestActor')
    taCall = MakeCall(graph, TestActor.StructInOutRet)
    taCall.self = ta.TestActor
    taCall.i = i.ReturnValue
    taCall.inStructs = a.Array
    taCall.execute = preNote.then
    argsStr = MakeArgsStr(graph, (taCall.outStructs, Utils.StrStructArray), (taCall.of, Utils.StrFloat), (taCall.ReturnValue, Utils.StrStructArray))
    TestRecorderNote(graph, 'tester', 'recv', argsStr.ReturnValue, taCall.then)
if 0:
    # StructRet: BTestActor.StructRet — record the received int, return a
    # hard-coded struct array.
    bp = FindBP('/Game/BTestActor.BTestActor')
    graph = GetFunctionGraph(bp, 'StructRet')
    entry = NodeWrapper(graph.Nodes[0])
    argsStr = MakeArgsStr(graph, (entry.i, Utils.StrInt))
    recvNote = TestRecorderNote(graph, 'StructRet', 'recv', argsStr.ReturnValue, entry.then)
    structs = []
    for name, num in ('Red', 101), ('Blue', 102), ('Green', 103), ('Orange', 104):
        structs.append(MakeTestStruct(graph, name, num).TestStruct)
    array = MakeConnectedArray(graph, 'struct', TestStruct, structs)
    argsStr = MakeArgsStr(graph, (array.Array, Utils.StrStructArray))
    sendNote = TestRecorderNote(graph, 'StructRet', 'send', argsStr.ReturnValue, recvNote.then)
    ret = GetReturnNode(graph)
    ret.execute = sendNote.then
    ret.ReturnValue = array.Array
if 0:
    # TestStructRet: BTester driver for StructRet.  The try/except guard
    # relies on GetFunctionGraph raising AssertionError when the function is
    # missing; if it already exists the script aborts with an Exception.
    bp = FindBP('/Game/BTester.BTester')
    try:
        GetFunctionGraph(bp, 'TestStructRet')
        raise Exception('Delete function first!')
    except AssertionError:
        pass
    graph = ue.blueprint_add_function(bp, 'TestStructRet')
    entry = graph.Nodes[0]
    i = MakeLiteral(graph, 10242048)
    argsStr = MakeArgsStr(graph, (i.ReturnValue, Utils.StrInt))
    preNote = TestRecorderNote(graph, 'tester', 'send', argsStr.ReturnValue, entry.node_find_pin('then'))
    ta = GetVariable(graph, 'TestActor')
    taCall = MakeCall(graph, TestActor.StructRet)
    taCall.self = ta.TestActor
    taCall.i = i.ReturnValue
    taCall.execute = preNote.then
    argsStr = MakeArgsStr(graph, (taCall.ReturnValue, Utils.StrStructArray))
    TestRecorderNote(graph, 'tester', 'recv', argsStr.ReturnValue, taCall.then)
if 0:
    # StructOut: BTestActor.StructOut — record the received int, fill the
    # 'structs' and 'of' out-parameters with hard-coded values.
    bp = FindBP('/Game/BTestActor.BTestActor')
    graph = GetFunctionGraph(bp, 'StructOut')
    entry = NodeWrapper(graph.Nodes[0])
    argsStr = MakeArgsStr(graph, (entry.i, Utils.StrInt))
    recvNote = TestRecorderNote(graph, 'StructOut', 'recv', argsStr.ReturnValue, entry.then)
    structs = []
    for name, num in ('Monday', 5), ('toozdee', 10), ('Wed', 15), ('Thirsty', 20):
        structs.append(MakeTestStruct(graph, name, num).TestStruct)
    array = MakeConnectedArray(graph, 'struct', TestStruct, structs)
    of = MakeLiteral(graph, 9.895)
    argsStr = MakeArgsStr(graph, (array.Array, Utils.StrStructArray), (of.ReturnValue, Utils.StrFloat))
    sendNote = TestRecorderNote(graph, 'StructOut', 'send', argsStr.ReturnValue, recvNote.then)
    ret = GetReturnNode(graph)
    ret.execute = sendNote.then
    ret.structs = array.Array
    ret.of = of.ReturnValue
if 0:
    # TestStructOut: BTester driver for StructOut (guarded against re-creation).
    bp = FindBP('/Game/BTester.BTester')
    try:
        GetFunctionGraph(bp, 'TestStructOut')
        raise Exception('Delete function first!')
    except AssertionError:
        pass
    graph = ue.blueprint_add_function(bp, 'TestStructOut')
    entry = graph.Nodes[0]
    i = MakeLiteral(graph, 1234567)
    argsStr = MakeArgsStr(graph, (i.ReturnValue, Utils.StrInt))
    preNote = TestRecorderNote(graph, 'tester', 'send', argsStr.ReturnValue, entry.node_find_pin('then'))
    ta = GetVariable(graph, 'TestActor')
    taCall = MakeCall(graph, TestActor.StructOut)
    taCall.self = ta.TestActor
    taCall.i = i.ReturnValue
    taCall.execute = preNote.then
    argsStr = MakeArgsStr(graph, (taCall.structs, Utils.StrStructArray), (taCall.of, Utils.StrFloat))
    TestRecorderNote(graph, 'tester', 'recv', argsStr.ReturnValue, taCall.then)
if 0:
    # StructIn: event (no return value), so it is built on the event graph
    # (UberGraphPages[0]) rather than a function graph — record recv then send.
    bp = FindBP('/Game/BTestActor.BTestActor')
    graph = bp.UberGraphPages[0]
    entry = GetEventNode(bp, 'StructIn')
    argsStr = MakeArgsStr(graph, (entry.i, Utils.StrInt), (entry.structs, Utils.StrStructArray), (entry.f, Utils.StrFloat))
    recvNote = TestRecorderNote(graph, 'StructIn', 'recv', argsStr.ReturnValue, entry.then)
    sendNote = TestRecorderNote(graph, 'StructIn', 'send', 'None', recvNote.then)
if 0:
    # TestStructIn: BTester driver for StructIn (guarded against re-creation).
    bp = FindBP('/Game/BTester.BTester')
    try:
        GetFunctionGraph(bp, 'TestStructIn')
        raise Exception('Delete function first!')
    except AssertionError:
        pass
    graph = ue.blueprint_add_function(bp, 'TestStructIn')
    entry = graph.Nodes[0]
    i = MakeLiteral(graph, 1887)
    structs = []
    for name, num in ('Fingers', 10), ('Toes', 11), ('knees', 12), ('elboWS', 99):
        structs.append(MakeTestStruct(graph, name, num).TestStruct)
    array = MakeConnectedArray(graph, 'struct', TestStruct, structs)
    f = MakeLiteral(graph, -271.122)
    argsStr = MakeArgsStr(graph, (i.ReturnValue, Utils.StrInt), (array.Array, Utils.StrStructArray), (f.ReturnValue, Utils.StrFloat))
    preNote = TestRecorderNote(graph, 'tester', 'send', argsStr.ReturnValue, entry.node_find_pin('then'))
    ta = GetVariable(graph, 'TestActor')
    taCall = MakeCall(graph, TestActor.StructIn)
    taCall.self = ta.TestActor
    taCall.i = i.ReturnValue
    taCall.structs = array.Array
    taCall.f = f.ReturnValue
    taCall.execute = preNote.then
    TestRecorderNote(graph, 'tester', 'recv', 'None', taCall.then)
# ---------------------------------------------------------------------------
# Disabled one-shot scripts for the Actor-array scenarios.  Actors cannot be
# created as literals, so each script chains ParamActor.SpawnWithName calls on
# the exec line (prevLink) and wires the spawned ReturnValues into arrays via
# MakeConnectedArray.  The Test* drivers also chain DestroyActors calls to
# clean up everything that was spawned.  Enable one script at a time.
# ---------------------------------------------------------------------------
if 0:
    # ActorInOutRet: BTestActor.ActorInOutRet — record recv, spawn two sets
    # of named actors for the out-parameter and the return value.
    bp = FindBP('/Game/BTestActor.BTestActor')
    graph = GetFunctionGraph(bp, 'ActorInOutRet')
    entry = NodeWrapper(graph.Nodes[0])
    argsStr = MakeArgsStr(graph, (entry.i, Utils.StrInt), (entry.inActors, Utils.StrActorArray))
    recvNote = TestRecorderNote(graph, 'ActorInOutRet', 'recv', argsStr.ReturnValue, entry.then)
    prevLink = recvNote.then
    actors = []
    for name in 'Up Down Left Right'.split():
        spawn = MakeCall(graph, ParamActor.SpawnWithName)
        spawn.withName = name
        spawn.execute = prevLink
        prevLink = spawn.then
        actors.append(spawn.ReturnValue)
    outArray = MakeConnectedArray(graph, 'object', ParamActor, actors)
    outF = MakeLiteral(graph, 98.715)
    actors = []
    for name in 'North South East wEsT'.split():
        spawn = MakeCall(graph, ParamActor.SpawnWithName)
        spawn.withName = name
        spawn.execute = prevLink
        prevLink = spawn.then
        actors.append(spawn.ReturnValue)
    retArray = MakeConnectedArray(graph, 'object', ParamActor, actors)
    argsStr = MakeArgsStr(graph, (outArray.Array, Utils.StrActorArray), (outF.ReturnValue, Utils.StrFloat), (retArray.Array, Utils.StrActorArray))
    sendNote = TestRecorderNote(graph, 'ActorInOutRet', 'send', argsStr.ReturnValue, prevLink)
    ret = GetReturnNode(graph)
    ret.execute = sendNote.then
    ret.outActors = outArray.Array
    ret.of = outF.ReturnValue
    ret.ReturnValue = retArray.Array
if 0:
    # TestActorInOutRet: BTester driver — spawn inputs, call the method,
    # record results, then destroy the input, out-param and returned actors.
    # NOTE(review): no "Delete function first!" guard here — confirm the
    # function does not already exist before enabling.
    bp = FindBP('/Game/BTester.BTester')
    graph = ue.blueprint_add_function(bp, 'TestActorInOutRet')
    entry = NodeWrapper(graph.Nodes[0])
    prevLink = entry.then
    i = MakeLiteral(graph, 8675309)
    actors = []
    for name in 'Larry Curly Moe'.split():
        spawn = MakeCall(graph, ParamActor.SpawnWithName)
        spawn.withName = name
        spawn.execute = prevLink
        prevLink = spawn.then
        actors.append(spawn.ReturnValue)
    array = MakeConnectedArray(graph, 'object', ParamActor, actors)
    argsStr = MakeArgsStr(graph, (i.ReturnValue, Utils.StrInt), (array.Array, Utils.StrActorArray))
    sendNote = TestRecorderNote(graph, 'tester', 'send', argsStr.ReturnValue, prevLink)
    ta = GetVariable(graph, 'TestActor')
    taCall = MakeCall(graph, TestActor.ActorInOutRet)
    taCall.self = ta.TestActor
    taCall.i = i.ReturnValue
    taCall.inActors = array.Array
    taCall.execute = sendNote.then
    argsStr = MakeArgsStr(graph, (taCall.outActors, Utils.StrActorArray), (taCall.of, Utils.StrFloat), (taCall.ReturnValue, Utils.StrActorArray))
    recvNote = TestRecorderNote(graph, 'tester', 'recv', argsStr.ReturnValue, taCall.then)
    destroy = MakeCall(graph, ParamActor.DestroyActors)
    destroy.execute = recvNote.then
    destroy.actors = array.Array
    d2 = MakeCall(graph, ParamActor.DestroyActors)
    d2.execute = destroy.then
    d2.actors = taCall.outActors
    d3 = MakeCall(graph, ParamActor.DestroyActors)
    d3.execute = d2.then
    d3.actors = taCall.ReturnValue
if 0:
    # ActorRet: BTestActor.ActorRet — record recv, spawn actors, return them.
    bp = FindBP('/Game/BTestActor.BTestActor')
    graph = GetFunctionGraph(bp, 'ActorRet')
    entry = NodeWrapper(graph.Nodes[0])
    argsStr = MakeArgsStr(graph, (entry.i, Utils.StrInt))
    recvNote = TestRecorderNote(graph, 'ActorRet', 'recv', argsStr.ReturnValue, entry.then)
    prevLink = recvNote.then
    actors = []
    for name in 'Luke Han Leia Lando Bobba'.split():
        spawn = MakeCall(graph, ParamActor.SpawnWithName)
        spawn.withName = name
        spawn.execute = prevLink
        prevLink = spawn.then
        actors.append(spawn.ReturnValue)
    array = MakeConnectedArray(graph, 'object', ParamActor, actors)
    argsStr = MakeArgsStr(graph, (array.Array, Utils.StrActorArray))
    sendNote = TestRecorderNote(graph, 'ActorRet', 'send', argsStr.ReturnValue, prevLink)
    ret = GetReturnNode(graph)
    ret.execute = sendNote.then
    ret.ReturnValue = array.Array
if 0:
    # TestActorRet: BTester driver for ActorRet (guarded against re-creation);
    # destroys the returned actors after recording them.
    bp = FindBP('/Game/BTester.BTester')
    try:
        GetFunctionGraph(bp, 'TestActorRet')
        raise Exception('Delete function first!')
    except AssertionError:
        pass
    graph = ue.blueprint_add_function(bp, 'TestActorRet')
    entry = graph.Nodes[0]
    i = MakeLiteral(graph, 311111)
    argsStr = MakeArgsStr(graph, (i.ReturnValue, Utils.StrInt))
    preNote = TestRecorderNote(graph, 'tester', 'send', argsStr.ReturnValue, entry.node_find_pin('then'))
    ta = GetVariable(graph, 'TestActor')
    taCall = MakeCall(graph, TestActor.ActorRet)
    taCall.self = ta.TestActor
    taCall.i = i.ReturnValue
    taCall.execute = preNote.then
    argsStr = MakeArgsStr(graph, (taCall.ReturnValue, Utils.StrActorArray))
    recvNote = TestRecorderNote(graph, 'tester', 'recv', argsStr.ReturnValue, taCall.then)
    destroy = MakeCall(graph, ParamActor.DestroyActors)
    destroy.execute = recvNote.then
    destroy.actors = taCall.ReturnValue
if 0:
    # ActorOut: BTestActor.ActorOut — record recv, spawn actors and fill the
    # 'actors'/'of' out-parameters.
    bp = FindBP('/Game/BTestActor.BTestActor')
    graph = GetFunctionGraph(bp, 'ActorOut')
    entry = NodeWrapper(graph.Nodes[0])
    argsStr = MakeArgsStr(graph, (entry.i, Utils.StrInt))
    recvNote = TestRecorderNote(graph, 'ActorOut', 'recv', argsStr.ReturnValue, entry.then)
    prevLink = recvNote.then
    actors = []
    for name in 'Joseph Hyrum Alvin'.split():
        spawn = MakeCall(graph, ParamActor.SpawnWithName)
        spawn.withName = name
        spawn.execute = prevLink
        prevLink = spawn.then
        actors.append(spawn.ReturnValue)
    array = MakeConnectedArray(graph, 'object', ParamActor, actors)
    of = MakeLiteral(graph, 254.061)
    argsStr = MakeArgsStr(graph, (array.Array, Utils.StrActorArray), (of.ReturnValue, Utils.StrFloat))
    sendNote = TestRecorderNote(graph, 'ActorOut', 'send', argsStr.ReturnValue, prevLink)
    ret = GetReturnNode(graph)
    ret.execute = sendNote.then
    ret.actors = array.Array
    ret.of = of.ReturnValue
if 0:
    # TestActorOut: BTester driver for ActorOut (guarded against re-creation);
    # destroys the out-param actors after recording them.
    bp = FindBP('/Game/BTester.BTester')
    try:
        GetFunctionGraph(bp, 'TestActorOut')
        raise Exception('Delete function first!')
    except AssertionError:
        pass
    graph = ue.blueprint_add_function(bp, 'TestActorOut')
    entry = graph.Nodes[0]
    i = MakeLiteral(graph, 7455)
    argsStr = MakeArgsStr(graph, (i.ReturnValue, Utils.StrInt))
    preNote = TestRecorderNote(graph, 'tester', 'send', argsStr.ReturnValue, entry.node_find_pin('then'))
    ta = GetVariable(graph, 'TestActor')
    taCall = MakeCall(graph, TestActor.ActorOut)
    taCall.self = ta.TestActor
    taCall.i = i.ReturnValue
    taCall.execute = preNote.then
    argsStr = MakeArgsStr(graph, (taCall.actors, Utils.StrActorArray), (taCall.of, Utils.StrFloat))
    recvNote = TestRecorderNote(graph, 'tester', 'recv', argsStr.ReturnValue, taCall.then)
    destroy = MakeCall(graph, ParamActor.DestroyActors)
    destroy.execute = recvNote.then
    destroy.actors = taCall.actors
if 0:
    # ActorIn: event handler on the event graph — record recv then send.
    bp = FindBP('/Game/BTestActor.BTestActor')
    graph = bp.UberGraphPages[0]
    entry = GetEventNode(bp, 'ActorIn')
    argsStr = MakeArgsStr(graph, (entry.i, Utils.StrInt), (entry.actors, Utils.StrActorArray), (entry.f, Utils.StrFloat))
    recvNote = TestRecorderNote(graph, 'ActorIn', 'recv', argsStr.ReturnValue, entry.then)
    sendNote = TestRecorderNote(graph, 'ActorIn', 'send', 'None', recvNote.then)
if 0:
    # TestActorIn: BTester driver for ActorIn (guarded against re-creation);
    # spawns the input actors, calls the event, then destroys them.
    bp = FindBP('/Game/BTester.BTester')
    try:
        GetFunctionGraph(bp, 'TestActorIn')
        raise Exception('Delete function first!')
    except AssertionError:
        pass
    graph = ue.blueprint_add_function(bp, 'TestActorIn')
    entry = NodeWrapper(graph.Nodes[0])
    prevLink = entry.then
    actors = []
    for name in 'Joe Fred Jared Ed'.split():
        spawn = MakeCall(graph, ParamActor.SpawnWithName)
        spawn.withName = name
        spawn.execute = prevLink
        prevLink = spawn.then
        actors.append(spawn.ReturnValue)
    i = MakeLiteral(graph, 13)
    array = MakeConnectedArray(graph, 'object', ParamActor, actors)
    f = MakeLiteral(graph, -689.123)
    argsStr = MakeArgsStr(graph, (i.ReturnValue, Utils.StrInt), (array.Array, Utils.StrActorArray), (f.ReturnValue, Utils.StrFloat))
    preNote = TestRecorderNote(graph, 'tester', 'send', argsStr.ReturnValue, prevLink)
    ta = GetVariable(graph, 'TestActor')
    taCall = MakeCall(graph, TestActor.ActorIn)
    taCall.self = ta.TestActor
    taCall.i = i.ReturnValue
    taCall.actors = array.Array
    taCall.f = f.ReturnValue
    taCall.execute = preNote.then
    recvNote = TestRecorderNote(graph, 'tester', 'recv', 'None', taCall.then)
    destroy = MakeCall(graph, ParamActor.DestroyActors)
    destroy.execute = recvNote.then
    destroy.actors = array.Array
# ---------------------------------------------------------------------------
# Disabled one-shot scripts for the string-array scenarios.  Strings are
# literals, so arrays are built directly with MakeArray (no pin wiring
# needed).  Enable one script at a time.
# ---------------------------------------------------------------------------
if 0:
    # StringInOutRet: BTestActor.StringInOutRet — record recv, return
    # hard-coded string arrays/float via out-params and return value.
    bp = FindBP('/Game/BTestActor.BTestActor')
    graph = GetFunctionGraph(bp, 'StringInOutRet')
    entry = NodeWrapper(graph.Nodes[0])
    argsStr = MakeArgsStr(graph, (entry.i, Utils.StrInt), (entry.inStrings, Utils.StrStringArray))
    recvNote = TestRecorderNote(graph, 'StringInOutRet', 'recv', argsStr.ReturnValue, entry.then)
    outArray = MakeArray(graph, ['Origin','Rebates','Foreseen','Abner'])
    outF = MakeLiteral(graph, 77.115)
    retArray = MakeArray(graph, ['Battery', 'Mouse', 'Pad', 'Charger', 'Cord'])
    argsStr = MakeArgsStr(graph, (outArray.Array, Utils.StrStringArray), (outF.ReturnValue, Utils.StrFloat), (retArray.Array, Utils.StrStringArray))
    sendNote = TestRecorderNote(graph, 'StringInOutRet', 'send', argsStr.ReturnValue, recvNote.then)
    ret = GetReturnNode(graph)
    ret.execute = sendNote.then
    ret.outStrings = outArray.Array
    ret.of = outF.ReturnValue
    ret.ReturnValue = retArray.Array
if 0:
    # TestStringInOutRet: BTester driver for StringInOutRet.
    # NOTE(review): no "Delete function first!" guard — confirm the function
    # does not already exist before enabling.
    bp = FindBP('/Game/BTester.BTester')
    graph = ue.blueprint_add_function(bp, 'TestStringInOutRet')
    entry = graph.Nodes[0]
    i = MakeLiteral(graph, 73716)
    a = MakeArray(graph, ['One','Two','Three','Four','Five','Six'])
    argsStr = MakeArgsStr(graph, (i.ReturnValue, Utils.StrInt), (a.Array, Utils.StrStringArray))
    preNote = TestRecorderNote(graph, 'tester', 'send', argsStr.ReturnValue, entry.node_find_pin('then'))
    ta = GetVariable(graph, 'TestActor')
    taCall = MakeCall(graph, TestActor.StringInOutRet)
    taCall.self = ta.TestActor
    taCall.i = i.ReturnValue
    taCall.inStrings = a.Array
    taCall.execute = preNote.then
    argsStr = MakeArgsStr(graph, (taCall.outStrings, Utils.StrStringArray), (taCall.of, Utils.StrFloat), (taCall.ReturnValue, Utils.StrStringArray))
    TestRecorderNote(graph, 'tester', 'recv', argsStr.ReturnValue, taCall.then)
if 0:
    # StringRet: BTestActor.StringRet — record recv, return a string array.
    bp = FindBP('/Game/BTestActor.BTestActor')
    graph = GetFunctionGraph(bp, 'StringRet')
    entry = NodeWrapper(graph.Nodes[0])
    argsStr = MakeArgsStr(graph, (entry.i, Utils.StrInt))
    recvNote = TestRecorderNote(graph, 'StringRet', 'recv', argsStr.ReturnValue, entry.then)
    array = MakeArray(graph, ['Enero', 'Febrero', 'Marzo', 'Abril'])
    argsStr = MakeArgsStr(graph, (array.Array, Utils.StrStringArray))
    sendNote = TestRecorderNote(graph, 'StringRet', 'send', argsStr.ReturnValue, recvNote.then)
    ret = GetReturnNode(graph)
    ret.execute = sendNote.then
    ret.ReturnValue = array.Array
if 0:
    # TestStringRet: BTester driver for StringRet (guarded against re-creation).
    bp = FindBP('/Game/BTester.BTester')
    try:
        GetFunctionGraph(bp, 'TestStringRet')
        raise Exception('Delete function first!')
    except AssertionError:
        pass
    graph = ue.blueprint_add_function(bp, 'TestStringRet')
    entry = graph.Nodes[0]
    i = MakeLiteral(graph, 17761)
    argsStr = MakeArgsStr(graph, (i.ReturnValue, Utils.StrInt))
    preNote = TestRecorderNote(graph, 'tester', 'send', argsStr.ReturnValue, entry.node_find_pin('then'))
    ta = GetVariable(graph, 'TestActor')
    taCall = MakeCall(graph, TestActor.StringRet)
    taCall.self = ta.TestActor
    taCall.i = i.ReturnValue
    taCall.execute = preNote.then
    argsStr = MakeArgsStr(graph, (taCall.ReturnValue, Utils.StrStringArray))
    TestRecorderNote(graph, 'tester', 'recv', argsStr.ReturnValue, taCall.then)
if 0:
    # StringOut: BTestActor.StringOut — record recv, fill the 'strings'/'of'
    # out-parameters with hard-coded values.
    bp = FindBP('/Game/BTestActor.BTestActor')
    graph = GetFunctionGraph(bp, 'StringOut')
    entry = NodeWrapper(graph.Nodes[0])
    argsStr = MakeArgsStr(graph, (entry.i, Utils.StrInt))
    recvNote = TestRecorderNote(graph, 'StringOut', 'recv', argsStr.ReturnValue, entry.then)
    array = MakeArray(graph, ['Jan', 'February', 'MaRzO'])
    of = MakeLiteral(graph, -113.311)
    argsStr = MakeArgsStr(graph, (array.Array, Utils.StrStringArray), (of.ReturnValue, Utils.StrFloat))
    sendNote = TestRecorderNote(graph, 'StringOut', 'send', argsStr.ReturnValue, recvNote.then)
    ret = GetReturnNode(graph)
    ret.execute = sendNote.then
    ret.strings = array.Array
    ret.of = of.ReturnValue
if 0:
    # TestStringOut: BTester driver for StringOut (guarded against re-creation).
    bp = FindBP('/Game/BTester.BTester')
    try:
        GetFunctionGraph(bp, 'TestStringOut')
        raise Exception('Delete function first!')
    except AssertionError:
        pass
    graph = ue.blueprint_add_function(bp, 'TestStringOut')
    entry = graph.Nodes[0]
    i = MakeLiteral(graph, 12321)
    argsStr = MakeArgsStr(graph, (i.ReturnValue, Utils.StrInt))
    preNote = TestRecorderNote(graph, 'tester', 'send', argsStr.ReturnValue, entry.node_find_pin('then'))
    ta = GetVariable(graph, 'TestActor')
    taCall = MakeCall(graph, TestActor.StringOut)
    taCall.self = ta.TestActor
    taCall.i = i.ReturnValue
    taCall.execute = preNote.then
    argsStr = MakeArgsStr(graph, (taCall.strings, Utils.StrStringArray), (taCall.of, Utils.StrFloat))
    TestRecorderNote(graph, 'tester', 'recv', argsStr.ReturnValue, taCall.then)
if 0:
    # StringIn: event handler on the event graph — record recv then send.
    bp = FindBP('/Game/BTestActor.BTestActor')
    graph = bp.UberGraphPages[0]
    entry = GetEventNode(bp, 'StringIn')
    argsStr = MakeArgsStr(graph, (entry.i, Utils.StrInt), (entry.strings, Utils.StrStringArray), (entry.f, Utils.StrFloat))
    recvNote = TestRecorderNote(graph, 'StringIn', 'recv', argsStr.ReturnValue, entry.then)
    sendNote = TestRecorderNote(graph, 'StringIn', 'send', 'None', recvNote.then)
if 0:
    # TestStringIn: BTester driver for StringIn (guarded against re-creation).
    bp = FindBP('/Game/BTester.BTester')
    try:
        GetFunctionGraph(bp, 'TestStringIn')
        raise Exception('Delete function first!')
    except AssertionError:
        pass
    graph = ue.blueprint_add_function(bp, 'TestStringIn')
    entry = graph.Nodes[0]
    i = MakeLiteral(graph, 786)
    array = MakeArray(graph, ['Rachael', 'Jacob', 'Nathan', 'Adam'])
    f = MakeLiteral(graph, 3.142)
    argsStr = MakeArgsStr(graph, (i.ReturnValue, Utils.StrInt), (array.Array, Utils.StrStringArray), (f.ReturnValue, Utils.StrFloat))
    preNote = TestRecorderNote(graph, 'tester', 'send', argsStr.ReturnValue, entry.node_find_pin('then'))
    ta = GetVariable(graph, 'TestActor')
    taCall = MakeCall(graph, TestActor.StringIn)
    taCall.self = ta.TestActor
    taCall.i = i.ReturnValue
    taCall.strings = array.Array
    taCall.f = f.ReturnValue
    taCall.execute = preNote.then
    TestRecorderNote(graph, 'tester', 'recv', 'None', taCall.then)
# ---------------------------------------------------------------------------
# Disabled one-shot scripts for the FVector-array scenarios.  FVector values
# are passed to MakeArray as literals (FVector here comes from the wildcard
# helpers import — it is *not* the StructVector imported above).  Enable one
# script at a time.
# ---------------------------------------------------------------------------
if 0:
    # VectorInOutRet: BTestActor.VectorInOutRet — record recv, return
    # hard-coded vector arrays/float via out-params and return value.
    bp = FindBP('/Game/BTestActor.BTestActor')
    graph = GetFunctionGraph(bp, 'VectorInOutRet')
    entry = NodeWrapper(graph.Nodes[0])
    argsStr = MakeArgsStr(graph, (entry.i, Utils.StrInt), (entry.inVectors, Utils.StrVectorArray))
    recvNote = TestRecorderNote(graph, 'VectorInOutRet', 'recv', argsStr.ReturnValue, entry.then)
    outArray = MakeArray(graph, [FVector(1.111,2.222,3.333), FVector(4.444,5.555,6.666)])
    outF = MakeLiteral(graph, 1151.966)
    retArray = MakeArray(graph, [FVector(100.000,200.000,300.000), FVector(400.000,500.000,600.000), FVector(10.000,20.000,30.000), FVector(40.000,50.000,60.000)])
    argsStr = MakeArgsStr(graph, (outArray.Array, Utils.StrVectorArray), (outF.ReturnValue, Utils.StrFloat), (retArray.Array, Utils.StrVectorArray))
    sendNote = TestRecorderNote(graph, 'VectorInOutRet', 'send', argsStr.ReturnValue, recvNote.then)
    ret = GetReturnNode(graph)
    ret.execute = sendNote.then
    ret.outVectors = outArray.Array
    ret.of = outF.ReturnValue
    ret.ReturnValue = retArray.Array
if 0:
    # TestVectorInOutRet: BTester driver for VectorInOutRet.
    # NOTE(review): no "Delete function first!" guard — confirm the function
    # does not already exist before enabling.
    bp = FindBP('/Game/BTester.BTester')
    graph = ue.blueprint_add_function(bp, 'TestVectorInOutRet')
    entry = graph.Nodes[0]
    i = MakeLiteral(graph, 99411)
    a = MakeArray(graph, [FVector(10,11,12),FVector(13,14,15),FVector(16,17,18),FVector(19,20,-21)])
    argsStr = MakeArgsStr(graph, (i.ReturnValue, Utils.StrInt), (a.Array, Utils.StrVectorArray))
    preNote = TestRecorderNote(graph, 'tester', 'send', argsStr.ReturnValue, entry.node_find_pin('then'))
    ta = GetVariable(graph, 'TestActor')
    taCall = MakeCall(graph, TestActor.VectorInOutRet)
    taCall.self = ta.TestActor
    taCall.i = i.ReturnValue
    taCall.inVectors = a.Array
    taCall.execute = preNote.then
    argsStr = MakeArgsStr(graph, (taCall.outVectors, Utils.StrVectorArray), (taCall.of, Utils.StrFloat), (taCall.ReturnValue, Utils.StrVectorArray))
    TestRecorderNote(graph, 'tester', 'recv', argsStr.ReturnValue, taCall.then)
if 0:
    # VectorRet: BTestActor.VectorRet — record recv, return a vector array.
    bp = FindBP('/Game/BTestActor.BTestActor')
    graph = GetFunctionGraph(bp, 'VectorRet')
    entry = NodeWrapper(graph.Nodes[0])
    argsStr = MakeArgsStr(graph, (entry.i, Utils.StrInt))
    recvNote = TestRecorderNote(graph, 'VectorRet', 'recv', argsStr.ReturnValue, entry.then)
    array = MakeArray(graph, [FVector(11.225,-5.0,33.333), FVector(5,4,3), FVector(-1,-10,-100)])
    argsStr = MakeArgsStr(graph, (array.Array, Utils.StrVectorArray))
    sendNote = TestRecorderNote(graph, 'VectorRet', 'send', argsStr.ReturnValue, recvNote.then)
    ret = GetReturnNode(graph)
    ret.execute = sendNote.then
    ret.ReturnValue = array.Array
if 0:
    # TestVectorRet: BTester driver for VectorRet (guarded against re-creation).
    bp = FindBP('/Game/BTester.BTester')
    try:
        GetFunctionGraph(bp, 'TestVectorRet')
        raise Exception('Delete function first!')
    except AssertionError:
        pass
    graph = ue.blueprint_add_function(bp, 'TestVectorRet')
    entry = graph.Nodes[0]
    i = MakeLiteral(graph, 5110)
    argsStr = MakeArgsStr(graph, (i.ReturnValue, Utils.StrInt))
    preNote = TestRecorderNote(graph, 'tester', 'send', argsStr.ReturnValue, entry.node_find_pin('then'))
    ta = GetVariable(graph, 'TestActor')
    taCall = MakeCall(graph, TestActor.VectorRet)
    taCall.self = ta.TestActor
    taCall.i = i.ReturnValue
    taCall.execute = preNote.then
    argsStr = MakeArgsStr(graph, (taCall.ReturnValue, Utils.StrVectorArray))
    TestRecorderNote(graph, 'tester', 'recv', argsStr.ReturnValue, taCall.then)
if 0:
# VectorOut
bp = FindBP('/Game/BTestActor.BTestActor')
graph = GetFunctionGraph(bp, 'VectorOut')
entry = NodeWrapper(graph.Nodes[0])
argsStr = MakeArgsStr(graph, (entry.i, Utils.StrInt))
recvNote = TestRecorderNote(graph, 'VectorOut', 'recv', argsStr.ReturnValue, entry.then)
array = MakeArray(graph, [FVector(5.5, 4.5, 3.5),FVector(-1.2, -10, 5000),FVector(17.125, -105.177, 32.111)])
of = MakeLiteral(graph, 99.101)
argsStr = MakeArgsStr(graph, (array.Array, Utils.StrVectorArray), (of.ReturnValue, Utils.StrFloat))
sendNote = TestRecorderNote(graph, 'VectorOut', 'send', argsStr.ReturnValue, recvNote.then)
ret = GetReturnNode(graph)
ret.execute = sendNote.then
ret.vectors = array.Array
ret.of = of.ReturnValue
if 0:
# TestVectorOut
bp = FindBP('/Game/BTester.BTester')
try:
GetFunctionGraph(bp, 'TestVectorOut')
raise Exception('Delete function first!')
except AssertionError:
pass
graph = ue.blueprint_add_function(bp, 'TestVectorOut')
entry = graph.Nodes[0]
i = MakeLiteral(graph, 7777)
argsStr = MakeArgsStr(graph, (i.ReturnValue, Utils.StrInt))
preNote = TestRecorderNote(graph, 'tester', 'send', argsStr.ReturnValue, entry.node_find_pin('then'))
ta = GetVariable(graph, 'TestActor')
taCall = MakeCall(graph, TestActor.VectorOut)
taCall.self = ta.TestActor
taCall.i = i.ReturnValue
taCall.execute = preNote.then
argsStr = MakeArgsStr(graph, (taCall.vectors, Utils.StrVectorArray), (taCall.of, Utils.StrFloat))
TestRecorderNote(graph, 'tester', 'recv', argsStr.ReturnValue, taCall.then)
if 0:
# VectorIn
bp = FindBP('/Game/BTestActor.BTestActor')
graph = bp.UberGraphPages[0]
entry = GetEventNode(bp, 'VectorIn')
argsStr = MakeArgsStr(graph, (entry.i, Utils.StrInt), (entry.vectors, Utils.StrVectorArray), (entry.f, Utils.StrFloat))
recvNote = TestRecorderNote(graph, 'VectorIn', 'recv', argsStr.ReturnValue, entry.then)
sendNote = TestRecorderNote(graph, 'VectorIn', 'send', 'None', recvNote.then)
if 0:
# TestVectorIn
bp = FindBP('/Game/BTester.BTester')
try:
GetFunctionGraph(bp, 'TestVectorIn')
raise Exception('Delete function first!')
except AssertionError:
pass
graph = ue.blueprint_add_function(bp, 'TestVectorIn')
entry = graph.Nodes[0]
i = MakeLiteral(graph, 3819)
array = MakeArray(graph, [FVector(1,2,3),FVector(4,5,6)])
f = MakeLiteral(graph, 117.880)
argsStr = MakeArgsStr(graph, (i.ReturnValue, Utils.StrInt), (array.Array, Utils.StrVectorArray), (f.ReturnValue, Utils.StrFloat))
preNote = TestRecorderNote(graph, 'tester', 'send', argsStr.ReturnValue, entry.node_find_pin('then'))
ta = GetVariable(graph, 'TestActor')
taCall = MakeCall(graph, TestActor.VectorIn)
taCall.self = ta.TestActor
taCall.i = i.ReturnValue
taCall.vectors = array.Array
taCall.f = f.ReturnValue
taCall.execute = preNote.then
TestRecorderNote(graph, 'tester', 'recv', 'None', taCall.then)
if 0:
# BoolInOutRet
bp = FindBP('/Game/BTestActor.BTestActor')
graph = GetFunctionGraph(bp, 'BoolInOutRet')
entry = NodeWrapper(graph.Nodes[0])
argsStr = MakeArgsStr(graph, (entry.i, Utils.StrInt), (entry.inBools, Utils.StrBoolArray))
recvNote = TestRecorderNote(graph, 'BoolInOutRet', 'recv', argsStr.ReturnValue, entry.then)
outArray = MakeArray(graph, [True, False, False, True, True, True, True, True, False])
outF = MakeLiteral(graph, 1125.865)
retArray = MakeArray(graph, [True, True, False, False, False, True, False, False, True, True])
argsStr = MakeArgsStr(graph, (outArray.Array, Utils.StrBoolArray), (outF.ReturnValue, Utils.StrFloat), (retArray.Array, Utils.StrBoolArray))
sendNote = TestRecorderNote(graph, 'BoolInOutRet', 'send', argsStr.ReturnValue, recvNote.then)
ret = GetReturnNode(graph)
ret.execute = sendNote.then
ret.outBools = outArray.Array
ret.of = outF.ReturnValue
ret.ReturnValue = retArray.Array
if 0:
# TestBoolInOutRet
bp = FindBP('/Game/BTester.BTester')
graph = ue.blueprint_add_function(bp, 'TestBoolInOutRet')
entry = graph.Nodes[0]
i = MakeLiteral(graph, 32711)
a = MakeArray(graph, [False, False, True, False, False, True, False, True, True, True])
argsStr = MakeArgsStr(graph, (i.ReturnValue, Utils.StrInt), (a.Array, Utils.StrBoolArray))
preNote = TestRecorderNote(graph, 'tester', 'send', argsStr.ReturnValue, entry.node_find_pin('then'))
ta = GetVariable(graph, 'TestActor')
taCall = MakeCall(graph, TestActor.BoolInOutRet)
taCall.self = ta.TestActor
taCall.i = i.ReturnValue
taCall.inBools = a.Array
taCall.execute = preNote.then
argsStr = MakeArgsStr(graph, (taCall.outBools, Utils.StrBoolArray), (taCall.of, Utils.StrFloat), (taCall.ReturnValue, Utils.StrBoolArray))
TestRecorderNote(graph, 'tester', 'recv', argsStr.ReturnValue, taCall.then)
if 0:
# BoolRet
bp = FindBP('/Game/BTestActor.BTestActor')
for graph in bp.FunctionGraphs:
if graph.get_name() == 'BoolRet':
break
else:
graph = None
entry = NodeWrapper(graph.Nodes[0])
argsStr = MakeArgsStr(graph, (entry.i, Utils.StrInt))
recvNote = TestRecorderNote(graph, 'BoolRet', 'recv', argsStr.ReturnValue, entry.then)
array = MakeArray(graph, [True, False, True, False, False, True, True])
argsStr = MakeArgsStr(graph, (array.Array, Utils.StrBoolArray))
sendNote = TestRecorderNote(graph, 'BoolRet', 'send', argsStr.ReturnValue, recvNote.then)
ret = GetReturnNode(graph)
ret.execute = sendNote.then
ret.ReturnValue = array.Array
if 0:
# TestBoolRet
bp = FindBP('/Game/BTester.BTester')
graph = ue.blueprint_add_function(bp, 'TestBoolRet')
entry = graph.Nodes[0]
i = MakeLiteral(graph, 6991)
argsStr = MakeArgsStr(graph, (i.ReturnValue, Utils.StrInt))
preNote = TestRecorderNote(graph, 'tester', 'send', argsStr.ReturnValue, entry.node_find_pin('then'))
ta = GetVariable(graph, 'TestActor')
taCall = MakeCall(graph, TestActor.BoolRet)
taCall.self = ta.TestActor
taCall.i = i.ReturnValue
taCall.execute = preNote.then
argsStr = MakeArgsStr(graph, (taCall.ReturnValue, Utils.StrBoolArray))
TestRecorderNote(graph, 'tester', 'recv', argsStr.ReturnValue, taCall.then)
if 0:
# TestBoolOut
# Add a test node
bp = FindBP('/Game/BTester.BTester')
graph = ue.blueprint_add_function(bp, 'TestBoolOut')
entry = graph.Nodes[0]
# build args
i = MakeLiteral(graph, 81)
# make args string
argsStr = MakeArgsStr(graph, (i.ReturnValue, Utils.StrInt))
# note in test recorder
preNote = TestRecorderNote(graph, 'tester', 'send', argsStr.ReturnValue, entry.node_find_pin('then'))
# call func
ta = GetVariable(graph, 'TestActor')
taCall = MakeCall(graph, TestActor.BoolOut)
taCall.self = ta.TestActor
taCall.i = i.ReturnValue
taCall.execute = preNote.then
# combine result
argsStr = MakeArgsStr(graph, (taCall.bools, Utils.StrBoolArray), (taCall.of, Utils.StrFloat))
TestRecorderNote(graph, 'tester', 'recv', argsStr.ReturnValue, taCall.then)
if 0:
# TestBoolIn
# Add a test node
bp = FindBP('/Game/BTester.BTester')
graph = ue.blueprint_add_function(bp, 'TestBoolIn')
entry = graph.Nodes[0]
# build args
i = MakeLiteral(graph, 44)
a = MakeArray(graph, [True, False, False, True, True])
f = MakeLiteral(graph, 202.511)
# make args string
argsStr = MakeArgsStr(graph, (i.ReturnValue, Utils.StrInt), (a.Array, Utils.StrBoolArray), (f.ReturnValue, Utils.StrFloat))
# note in test recorder
preNote = TestRecorderNote(graph, 'tester', 'send', argsStr.ReturnValue, entry.node_find_pin('then'))
# call func
ta = GetVariable(graph, 'TestActor')
taCall = MakeCall(graph, TestActor.BoolIn)
taCall.self = ta.TestActor
taCall.i = i.ReturnValue
taCall.bools = a.Array
taCall.f = f.ReturnValue
taCall.execute = preNote.then
# combine result
# (not needed here)
TestRecorderNote(graph, 'tester', 'recv', 'None', taCall.then)
| [
6738,
49385,
1330,
1635,
198,
6738,
22865,
62,
18392,
13,
37724,
1330,
357,
14573,
4798,
11,
509,
17,
19667,
62,
22203,
23004,
11,
509,
17,
19667,
62,
12050,
19182,
355,
509,
17,
19667,
62,
12050,
19182,
11,
509,
17,
19667,
62,
9237,
... | 2.546026 | 14,166 |
import numpy as np
import argparse
import pdb  # retained from original imports; handy for interactive debugging
import sys


def _load_time_ranks(pred_path):
    """Read one epoch's time predictions and return, per test example, a dict
    mapping candidate-time index -> predicted rank (0 = best/lowest score).

    Each line of the prediction file holds one score per candidate time step.
    """
    ranks_per_row = []
    # `with` fixes the original's leak: files were reopened every epoch and
    # never closed.
    with open(pred_path) as pred_file:
        for line in pred_file:
            scored = [(float(score), idx) for idx, score in enumerate(line.split())]
            scored.sort(key=lambda pair: pair[0])
            ranks_per_row.append({idx: rank for rank, (_, idx) in enumerate(scored)})
    return ranks_per_row


parser = argparse.ArgumentParser(description='Eval model outputs')
parser.add_argument('-model', dest="model", required=True, help='Dataset to use')
parser.add_argument('-test_freq', dest="freq", required=True, type=int, help='what is to be predicted')
parser.add_argument('-eval_mode', dest="eval_mode", required=True, help='To evaluate test or validation')
args = parser.parse_args()

best_rank = sys.maxsize
best_epoch = None
print(args.model)

# Evaluate every saved checkpoint (one prediction file per `freq` epochs).
for k in range(args.freq, 30000, args.freq):
    rank_lookup = _load_time_ranks('temp_scope/' + args.model + '/valid_time_pred_{}.txt'.format(k))

    ranks_time = []
    with open('temp_scope/' + args.model + '/valid.txt') as valid_output:
        for i, row in enumerate(valid_output):
            # Each gold row is "<start_time> <end_time>"; score the example by
            # the best (lowest) rank achieved anywhere inside that window.
            start_time = int(row.split()[0])
            end_time = int(row.split()[1])
            window_ranks = [rank_lookup[i][e] for e in range(start_time, end_time + 1)]
            ranks_time.append(np.min(np.array(window_ranks)))

    print('Epoch {} : time_rank {}'.format(k, np.mean(np.array(ranks_time))))

    if args.eval_mode == 'valid':
        # Ranks are converted to 1-based before averaging, as in the original.
        mean_rank = np.mean(np.array(ranks_time) + 1)
        if mean_rank < best_rank:
            best_rank = mean_rank
            best_epoch = k
            print('------------------------------------------')
            print('Best Validation Epoch till now Epoch {}, time rank: {}'.format(best_epoch, best_rank))
            print('------------------------------------------')
11748,
299,
32152,
355,
45941,
198,
11748,
1822,
29572,
11,
279,
9945,
11,
25064,
198,
198,
48610,
796,
1822,
29572,
13,
28100,
1713,
46677,
7,
11213,
11639,
36,
2100,
2746,
23862,
11537,
198,
48610,
13,
2860,
62,
49140,
10786,
12,
1984... | 2.294118 | 1,139 |
# Copyright (c) 2021 IBM Corporation
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import numpy as np
import matplotlib.pyplot as plt
from . import reid_utils_fn
from . import kf_utils
from scipy.optimize import linear_sum_assignment
| [
2,
15069,
357,
66,
8,
33448,
220,
19764,
10501,
198,
2,
198,
2,
2448,
3411,
318,
29376,
7520,
11,
1479,
286,
3877,
11,
284,
597,
1048,
16727,
257,
4866,
198,
2,
286,
428,
3788,
290,
3917,
10314,
3696,
357,
1169,
366,
25423,
12340,
... | 3.744745 | 333 |
"""Endpoints to start the analysis."""
import redis
import rfc3987
import uuid
import time
from flask import Blueprint, abort, current_app, request, session
from tsa.extensions import redis_pool
from tsa.tasks.batch import inspect_graph
# Flask blueprint under which the analysis endpoints below are registered.
blueprint = Blueprint('analyze', __name__, static_folder='../static')
@blueprint.route('/api/v1/analyze/catalog', methods=['POST'])
def api_analyze_catalog():
    """Analyze a catalog."""
    # Every client session gets a stable token used to group its batch tasks.
    if 'token' not in session:
        session['token'] = str(uuid.uuid4())
    token = session['token']

    endpoint_iri = request.args.get('sparql', None)
    named_graph = request.args.get('graph', None)

    # Both parameters must be present and be syntactically valid IRIs.
    request_is_valid = (
        endpoint_iri is not None
        and named_graph is not None
        and rfc3987.match(endpoint_iri)
        and rfc3987.match(named_graph)
    )
    if not request_is_valid:
        abort(400)

    current_app.logger.info(f'Analyzing endpoint {endpoint_iri}, named graph {named_graph}')
    red = redis.Redis(connection_pool=redis_pool)

    # Throttling: wait while this session's batch queue is too long.
    key = f'batch:{token}'
    while True:
        queue_length = red.scard(key)
        if queue_length <= 1000:
            break
        current_app.logger.warning(f'Queue length: {queue_length}')
        time.sleep(60)

    task = inspect_graph.si(endpoint_iri, named_graph).apply_async()
    current_app.logger.info(f'Batch id: {token}, task id: {task.id}')
    red.hset('taskBatchId', task.id, token)
    return ''
| [
37811,
12915,
13033,
284,
923,
262,
3781,
526,
15931,
198,
11748,
2266,
271,
198,
11748,
374,
16072,
2670,
5774,
198,
11748,
334,
27112,
198,
11748,
640,
198,
6738,
42903,
1330,
39932,
11,
15614,
11,
1459,
62,
1324,
11,
2581,
11,
6246,
... | 2.307309 | 602 |
# encoding: utf-8
"""
@author: Yang Qian
@contact: yqian@aibee.com
"""
import glob
import os.path as osp
import re
import warnings
import numpy as np
from .bases import ImageDataset
from ..datasets import DATASET_REGISTRY
@DATASET_REGISTRY.register()
class NAIC(ImageDataset):
"""NAIC.
URL: `<https://naic.pcl.ac.cn/frame/3>`_
"""
_junk_pids = [-1]
dataset_dir = ''
dataset_name = "NAIC" | [
2,
21004,
25,
3384,
69,
12,
23,
198,
37811,
198,
31,
9800,
25,
220,
10998,
44696,
198,
31,
32057,
25,
331,
80,
666,
31,
64,
571,
1453,
13,
785,
198,
37811,
198,
198,
11748,
15095,
198,
11748,
28686,
13,
6978,
355,
267,
2777,
198,
... | 2.335196 | 179 |
import threading
import Queue
from tgbot import logging
from tgbot.async import Future
| [
11748,
4704,
278,
198,
11748,
4670,
518,
198,
198,
6738,
256,
70,
13645,
1330,
18931,
198,
6738,
256,
70,
13645,
13,
292,
13361,
1330,
10898,
198
] | 3.384615 | 26 |
import requests
import pandas
import re
from pandas.io.json import json_normalize
from configs import Config
from movieflix.models import Movie
from django.urls import reverse
# Script entry point: run main() only when executed directly, not on import.
if __name__ == '__main__':
    main()
11748,
7007,
198,
11748,
19798,
292,
198,
11748,
302,
198,
6738,
19798,
292,
13,
952,
13,
17752,
1330,
33918,
62,
11265,
1096,
198,
6738,
4566,
82,
1330,
17056,
198,
6738,
1409,
2086,
75,
844,
13,
27530,
1330,
15875,
198,
6738,
42625,
... | 3.253731 | 67 |
from __future__ import print_function
from __future__ import division
from __future__ import absolute_import
from builtins import object
import sys
import os
import unittest
import maya.cmds as cmds
import maya.OpenMaya as om
import maya.OpenMayaAnim as oma
import maya.OpenMayaFX as omfx
import pymel.versions
from pymel.util.testing import TestCaseExtended
# When running outside an interactive Maya session, cmds has no 'about'
# command yet; boot the standalone interpreter so maya.cmds is functional.
if not hasattr(cmds, 'about'):
    import maya.standalone
    maya.standalone.initialize()
#===============================================================================
# Current Bugs
#===============================================================================
# For CURRENT bugs, we PASS is the bug is still present, and FAIL if it goes
# away... this may be counter-intuitive, but it acts as an alert if a bug is
# fixed (so we can possibly get rid of yucky work-around code...)
# Bug report 378211
# Bug report 378192
# Bug report 344037
# Bug report 345384
# This bug only seems to affect windows (or at least, Win x64 -
# haven't tried on 32-bit).
# Introduced in maya 2014
# Change request #: BSPR-12597
# This test gives inconsistent results - the bug will show up (meaning the
# unittest "passes") if the test is run by itself (or just this module is run),
# but the bug will not show up (meaning the unittest "fails") if the entire test
# suite is run
@unittest.skip("inconsistent results")
#===============================================================================
# Current bugs that will cause Maya to CRASH (and so are commented out!)
#===============================================================================
# This is commented out as it will cause a CRASH - uncomment out (or just
# copy/ paste the relevant code into the script editor) to test if it's still
# causing a crash...
# If you're copy / pasting into a script editor, in order for a crash to occur,
# all lines must be executed at once - if you execute one at a time, there will
# be no crash
# Also, I'm making the code in each of the test functions self-contained (ie,
# has all imports, etc) for easy copy-paste testing...
# class TestSubdivSelectCrash(unittest.TestCase):
# def testCmds(self):
# import maya.cmds as cmds
# cmds.file(new=1, f=1)
# polyCube = cmds.polyCube()[0]
# subd = cmds.polyToSubdiv(polyCube)[0]
# cmds.select(subd + '.sme[*][*]')
#
# def testApi(self):
# import maya.cmds as cmds
# import maya.OpenMaya as om
#
# polyCube = cmds.polyCube()[0]
# subd = cmds.polyToSubdiv(polyCube)[0]
# selList = om.MSelectionList()
# selList.add(subd + '.sme[*][*]')
#===============================================================================
# FIXED (Former) Bugs
#===============================================================================
# Fixed in Maya 2009! yay!
# Fixed ! Yay! (...though I've only check on win64...)
# (not sure when... was fixed by time of 2011 Hotfix 1 - api 201101,
# and still broken in 2009 SP1a - api 200906)
# Bug report 345382
# Fixed ! Yay! (...though I've only check on win64...)
# (not sure when... was fixed by time of 2011 Hotfix 1 - api 201101,
# and still broken in 2009 SP1a - api 200906)
# nucleus node fixed in 2014
# symmetryConstraint fixed in 2015
# transferAttributes fixed <= 2016.5
# jointFFd fixed in 2021
# These probably aren't strictly considered "bugs" by autodesk, though I
# think they should be...
# Fixed in 2014! yay!
class TestGroupUniqueness(unittest.TestCase):
'''Test to check whether cmds.group returns a unique name
'''
| [
6738,
11593,
37443,
834,
1330,
3601,
62,
8818,
198,
6738,
11593,
37443,
834,
1330,
7297,
198,
6738,
11593,
37443,
834,
1330,
4112,
62,
11748,
198,
6738,
3170,
1040,
1330,
2134,
198,
11748,
25064,
198,
11748,
28686,
198,
11748,
555,
715,
... | 3.286368 | 1,093 |
# Imports function labels from PPSSPP .sym file.
#@author Kotcrab
#@category Data
sym = askFile("Select PPSSPP .sym file", "Select")
offsetBy = askInt("Offset addresses by", "Offset")
skipZun = True
makePrimary = True
for line in file(sym.absolutePath):
parts = line.split(" ")
address = toAddr(long(parts[0], 16) + offsetBy)
name = parts[1].rsplit(",", 1)[0]
if skipZun and name.startswith("z_un_"):
continue
print "Create label", name, "at", address
createLabel(address, name, makePrimary)
| [
2,
1846,
3742,
2163,
14722,
422,
350,
3705,
4303,
47,
764,
37047,
2393,
13,
198,
2,
31,
9800,
21702,
6098,
397,
198,
2,
31,
22872,
6060,
198,
198,
37047,
796,
1265,
8979,
7203,
17563,
350,
3705,
4303,
47,
764,
37047,
2393,
1600,
366... | 2.683673 | 196 |
#!/usr/bin/python
from sys import argv as sys_argv
from sys import exit as sys_exit
try:
inp_file = sys_argv[1]
out_file = sys_argv[2]
except:
print "USAGE: \n convert2gaf.py name_of_input_file name_of_output_file \n e.g. \n convert2gaf.py Wenner.dat Wenner_gaf.dat \n\n"
sys_exit()
inp = open(inp_file,'r')
out = open(out_file,'w')
# Name of survey line
line = inp.readline()
out.write(line)
# Unit electrode spacing
spac = inp.readline()
out.write(spac.lstrip())
# Array type (11 for general array)
intype = int(inp.readline()) # 1-Wenner, 3-Dipole-Dipole, 7-Schlumberger
out.write('11\r\n')
# Array type, 0 non-specific
out.write('0\r\n')
# Header
out.write('Type of measurement (0=app. resistivity,1=resistance)\r\n')
out.write('0\r\n') # to indicate app. resistivity
# Number of data points
numdata = inp.readline()
out.write(numdata.lstrip())
numdata = int(numdata)
# Type of x-location for data points, 1 for mid-point
line = inp.readline()
if line == 1:
stop
out.write('2\r\n') # 0-no topography, 1-true horizontal distance, 2-ground distance
# Flag for I.P. data, 0 for none (1 if present)
line = inp.readline()
out.write(line)
# Data Points
for i in range(numdata):
line = inp.readline()
line = line.split()
x0 = float(line[0])
a = float(line[1])
if intype == 1:
res = float(line[2])
else:
n = float(line[2])
res = float(line[3])
C1 = x0
if intype == 1: # Wenner
C2 = x0 + 3*a
P1 = x0 + a
P2 = x0 + 2*a
elif intype == 3: # Dipole-Dipole
C2 = x0 + a
P1 = x0 + a*(n+1)
P2 = x0 + a*(n+2)
elif intype == 7: # Schlumberger
C2 = x0 + a*(2*n+1)
P1 = x0 + a*n
P2 = x0 + a*(n+1)
out.write('4 {0:.4f} 0.0000 {1:.4f} 0.0000 {2:.4f} 0.0000 {3:.4f} 0.0000 {4:.4f}\r\n'.format(C1,C2,P1,P2,res))
for i in range(5):
out.write('0\r\n')
inp.close()
out.close()
| [
2,
48443,
14629,
14,
8800,
14,
29412,
198,
198,
6738,
25064,
1330,
1822,
85,
355,
25064,
62,
853,
85,
198,
6738,
25064,
1330,
8420,
355,
25064,
62,
37023,
198,
198,
28311,
25,
198,
220,
220,
220,
287,
79,
62,
7753,
796,
25064,
62,
... | 2.037306 | 965 |
"""
Module implementing sampling pipelines
"""
import abc
from typing import Any, Dict, List, Optional, Tuple, Union
import numpy as np
from pydantic import Field, validator
from eolearn.core import EONode, EOWorkflow, FeatureType, LoadTask, MergeEOPatchesTask, OverwritePermission, SaveTask
from eolearn.geometry import MorphologicalOperations, MorphologicalStructFactory
from eolearn.ml_tools import BlockSamplingTask, FractionSamplingTask, GridSamplingTask
from ..core.pipeline import Pipeline
from ..tasks.common import ClassFilterTask
from ..utils.filter import get_patches_with_missing_features
from ..utils.types import Feature, FeatureSpec
class BaseSamplingPipeline(Pipeline, metaclass=abc.ABCMeta):
    """Pipeline to run sampling on EOPatches"""

    config: Schema

    def filter_patch_list(self, patch_list: List[str]) -> List[str]:
        """Filter output EOPatches that have already been processed"""
        filtered_patch_list = get_patches_with_missing_features(
            self.storage.filesystem,
            self.storage.get_folder(self.config.output_folder_key),
            patch_list,
            self._get_output_features(),
        )
        return filtered_patch_list

    def build_workflow(self) -> EOWorkflow:
        """Creates workflow that is divided into the following sub-parts:
        1. loading data,
        2. preprocessing steps,
        3. sampling features
        4. saving results
        """
        loading_node = self._get_loading_node()
        preprocessing_node = self._get_preprocessing_node(loading_node)
        sampling_node = self._get_sampling_node(preprocessing_node)

        save_task = SaveTask(
            self.storage.get_folder(self.config.output_folder_key, full_path=True),
            features=self._get_output_features(),
            compress_level=self.config.compress_level,
            overwrite_permission=OverwritePermission.OVERWRITE_FEATURES,
            config=self.sh_config,
        )
        return EOWorkflow.from_endnodes(EONode(save_task, inputs=[sampling_node]))

    def _get_loading_node(self) -> EONode:
        """Prepares nodes for loading and joining EOPatches."""
        load_nodes = []
        for folder_name, features in self.config.apply_to.items():
            load_features: List[FeatureSpec] = []
            for feature_type_str, feature_names in features.items():
                feature_type = FeatureType(feature_type_str)
                # Sampling is a spatial operation; reject anything non-spatial early.
                if not feature_type.is_spatial():
                    raise ValueError(f"Only spatial features can be sampled, but found {feature_type}: {feature_names}")

                for feature_name in feature_names:
                    load_features.append((feature_type, feature_name))

            load_features.append(FeatureType.BBOX)
            # Temporal features additionally require timestamps to be loaded.
            if any(FeatureType(feature_type).is_temporal() for feature_type in features):
                load_features.append(FeatureType.TIMESTAMP)

            load_task = LoadTask(
                self.storage.get_folder(folder_name, full_path=True),
                lazy_loading=True,
                features=load_features,
                config=self.sh_config,
            )
            load_nodes.append(EONode(load_task, name=f"Load from {folder_name}"))

        return EONode(MergeEOPatchesTask(), inputs=load_nodes)

    def _get_preprocessing_node(self, previous_node: EONode) -> EONode:  # pylint: disable=no-self-use
        """The default implementation doesn't add any preprocessing steps"""
        return previous_node

    @abc.abstractmethod
    def _get_sampling_node(self, previous_node: EONode) -> EONode:
        """Method to prepare sampling nodes"""

    def _get_features_to_sample(self) -> List[Tuple[FeatureType, str, str]]:
        """Get a list of features that will be sampled, together with their new names"""
        features_to_sample = []
        # Folder names are irrelevant here, so iterate only over the feature mappings.
        for features in self.config.apply_to.values():
            for feature_type, feature_names in features.items():
                for feature_name in feature_names:
                    # The sampled feature keeps its name unless a suffix is configured.
                    if self.config.sampled_suffix is None:
                        sampled_name = feature_name
                    else:
                        sampled_name = f"{feature_name}_{self.config.sampled_suffix}"
                    features_to_sample.append((feature_type, feature_name, sampled_name))
        return features_to_sample

    def _get_mask_of_samples_feature(self) -> Optional[Feature]:
        """Provide a mask of samples feature"""
        if self.config.mask_of_samples_name:
            return FeatureType.MASK_TIMELESS, self.config.mask_of_samples_name
        return None

    def _get_output_features(self) -> List[FeatureSpec]:
        """Get a list of features that will be saved as an output of the pipeline"""
        output_features: List[FeatureSpec] = [FeatureType.BBOX]

        features_to_sample = self._get_features_to_sample()
        for feature_type, _, sampled_feature_name in features_to_sample:
            output_features.append((feature_type, sampled_feature_name))

        mask_of_samples_feature = self._get_mask_of_samples_feature()
        if mask_of_samples_feature:
            output_features.append(mask_of_samples_feature)

        if any(feature_type.is_temporal() for feature_type, _, _ in features_to_sample):
            output_features.append(FeatureType.TIMESTAMP)

        return output_features
class BaseRandomSamplingPipeline(BaseSamplingPipeline, metaclass=abc.ABCMeta):
    """A base class for all sampling pipeline that work on random selection of samples"""

    config: Schema

    def get_execution_arguments(self, workflow: EOWorkflow) -> List[Dict[EONode, Dict[str, object]]]:
        """Extends the basic method for adding execution arguments by adding seed arguments a sampling task"""
        exec_args = super().get_execution_arguments(workflow)
        # `_sampling_node_uid` is set by subclasses in `_get_sampling_node`;
        # if the node cannot be found there is nothing to seed.
        sampling_node = workflow.get_node_with_uid(self._sampling_node_uid)
        if sampling_node is None:
            return exec_args

        # A single master generator (seeded from config for reproducibility)
        # hands out an independent 32-bit seed per patch execution.
        generator = np.random.default_rng(seed=self.config.seed)
        for arg_index in range(len(self.patch_list)):
            exec_args[arg_index][sampling_node] = dict(seed=generator.integers(low=0, high=2**32))
        return exec_args
class FractionSamplingPipeline(BaseRandomSamplingPipeline):
    """A pipeline to sample per-class with different distributions"""

    config: Schema

    def _get_preprocessing_node(self, previous_node: EONode) -> EONode:
        """Preprocessing that applies erosion on sampling feature values"""
        if self.config.erosion_dict is None:
            return previous_node

        # Chain one erosion node per (radius, labels) entry; each node
        # consumes the output of the previous one.
        end_node = previous_node
        for radius, labels in self.config.erosion_dict.items():
            task = ClassFilterTask(
                (FeatureType.MASK_TIMELESS, self.config.sampling_feature_name),
                labels,
                MorphologicalOperations.EROSION,
                struct_elem=MorphologicalStructFactory.get_disk(radius),
            )
            end_node = EONode(task, inputs=[end_node])

        return end_node

    def _get_sampling_node(self, previous_node: EONode) -> EONode:
        """Prepare the sampling task"""
        task = FractionSamplingTask(
            features_to_sample=self._get_features_to_sample(),
            sampling_feature=(FeatureType.MASK_TIMELESS, self.config.sampling_feature_name),
            fraction=self.config.fraction_of_samples,
            exclude_values=self.config.exclude_values,
            mask_of_samples=self._get_mask_of_samples_feature(),
        )
        node = EONode(task, inputs=[previous_node])
        # Remember the node uid so the base class can inject per-execution seeds.
        self._sampling_node_uid = node.uid
        return node
class BlockSamplingPipeline(BaseRandomSamplingPipeline):
    """A pipeline to randomly sample blocks"""

    config: Schema

    def _get_sampling_node(self, previous_node: EONode) -> EONode:
        """Prepare the sampling task"""
        task = BlockSamplingTask(
            features_to_sample=self._get_features_to_sample(),
            # A configured fraction takes precedence; otherwise fall back to
            # an absolute number of samples.
            amount=self.config.fraction_of_samples or self.config.number_of_samples,
            sample_size=self.config.sample_size,
            mask_of_samples=self._get_mask_of_samples_feature(),
        )
        node = EONode(task, inputs=[previous_node])
        # Remember the node uid so the base class can inject per-execution seeds.
        self._sampling_node_uid = node.uid
        return node
class GridSamplingPipeline(BaseSamplingPipeline):
    """A pipeline to sample blocks in a regular grid"""

    config: Schema

    def _get_sampling_node(self, previous_node: EONode) -> EONode:
        """Prepare the sampling task"""
        # Grid sampling is deterministic, so unlike the random pipelines it
        # needs no seed bookkeeping (no `_sampling_node_uid`).
        task = GridSamplingTask(
            features_to_sample=self._get_features_to_sample(),
            sample_size=self.config.sample_size,
            stride=self.config.stride,
            mask_of_samples=self._get_mask_of_samples_feature(),
        )
        return EONode(task, inputs=[previous_node])
| [
37811,
198,
26796,
15427,
19232,
31108,
198,
37811,
198,
11748,
450,
66,
198,
6738,
19720,
1330,
4377,
11,
360,
713,
11,
7343,
11,
32233,
11,
309,
29291,
11,
4479,
198,
198,
11748,
299,
32152,
355,
45941,
198,
6738,
279,
5173,
5109,
1... | 2.418718 | 3,697 |
import json
import re
from bs4 import BeautifulSoup
from model.Transport.Walk import Walk
from model.Transport.PublicTransport import PublicTransport
from model.Transport.Drive import Drive
from model.Transport.Bicycle import Bicycle
from model.Possibilities import Possibilities
from webservice_caller.TransportAPICaller import TransportAPICaller
from webservice_caller.call_url import call_url, APICallError
class GoogleAPICaller(TransportAPICaller):
    '''
    Class that handles calling google api to compute itiniraries
    '''
    _url = 'https://maps.googleapis.com/maps/api/directions/json?'
    # NOTE(review): the API key is hard-coded in the source; consider moving
    # it to configuration or an environment variable.
    _key = 'AIzaSyCqgwlzgUDYYF7xnePerJZaapgUWmyGYjc'

    def __init__ (self, request):
        '''
        Create the different parameters that we will need for the API url
        '''
        self._origin = request.from_x, request.from_y
        self._destination = request.to_x, request.to_y
        self._modes = {'driving':Drive,'walking':Walk,'bicycling':Bicycle,'transit':PublicTransport}

    def _build_url(self, mode):
        '''
        Build the directions-API URL for one travel mode. Shared by
        get_times and get_itineraries (previously duplicated in both).
        '''
        return (GoogleAPICaller._url
                + "origin=" + ",".join(str(e) for e in self._origin)
                + "&destination=" + ",".join(str(f) for f in self._destination)
                + "&mode=" + mode
                + "&key=" + GoogleAPICaller._key)

    @property
    def get_times(self):
        '''
        Get the different times related to the travel modes and returns
        a dict mapping each travel mode to its duration value'
        '''
        times = {}
        for mode in self._modes:
            response = call_url(self._build_url(mode))
            data = json.loads(response.content)
            try:
                travel_time = data["routes"][0]["legs"][0]["duration"]["value"]
            except (IndexError, KeyError):
                # No route found or unexpected payload shape: both mean the
                # API call produced no usable result.
                raise APICallError
            times[mode] = travel_time
        return times

    def get_itineraries(self):
        '''
        Get the different itineraries related to the travel modes
        '''
        itineraries = {}
        for mode in self._modes:
            response = call_url(self._build_url(mode))
            data = json.loads(response.content)
            try:
                steps = data["routes"][0]["legs"][0]["steps"]
            except (IndexError, KeyError):
                raise APICallError
            # Concatenate the HTML step instructions, then strip the markup.
            itinerary = ""
            for step in steps:
                itinerary += step["html_instructions"] + ", "
            clean_itinerary = BeautifulSoup(itinerary, "html.parser").text
            itineraries[mode] = clean_itinerary
        return itineraries
| [
11748,
33918,
198,
11748,
302,
198,
6738,
275,
82,
19,
1330,
23762,
50,
10486,
198,
6738,
2746,
13,
8291,
634,
13,
35963,
1330,
6857,
198,
6738,
2746,
13,
8291,
634,
13,
15202,
8291,
634,
1330,
5094,
8291,
634,
198,
6738,
2746,
13,
... | 2.298732 | 1,262 |
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
| [
2,
30396,
329,
257,
13934,
5509,
10139,
13,
198,
2,
1398,
12200,
19667,
7,
15252,
2599,
198,
2,
220,
220,
220,
220,
825,
11593,
15003,
834,
7,
944,
11,
2124,
2599,
198,
2,
220,
220,
220,
220,
220,
220,
220,
220,
2116,
13,
2100,
... | 1.911765 | 102 |
import asyncio
import datetime
import discord
from discord.ext import commands
from .utils import human_time
| [
11748,
30351,
952,
198,
11748,
4818,
8079,
198,
198,
11748,
36446,
198,
6738,
36446,
13,
2302,
1330,
9729,
198,
198,
6738,
764,
26791,
1330,
1692,
62,
2435,
198
] | 3.964286 | 28 |
"""create books table
Revision ID: 001
Revises:
Create Date: 2021-08-08 21:34:07.136758
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '001'        # unique id of this migration
down_revision = None    # first migration in the chain: nothing to downgrade to
branch_labels = None
depends_on = None
| [
37811,
17953,
3835,
3084,
198,
198,
18009,
1166,
4522,
25,
3571,
16,
198,
18009,
2696,
25,
220,
198,
16447,
7536,
25,
33448,
12,
2919,
12,
2919,
2310,
25,
2682,
25,
2998,
13,
20809,
38569,
198,
198,
37811,
198,
6738,
31341,
2022,
291,... | 2.808511 | 94 |
import pytest
from pkg_resources import resource_filename
from .util import SpaceTxValidator
# Path of the JSON-schema for an experiment document, shipped inside the package.
experiment_schema_path = resource_filename("validate_sptx", "schema/experiment.json")
# Validator instance shared by the tests in this module.
validator = SpaceTxValidator(experiment_schema_path)
# Example experiment document shipped with the package.
example = resource_filename("validate_sptx", "examples/experiment/experiment.json")
| [
11748,
12972,
9288,
198,
6738,
279,
10025,
62,
37540,
1330,
8271,
62,
34345,
198,
198,
6738,
764,
22602,
1330,
4687,
46047,
47139,
1352,
198,
198,
23100,
3681,
62,
15952,
2611,
62,
6978,
796,
8271,
62,
34345,
7203,
12102,
378,
62,
82,
... | 3.252525 | 99 |
#coding=utf-8
from appium import webdriver
# self.driver.find_element_by_class_name('android.widget.RelativeLayout').click()
if __name__ == '__main__':
    # NOTE(review): `Test` is not defined in this part of the file —
    # presumably a test-case class defined elsewhere; confirm the import.
    t = Test()
    t.setUp()
| [
2,
66,
7656,
28,
40477,
12,
23,
198,
6738,
598,
1505,
1330,
3992,
26230,
198,
220,
220,
220,
220,
220,
220,
1303,
2116,
13,
26230,
13,
19796,
62,
30854,
62,
1525,
62,
4871,
62,
3672,
10786,
19411,
13,
42655,
13,
6892,
876,
32517,
... | 2.486842 | 76 |
# Copyright (c) 2012-2018 SoftBank Robotics. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the COPYING file.
# pylint: disable=unused-variable
| [
2,
15069,
357,
66,
8,
2321,
12,
7908,
8297,
28650,
47061,
13,
1439,
2489,
10395,
13,
198,
2,
5765,
286,
428,
2723,
2438,
318,
21825,
416,
257,
347,
10305,
12,
7635,
5964,
326,
460,
307,
198,
2,
1043,
287,
262,
27975,
45761,
2393,
... | 3.491525 | 59 |
import matplotlib
import numpy
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import sys
from AdaFair import AdaFair
sys.path.insert(0, 'DataPreprocessing')
sys.path.insert(0, 'equalized_odds_and_calibration-master')
# import funcs_disp_mist as fdm
from Competitors.AdaCost import AdaCostClassifier
from load_kdd import load_kdd
# from load_german import load_german
from load_compas_data import load_compas
from load_adult import load_adult
from load_bank import load_bank
if __name__ == '__main__':
    # Other datasets that `main` (defined elsewhere in this file) accepts:
    # main("compass-gender")
    # main("adult-gender")
    # main("bank")
    main("kdd")
| [
11748,
2603,
29487,
8019,
198,
11748,
299,
32152,
198,
198,
6759,
29487,
8019,
13,
1904,
10786,
46384,
11537,
198,
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
198,
198,
11748,
25064,
198,
198,
6738,
47395,
30099,
1330,
47395,... | 2.806452 | 217 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from .download_files import DownloadsFile
from .network_info import NetworkInfo
from .load_music import LoadMusic
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
6738,
764,
15002,
62,
16624,
1330,
50093,
8979,
198,
6738,
764,
27349,
62,
10951,
1330,
7311,
12360,
198,
6738,
76... | 3.156863 | 51 |
from tokenize import NAME, OP
import re
# Pre-compiled patterns used to normalise arbitrary names into python identifiers.
regexes = {
    # Separator characters (dash, space, slash) that become underscores.
    "joins": re.compile(r"[- /]"),
    # Runs of two or more underscores, collapsed to one.
    "repeated_underscore": re.compile(r"_{2,}"),
    # All leading characters that may not start a python identifier
    # (the "+" makes the sub strip the whole invalid prefix, not just
    # the first character — "^" cannot re-match after one removal).
    "invalid_variable_name_start": re.compile(r"^[^a-zA-Z_]+"),
    # Any character that may not appear inside a python identifier.
    "invalid_variable_name_characters": re.compile(r"[^0-9a-zA-Z_]"),
}


def acceptable(name, capitalize=False):
    """Convert a string into something that can be used as a valid python variable name.

    Dashes, spaces and slashes become underscores, characters that are not
    legal in an identifier are dropped and leading non-letter characters are
    stripped.  With ``capitalize=True`` each underscore-separated word is
    joined in upper-camel-case (only the first letter of each word changes).
    """
    # Convert space, dash and slash separators into underscores
    name = regexes["joins"].sub("_", name)
    # Remove invalid characters
    name = regexes["invalid_variable_name_characters"].sub("", name)
    # Remove leading characters until we find a letter or underscore
    name = regexes["invalid_variable_name_start"].sub("", name)
    # Clean up irregularities in underscores.
    name = regexes["repeated_underscore"].sub("_", name.strip("_"))

    if capitalize:
        # We don't use python's built in capitalize method here because it
        # turns all upper chars into lower chars if not at the start of
        # the string and we only want to change the first character.
        name_parts = []
        for word in name.split("_"):
            if not word:
                # Guard: "".split("_") yields [""] — without this the
                # word[0] below raised IndexError for empty results.
                continue
            name_parts.append(word[0].upper())
            if len(word) > 1:
                name_parts.append(word[1:])
        name = "".join(name_parts)

    return name
class TokenDetails:
"""Container for current token"""
class Single:
"""Container for a single block (i.e. it or ignore block)"""
@property
@name.setter
@property
@property
class Group:
"""Container for describe blocks"""
@property
@name.setter
@property
    def starting_signature(self):
        """Determine if this group is starting itself or anything belonging to it"""
        # True while either the group header or one of its singles is still
        # being parsed; both flags are cleared by finish_signature().
        return self.starting_group or self.starting_single
@property
    def kls_name(self):
        """Determine python name for group (always prefixed with ``Test``)"""
        # Determine kls for group
        if not self.parent or not self.parent.name:
            # Top level (or anonymous parent): just prefix our own name.
            return f"Test{self.name}"
        else:
            # Nested group: reuse the parent's generated name without its
            # "Test" prefix so we don't produce "TestTest..." names.
            use = self.parent.kls_name
            if use.startswith("Test"):
                use = use[4:]
            return f"Test{use}_{self.name}"
@property
    def super_kls(self):
        """
        Determine what kls this group inherits from

        If default kls should be used, then None is returned
        """
        # No explicit kls and a named parent: inherit from the parent's
        # generated class; otherwise return the (possibly None) explicit kls.
        if not self.kls and self.parent and self.parent.name:
            return self.parent.kls_name
        return self.kls
def start_group(self, scol, typ):
"""Start a new group"""
return Group(parent=self, level=scol, typ=typ)
def start_single(self, typ, scol):
"""Start a new single"""
self.starting_single = True
single = self.single = Single(typ=typ, group=self, indent=(scol - self.level))
self.singles.append(single)
return single
def finish_signature(self):
"""Tell group it isn't starting anything anymore"""
self.starting_group = False
self.starting_single = False
def modify_kls(self, name):
"""Add a part to what will end up being the kls' superclass"""
if self.kls is None:
self.kls = name
else:
self.kls += name
| [
6738,
11241,
1096,
1330,
36751,
11,
13349,
198,
11748,
302,
198,
198,
260,
25636,
274,
796,
1391,
198,
220,
220,
220,
366,
7639,
1040,
1298,
302,
13,
5589,
576,
7,
81,
17912,
12,
1220,
60,
12340,
198,
220,
220,
220,
366,
45956,
515,... | 2.472073 | 1,307 |
#
# PySNMP MIB module BAS-FTD-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/BAS-FTD-MIB
# Produced by pysmi-0.3.4 at Wed May 1 11:33:53 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, ConstraintsIntersection, ConstraintsUnion, SingleValueConstraint, ValueSizeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ConstraintsIntersection", "ConstraintsUnion", "SingleValueConstraint", "ValueSizeConstraint")
basFtd, = mibBuilder.importSymbols("BAS-MIB", "basFtd")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
iso, MibScalar, MibTable, MibTableRow, MibTableColumn, Bits, ObjectIdentity, Integer32, Gauge32, ModuleIdentity, TimeTicks, NotificationType, MibIdentifier, Unsigned32, Counter32, IpAddress, Counter64 = mibBuilder.importSymbols("SNMPv2-SMI", "iso", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Bits", "ObjectIdentity", "Integer32", "Gauge32", "ModuleIdentity", "TimeTicks", "NotificationType", "MibIdentifier", "Unsigned32", "Counter32", "IpAddress", "Counter64")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
basFtdMib = ModuleIdentity((1, 3, 6, 1, 4, 1, 3493, 2, 5, 1))
if mibBuilder.loadTexts: basFtdMib.setLastUpdated('9810071415Z')
if mibBuilder.loadTexts: basFtdMib.setOrganization('Broadband Access Systems')
if mibBuilder.loadTexts: basFtdMib.setContactInfo(' Tech Support Broadband Access Systems 4 Technology Drive Westborough, MA 01581 U.S.A. 508-366-8833 support@basystems.com')
if mibBuilder.loadTexts: basFtdMib.setDescription('This MIB module defines the configuration and status MIB objects for a Broadband Access System FTD objects.')
basFtdObjects = MibIdentifier((1, 3, 6, 1, 4, 1, 3493, 2, 5, 1, 1))
basFtdHeartBeatTimer = MibScalar((1, 3, 6, 1, 4, 1, 3493, 2, 5, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 10)).clone(1)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: basFtdHeartBeatTimer.setStatus('current')
if mibBuilder.loadTexts: basFtdHeartBeatTimer.setDescription('Timer value in seconds for the FTD heartbeat.')
basFtdTableEligibilityCounter = MibScalar((1, 3, 6, 1, 4, 1, 3493, 2, 5, 1, 1, 2), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: basFtdTableEligibilityCounter.setStatus('current')
if mibBuilder.loadTexts: basFtdTableEligibilityCounter.setDescription('Count of the FTD heartbeat timeouts since receiving a table request, i.e. an eligibility cycle.')
basFtdTableEligibilityCounterThreshold = MibScalar((1, 3, 6, 1, 4, 1, 3493, 2, 5, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 100)).clone(5)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: basFtdTableEligibilityCounterThreshold.setStatus('current')
if mibBuilder.loadTexts: basFtdTableEligibilityCounterThreshold.setDescription('Counter threshold (table eligibility counter) defining a time window used to accumulate table requests and stagger their distribution.')
basFtdIdleCounter = MibScalar((1, 3, 6, 1, 4, 1, 3493, 2, 5, 1, 1, 4), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: basFtdIdleCounter.setStatus('current')
if mibBuilder.loadTexts: basFtdIdleCounter.setDescription('Count of the FTD heartbeat timeouts in an idle state.')
basFtdIdleCounterThreshold = MibScalar((1, 3, 6, 1, 4, 1, 3493, 2, 5, 1, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 100)).clone(5)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: basFtdIdleCounterThreshold.setStatus('current')
if mibBuilder.loadTexts: basFtdIdleCounterThreshold.setDescription('Counter threshold (idle counter) defining a time window ( in heartbeat units ) used to stagger the transmission of revision packets.')
basFtdTableRequestCounter = MibScalar((1, 3, 6, 1, 4, 1, 3493, 2, 5, 1, 1, 6), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: basFtdTableRequestCounter.setStatus('current')
if mibBuilder.loadTexts: basFtdTableRequestCounter.setDescription('Count of FTD table requests during an eligibility cycle.')
basFtdPendingCallbackCounter = MibScalar((1, 3, 6, 1, 4, 1, 3493, 2, 5, 1, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: basFtdPendingCallbackCounter.setStatus('current')
if mibBuilder.loadTexts: basFtdPendingCallbackCounter.setDescription('Count of the FTD heartbeat timeouts accumulated while a callback is pending, i.e. a pending cycle.')
basFtdPendingCallbackThreshold = MibScalar((1, 3, 6, 1, 4, 1, 3493, 2, 5, 1, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 100)).clone(10)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: basFtdPendingCallbackThreshold.setStatus('current')
if mibBuilder.loadTexts: basFtdPendingCallbackThreshold.setDescription('Counter threshold (pending callback counter) defining a maximum time window to wait for a callback.')
basFtdBootState = MibScalar((1, 3, 6, 1, 4, 1, 3493, 2, 5, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("null", 1), ("cold", 2), ("warm", 3)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: basFtdBootState.setStatus('current')
if mibBuilder.loadTexts: basFtdBootState.setDescription('FTD objects boot state')
basFtdPurgeConfiguration = MibScalar((1, 3, 6, 1, 4, 1, 3493, 2, 5, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("immediate", 1), ("delayed", 2))).clone(1)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: basFtdPurgeConfiguration.setStatus('current')
if mibBuilder.loadTexts: basFtdPurgeConfiguration.setDescription('Configuration for when to purge the forwarding table: immediately or delayed (after a new table has been received).')
basFtdUpdateRequests = MibScalar((1, 3, 6, 1, 4, 1, 3493, 2, 5, 1, 1, 11), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: basFtdUpdateRequests.setStatus('current')
if mibBuilder.loadTexts: basFtdUpdateRequests.setDescription('Count of the total number of update requests.')
basFtdUpdatepackets = MibScalar((1, 3, 6, 1, 4, 1, 3493, 2, 5, 1, 1, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: basFtdUpdatepackets.setStatus('current')
if mibBuilder.loadTexts: basFtdUpdatepackets.setDescription('Count of the total number of update packets.')
basFtdTableRequests = MibScalar((1, 3, 6, 1, 4, 1, 3493, 2, 5, 1, 1, 13), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: basFtdTableRequests.setStatus('current')
if mibBuilder.loadTexts: basFtdTableRequests.setDescription('Count of the total number of table requests.')
basFtdTablePackets = MibScalar((1, 3, 6, 1, 4, 1, 3493, 2, 5, 1, 1, 14), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: basFtdTablePackets.setStatus('current')
if mibBuilder.loadTexts: basFtdTablePackets.setDescription('Count of the total number of table packets.')
basFtdAllocatedPackets = MibScalar((1, 3, 6, 1, 4, 1, 3493, 2, 5, 1, 1, 15), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: basFtdAllocatedPackets.setStatus('current')
if mibBuilder.loadTexts: basFtdAllocatedPackets.setDescription('Count of the total number of allocated packets.')
basFtdSentPackets = MibScalar((1, 3, 6, 1, 4, 1, 3493, 2, 5, 1, 1, 16), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: basFtdSentPackets.setStatus('current')
if mibBuilder.loadTexts: basFtdSentPackets.setDescription('Count of the total number of packets sent.')
basFtdFreedPackets = MibScalar((1, 3, 6, 1, 4, 1, 3493, 2, 5, 1, 1, 17), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: basFtdFreedPackets.setStatus('current')
if mibBuilder.loadTexts: basFtdFreedPackets.setDescription('Count of the total number of packets freed.')
basFtdSpuriousUpdatePackets = MibScalar((1, 3, 6, 1, 4, 1, 3493, 2, 5, 1, 1, 18), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: basFtdSpuriousUpdatePackets.setStatus('current')
if mibBuilder.loadTexts: basFtdSpuriousUpdatePackets.setDescription('Count of the total number of spurious update packets.')
basFtdSpuriousTablePackets = MibScalar((1, 3, 6, 1, 4, 1, 3493, 2, 5, 1, 1, 19), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: basFtdSpuriousTablePackets.setStatus('current')
if mibBuilder.loadTexts: basFtdSpuriousTablePackets.setDescription('Count of the total number of spurious table packets.')
basFtdIgnoredUpdatePackets = MibScalar((1, 3, 6, 1, 4, 1, 3493, 2, 5, 1, 1, 20), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: basFtdIgnoredUpdatePackets.setStatus('current')
if mibBuilder.loadTexts: basFtdIgnoredUpdatePackets.setDescription('Count of the total number of ignored update packets.')
basFtdIgnoredTablePackets = MibScalar((1, 3, 6, 1, 4, 1, 3493, 2, 5, 1, 1, 21), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: basFtdIgnoredTablePackets.setStatus('current')
if mibBuilder.loadTexts: basFtdIgnoredTablePackets.setDescription('Count of the total number of ignored table packets.')
basFtdInstalledUpdatePackets = MibScalar((1, 3, 6, 1, 4, 1, 3493, 2, 5, 1, 1, 22), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: basFtdInstalledUpdatePackets.setStatus('current')
if mibBuilder.loadTexts: basFtdInstalledUpdatePackets.setDescription('Count of the total number of installed update packets.')
basFtdInstalledTablePackets = MibScalar((1, 3, 6, 1, 4, 1, 3493, 2, 5, 1, 1, 23), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: basFtdInstalledTablePackets.setStatus('current')
if mibBuilder.loadTexts: basFtdInstalledTablePackets.setDescription('Count of the total number of installed table packets.')
basFtdStoredTablePackets = MibScalar((1, 3, 6, 1, 4, 1, 3493, 2, 5, 1, 1, 24), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: basFtdStoredTablePackets.setStatus('current')
if mibBuilder.loadTexts: basFtdStoredTablePackets.setDescription('Count of the total number of stored table packets.')
basFtdRevisionPackets = MibScalar((1, 3, 6, 1, 4, 1, 3493, 2, 5, 1, 1, 25), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: basFtdRevisionPackets.setStatus('current')
if mibBuilder.loadTexts: basFtdRevisionPackets.setDescription('Count of the total number of sent(rs) or received (fwd) revision packets.')
basFtdFailureCode = MibScalar((1, 3, 6, 1, 4, 1, 3493, 2, 5, 1, 1, 26), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109))).clone(namedValues=NamedValues(("finite-state-machine", 1), ("dequeue", 2), ("null-table-fia", 3), ("nonnull-update-pkt", 4), ("nonnull-table-pkt", 5), ("pending-threshold", 6), ("uninitialized-storage", 7), ("external", 100), ("rbp-registration", 101), ("unknown-pkt", 102), ("rbp-send", 103), ("rbp-callback", 104), ("packet-allocation", 105), ("packet-corruption", 106), ("rte-error-bad-prefix", 107), ("rte-error-bad-version", 108), ("add-route-failure", 109)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: basFtdFailureCode.setStatus('current')
if mibBuilder.loadTexts: basFtdFailureCode.setDescription('Log of the most recent failure code.')
basFtdRevision = MibScalar((1, 3, 6, 1, 4, 1, 3493, 2, 5, 1, 1, 27), Integer32()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: basFtdRevision.setStatus('current')
if mibBuilder.loadTexts: basFtdRevision.setDescription('FTD update revision (write only for testing).')
basFtdPresentFsmState = MibScalar((1, 3, 6, 1, 4, 1, 3493, 2, 5, 1, 1, 28), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: basFtdPresentFsmState.setStatus('current')
if mibBuilder.loadTexts: basFtdPresentFsmState.setDescription('Current FTD finite state maching state.')
basFtdFsmRestarts = MibScalar((1, 3, 6, 1, 4, 1, 3493, 2, 5, 1, 1, 29), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: basFtdFsmRestarts.setStatus('current')
if mibBuilder.loadTexts: basFtdFsmRestarts.setDescription('Count of the total number of FTD object restarts since a cold boot.')
mibBuilder.exportSymbols("BAS-FTD-MIB", basFtdFsmRestarts=basFtdFsmRestarts, basFtdIdleCounterThreshold=basFtdIdleCounterThreshold, basFtdInstalledUpdatePackets=basFtdInstalledUpdatePackets, basFtdIgnoredUpdatePackets=basFtdIgnoredUpdatePackets, basFtdTableRequests=basFtdTableRequests, basFtdUpdateRequests=basFtdUpdateRequests, basFtdObjects=basFtdObjects, basFtdInstalledTablePackets=basFtdInstalledTablePackets, basFtdSpuriousUpdatePackets=basFtdSpuriousUpdatePackets, basFtdBootState=basFtdBootState, basFtdTableEligibilityCounterThreshold=basFtdTableEligibilityCounterThreshold, basFtdIdleCounter=basFtdIdleCounter, basFtdPurgeConfiguration=basFtdPurgeConfiguration, basFtdIgnoredTablePackets=basFtdIgnoredTablePackets, basFtdTableEligibilityCounter=basFtdTableEligibilityCounter, basFtdSpuriousTablePackets=basFtdSpuriousTablePackets, basFtdTableRequestCounter=basFtdTableRequestCounter, basFtdPendingCallbackThreshold=basFtdPendingCallbackThreshold, basFtdMib=basFtdMib, basFtdStoredTablePackets=basFtdStoredTablePackets, basFtdRevisionPackets=basFtdRevisionPackets, basFtdTablePackets=basFtdTablePackets, basFtdFailureCode=basFtdFailureCode, basFtdUpdatepackets=basFtdUpdatepackets, basFtdPresentFsmState=basFtdPresentFsmState, basFtdPendingCallbackCounter=basFtdPendingCallbackCounter, PYSNMP_MODULE_ID=basFtdMib, basFtdHeartBeatTimer=basFtdHeartBeatTimer, basFtdAllocatedPackets=basFtdAllocatedPackets, basFtdSentPackets=basFtdSentPackets, basFtdRevision=basFtdRevision, basFtdFreedPackets=basFtdFreedPackets)
| [
2,
198,
2,
9485,
15571,
7378,
337,
9865,
8265,
29809,
12,
9792,
35,
12,
8895,
33,
357,
4023,
1378,
16184,
76,
489,
8937,
13,
785,
14,
79,
893,
11632,
8,
198,
2,
7054,
45,
13,
16,
2723,
2393,
1378,
14,
14490,
14,
67,
615,
47562,
... | 2.88211 | 4,835 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
:Mod: test_citation
:Synopsis:
:Author:
servilla
:Created:
1/25/20
"""
import json
import pytest
from webapp.citation import Citation
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
37811,
198,
25,
5841,
25,
1332,
62,
66,
3780,
198,
198,
25,
49771,
25,
198,
198,
25,
13838,
25,
198,
220,
220,... | 2.369048 | 84 |
#!/usr/bin/python
# -*- coding: latin-1 -*-
import sys
import zmq
import time
import socket
import autopy
from pynput.mouse import Button, Controller
# Socket to talk to server
# NOTE: the name `socket` below shadows the stdlib `socket` module
# imported at the top of this file.
context = zmq.Context()
socket = context.socket(zmq.SUB)
mouse = Controller()

print("Collecting updates from weather server…")
socket.connect("tcp://localhost:5556")

# Subscribe to zipcode, default is NYC, 10001
zip_filter = sys.argv[1] if len(sys.argv) > 1 else "11102"

# Python 2 - ascii bytes to unicode str
if isinstance(zip_filter, bytes):
    zip_filter = zip_filter.decode('ascii')

# subscribe to messaged PREFIXED with zipfilter
# NOTE(review): an empty string subscribes to *all* messages, so the
# `zip_filter` computed above is never used — confirm whether it should
# be passed here instead of "".
socket.setsockopt_string(zmq.SUBSCRIBE, "")

# Process 5 updates
total_temp = 0
# NOTE(review): this loop never terminates, so everything after it in
# this file is effectively dead code.
while True:
    string = socket.recv_string()
    print(string)
    # socket.send_string("Yo Buddy")
    # uid, xPos, yPos = string.split()
    # xPos = int(xPos)
    # yPos = int(yPos)
    # # time.sleep(2)
    # mouse.position = xPos, yPos
    # print(hex(autopy.bitmap.capture_screen().get_color(xPos, yPos)))
    # mouse.move(xPos, yPos)
    # autopy.mouse.smooth_move(xPos, yPos)
    # print("Average temperature for zipcode '%s' was %dF" % (
    #     zip_filter, total_temp / (update_nbr+1))
    # )
#
# Hello World client in Python
# Connects REQ socket to tcp://localhost:5555
# Sends "Hello" to server, expects "World" back
#
# import zmq
# context = zmq.Context()
# # Socket to talk to server
# print("Connecting to hello world server…")
# socket = context.socket(zmq.REQ)
# socket.connect("tcp://localhost:5555")
# # Do 10 requests, waiting each time for a response
# for request in range(10):
# print("Sending request %s …" % request)
# socket.send(b"Hello")
# # Get the reply.
# message = socket.recv()
# print("Received reply %s [ %s ]" % (request, message))
import ipaddress
import socket
import netifaces
import pandas as pd


def is_up(ip, port=80, timeout=0.5):
    """Return True when `ip` accepts a TCP connection on `port`."""
    probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        probe.settimeout(timeout)
        return probe.connect_ex((ip, port)) == 0
    finally:
        probe.close()


# determine my current network - name, address, subnet
gws = netifaces.gateways()
addr, interface_name = gws['default'][netifaces.AF_INET]
network_info = netifaces.ifaddresses(interface_name)[netifaces.AF_INET]
address, netmask, broadcast = network_info[0].values()
n = ipaddress.ip_interface(f'{address}/{netmask}').network
# all host addresses on the network (drop network and broadcast addresses)
ip_list = [str(x) for x in n][1:-1]

df = pd.DataFrame(ip_list, columns=['ip'])
df['port'] = 80
# Probe every host on port 80.  The original `df[is_up].apply(is_up)`
# raised a NameError (`is_up` was never defined) and indexed the frame
# incorrectly; the intent was clearly a per-ip reachability check.
df['is_up'] = df['ip'].apply(is_up)
2,
48443,
14629,
14,
8800,
14,
29412,
198,
2,
532,
9,
12,
19617,
25,
3042,
259,
12,
16,
532,
9,
12,
198,
198,
11748,
25064,
198,
11748,
1976,
76,
80,
198,
11748,
640,
198,
11748,
17802,
628,
198,
11748,
1960,
11081,
198,
6738,
279... | 2.573434 | 926 |
import os
from fnmatch import fnmatch
| [
11748,
28686,
198,
6738,
24714,
15699,
1330,
24714,
15699,
628
] | 3.9 | 10 |
from deutschland.verena.verenadownloader import VerenaDownloader
| [
6738,
390,
40768,
1044,
13,
303,
918,
64,
13,
303,
918,
324,
593,
29356,
1330,
569,
567,
2616,
10002,
263,
628,
628,
198
] | 3 | 23 |
#!/usr/bin/python
"""
Calculate vdw energy table.
This is a separate calculation at atom level. The result is not used by MCCE. However the conformer level vdw terms
should be consistent with those in opp files.
"""
import sys
import libvdw
from pdbio import *
if __name__ == "__main__":
    # find location of parameters
    libvdw.initdb()
    #print vdwdb.vdwparam
    #print vdwdb.scaling

    pdbfile = "step2_out.pdb"
    protein = Protein()
    protein.loadpdb(pdbfile)
    # Build the 1-2, 1-3 and 1-4 bonded-connectivity tables the vdw
    # routines need to decide which atom pairs are excluded or scaled.
    protein.make_connect12()
    protein.make_connect13()
    protein.make_connect14()

    if len(sys.argv) < 2:
        print "vdw.py conformerID cutoff"
        print "Example: vdw.py GLU-1A0060_005 0.00"
        print "Note: conformerID is in head3.lst."
        sys.exit()

    # Parse the conformer ID (e.g. "GLU-1A0060_005"): residue name,
    # then chain/sequence/conformer-number from the tail.
    resName = sys.argv[1][:3]
    confSeq = sys.argv[1][5:]
    if len(sys.argv) > 2:
        # Optional second argument: only pairs with |vdw| above this
        # cutoff are printed.
        libvdw.print_cutoff = float(sys.argv[2])

    chainID = confSeq[0]
    resSeq = int(confSeq[1:5])
    confNum = int(confSeq[-3:])
    resID = "%3s%04d%c" % (resName, resSeq, chainID)
    confID = "%3s%04d%c%03d" % (resName, resSeq, chainID, confNum)

    # Locate the requested conformer, then accumulate its vdw energy
    # against every other conformer in the protein.
    found_res = False
    found_conf = False
    for res in protein.residue:
        if found_res:
            break
        if res.resID == resID:
            found_res = True
            for conf in res.conf:
                if found_conf:
                    break
                if confID == conf.confID:
                    found_conf = True
                    vdw0 = 0.0 # intra
                    vdw1 = 0.0 # to backbone
                    for res2 in protein.residue:
                        # res2.conf[0] is the backbone conformer.
                        vdwt = libvdw.vdw_conf(conf, res2.conf[0])
                        vdw1 += vdwt
                        if abs(vdwt) > libvdw.print_cutoff:
                            print "Backbone(Accumulative): %s -> %s: %.3f" % (conf.confID, res2.conf[0].confID, vdw1)
                        for conf2 in res2.conf[1:]:
                            if conf2 == conf: # Intra
                                vdw0 = libvdw.vdw_conf(conf, conf2)
                                print "Intra: %s -> %s: %.3f *" % (conf.confID, conf2.confID, vdw0)
                            elif res == res2: # same residue to other conformers
                                vdwt = 0.0
                            else:
                                vdwt = libvdw.vdw_conf(conf, conf2)
                                if abs(vdwt) > libvdw.print_cutoff:
                                    print "Pairwise: %s -> %s: %.3f" % (conf.confID, conf2.confID, vdwt)

    # NOTE(review): if the conformer ID is never found, vdw0/vdw1 are
    # unbound here and this print raises NameError — confirm intended.
    print "%s: vdw0=%.3f, vdw1=%.3f" % (sys.argv[1], vdw0, vdw1)
| [
2,
48443,
14629,
14,
8800,
14,
29412,
198,
37811,
198,
27131,
378,
410,
67,
86,
2568,
3084,
13,
198,
770,
318,
257,
4553,
17952,
379,
22037,
1241,
13,
383,
1255,
318,
407,
973,
416,
337,
4093,
36,
13,
2102,
262,
369,
16354,
1241,
... | 1.75134 | 1,492 |
from django.apps import AppConfig
| [
6738,
42625,
14208,
13,
18211,
1330,
2034,
16934,
628
] | 3.888889 | 9 |
import sys
import numpy as np
INF = 10 ** 18
if __name__ == "__main__":
main()
| [
11748,
25064,
201,
198,
201,
198,
11748,
299,
32152,
355,
45941,
201,
198,
201,
198,
1268,
37,
796,
838,
12429,
1248,
201,
198,
201,
198,
361,
11593,
3672,
834,
6624,
366,
834,
12417,
834,
1298,
201,
198,
220,
220,
220,
1388,
3419,
... | 2.162791 | 43 |
import os
from bruces.webapp.view import LayoutView
from bruces.webapp.view import SilentViewContext
class BaseLayoutView(LayoutView):
    """
    Base layout view.

    Shared configuration for layout views: the nested OuterView points at
    the ``layout/`` template directory while the view itself uses the
    ``impl`` sub-directory next to this module.
    """

    class OuterView(LayoutView.OuterView):
        """
        Outer view class for layouts.
        """

        # Base directory for layout files
        basedir = os.path.realpath(os.path.dirname(__file__))

        # Sub-directory for layout files
        viewdir = "layout/"

    # Base directory for view files
    basedir = os.path.realpath(os.path.dirname(__file__)) + "/impl"

    # Context class (suppresses output — see SilentViewContext)
    context_cls = SilentViewContext
class DefaultLayoutView(BaseLayoutView):
    """
    View object for displaying the default layout.
    """

    class OuterView(BaseLayoutView.OuterView):
        """
        Outer view class for the layout.
        """

        # Template file rendered for this layout
        filename = "default.html"
class LandingLayoutView(BaseLayoutView):
    """
    View object for displaying the landing layout.
    """

    class OuterView(BaseLayoutView.OuterView):
        """
        Outer view class for the layout.
        """

        # Template file rendered for this layout
        filename = "landing.html"
| [
11748,
28686,
201,
198,
6738,
18145,
728,
13,
12384,
1324,
13,
1177,
1330,
47639,
7680,
201,
198,
6738,
18145,
728,
13,
12384,
1324,
13,
1177,
1330,
25083,
7680,
21947,
201,
198,
201,
198,
201,
198,
4871,
7308,
32517,
7680,
7,
32517,
... | 2.324578 | 533 |
import os
import unittest
from swttaro.search_str import SearchStr
if __name__ == '__main__':
    # Build an explicit suite so the two search tests run in a fixed order.
    # NOTE(review): `SearchStrTest` is not visible in this part of the file;
    # only `SearchStr` is imported above — confirm where the TestCase lives.
    suite = unittest.TestSuite()
    suite.addTest(SearchStrTest('test_search_str'))
    suite.addTest(SearchStrTest('test_re_search_str'))
    runner = unittest.TextTestRunner()
    runner.run(suite)
| [
11748,
28686,
198,
198,
11748,
555,
715,
395,
198,
198,
6738,
1509,
926,
12022,
13,
12947,
62,
2536,
1330,
11140,
13290,
628,
198,
198,
361,
11593,
3672,
834,
6624,
705,
834,
12417,
834,
10354,
198,
220,
220,
220,
18389,
796,
555,
715... | 2.594828 | 116 |
# coding: utf-8
from enum import Enum
from six import string_types, iteritems
from bitmovin_api_sdk.common.poscheck import poscheck_model
| [
2,
19617,
25,
3384,
69,
12,
23,
198,
198,
6738,
33829,
1330,
2039,
388,
198,
6738,
2237,
1330,
4731,
62,
19199,
11,
11629,
23814,
198,
6738,
1643,
76,
709,
259,
62,
15042,
62,
21282,
74,
13,
11321,
13,
1930,
9122,
1330,
1426,
9122,
... | 3.043478 | 46 |
import pytest
from snek5000 import load_simul
@pytest.mark.parametrize("session_id", (0, 1))
@pytest.mark.parametrize("session_id", (0, 1))
| [
11748,
12972,
9288,
198,
198,
6738,
10505,
74,
27641,
1330,
3440,
62,
14323,
377,
628,
198,
31,
9078,
9288,
13,
4102,
13,
17143,
316,
380,
2736,
7203,
29891,
62,
312,
1600,
357,
15,
11,
352,
4008,
628,
198,
31,
9078,
9288,
13,
4102,... | 2.457627 | 59 |
#!/usr/bin/env python3
#
# MIT License
#
# Copyright (c) 2020 EntySec
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
import sys
import readline
from ghost.core.cli.badges import Badges
from ghost.core.cli.colors import Colors
from ghost.core.cli.tables import Tables
from ghost.core.base.device import Device
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
18,
198,
198,
2,
198,
2,
17168,
13789,
198,
2,
198,
2,
15069,
357,
66,
8,
12131,
7232,
88,
6558,
198,
2,
198,
2,
2448,
3411,
318,
29376,
7520,
11,
1479,
286,
3877,
11,
284,
597,
1048,... | 3.67313 | 361 |
import json
import ast
import sys
import os
import re
import csv
import itertools
import operator
import dateutil.parser
import hashlib
import gensim
import spacy
import jsonschema
from datetime import datetime
from pprint import pprint
from datetime import datetime, timedelta
from gensim.utils import simple_preprocess
from nltk.corpus import stopwords
# This is so that the following imports work
sys.path.append(os.path.realpath("."))
from src.stop_words_file import *
from src.config import *
def merge_json(file1, file2):
""" Merges two jsons which are list of dictionaries """
first_part = open_json(file1)
second_part = open_json(file2)
merged_contents = first_part + second_part
return merged_contents
def get_json_key_value(json_object, keys_list):
""" Get the value from json pointing to string of keys input: [k1,k2] """
# This will be the key
key = keys_list[0]
# We check for types, if its a dict or list
if isinstance(json_object, list):
key = int(key)
# Check for index out of range
if key >= len(json_object):
return None
if len(keys_list) == 1:
# We first check if its a list and then move forward.
# Think, why we did it like that ?
if isinstance(json_object, list):
return json_object[key]
elif key in json_object:
return json_object[key]
else:
return None
return get_json_key_value(json_object[key], keys_list[1:])
def format_input_json(app, timestamp, message, channel_type, properties):
"""
Returns formatted parsed data.
Format of parsed json :
app : app_name
timestamp : "2015/01/01 12:10:30", (this format only coz elastic search identifies date field only if this format only)
message : "feedback / REVIEW"
channel-type : "channel"
properties : {
rating : 3
contains rest of the channel specific data.
}
"""
parsed_data = {}
parsed_data[APP] = app
parsed_data[TIMESTAMP] = timestamp
parsed_data[MESSAGE] = message
parsed_data[CHANNEL_TYPE] = channel_type
parsed_data[PROPERTIES] = properties
parsed_data[HASH_ID] = calculate_hash(timestamp + message)
return parsed_data
def convert_date_format(dateformat):
"""
Identifies most date time formats and converts them into unified date times format:
YYYY/MM/DD HH:MM:SS
Elastic Search identifies this format only.
"""
# Converts both unix and general time formats
try:
yourdate = dateutil.parser.parse(dateformat)
except BaseException:
try:
return datetime.utcfromtimestamp(
float(dateformat)).strftime("%Y/%m/%d %H:%M:%S")
except BaseException:
return ""
oldformat = str(yourdate)
datetimeobject = datetime.strptime(oldformat, '%Y-%m-%d %H:%M:%S')
newformat = datetimeobject.strftime('%Y/%m/%d %H:%M:%S')
return newformat
def generic_cleanup(message):
"""
All the parsing codes call this for generic cleanup.
"""
# Important to have quotes for sentiment to be correctly identified
message = message.replace(u"\u2018", "'")
message = message.replace(u"\u2019", "'")
message = message.replace("\n", "")
message = message.replace("\t", "")
# Removes links from message using regex
regex = re.compile(URL_REGEX)
message = regex.sub("", message)
# Removing the non ascii chars
message = (message.encode("ascii", "ignore")).decode("utf-8")
return message
def check_tweet_authenticity(tweet_message, twitter_handle_blacklist):
""" Checks if tweets incoming are authentic. basically there is blacklist of twitter-handles """
is_tweet_authentic = True
for handle in twitter_handle_blacklist:
if handle in tweet_message:
is_tweet_authentic = False
return is_tweet_authentic
def format_output_json(input_dict,
category=None,
sentiment=None,
derived_insight_properties=None):
""" Creates the json according to the unified json output format """
temp_dict = {}
# If the node that we are already passing has an insight, we write over it.
if DERIVED_INSIGHTS in input_dict:
temp_dict = input_dict[DERIVED_INSIGHTS]
if derived_insight_properties is not None:
if EXTRA_PROPERTIES in temp_dict:
temp_dict[EXTRA_PROPERTIES] = {
**temp_dict[EXTRA_PROPERTIES],
**derived_insight_properties
}
else:
temp_dict[EXTRA_PROPERTIES] = derived_insight_properties
# Touch the category only if you know what you are doing! You Moron!
if category is not None:
temp_dict[CATEGORY] = category
# Do not play with my sentiments! You Moron!
if sentiment is not None:
temp_dict[SENTIMENT] = sentiment
input_dict[DERIVED_INSIGHTS] = temp_dict
return input_dict
#
def filter_review_on_channel(channel_list, reviews):
""" Filters the review for those channels which are not in channel_list """
return [
review for review in reviews if review[CHANNEL_TYPE] in channel_list
]
def remove_stop_words(document):
"""
Removes stop words. Takes tokenised document as input and returns
after removing the stop words.
input : ["phil","is","good"]
output : ["phil","good"]
"""
stop_words = stopwords.words("english")
stop_words = set(stop_words + EXTENDED_STOP_WORDS)
return [token for token in document if token not in stop_words]
def lemmatisation(text, allowed_postags=["NOUN", "ADJ", "VERB", "ADV"]):
"""
Does lemmatisation. whats lemmatisation? google :P
Input : ["phil", "is", "good"]
- https://spacy.io/api/annotation
- https://spacy.io/api/top-level
"""
nlp = spacy.load("en", disable=["parser", "ner"])
doc = nlp(" ".join(text))
return [token.lemma_ for token in doc if token.pos_ in allowed_postags]
def get_negative_review(reviews):
""" Why does positive come above negative?? Think positive man!!! Sapien!! :p """
return [
review for review in reviews
if review[DERIVED_INSIGHTS][SENTIMENT]["compound"] < 0.0
]
def decrypt_config(app_config):
"""
- Replaces the ENV variables with their values. These values are stored in local and circleCI env.
- Returns app_config json with reaplced values
"""
env_list = app_config[ENV_KEYS]
app_config_formatted = json.dumps(app_config)
for key in env_list:
app_config_formatted = app_config_formatted.replace(
key, os.environ[key])
replaced_app_config = json.loads(app_config_formatted)
return replaced_app_config
| [
11748,
33918,
198,
11748,
6468,
198,
11748,
25064,
198,
11748,
28686,
198,
11748,
302,
198,
11748,
269,
21370,
198,
11748,
340,
861,
10141,
198,
11748,
10088,
198,
11748,
3128,
22602,
13,
48610,
198,
11748,
12234,
8019,
198,
198,
11748,
3... | 2.582236 | 2,657 |
import numpy as np
def mse(y):
"""
Mean squared error for decision tree (ie., mean) predictions
"""
return np.mean((y - np.mean(y)) ** 2)
def entropy(y):
"""
Entropy of a label sequence
"""
hist = np.bincount(y)
ps = hist / np.sum(hist)
return -np.sum([p * np.log2(p) for p in ps if p > 0])
def gini(y):
"""
Gini impurity (local entropy) of a label sequence
"""
hist = np.bincount(y)
N = np.sum(hist)
return 1 - sum([(i / N) ** 2 for i in hist])
| [
11748,
299,
32152,
355,
45941,
628,
628,
198,
198,
4299,
285,
325,
7,
88,
2599,
198,
220,
220,
220,
37227,
198,
220,
220,
220,
22728,
44345,
4049,
329,
2551,
5509,
357,
494,
1539,
1612,
8,
16277,
198,
220,
220,
220,
37227,
198,
220,... | 2.29646 | 226 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_ifgsim
----------------------------------
Tests for `ifgsim` module.
"""
import sys
import unittest
from ifgsim import ifgsim
if __name__ == '__main__':
sys.exit(unittest.main())
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
198,
37811,
198,
9288,
62,
361,
70,
14323,
198,
3880,
438,
198,
198,
51,
3558,
329,
4600,
361,
70,
14323,
63,
8265,
... | 2.583333 | 96 |
from setuptools import find_packages, setup
setup(
name='src',
packages=find_packages(),
version='0.1.0',
description='Springboard Data Science Program Capstone 2 Project',
author='Manuel J Gomez',
license='',
)
| [
6738,
900,
37623,
10141,
1330,
1064,
62,
43789,
11,
9058,
198,
198,
40406,
7,
198,
220,
220,
220,
1438,
11639,
10677,
3256,
198,
220,
220,
220,
10392,
28,
19796,
62,
43789,
22784,
198,
220,
220,
220,
2196,
11639,
15,
13,
16,
13,
15,... | 2.925926 | 81 |
import time
from selenium.webdriver.support import expected_conditions as EC
link = "http://selenium1py.pythonanywhere.com/catalogue/coders-at-work_207/"
| [
11748,
640,
198,
6738,
384,
11925,
1505,
13,
12384,
26230,
13,
11284,
1330,
2938,
62,
17561,
1756,
355,
13182,
198,
198,
8726,
796,
366,
4023,
1378,
741,
47477,
16,
9078,
13,
29412,
1092,
3003,
13,
785,
14,
9246,
30326,
14,
19815,
364... | 3.1 | 50 |
import pyopenpose as op
import glob
from os.path import join
import cv2
import numpy as np
import tqdm
import json
from json import JSONEncoder
source_dir = '/data/social_map/behave01'
output_file = '/data/social_map/list.txt'
model_folder = "/home/prota/Desktop/openpose/models"
filelist = glob.glob(join(source_dir, '*.png'))
filelist.sort()
### OPENPOSE PARAMS
params = dict()
params["model_folder"] = model_folder
params["face"] = False
params["hand"] = False
params["num_gpu"] = 1
params["num_gpu_start"] = 0
opWrapper = op.WrapperPython()
opWrapper.configure(params)
opWrapper.start()
with open(output_file, 'w') as of:
for i, f in enumerate(tqdm.tqdm(filelist, desc='Files to check if skeleton is present:')):
im = cv2.imread(f)
im_size = (im.shape[1], im.shape[0])
datum = op.Datum()
datum.cvInputData = im
opWrapper.emplaceAndPop([datum])
skeletal_coordinates = np.around(np.array(datum.poseKeypoints).tolist(), 2).tolist()
d = dict()
try:
d['file'] = f
d['n_skeletons'] = len(skeletal_coordinates)
# of.write('{} {} '.format(f, len(skeletal_coordinates)))
pos = list()
for ske in skeletal_coordinates:
heels = list()
lh = np.asarray(ske[21][:2], dtype=np.int32)
if lh.any() != 0:
heels.append(lh)
rh = np.asarray(ske[24][:2], dtype=np.int32)
if rh.any() != 0:
heels.append(rh)
av = [a.tolist() for a in heels]
if len(av) > 0:
av = np.mean(av, axis=0, dtype=np.int32)
# im = cv2.circle(im, av, 5, (255, 0, 0))
# cv2.imshow('image', im)
# cv2.waitKey(0)
pos.append(av.tolist())
d['skels'] = pos
except TypeError:
continue
j = json.dumps(d)
of.write(j + '\n')
of.flush()
| [
11748,
12972,
9654,
3455,
355,
1034,
198,
11748,
15095,
198,
6738,
28686,
13,
6978,
1330,
4654,
198,
11748,
269,
85,
17,
198,
11748,
299,
32152,
355,
45941,
198,
11748,
256,
80,
36020,
198,
11748,
33918,
198,
6738,
33918,
1330,
19449,
2... | 1.919355 | 1,054 |
import pandas as pd
from loguru import logger
import datetime
from trading_calendars import get_calendar, TradingCalendar
from typing import Optional, Dict, Any, List, Tuple, Optional
from ta_scanner.data.base_connector import DataFetcherBase
from ta_scanner.data.constants import (
TimezoneNames,
WhatToShow,
Exchange,
Calendar,
Currency,
)
| [
11748,
19798,
292,
355,
279,
67,
198,
6738,
2604,
14717,
1330,
49706,
198,
198,
11748,
4818,
8079,
198,
6738,
7313,
62,
9948,
44942,
1330,
651,
62,
9948,
9239,
11,
25469,
9771,
9239,
198,
6738,
19720,
1330,
32233,
11,
360,
713,
11,
43... | 3.067227 | 119 |
import datetime
import os
import psutil
import shutil
import subprocess
import wx
import wx
if __name__ == "__main__":
app = wx.App()
backup = Backup(os.path.expanduser("~/obs-backups"), os.path.expanduser("~/.config/obs-studio"))
obs = Obs()
frame = ObsBackupFrame(title="OBS Backup Tool", backup=backup, obs=obs)
frame.Show()
app.MainLoop()
| [
11748,
4818,
8079,
198,
11748,
28686,
198,
11748,
26692,
22602,
198,
11748,
4423,
346,
198,
11748,
850,
14681,
198,
11748,
266,
87,
198,
198,
11748,
266,
87,
198,
198,
361,
11593,
3672,
834,
6624,
366,
834,
12417,
834,
1298,
198,
220,
... | 2.587413 | 143 |
# data_root = 'C:/Users/parisa/Desktop/csYRPCai/project/train/'
# import numpy as np
# import pandas as pd
# train_photos = pd.read_csv(data_root+'train_photo_to_biz_ids.csv')
# train_photo_to_biz = pd.read_csv(data_root+'train_photo_to_biz_ids.csv', index_col='photo_id')
# train_df = pd.read_csv(data_root+"train_biz_HOGfeatures.csv")
# test_df = pd.read_csv(data_root+"test_biz_HOGfeatures.csv")
# y_train = train_df['label'].values
# X_train = train_df['feature vector'].values
# y_test = test_df['label'].values
# X_test = test_df['feature vector'].values
# def convert_label_to_array(str_label):
# str_label = str_label[1:-1]
# str_label = str_label.split(',')
# return [int(x) for x in str_label if len(x)>0]
# def convert_feature_to_vector(str_feature):
# str_feature = str_feature[1:-1]
# str_feature = str_feature.split(',')
# return [float(x) for x in str_feature]
# y_train = np.array([convert_label_to_array(y) for y in train_df['label']])
# X_train = np.array([convert_feature_to_vector(x) for x in train_df['feature vector']])
# y_test = np.array([convert_label_to_array(y) for y in test_df['label']])
# X_test = np.array([convert_feature_to_vector(x) for x in test_df['feature vector']])
# print "X_train: ", X_train.shape
# print "y_train: ", y_train.shape
# print "X_test: ", X_test.shape
# print "y_test: ", y_test.shape
# print "train_df:"
# train_df[0:5]
# from sklearn import svm, datasets
# from sklearn.cross_validation import train_test_split
# from sklearn.preprocessing import label_binarize
# from sklearn.multiclass import OneVsRestClassifier
# from sklearn.preprocessing import MultiLabelBinarizer
# import time
# t=time.time()
# mlb = MultiLabelBinarizer()
# y_train= mlb.fit_transform(y_train) #Convert list of labels to binary matrix
# y_test= mlb.fit_transform(y_test) #Convert list of labels to binary matrix
# random_state = np.random.RandomState(0)
# classifier = OneVsRestClassifier(svm.SVC(kernel='linear', probability=True))
# classifier.fit(X_train, y_train)
# y_ppredict = classifier.predict(X_test)
# print "Time passed: ", "{0:.1f}".format(time.time()-t), "sec"
# print "Samples of predicted labels (in binary matrix):\n", y_ppredict[0:3]
# print "\nSamples of predicted labels:\n", mlb.inverse_transform(y_ppredict[0:3])
# statistics = pd.DataFrame(columns=[ "attribuite "+str(i) for i in range(9)]+['num_biz'], index = ["biz count", "biz ratio"])
# statistics.loc["biz count"] = np.append(np.sum(y_ppredict, axis=0), len(y_ppredict))
# pd.options.display.float_format = '{:.0f}%'.format
# statistics.loc["biz ratio"] = statistics.loc["biz count"]*100/len(y_ppredict)
# statistics
# from sklearn.metrics import f1_score
# print "F1 score: ", f1_score(y_test, y_ppredict, average='micro')
# print "Individual Class F1 score: ", f1_score(y_test, y_ppredict, average=None)
data_root = 'C:/Users/Yixin/Desktop/cs231a/project/train/'
import numpy as np
import pandas as pd
import time
from sklearn import svm, datasets
from sklearn.cross_validation import train_test_split
from sklearn.preprocessing import label_binarize
from sklearn.multiclass import OneVsRestClassifier
from sklearn.multiclass import OneVsOneClassifier
from sklearn.preprocessing import MultiLabelBinarizer
train_photos = pd.read_csv(data_root+'train_photo_to_biz_ids.csv')
train_photo_to_biz = pd.read_csv(data_root+'train_photo_to_biz_ids.csv', index_col='photo_id')
train_df = pd.read_csv(data_root+"train_biz_HOGfeatures.csv")
y_train = train_df['label'].values
X_train = train_df['feature vector'].values
y_train = np.array([convert_label_to_array(y) for y in train_df['label']])
X_train = np.array([convert_feature_to_vector(x) for x in train_df['feature vector']])
print "X_train: ", X_train.shape
print "y_train: ", y_train.shape
print "train_df:"
train_df[0:5]
t=time.time()
mlb = MultiLabelBinarizer()
y_train= mlb.fit_transform(y_train) #Convert list of labels to binary matrix
random_state = np.random.RandomState(0)
X_ptrain, X_ptest, y_ptrain, y_ptest = train_test_split(X_train, y_train, test_size=.2,random_state=random_state)
classifier = OneVsRestClassifier(svm.SVC(C=0.125, kernel='linear', probability=True))
#classifier = OneVsRestClassifier(svm.SVC(kernel='rbf', probability=True))
classifier.fit(X_ptrain, y_ptrain)
y_ppredict = classifier.predict(X_ptest)
print "Time passed: ", "{0:.1f}".format(time.time()-t), "sec"
print "Samples of predicted labels (in binary matrix):\n", y_ppredict[0:3]
print "\nSamples of predicted labels:\n", mlb.inverse_transform(y_ppredict[0:3])
statistics = pd.DataFrame(columns=[ "attribuite "+str(i) for i in range(9)]+['num_biz'], index = ["biz count", "biz ratio"])
statistics.loc["biz count"] = np.append(np.sum(y_ppredict, axis=0), len(y_ppredict))
pd.options.display.float_format = '{:.0f}%'.format
statistics.loc["biz ratio"] = statistics.loc["biz count"]*100/len(y_ppredict)
statistics
from sklearn.metrics import f1_score
print "F1 score: ", f1_score(y_ptest, y_ppredict, average='micro')
print "Individual Class F1 score: ", f1_score(y_ptest, y_ppredict, average=None)
| [
198,
2,
1366,
62,
15763,
796,
705,
34,
14079,
14490,
14,
1845,
9160,
14,
36881,
14,
6359,
38162,
5662,
1872,
14,
16302,
14,
27432,
14,
6,
198,
220,
198,
2,
1330,
299,
32152,
355,
45941,
198,
2,
1330,
19798,
292,
355,
279,
67,
220,... | 2.590337 | 1,987 |
# -*- coding:utf-8 -*-
'''
Created on 2021/12/11
@author: WHT
'''
import unittest
import datafactory.futures.domestic as dom
if __name__ == '__main__':
unittest.main()
| [
2,
532,
9,
12,
19617,
25,
40477,
12,
23,
532,
9,
12,
198,
7061,
6,
198,
41972,
319,
33448,
14,
1065,
14,
1157,
198,
31,
9800,
25,
370,
6535,
198,
7061,
6,
198,
11748,
555,
715,
395,
198,
11748,
1366,
69,
9548,
13,
69,
315,
942... | 2.342105 | 76 |
import matplotlib.pyplot as plt
import numpy as np
idade = ('0 a 4', '5 a 9', '10 a 14', '15 a 19',
'20 a 24', '25 a 29', '30 a 34', '35 a 39',
'40 a 44', '45 a 49', '50 a 54', '55 a 59',
'60 a 64', '65 a 69', '70 a 74', '75 a 79',
'80 a 84', '85 a 89', '90 a 94', '95 a 99',
'100 +')
x = np.arange(len(idade))
masc = np.array([7016987, 7624144, 8725413, 8558868, 8630229,
8460995, 7717658, 6766664, 6320568, 5692014,
4834995, 3902344, 3041035, 2224065, 1667372,
1090517, 668623, 310759, 114964, 31529, 7247])
fem = np.array([6779171, 7345231, 8441348, 8432004, 8614963,
8643419, 8026854, 7121915, 6688796, 6141338,
5305407, 4373877, 3468085, 2616745, 2074264,
1472930, 998349, 508724, 211594, 66806, 16989])
fig, ax = plt.subplots(figsize=(16,8))
ax.bar(idade, fem, align='center', color = 'pink')
ax.set_xticks(x)
ax.set_xticklabels(idade, fontsize=8)
ax.set_xlabel('Faixa etaria (em anos)')
ax.set_ylabel('Populacao (em valores absolutos)')
ax.set_title('Populacao feminina por grupo de idade em 2010 (IBGE)')
plt.show() | [
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
198,
11748,
299,
32152,
355,
45941,
198,
312,
671,
796,
19203,
15,
257,
604,
3256,
705,
20,
257,
860,
3256,
705,
940,
257,
1478,
3256,
705,
1314,
257,
678,
3256,
198,
220,
220... | 1.918301 | 612 |
import operator
import itertools
import collections
from .utils import OrderedSet
#
# Extra processors
#
union_counter = itertools.count()
preprocessors = {
'union': preprocess_union,
#'intersection': preprocess_intersect
}
#
# Simplifiers
#
SIMPLE_OPS = ('Concat', 'SignExt', 'ZeroExt')
# oh gods
simplifiers = {
'Reverse': boolean_reverse_simplifier,
'And': boolean_and_simplifier,
'Or': boolean_or_simplifier,
'Not': boolean_not_simplifier,
'Extract': extract_simplifier,
'Concat': concat_simplifier,
'If': if_simplifier,
'__lshift__': lshift_simplifier,
'__rshift__': rshift_simplifier,
'__eq__': eq_simplifier,
'__ne__': ne_simplifier,
'__or__': bitwise_or_simplifier,
'__and__': bitwise_and_simplifier,
'__xor__': bitwise_xor_simplifier,
'__add__': bitwise_add_simplifier,
'__sub__': bitwise_sub_simplifier,
'__mul__': bitwise_mul_simplifier,
'ZeroExt': zeroext_simplifier,
'SignExt': signext_simplifier,
'fpToIEEEBV': fptobv_simplifier,
'fpToFP': fptofp_simplifier,
}
#
# Length checkers
#
#
# Operation lists
#
expression_arithmetic_operations = {
# arithmetic
'__add__', '__radd__',
'__div__', '__rdiv__',
'__truediv__', '__rtruediv__',
'__floordiv__', '__rfloordiv__',
'__mul__', '__rmul__',
'__sub__', '__rsub__',
'__pow__', '__rpow__',
'__mod__', '__rmod__',
'__divmod__', '__rdivmod__',
'SDiv', 'SMod',
'__neg__',
'__pos__',
'__abs__',
}
bin_ops = {
'__add__', '__radd__',
'__mul__', '__rmul__',
'__or__', '__ror__',
'__and__', '__rand__',
'__xor__', '__rxor__',
}
expression_comparator_operations = {
# comparisons
'__eq__',
'__ne__',
'__ge__', '__le__',
'__gt__', '__lt__',
}
# expression_comparator_operations = {
# 'Eq',
# 'Ne',
# 'Ge', 'Le',
# 'Gt', 'Lt',
# }
expression_bitwise_operations = {
# bitwise
'__invert__',
'__or__', '__ror__',
'__and__', '__rand__',
'__xor__', '__rxor__',
'__lshift__', '__rlshift__',
'__rshift__', '__rrshift__',
}
expression_set_operations = {
# Set operations
'union',
'intersection',
'widen'
}
expression_operations = expression_arithmetic_operations | expression_comparator_operations | expression_bitwise_operations | expression_set_operations
backend_comparator_operations = {
'SGE', 'SLE', 'SGT', 'SLT', 'UGE', 'ULE', 'UGT', 'ULT',
}
backend_bitwise_operations = {
'RotateLeft', 'RotateRight', 'LShR', 'Reverse',
}
backend_boolean_operations = {
'And', 'Or', 'Not'
}
backend_bitmod_operations = {
'Concat', 'Extract', 'SignExt', 'ZeroExt'
}
backend_creation_operations = {
'BoolV', 'BVV', 'FPV'
}
backend_symbol_creation_operations = {
'BoolS', 'BVS', 'FPS'
}
backend_vsa_creation_operations = {
'TopStridedInterval', 'StridedInterval', 'ValueSet', 'AbstractLocation'
}
backend_other_operations = { 'If' }
backend_arithmetic_operations = {'SDiv', 'SMod'}
backend_operations = backend_comparator_operations | backend_bitwise_operations | backend_boolean_operations | \
backend_bitmod_operations | backend_creation_operations | backend_other_operations | backend_arithmetic_operations
backend_operations_vsa_compliant = backend_bitwise_operations | backend_comparator_operations | backend_boolean_operations | backend_bitmod_operations
backend_operations_all = backend_operations | backend_operations_vsa_compliant | backend_vsa_creation_operations
backend_fp_cmp_operations = {
'fpLT', 'fpLEQ', 'fpGT', 'fpGEQ', 'fpEQ',
}
backend_fp_operations = {
'FPS', 'fpToFP', 'fpToIEEEBV', 'fpFP', 'fpToSBV', 'fpToUBV',
'fpNeg', 'fpSub', 'fpAdd', 'fpMul', 'fpDiv', 'fpAbs'
} | backend_fp_cmp_operations
opposites = {
'__add__': '__radd__', '__radd__': '__add__',
'__div__': '__rdiv__', '__rdiv__': '__div__',
'__truediv__': '__rtruediv__', '__rtruediv__': '__truediv__',
'__floordiv__': '__rfloordiv__', '__rfloordiv__': '__floordiv__',
'__mul__': '__rmul__', '__rmul__': '__mul__',
'__sub__': '__rsub__', '__rsub__': '__sub__',
'__pow__': '__rpow__', '__rpow__': '__pow__',
'__mod__': '__rmod__', '__rmod__': '__mod__',
'__divmod__': '__rdivmod__', '__rdivmod__': '__divmod__',
'__eq__': '__eq__',
'__ne__': '__ne__',
'__ge__': '__le__', '__le__': '__ge__',
'__gt__': '__lt__', '__lt__': '__gt__',
'ULT': 'UGT', 'UGT': 'ULT',
'ULE': 'UGE', 'UGE': 'ULE',
'SLT': 'SGT', 'SGT': 'SLT',
'SLE': 'SGE', 'SGE': 'SLE',
#'__neg__':
#'__pos__':
#'__abs__':
#'__invert__':
'__or__': '__ror__', '__ror__': '__or__',
'__and__': '__rand__', '__rand__': '__and__',
'__xor__': '__rxor__', '__rxor__': '__xor__',
'__lshift__': '__rlshift__', '__rlshift__': '__lshift__',
'__rshift__': '__rrshift__', '__rrshift__': '__rshift__',
}
reversed_ops = {
'__radd__': '__add__',
'__rand__': '__and__',
'__rdiv__': '__div__',
'__rdivmod__': '__divmod__',
'__rfloordiv__': '__floordiv__',
'__rlshift__': '__lshift__',
'__rmod__': '__mod__',
'__rmul__': '__mul__',
'__ror__': '__or__',
'__rpow__': '__pow__',
'__rrshift__': '__rshift__',
'__rsub__': '__sub__',
'__rtruediv__': '__truediv__',
'__rxor__': '__xor__'
}
inverse_operations = {
'__eq__': '__ne__',
'__ne__': '__eq__',
'__gt__': '__le__',
'__lt__': '__ge__',
'__ge__': '__lt__',
'__le__': '__gt__',
'ULT': 'UGE', 'UGE': 'ULT',
'UGT': 'ULE', 'ULE': 'UGT',
'SLT': 'SGE', 'SGE': 'SLT',
'SLE': 'SGT', 'SGT': 'SLE',
}
length_same_operations = expression_arithmetic_operations | backend_bitwise_operations | expression_bitwise_operations | backend_other_operations | expression_set_operations | {'Reversed'}
length_none_operations = backend_comparator_operations | expression_comparator_operations | backend_boolean_operations | backend_fp_cmp_operations
length_change_operations = backend_bitmod_operations
length_new_operations = backend_creation_operations
leaf_operations = backend_symbol_creation_operations | backend_creation_operations
leaf_operations_concrete = backend_creation_operations
leaf_operations_symbolic = backend_symbol_creation_operations
#
# Reversibility
#
not_invertible = {'Identical', 'union'}
reverse_distributable = { 'widen', 'union', 'intersection',
'__invert__', '__or__', '__ror__', '__and__', '__rand__', '__xor__', '__rxor__',
}
extract_distributable = {
'__and__', '__rand__',
'__or__', '__ror__',
'__xor__', '__rxor__',
}
infix = {
'__add__': '+',
'__sub__': '-',
'__mul__': '*',
'__div__': '/',
'__floordiv__': '/',
# '__truediv__': 'does this come up?',
'__pow__': '**',
'__mod__': '%',
# '__divmod__': "don't think this is used either",
'__eq__': '==',
'__ne__': '!=',
'__ge__': '>=',
'__le__': '<=',
'__gt__': '>',
'__lt__': '<',
'UGE': '>=',
'ULE': '<=',
'UGT': '>',
'ULT': '<',
'SGE': '>=s',
'SLE': '<=s',
'SGT': '>s',
'SLT': '<s',
'SDiv': "/s",
'SMod': "%s",
'__or__': '|',
'__and__': '&',
'__xor__': '^',
'__lshift__': '<<',
'__rshift__': '>>',
'And': '&&',
'Or': '||',
'Concat': '..',
}
commutative_operations = { '__and__', '__or__', '__xor__', '__add__', '__mul__', 'And', 'Or', 'Xor', }
from .errors import ClaripyOperationError, ClaripyTypeError
from . import ast
from . import fp
from .backend_manager import backends
| [
11748,
10088,
198,
11748,
340,
861,
10141,
198,
11748,
17268,
198,
198,
6738,
764,
26791,
1330,
14230,
1068,
7248,
198,
198,
2,
198,
2,
17221,
20399,
198,
2,
198,
198,
24592,
62,
24588,
796,
340,
861,
10141,
13,
9127,
3419,
198,
198,
... | 2.159507 | 3,492 |
#!/usr/bin/env python
import unittest
import catkin_boost_python_test as cbpt
from nose import SkipTest
if __name__ == '__main__':
unittest.main()
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
198,
11748,
555,
715,
395,
198,
11748,
3797,
5116,
62,
39521,
62,
29412,
62,
9288,
355,
269,
65,
457,
198,
198,
6738,
9686,
1330,
32214,
14402,
628,
198,
198,
361,
11593,
3672,
834,
... | 2.644068 | 59 |
#!/usr/bin/env python
"""Exercises using Netmiko"""
from __future__ import print_function
from getpass import getpass
from netmiko import ConnectHandler
def main():
"""Exercises using Netmiko"""
passwd = getpass("Enter password: ")
device = {
"device_type": "cisco_nxos",
"host": "nxos1.lasthop.io",
"username": "pyclass",
"password": passwd,
}
cfg_commands = [
"ntp server 130.126.24.24 use-vrf management",
"ntp server 152.2.21.1 use-vrf management",
]
for a_device in [device]:
with ConnectHandler(**a_device) as net_connect:
print("Current Prompt: " + net_connect.find_prompt())
output = net_connect.send_config_set(cfg_commands)
save_output = net_connect.save_config()
print("\nConfiguring...")
print("#" * 80)
print("\nSaving config to startup")
print(output)
print(save_output)
print("#" * 80)
print()
if __name__ == "__main__":
main()
| [
2,
48443,
14629,
14,
8800,
14,
24330,
21015,
198,
37811,
3109,
2798,
2696,
1262,
3433,
76,
12125,
37811,
198,
6738,
11593,
37443,
834,
1330,
3601,
62,
8818,
198,
6738,
651,
6603,
1330,
651,
6603,
198,
6738,
2010,
76,
12125,
1330,
8113,
... | 2.241304 | 460 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-07-19 18:35
from __future__ import unicode_literals
from django.db import migrations, models
| [
2,
532,
9,
12,
19617,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
2,
2980,
515,
416,
37770,
352,
13,
24,
13,
21,
319,
1584,
12,
2998,
12,
1129,
1248,
25,
2327,
198,
6738,
11593,
37443,
834,
1330,
28000,
1098,
62,
17201,
874,
198,
1... | 2.719298 | 57 |
from algoritmia.problems.regularexpressions.compiler import RegularExpressionParser, ThompsonAutomatonBuilder
from algoritmia.schemes.backtracking import BacktrackingEnumerator
from algoritmia.statespace import IForwardStateSpace
| [
6738,
435,
7053,
270,
20730,
13,
1676,
22143,
13,
2301,
377,
533,
87,
8439,
507,
13,
5589,
5329,
1330,
23603,
16870,
2234,
46677,
11,
11654,
38062,
13951,
32875,
201,
198,
6738,
435,
7053,
270,
20730,
13,
1416,
4411,
274,
13,
1891,
36... | 3.24 | 75 |
from Rules.ASingleRule import ASingleRule
from Utils import ColorUtil
from RectUtils import RectUtil
from Rules.TextValidator import TextValidator
from Utils import Constants
from Utils import GroupUtil
from Utils import TextUtils
from string import printable
# @Override
# Contain: all spaces, all invisible chars, or all non-ascii chars
# *
# * @param text
# * @return
# */
| [
6738,
14252,
13,
1921,
17697,
31929,
1330,
7054,
17697,
31929,
198,
6738,
7273,
4487,
1330,
5315,
18274,
346,
198,
6738,
48599,
18274,
4487,
1330,
48599,
18274,
346,
198,
6738,
14252,
13,
8206,
47139,
1352,
1330,
8255,
47139,
1352,
198,
6... | 2.914894 | 141 |
# -*- encoding: utf-8 -*-
'''
Created on: 2015
Author: Mizael Martinez
'''
import sys, os,threading, time
sys.path.append("../view")
from inicio import Ui_inicio
from PyQt4 import QtCore, QtGui
| [
2,
532,
9,
12,
21004,
25,
3384,
69,
12,
23,
532,
9,
12,
198,
7061,
6,
198,
220,
220,
198,
220,
15622,
319,
25,
1853,
198,
220,
6434,
25,
29975,
3010,
20741,
198,
198,
7061,
6,
198,
11748,
25064,
11,
28686,
11,
16663,
278,
11,
... | 2.512195 | 82 |
import phosphate
from colorama import Fore
from codecs import encode as c_encode
reverse_shuffle = """
def f(s:str):
if len(s) % 4 == 0:
l = int(len(s) / 2)
s = s[l:] + s[:l]
l = int(l / 2)
s = s[l:] + s[:l]
l = l * 2
s = s[l:] + s[:l]
return s
else:
return False"""
deobfuscate_function = """
h = p + o
def b(s:str):
s = s.replace(" ", "")
c = 1
l = ''
n = ''
for i in s:
l += i
if c == 8:
n += chr(int(l, 2))
c = 0
l = ''
c += 1
return n
def f(s:str):
if len(s) % 4 == 0:
l = int(len(s) / 2)
s = s[l:] + s[:l]
l = int(l / 2)
s = s[l:] + s[:l]
l = l * 2
s = s[l:] + s[:l]
return s
else:
return False
def r(s:str):
s = s[::-1]
s = f(s).replace(".", "")
s = s.replace("]", "0 ").replace("[", "& ").replace("=", "1 ").replace("-", "+ ").replace("_", "00").replace("&", "10").replace("+", "11")
s = b(s)
s = f(s)
n = ""
for i in s:
if i in h + "=":
n += i
return t(n.encode("utf8")).decode()
"""
deobfuscate_function_imports = """from base64 import b64decode as t
from string import digits as p
from string import ascii_letters as o"""
obfuscate() | [
11748,
46926,
201,
198,
6738,
3124,
1689,
1330,
4558,
201,
198,
6738,
40481,
82,
1330,
37773,
355,
269,
62,
268,
8189,
201,
198,
201,
198,
50188,
62,
1477,
18137,
796,
37227,
201,
198,
4299,
277,
7,
82,
25,
2536,
2599,
201,
198,
220... | 1.75063 | 794 |
from __future__ import print_function
from django.contrib.auth.models import User
from django.core.management import BaseCommand, CommandError
from oath.totp import accept_totp
from django.conf import settings
TF_FORWARD_DRIFT = getattr(settings,'TF_FORWARD_DRIFT', 1)
TF_BACKWARD_DRIFT = getattr(settings,'TF_BACKWARD_DRIFT', 1)
| [
6738,
11593,
37443,
834,
1330,
3601,
62,
8818,
198,
6738,
42625,
14208,
13,
3642,
822,
13,
18439,
13,
27530,
1330,
11787,
198,
6738,
42625,
14208,
13,
7295,
13,
27604,
1330,
7308,
21575,
11,
9455,
12331,
198,
6738,
17865,
13,
83,
313,
... | 3.132075 | 106 |
#!/usr/bin/python
"""Utility CLI Application
This CLI is helpful for managing CODEX metadata
"""
import fire
# RegionNumber TileX TileY Xposition Yposition
# 1 1 1 0 0
# 1 1 2 0 1008
# 1 1 3 0 2016
# 1 1 4 0 3024
# 1 1 5 0 4032
# 1 2 1 1344 0
# 1 2 2 1344 1008
# 1 2 3 1344 2016
# 1 2 4 1344 3024
if __name__ == '__main__':
fire.Fire(CodexUtils)
| [
2,
48443,
14629,
14,
8800,
14,
29412,
198,
37811,
18274,
879,
43749,
15678,
198,
198,
1212,
43749,
318,
7613,
329,
11149,
327,
3727,
6369,
20150,
198,
37811,
198,
11748,
2046,
198,
198,
2,
17718,
15057,
197,
35103,
55,
197,
35103,
56,
... | 1.950276 | 181 |
# Copyright 2021 Fedlearn authors.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from core.proto.transmission_pb2_grpc import TransmissionServicer, add_TransmissionServicer_to_server
from concurrent import futures
import grpc
# global variables
_MAX_MESSAGE_LENGTH = 1 << 30
def serve(grpc_servicer: TransmissionServicer) -> None:
""" gRPC server.
Open gRPC service as a server, and monitor gRPC request and return gRPC response.
This function should be called by the federated clients.
Parameters
----------
grpc_servicer : class derived from transmission_pb2_grpc.TransmissionServicer
request message created by alg in master terminal.
Returns
-------
None
This function has no return variables.
"""
options = [
('grpc.max_send_message_length', _MAX_MESSAGE_LENGTH),
('grpc.max_receive_message_length', _MAX_MESSAGE_LENGTH),
]
server = grpc.server(futures.ThreadPoolExecutor(max_workers=10), options=options)
add_TransmissionServicer_to_server(grpc_servicer, server)
server.add_insecure_port("%s:%s" % (grpc_servicer.machine_info.ip, grpc_servicer.machine_info.port))
# TODO: using debug level (DebugOutput), not print, to display information
print("---------------")
print("starting %s:%s" % (grpc_servicer.machine_info.ip, grpc_servicer.machine_info.port))
server.start()
server.wait_for_termination()
| [
2,
15069,
33448,
10169,
35720,
7035,
13,
198,
2,
49962,
739,
262,
24843,
13789,
11,
10628,
362,
13,
15,
357,
1169,
366,
34156,
15341,
198,
2,
345,
743,
407,
779,
428,
2393,
2845,
287,
11846,
351,
262,
13789,
13,
198,
2,
921,
743,
... | 3.066986 | 627 |
# pip install importlib_resources
import torch
import torch.nn.functional as F
import torchvision.models as models
from utils import *
from cam.scorecam import *
# alexnet
alexnet = models.alexnet(pretrained=True).eval()
alexnet_model_dict = dict(type='alexnet', arch=alexnet, layer_name='features_10',input_size=(224, 224))
alexnet_scorecam = ScoreCAM(alexnet_model_dict)
input_image = load_image('images/'+'ILSVRC2012_val_00002193.JPEG')
input_ = apply_transforms(input_image)
if torch.cuda.is_available():
input_ = input_.cuda()
predicted_class = alexnet(input_).max(1)[-1]
scorecam_map = alexnet_scorecam(input_)
basic_visualize(input_.cpu(), scorecam_map.type(torch.FloatTensor).cpu(),save_path='alexnet.png')
# vgg
vgg = models.vgg16(pretrained=True).eval()
vgg_model_dict = dict(type='vgg16', arch=vgg, layer_name='features_29',input_size=(224, 224))
vgg_scorecam = ScoreCAM(vgg_model_dict)
input_image = load_image('images/'+'ILSVRC2012_val_00002193.JPEG')
input_ = apply_transforms(input_image)
if torch.cuda.is_available():
input_ = input_.cuda()
predicted_class = vgg(input_).max(1)[-1]
scorecam_map = vgg_scorecam(input_)
basic_visualize(input_.cpu(), scorecam_map.type(torch.FloatTensor).cpu(),save_path='vgg.png')
# resnet
resnet = models.resnet18(pretrained=True).eval()
resnet_model_dict = dict(type='resnet18', arch=resnet, layer_name='layer4',input_size=(224, 224))
resnet_scorecam = ScoreCAM(resnet_model_dict)
input_image = load_image('images/'+'ILSVRC2012_val_00002193.JPEG')
input_ = apply_transforms(input_image)
if torch.cuda.is_available():
input_ = input_.cuda()
predicted_class = resnet(input_).max(1)[-1]
scorecam_map = resnet_scorecam(input_)
basic_visualize(input_.cpu(), scorecam_map.type(torch.FloatTensor).cpu(),save_path='resnet.png') | [
2,
7347,
2721,
1330,
8019,
62,
37540,
198,
198,
11748,
28034,
198,
11748,
28034,
13,
20471,
13,
45124,
355,
376,
198,
11748,
28034,
10178,
13,
27530,
355,
4981,
198,
198,
6738,
3384,
4487,
1330,
1635,
198,
6738,
12172,
13,
26675,
20991,... | 2.621145 | 681 |
# coding=utf-8
"""
desc: Python 数据类型列表
Version: 0.1
Author: huijz
Date: 2020-08-05
"""
list = ['runoob', 786, 2.23, 'john', 70.2]
tinyList = [123, 'john']
print list # 输出完整列表
print list[0] # 输出列表的第一个元素
print list[1:3] # 输出第二个至第三个元素
print list[2:] # 输出从第三个开始至列表末尾的所有元素
print tinyList * 2 # 输出列表两次
print list + tinyList # 打印组合的列表
tuple = ('runoob', 786, 2.23, 'john', 70.2)
tinytuple = (123, 'john')
print tuple # 输出完整元组
print tuple[0] # 输出元组的第一个元素
print tuple[1:3] # 输出第二个至第四个(不包含)的元素
print tuple[2:] # 输出从第三个开始至列表末尾的所有元素
print tinytuple * 2 # 输出元组两次
print tuple + tinytuple # 打印组合的元组
tuple = ('runoob', 786, 2.23, 'john', 70.2)
list = ['runoob', 786, 2.23, 'john', 70.2]
tuple[2] = 1000 # 元组中是非法应用
list[2] = 1000 # 列表中是合法应用
dict = {}
dict['one'] = "This is one"
dict[2] = "This is two"
tinydict = {'name': 'runoob', 'code': 6734, 'dept': 'sales'}
print dict['one'] # 输出键为'one' 的值
print dict[2] # 输出键为 2 的值
print tinydict # 输出完整的字典
print tinydict.keys() # 输出所有键
print tinydict.values() # 输出所有值
| [
2,
19617,
28,
40477,
12,
23,
198,
37811,
198,
20147,
25,
11361,
10545,
243,
108,
162,
235,
106,
163,
109,
119,
161,
252,
233,
26344,
245,
26193,
101,
198,
14815,
25,
657,
13,
16,
198,
13838,
25,
289,
84,
2926,
89,
198,
10430,
25,
... | 1.350598 | 753 |
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
# vi: set ft=python sts=4 ts=4 sw=4 et:
"""
Change directory to provide relative paths for doctests
>>> import os
>>> filepath = os.path.dirname( os.path.realpath( __file__ ) )
>>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data'))
>>> os.chdir(datadir)
"""
from nipype.interfaces.base import (BaseInterface, BaseInterfaceInputSpec, traits,
File, TraitedSpec, Directory, isdefined)
import os, os.path as op
import numpy as np
import nibabel as nb
import networkx as nx
import shutil
from nipype.utils.misc import package_check
import warnings
from ... import logging
iflogger = logging.getLogger('interface')
have_cmp = True
try:
package_check('cmp')
except Exception, e:
have_cmp = False
warnings.warn('cmp not installed')
else:
import cmp
from cmp.util import runCmd
def create_roi(subject_id, subjects_dir, fs_dir, parcellation_name):
""" Creates the ROI_%s.nii.gz files using the given parcellation information
from networks. Iteratively create volume. """
iflogger.info("Create the ROIs:")
output_dir = op.abspath(op.curdir)
fs_dir = op.join(subjects_dir, subject_id)
cmp_config = cmp.configuration.PipelineConfiguration()
cmp_config.parcellation_scheme = "Lausanne2008"
log = cmp_config.get_logger()
parval = cmp_config._get_lausanne_parcellation('Lausanne2008')[parcellation_name]
pgpath = parval['node_information_graphml']
aseg = nb.load(op.join(fs_dir, 'mri', 'aseg.nii.gz'))
asegd = aseg.get_data()
iflogger.info("Working on parcellation: ")
iflogger.info(cmp_config._get_lausanne_parcellation('Lausanne2008')[parcellation_name])
iflogger.info("========================")
pg = nx.read_graphml(pgpath)
# each node represents a brain region
# create a big 256^3 volume for storage of all ROIs
rois = np.zeros( (256, 256, 256), dtype=np.int16 )
count = 0
for brk, brv in pg.nodes_iter(data=True):
count = count + 1
iflogger.info(brv)
iflogger.info(brk)
if brv['dn_hemisphere'] == 'left':
hemi = 'lh'
elif brv['dn_hemisphere'] == 'right':
hemi = 'rh'
if brv['dn_region'] == 'subcortical':
iflogger.info(brv)
iflogger.info("---------------------")
iflogger.info("Work on brain region: %s" % (brv['dn_region']))
iflogger.info("Freesurfer Name: %s" % brv['dn_fsname'])
iflogger.info("Region %s of %s " % (count, pg.number_of_nodes()))
iflogger.info("---------------------")
# if it is subcortical, retrieve roi from aseg
idx = np.where(asegd == int(brv['dn_fs_aseg_val']))
rois[idx] = int(brv['dn_correspondence_id'])
elif brv['dn_region'] == 'cortical':
iflogger.info(brv)
iflogger.info("---------------------")
iflogger.info("Work on brain region: %s" % (brv['dn_region']))
iflogger.info("Freesurfer Name: %s" % brv['dn_fsname'])
iflogger.info("Region %s of %s " % (count, pg.number_of_nodes()))
iflogger.info("---------------------")
labelpath = op.join(output_dir, parval['fs_label_subdir_name'] % hemi)
# construct .label file name
fname = '%s.%s.label' % (hemi, brv['dn_fsname'])
# execute fs mri_label2vol to generate volume roi from the label file
# store it in temporary file to be overwritten for each region
mri_cmd = 'mri_label2vol --label "%s" --temp "%s" --o "%s" --identity' % (op.join(labelpath, fname),
op.join(fs_dir, 'mri', 'orig.mgz'), op.join(output_dir, 'tmp.nii.gz'))
runCmd( mri_cmd, log )
tmp = nb.load(op.join(output_dir, 'tmp.nii.gz'))
tmpd = tmp.get_data()
# find voxel and set them to intensityvalue in rois
idx = np.where(tmpd == 1)
rois[idx] = int(brv['dn_correspondence_id'])
# store volume eg in ROI_scale33.nii.gz
out_roi = op.join(output_dir, 'ROI_%s.nii.gz' % parcellation_name)
# update the header
hdr = aseg.get_header()
hdr2 = hdr.copy()
hdr2.set_data_dtype(np.uint16)
log.info("Save output image to %s" % out_roi)
img = nb.Nifti1Image(rois, aseg.get_affine(), hdr2)
nb.save(img, out_roi)
iflogger.info("[ DONE ]")
class Parcellate(BaseInterface):
"""Subdivides segmented ROI file into smaller subregions
This interface implements the same procedure as in the ConnectomeMapper's
parcellation stage (cmp/stages/parcellation/maskcreation.py) for a single
parcellation scheme (e.g. 'scale500').
Example
-------
>>> import nipype.interfaces.cmtk as cmtk
>>> parcellate = cmtk.Parcellate()
>>> parcellate.inputs.freesurfer_dir = '.'
>>> parcellate.inputs.subjects_dir = '.'
>>> parcellate.inputs.subject_id = 'subj1'
>>> parcellate.inputs.parcellation_name = 'scale500'
>>> parcellate.run() # doctest: +SKIP
"""
input_spec = ParcellateInputSpec
output_spec = ParcellateOutputSpec
| [
2,
795,
16436,
25,
532,
9,
12,
4235,
25,
21015,
26,
12972,
12,
521,
298,
12,
28968,
25,
604,
26,
33793,
12,
8658,
82,
12,
14171,
25,
18038,
532,
9,
12,
198,
2,
25357,
25,
900,
10117,
28,
29412,
39747,
28,
19,
40379,
28,
19,
15... | 2.234772 | 2,364 |
import yaml
from typing import Dict
from urllib.request import urlopen
from rubin_changelog.config import REPOS_YAML
| [
11748,
331,
43695,
198,
198,
6738,
19720,
1330,
360,
713,
198,
6738,
2956,
297,
571,
13,
25927,
1330,
19016,
9654,
198,
198,
6738,
6437,
259,
62,
354,
8368,
519,
13,
11250,
1330,
4526,
37997,
62,
56,
2390,
43,
198
] | 3.051282 | 39 |
import os
import shutil
import platform
import pandas as pd
import matplotlib.pyplot as plt
import flopy
import pyemu
model_ws = os.path.join("extra_crispy")
nam_file = "freyberg.nam"
ml = flopy.modflow.Modflow.load(nam_file,exe_name="mf2005",model_ws=model_ws,verbose=True)
ml.dis.sr.xul = 619653
ml.dis.sr.yul = 3353277
ml.dis.sr.rotation = 0
ml.dis.epsg_str = "EPSG:32614"
ml.dis.start_datetime = "11-5-1955"
#write a grid spec file
ml.dis.sr.write_gridSpec(os.path.join("misc","freyberg.spc"))
# write the bore coords file
obs_rowcol = pd.read_csv(os.path.join("misc","obs_rowcol.dat"),delim_whitespace=True)
obs_rowcol.loc[:,'x'] = ml.dis.sr.xcentergrid[obs_rowcol.row-1,obs_rowcol.col-1]
obs_rowcol.loc[:,'y'] = ml.dis.sr.ycentergrid[obs_rowcol.row-1,obs_rowcol.col-1]
obs_rowcol.loc[:,"top"] = ml.dis.top[obs_rowcol.row-1,obs_rowcol.col-1]
obs_rowcol.loc[:,"layer"] = 1
# use this later to set weights
obs_names = ["or{0:02d}c{1:02d}_0".format(r-1,c-1) for r,c in zip(obs_rowcol.row,obs_rowcol.col)]
# get the truth time series
h = flopy.utils.HeadFile(os.path.join(model_ws,"freyberg.hds"),model=ml)
data = h.get_alldata()
#write all those terrible mod2obs files
ibound = ml.bas6.ibound.array
#well_data = ml.wel.stress_period_data[0]
#ibound[0,well_data["i"],well_data['j']] = 0
#drn_data = ml.riv.stress_period_data[0]
#ibound[0,drn_data["i"],drn_data['j']] = 0
f_crd = open(os.path.join("misc","bore.crds"),'w')
for i in range(ml.nrow):
for j in range(ml.ncol):
if ibound[0,i,j] == 0:
continue
on = "or{0:02d}c{1:02d}".format(i,j)
ox = ml.dis.sr.xcentergrid[i,j]
oy = ml.dis.sr.ycentergrid[i,j]
ol = 1
f_crd.write("{0:20s} {1:15.6E} {2:15.6E} {3:d}\n".format(on,ox,oy,ol))
f_crd.close()
# run mod2smp to get the truth values
with open(os.path.join("settings.fig"),'w') as f:
f.write("date=dd/mm/yyyy\ncolrow=no")
with open(os.path.join("misc","mod2smp.in"),'w') as f:
f.write(os.path.join("misc","freyberg.spc")+'\n')
f.write(os.path.join("misc","bore.crds")+'\n')
f.write(os.path.join("misc","bore.crds")+'\n')
f.write(os.path.join("extra_crispy","freyberg.hds")+'\n')
f.write("f\n5\n1.0e+30\nd\n")
f.write("01/01/2015\n00:00:00\n")
f.write(os.path.join("misc","freyberg_heads.smp")+'\n')
os.system(os.path.join("exe","mod2smp.exe") + " <"+os.path.join("misc","mod2smp.in"))
# write the ins file for the head smp
pyemu.pst_utils.smp_to_ins(os.path.join("misc","freyberg_heads.smp"))
shutil.copy2(os.path.join("misc","freyberg_heads.smp"),os.path.join("misc","freyberg_heads_truth.smp"))
# write the hk template
pnames = []
with open(os.path.join("misc","hk_Layer_1.ref.tpl"),'w') as f:
f.write("ptf ~\n")
for i in range(ml.nrow):
for j in range(ml.ncol):
#print(i,j,ibound[0,i,j])
if ibound[0,i,j] == 0:
tpl_str = " 0.000000E+00"
else:
pn = "hkr{0:02d}c{1:02d}".format(i,j)
tpl_str = "~ {0:8s} ~".format(pn)
f.write("{0:14s} ".format(tpl_str))
pnames.append(pn)
f.write('\n')
# build pst instance
misc_files = os.listdir(os.path.join("misc"))
ins_files = [os.path.join("misc",f) for f in misc_files if f.endswith(".ins")]
out_files = [f.replace(".ins",'') for f in ins_files]
tpl_files = [os.path.join("misc",f) for f in misc_files if f.endswith(".tpl")]
in_files = [os.path.join(ml.model_ws,os.path.split(f)[-1]).replace(".tpl",'') for f in tpl_files]
in_files = [os.path.join(ml.model_ws,"ref",os.path.split(f)[-1]) if "layer" in f.lower() else f for f in in_files]
pst = pyemu.pst_utils.pst_from_io_files(tpl_files,in_files,ins_files,out_files)
# apply par values and bounds and groups
pdata = pst.parameter_data
grps = pdata.groupby(pdata.parnme.apply(lambda x:'hk' in x)).groups
hk_mean = ml.upw.hk.array.mean()
hk_stdev = ml.upw.hk.array.std()
lb = hk_mean * 0.1
ub = hk_mean * 10.0
pdata.loc[grps[True],"parval1"] = hk_mean
pdata.loc[grps[True],"parubnd"] = ub
pdata.loc[grps[True],"parlbnd"] = lb
pdata.loc[grps[True],"pargp"] = "hk"
# constant mults
grps = pdata.groupby(pdata.parnme.apply(lambda x:'rch' in x)).groups
pdata.loc[grps[True],"parval1"] = 1.0
pdata.loc[grps[True],"parubnd"] = 1.5
pdata.loc[grps[True],"parlbnd"] = 0.5
pdata.loc[grps[True],"pargp"] = "rch"
pdata.loc["rch_1","parval1"] = 1.0
pdata.loc["rch_1","parubnd"] = 1.1
pdata.loc["rch_1","parlbnd"] = 0.9
rcond_mean = ml.riv.stress_period_data[0]["cond"].mean()
rcond_std = ml.riv.stress_period_data[0]["cond"].std()
rcond_lb = rcond_mean * 0.1
rcond_ub = rcond_mean * 10.0
grps = pdata.groupby(pdata.parnme.apply(lambda x:'rcond' in x)).groups
pdata.loc[grps[True],"parval1"] = rcond_mean
pdata.loc[grps[True],"parubnd"] = rcond_ub
pdata.loc[grps[True],"parlbnd"] = rcond_lb
pdata.loc[grps[True],"pargp"] = "rcond"
wf_base = ml.wel.stress_period_data[0]["flux"]
wf_fore = ml.wel.stress_period_data[1]["flux"]
# grps = pdata.groupby(pdata.parnme.apply(lambda x:'wf' in x)).groups
# pdata.loc[grps[True],"parval1"] = 1.0
# pdata.loc[grps[True],"parubnd"] = 1.5
# pdata.loc[grps[True],"parlbnd"] = 0.5
# pdata.loc[grps[True],"pargp"] = "welflux"
grps = pdata.groupby(pdata.parnme.apply(lambda x:'wf' in x and x.endswith("_1"))).groups
pdata.loc[grps[True],"parval1"] = -1.0 * wf_base
pdata.loc[grps[True],"parubnd"] = -1.0 * wf_base * 1.1
pdata.loc[grps[True],"parlbnd"] = -1.0 * wf_base * 0.9
pdata.loc[grps[True],"scale"] = -1.0
pdata.loc[grps[True],"pargp"] = "welflux"
grps = pdata.groupby(pdata.parnme.apply(lambda x:'wf' in x and x.endswith("_2"))).groups
pdata.loc[grps[True],"parval1"] = -1.0 * wf_fore
pdata.loc[grps[True],"parubnd"] = -1.0 * wf_fore * 1.5
pdata.loc[grps[True],"parlbnd"] = -1.0 * wf_fore * 0.5
pdata.loc[grps[True],"scale"] = -1.0
pdata.loc[grps[True],"pargp"] = "welflux"
pdata.loc["ss","parval1"] = ml.upw.ss.array.mean()
pdata.loc["ss","parubnd"] = ml.upw.ss.array.mean() * 10.0
pdata.loc["ss","parlbnd"] = ml.upw.ss.array.mean() * 0.1
pdata.loc["ss","pargp"] = "storage"
pdata.loc["sy","parval1"] = ml.upw.sy.array.mean()
pdata.loc["sy","parubnd"] = ml.upw.sy.array.mean() * 10.0
pdata.loc["sy","parlbnd"] = ml.upw.sy.array.mean() * 0.1
pdata.loc["sy","pargp"] = "storage"
#apply obs weights and groups and values
import run
run.process()
run.write_other_obs_ins()
shutil.copy2(os.path.join("misc","other.obs"),os.path.join("misc","other.obs.truth"))
smp = pyemu.pst_utils.smp_to_dataframe(os.path.join("misc","freyberg_heads_truth.smp"))
values = list(smp.loc[:,"value"])
pst.observation_data.loc[:,"weight"] = 0.0
pst.observation_data.loc[:,"obgnme"] = "forecast"
groups = pst.observation_data.groupby(pst.observation_data.obsnme.apply(lambda x:x in obs_names)).groups
pst.observation_data.loc[groups[True],"weight"] = 100.0
pst.observation_data.loc[groups[True],"obgnme"] = "head_cal"
groups = pst.observation_data.groupby(pst.observation_data.obsnme.apply(lambda x:x.startswith('o'))).groups
pst.observation_data.loc[groups[True],"obsval"] = values
pst.observation_data.index = pst.observation_data.obsnme
with open(os.path.join("misc","other.obs.truth"),'r') as f:
for line in f:
raw = line.strip().split()
pst.observation_data.loc[raw[0],"obsval"] = float(raw[1])
pst.model_command[0] = "python run.py"
pst.zero_order_tikhonov()
pst.control_data.noptmax = 20
pst.pestpp_lines.append('++forecasts(travel_time,sw_gw_0,sw_gw_1,sw_gw_2)')
pst.pestpp_lines.append('++n_iter_base(1)')
pst.pestpp_lines.append('++n_iter_super(4)')
pst.pestpp_lines.append('++max_reg_iter(5)')
pst.write("freyberg.pst",update_regul=True)
if platform.system().lower() == "windows":
pest_exe = os.path.join("exe","pest++.exe")
else:
pest_exe = None
os.system(pest_exe + ' freyberg.pst /h :4004')
# dt_deltas = pd.to_timedelta(h.get_times(),unit="d")
# idx = pd.to_datetime(ml.dis.start_datetime) + dt_deltas
# obs_data = pd.DataFrame(data[:,0,obs_rowcol.row-1,obs_rowcol.col-1],columns=obs_rowcol.name,
# index=idx)
#
# print(obs_data.shape)
# obs_rowcol.index = obs_rowcol.name
# for name in obs_data.columns:
# top = obs_rowcol.loc[name,"top"]
# if obs_data.loc[:,name].max() > top:
# print(name,"flooded")
# fig = plt.figure()
# ax = plt.subplot(111)
# obs_data.loc[:,name].plot(ax=ax,legend=False,marker='.')
#
# ax.plot(ax.get_xlim(),[top,top],"k--")
# ax.set_title(name)
# plt.show()
# fig = plt.figure()
# ax = plt.subplot(111)
# ax = ml.wel.stress_period_data.plot(ax=ax)
# ax = ml.riv.stress_period_data.plot(ax=ax)
# ax.scatter(obs_rowcol.x,obs_rowcol.y)
# [ax.text(x,y,name) for x,y,name in zip(obs_rowcol.x,obs_rowcol.y,obs_rowcol.name)]
# ax = ml.wel.plot()[0]
# ax.scatter(obs_rowcol.x,obs_rowcol.y)
| [
11748,
28686,
198,
11748,
4423,
346,
198,
11748,
3859,
198,
11748,
19798,
292,
355,
279,
67,
198,
11748,
2603,
29487,
8019,
13,
9078,
29487,
355,
458,
83,
198,
11748,
781,
11081,
198,
11748,
12972,
368,
84,
198,
198,
19849,
62,
18504,
... | 2.084859 | 4,207 |
import numpy as np
import alloy.math
import alloy.ros
from geometry_msgs.msg import(
Pose,
Point,
Transform
)
__all__ = [
'do_transform_point',
'do_transform_pose'
]
def do_transform_point(transform: Transform, point: Point) -> Point:
"""An implementation of the tf2_geometry_msgs interface to get around PyKDL.
Transform a point with the given transform message.
Parameters
----------
transform : geometry_msgs/Transform
Message that describes the transform
point : geometry_msgs/Point
The point to be transformed
Returns
-------
geometry_msgs/Point
Transformed point
"""
transform_mat = alloy.math.transformation_matrix_from_array(alloy.ros.transform_to_numpy(transform))
point_np = alloy.ros.point_to_numpy(point)
point_np = np.append(point_np, 1).reshape((4,1))
trans_point = np.matmul(transform_mat, point_np)
return alloy.ros.numpy_to_point(trans_point[0:3,0])
def do_transform_pose(transform: Transform, pose: Pose) -> Pose:
"""An implementation of the tf2_geometry_msgs interface to get around PyKDL.
Transform a pose with the given transform message.
Parameters
----------
transform : geometry_msgs/Transform
Message that describes the transform
pose : geometry_msgs/Pose
The pose to be transformed
Returns
-------
geometry_msgs/Pose
Transformed pose.
"""
transform_mat = alloy.math.transformation_matrix_from_array(alloy.ros.transform_to_numpy(transform))
pose_mat = alloy.math.transformation_matrix_from_array(alloy.ros.pose_to_numpy(pose))
#combine two transformation matrix
trans_pose_mat = np.matmul(transform_mat, pose_mat)
return alloy.ros.numpy_to_pose(alloy.math.transformation_matrix_to_array(trans_pose_mat))
| [
11748,
299,
32152,
355,
45941,
198,
11748,
36186,
13,
11018,
198,
11748,
36186,
13,
4951,
198,
198,
6738,
22939,
62,
907,
14542,
13,
19662,
1330,
7,
198,
220,
220,
220,
37557,
11,
198,
220,
220,
220,
6252,
11,
198,
220,
220,
220,
26... | 2.755656 | 663 |
'''
Naming conventions:
For public functions:
execute_*
These represent functions which do purely in-memory compute. They will evaluate expectations the
core compute function, and exercise all logging and metrics tracking (outside of outputs), but they
will not invoke *any* outputs (and their APIs don't allow the user to).
'''
from dagster import check
from dagster.core.definitions import PipelineDefinition, SystemStorageData
from dagster.core.errors import (
DagsterExecutionStepNotFoundError,
DagsterInvariantViolationError,
DagsterRunNotFoundError,
DagsterStepOutputNotFoundError,
)
from dagster.core.events import DagsterEvent, DagsterEventType
from dagster.core.execution.context.system import SystemPipelineExecutionContext
from dagster.core.execution.plan.plan import ExecutionPlan
from dagster.core.instance import DagsterInstance
from dagster.core.storage.pipeline_run import PipelineRun, PipelineRunStatus
from dagster.core.system_config.objects import EnvironmentConfig
from dagster.utils import ensure_gen
from .config import RunConfig
from .context_creation_pipeline import scoped_pipeline_context
from .results import PipelineExecutionResult
def _pipeline_execution_iterator(
pipeline_context, execution_plan, run_config, step_keys_to_execute
):
'''A complete execution of a pipeline. Yields pipeline start, success,
and failure events. Defers to _steps_execution_iterator for step execution.
'''
if (
isinstance(pipeline_context, DagsterEvent)
and pipeline_context.event_type # pylint: disable=no-member
== DagsterEventType.PIPELINE_INIT_FAILURE
):
yield pipeline_context
return
yield DagsterEvent.pipeline_start(pipeline_context)
pipeline_success = True
try:
for event in _steps_execution_iterator(
pipeline_context,
execution_plan=execution_plan,
run_config=run_config,
step_keys_to_execute=step_keys_to_execute,
):
if event.is_step_failure:
pipeline_success = False
yield event
except (Exception, KeyboardInterrupt):
pipeline_success = False
raise # finally block will run before this is re-raised
finally:
if pipeline_success:
yield DagsterEvent.pipeline_success(pipeline_context)
else:
yield DagsterEvent.pipeline_failure(pipeline_context)
def execute_pipeline_iterator(pipeline, environment_dict=None, run_config=None, instance=None):
'''Returns iterator that yields :py:class:`SolidExecutionResult` for each
solid executed in the pipeline.
This is intended to allow the caller to do things between each executed
node. For the 'synchronous' API, see :py:func:`execute_pipeline`.
Parameters:
pipeline (PipelineDefinition): Pipeline to run
environment_dict (dict): The enviroment configuration that parameterizes this run
run_config (RunConfig): Configuration for how this pipeline will be executed
Returns:
Iterator[DagsterEvent]
'''
check.inst_param(pipeline, 'pipeline', PipelineDefinition)
environment_dict = check.opt_dict_param(environment_dict, 'environment_dict')
instance = check.opt_inst_param(
instance, 'instance', DagsterInstance, DagsterInstance.ephemeral()
)
run_config = check_run_config_param(run_config, pipeline)
run = _create_run(instance, pipeline, run_config, environment_dict)
return execute_run_iterator(pipeline, run, instance)
def execute_pipeline(
pipeline, environment_dict=None, run_config=None, instance=None, raise_on_error=True
):
'''
"Synchronous" version of :py:func:`execute_pipeline_iterator`.
This is the entry point for dagster CLI and dagit execution. For the dagster-graphql entry
point, see execute_plan() below.
Parameters:
pipeline (PipelineDefinition): Pipeline to run
environment_dict (dict):
The enviroment configuration that parameterizes this run
run_config (RunConfig):
Configuration for how this pipeline will be executed
instance (DagsterInstance):
The instance to execute against, defaults to ephemeral (no artifacts persisted)
raise_on_error (Bool):
Whether or not to raise exceptions when they occur. Defaults to True
since this behavior is useful in tests which is the most common use of this API.
Returns:
:py:class:`PipelineExecutionResult`
'''
check.inst_param(pipeline, 'pipeline', PipelineDefinition)
environment_dict = check.opt_dict_param(environment_dict, 'environment_dict')
run_config = check_run_config_param(run_config, pipeline)
check.opt_inst_param(instance, 'instance', DagsterInstance)
instance = instance or DagsterInstance.ephemeral()
execution_plan = create_execution_plan(pipeline, environment_dict, run_config)
step_keys_to_execute = _resolve_step_keys(execution_plan, run_config.step_keys_to_execute)
# run should be used and threaded through here
# https://github.com/dagster-io/dagster/issues/1745
_run = _create_run(instance, pipeline, run_config, environment_dict)
with scoped_pipeline_context(
pipeline, environment_dict, run_config, instance, raise_on_error=raise_on_error
) as pipeline_context:
event_list = list(
_pipeline_execution_iterator(
pipeline_context,
execution_plan=execution_plan,
run_config=run_config,
step_keys_to_execute=step_keys_to_execute,
)
)
return PipelineExecutionResult(
pipeline,
run_config.run_id,
event_list,
lambda: scoped_pipeline_context(
pipeline,
environment_dict,
run_config,
instance,
system_storage_data=SystemStorageData(
intermediates_manager=pipeline_context.intermediates_manager,
file_manager=pipeline_context.file_manager,
),
),
)
def execute_pipeline_with_preset(
pipeline, preset_name, run_config=None, instance=None, raise_on_error=True
):
'''Runs :py:func:`execute_pipeline` with the given preset for the pipeline.
The preset will optionally provide environment_dict and/or build a pipeline from
a solid subset. If a run_config is not provied, one which only sets the
mode as defined by the preset will be used.
'''
check.inst_param(pipeline, 'pipeline', PipelineDefinition)
check.str_param(preset_name, 'preset_name')
check.opt_inst_param(run_config, 'run_config', RunConfig)
check.opt_inst_param(instance, 'instance', DagsterInstance)
instance = instance or DagsterInstance.ephemeral()
preset = pipeline.get_preset(preset_name)
if preset.solid_subset is not None:
pipeline = pipeline.build_sub_pipeline(preset.solid_subset)
if run_config:
run_config = run_config.with_mode(preset.mode)
else:
run_config = RunConfig(mode=preset.mode)
return execute_pipeline(
pipeline, preset.environment_dict, run_config, instance, raise_on_error=raise_on_error
)
def _steps_execution_iterator(pipeline_context, execution_plan, run_config, step_keys_to_execute):
'''Iterates over execution of individual steps yielding the associated events.
Does not yield pipeline level events asside from init failure when the context fails to construct.
'''
check.inst_param(
pipeline_context, 'pipeline_context', (DagsterEvent, SystemPipelineExecutionContext)
)
check.inst_param(execution_plan, 'execution_plan', ExecutionPlan)
check.inst_param(run_config, 'run_config', RunConfig)
check.list_param(step_keys_to_execute, 'step_keys_to_execute', of_type=str)
if (
isinstance(pipeline_context, DagsterEvent)
# pylint: disable=no-member
and pipeline_context.event_type == DagsterEventType.PIPELINE_INIT_FAILURE
):
return ensure_gen(pipeline_context)
_setup_reexecution(run_config, pipeline_context, execution_plan)
# Engine execution returns a generator of yielded events, so returning here means this function
# also returns a generator
return pipeline_context.executor_config.get_engine().execute(
pipeline_context, execution_plan, step_keys_to_execute
)
def execute_plan(
execution_plan, instance, environment_dict=None, run_config=None, step_keys_to_execute=None
):
'''This is the entry point of dagster-graphql executions. For the dagster CLI entry point, see
execute_pipeline() above.
'''
check.inst_param(execution_plan, 'execution_plan', ExecutionPlan)
check.inst_param(instance, 'instance', DagsterInstance)
environment_dict = check.opt_dict_param(environment_dict, 'environment_dict')
run_config = check_run_config_param(run_config, execution_plan.pipeline_def)
check.opt_list_param(step_keys_to_execute, 'step_keys_to_execute', of_type=str)
return list(
execute_plan_iterator(
execution_plan=execution_plan,
environment_dict=environment_dict,
run_config=run_config,
step_keys_to_execute=step_keys_to_execute,
instance=instance,
)
)
| [
7061,
6,
198,
45,
3723,
21396,
25,
198,
198,
1890,
1171,
5499,
25,
198,
198,
41049,
62,
9,
198,
198,
4711,
2380,
5499,
543,
466,
14177,
287,
12,
31673,
24061,
13,
1119,
481,
13446,
9027,
262,
198,
7295,
24061,
2163,
11,
290,
5517,
... | 2.705984 | 3,476 |
# simple check to see if file changed
# Function example, will only read file contents when file stats change
@func_onchange(cheezy_file_metric)
file('test.txt', 'w').write('original content')
print '-'*60
print get_filecontents('test.txt')
print '-'*60
print get_filecontents('test.txt')
file('test.txt', 'w').write('new and improved content!')
print '-'*60
print get_filecontents('test.txt')
print '-'*60
print get_filecontents('test.txt')
print '~'*60
# Method example, will only read file contents when file stats change
f = FileReadThing()
file('test.txt', 'w').write('original content')
print '-'*60
print f.get_filecontents('test.txt')
print '-'*60
print f.get_filecontents('test.txt')
file('test.txt', 'w').write('new and improved content!')
print '-'*60
print f.get_filecontents('test.txt')
print '-'*60
print f.get_filecontents('test.txt')
| [
220,
220,
220,
220,
198,
198,
2,
2829,
2198,
284,
766,
611,
2393,
3421,
198,
198,
2,
15553,
1672,
11,
481,
691,
1100,
2393,
10154,
618,
2393,
9756,
1487,
198,
31,
20786,
62,
261,
3803,
7,
2395,
68,
7357,
62,
7753,
62,
4164,
1173,
... | 2.902357 | 297 |
from __future__ import print_function
import os
import sys
from py2gcode import gcode_cmd
from py2gcode import cnc_dxf
fileName = sys.argv[1]
feedrate = 25.0
layerNames = ['hole_thru_4-40']
prog = gcode_cmd.GCodeProg()
prog.add(gcode_cmd.GenericStart())
prog.add(gcode_cmd.Space())
prog.add(gcode_cmd.FeedRate(feedrate))
param = {
'fileName' : fileName,
'layers' : layerNames,
'dxfTypes' : ['CIRCLE'],
'startZ' : 0.00,
'stopZ' : -0.12,
'safeZ' : 0.3,
'stepZ' : 0.03,
'startDwell' : 2.0,
}
drill = cnc_dxf.DxfDrill(param)
prog.add(drill)
prog.add(gcode_cmd.Space())
prog.add(gcode_cmd.End(),comment=True)
baseName, dummy = os.path.splitext(__file__)
fileName = '{0}.ngc'.format(baseName)
print('generating: {0}'.format(fileName))
prog.write(fileName)
| [
6738,
11593,
37443,
834,
1330,
3601,
62,
8818,
198,
11748,
28686,
220,
198,
11748,
25064,
198,
6738,
12972,
17,
70,
8189,
1330,
308,
8189,
62,
28758,
198,
6738,
12972,
17,
70,
8189,
1330,
269,
10782,
62,
67,
26152,
198,
198,
7753,
537... | 1.981651 | 436 |
# Generated by Django 2.1.8 on 2019-05-23 14:21
from django.db import migrations, models
| [
2,
2980,
515,
416,
37770,
362,
13,
16,
13,
23,
319,
13130,
12,
2713,
12,
1954,
1478,
25,
2481,
198,
198,
6738,
42625,
14208,
13,
9945,
1330,
15720,
602,
11,
4981,
628
] | 2.84375 | 32 |
from modeldata import from_downloaded as modeldata_from_downloaded
import log
from utilities import get_ncfiles_in_dir
from utilities import get_urls,get_logins
from utilities import get_n_months,add_month_to_timestamp,convert_time_to_datetime
from netCDF4 import Dataset
import numpy as np
import os
from datetime import datetime
from time import time as time_now
from ftplib import FTP,error_temp
def ftp_server_retry(output_dir,main_ftp_dir,start_date,end_date,temporal_resolution,log_file='dl/cmems.log',variables=['u','v'],i_depths=[0],i_times=None):
    '''Calls the ftp_server function to download CMEMS data from FTP; if the connection
    to the FTP server breaks during execution, the download is retried until it completes.

    Input:
    - output_dir [string]: local directory where final files are stored
    - main_ftp_dir [string]: directory on FTP server where data to download is stored
    - start_date [datetime.date]: date to start download
    - end_date [datetime.date]: date to end download
    - temporal_resolution [string]: "daily" or "monthly" resolution of the source files
    - log_file (optional) [string]: file in which log messages are saved (default: dl/cmems.log)
    - variables (optional) [list of strings]: list of strings specifying variables to download, these
      should match the variable names in "input/variables.json" (default: ['u','v'])
    - i_depths (optional) [list of integers]: indices of depth layers to download (default: [0], surface)
    - i_times (optional): time indices to extract; forwarded to ftp_server (the
      original accepted this argument but silently dropped it)'''
    # Retry in a loop instead of recursing: the recursive version grew the call
    # stack by one frame per broken connection, so a long outage could hit the
    # recursion limit.
    while True:
        try:
            ftp_server(output_dir,main_ftp_dir,start_date,end_date,temporal_resolution,
                       log_file=log_file,variables=variables,i_depths=i_depths,i_times=i_times)
            return
        # ftplib signals temporary failures (including dropped connections) with
        # error_temp; the original caught an undefined name (DisconnectedError),
        # which raised NameError instead of retrying.
        except error_temp as e:
            log.error(log_file, 'FTP connection broken: ' + str(e))
            log.info(log_file,'Trying to reconnect to FTP server...')
def ftp_server(output_dir,main_ftp_dir,start_date,end_date,temporal_resolution,log_file='dl_cmems.log',variables=['u','v'],i_depths=[0],i_times=None):
    '''Connects to the CMEMS FTP server and downloads full netcdf files from the FTP with either
    daily or monthly resolution. Full netcdf files are stored temporarily on a local drive before
    requested variables and depth layers are extracted. If this is successful, the temporary full
    netcdf file is then removed. If the connection to the FTP server breaks during execution,
    ftplib's error_temp propagates to the caller.

    Raises ValueError if temporal_resolution is neither "daily" nor "monthly".'''
    downloaders = {'monthly': _dl_monthly, 'daily': _dl_daily}
    # Validate up front so a bad argument does not open (and leak) a connection.
    if temporal_resolution not in downloaders:
        raise ValueError('Unknown temporal resolution requested for CMEMS FTP download. Valid options are "daily" or "monthly".')
    ftp_url = get_urls('cmems_main')
    login = get_logins('cmems')
    ftp = connect_to_cmems_ftp(ftp_url,main_ftp_dir,login['user'],login['pwd'],log_file)
    try:
        downloaders[temporal_resolution](ftp,output_dir,start_date,end_date,log_file,variables,i_depths,i_times)
    finally:
        # Always end the session; the original only called quit() on success,
        # leaking the connection whenever a download raised.
        try:
            ftp.quit()
        except Exception:
            ftp.close()  # best-effort: quit() fails if the link is already dead
def connect_to_cmems_ftp(ftp_url,main_ftp_dir,user,pwd,log_file):
    '''Open an authenticated FTP session to the CMEMS server, change into the
    requested data directory, and return the ready-to-use ftplib.FTP handle.'''
    log.info(log_file,'Accessing CMEMS FTP server: '+ftp_url+'/'+main_ftp_dir)
    session = FTP(ftp_url)
    session.login(user=user,passwd=pwd)
    session.cwd(main_ftp_dir)
    return session
def _dl_monthly(ftp,output_dir,start_date,end_date,log_file,variables,i_depths,i_times):
    '''Fetch monthly netcdf files for every year from start_date through end_date.
    Full files are downloaded to a temporary location, the requested variables and
    depth layers are saved, and the temporary full file is removed on success.
    Files are organised in per-year directories on the server, so each year
    directory is entered in turn.'''
    for year in range(start_date.year, end_date.year + 1):
        ftp.cwd(str(year)+'/')
        log.info(log_file,f'Changed FTP working dir to: {ftp.pwd()}')
        # Every file listed in the year directory is handed to _dl_file; monthly
        # filenames carry a %Y%m timestamp.
        _dl_file(ftp, ftp.nlst(), output_dir, log_file, variables, i_depths, i_times,
                 filename_format='%Y%m')
        ftp.cwd('../')
def _dl_daily(ftp,output_dir,start_date,end_date,log_file,variables,i_depths,i_times):
    '''Fetch daily netcdf files for every month from start_date through end_date.
    Full files are downloaded to a temporary location, the requested variables and
    depth layers are saved, and the temporary full file is removed on success.
    Files are organised in year/month directories on the server.'''
    for month_offset in range(get_n_months(start_date, end_date)):
        stamp = add_month_to_timestamp(start_date, month_offset)
        # Server layout is <year>/<zero-padded month>/.
        ftp.cwd(f'{stamp.year}/{stamp.month:02d}/')
        log.info(log_file,f'Changed FTP working dir to: {ftp.pwd()}')
        _dl_file(ftp, ftp.nlst(), output_dir, log_file, variables, i_depths, i_times)
        ftp.cwd('../../')
| [
6738,
4235,
335,
1045,
1330,
422,
62,
2902,
14578,
355,
4235,
335,
1045,
62,
6738,
62,
2902,
14578,
198,
11748,
2604,
198,
6738,
20081,
1330,
651,
62,
10782,
16624,
62,
259,
62,
15908,
198,
6738,
20081,
1330,
651,
62,
6371,
82,
11,
... | 2.653994 | 1,815 |
from model.contact import Contact
import random
import pytest
| [
6738,
2746,
13,
32057,
1330,
14039,
198,
11748,
4738,
198,
11748,
12972,
9288,
628
] | 4.5 | 14 |
import pygame
# Basic RGB color constants (presumably for pygame drawing calls — confirm usage).
black = (0, 0, 0)
red = (255, 0, 0)
| [
11748,
12972,
6057,
198,
198,
13424,
796,
357,
15,
11,
657,
11,
657,
8,
198,
445,
796,
357,
13381,
11,
657,
11,
657,
8,
628,
628
] | 2.076923 | 26 |