id stringlengths 1 265 | text stringlengths 6 5.19M | dataset_id stringclasses 7
values |
|---|---|---|
3315834 | <reponame>jzijin/mmsr<gh_stars>1-10
# Stack five images vertically into a single tall image and save the result.
# Assumes all five inputs exist and share the width/height of "0.png".
import cv2
import numpy as np
img1 = cv2.imread("0.png")
img2 = cv2.imread("1.png")
img3 = cv2.imread("3.png")  # NOTE(review): "3.png" is loaded before "2.png", so the
img4 = cv2.imread("2.png")  # stacked order is 0, 1, 3, 2, 4 -- confirm this is intentional
img5 = cv2.imread("4.png")
width = img1.shape[1]
height = img1.shape[0]
# Blank BGR canvas tall enough to hold all five images stacked vertically.
img6 = np.zeros((height*5, width, 3), np.uint8)
img6[0:height, 0:width] = img1
img6[height:height*2, 0:width] = img2
img6[height*2:height*3, 0:width] = img3
img6[height*3:height*4, 0:width] = img4
img6[height*4:height*5, 0:width] = img5
cv2.imwrite("aaaaa.png", img6)
| StarcoderdataPython |
4832831 | """This module implements standard game functions for Py-Climber, such as processing keypresses"""
import sys
import random
from src.blob_enemy import Blob
import pygame
import pygame.freetype
def check_events(settings, screen, tile_map):
    """Poll the pygame event queue and dispatch each event by its type."""
    for evt in pygame.event.get():
        if evt.type == pygame.QUIT:
            sys.exit()
        if evt.type == pygame.KEYDOWN:
            check_keydown_events(settings, evt, screen, tile_map)
        elif evt.type == pygame.KEYUP:
            check_keyup_events(settings, evt, screen, tile_map)
def reset_game(tile_map):
    """Restore the tile map (and everything it owns) to its initial state."""
    tile_map.reset()
def check_keydown_events(settings, event, screen, tile_map):
    """Respond to key down events"""
    player = tile_map.player
    if event.key == pygame.K_ESCAPE:
        # ESC quits immediately.
        sys.exit()
    if event.key == pygame.K_a:
        # 'a' spawns an extra blob enemy at a random floor/side.
        generate_new_random_blob(settings, screen, settings.image_res.enemy_blob_images, tile_map)
    if event.key == pygame.K_r:
        # 'r' restarts the level.
        reset_game(tile_map)
    if event.key == pygame.K_LEFT:
        if not player.idle_top:
            # Only start walking if the player is not already moving.
            if player.dx == 0.0:
                player.dx = -1 * settings.player_dx
            player.facing_left = True
    if event.key == pygame.K_RIGHT:
        if not player.idle_top:
            if player.dx == 0.0:
                player.dx = settings.player_dx
            player.facing_left = False
    if event.key == pygame.K_F9:
        # F9 toggles fullscreen.
        # NOTE(review): the resolution is hard-coded to 800x600 here rather
        # than read from settings -- confirm that matches the rest of the game.
        if settings.fullscreen == True:
            settings.fullscreen = False
            pygame.display.set_mode((800, 600))
        else:
            settings.fullscreen = True
            pygame.display.set_mode((800, 600), pygame.FULLSCREEN)
def check_keyup_events(settings, event, screen, tile_map):
    """Respond to key up events."""
    player = tile_map.player
    if event.key == pygame.K_SPACE:
        # NOTE(review): the jump triggers on key *release*, not press --
        # confirm that is the intended feel.
        if not player.idle_top:
            if player.falling == False:
                # Grounded jump.
                player.dy = settings.player_jump_velocity
                player.falling = True
            elif player.air_jumps < player.max_air_jumps:
                # Mid-air (double) jump, limited by max_air_jumps.
                player.dy = settings.player_air_jump_velocity
                player.air_jumps += 1
    if event.key == pygame.K_LEFT:
        # Releasing a direction key stops horizontal movement.
        if not player.idle_top:
            if player.dx != 0.0:
                player.dx = 0.0
    if event.key == pygame.K_RIGHT:
        if not player.idle_top:
            if player.dx != 0.0:
                player.dx = 0.0
def generate_new_random_blob(settings, screen, images, tile_map):
    """Generate a new blob enemy and add it to the list"""
    # How this should work: First pick a floor, this is the middle_row of the triad created
    # when generating the map, e.g. not the floor and not a level where blocks can appear
    floor_number = random.randint(0, settings.map_number_floors - 2)
    # Secondly pick a side, left or right (this will affect placement and initial velocity, etc)
    facing_left = random.choice([True, False])
    # Calculate initial position / velocity / facing flags
    enemy = Blob(settings, screen, images)
    # Presumably each floor is 3 tile-rows tall and "+2" lands the enemy's feet
    # on the chosen middle row -- confirm against the map generator.
    enemy.rect.bottom = settings.tile_height * ( 2 + (3 * floor_number))
    enemy.rect.left = 3 * settings.tile_width + tile_map.x_offset
    enemy.dx = settings.enemy_blob_dx
    if facing_left:
        # Spawn on the right-hand side walking left.
        enemy.rect.left += 10 * settings.tile_width
        enemy.dx *= -1.0
        enemy.facing_left = True
        enemy.set_current_animation(settings.anim_name_walk_left)
    else:
        enemy.facing_left = False
        enemy.set_current_animation(settings.anim_name_walk_right)
    # Add it to the list
    tile_map.enemies.add(enemy)
def blit_help_text(settings, screen):
    """Draws the text explaining what keys do what.

    Lines are rendered bottom-up, starting 48px above the bottom edge and
    spaced 20px apart, so the first entry below appears lowest on screen.
    """
    # (x-offset, text) pairs; the "...can jump once in air" line is indented
    # (x=15) so it reads as a sub-item of the SPACE line drawn above it.
    help_lines = [
        (10, "ESC to exit"),
        (10, "F9 to toggle fullscreen"),
        (10, "'a' to add a new enemy"),
        (10, "'r' to reset"),
        (15, "...can jump once in air"),
        (10, "SPACE to jump"),
        (10, "LEFT/RIGHT arrows to walk"),
    ]
    font = settings.font
    y = screen.get_rect().bottom - 48
    for x, text in help_lines:
        font.render_to(screen, (x, y), text, settings.font_color)
        y -= 20
def update_game_objects(settings, tile_map):
    """Advance all simulation state by delegating to the tile map."""
    tile_map.update()
def draw_game_objects(settings, screen, tile_map):
    """Render one frame: the tile map first, then the help-text overlay."""
    # Draw the map before the text so the text appears on top.
    tile_map.draw()
    blit_help_text(settings, screen)
def update_screen(settings, screen, tile_map):
    """Update images and flip screen"""
    # One frame: clear -> update state -> draw -> present.
    screen.fill(settings.bg_color)
    update_game_objects(settings, tile_map)
    draw_game_objects(settings, screen, tile_map)
    pygame.display.flip()
| StarcoderdataPython |
3370385 | <reponame>CajetanP/programming-exercises
def convert(number):
    """Return the raindrop sounds for *number*: "Pling" for divisibility by 3,
    "Plang" for 5, "Plong" for 7 (concatenated); otherwise the number itself
    as a string."""
    sounds = ((3, "Pling"), (5, "Plang"), (7, "Plong"))
    result = "".join(word for factor, word in sounds if number % factor == 0)
    return result if result else str(number)
| StarcoderdataPython |
1733557 | <gh_stars>0
import sys, os
def fixpath():
    """Append this module's ./lib directory to PATH and, on interpreters that
    support it (Windows, Python 3.8+), register it as a DLL directory."""
    libdir = os.path.join(os.path.dirname(__file__), 'lib')
    entries = os.environ.get('PATH', '').split(os.pathsep)
    entries.append(libdir)
    os.environ['PATH'] = os.pathsep.join(entries)
    if hasattr(os, 'add_dll_directory'):
        os.add_dll_directory(libdir)

if sys.platform == "win32":
    # Only Windows needs the DLL search-path fix-up at import time.
    fixpath()
| StarcoderdataPython |
3386022 | <reponame>miiiingi/algorithmstudy<gh_stars>0
import sys
import collections
# Read N, then N numbers: a "0" cancels (removes) the most recently kept
# number, anything else is kept.  Print the sum of the surviving numbers.
input = sys.stdin.readline  # rebind input() to readline for faster bulk reads
array = collections.deque()
N = int(input().strip())
for _ in range(N) :
    number = input().strip()
    if number == '0' :
        # "0" cancels the most recent entry.
        array.pop()
    else :
        array.append(int(number))
print(sum(array))
41326 | from torch.utils.data import Dataset
from torchvision.transforms.functional import to_tensor
from utils.image.processor import ImagePreprocessor, colorFormats
from PIL import Image
import glob, random
class ImageData(Dataset):
    """Dataset of (low-res, high-res) tensor pairs built from every png/jpg/jpeg
    image found recursively under ``srcPath``."""

    def __init__(
        self, srcPath, crop=True, cropSize=96, colorFromat="RGB", processorList=None
    ):
        super(ImageData, self).__init__()
        # Collect every image file below srcPath, at any depth.
        candidates = glob.glob(srcPath + "/**", recursive=True)
        self.imgs = [
            path for path in candidates
            if path.endswith(("png", "jpg", "jpeg"))
        ]
        self.imgNum = len(self.imgs)
        if colorFromat not in colorFormats:
            raise KeyError("only RGB or YUV or L")
        self.imgPreprocessor = ImagePreprocessor(
            crop=crop,
            cropSize=cropSize,
            colorFromat=colorFromat,
            processorList=processorList,
        )

    def __len__(self):
        return self.imgNum

    def __getitem__(self, index):
        # Images are opened lazily, one per __getitem__ call.
        path = self.imgs[index]
        image = Image.open(path)
        lr, hr = self.imgPreprocessor.process(image)
        return to_tensor(lr), to_tensor(hr)
| StarcoderdataPython |
1780845 | <reponame>VulcanClimateModeling/fv3config<gh_stars>1-10
import unittest
import copy
import os
import shutil
from fv3config import ConfigError
from fv3config._tables import update_diag_table_for_config
from fv3config.config.derive import (
get_current_date,
_get_current_date_from_coupler_res,
_get_coupler_res_filename,
)
from fv3config._datastore import (
get_microphysics_name,
get_field_table_filename,
get_diag_table_filename,
get_data_table_filename,
)
import tempfile
from .mocks import c12_config
TEST_DIRECTORY = os.path.dirname(os.path.realpath(__file__))
DEFAULT_CONFIG = c12_config()
valid_coupler_res = """ 2 (Calendar: no_calendar=0, thirty_day_months=1, julian=2, gregorian=3, noleap=4)
2016 8 1 0 0 0 Model start time: year, month, day, hour, minute, second
2016 8 3 0 0 0 Current model time: year, month, day, hour, minute, second
/
"""
valid_current_date = [2016, 8, 3, 0, 0, 0]
bad_coupler_res = """ 2 (Calendar: no_calendar=0, thirty_day_months=1, julian=2, gregorian=3, noleap=4)
2016 8 1 0 0 0 Model start time: year, month, day, hour, minute, second
2016 8 missing 0 0 0 Current model time: year, month, day, hour, minute, second
/
"""
empty_config = {}
config_for_update_diag_table_test = {
"experiment_name": "diag_table_test",
"namelist": {
"coupler_nml": {
"current_date": valid_current_date,
"force_date_from_namelist": True,
}
},
}
diag_table_test_in = "default_experiment\n2016 1 1 0 0 0\nother contents here"
diag_table_test_out = "diag_table_test\n2016 8 3 0 0 0\nother contents here"
class RunDirectory(object):
    """Creates a model run directory (with INPUT/ and RESTART/ subdirectories)
    on construction; cleanup() removes the whole tree."""

    def __init__(self, directory_path):
        os.mkdir(directory_path)
        for subdir in ("INPUT", "RESTART"):
            os.mkdir(os.path.join(directory_path, subdir))
        self.directory_path = directory_path

    def cleanup(self):
        shutil.rmtree(self.directory_path)
class TableTests(unittest.TestCase):
    """Tests for fv3config's diag/data/field table resolution and the
    current-date derivation helpers.

    Each test that needs an on-disk run directory creates one via
    make_run_directory; tearDown removes everything created during the test.
    """

    def setUp(self):
        # Run directories created by make_run_directory, cleaned up in tearDown.
        self._run_directory_list = []

    def tearDown(self):
        for directory in self._run_directory_list:
            directory.cleanup()

    def make_run_directory(self, directory_name):
        """Create a tracked RunDirectory under TEST_DIRECTORY and return its path."""
        full_path = os.path.join(TEST_DIRECTORY, directory_name)
        self._run_directory_list.append(RunDirectory(full_path))
        return full_path

    # --- default table lookups -------------------------------------------

    def test_default_data_table_filename(self):
        config = copy.deepcopy(DEFAULT_CONFIG)
        filename = get_data_table_filename(config)
        self.assertTrue(os.path.isfile(filename))
        self.assertTrue("data_table" in filename)

    def test_default_diag_table_filename(self):
        config = copy.deepcopy(DEFAULT_CONFIG)
        filename = get_diag_table_filename(config)
        self.assertTrue(os.path.isfile(filename))
        self.assertTrue("diag_table" in filename)

    def test_default_field_table_filename(self):
        config = copy.deepcopy(DEFAULT_CONFIG)
        filename = get_field_table_filename(config)
        self.assertTrue(os.path.isfile(filename))
        self.assertTrue("field_table" in filename)

    # --- explicitly-specified tables -------------------------------------

    def test_get_specified_data_table_filename(self):
        source_rundir = self.make_run_directory("source_rundir")
        data_table_filename = os.path.join(source_rundir, "data_table")
        open(data_table_filename, "w").close()
        config = copy.deepcopy(DEFAULT_CONFIG)
        config["data_table"] = data_table_filename
        filename = get_data_table_filename(config)
        self.assertEqual(filename, data_table_filename)

    def test_get_specified_diag_table_filename(self):
        source_rundir = self.make_run_directory("source_rundir")
        diag_table_filename = os.path.join(source_rundir, "diag_table")
        open(diag_table_filename, "w").close()
        config = copy.deepcopy(DEFAULT_CONFIG)
        config["diag_table"] = diag_table_filename
        filename = get_diag_table_filename(config)
        self.assertEqual(filename, diag_table_filename)

    # --- invalid configurations ------------------------------------------

    def test_get_bad_field_table_filename(self):
        # imp_physics = -1 is not a supported microphysics scheme.
        config = copy.deepcopy(DEFAULT_CONFIG)
        config["namelist"]["gfs_physics_nml"]["imp_physics"] = -1
        with self.assertRaises(NotImplementedError):
            get_field_table_filename(config)

    def test_get_bad_microphysics_name_from_config(self):
        config = copy.deepcopy(DEFAULT_CONFIG)
        config["namelist"]["gfs_physics_nml"]["imp_physics"] = -1
        with self.assertRaises(NotImplementedError):
            get_microphysics_name(config)

    def test_get_bad_diag_table_filename(self):
        # A diag_table path that does not exist should raise ConfigError.
        diag_table_filename = "/not/a/path/diag_table"
        config = copy.deepcopy(DEFAULT_CONFIG)
        config["diag_table"] = diag_table_filename
        with self.assertRaises(ConfigError):
            get_diag_table_filename(config)

    def test_get_diag_table_from_empty_config(self):
        with self.assertRaises(ConfigError):
            get_diag_table_filename(empty_config)

    def test_get_data_table_from_empty_config(self):
        with self.assertRaises(ConfigError):
            get_data_table_filename(empty_config)

    def test_update_diag_table_from_empty_config(self):
        rundir = self.make_run_directory("rundir")
        with self.assertRaises(ConfigError):
            update_diag_table_for_config(
                empty_config, valid_current_date, os.path.join(rundir, "source")
            )

    # --- current-date derivation -----------------------------------------

    def test_get_current_date_from_coupler_res(self):
        rundir = self.make_run_directory("test_rundir")
        coupler_res_filename = os.path.join(rundir, "coupler.res")
        with open(coupler_res_filename, "w") as f:
            f.write(valid_coupler_res)
        current_date = _get_current_date_from_coupler_res(coupler_res_filename)
        self.assertEqual(current_date, valid_current_date)

    def test_get_current_date_from_bad_coupler_res(self):
        # bad_coupler_res has a non-numeric field in the current-time line.
        rundir = self.make_run_directory("test_rundir")
        coupler_res_filename = os.path.join(rundir, "coupler.res")
        with open(coupler_res_filename, "w") as f:
            f.write(bad_coupler_res)
        with self.assertRaises(ConfigError):
            _get_current_date_from_coupler_res(coupler_res_filename)

    def test_get_current_date_from_config_force_date_true(self):
        config = copy.deepcopy(DEFAULT_CONFIG)
        config["namelist"]["coupler_nml"]["force_date_from_namelist"] = True
        config["namelist"]["coupler_nml"]["current_date"] = valid_current_date
        current_date = get_current_date(config)
        self.assertEqual(current_date, valid_current_date)

    def test_get_current_date_from_config_force_date_false(self):
        config = copy.deepcopy(DEFAULT_CONFIG)
        config["namelist"]["coupler_nml"]["force_date_from_namelist"] = False
        config["namelist"]["coupler_nml"]["current_date"] = valid_current_date
        current_date = get_current_date(config)
        self.assertEqual(current_date, valid_current_date)

    def test_get_current_date_from_config_which_includes_coupler_res_asset(self):
        # A coupler.res supplied as a patch-file asset should override the namelist.
        config = copy.deepcopy(DEFAULT_CONFIG)
        tmpdir = self.make_run_directory("test_dir")
        config["patch_files"] = {
            "source_location": tmpdir,
            "source_name": "coupler.res",
            "target_location": "INPUT",
            "target_name": "coupler.res",
            "copy_method": "copy",
        }
        with open(os.path.join(tmpdir, "coupler.res"), "w") as f:
            f.write(valid_coupler_res)
        current_date = get_current_date(config)
        self.assertEqual(current_date, valid_current_date)

    def test_get_coupler_res_filename_from_bytes_coupler_res_asset(self):
        # In-memory (bytes) assets are not supported for coupler.res lookup.
        config = copy.deepcopy(DEFAULT_CONFIG)
        config["patch_files"] = {
            "bytes": b"some data",
            "target_location": "INPUT",
            "target_name": "coupler.res",
        }
        with self.assertRaises(NotImplementedError):
            _get_coupler_res_filename(config)

    def test_get_current_date_from_config_empty_initial_conditions(self):
        config = copy.deepcopy(DEFAULT_CONFIG)
        config["initial_conditions"] = []
        config["namelist"]["coupler_nml"]["current_date"] = valid_current_date
        current_date = get_current_date(config)
        self.assertEqual(current_date, valid_current_date)

    def test_update_diag_table_for_config(self):
        # The diag_table header (experiment name + base date) should be rewritten.
        rundir = self.make_run_directory("test_rundir")
        diag_table_filename = os.path.join(rundir, "diag_table")
        with open(diag_table_filename, "w") as f:
            f.write(diag_table_test_in)
        current_date = get_current_date(config_for_update_diag_table_test)
        update_diag_table_for_config(
            config_for_update_diag_table_test, current_date, diag_table_filename
        )
        with open(diag_table_filename) as f:
            self.assertEqual(diag_table_test_out, f.read())
def test_get_coupler_res_filename_from_dir():
    """A coupler.res placed directly in an initial-conditions directory is found."""
    config = copy.deepcopy(DEFAULT_CONFIG)
    with tempfile.TemporaryDirectory() as ic_dir:
        expected = os.path.join(ic_dir, "coupler.res")
        with open(expected, "w") as handle:
            handle.write(valid_coupler_res)
        config["initial_conditions"] = ic_dir
        assert _get_coupler_res_filename(config) == expected


if __name__ == "__main__":
    unittest.main()
| StarcoderdataPython |
54878 | import os
import csv
import sys
from sklearn.model_selection import train_test_split
sys.path.append("..")
from training_config import RANDOM_SEED, ALLOWED_CLASSES, DATA_DIR
def stratified_split(X, y, test_size=0.2, validate_size=0.2, random_state=42):
    """Split into stratified train / test / validate partitions.

    ``validate_size`` is expressed as a fraction of the *whole* dataset, so
    the second split rescales it relative to what remains after carving off
    the test partition.
    """
    X_train, X_test, y_train, y_test = train_test_split(
        X, y, stratify=y, test_size=test_size, random_state=random_state
    )
    remaining_fraction = validate_size / (1 - test_size)
    X_train, X_val, y_train, y_val = train_test_split(
        X_train, y_train, stratify=y_train,
        test_size=remaining_fraction, random_state=random_state
    )
    return X_train, X_test, X_val, y_train, y_test, y_val
def populate(X, Z, y):
    """Expand per-patient file lists into flat, parallel (files, labels) lists.

    X maps patient keys to lists of file paths, Z is the ordered list of keys
    to include, and y holds one label per entry of Z.
    """
    files = []
    labels = []
    for key, label in zip(Z, y):
        per_patient = X[key]
        files.extend(per_patient)
        labels.extend([label] * len(per_patient))
    return files, labels
def create_dataset(X, y, file_name):
    """Write one (path, label) CSV row per sample into DATA_DIR/file_name."""
    target = os.path.join(DATA_DIR, file_name)
    with open(target, 'w', newline='') as csvfile:
        writer = csv.writer(csvfile, delimiter=',', quotechar='|',
                            quoting=csv.QUOTE_MINIMAL)
        writer.writerows(zip(X, y))
if __name__ == "__main__":
    # X: "<class>_<patient>" -> list of .pkl file paths
    # y: one class label per key in Z; Z: ordered list of patient keys
    X = {}
    y = []
    Z = []
    for class_name in ALLOWED_CLASSES:
        class_path = os.path.join(DATA_DIR, class_name)
        files = []  # NOTE(review): collected but never used afterwards
        for file_name in os.listdir(class_path):
            if not file_name.endswith(".pkl"):
                continue
            files.append(file_name)
            # File names appear to start with the patient id ("<patient>_...").
            splitted_terms = file_name.split("_")
            patient_id = splitted_terms[0]
            value = class_name + "_" + patient_id
            if value not in X.keys():
                X[value] = []
                y.append(class_name)
                Z.append(value)
            X[value].append(os.path.join(class_path, file_name))
    # Split at the *patient* level so no patient spans two partitions.
    Z_train, Z_test, Z_val, y_train, y_test, y_val = stratified_split(Z, y, test_size=0.2, validate_size=0.2,
                                                                      random_state=RANDOM_SEED)
    # Expand patient keys back into individual files with matching labels.
    X_train, y_train = populate(X, Z_train, y_train)
    X_test, y_test = populate(X, Z_test, y_test)
    X_val, y_val = populate(X, Z_val, y_val)
    print("Train size: {}".format(len(X_train)))
    print("Test size: {}".format(len(X_test)))
    print("Val size: {}".format(len(X_val)))
    create_dataset(X_train, y_train, 'train.csv')
    create_dataset(X_test, y_test, 'test.csv')
    create_dataset(X_val, y_val, 'val.csv')
| StarcoderdataPython |
#!/usr/bin/env python
# Minimal MPI demo: every rank prints its own independent random draw.
# NOTE(review): the final print uses Python 2 statement syntax, so despite the
# generic shebang this script only runs under Python 2.
import os,sys
from mpi4py import MPI
import configparser
comm=MPI.COMM_WORLD
rank=comm.Get_rank()
from random import random
# cf=configparser.ConfigParser()
# cf.read(sys.argv[1])
# scan_method=cf.get('scan','Scan method')
# print 'scan_method=',scan_method
print 'rank',rank,' has random number',random()
| StarcoderdataPython |
160062 | <gh_stars>0
import pandas as pd
class FlowsResult():
    """Accumulates per-flow TCP statistics rows and periodically flushes them
    to a CSV file, so long experiments do not hold everything in memory.
    """

    def __init__(self, csv, clear=False,
                 columns=['flow', 'tid', 'cwnd', 'rate', 'loss', 'sacked', 'acked', 'rtt', 'inflight']):
        # NOTE: the mutable default list is never mutated, so sharing it
        # across instances is safe here.
        self.csv = csv
        self.columns = columns
        self.res = pd.DataFrame(columns=columns)
        if clear:
            self.res.to_csv(csv, index=False)  # truncate the file, keep the header row

    def append(self, row, flush_period=30):
        """Appends row df to result. row should be a list of lists (one inner
        list per record); flushes to disk every ``flush_period`` buffered rows.
        """
        assert len(row[0]) == len(self.columns)
        row_df = pd.DataFrame(row, columns=self.columns)
        # DataFrame.append() was deprecated in pandas 1.4 and removed in 2.0;
        # pd.concat is the supported equivalent.
        self.res = pd.concat([self.res, row_df])
        if self.res.shape[0] % flush_period == 0:
            self.flush()

    def flush(self, index=False):
        """Flushes current results to file (append mode, no header) and
        clears the in-memory buffer.
        """
        self.res.to_csv(self.csv, index=index, mode='a',
                        header=False, columns=self.columns)
        print(f'{self.res.shape[0]} lines flushed to {self.csv}.')
        self.res = self.res[0:0]

    def read(self, idx_col=False):
        """Reads the DataFrame back from the CSV file."""
        self.res = pd.read_csv(self.csv, index_col=idx_col)
        return self.res
| StarcoderdataPython |
98859 | <gh_stars>0
from .languagemodels import ngram
from .classification import naive_bayes | StarcoderdataPython |
84065 | from typing import List
class Node:
    """A tree node identified by an integer index, holding one optional
    parent and an index-keyed collection of children."""

    def __init__(self, index):
        self.index = index
        self._childes = {}
        self._parent = None

    @property
    def parent(self):
        return self._parent

    @parent.setter
    def parent(self, value):
        self._parent = value

    def get_n_parents(self, n):
        """Collect up to *n* ancestors, nearest first.

        :param n: maximum number of ancestors to return
        :return: (ancestors, more) where *more* is True when further
            ancestors exist beyond the ones returned
        """
        ancestors = []
        node = self
        while len(ancestors) < n and node.parent is not None:
            ancestors.append(node.parent)
            node = node.parent
        return ancestors, node.parent is not None

    @property
    def childes(self):
        return list(self._childes.values())

    def add_child(self, child):
        self._childes[child.index] = child
class Tree:
    """Builds a parent/child Node tree from an OpenCV contour ``hierarchy``
    array (rows of [next, previous, first_child, parent])."""

    def __init__(self, cv2_presentation):
        self.roots = []
        self.flat = {}
        self._leaves = None
        self._init_tree(cv2_presentation)

    def _init_tree(self, cv2_presentation):
        # Each row becomes [index, next, prev, first_child, parent]; sorting by
        # the parent column puts most parents ahead of their children.
        rows = [[i] + cv2_presentation[0][i].tolist() for i in range(len(cv2_presentation[0]))]
        rows = sorted(rows, key=lambda r: r[4])
        pos = 0
        while pos < len(rows):
            entry = rows[pos]
            node = Node(entry[0])
            parent_index = entry[4]
            if parent_index == -1:
                # No parent: this node is a root.
                self.flat[node.index] = node
                self.roots.append(node)
            elif parent_index not in self.flat:
                # Parent not built yet: move this entry to the back and retry
                # later without advancing the cursor.
                rows.append(rows.pop(pos))
                continue
            else:
                parent = self.flat[parent_index]
                self.flat[node.index] = node
                node.parent = parent
                parent.add_child(node)
            pos += 1

    @property
    def leaves(self) -> List[Node]:
        if not self._leaves:
            self._leaves = [node for node in self.flat.values() if not node.childes]
        return self._leaves
1778431 | import numpy as np
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
from qlearnkit.algorithms import QKMeans
import pytest
from qiskit import Aer
from qiskit.utils import QuantumInstance, algorithm_globals
# Seed both the algorithm globals and each backend so runs are reproducible.
seed = 42
algorithm_globals.random_seed = seed
# Exact statevector backend: deterministic amplitudes, no sampling noise.
sv_quantum_instance = QuantumInstance(
    Aer.get_backend("aer_simulator_statevector"),
    seed_simulator=algorithm_globals.random_seed,
    seed_transpiler=algorithm_globals.random_seed,
)
# Sampling (qasm-style) backend: 100 shots, seeded for reproducibility.
qasm_quantum_instance = QuantumInstance(
    Aer.get_backend("aer_simulator"),
    shots=100,
    seed_simulator=algorithm_globals.random_seed,
    seed_transpiler=algorithm_globals.random_seed,
)
@pytest.mark.parametrize(
    'quantum_instance, quantum_instance_type',
    [
        (qasm_quantum_instance, 'qasm'),
        (sv_quantum_instance, 'statevector')
    ]
)
@pytest.mark.parametrize("dtype", [np.float32, np.float64])
def test_kmeans_results(quantum_instance, quantum_instance_type, dtype):
    # Checks that KMeans works as intended on toy dataset by comparing with
    # expected results computed by hand.
    X = np.array([
        [0, 0],
        [0.5, 0],
        [0.5, 1],
        [1, 1]
    ], dtype=dtype)
    init_centers = np.array([
        [0, 0],
        [1, 1]
    ], dtype=dtype)
    expected_labels = [0, 0, 1, 1]
    expected_centers = np.array([[0.25, 0], [0.75, 1]], dtype=dtype)
    expected_n_iter = 2
    # n_init=1 with explicit initial centers keeps the run deterministic.
    qkmeans = QKMeans(
        quantum_instance=quantum_instance,
        n_clusters=2,
        n_init=1,
        init=init_centers
    )
    qkmeans.fit(X)
    np.testing.assert_array_equal(qkmeans.labels_, expected_labels,
                                  f"Test failed with {quantum_instance_type}.\n"
                                  f"Expected {expected_labels}, but it was {qkmeans.labels_}")
    np.testing.assert_allclose(qkmeans.cluster_centers_, expected_centers)
    assert qkmeans.n_iter_ == expected_n_iter
@pytest.mark.parametrize(
    'quantum_instance, quantum_instance_type',
    [
        (qasm_quantum_instance, 'qasm'),
        (sv_quantum_instance, 'statevector')
    ]
)
def test_kmeans_relocated_clusters(quantum_instance, quantum_instance_type):
    # check that empty clusters are relocated as expected
    X = np.array([
        [0, 0],
        [0.5, 0],
        [0.5, 1],
        [1, 1]
    ])
    # second center too far from others points will be empty at first iter
    init_centers = np.array([
        [0.5, 0.5],
        [3, 3]
    ])
    expected_labels = [0, 0, 1, 1]
    expected_centers = [[0.25, 0], [0.75, 1]]
    expected_n_iter = 2
    qkmeans = QKMeans(
        quantum_instance=quantum_instance,
        n_clusters=2,
        n_init=1,
        init=init_centers
    )
    # fit_predict must agree with labels_ after fitting.
    pred_labels = qkmeans.fit_predict(X)
    np.testing.assert_array_equal(pred_labels, expected_labels,
                                  f"Test failed with {quantum_instance_type}.\n"
                                  f"Expected {expected_labels}, but it was {pred_labels}"
                                  )
    np.testing.assert_allclose(qkmeans.cluster_centers_, expected_centers)
    assert qkmeans.n_iter_ == expected_n_iter
@pytest.mark.parametrize(
    'quantum_instance, quantum_instance_type',
    [
        (qasm_quantum_instance, 'qasm'),
        (sv_quantum_instance, 'statevector')
    ]
)
@pytest.mark.parametrize("dtype", [np.float32, np.float64])
def test_kmeans_iris(quantum_instance, quantum_instance_type, dtype):
    """Cluster the first two iris features (classes 0 and 1 only) and check
    that fit(X).predict(X) matches the fitted labels_ attribute."""
    qkmeans = QKMeans(
        n_clusters=3,
        quantum_instance=quantum_instance
    )
    X, y = load_iris(return_X_y=True)
    # Keep only classes 0 and 1, and only the first two features.
    # BUG FIX: the previous code rebound X first and then filtered y by
    # zipping the *already filtered* X with the full y, which only produced
    # the right labels because the iris targets happen to be sorted.  An
    # explicit mask makes the filtering order-independent.
    mask = y != 2
    X = np.asarray(X[mask][:, 0:2], dtype=dtype)
    y = np.asarray(y[mask], dtype=dtype)
    train_data, test_data, train_label, test_label = train_test_split(
        X, y, test_size=0.1, random_state=42
    )
    # Perform clustering
    qkmeans.fit(train_data, train_label)
    # Predict the nearest cluster of train data
    predicted_labels = qkmeans.predict(train_data)
    # assert fit(X).predict(X) equal fit_predict(X)
    np.testing.assert_array_equal(predicted_labels, qkmeans.labels_)
| StarcoderdataPython |
42759 | <gh_stars>0
#!/usr/bin/env python2.7
import pika
import json, os
import magic
import time
import ast
from sys import argv
from cassandra.cluster import Cluster
from cassandra.auth import PlainTextAuthProvider
from cassandra import query
from sets import Set
def print_usage():
    """Print the command-line synopsis plus an example invocation, then exit(-1)."""
    usage = "USAGE: %s KEYSPACE_FROM KEYSPACE_TO TABLE SELECTOR CLUSTER_IPS USERNAME PASSWORD" % argv[0]
    example = "e.g.:\n%s holmes_totem holmes results \"service_name = 'yara'\" \"['10.0.4.80','10.0.4.81','10.0.4.82']\" cassandra password" % argv[0]
    print(usage)
    print(example)
    exit(-1)
# ---- argument parsing -------------------------------------------------------
if len(argv) != 8:
    print_usage()
keyspace_from = argv[1]
keyspace_to = argv[2]
table = argv[3]
selection = argv[4]
cluster_ips = ast.literal_eval(argv[5])  # literal_eval keeps untrusted strings safe
username = argv[6]
password = argv[7]
if type(cluster_ips) != list:
    print("ERROR: CLUSTER_IPS must be a list!")
    print_usage()
# ---- interactive confirmation ----------------------------------------------
print("Copying from keyspace '%s' to '%s' on cluster %s: Table '%s' where \"%s\".\n\nContinue? [yn]" % (keyspace_from, keyspace_to, cluster_ips, table, selection))
c = ""
while c != "y":
    c = raw_input()  # NOTE: raw_input is Python 2 only (matches the python2.7 shebang)
    if c == 'n':
        print("Aborted")
        exit(-1)
# ---- copy rows --------------------------------------------------------------
ap = PlainTextAuthProvider(username=username, password=password)
cluster = Cluster(cluster_ips, auth_provider=ap)
sess_get = cluster.connect(keyspace_from)
sess_insert = cluster.connect(keyspace_to)
sess_get.row_factory = query.dict_factory
rows = sess_get.execute("SELECT * FROM %s WHERE %s;" % (table, selection))
i = 0
for r in rows:
    i += 1
    keys = []
    vals = []
    for k in r:
        keys.append("%s" % str(k))
        vals.append("%%(%s)s" % str(k))
    # Builds INSERT INTO table (k1,...) VALUES (%(k1)s,...) so the driver can
    # bind the row dict as named parameters.
    insert_stmt = "INSERT INTO %s (%s) VALUES (%s)" % (table, ",".join(keys), ",".join(vals))
    sess_insert.execute(insert_stmt, r)
    print("Copied %d" % (i))
print("=======")
print("Copied %d entries" % i)
| StarcoderdataPython |
134809 | <filename>emailHelper.py
#!/usr/bin/env python3
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
# Build and send a two-part (plain-text + HTML) email via a local SMTP server.
# me == my email address
# you == recipient's email address
me = "<EMAIL>"
you = "<EMAIL>"
# Create message container - the correct MIME type is multipart/alternative.
msg = MIMEMultipart('alternative')
msg['Subject'] = "Link"
msg['From'] = me
msg['To'] = you
# Create the body of the message (a plain-text and an HTML version).
text = "Hi!\nHow are you?\nHere is the link you wanted:\nhttps://www.python.org"
html = """\
<html>
<head></head>
<body>
<p>Hi!<br>
How are you?<br>
Here is the <a href="https://www.python.org">link</a> you wanted.
</p>
</body>
</html>
"""
# Record the MIME types of both parts - text/plain and text/html.
part1 = MIMEText(text, 'plain')
part2 = MIMEText(html, 'html')
# Attach parts into message container.
# According to RFC 2046, the last part of a multipart message, in this case
# the HTML message, is best and preferred.
msg.attach(part1)
msg.attach(part2)
# Send the message via local SMTP server.
# NOTE(review): "localhost:5000" relies on smtplib's host:port string parsing;
# confirm a mail server actually listens on port 5000.
s = smtplib.SMTP('localhost:5000')
# sendmail function takes 3 arguments: sender's address, recipient's address
# and message to send - here it is sent as one string.
s.sendmail(me, you, msg.as_string())
s.quit()
3273099 | #coding:utf-8
#!/bin/env python2.7
import sys
import os
import matplotlib
matplotlib.use('Agg')
import pylab
import itertools
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.ticker as tkr
from matplotlib.backends.backend_pdf import PdfPages
args = sys.argv
inputdirs = args[1:-1] #directories where SweeD_Report.BCINxx..._nnkb.txt and Seqfile_BCINxx..._evolstats_nnkb.txt are stored.
outputdir = args[-1]
#Options
mbconvert = True
scale = 0.2 #in Mbp
windows = ['10kb', '50kb', '100kb'] #Frames in which statistics are calculated. From smaller frame to bigger.
chromosomes = ['BCIN01', 'BCIN02', 'BCIN03', 'BCIN04', 'BCIN05', 'BCIN06', 'BCIN07', 'BCIN08', 'BCIN09', 'BCIN10', 'BCIN11', 'BCIN12', 'BCIN13', 'BCIN14', 'BCIN15']
filestypes = {'CLR':['Position', 'Likelihood', 'Alpha'], 'EVS':['Window', 'Start', 'Stop', '\'Hns\'', '\'S\'', '\'Pi\'', '\'thetaW\'', '\'Hsd\'', '\'D\'']}
#Functions
def Prepline(inputfile):
    """Read the next line of *inputfile* and return its tab-separated fields
    as a tuple (trailing whitespace stripped).  At EOF this returns ('',),
    which callers use as an end-of-data sentinel."""
    fields = inputfile.readline().rstrip().split('\t')
    return tuple(fields)
def GenTreeDict(chromosome, filesdict, filestype, fileslist, windows):
    """Populate filesdict[filestype] with one file path per window size,
    keeping only paths whose name mentions *chromosome*.  If several files
    match the same window, the last one in *fileslist* wins."""
    by_window = {}
    for path in fileslist:
        if chromosome not in path:
            continue
        for window in windows:
            if window in path:
                by_window[window] = path
    filesdict[filestype] = by_window
#Main Script
inputfiles, CLRfiles, EVSfiles = (list() for i in range(3))
for inputdir in inputdirs :
inputfiles = os.listdir(inputdir)
for inputfile in inputfiles :
if 'SweeD_Report' in inputfile :
CLRfiles.append(''.join([inputdir, inputfile])) #list of valid sweed CLR files
elif 'evolstats' in inputfile :
EVSfiles.append(''.join([inputdir, inputfile])) #list of valid evolstats files
filesdict = dict()
for bcin in chromosomes : #creates the arborescence of files for each chromosome studied
filesdict[bcin] = dict()
GenTreeDict(bcin, filesdict[bcin], 'CLR', CLRfiles, windows)
GenTreeDict(bcin, filesdict[bcin], 'EVS', EVSfiles, windows)
datadict = dict()
for chrnb in chromosomes :#['BCIN01'] :
datadict[chrnb] = dict()
filetype = 'CLR'
datadict[chrnb][filetype] = dict()
for key, value in filesdict[chrnb][filetype].items() :
with open(value, 'r') as infile :
datadict[chrnb][filetype][key] = dict()
intuple = ('0')
while intuple[0] != filestypes[filetype][0] :
intuple = Prepline(infile)
for tupelem in intuple :
datadict[chrnb][filetype][key][tupelem] = list()
intuple = Prepline(infile)
nbcols = len(intuple)
while intuple != ('',) :
for i in range(0, nbcols) :
if mbconvert == True and i == 0 :
datadict[chrnb][filetype][key][filestypes[filetype][i]].append(float(intuple[i])/1000000)
elif i in range(1,3) :
datadict[chrnb][filetype][key][filestypes[filetype][i]].append(float(intuple[i]))
intuple = Prepline(infile)
filetype = 'EVS'
datadict[chrnb][filetype] = dict()
for key, value in filesdict[chrnb][filetype].items() :
with open(value, 'r') as infile :
datadict[chrnb][filetype][key] = dict()
intuple = ('0')
while intuple[0] != filestypes[filetype][0] :
intuple = Prepline(infile)
for tupelem in intuple :
datadict[chrnb][filetype][key][tupelem] = list()
intuple = Prepline(infile)
while intuple != ('',) :
for i in range(0, len(intuple)) :
if mbconvert == True and i == 1 :
datadict[chrnb][filetype][key][filestypes[filetype][i]].append(float(intuple[i])/1000000)
elif i in (3, 7, 8) :
if intuple[i] == 'None' :
datadict[chrnb][filetype][key][filestypes[filetype][i]].append(0.0)
else :
datadict[chrnb][filetype][key][filestypes[filetype][i]].append(float(intuple[i]))
intuple = Prepline(infile)
#Graphs generation
# Build one 4-panel figure per chromosome (CLR, Tajima's D, standardized
# Fay & Wu's H, raw Fay & Wu's H along the chromosome) and collect all
# figures into a single PDF, plus one standalone PNG per chromosome.
# NOTE(review): relies on names built earlier in this script (datadict,
# chromosomes, windows) and on plt / tkr / pylab / PdfPages imports.
# Positions appear to be in Mb (see the shared x-axis label) -- confirm.
with PdfPages('Summary_genome_statistics.pdf') as pdf :
    for bcin in chromosomes : #['BCIN01'] :
        fig = plt.figure(figsize=(8, 6), dpi=1200)
        # Common x upper limit: last CLR position of the largest window size.
        largestwindow = windows[-1]
        maxpos = datadict[bcin]['CLR'][largestwindow]['Position'][-1]
        linestyles = {'10kb':'-', '50kb':'--', '100kb':'--'}
        linecolors = {'10kb':'darkorange', '50kb':'steelblue', '100kb':'turquoise'}
        plt.suptitle(bcin, fontsize=10)
        #Graph with CLR values along genome
        #(genome position corresponds to the starting point of the sliding window)
        filetype = 'CLR'
        clr1 = plt.subplot(4,1,1)
        clr1.spines['top'].set_color('none')
        clr1.spines['right'].set_color('none')
        clr1.spines['bottom'].set_linewidth(0.5)
        clr1.spines['left'].set_linewidth(0.5)
        clr1.set_xlim(0.0, maxpos)
        clr1.xaxis.set_major_locator(tkr.MultipleLocator(0.2)) #Set main ticks of x axis.
        clr1.xaxis.set_minor_locator(tkr.MultipleLocator(0.05)) #Set secondary tick (without labels) of x axis.
        clr1.tick_params(axis='x', which='major', width=.5, labelsize=4, direction='out')
        clr1.tick_params(axis='x', which='minor', width=.25, direction='out')
        # One line per window size, all sharing the panel.
        for window in windows :
            clr1.plot(datadict[bcin][filetype][window]['Position'],
                      datadict[bcin][filetype][window]['Likelihood'],
                      color=linecolors[window], linewidth=0.5, linestyle=linestyles[window], zorder=0.1)
        clr1.set_ylabel('CLR', fontsize=6)
        # CLR is non-negative: clamp the lower y limit to 0.
        lowlim, uplim = clr1.get_ylim()
        clr1.set_ylim(0.0, uplim)
        clr1.yaxis.set_major_locator(tkr.MultipleLocator(20))
        clr1.yaxis.set_minor_locator(tkr.MultipleLocator(5))
        clr1.tick_params(axis='y', which='major', width=.5, labelsize=4, direction='out')
        clr1.tick_params(axis='y', which='minor', width=.25, direction='out')
        #Graph with Tajima's D values along the genome
        #(genome position corresponds to the starting point of the sliding window)
        filetype = 'EVS'
        evs1 = plt.subplot(4,1,2)
        evs1.spines['top'].set_color('none')
        evs1.spines['right'].set_color('none')
        # Anchor the x spine at y=0 so positive/negative D read directly.
        evs1.spines['bottom'].set_position(('data', 0))
        evs1.spines['bottom'].set_linewidth(0.5)
        evs1.spines['left'].set_linewidth(0.5)
        #evs1.axhline(0, linestyle='-', linewidth=0.5, color='lightgray') #Add a horizontal line at y=0.
        evs1.set_xlim(0.0, maxpos)
        evs1.xaxis.set_major_locator(tkr.IndexLocator(base=0.2, offset=0.2)) #Set main ticks of x axis.
        evs1.xaxis.set_minor_locator(tkr.MultipleLocator(0.05)) #Set secondary tick (without labels) of x axis.
        evs1.tick_params(axis='x', which='major', width=.5, labelsize=4, direction='inout')
        evs1.tick_params(axis='x', which='minor', width=.25, direction='inout')
        for window in windows :
            evs1.plot(datadict[bcin][filetype][window]['Start'],
                      datadict[bcin][filetype][window]['\'D\''],
                      color=linecolors[window], linewidth=0.5, linestyle=linestyles[window], zorder=0.1)
        evs1.set_ylabel('Tajima\'s D', fontsize=6)
        lowlim, uplim = evs1.get_ylim()
        evs1.set_ylim(lowlim, uplim)
        evs1.yaxis.set_major_locator(tkr.MultipleLocator(1))
        evs1.yaxis.set_minor_locator(tkr.MultipleLocator(0.25))
        evs1.tick_params(axis='y', which='major', width=.5, labelsize=4, direction='out')
        evs1.tick_params(axis='y', which='minor', width=.25, direction='out')
        #Graph with Fay & Wu's H (standardized by Zeng) values along the genome
        #(genome position corresponds to the starting point of the sliding window)
        evs2 = plt.subplot(4,1,3)
        evs2.spines['top'].set_color('none')
        evs2.spines['right'].set_color('none')
        evs2.spines['bottom'].set_position(('data', 0))
        evs2.spines['bottom'].set_linewidth(0.5)
        evs2.spines['left'].set_linewidth(0.5)
        evs2.set_xlim(0.0, maxpos)
        evs2.xaxis.set_major_locator(tkr.IndexLocator(base=0.2, offset=0.2)) #Set main ticks of x axis.
        evs2.xaxis.set_minor_locator(tkr.MultipleLocator(0.05)) #Set secondary tick (without labels) of x axis.
        evs2.tick_params(axis='x', which='major', width=.5, labelsize=4, direction='inout')
        evs2.tick_params(axis='x', which='minor', width=.25, direction='inout')
        for window in windows :
            plt.plot(datadict[bcin][filetype][window]['Start'],
                     datadict[bcin][filetype][window]['\'Hsd\''],
                     color=linecolors[window], linewidth=0.5, linestyle=linestyles[window], zorder=0.1)
        lowlim, uplim = evs2.get_ylim()
        evs2.set_ylim(lowlim, uplim)
        evs2.yaxis.set_major_locator(tkr.MultipleLocator(1))
        evs2.yaxis.set_minor_locator(tkr.MultipleLocator(0.25))
        evs2.tick_params(axis='y', which='major', width=.5, labelsize=4, direction='out')
        evs2.tick_params(axis='y', which='minor', width=.25, direction='out')
        evs2.set_ylabel('Fay and Wu\'s H (Std)', fontsize=6)
        #Graph with Fay & Wu's H (raw) values along the genome
        #(genome position corresponds to the starting point of the sliding window)
        evs3 = plt.subplot(4,1,4)
        evs3.spines['top'].set_color('none')
        evs3.spines['right'].set_color('none')
        evs3.spines['bottom'].set_position(('data', 0))
        evs3.spines['bottom'].set_linewidth(0.5)
        evs3.spines['left'].set_linewidth(0.5)
        evs3.set_xlim(0.0, maxpos)
        evs3.xaxis.set_major_locator(tkr.IndexLocator(base=0.2, offset=0.2)) #Set main ticks of x axis.
        evs3.xaxis.set_minor_locator(tkr.MultipleLocator(0.05)) #Set secondary tick (without labels) of x axis.
        evs3.tick_params(axis='x', which='major', width=.5, labelsize=4, direction='inout')
        evs3.tick_params(axis='x', which='minor', width=.25, direction='inout')
        # Only this panel passes label=window; these labels feed the single
        # fig.legend() call below.
        for window in windows :
            plt.plot(datadict[bcin][filetype][window]['Start'],
                     datadict[bcin][filetype][window]['\'Hns\''],
                     color=linecolors[window], linewidth=0.5, linestyle=linestyles[window], label=window, zorder=0.1)
        lowlim, uplim = evs3.get_ylim()
        evs3.set_ylim(lowlim, uplim)
        evs3.yaxis.set_major_locator(tkr.MultipleLocator(100))
        evs3.yaxis.set_minor_locator(tkr.MultipleLocator(20))
        evs3.tick_params(axis='y', which='major', width=.5, labelsize=4, direction='out')
        evs3.tick_params(axis='y', which='minor', width=.25, direction='out')
        evs3.set_ylabel('Fay and Wu\'s H (Raw)', fontsize=6)
        #evs3.set_xlabel('Position along chromosome', labelpad=40, fontsize=6)
        #Save figure
        plt.tight_layout()
        fig.text(x=0.5, y=0.015, s='Position along chromosome sequence (Mb)', fontsize=8, horizontalalignment='center')
        fig.legend(loc='lower right', ncol=1, fontsize=6, facecolor='white', framealpha= 0.75, frameon=False)
        # One standalone PNG per chromosome, plus a page in the shared PDF.
        pylab.savefig(''.join([bcin, '.png']))
        pdf.savefig()
        plt.close()
| StarcoderdataPython |
3260379 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Virtualchain
~~~~~
copyright: (c) 2014-15 by Halfmoon Labs, Inc.
copyright: (c) 2016 by Blockstack.org
This file is part of Virtualchain
Virtualchain is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Virtualchain is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Virtualchain. If not, see <http://www.gnu.org/licenses/>.
"""
import argparse
import os
import sys
import subprocess
import signal
import json
import datetime
import traceback
import time
import random
import errno
from ConfigParser import SafeConfigParser
from .lib import config, indexer
from .lib.blockchain import session
log = session.get_logger("virtualchain")
# global instance of our implementation's state engine
state_engine = None
# global flag indicating that we're running
running = False
def sync_virtualchain(bitcoind_opts, last_block, state_engine, expected_snapshots={}, tx_filter=None ):
    """
    Synchronize the virtual blockchain state up until a given block.
    Obtain the operation sequence from the blockchain, up to and including last_block.
    That is, go and fetch each block we haven't seen since the last call to this method,
    extract the operations from them, and record in the given working_dir where we left
    off while watching the blockchain.
    Store the state engine state, consensus snapshots, and last block to the working directory.
    Return True on success
    Return False if we're supposed to stop indexing
    Abort the program on error.  The implementation should catch timeouts and connection errors
    """
    # NOTE(review): expected_snapshots={} is a mutable default argument; safe
    # only while callers never mutate it -- confirm before changing.
    rc = False
    start = datetime.datetime.now()
    # Effectively a single attempt: either build() completes and we break,
    # or any exception aborts the whole process (os.abort below).
    while True:
        try:
            # advance state
            rc = indexer.StateEngine.build(bitcoind_opts, last_block + 1, state_engine, expected_snapshots=expected_snapshots, tx_filter=tx_filter )
            break
        except Exception, e:
            # Python 2 except syntax -- this module is Python 2 (see the
            # ConfigParser import at the top of the file).
            log.exception(e)
            log.error("Failed to synchronize chain; exiting to safety")
            # Hard abort: no cleanup, no further indexing on error.
            os.abort()
    # Log wall-clock duration of the sync pass.
    time_taken = "%s seconds" % (datetime.datetime.now() - start).seconds
    log.info(time_taken)
    return rc
def setup_virtualchain(impl=None, bitcoind_connection_factory=None, index_worker_env=None):
    """
    Set up the virtual blockchain.
    Use the given virtual blockchain core logic.
    """
    # NOTE(review): bitcoind_connection_factory and index_worker_env are
    # accepted but never used, and the `global` declaration below is a no-op
    # because connect_bitcoind is never assigned in this function.  The
    # factory parameter was presumably meant to rebind connect_bitcoind --
    # confirm intent before relying on either parameter.
    global connect_bitcoind
    if impl is not None:
        # Register the caller-supplied implementation with the config module.
        config.set_implementation(impl)
def virtualchain_set_opfields( op, **fields ):
    """
    Pass along virtualchain-reserved fields to a virtualchain operation.
    This layer of indirection is meant to help with future compatibility,
    so virtualchain implementations do not try to set operation fields
    directly.

    @op: the operation dict to update in place (also returned).
    @fields: candidate field/value pairs; only names present in
    indexer.RESERVED_KEYS are copied onto @op, everything else is logged
    as a warning and ignored.
    """
    # Single pass over the fields: the original iterated fields twice
    # (once to warn, once to copy); one if/else pass does the same work.
    for f in fields.keys():
        if f in indexer.RESERVED_KEYS:
            # propagate reserved fields
            op[f] = fields[f]
        else:
            # warn about unsupported fields
            log.warning("Unsupported virtualchain field '%s'" % f)

    return op
def connect_bitcoind( opts ):
    """
    Top-level helper for opening a bitcoind connection.

    Simply forwards @opts to session.connect_bitcoind_impl; per the module's
    design, the underlying implementation can be swapped at runtime via a
    module referenced by the VIRTUALCHAIN_MOD_CONNECT_BLOCKCHAIN
    environment variable.
    """
    return session.connect_bitcoind_impl( opts )
| StarcoderdataPython |
139795 | '''
Unit tests for wind.py
'''
import unittest
import datetime
import pytz
import pandas as pd
import numpy as np
from envirodataqc import wind
class test_wind(unittest.TestCase):
    def setUp(self):
        '''
        Build the shared test dataframe: eight samples at 10-minute
        intervals starting 2021-03-18 00:00.

        The values are somewhat arbitrary but cover:
          - a run of zero speeds while direction changes
          - a run of zero speeds with unchanging direction
          - non-zero speeds with a flatlined direction
        '''
        base = datetime.datetime(2021, 3, 18)
        stamps = [base + datetime.timedelta(minutes=10 * k) for k in range(8)]

        speeds = [1, 0, 0, 1, 2, 3, 0, 0]
        directions = [30, 55, 65, 60, 60, 65, 22, 22]

        self.data = pd.DataFrame(
            {'spvals': speeds, 'dirvals': directions},
            index=stamps
        )

    def tearDown(self):
        pass

    def test_windsp_ratio(self):
        # Expected ratio of zero-speed time, to 3 decimal places.
        self.assertAlmostEqual(wind.check_windsp_ratio(self.data), 0.31, 3)

    def test_windsp_withdir(self):
        expected = [0, 1, 1, 0, 0, 0, 0, 0]
        actual = wind.check_windsp_withdir(
            self.data['spvals'].to_numpy(),
            self.data['dirvals'].to_numpy()
        )
        self.assertEqual(actual, expected)

    def test_winddir_withsp(self):
        expected = [0, 0, 0, 1, 1, 0, 0, 0]
        actual = wind.check_winddir_withsp(
            self.data['spvals'].to_numpy(),
            self.data['dirvals'].to_numpy()
        )
        self.assertEqual(actual, expected)

    def test_windsp_ratio_tz(self):
        '''
        check_windsp_ratio must give the same answer on a
        timezone-aware dataframe.
        '''
        aware = self.data.tz_localize(pytz.timezone('America/Denver'))
        self.assertAlmostEqual(wind.check_windsp_ratio(aware), 0.31, 3)
# Allow running this test module directly (python <file>.py).
if __name__=='__main__':
    unittest.main()
| StarcoderdataPython |
3355804 | from ozobotmapf.graphics.drawables import Line
from ozobotmapf.simulator.agents.agent import Agent
class AnimatedAgent(Agent):
    """
    Agent that animates its path over time.

    Each call to update_path() rebuilds `active_path` as the line segments
    between the agent's tail position (the head position delayed by
    config.tail_lag) and its head position, routed through tile middles and
    tile-edge midpoints so the drawn path follows the map geometry.
    """

    def __init__(self, agent_id, raw_plans, ozomap, config):
        super().__init__(agent_id, raw_plans, ozomap, config)

    def update_path(self, time):
        """Recompute the drawable path segments for the given time."""
        self.active_path.clear()
        head = self._get_position(time)
        tail = self._get_position(time - self.config.tail_lag)
        self.__build_active_path(tail, head)

    def __build_active_path(self, from_pos, to_pos):
        # Same-tile segments can be drawn within the tile; a boundary
        # crossing must be routed through the shared edge midpoint.
        if from_pos.pos_tile == to_pos.pos_tile:
            self.__build_same_tile_path(from_pos, to_pos)
        else:
            self.__build_different_tile_path(from_pos, to_pos)

    def __build_same_tile_path(self, from_pos, to_pos):
        # Positions in the same half of the tile connect directly;
        # otherwise both are joined through the tile's middle point.
        if from_pos.is_first_half == to_pos.is_first_half:
            self.__build_line_between(from_pos, to_pos)
        else:
            self.__build_line_to_middle(from_pos)
            self.__build_line_to_middle(to_pos)

    def __build_line_between(self, from_pos, to_pos):
        """Straight segment between two positions in the same tile half."""
        p_from = from_pos.get_point_from_position()
        p_to = to_pos.get_point_from_position()
        self.__add_path_line(p_from, p_to)

    def __build_different_tile_path(self, from_pos, to_pos):
        """Tail side of the old tile, then head side of the new tile."""
        self.__build_line_before_leave(from_pos)
        self.__build_line_after_entry(to_pos)

    def __build_line_to_middle(self, pos):
        """Segment from a position to the middle of its tile."""
        position = pos.get_point_from_position()
        middle = pos.pos_tile.tile.get_middle()
        self.__add_path_line(middle, position)

    def __build_line_before_leave(self, from_pos):
        """Segments from the position to the midpoint of the exit edge."""
        position = from_pos.get_point_from_position()
        leave = from_pos.pos_tile.tile.get_edge_middle(from_pos.pos_tile.to_dir)
        if from_pos.is_first_half:
            middle = from_pos.pos_tile.tile.get_middle()
            # Reuse the `middle` computed above (the original re-queried the
            # tile here), matching the structure of __build_line_after_entry.
            self.__add_path_line(position, middle)
            self.__add_path_line(middle, leave)
        else:
            self.__add_path_line(position, leave)

    def __build_line_after_entry(self, to_pos):
        """Segments from the midpoint of the entry edge to the position."""
        position = to_pos.get_point_from_position()
        enter = to_pos.pos_tile.tile.get_edge_middle(to_pos.pos_tile.from_dir)
        if to_pos.is_first_half:
            self.__add_path_line(enter, position)
        else:
            middle = to_pos.pos_tile.tile.get_middle()
            self.__add_path_line(middle, enter)
            self.__add_path_line(middle, position)

    def __add_path_line(self, p_from, p_to):
        """Append a Line drawable of the configured width to the path."""
        self.active_path.add_drawable(
            Line(p_from, p_to, self.config.line_width)
        )
| StarcoderdataPython |
61949 | # -*- coding: utf-8 -*-
from django.db import models
from django.utils.translation import ugettext_lazy as _
from django.utils import formats
from django.contrib.auth.models import User
from jsonfield import JSONField
from mbase.models import MetaBaseModel, MetaBaseStatusModel
from mcat.models import Product
from mcat_order.conf import ORDER_STATUSES, CIVILITIES
class Customer(MetaBaseModel, MetaBaseStatusModel):
    """Customer record: contact/billing details linked one-to-one to a User."""
    first_name = models.CharField(max_length=120, verbose_name=_(u'First name'))
    last_name = models.CharField(max_length=120, verbose_name=_(u'Last name'))
    civility = models.CharField(max_length=60, verbose_name=_(u'Title'), choices=CIVILITIES, default=CIVILITIES[0][0])
    telephone = models.PositiveIntegerField(verbose_name=_(u'Phone number'))
    company_name = models.CharField(max_length=120, blank=True, verbose_name=_(u'Company name'))
    email = models.EmailField(verbose_name=_(u'Email'))
    address = models.TextField(verbose_name=_(u'Address'))
    user = models.OneToOneField(User, verbose_name=_(u'User') )
    extra = JSONField(blank=True, verbose_name=_(u'Extra infos'))

    class Meta:
        verbose_name=_(u'Customer')
        verbose_name_plural = _(u'Customers')
        ordering = ('last_name',)
        unique_together = ('first_name', 'last_name')

    def __unicode__(self):
        return unicode(self.first_name+' '+self.last_name)

    @property
    def telephone_formated(self):
        """Return the phone number grouped into four 2-digit pairs.

        Bug fix: ``telephone`` is a PositiveIntegerField, so the attribute
        value is an int; the previous code sliced it directly, which raises
        TypeError.  Convert to str before slicing.
        NOTE(review): only the first 8 digits are rendered and a leading
        zero is lost by integer storage -- confirm the intended format.
        """
        tel = str(self.telephone)
        return '%s %s %s %s' % (tel[0:2], tel[2:4], tel[4:6], tel[6:8])

    def get_civility(self):
        """Return the display label for ``civility``; falls back to the raw value."""
        for civ in CIVILITIES:
            if civ[0] == self.civility:
                return civ[1]
        return self.civility
class Order(MetaBaseModel):
    """An order placed by a customer; ``total`` is denormalized and optional."""
    customer = models.ForeignKey(Customer, related_name='orders', verbose_name=_(u'Customer'))
    # Workflow state, one of ORDER_STATUSES; defaults to the first status.
    status = models.CharField(max_length=120, verbose_name=_(u'Status'), choices=ORDER_STATUSES, default=ORDER_STATUSES[0][0])
    total = models.FloatField(null=True, blank=True, verbose_name=_(u'Total'))

    class Meta:
        verbose_name=_(u'Order')
        verbose_name_plural = _(u'Orders')
        # Newest orders first.
        ordering = ('-created',)

    def __unicode__(self):
        # Human-readable summary: "<created> - <total> - <status>".
        date = formats.date_format(self.created, "SHORT_DATETIME_FORMAT")
        return unicode(date+' - '+str(self.total)+' - '+self.status)
class OrderedProduct(MetaBaseModel):
    """A line item of an Order.

    ``price_per_unit`` is stored on the line item -- presumably a snapshot
    of the product price at order time (TODO confirm against the checkout
    code that creates these rows).
    """
    product = models.ForeignKey(Product, related_name='ordered', verbose_name=_(u'Product'))
    order = models.ForeignKey(Order, related_name='+', verbose_name=_(u'Order'))
    quantity = models.PositiveIntegerField(verbose_name=_(u'Quantity'))
    price_per_unit = models.FloatField(verbose_name=_(u'Price per unit'))

    class Meta:
        verbose_name=_(u'Ordered product')
        verbose_name_plural = _(u'Ordered products')
        ordering = ('-created', 'order')

    def __unicode__(self):
        date = formats.date_format(self.created, "SHORT_DATETIME_FORMAT")
        return unicode(date)
class BaseOptions():
    """Options shared by all experiment configurations.

    Note: attributes are set by initialize(), not __init__ -- subclasses
    (e.g. TrainOptions) call initialize() explicitly from their __init__.
    """
    def initialize(self):
        self.dataset = './dataset'  # path to the dir of the dataset
        self.name = 'experiment'  # Name of the experiment
class TrainOptions(BaseOptions):
    """Training/search hyper-parameters layered on the base experiment options.

    Values marked "from paper" follow the reference publication's settings.
    """
    def __init__(self):
        # Populate the shared options (dataset path, experiment name).
        # self.initialize() resolves to BaseOptions.initialize since this
        # class does not override it (the original invoked it explicitly
        # through the base class).
        self.initialize()
        self.init_channels = 16
        self.layers = 16
        self.train_portion = 0.5  # fraction of data used for training
        self.report_freq = 50
        self.batch_size = 64  # batch size from paper
        self.learning_rate = 0.025  # lr from paper
        self.learning_rate_min = 0.001  # lr_min from paper
        self.momentum = 0.9  # momentum from paper
        self.weight_decay = 3e-4  # weight decay from paper
        self.epochs = 150  # num epochs from paper
        self.cutout = True  # use cutout augmentation
        self.cutout_length = 16  # cutout length
        self.arch_learning_rate = 3e-4
        self.arch_weight_decay = 1e-3  # architecture weight decay from paper
        self.arch_betas = (0.5, 0.999)
        self.initial_temp = 2.5  # initial softmax temperature from paper
        self.anneal_rate = 0.00003  # annealing rate of the softmax temperature from paper
1771613 | <filename>europarl/jobs/crawler.py<gh_stars>0
import configparser
import datetime
import json
import logging
import multiprocessing
import os
import random
import socket
import sys
import time
from datetime import date
from queue import Full
import requests
from dotenv import load_dotenv
from europarl import configuration
from europarl.db import (
DBInterface,
Documents,
Rules,
SessionDay,
URLs,
create_table_structure,
tables,
)
from europarl.mptools import (
EventMessage,
MainContext,
ProcWorker,
QueueProcWorker,
TimerProcWorker,
default_signal_handler,
init_signals,
)
from europarl.rules import rule
from europarl.workers import (
DateUrlGenerator,
DocumentDownloader,
PostProcessingScheduler,
PostProcessingWorker,
SessionDayChecker,
TokenBucketWorker,
)
def main():
    """Crawler entry point: set up the DB, spawn worker processes, and
    drain the shared event queue until shutdown is requested."""
    config = configuration.read()
    with Context(config) as main_ctx:
        # One-time DB setup and rule registration.
        create_table_structure(main_ctx.config)
        db = DBInterface(config=main_ctx.config["DEFAULT"])
        # NOTE(review): this passes the bound `keys` method itself, not
        # `keys()` -- confirm register_rules expects a callable and not a
        # list of rule names.
        Rules(db).register_rules(rule.rule_registry.keys)
        db.close()
        # rules.init_rules(main_ctx.config)
        init_signals(
            main_ctx.shutdown_event, default_signal_handler, default_signal_handler
        )
        # Rate-limit tokens flow through token_bucket_q; URLs to fetch
        # flow through url_q.
        token_bucket_q = main_ctx.MPQueue(100)
        url_q = main_ctx.MPQueue(10)
        main_ctx.Proc(
            token_bucket_q,
            name="SessionDayChecker",
            worker_class=SessionDayChecker,
            config=config["SessionDayChecker"],
        )
        # Spawn the configured number of downloader workers (default 1).
        for instance_id in range(int(config["Downloader"].get("Instances", 1))):
            main_ctx.Proc(
                token_bucket_q,
                url_q,
                name="Downloader_{}".format(instance_id),
                worker_class=DocumentDownloader,
                config=config["Downloader"],
            )
        main_ctx.Proc(
            url_q,
            name="DateUrlGenerator",
            worker_class=DateUrlGenerator,
            config=config["DateUrlGenerator"],
        )
        main_ctx.Proc(
            token_bucket_q,
            name="TokenGenerator",
            worker_class=TokenBucketWorker,
            config=config["TokenBucketWorker"],
        )
        # Main loop: drain events until shutdown.  Events are currently
        # discarded after retrieval (no handling beyond the None check).
        while not main_ctx.shutdown_event.is_set():
            event = main_ctx.event_queue.safe_get()
            if not event:
                continue
class Context(MainContext):
    """MainContext specialization that discards uncrawled URLs on shutdown."""
    def stop_procs(self):
        # Stop all worker processes first so no worker is still writing.
        super(Context, self).stop_procs()
        temp_db = DBInterface(config=self.config["General"])
        urls = URLs(temp_db)
        # drop uncrawled urls last to prevent race conditions
        self.logger.info("Dropping uncrawled urls")
        urls.drop_uncrawled_urls()
if __name__ == "__main__":
main()
| StarcoderdataPython |
1694609 | <reponame>Ivanazzz/SoftUni-W3resource-Python
def _search_combination(lo, hi, target):
    """Try every ordered pair (a, b) with lo <= a, b <= hi in nested-loop
    order, counting attempts; return (attempts, a, b) for the first pair
    summing to target, or (attempts, None, None) if none does."""
    attempts = 0
    for a in range(lo, hi + 1):
        for b in range(lo, hi + 1):
            attempts += 1
            if a + b == target:
                return attempts, a, b
    return attempts, None, None


start = int(input("Enter the first number: "))
final = int(input("Enter the second number: "))
magic_number = int(input("Enter the magic number: "))

counter_of_combinations, first_number, second_number = _search_combination(
    start, final, magic_number
)

if first_number is not None:
    print(f"Combination N:{counter_of_combinations} {first_number} + {second_number} = {magic_number}")
else:
    print(f"{counter_of_combinations} combinations - neither equals {magic_number}")
4830076 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.service_client import SDKClient
from msrest import Configuration, Serializer, Deserializer
from .version import VERSION
from msrest.pipeline import ClientRawResponse
from msrest.exceptions import HttpOperationError
from . import models
class AutoRestValidationTestConfiguration(Configuration):
    """Configuration for AutoRestValidationTest
    Note that all parameters used to create this instance are saved as instance
    attributes.

    :param subscription_id: Subscription ID.
    :type subscription_id: str
    :param str base_url: Service URL
    """

    def __init__(
            self, subscription_id, base_url=None):

        if subscription_id is None:
            raise ValueError("Parameter 'subscription_id' must not be None.")
        if not base_url:
            # Default endpoint is the local AutoRest test server.
            base_url = 'http://localhost:3000'

        super(AutoRestValidationTestConfiguration, self).__init__(base_url)

        # Advertise the generated package name/version in the User-Agent.
        self.add_user_agent('autorestvalidationtest/{}'.format(VERSION))

        self.subscription_id = subscription_id
class AutoRestValidationTest(SDKClient):
    """Test Infrastructure for AutoRest. No server backend exists for these tests.

    :ivar config: Configuration for client.
    :vartype config: AutoRestValidationTestConfiguration

    :param subscription_id: Subscription ID.
    :type subscription_id: str
    :param str base_url: Service URL
    """
    # NOTE: AutoRest-generated client -- manual edits are lost when the
    # client is regenerated (see the file header).

    def __init__(
            self, subscription_id, base_url=None):

        self.config = AutoRestValidationTestConfiguration(subscription_id, base_url)
        super(AutoRestValidationTest, self).__init__(None, self.config)

        # Collect all model classes from the models module for (de)serialization.
        client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
        self.api_version = '1.0.0'
        self._serialize = Serializer(client_models)
        self._deserialize = Deserializer(client_models)

    def validation_of_method_parameters(
            self, resource_group_name, id, custom_headers=None, raw=False, **operation_config):
        """Validates input parameters on the method. See swagger for details.

        :param resource_group_name: Required string between 3 and 10 chars
         with pattern [a-zA-Z0-9]+.
        :type resource_group_name: str
        :param id: Required int multiple of 10 from 100 to 1000.
        :type id: int
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: Product or ClientRawResponse if raw=true
        :rtype: ~validation.models.Product or
         ~msrest.pipeline.ClientRawResponse
        :raises: :class:`ErrorException<validation.models.ErrorException>`
        """
        # Construct URL
        url = self.validation_of_method_parameters.metadata['url']
        # The constraint kwargs below (max_length, pattern, maximum, ...)
        # are checked client-side by the serializer before any request is
        # sent (this is the "validation" this test method exercises).
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=10, min_length=3, pattern=r'[a-zA-Z0-9]+'),
            'id': self._serialize.url("id", id, 'int', maximum=1000, minimum=100, multiple=10)
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['apiVersion'] = self._serialize.query("self.api_version", self.api_version, 'str', pattern=r'\d{2}-\d{2}-\d{4}')

        # Construct headers
        header_parameters = {}
        header_parameters['Accept'] = 'application/json'
        if custom_headers:
            header_parameters.update(custom_headers)

        # Construct and send request
        request = self._client.get(url, query_parameters, header_parameters)
        response = self._client.send(request, stream=False, **operation_config)

        if response.status_code not in [200]:
            raise models.ErrorException(self._deserialize, response)

        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('Product', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized
    validation_of_method_parameters.metadata = {'url': '/fakepath/{subscriptionId}/{resourceGroupName}/{id}'}

    def validation_of_body(
            self, resource_group_name, id, body=None, custom_headers=None, raw=False, **operation_config):
        """Validates body parameters on the method. See swagger for details.

        :param resource_group_name: Required string between 3 and 10 chars
         with pattern [a-zA-Z0-9]+.
        :type resource_group_name: str
        :param id: Required int multiple of 10 from 100 to 1000.
        :type id: int
        :param body:
        :type body: ~validation.models.Product
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: Product or ClientRawResponse if raw=true
        :rtype: ~validation.models.Product or
         ~msrest.pipeline.ClientRawResponse
        :raises: :class:`ErrorException<validation.models.ErrorException>`
        """
        # Construct URL
        url = self.validation_of_body.metadata['url']
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self.config.subscription_id", self.config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=10, min_length=3, pattern=r'[a-zA-Z0-9]+'),
            'id': self._serialize.url("id", id, 'int', maximum=1000, minimum=100, multiple=10)
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}
        query_parameters['apiVersion'] = self._serialize.query("self.api_version", self.api_version, 'str', pattern=r'\d{2}-\d{2}-\d{4}')

        # Construct headers
        header_parameters = {}
        header_parameters['Accept'] = 'application/json'
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if custom_headers:
            header_parameters.update(custom_headers)

        # Construct body (optional -- serialized as a Product model when given)
        if body is not None:
            body_content = self._serialize.body(body, 'Product')
        else:
            body_content = None

        # Construct and send request
        request = self._client.put(url, query_parameters, header_parameters, body_content)
        response = self._client.send(request, stream=False, **operation_config)

        if response.status_code not in [200]:
            raise models.ErrorException(self._deserialize, response)

        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('Product', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized
    validation_of_body.metadata = {'url': '/fakepath/{subscriptionId}/{resourceGroupName}/{id}'}

    def get_with_constant_in_path(
            self, custom_headers=None, raw=False, **operation_config):
        """

        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: None or ClientRawResponse if raw=true
        :rtype: None or ~msrest.pipeline.ClientRawResponse
        :raises:
         :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
        """
        # Swagger defines this path segment as a constant value.
        constant_param = "constant"

        # Construct URL
        url = self.get_with_constant_in_path.metadata['url']
        path_format_arguments = {
            'constantParam': self._serialize.url("constant_param", constant_param, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}

        # Construct headers
        header_parameters = {}
        if custom_headers:
            header_parameters.update(custom_headers)

        # Construct and send request
        request = self._client.get(url, query_parameters, header_parameters)
        response = self._client.send(request, stream=False, **operation_config)

        if response.status_code not in [200]:
            raise HttpOperationError(self._deserialize, response)

        if raw:
            client_raw_response = ClientRawResponse(None, response)
            return client_raw_response
    get_with_constant_in_path.metadata = {'url': '/validation/constantsInPath/{constantParam}/value'}

    def post_with_constant_in_body(
            self, body=None, custom_headers=None, raw=False, **operation_config):
        """

        :param body:
        :type body: ~validation.models.Product
        :param dict custom_headers: headers that will be added to the request
        :param bool raw: returns the direct response alongside the
         deserialized response
        :param operation_config: :ref:`Operation configuration
         overrides<msrest:optionsforoperations>`.
        :return: Product or ClientRawResponse if raw=true
        :rtype: ~validation.models.Product or
         ~msrest.pipeline.ClientRawResponse
        :raises:
         :class:`HttpOperationError<msrest.exceptions.HttpOperationError>`
        """
        # Swagger defines this path segment as a constant value.
        constant_param = "constant"

        # Construct URL
        url = self.post_with_constant_in_body.metadata['url']
        path_format_arguments = {
            'constantParam': self._serialize.url("constant_param", constant_param, 'str')
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}

        # Construct headers
        header_parameters = {}
        header_parameters['Accept'] = 'application/json'
        header_parameters['Content-Type'] = 'application/json; charset=utf-8'
        if custom_headers:
            header_parameters.update(custom_headers)

        # Construct body
        if body is not None:
            body_content = self._serialize.body(body, 'Product')
        else:
            body_content = None

        # Construct and send request
        request = self._client.post(url, query_parameters, header_parameters, body_content)
        response = self._client.send(request, stream=False, **operation_config)

        if response.status_code not in [200]:
            raise HttpOperationError(self._deserialize, response)

        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('Product', response)

        if raw:
            client_raw_response = ClientRawResponse(deserialized, response)
            return client_raw_response

        return deserialized
    post_with_constant_in_body.metadata = {'url': '/validation/constantsInPath/{constantParam}/value'}
| StarcoderdataPython |
1790625 | print("Kamran")
print("Hello Kamran")
print("I am Kamran") | StarcoderdataPython |
176610 | #!/usr/bin/python
# -*- encoding: utf-8 -*-
from logger import setup_logger
from model import BiSeNet
from face_dataset import FaceMask
from loss import OhemCELoss
import torch
import torch.nn as nn
from torch.utils.data import DataLoader
import torch.nn.functional as F
import torch.distributed as dist
import os
import os.path as osp
import logging
import time
import numpy as np
from tqdm import tqdm
import math
from PIL import Image
import torchvision.transforms as transforms
import cv2
def vis_parsing_maps(im, parsing_anno, stride, save_im=False, save_path='vis_results/parsing_map_on_im.jpg'):
    """Overlay a per-pixel parsing annotation on an image.

    im            -- input image (converted via np.array; assumed RGB given
                     the COLOR_RGB2BGR conversion below -- TODO confirm).
    parsing_anno  -- 2D array of integer class labels.
    stride        -- upscaling factor applied to the annotation before
                     blending (nearest-neighbor, to keep labels discrete).
    save_im       -- when True, write the blended BGR image to save_path;
                     the function otherwise has no output (the return is
                     commented out below).
    """
    # Colors for all 20 parts
    # NOTE(review): indexed by class id; 24 entries, so >23 classes would
    # raise IndexError.
    part_colors = [[255, 0, 0], [255, 85, 0], [255, 170, 0],
                   [255, 0, 85], [255, 0, 170],
                   [0, 255, 0], [85, 255, 0], [170, 255, 0],
                   [0, 255, 85], [0, 255, 170],
                   [0, 0, 255], [85, 0, 255], [170, 0, 255],
                   [0, 85, 255], [0, 170, 255],
                   [255, 255, 0], [255, 255, 85], [255, 255, 170],
                   [255, 0, 255], [255, 85, 255], [255, 170, 255],
                   [0, 255, 255], [85, 255, 255], [170, 255, 255]]

    im = np.array(im)
    vis_im = im.copy().astype(np.uint8)
    vis_parsing_anno = parsing_anno.copy().astype(np.uint8)
    vis_parsing_anno = cv2.resize(vis_parsing_anno, None, fx=stride, fy=stride, interpolation=cv2.INTER_NEAREST)
    # Start from a white canvas; class 0 (background) keeps white.
    vis_parsing_anno_color = np.zeros((vis_parsing_anno.shape[0], vis_parsing_anno.shape[1], 3)) + 255

    num_of_class = np.max(vis_parsing_anno)

    # Paint each present class with its color.
    for pi in range(1, num_of_class + 1):
        index = np.where(vis_parsing_anno == pi)
        vis_parsing_anno_color[index[0], index[1], :] = part_colors[pi]

    vis_parsing_anno_color = vis_parsing_anno_color.astype(np.uint8)
    # print(vis_parsing_anno_color.shape, vis_im.shape)
    # 40% image + 60% color map, in BGR for cv2.imwrite.
    vis_im = cv2.addWeighted(cv2.cvtColor(vis_im, cv2.COLOR_RGB2BGR), 0.4, vis_parsing_anno_color, 0.6, 0)

    # Save result or not
    if save_im:
        cv2.imwrite(save_path, vis_im, [int(cv2.IMWRITE_JPEG_QUALITY), 100])

    # return vis_im
def evaluate(respth='./res/test_res', dspth='./data', cp='model_final_diss.pth'):
    """Evaluate a BiSeNet checkpoint: compute validation loss on CelebAMask-HQ
    and render parsing overlays for every image in ``dspth``.

    respth -- result root; checkpoint is read from <respth>/cp/<cp> and
              overlays are written to <respth>/<iteration>/.
    cp     -- checkpoint file name; its leading "<iter>_" prefix names the
              per-iteration output subdirectory.
    NOTE(review): requires CUDA (net.cuda()/im.cuda()); the original file's
    indentation was lost, so the no_grad scope below is reconstructed --
    verify against the upstream face-parsing.PyTorch evaluate().
    """
    if not os.path.exists(respth):
        os.makedirs(respth)

    n_classes = 19
    net = BiSeNet(n_classes=n_classes)
    net.cuda()
    save_pth = osp.join(respth, 'cp', cp)
    net.load_state_dict(torch.load(save_pth))
    net.eval()

    # e.g. cp="80000_...pth" -> subdirectory "<respth>/80000"
    no_iter = str(int(cp.split('_')[0]))
    org_respth = respth[:]
    respth = os.path.join(respth, no_iter)
    if not os.path.exists(respth):
        os.makedirs(respth)

    # ImageNet normalization, matching training preprocessing.
    to_tensor = transforms.Compose([
        transforms.ToTensor(),
        transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225)),
    ])

    ''' added '''
    cropsize = [448, 448]
    n_img_per_gpu = 16
    data_root = '/home/jihyun/workspace/face_parsing/dataset/CelebAMask-HQ/'
    ds = FaceMask(data_root, cropsize=cropsize, mode='val')
    dl = DataLoader(ds, batch_size=16, shuffle=False, drop_last=True)
    # Same OHEM loss configuration as training: main output + two aux heads.
    n_min = n_img_per_gpu * cropsize[0] * cropsize[1] // 16
    score_thres = 0.7
    ignore_idx = -100
    loss_avg = []
    LossP = OhemCELoss(thresh=score_thres, n_min=n_min, ignore_lb=ignore_idx)
    Loss2 = OhemCELoss(thresh=score_thres, n_min=n_min, ignore_lb=ignore_idx)
    Loss3 = OhemCELoss(thresh=score_thres, n_min=n_min, ignore_lb=ignore_idx)

    with torch.no_grad():
        # Pass 1: mean validation loss over the CelebAMask-HQ val split.
        for i, sample in enumerate(dl):
            im, lb = sample
            im = im.cuda()
            lb = lb.cuda()
            lb = torch.squeeze(lb, 1)
            out, out16, out32 = net(im)
            lossp = LossP(out, lb)
            loss2 = Loss2(out16, lb)
            loss3 = Loss3(out32, lb)
            loss = lossp + loss2 + loss3
            loss_avg.append(loss.item())
        loss_avg = sum(loss_avg) / len(loss_avg)
        # Append the eval loss to the shared loss log.
        f = open(osp.join(org_respth, 'loss.log'), 'a')
        f.write(' eval_loss: ' + str(loss_avg) + '\n')
        f.close()
        # Pass 2: render a parsing overlay for every image in dspth.
        for image_path in os.listdir(dspth):
            img = Image.open(osp.join(dspth, image_path))
            image = img.resize((512, 512), Image.BILINEAR)
            img = to_tensor(image)
            img = torch.unsqueeze(img, 0)
            img = img.cuda()
            out, out16, out32 = net(img)
            parsing = out.squeeze(0).cpu().numpy().argmax(0)
            vis_parsing_maps(image, parsing, stride=1, save_im=True, save_path=osp.join(respth, image_path))
if __name__ == "__main__":
setup_logger('./res')
evaluate()
| StarcoderdataPython |
3397483 | <reponame>code42/py42
from py42.choices import Choices
from py42.sdk.queries.fileevents.file_event_query import FileEventFilterStringField
from py42.sdk.queries.query_filter import QueryFilterBooleanField
class Actor(FileEventFilterStringField):
    """Class that filters events by the cloud service username of the event originator
    (applies to cloud data source events only).
    """
    # Server-side query term this filter class maps to.
    _term = "actor"
class DirectoryID(FileEventFilterStringField):
    """Class that filters events by unique identifier of the cloud drive or folder where the event
    occurred (applies to cloud data source events only).
    """
    # Server-side query term this filter class maps to.
    _term = "directoryId"
class Shared(QueryFilterBooleanField):
    """Class that filters events by the shared status of the file at the time the event occurred
    (applies to cloud data source events only).
    """
    # Server-side query term this filter class maps to.
    _term = "shared"
class SharedWith(FileEventFilterStringField):
    """Class that filters events by the list of users who had been granted access to the file at the
    time of the event (applies to cloud data source events only).
    """
    # Server-side query term this filter class maps to.
    _term = "sharedWith"
class SharingTypeAdded(FileEventFilterStringField, Choices):
    """Class that filters results to include events where a file's sharing permissions were
    changed to a value that increases exposure (applies to cloud data source events only).

    Available options provided as class attributes:
        - :attr:`SharingTypeAdded.SHARED_VIA_LINK`
        - :attr:`SharingTypeAdded.IS_PUBLIC`
        - :attr:`SharingTypeAdded.OUTSIDE_TRUSTED_DOMAIN`
    """
    # Server-side query term this filter class maps to.
    _term = "sharingTypeAdded"
    # Valid values for this filter (exposed via the Choices mixin).
    SHARED_VIA_LINK = "SharedViaLink"
    IS_PUBLIC = "IsPublic"
    OUTSIDE_TRUSTED_DOMAIN = "OutsideTrustedDomains"
| StarcoderdataPython |
class RunService(object):
    """Read-only view over a service resource dict.

    Exposes the service ``name`` (from the resource metadata) and ``url``
    (from the resource status address) of the raw API response.
    """

    def __init__(self, raw):
        # Raw resource dict as returned by the API.
        self.raw = raw

    # Bug fix: the original __getattr__ if-chain raised a bare
    # ``AttributeError`` with no message for every other attribute, hiding
    # which name was missing. Properties keep the same external interface
    # (rs.name / rs.url) and give standard, informative AttributeErrors.
    @property
    def name(self):
        """Service name from ``raw["metadata"]["name"]``."""
        return self.raw["metadata"]["name"]

    @property
    def url(self):
        """Service address URL from ``raw["status"]["address"]["url"]``."""
        return self.raw["status"]["address"]["url"]
| StarcoderdataPython |
135948 | <gh_stars>10-100
# Copyright 2022 Sony Semiconductors Israel, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import operator
from typing import Any, Callable, Dict
class Filter:
    """
    Base class for filtering a layer configuration by its attributes.

    Subclasses must implement :meth:`match`.
    """

    def match(self, layer_config: Dict[str, Any]):
        """
        Check whether the passed configuration matches the filter.

        Args:
            layer_config: Layer's configuration to check.

        Returns:
            Whether the passed configuration matches the filter or not.
        """
        # Bug fix: raise NotImplementedError (the conventional signal for an
        # abstract hook; still caught by ``except Exception``) instead of a
        # bare, untyped Exception.
        raise NotImplementedError('Filter did not implement match')
class AttributeFilter(Filter):
    """
    Wrap a key, value and an operation to filter a layer's configuration according to.
    If the layer's configuration has the key, and its value matches when applying the operator,
    the configuration matches the AttributeFilter.
    """

    def __init__(self,
                 attr: str,
                 value: Any,
                 op: Callable):
        """
        Args:
            attr (str): Attribute to filter a layer's configuration according to.
            value (Any): Value to filter a layer's configuration according to.
            op (Callable): Operator to check if, when applied on a layer's configuration value, it holds with regard to the filter's value field.
        """
        self.attr = attr
        self.value = value
        self.op = op

    def __eq__(self, other: Any) -> bool:
        """
        Check whether an object is equal to the AttributeFilter or not.

        Args:
            other: Object to check if it is equal to the AttributeFilter or not.

        Returns:
            Whether the object is equal to the AttributeFilter or not.
        """
        if not isinstance(other, AttributeFilter):
            return False
        return self.attr == other.attr and \
            self.value == other.value and \
            self.op == other.op

    def __or__(self, other: Any):
        """
        Create a filter that combines multiple AttributeFilters with a logic OR between them.

        Args:
            other: Filter to add to self with logic OR.

        Returns:
            OrAttributeFilter that filters with OR between the current AttributeFilter and the passed AttributeFilter.
        """
        if not isinstance(other, AttributeFilter):
            # Bug fix: raise TypeError (the standard error for an operand of
            # the wrong type) instead of a bare, untyped Exception.
            raise TypeError("Not an attribute filter. Can not run an OR operation.")
        return OrAttributeFilter(self, other)

    def __and__(self, other: Any):
        """
        Create a filter that combines multiple AttributeFilters with a logic AND between them.

        Args:
            other: Filter to add to self with logic AND.

        Returns:
            AndAttributeFilter that filters with AND between the current AttributeFilter and the passed AttributeFilter.
        """
        if not isinstance(other, AttributeFilter):
            # Bug fix: TypeError instead of a bare Exception (see __or__).
            raise TypeError("Not an attribute filter. Can not run an AND operation.")
        return AndAttributeFilter(self, other)

    def match(self,
              layer_config: Dict[str, Any]) -> bool:
        """
        Check whether the passed configuration matches the filter.

        Args:
            layer_config: Layer's configuration to check.

        Returns:
            Whether the passed configuration matches the filter or not.
        """
        if self.attr in layer_config:
            return self.op(layer_config.get(self.attr), self.value)
        return False

    def op_as_str(self):
        """
        Returns: A string representation for the filter's operator symbol.
        """
        # Bug fix: NotImplementedError marks this as an abstract hook that
        # concrete subclasses (Greater, Eq, ...) must override.
        raise NotImplementedError("Filter must implement op_as_str")

    def __repr__(self):
        return f'{self.attr} {self.op_as_str()} {self.value}'
class OrAttributeFilter(Filter):
    """
    Composite filter that matches when at least one of its child filters
    matches (logical OR).
    """

    def __init__(self, *filters: AttributeFilter):
        """
        Args:
            *filters: Filters combined with a logical OR during matching.
        """
        self.filters = filters

    def match(self,
              layer_config: Dict[str, Any]) -> bool:
        """
        Check whether a layer's configuration matches any child filter.

        Args:
            layer_config: Layer's configuration to check.

        Returns:
            True if at least one child filter matches, False otherwise.
        """
        # any() short-circuits on the first match, just like the original
        # explicit loop did.
        return any(child.match(layer_config) for child in self.filters)

    def __repr__(self):
        """
        Returns: A string representation for the filter.
        """
        return ' | '.join(str(child) for child in self.filters)
class AndAttributeFilter(Filter):
    """
    Composite filter that matches only when every child filter matches
    (logical AND).
    """

    def __init__(self, *filters):
        # Child filters combined with a logical AND during matching.
        self.filters = filters

    def match(self,
              layer_config: Dict[str, Any]) -> bool:
        """
        Check whether a layer's configuration matches every child filter.

        Args:
            layer_config: Layer's configuration to check.

        Returns:
            True only if all child filters match, False otherwise.
        """
        # all() short-circuits on the first non-match, just like the original
        # explicit loop did.
        return all(child.match(layer_config) for child in self.filters)

    def __repr__(self):
        """
        Returns: A string representation for the filter.
        """
        return ' & '.join(str(child) for child in self.filters)
class Greater(AttributeFilter):
    """
    Filter configurations such that it matches configurations
    that have an attribute with a value that is greater than the value that Greater holds.
    """
    def __init__(self,
                 attr: str,
                 value: Any):
        super().__init__(attr=attr, value=value, op=operator.gt)

    # Operator symbol used by AttributeFilter.__repr__.
    def op_as_str(self): return ">"
class GreaterEq(AttributeFilter):
    """
    Filter configurations such that it matches configurations
    that have an attribute with a value that is greater than or equal to the value that GreaterEq holds.
    """
    def __init__(self, attr: str, value: Any):
        super().__init__(attr=attr, value=value, op=operator.ge)

    # Operator symbol used by AttributeFilter.__repr__.
    def op_as_str(self): return ">="
class Smaller(AttributeFilter):
    """
    Filter configurations such that it matches configurations that have an attribute with a value that is smaller than the value that Smaller holds.
    """
    def __init__(self, attr: str, value: Any):
        super().__init__(attr=attr, value=value, op=operator.lt)

    # Operator symbol used by AttributeFilter.__repr__.
    def op_as_str(self): return "<"
class SmallerEq(AttributeFilter):
    """
    Filter configurations such that it matches configurations that have an attribute with a value that is smaller than or equal to the value that SmallerEq holds.
    """
    def __init__(self, attr: str, value: Any):
        super().__init__(attr=attr, value=value, op=operator.le)

    # Operator symbol used by AttributeFilter.__repr__.
    def op_as_str(self): return "<="
class NotEq(AttributeFilter):
    """
    Filter configurations such that it matches configurations that have an attribute with a value that is not equal to the value that NotEq holds.
    """
    def __init__(self, attr: str, value: Any):
        super().__init__(attr=attr, value=value, op=operator.ne)

    # Operator symbol used by AttributeFilter.__repr__.
    def op_as_str(self): return "!="
class Eq(AttributeFilter):
    """
    Filter configurations such that it matches configurations that have an attribute with a value that equals the value that Eq holds.
    """
    def __init__(self, attr: str, value: Any):
        super().__init__(attr=attr, value=value, op=operator.eq)

    # Operator symbol used by AttributeFilter.__repr__.
    def op_as_str(self): return "="
| StarcoderdataPython |
def merge_the_tools(string, k):
    """Split ``string`` into consecutive chunks of length ``k`` and print
    each chunk with duplicate characters removed (first occurrence kept).

    Any trailing characters beyond the last full chunk are ignored, matching
    the original ``while y <= len(string)`` loop.

    :param string: input text.
    :param k: chunk length; assumed to be a positive integer.
    """
    for start in range(0, len(string) - len(string) % k, k):
        chunk = string[start:start + k]
        # dict.fromkeys preserves insertion order (Python 3.7+), giving an
        # O(k) dedup instead of the original O(k^2) `ch not in seen` scan;
        # results are printed directly instead of being buffered in a list.
        print(''.join(dict.fromkeys(chunk)))
import itertools

# Read N and a list D of N integers, then print the sum of D[i] * D[j] over
# all pairs i < j: zip pairs each element D[j] (j >= 1) with the running
# prefix sum of all elements before it.
# (Cleanup: the original lines carried fused dataset-dump artifacts that made
# them invalid Python.)
N = int(input())
D = list(map(int, input().split()))
print(sum(d * prefix for d, prefix in zip(D[1:], itertools.accumulate(D))))
3390903 | <filename>ocw/dataset.py<gh_stars>100-1000
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
'''
Classes:
Dataset - Container for a dataset's attributes and data.
Bounds - Container for holding spatial and temporal bounds information
for operations on a Dataset.
'''
import datetime as dt
import logging
import netCDF4
import numpy
import ocw.utils as utils
logger = logging.getLogger(__name__)
class Dataset:
    '''Container for a dataset's attributes and data.'''

    def __init__(self, lats, lons, times, values, variable=None, units=None,
                 origin=None, name=""):
        '''Default Dataset constructor

        :param lats: One dimensional numpy array of unique latitude values.
        :type lats: :class:`numpy.ndarray`

        :param lons: One dimensional numpy array of unique longitude values.
        :type lons: :class:`numpy.ndarray`

        :param times: One dimensional numpy array of unique python datetime
            objects.
        :type times: :class:`numpy.ndarray`

        :param values: Three dimensional numpy array of parameter values with
            shape [timesLength, latsLength, lonsLength].
        :type values: :class:`numpy.ndarray`

        :param variable: Name of the value variable.
        :type variable: :mod:`string`

        :param units: Name of the value units
        :type units: :mod:`string`

        :param name: An optional string name for the Dataset.
        :type name: :mod:`string`

        :param origin: An optional object used to specify information on where
            this dataset was loaded from.
        :type origin: :class:`dict`

        :raises: ValueError
        '''
        self._validate_inputs(lats, lons, times, values)
        lats, lons, values = utils.normalize_lat_lon_values(lats, lons, values)

        self.lats = lats
        self.lons = lons
        self.times = times
        self.values = values
        self.variable = variable
        self.units = units
        self.name = name
        self.origin = origin

    def spatial_boundaries(self):
        '''Calculate the spatial boundaries.

        :returns: The Dataset's bounding latitude and longitude values as a
            tuple in the form (min_lat, max_lat, min_lon, max_lon)
        :rtype: :func:`tuple` of the form (:class:`float`, :class:`float`,
            :class:`float`, :class:`float`).
        '''
        return (float(numpy.min(self.lats)), float(numpy.max(self.lats)),
                float(numpy.min(self.lons)), float(numpy.max(self.lons)))

    def temporal_boundaries(self):
        '''Calculate the temporal range

        :returns: The start and end date of the Dataset's temporal range as
            a tuple in the form (start_time, end_time).
        :rtype: :func:`tuple` of the form (:class:`datetime.datetime`,
            :class:`datetime.datetime`)
        '''
        sorted_time = numpy.sort(self.times)
        start_time = sorted_time[0]
        end_time = sorted_time[-1]

        return (start_time, end_time)

    def spatial_resolution(self):
        '''Calculate the latitudinal and longitudinal spatial resolution.

        If self.lats and self.lons are from curvilinear coordinates,
        the output resolutions are approximate values.

        :returns: The Dataset's latitudinal and longitudinal spatial resolution
            as a tuple of the form (lat_resolution, lon_resolution).
        :rtype: (:class:`float`, :class:`float`)

        :raises ValueError: If lats and lons are not both 1D or both 2D.
        '''
        if self.lats.ndim == 1 and self.lons.ndim == 1:
            sorted_lats = numpy.sort(list(set(self.lats)))
            sorted_lons = numpy.sort(list(set(self.lons)))
            lat_resolution = sorted_lats[1] - sorted_lats[0]
            lon_resolution = sorted_lons[1] - sorted_lons[0]
        elif self.lats.ndim == 2 and self.lons.ndim == 2:
            lat_resolution = self.lats[1, 1] - self.lats[0, 0]
            lon_resolution = self.lons[1, 1] - self.lons[0, 0]
        else:
            # Bug fix: previously a dimensionality mismatch fell through to
            # an opaque NameError on the return statement.
            raise ValueError("lats and lons must both be 1D or both be 2D")

        return (lat_resolution, lon_resolution)

    def temporal_resolution(self):
        '''Calculate the temporal resolution.

        :returns: The temporal resolution, one of 'minutely', 'hourly',
            'daily', 'monthly' or 'yearly'.
        :rtype: :mod:`string`
        '''
        sorted_times = numpy.sort(self.times)
        # Resolution is inferred from the gap between the first two times.
        time_resolution = sorted_times[1] - sorted_times[0]
        num_days = time_resolution.days

        if num_days == 0:
            num_hours = time_resolution.seconds / 3600
            time_resolution = 'hourly' if num_hours >= 1 else 'minutely'
        elif num_days == 1:
            time_resolution = 'daily'
        elif num_days <= 31:
            time_resolution = 'monthly'
        elif num_days > 31:
            time_resolution = 'yearly'

        return time_resolution

    def _validate_inputs(self, lats, lons, times, values):
        """Check that Dataset inputs are valid.

        :raises: ValueError
        """
        err_msg = None
        # Setup and Check parameter dimensionality is correct
        lat_dim = len(lats.shape)
        lon_dim = len(lons.shape)
        time_dim = len(times.shape)
        value_dim = len(values.shape)
        lat_count = lats.shape[0]
        lon_count = lons.shape[0]
        if lat_dim == 2 and lon_dim == 2:
            lon_count = lons.shape[1]
        time_count = times.shape[0]

        if time_dim != 1:
            err_msg = ("Time Array should be 1 dimensional. %s dimensions"
                       " found." % time_dim)
        elif value_dim < 2:
            err_msg = ("Value Array should be at least 2 dimensional."
                       " %s dimensions found." % value_dim)
        # Finally check that the Values array conforms to the proper shape
        if value_dim == 2:
            if (values.shape[0] != time_count and
                    values.shape != (lat_count, lon_count)):
                err_msg = """Value Array must be of shape (lats, lons) or (times, locations).
Expected shape (%s, %s) but received (%s, %s)""" % (
                    lat_count,
                    lon_count,
                    values.shape[0],
                    values.shape[1])
        if (value_dim == 3 and
                values.shape != (time_count, lat_count, lon_count)):
            err_msg = """Value Array must be of shape (times, lats, lons).
Expected shape (%s, %s, %s) but received (%s, %s, %s)""" % (
                time_count,
                lat_count,
                lon_count,
                values.shape[0],
                values.shape[1],
                values.shape[2])
        if err_msg:
            logger.error(err_msg)
            raise ValueError(err_msg)

    def __str__(self):
        lat_min, lat_max, lon_min, lon_max = self.spatial_boundaries()
        start, end = self.temporal_boundaries()
        # Bug fix: the ranges previously formatted (lat_min, lon_min) and
        # (lon_min, lon_min) instead of the min/max pairs.
        lat_range = "({}, {})".format(lat_min, lat_max)
        lon_range = "({}, {})".format(lon_min, lon_max)
        temporal_boundaries = "({}, {})".format(start, end)

        formatted_repr = (
            "<Dataset - name: {}, "
            "lat-range: {}, "
            "lon-range: {}, "
            "temporal_boundaries: {}, "
            "var: {}, "
            "units: {}>"
        )

        return formatted_repr.format(
            self.name if self.name != "" else None,
            lat_range,
            lon_range,
            temporal_boundaries,
            self.variable,
            self.units
        )
class Bounds(object):
    """Container for holding spatial and temporal bounds information.

    Certain operations require valid bounding information to be present for
    correct functioning. Bounds guarantees that a function receives well
    formed information without the need to do the validation manually.

    boundary_type may be one of the following:
    * 'rectangular'
    * 'CORDEX (CORDEX region name)': pre-defined CORDEX boundary
    * 'us_states': an array of US states abbreviation is required (ex) us_states = ['CA','NV'])
    * 'countries': an array of county names is required (ex) countries = ['United States','Canada']
    * 'user': user_mask_file in a netCDF format with two dimensional mask variable is required.

    If boundary_type == 'rectangular', spatial and temporal bounds must follow the
    following guidelines.

    * Latitude values must be in the range [-90, 90]
    * Longitude values must be in the range [-180, 180]
    * Lat/Lon Min values must be less than the corresponding Lat/Lon Max
      values.

    Temporal bounds must a valid datetime object
    """

    def __init__(self, boundary_type='rectangular',
                 us_states=None, countries=None,
                 user_mask_file=None, mask_variable_name=None,
                 longitude_name=None, latitude_name=None,
                 lat_min=-90, lat_max=90, lon_min=-180, lon_max=180,
                 start=None, end=None):
        """Default Bounds constructor

        :param boundary_type: The type of spatial subset boundary.
        :type boundary_type: :mod:`string`

        :param lat_min: The minimum latitude bound.
        :type lat_min: :class:`float`

        :param lat_max: The maximum latitude bound.
        :type lat_max: :class:`float`

        :param lon_min: The minimum longitude bound.
        :type lon_min: :class:`float`

        :param lon_max: The maximum longitude bound.
        :type lon_max: :class:`float`

        :param start: An optional datetime object for the starting
            datetime bound.
        :type start: :class:`datetime.datetime`

        :param end: An optional datetime object for the ending datetime bound.
        :type end: :class:`datetime.datetime`

        :raises: ValueError
        """
        self.boundary_type = boundary_type
        self._start = None
        self._end = None
        self.lat_min = None
        self.lat_max = None
        self.lon_min = None
        self.lon_max = None

        if start and self._validate_start(start):
            self._start = start
        if end and self._validate_end(end):
            self._end = end

        if boundary_type == 'us_states':
            self.masked_regions = utils.shapefile_boundary(boundary_type, us_states)
        if boundary_type == 'countries':
            self.masked_regions = utils.shapefile_boundary(boundary_type, countries)
        if boundary_type == 'user':
            file_object = netCDF4.Dataset(user_mask_file)
            self.mask_variable = file_object.variables[mask_variable_name][:]
            mask_longitude = file_object.variables[longitude_name][:]
            mask_latitude = file_object.variables[latitude_name][:]
            # 1D coordinate vectors are expanded to a 2D grid so the mask
            # always pairs with per-cell lat/lon arrays.
            if mask_longitude.ndim == 1 and mask_latitude.ndim == 1:
                self.mask_longitude, self.mask_latitude = \
                    numpy.meshgrid(mask_longitude, mask_latitude)
            elif mask_longitude.ndim == 2 and mask_latitude.ndim == 2:
                self.mask_longitude = mask_longitude
                self.mask_latitude = mask_latitude
        if boundary_type == 'rectangular':
            if self._validate_lat_lon(lat_max=lat_max, lat_min=lat_min, lon_max=lon_max, lon_min=lon_min):
                self.lat_min = float(lat_min)
                self.lat_max = float(lat_max)
                self.lon_min = float(lon_min)
                self.lon_max = float(lon_max)
        if boundary_type[:6].upper() == 'CORDEX':
            # e.g. 'CORDEX Africa' -> look up the 'africa' preset bounds.
            lat_min, lat_max, lon_min, lon_max = \
                utils.CORDEX_boundary(boundary_type[6:].replace(" ", "").lower())
            if self._validate_lat_lon(lat_max=lat_max, lat_min=lat_min, lon_max=lon_max, lon_min=lon_min):
                self.lat_min = float(lat_min)
                self.lat_max = float(lat_max)
                self.lon_min = float(lon_min)
                self.lon_max = float(lon_max)

    @property
    def start(self):
        """ Getter for start attribute. """
        return self._start

    @start.setter
    def start(self, value):
        """ Setter for start attribute. """
        if value and self._validate_start(value):
            self._start = value

    @property
    def end(self):
        """ Getter for end attribute. """
        return self._end

    @end.setter
    def end(self, value):
        """ Setter for end attribute. """
        if value and self._validate_end(value):
            self._end = value

    def _validate_start(self, value):
        """ Validate start is both the correct type and less than end. """
        if not isinstance(value, dt.datetime):
            error = "Attempted to set start to invalid type: %s" % (type(value))
            logger.error(error)
            raise ValueError(error)
        if self._end:
            if value > self._end:
                error = "Attempted to set start to invalid value: %s" % (value)
                logger.error(error)
                raise ValueError(error)
        return True

    def _validate_end(self, value):
        """ Validate end is both the correct type and greater than start. """
        if not isinstance(value, dt.datetime):
            error = "Attempted to set end to invalid type: %s" % (type(value))
            logger.error(error)
            raise ValueError(error)
        if self._start:
            if value < self._start:
                error = "Attempted to set end to invalid value: %s" % (value)
                logger.error(error)
                raise ValueError(error)
        return True

    def _validate_lat_lon(self, lat_max, lat_min, lon_max, lon_min):
        """ Confirm the min / max lat / lon are within expected ranges. """
        if not (-90 <= float(lat_min) <= 90) or float(lat_min) > float(lat_max):
            error = "Attempted to set lat_min to invalid value: %s" % (lat_min)
            logger.error(error)
            raise ValueError(error)
        if not -90 <= float(lat_max) <= 90:
            error = "Attempted to set lat_max to invalid value: %s" % (lat_max)
            logger.error(error)
            raise ValueError(error)
        if not (-180 <= float(lon_min) <= 180) or float(lon_min) > float(lon_max):
            error = "Attempted to set lon_min to invalid value: %s" % (lon_min)
            logger.error(error)
            raise ValueError(error)
        if not -180 <= float(lon_max) <= 180:
            # Bug fix: the error message previously said "lat_max" for an
            # out-of-range lon_max (copy-paste error).
            error = "Attempted to set lon_max to invalid value: %s" % (lon_max)
            logger.error(error)
            raise ValueError(error)
        return True
| StarcoderdataPython |
3354404 | <gh_stars>0
# Demo script: Python's boolean operators `and`, `or`, `not` and their
# short-circuit behavior.
print(True and True) # True; in Python we use `and` (not &&)
a = 25
b = 10
print("a == 25 and b == 10",a == 25 and b == 10) # if one side is False the whole expression is False
print(a > 5 and b > 20) # if one side is False the whole expression is False
print(True and True and True and False) # one False makes the whole chain False
print(False and 2*2 == 4 and 3 == 3) # 2*2 == 4 is never evaluated; `and` short-circuits on False
print(a >= 5 and b >= 10)
print(a >= 5 and b >= 10 and a > b)
print(a >= 5 and b >= 10 and a < b)
print(False and False) # False
print(True and False) # False
print(False and True) # False
# # # or (not || as in other languages)
# # one side of `or` has to be True for the whole expression to be True
print(True or True) # True
print(True or False) # True
print(False or True) # True
print(False or False) # False
# # with `or`, as soon as one operand is True Python stops evaluating the expression
print("a > 24 or b > 1_000 or 2*2 == 5", a > 24 or b > 1_000 or 2*2 == 5)
print("" or 0 or False)
print(1_000_000 == 1000000 and 1000000 == 1_0_0_0_0_0_0) # last spelling is NOT recommended
#
# # negation
print(not True)
print(not False)
#
print(2 == 3 and 3 == 9000)
print(not 2 == 3) # True
print(not 2 == 3 and 3 == 900) # still False: `not` binds tighter than `and`
print(not (2 == 3 and 3 == 900)) # parentheses force the `and` to be evaluated first
# #
isAoverB = a >= 5 and b >= 10 and a > b
notSoFast = not isAoverB
#
# # # there are also bitwise operators & and |, plus ^ and ~
# https://realpython.com/python-bitwise-operators/
12266 | <gh_stars>1-10
import unittest
from unittest.mock import patch
import os
from .ansible_test_framework import AnsibleTestFramework, RecordMaker
import keeper_secrets_manager_ansible.plugins
import tempfile
# Fixture Keeper records keyed by uid: one login record carrying a password
# field and one record carrying two attached video files.
records = {
    "TRd_567FkHy-CeGsAzs8aA": RecordMaker.make_record(
        uid="TRd_567FkHy-CeGsAzs8aA",
        title="JW-F1-R1",
        fields={
            "password": "<PASSWORD>"
        }
    ),
    "A_7YpGBUgRTeDEQLhVRo0Q": RecordMaker.make_file(
        uid="A_7YpGBUgRTeDEQLhVRo0Q",
        title="JW-F1-R2-File",
        files=[
            {"name": "nailing it.mp4", "type": "video/mp4", "url": "http://localhost/abc", "data": "ABC123"},
            {"name": "video_file.mp4", "type": "video/mp4", "url": "http://localhost/xzy", "data": "XYZ123"},
        ]
    )
}
def mocked_get_secrets(*args):
    """Stand-in for ``SecretsManager.get_secrets``.

    Called with a list of uids it returns the matching fixture records; with
    no arguments it returns every fixture record, mirroring the real API.
    """
    if args:
        # Generalization: the original returned only the first requested
        # uid (args[0][0]); return every requested record instead, which is
        # backward-compatible for the existing single-uid calls.
        return [records[uid] for uid in args[0]]
    return [records[uid] for uid in records]
class KeeperInitTest(unittest.TestCase):
    """Integration tests for the keeper_init playbook.

    Runs the playbook twice — once writing a YAML config and once writing a
    JSON config — and asserts the expected config file is created.
    """

    def setUp(self):
        """Compute config paths and remove any stale config files."""
        self.yml_file_name = "test_keeper.yml"
        self.json_file_name = "test_keeper.json"

        # Add in addition Python libs. This includes the base
        # module for Keeper Ansible and the Keeper SDK.
        self.base_dir = os.path.dirname(os.path.realpath(__file__))
        self.ansible_base_dir = os.path.join(self.base_dir, "ansible_example")

        # Cleanup: dropped the original redundant single-argument
        # os.path.join wrapper around these joins.
        self.yml_file = os.path.join(self.ansible_base_dir, self.yml_file_name)
        self.json_file = os.path.join(self.ansible_base_dir, self.json_file_name)

        for file in [self.yml_file, self.json_file]:
            if os.path.exists(file) is True:
                os.unlink(file)

    def tearDown(self):
        """Remove config files produced by a test run."""
        for file in [self.yml_file, self.json_file]:
            if os.path.exists(file) is True:
                os.unlink(file)

    def _run_init(self, config_file_name, config_path, show_config):
        """Run the keeper_init playbook once and assert it wrote *config_path*.

        Cleanup: this logic previously existed twice, copy-pasted, inside
        _common; it is now a single parameterized helper.
        """
        a = AnsibleTestFramework(
            base_dir=self.ansible_base_dir,
            playbook=os.path.join("playbooks", "keeper_init.yml"),
            inventory=os.path.join("inventory", "all"),
            plugin_base_dir=os.path.dirname(keeper_secrets_manager_ansible.plugins.__file__),
            vars={
                "keeper_token": "<KEY>",
                "keeper_config_file": config_file_name,
                "show_config": show_config
            }
        )

        r, out, err = a.run()
        result = r[0]["localhost"]
        self.assertEqual(result["ok"], 2, "1 things didn't happen")
        self.assertEqual(result["failures"], 0, "failures was not 0")
        self.assertEqual(result["changed"], 0, "0 things didn't change")
        self.assertTrue(os.path.exists(config_path),
                        config_file_name + " does not exist")

    def _common(self):
        # NOTE(review): the temporary directory was never used by the
        # original code (the playbook writes next to the ansible project);
        # kept so the run environment matches — confirm it can be dropped.
        with tempfile.TemporaryDirectory():
            self._run_init(self.yml_file_name, self.yml_file, show_config=True)
            self._run_init(self.json_file_name, self.json_file, show_config=False)

    # @unittest.skip
    @patch("keeper_secrets_manager_core.core.SecretsManager.get_secrets", side_effect=mocked_get_secrets)
    def test_keeper_get_mock(self, _):
        self._common()

    @unittest.skip
    def test_keeper_get_live(self):
        self._common()
| StarcoderdataPython |
from django.contrib import messages
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.http import HttpResponse, Http404, HttpResponseRedirect, JsonResponse
from django.shortcuts import render, redirect

from .forms import NewProfileForm, NewBusinessForm, NewHoodForm, UpdateProfileForm
from .models import Profile, Neighbourhood, Business
# Create your views here.
@login_required(login_url='/accounts/login/')
def welcome(request):
    """Landing page: show every neighbourhood and business."""
    context = {
        "neighbourhoods": Neighbourhood.objects.all(),
        "businesses": Business.objects.all(),
    }
    return render(request, 'index.html', context)
@login_required(login_url='/accounts/login/')
def profile(request):
    """Display the signed-in user's profile page."""
    user_profile = Profile.objects.get(user=request.user)
    return render(request, 'profile.html', {"profile": user_profile})
@login_required(login_url='/accounts/login/')
def update_profile(request):
    """Edit the signed-in user's profile via UpdateProfileForm.

    Bug fixes: ``messages`` was referenced but never imported (a NameError
    on every successful save — import added at the top of the file), and the
    form was built but never passed to the template context.
    """
    profile = Profile.objects.get(user=request.user)
    if request.method == 'POST':
        form = UpdateProfileForm(request.POST, instance=profile)
        if form.is_valid():
            form.save()
            messages.success(request, "You've successfully updated your profile!")
            return redirect('profile')
    else:
        form = UpdateProfileForm(instance=profile)
    # On GET, or on an invalid POST, re-render with the (possibly bound) form.
    return render(request, 'update_profile.html', {"profile": profile, "form": form})
@login_required(login_url='/accounts/login/')
def add_profile(request):
    """Create a Profile for the signed-in user.

    Bug fix: the original called ``profile_form.save(...)`` but the form
    variable is named ``form``, raising a NameError on every valid POST.
    """
    if request.method == 'POST':
        form = NewProfileForm(request.POST, request.FILES)
        if form.is_valid():
            profile = form.save(commit=False)
            profile.user = request.user
            profile.save()
            return redirect('welcome')
    else:
        form = NewProfileForm()
    return render(request, 'new_profile.html', {"form": form})
def home(request):
    """List the neighbourhoods belonging to the current user.

    NOTE(review): unlike the sibling views this one is not decorated with
    @login_required yet it dereferences request.user, so an anonymous
    visitor would hit this with an unauthenticated user — confirm whether
    the decorator was omitted intentionally.
    """
    neighbourhoods = Neighbourhood.objects.filter(user=request.user)
    return render(request,'home.html',{"neighbourhoods":neighbourhoods})
def search_results(request):
    """Search businesses by name using the ``?business=<term>`` query param.

    Bug fixes: the original tested ``'business' in request.GET`` but then
    read ``request.GET["businesses"]`` (plural), so the search branch could
    never run correctly, and it passed an undefined name
    ``searched_businesses`` to the template (the result was assigned to
    ``searched_business``).
    """
    search_term = request.GET.get("business")
    if search_term:
        searched_businesses = Business.search_by_title(search_term)
        message = f"{search_term}"
        return render(request, 'search.html', {"message": message, "businesses": searched_businesses})
    message = "Searched"
    return render(request, 'search.html', {"message": message})
@login_required(login_url='/accounts/login/')
def new_business(request):
    """Create a new Business owned by the signed-in user.

    Bug fix: ``current_user`` was never defined in this view (NameError on
    every valid POST); the intended value is ``request.user``.
    """
    if request.method == 'POST':
        form = NewBusinessForm(request.POST)
        if form.is_valid():
            business = form.save(commit=False)
            business.user = request.user
            business.save()
            return redirect('welcome')
    else:
        form = NewBusinessForm()
    return render(request, 'new_business.html', {"form": form})
@login_required(login_url='/accounts/login/')
def new_hood(request):
    """Create a new Neighbourhood owned by the signed-in user.

    Bug fix: ``request_user`` was an undefined name (NameError on every
    valid POST); the intended value is ``request.user``. Also removed an
    unused ``neighbourhoods`` queryset that was built and discarded.
    """
    if request.method == 'POST':
        form = NewHoodForm(request.POST)
        if form.is_valid():
            hood = form.save(commit=False)
            hood.user = request.user
            hood.save()
            return redirect('welcome')
    else:
        form = NewHoodForm()
    return render(request, 'new_hood.html', {"form": form})
| StarcoderdataPython |
144397 | # Copyright (c) 2016-2017 Enproduktion GmbH & Laber's Lab e.U. (FN 394440i, Austria)
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from flask import render_template, flash, session
from flask.views import MethodView
from platform import database
from platform.models.user import User
from platform.models import forms
# Error Handling
from platform import app
from platform.views.errors import ShowErrors
from platform.models.errors import ServerError
class ShowLogin(MethodView):
    """Render the login form and authenticate submitted credentials."""

    methods = ["GET", "POST"]

    def dispatch_request(self):
        """Handle GET (show form) and POST (attempt login) requests."""
        form = forms.UserLogin()
        if form.validate_on_submit():
            db = database.get_db()
            user = User.get_by_login(db, form.username.data,
                                     form.password.data)
            if user:
                session['username'] = user.username
                flash("Login successful", "success")
                # NOTE(review): consider redirecting after a successful POST
                # (Post/Redirect/Get) instead of re-rendering the form, so a
                # page refresh does not re-submit the credentials.
            else:
                # Bug fix: replaced the unprofessional user-facing message
                # "Login failed (your fault)"; also dropped the stray
                # trailing semicolons on the flash() calls.
                flash("Login failed: invalid username or password", "error")
        return render_template("login.html", form=form)
@app.errorhandler(ServerError)
def handle_invalid_usage(error):
    """Render ServerError exceptions through the ShowErrors view."""
    return ShowErrors().dispatch_request(error.message)
| StarcoderdataPython |
100424 | import os
import sys
import tempfile
import shutil
import logging
import hashlib
import time
import tarfile
import json
import imp
from socket import gethostname
from .environments import Environment
from .exceptions import IpkgException
from .packages import META_FILE, make_filename
from .files import vopen
from .mixins import NameVersionRevisionComparable
from .utils import unarchive, mkdir
from .compat import basestring, StringIO
from .platforms import Platform
LOGGER = logging.getLogger(__name__)
class BuildError(IpkgException):
    """Raised when a formula fails to build."""
class IncompleteFormula(BuildError):
    """Raised when a formula definition lacks a required attribute."""

    def __init__(self, attr):
        # Name of the missing mandatory attribute.
        self.attr = attr

    def __str__(self):
        return 'The "{0}" attribute is mandatory'.format(self.attr)
def find_files(base):
    """Create a list of files in prefix ``base``.

    Returns paths relative to ``base`` (top-level files have no
    directory component), walking the tree recursively.
    """
    result = []
    for parent, _, files in os.walk(base):
        # os.path.relpath is robust against ``base`` reappearing inside a
        # deeper path component and against a trailing separator, which the
        # previous string-split approach mishandled.  Keep '' (not '.') for
        # the top level so joined paths stay bare filenames.
        rel_dir = os.path.relpath(parent, base)
        if rel_dir == '.':
            rel_dir = ''
        for filename in files:
            result.append(os.path.join(rel_dir, filename))
    return result
class Formula(NameVersionRevisionComparable):
    """A recipe used to build a package.

    Subclasses must define ``name``, ``version`` and ``sources``
    (``revision`` defaults to 1).  :meth:`build` unpacks the sources,
    applies patches, runs the install steps inside an environment and
    packages every newly created file into an ipkg archive.
    """

    # Mandatory metadata -- checked in __init__.
    name = None
    version = None
    revision = 1
    sources = None
    # Optional metadata.
    patches = tuple()
    dependencies = tuple()
    homepage = None
    envvars = None
    build_envvars = None

    # Arguments passed to ``./configure``
    configure_args = ['--prefix=%(prefix)s']

    # Target platform override; defaults to the build platform.
    platform = None

    def __init__(self, environment=None, verbose=False, log=None):
        # Check for mandatory attributes
        for attr in ('name', 'version', 'revision', 'sources'):
            if getattr(self, attr) is None:
                raise IncompleteFormula(attr)
        self.environment = environment
        self.verbose = verbose
        self.log = log or logging.getLogger(__name__)
        # Root of the unpacked sources; set by build().
        self.src_root = None
        # Working directory for run_command(); updated to the unpacked
        # sources directory during build().
        self.__cwd = os.getcwd()

    def run_command(self, command, data=None, cwd=None):
        """Run a ``command``.

        ``command`` can be a string or a list.

        If a ``data`` string is passed, it will be written on the standard
        input.

        If no ``cwd`` is given, the command will run in the sources directory.

        Returns whatever ``environment.execute`` returns.
        """
        cmd = command if isinstance(command, basestring) else ' '.join(command)
        LOGGER.info('Running: %s', cmd)
        if self.verbose:
            return self.environment.execute(command,
                                            stdout=sys.stdout,
                                            stderr=sys.stderr,
                                            cwd=cwd or self.__cwd, data=data)
        # Quiet mode: discard child output.  Close the devnull handle when
        # done instead of leaking it (the original opened it and never
        # closed it).  Also note the original had a stray trailing comma
        # here, which made run_command return a 1-tuple by accident.
        devnull = open(os.devnull, 'w')
        try:
            return self.environment.execute(command,
                                            stdout=devnull, stderr=devnull,
                                            cwd=cwd or self.__cwd, data=data)
        finally:
            devnull.close()

    def run_configure(self):
        """Run ``./configure``, using ``configure_args`` arguments.

        ``configure_args`` arguments can be format strings, using directory
        names.

        For example::

            >>> from ipkg.build import Formula
            >>> class gdbm(Formula):
            ...     name = 'gdbm'
            ...     version = '1.10'
            ...     sources = File('http://ftpmirror.gnu.org/gdbm/gdbm-1.10.tar.gz')
            ...     configure_args = ('--prefix=%(prefix)s', '--mandir=%(man)s')
            ...

        When building the gdbm formula, the configure script will be passed
        the build prefix and the man directory inside it.
        """
        command = ['./configure']
        directories = self.environment.directories
        command.extend(arg % directories for arg in self.configure_args)
        self.run_command(command)

    def __getattr__(self, attr):
        # Any ``run_<tool>`` attribute becomes a wrapper that executes
        # ``<tool>`` through run_command, e.g. ``self.run_make(['install'])``.
        if attr.startswith('run_'):
            command = [attr.split('_', 1)[1]]
            def func(args=None, **kw):
                """Wrap calls to ``run_command``."""
                if args:
                    if isinstance(args, basestring):
                        args = args.split()
                    command.extend(args)
                self.run_command(command, **kw)
            return func
        else:
            raise AttributeError(attr)

    def build(self, package_dir, remove_build_dir=True, repository=None):
        """Build the formula and return the path of the created package.

        :param package_dir: directory where the .ipkg file is written.
        :param remove_build_dir: remove the temporary build tree when done.
        :param repository: optional repository used to install dependencies.
        """
        LOGGER.debug('%r.build(package_dir=%s, remove_build_dir=%s)',
                     self, package_dir, remove_build_dir)

        installed_dependencies = []
        build_dir = tempfile.mkdtemp(prefix='ipkg-build-')

        # Create a temporary env if no env has been previously defined
        if self.environment is None:
            LOGGER.info('Creating temporary build environment')
            prefix = os.path.join(build_dir, 'environment')
            self.environment = Environment(prefix)
            self.environment.directories.create()

        if self.build_envvars:
            self.environment.variables.add(self.build_envvars)

        env_prefix = self.environment.prefix

        # Install dependencies in build environment
        if self.dependencies:
            LOGGER.info('Build dependencies: %s',
                        ', '.join(self.dependencies))
            for dependency in self.dependencies:
                if dependency not in self.environment.packages:
                    self.environment.install(dependency, repository)
                    installed_dependencies.append(dependency)

        # Create the sources root directory
        self.src_root = src_root = os.path.join(build_dir, 'sources')
        mkdir(src_root, False)

        # Unarchive the sources file and store the sources directory as cwd
        # for use when running commands from now
        self.__cwd = self.unarchive(self.sources)

        # Apply patches
        if self.patches:
            strip = 0
            for patch in self.patches:
                LOGGER.info('Applying patch: %s', patch)
                self.run_patch(['-p%d' % strip], data=patch.open().read())

        # Create a list of the files contained in the environment before
        # running "make install"
        files_before_install = set(find_files(env_prefix))

        # Compile and install the code
        self.install()

        # Compare the current environment file list with the previous one
        package_files = set(find_files(env_prefix)) - files_before_install

        # Use the list of new files to create a package
        ipkg_file = self.__create_package(package_files,
                                          env_prefix, package_dir)

        # Cleanup
        LOGGER.debug('Removing files installed in build environment')
        for package_file in package_files:
            package_file_path = os.path.join(env_prefix, package_file)
            os.unlink(package_file_path)

        if self.dependencies:
            LOGGER.debug('Uninstalling dependencies from build environment')
            for dependency in self.dependencies:
                self.environment.uninstall(dependency)

        if remove_build_dir:
            LOGGER.debug('Removing build directory: %s', build_dir)
            shutil.rmtree(build_dir)

        LOGGER.info('Build done')
        return ipkg_file

    def install(self):
        """Run ``./configure``, ``make`` and ``make install``.

        If your package need a custom build process,
        override this method in your formula.
        Do whatever needed to build your code.
        All new files found in the build environment prefix will be included
        in the package.
        """
        self.run_configure()
        self.run_make()
        self.run_make(['install'])

    def __create_package(self, files, build_dir, package_dir):
        """Create a package archive and return its path.

        Writes a bz2 tarball containing a JSON metadata file plus every
        path in ``files`` (taken from the environment prefix).
        """
        build_platform = str(Platform.current())
        platform = self.platform or build_platform

        meta = {
            'name': self.name,
            'version': self.version,
            'revision': str(self.revision),
            'platform': platform,
            'dependencies': self.dependencies,
            'homepage': self.homepage,
            'hostname': gethostname().split('.')[0],
            'timestamp': time.time(),
            'files': tuple(files),
            'build_prefix': build_dir,
            'build_platform': build_platform,
            'envvars': self.envvars,
        }

        filepath = os.path.join(package_dir, make_filename(**meta))

        meta_string = StringIO()
        json.dump(meta, meta_string, indent=4)
        meta_string_size = meta_string.tell()
        meta_string.seek(0)

        meta_tarinfo = tarfile.TarInfo(META_FILE)
        meta_tarinfo.type = tarfile.REGTYPE
        # 0o644 (rw-r--r--); modern octal literal so the module stays
        # importable on Python 3 (the old ``0644`` spelling is a py3
        # SyntaxError, defeating the compat shims this file uses).
        meta_tarinfo.mode = 0o644
        meta_tarinfo.size = meta_string_size

        pkg = tarfile.open(filepath, 'w:bz2')
        pkg.addfile(meta_tarinfo, meta_string)
        for pkg_file in files:
            pkg.add(os.path.join(self.environment.prefix, pkg_file),
                    pkg_file, recursive=False)
        pkg.close()

        LOGGER.info('Package %s created', filepath)
        return filepath

    def unarchive(self, src_file):
        """Unarchive ``src_file``.
        """
        return unarchive(src_file.open(), self.src_root)

    @classmethod
    def from_file(cls, filepath):
        """Load a Formula class from a file.

        Imports ``filepath`` as a module and returns the single Formula
        subclass it defines; raises IpkgException when none or several
        are found.
        """
        #LOGGER.debug('%s.from_file("%s")', cls.__name__, filepath)
        if not os.path.exists(filepath):
            raise IpkgException('Formula not found: %s' % filepath)
        filepath = os.path.abspath(filepath)
        filename = os.path.basename(filepath)
        module_name = filename.split('.py')[0].replace('.', '_')
        try:
            module = imp.load_source(module_name, filepath)
        except ImportError as err:
            raise IpkgException('Error when importing formula %s: %s' %
                                (filepath, err))
        formula_classes = []
        for attr in dir(module):
            if attr.startswith('_'):
                continue
            obj = getattr(module, attr)
            try:
                is_formula_cls = issubclass(obj, Formula)
            except TypeError:
                pass
            else:
                if is_formula_cls and obj is not Formula:
                    formula_classes.append(obj)
        if formula_classes:
            if len(formula_classes) > 1:
                raise IpkgException('Too many Formula classes')
            else:
                formula_class = formula_classes[0]
        else:
            raise IpkgException('No Formula class found')
        setattr(module, formula_class.__name__, formula_class)
        return formula_class

    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, self.environment)
class File(object):
    """A build resource.

    Reference a file in a formula, optionally pinning a checksum::

    .. code-block:: python

        class Foo(Formula):
            sources = File('http://foo.org/foo.tar.gz', sha256='42')

    When the Foo formula is built, its sources are fetched from
    ``http://foo.org/foo.tar.gz`` and the sha256 checksum is compared
    against ``42`` (which will of course fail).

    Supported checksum types: sha512, sha384, sha256, sha224, sha1 and md5.
    """

    # Recognized checksum keywords, strongest first.  Only the first one
    # found in the constructor kwargs is honoured, matching the original
    # if/elif chain's priority order.
    _HASH_NAMES = ('sha512', 'sha384', 'sha256', 'sha224', 'sha1', 'md5')

    def __init__(self, url, **kw):
        hash_class = None
        expected_hash = None
        for hash_name in self._HASH_NAMES:
            if hash_name in kw:
                hash_class = getattr(hashlib, hash_name)
                expected_hash = kw.pop(hash_name)
                break
        self.url = url
        self.hash_class = hash_class
        self.expected_hash = expected_hash

    def open(self):
        """Returns a file-like object.

        Validate file checksum, if specified.
        """
        fileobj = vopen(self.url, expected_hash=self.expected_hash,
                        hash_class=self.hash_class)
        fileobj.verify_checksum()
        return fileobj

    def __repr__(self):
        return 'File("%s")' % self.url

    def __str__(self):
        return self.url
| StarcoderdataPython |
1653782 | from pagination_bootstrap.version import __version__, __url__, version_info
| StarcoderdataPython |
1756975 | """ Utility functions. """
import numpy as np
import os
import random
import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()
from tensorflow.python.platform import flags
FLAGS = flags.FLAGS
## Network helpers
def conv_block(inp, cweight, bweight, reuse, scope, activation=tf.nn.relu, residual=False):
    """ Perform, conv, batch norm, nonlinearity, and max pool """
    # NOTE(review): despite the docstring, no max pooling is applied here;
    # the computed `stride` is never used (the conv always runs with
    # `no_stride`) and the `residual` flag is ignored -- confirm whether
    # this is intentional before relying on it.
    stride, no_stride = [1,2,2,1], [1,1,1,1]
    # Same-padded convolution followed by a bias add.
    conv_output = tf.nn.conv2d(inp, cweight, no_stride, 'SAME') + bweight
    # Batch norm; `reuse`/`scope` let repeated calls share variables.
    normed = tf.layers.batch_normalization(conv_output, reuse=reuse, name=scope)
    if activation is not None:
        normed = activation(normed)
    return normed
## Loss functions
def mse(pred, label):
    """Mean squared error between ``pred`` and ``label``, both flattened."""
    flat_pred = tf.reshape(pred, [-1])
    flat_label = tf.reshape(label, [-1])
    diff = flat_pred - flat_label
    return tf.reduce_mean(tf.square(diff))
def xent(pred, label):
    """Softmax cross-entropy loss, averaged over the k shots of a task.

    Note - with tf version <=0.12, this loss has incorrect 2nd derivatives.
    """
    # Fix: the original return line had non-Python dataset residue fused to
    # its end, which made the module unparseable; it is removed here.
    return tf.nn.softmax_cross_entropy_with_logits(logits=pred, labels=label) / FLAGS.k_shot
30548 | import os
from math import radians, sin, cos, asin, degrees, pi, sqrt, pow, fabs, atan2
from django import forms
from django.db import models
from django.conf import settings
from modelcluster.fields import ParentalKey
from wagtail.wagtailcore.models import Page, Orderable
from wagtail.wagtailadmin.edit_handlers import FieldPanel, InlinePanel, MultiFieldPanel
from wagtail.wagtailimages.edit_handlers import ImageChooserPanel
from wagtail.wagtailsearch import index
from wagtail.wagtaildocs.models import Document
from wagtail.wagtaildocs.edit_handlers import DocumentChooserPanel
class Dxf2VrPage(Page):
    """Wagtail page that converts an uploaded DXF drawing into A-Frame VR markup.

    ``extract_dxf`` parses the ENTITIES section of the DXF file (3DFACE and
    INSERT entities) and dispatches to the ``make_*`` helpers, each of which
    returns an A-Frame HTML fragment for one entity.
    """
    # Short introduction shown on the page.
    intro = models.CharField(max_length=250, null=True, blank=True,)
    # 360-degree background image used as the scene sky.
    equirectangular_image = models.ForeignKey(
        'wagtailimages.Image',
        null=True,
        blank=True,
        on_delete = models.SET_NULL,
        related_name = '+',
    )
    # The DXF drawing to translate into VR entities.
    dxf_file = models.ForeignKey(
        'wagtaildocs.Document',
        null=True,
        on_delete = models.SET_NULL,
        related_name = '+',
    )
    # Rendering options: shadow casting, free-flying camera, double-sided faces.
    shadows = models.BooleanField(default=False)
    fly_camera = models.BooleanField(default=False)
    double_face = models.BooleanField(default=False)

    search_fields = Page.search_fields + [
        index.SearchField('intro'),
        #index.SearchField('body'),
    ]

    content_panels = Page.content_panels + [
        FieldPanel('intro'),
        DocumentChooserPanel('dxf_file'),
        ImageChooserPanel('equirectangular_image'),
        MultiFieldPanel([
            FieldPanel('shadows'),
            FieldPanel('fly_camera'),
            FieldPanel('double_face'),
        ], heading="Visual settings"),
        InlinePanel('material_images', label="Material Image Gallery",),
    ]

    def extract_dxf(self):
        """Parse the uploaded DXF file and return a dict of A-Frame fragments.

        Keys are sequential entity numbers, values are the markup strings
        produced by the matching ``make_*`` helper.  DXF group codes are
        read as (key, value) line pairs; Y coordinates are mirrored to map
        DXF's coordinate system onto A-Frame's.
        """
        # NOTE(review): the file handle is only closed on the happy path at
        # the end of the method; a parsing error would leak it -- consider
        # a with-block.
        path_to_dxf = os.path.join(settings.MEDIA_ROOT, 'documents', self.dxf_file.filename)
        dxf_f = open(path_to_dxf, encoding = 'utf-8')
        material_gallery=self.material_images.all()
        output = {}
        flag = False
        x = 0
        value = 'dummy'
        # Skip the header until the ENTITIES section begins.
        while value !='ENTITIES':
            key = dxf_f.readline().strip()
            value = dxf_f.readline().strip()
        while value !='ENDSEC':
            key = dxf_f.readline().strip()
            value = dxf_f.readline().strip()
            if flag == 'face':#stores values for 3D faces
                if key == '8':#layer name
                    temp[key] = value
                elif key == '10' or key == '11' or key == '12' or key == '13':#X position
                    temp[key] = value
                elif key == '20' or key == '21' or key == '22' or key == '23':#mirror Y position
                    value = -float(value)
                    temp[key] = value
                elif key == '30' or key == '31' or key == '32' or key == '33':#Z position
                    temp[key] = value
            elif flag == 'block':#stores values for blocks
                if key == '2' or key == '8':#block name and layer name
                    temp[key] = value
                elif key == '10' or key == '30':#X Z position
                    temp[key] = value
                elif key == '20':#Y position, mirrored
                    temp[key] = -float(value)
                elif key == '50':#Z rotation
                    temp[key] = value
                elif key == '41' or key == '42' or key == '43':#scale values
                    temp[key] = value
                elif key == '210':#X of OCS unitary vector
                    Az_1 = float(value)
                    P_x = float(temp['10'])
                elif key == '220':#Y of OCS unitary vector
                    Az_2 = float(value)
                    P_y = -float(temp['20'])#reset original value
                elif key == '230':#Z of OCS unitary vector
                    Az_3 = float(value)
                    P_z = float(temp['30'])
                    #arbitrary axis algorithm
                    #see if OCS z vector is close to world Z axis
                    # NOTE(review): the 1/64 threshold matches the DXF
                    # arbitrary-axis algorithm convention -- confirm against
                    # the DXF reference if changing.
                    if fabs(Az_1) < (1/64) and fabs(Az_2) < (1/64):
                        W = ('Y', 0, 1, 0)
                    else:
                        W = ('Z', 0, 0, 1)
                    #cross product for OCS x arbitrary vector, normalized
                    Ax_1 = W[2]*Az_3-W[3]*Az_2
                    Ax_2 = W[3]*Az_1-W[1]*Az_3
                    Ax_3 = W[1]*Az_2-W[2]*Az_1
                    Norm = sqrt(pow(Ax_1, 2)+pow(Ax_2, 2)+pow(Ax_3, 2))
                    Ax_1 = Ax_1/Norm
                    Ax_2 = Ax_2/Norm
                    Ax_3 = Ax_3/Norm
                    #cross product for OCS y arbitrary vector, normalized
                    Ay_1 = Az_2*Ax_3-Az_3*Ax_2
                    Ay_2 = Az_3*Ax_1-Az_1*Ax_3
                    Ay_3 = Az_1*Ax_2-Az_2*Ax_1
                    Norm = sqrt(pow(Ay_1, 2)+pow(Ay_2, 2)+pow(Ay_3, 2))
                    Ay_1 = Ay_1/Norm
                    Ay_2 = Ay_2/Norm
                    Ay_3 = Ay_3/Norm
                    #insertion world coordinates from OCS
                    temp['10'] = P_x*Ax_1+P_y*Ay_1+P_z*Az_1
                    temp['20'] = P_x*Ax_2+P_y*Ay_2+P_z*Az_2
                    temp['30'] = P_x*Ax_3+P_y*Ay_3+P_z*Az_3
                    #OCS X vector translated into WCS
                    Ax_1 = ((P_x+cos(radians(float(temp['50']))))*Ax_1+(P_y+sin(radians(float(temp['50']))))*Ay_1+P_z*Az_1)-temp['10']
                    Ax_2 = ((P_x+cos(radians(float(temp['50']))))*Ax_2+(P_y+sin(radians(float(temp['50']))))*Ay_2+P_z*Az_2)-temp['20']
                    Ax_3 = ((P_x+cos(radians(float(temp['50']))))*Ax_3+(P_y+sin(radians(float(temp['50']))))*Ay_3+P_z*Az_3)-temp['30']
                    #cross product for OCS y vector, normalized
                    Ay_1 = Az_2*Ax_3-Az_3*Ax_2
                    Ay_2 = Az_3*Ax_1-Az_1*Ax_3
                    Ay_3 = Az_1*Ax_2-Az_2*Ax_1
                    Norm = sqrt(pow(Ay_1, 2)+pow(Ay_2, 2)+pow(Ay_3, 2))
                    Ay_1 = Ay_1/Norm
                    Ay_2 = Ay_2/Norm
                    Ay_3 = Ay_3/Norm
                    #A-Frame rotation order is Yaw(Z), Pitch(X) and Roll(Y)
                    #thanks for help <NAME> and https://www.geometrictools.com/
                    if Ay_3<1:
                        if Ay_3>-1:
                            pitch = asin(Ay_3)
                            yaw = atan2(-Ay_1, Ay_2)
                            roll = atan2(-Ax_3, Az_3)
                        else:
                            pitch = -pi/2
                            yaw = -atan2(Az_1, Ax_1)
                            roll = 0
                    else:
                        pitch = pi/2
                        yaw = atan2(Az_1, Ax_1)
                        roll = 0
                    #Y position, mirrored
                    temp['20'] = -temp['20']
                    #rotations from radians to degrees
                    temp['210'] = degrees(pitch)
                    temp['50'] = degrees(yaw)
                    temp['220'] = -degrees(roll)
            elif flag == 'attrib':#stores values for attributes within block
                if key == '1':#attribute value
                    attr_value = value
                elif key == '2':#attribute key
                    temp[value] = attr_value
                    flag = 'block'#restore block modality
            if key == '0':
                # Group code 0 terminates the current entity (or starts a
                # new one); close whatever entity was being collected.
                if flag == 'face':#close 3D face
                    #is material set in model?
                    no_color=True
                    if material_gallery:
                        for material in material_gallery:
                            if material.layer == temp['8']:
                                no_color=False
                                temp['color'] = material.color
                    if no_color:#color is still not set for layer, so we use default
                        temp['8'] = 'default'
                        temp['color'] = 'white'
                    output[x] = self.make_triangle_1(x, temp)
                    # A 3DFACE with distinct 3rd/4th vertices is a quad:
                    # emit a second triangle for the other half.
                    if temp['12']!=temp['13'] or temp['22']!=temp['23'] or temp['32']!=temp['33']:
                        x += 1
                        output[x] = self.make_triangle_2(x, temp)
                    flag = False
                elif value == 'ATTRIB':#start attribute within block
                    attr_value = ''
                    flag = 'attrib'
                elif flag == 'block':#close block
                    #material images are patterns? is material set in model?
                    no_color=True
                    if material_gallery:
                        for material in material_gallery:
                            if material.layer == temp['8']:
                                no_color=False
                                temp['color'] = material.color
                                if material.pattern:# == True
                                    temp['repeat']=True
                    if no_color:#color is still not set for layer, so we use default
                        temp['8'] = 'default'
                        temp['color'] = 'white'
                    # Dispatch on the block name to the matching emitter.
                    if temp['2'] == '6planes':#left for legacy
                        output[x] = self.make_box(x, temp)
                    elif temp['2'] == 'box' or temp['2'] == 'a-box':
                        output[x] = self.make_box(x, temp)
                    elif temp['2'] == 'cylinder' or temp['2'] == 'a-cylinder':
                        output[x] = self.make_cylinder(x, temp)
                    elif temp['2'] == 'cone' or temp['2'] == 'a-cone':
                        output[x] = self.make_cone(x, temp)
                    elif temp['2'] == 'sphere' or temp['2'] == 'a-sphere':
                        output[x] = self.make_sphere(x, temp)
                    elif temp['2'] == 'circle' or temp['2'] == 'a-circle':
                        output[x] = self.make_circle(x, temp)
                    elif temp['2'] == 'plane' or temp['2'] == 'a-plane' or temp['2'] == 'look-at':
                        output[x] = self.make_plane(x, temp)
                    elif temp['2'] == 'floor':#left for legacy
                        temp['210'] = float(temp['210']) - 90
                        output[x] = self.make_plane(x, temp)
                    elif temp['2'] == 'ceiling':#left for legacy
                        temp['210'] = float(temp['210']) + 90
                        output[x] = self.make_plane(x, temp)
                    elif temp['2'] == 'light' or temp['2'] == 'a-light':
                        output[x] = self.make_light(x, temp)
                    elif temp['2'] == 'a-text':
                        output[x] = self.make_text(x, temp)
                    elif temp['2'] == 'a-link':
                        output[x] = self.make_link(x, temp)
                    flag = False
                if value == '3DFACE':#start 3D face
                    temp = {}#default values
                    flag = 'face'
                    x += 1
                elif value == 'INSERT':#start block
                    temp = {'41': 1, '42': 1, '43': 1, '50': 0, '210': 0, '220': 0, '230': 1,'repeat': False}#default values
                    flag = 'block'
                    x += 1
        dxf_f.close()
        return output

    def is_repeat(self, repeat, rx, ry):
        """Return a texture-repeat material attribute, or just ';' when off."""
        if repeat:
            output = f'; repeat:{rx} {ry}'
            return output
        else:
            return ';'

    def make_box(self, x, temp):
        """Render an a-box entity (position/rotation wrapper + scaled box)."""
        outstr = f'<a-entity id="box-ent-{x}" \n'
        outstr += f'position="{temp["10"]} {temp["30"]} {temp["20"]}" \n'
        outstr += f'rotation="{temp["210"]} {temp["50"]} {temp["220"]}">\n'
        outstr += f'<a-box id="box-{x}" \n'
        outstr += f'position="{float(temp["41"])/2} {float(temp["43"])/2} {-float(temp["42"])/2}" \n'
        outstr += f'scale="{temp["41"]} {temp["43"]} {temp["42"]}" \n'
        outstr += 'geometry="'
        # Optional segment attributes may be absent; the bare KeyError
        # fallback just closes the geometry string.
        try:
            if temp['segments-depth']!='1':
                outstr += f'segments-depth: {temp["segments-depth"]};'
            if temp['segments-height']!='1':
                outstr += f'segments-height: {temp["segments-height"]};'
            if temp['segments-width']!='1':
                outstr += f'segments-width: {temp["segments-width"]};'
            outstr += '" \n'
        except KeyError:
            outstr += '" \n'
        outstr += f'material="src: #image-{temp["8"]}; color: {temp["color"]}'
        outstr += self.is_repeat(temp["repeat"], temp["41"], temp["43"])
        outstr += '">\n</a-box>\n</a-entity>\n'
        return outstr

    def make_cone(self, x, temp):
        """Render an a-cone entity."""
        outstr = f'<a-entity id="cone-ent-{x}" \n'
        outstr += f'position="{temp["10"]} {temp["30"]} {temp["20"]}" \n'
        outstr += f'rotation="{temp["210"]} {temp["50"]} {temp["220"]}">\n'
        outstr += f'<a-cone id="cone-{x}" \n'
        outstr += f'position="0 {float(temp["43"])/2} 0" \n'
        outstr += f'scale="{temp["41"]} {temp["43"]} {temp["42"]}" \n'
        outstr += 'geometry="'
        try:
            if temp['open-ended']!='false':
                outstr += 'open-ended: true;'
            if temp['radius-top']!='0':
                outstr += f'radius-top: {temp["radius-top"]};'
            if temp['segments-height']!='18':
                outstr += f'segments-height: {temp["segments-height"]};'
            if temp['segments-radial']!='36':
                outstr += f'segments-radial: {temp["segments-radial"]};'
            if temp['theta-length']!='360':
                outstr += f'theta-length: {temp["theta-length"]};'
            if temp['theta-start']!='0':
                outstr += f'theta-start: {temp["theta-start"]};'
            outstr += '" \n'
        except KeyError:
            outstr += '" \n'
        outstr += f'material="src: #image-{temp["8"]}; color: {temp["color"]}'
        outstr += self.is_repeat(temp["repeat"], temp["41"], temp["43"])
        outstr += '">\n</a-cone>\n</a-entity>\n'
        return outstr

    def make_circle(self, x, temp):
        """Render an a-circle entity (lies flat when the block is 'circle')."""
        outstr = f'<a-entity id="circle-ent-{x}" \n'
        outstr += f'position="{temp["10"]} {temp["30"]} {temp["20"]}" \n'
        outstr += f'rotation="{temp["210"]} {temp["50"]} {temp["220"]}">\n'
        outstr += f'<a-circle id="circle-{x}" \n'
        if temp['2'] == 'circle':
            outstr += f'rotation="-90 0 0"\n'
        outstr += f'radius="{temp["41"]}" \n'
        outstr += 'geometry="'
        try:
            if temp['segments']!='32':
                outstr += f'segments: {temp["segments"]};'
            if temp['theta-length']!='360':
                outstr += f'theta-length: {temp["theta-length"]};'
            if temp['theta-start']!='0':
                outstr += f'theta-start: {temp["theta-start"]};'
            outstr += '" \n'
        except KeyError:
            outstr += '" \n'
        outstr += f'material="src: #image-{temp["8"]}; color: {temp["color"]}'
        outstr += self.is_repeat(temp["repeat"], temp["41"], temp["43"])
        outstr += '">\n</a-circle>\n</a-entity>\n'
        return outstr

    def make_cylinder(self, x, temp):
        """Render an a-cylinder entity."""
        outstr = f'<a-entity id="cylinder-ent-{x}" \n'
        outstr += f'position="{temp["10"]} {temp["30"]} {temp["20"]}" \n'
        outstr += f'rotation="{temp["210"]} {temp["50"]} {temp["220"]}">\n'
        outstr += f'<a-cylinder id="cylinder-{x}" \n'
        outstr += f'position="0 {float(temp["43"])/2} 0" \n'
        outstr += f'scale="{temp["41"]} {temp["43"]} {temp["42"]}" \n'
        outstr += 'geometry="'
        try:
            if temp['open-ended']!='false':
                outstr += 'open-ended: true;'
            if temp['radius-top']!='0':
                outstr += f'radius-top: {temp["radius-top"]};'
            if temp['segments-height']!='18':
                outstr += f'segments-height: {temp["segments-height"]};'
            if temp['segments-radial']!='36':
                outstr += f'segments-radial: {temp["segments-radial"]};'
            if temp['theta-length']!='360':
                outstr += f'theta-length: {temp["theta-length"]};'
            if temp['theta-start']!='0':
                outstr += f'theta-start: {temp["theta-start"]};'
            outstr += '" \n'
        except KeyError:
            outstr += '" \n'
        outstr += f'material="src: #image-{temp["8"]}; color: {temp["color"]}'
        outstr += self.is_repeat(temp["repeat"], temp["41"], temp["43"])
        outstr += '">\n</a-cylinder>\n</a-entity>\n'
        return outstr

    def make_sphere(self, x, temp):
        """Render an a-sphere entity."""
        outstr = f'<a-entity id="sphere-ent-{x}" \n'
        outstr += f'position="{temp["10"]} {temp["30"]} {temp["20"]}" \n'
        outstr += f'rotation="{temp["210"]} {temp["50"]} {temp["220"]}">\n'
        outstr += f'<a-sphere id="sphere-{x}" \n'
        outstr += f'position="0 {temp["43"]} 0" \n'
        outstr += f'scale="{temp["41"]} {temp["43"]} {temp["42"]}" \n'
        outstr += 'geometry="'
        try:
            if temp['phi-length']!='360':
                outstr += f'phi-length: {temp["phi-length"]};'
            if temp['phi-start']!='0':
                outstr += f'phi-start: {temp["phi-start"]};'
            if temp['segments-height']!='18':
                outstr += f'segments-height: {temp["segments-height"]};'
            if temp['segments-width']!='36':
                outstr += f'segments-width: {temp["segments-width"]};'
            if temp['theta-length']!='180':
                outstr += f'theta-length: {temp["theta-length"]};'
            if temp['theta-start']!='0':
                outstr += f'theta-start: {temp["theta-start"]};'
            outstr += '" \n'
        except KeyError:
            outstr += '" \n'
        outstr += f'material="src: #image-{temp["8"]}; color: {temp["color"]}'
        outstr += self.is_repeat(temp["repeat"], temp["41"], temp["43"])
        outstr += '">\n</a-sphere>\n</a-entity>\n'
        return outstr

    def make_plane(self, x, temp):
        """Render an a-plane entity (also used for look-at/floor/ceiling blocks)."""
        outstr = f'<a-entity id="plane-ent-{x}" \n'
        outstr += f'position="{temp["10"]} {temp["30"]} {temp["20"]}" \n'
        outstr += f'rotation="{temp["210"]} {temp["50"]} {temp["220"]}">\n'
        outstr += f'<a-plane id="plane-{x}" \n'
        if temp['2'] == 'look-at':#if it's a look at, it is centered and looks at the camera foot
            outstr += f'position="0 {float(temp["43"])/2} 0" \n'
            outstr += 'look-at="#camera-foot" \n'
        elif temp['2'] == 'ceiling':#if it's a ceiling, correct position
            outstr += f'position="{float(temp["41"])/2} {-float(temp["43"])/2} 0" \n'
        else:#insertion is at corner
            outstr += f'position="{float(temp["41"])/2} {float(temp["43"])/2} 0" \n'
        outstr += f'width="{temp["41"]}" height="{temp["43"]}" \n'
        outstr += 'geometry="'
        try:
            if temp['segments-height']!='1':
                outstr += f'segments-height: {temp["segments-height"]};'
            if temp['segments-width']!='1':
                outstr += f'segments-width: {temp["segments-width"]};'
            outstr += '" \n'
        except KeyError:
            outstr += '" \n'
        outstr += f'material="src: #image-{temp["8"]}; color: {temp["color"]}'
        outstr += self.is_repeat(temp["repeat"], temp["41"], temp["43"])
        outstr += '">\n</a-plane>\n</a-entity>\n'
        return outstr

    def make_text(self, x, temp):
        """Render an a-entity with a text component from block attributes."""
        outstr = f'<a-entity id="text-{x}" \n'
        outstr += f'position="{temp["10"]} {temp["30"]} {temp["20"]}" \n'
        outstr += f'rotation="{temp["210"]} {temp["50"]} {temp["220"]}"\n'
        outstr += f'text="width: {temp["41"]}; align: {temp["align"]}; color: {temp["color"]}; '
        outstr += f'value: {temp["text"]}; wrap-count: {temp["wrap-count"]}; '
        outstr += '">\n</a-entity>\n'
        return outstr

    def make_link(self, x, temp):
        """Render an a-link pointing at a related page in the Wagtail tree.

        The 'tree' attribute selects parent/child/previous/next sibling;
        returns '' when no such page exists.
        """
        outstr = f'<a-link id="link-{x}" \n'
        outstr += f'position="{temp["10"]} {temp["30"]} {temp["20"]}" \n'
        outstr += f'rotation="{temp["210"]} {temp["50"]} {temp["220"]}"\n'
        outstr += f'scale="{temp["41"]} {temp["43"]} {temp["42"]}"\n'
        if temp['tree'] == 'parent':
            target = self.get_parent()
        elif temp['tree'] == 'child':
            target = self.get_first_child()
        elif temp['tree'] == 'previous' or temp['tree'] == 'prev':
            target = self.get_prev_sibling()
        else:#we default to next sibling
            target = self.get_next_sibling()
        if target:
            outstr += f'href="{target.url}"\n'
            outstr += f'title="{temp["title"]}" color="{temp["color"]}" on="click"\n'
            eq_image = target.specific.equirectangular_image
            if eq_image:
                outstr += f'image="{eq_image.file.url}"'
            else:
                outstr += 'image="#default-sky"'
            outstr += '>\n</a-link>\n'
            return outstr
        else:
            return ''

    def make_triangle_1(self, x, temp):
        """Render the first triangle of a 3DFACE (vertices A, B, C)."""
        outstr = f'<a-triangle id="triangle-{x}" \n'
        outstr += f'geometry="vertexA:{temp["10"]} {temp["30"]} {temp["20"]}; \n'
        outstr += f'vertexB:{temp["11"]} {temp["31"]} {temp["21"]}; \n'
        outstr += f'vertexC:{temp["12"]} {temp["32"]} {temp["22"]}" \n'
        outstr += f'material="src: #image-{temp["8"]}; color: {temp["color"]}; '
        if self.double_face:
            outstr += 'side: double; '
        outstr += '">\n</a-triangle> \n'
        return outstr

    def make_triangle_2(self, x, temp):
        """Render the second triangle of a quad 3DFACE (vertices A, C, D)."""
        outstr = f'<a-triangle id="triangle-{x}" \n'
        outstr += f'geometry="vertexA:{temp["10"]} {temp["30"]} {temp["20"]}; \n'
        outstr += f'vertexB:{temp["12"]} {temp["32"]} {temp["22"]}; \n'
        outstr += f'vertexC:{temp["13"]} {temp["33"]} {temp["23"]}" \n'
        outstr += f'material="src: #image-{temp["8"]}; color: {temp["color"]}; '
        if self.double_face:
            outstr += 'side: double; '
        outstr += '">\n</a-triangle> \n'
        return outstr

    def make_light(self, x, temp):
        """Render an a-entity light (ambient/point/spot, default directional)."""
        outstr = f'<a-entity id="light-{x}" \n'
        outstr += f'position="{temp["10"]} {temp["30"]} {temp["20"]}" \n'
        outstr += f'rotation="{temp["210"]} {temp["50"]} {temp["220"]}"\n'
        try:
            if temp['type'] == 'ambient':
                outstr += f'light="type: ambient; color: {temp["color"]}; intensity: {temp["intensity"]}; '
                outstr += '">\n</a-entity>\n'#close light entity
            elif temp['type'] == 'point':
                outstr += f'light="type: point; color: {temp["color"]}; intensity: {temp["intensity"]}; '
                outstr += f'decay: {temp["decay"]}; distance: {temp["distance"]}; '
                if self.shadows:
                    outstr += 'castShadow: true; '
                outstr += '"> \n</a-entity>\n'#close light entity
            elif temp['type'] == 'spot':
                outstr += f'light="type: spot; color: {temp["color"]}; intensity: {temp["intensity"]}; '
                outstr += f'decay: {temp["decay"]}; distance: {temp["distance"]}; '
                outstr += f'angle: {temp["angle"]}; penumbra: {temp["penumbra"]}; '
                if self.shadows:
                    outstr += 'castShadow: true; '
                outstr += f'target: #light-{x}-target;"> \n'
                outstr += f'<a-entity id="light-{x}-target" position="0 -1 0"> </a-entity> \n</a-entity> \n'#close light entity
            else:#defaults to directional
                outstr += f'light="type: directional; color: {temp["color"]}; intensity: {temp["intensity"]}; '
                if self.shadows:
                    outstr += 'castShadow: true; '
                outstr += f'target: #light-{x}-target;"> \n'
                outstr += f'<a-entity id="light-{x}-target" position="0 -1 0"> </a-entity> \n</a-entity> \n'#close light entity
        except KeyError:#default if no light type is set
            outstr += 'light="type: point; intensity: 0.75; distance: 50; decay: 2; '
            if self.shadows:
                outstr += 'castShadow: true;'
            outstr += '">\n</a-entity>\n'#close light entity
        return outstr
class Dxf2VrPageMaterialImage(Orderable):
    """Maps a DXF layer name to a material (texture image + color).

    ``extract_dxf`` matches each entity's layer against ``layer`` to pick
    the color and optional repeating texture pattern.
    """
    page = ParentalKey(Dxf2VrPage, related_name='material_images')
    # Texture image applied to entities on this layer.
    image = models.ForeignKey(
        'wagtailimages.Image',
        null=True,
        blank=True,
        on_delete = models.SET_NULL,
        related_name = '+',
    )
    # DXF layer name this material applies to.
    layer = models.CharField(max_length=250, default="0",)
    # CSS color name/value for the material.
    color = models.CharField(max_length=250, default="white",)
    # When True the texture repeats (tiled) over the entity surface.
    pattern = models.BooleanField(default=False)

    panels = [
        FieldPanel('layer'),
        ImageChooserPanel('image'),
        FieldPanel('pattern'),
        FieldPanel('color'),
    ]
167428 | <reponame>filfreire/questions-three
from datetime import datetime, timedelta
from functools import partial
from io import StringIO
import os
from subprocess import PIPE, Popen, STDOUT
import sys
from time import sleep
from unittest import TestCase, main
from expects import expect, be_empty, contain, equal, have_length
from twin_sister import open_dependency_context
from questions_three.ci import run_all
from twin_sister.expects_matchers import contain_key_with_value
class FakeStream(StringIO):
    """A StringIO whose read() yields UTF-8 bytes, mimicking a binary pipe."""

    def read(self):
        return super().read().encode("utf-8")
class FakePopen:
    """Minimal stand-in for ``subprocess.Popen`` used by these tests.

    The "Fake interface" methods let a test drive the simulated process
    (inject stdout text, start it, mark it complete); the "Real interface"
    methods mirror the subset of the Popen API exercised by the code
    under test.
    """

    def __init__(self, complete=True, exit_code=0):
        self._is_started = False
        self._is_complete = complete
        self._exit_code = exit_code
        self.pid = 42
        self.stdout = FakeStream("")

    # Fake interface
    def fake_stdout(self, s):
        """Replace the simulated process's stdout stream with ``s``."""
        self.stdout = FakeStream(s)

    # Real interface
    def communicate(self):
        # Busy-wait until another thread marks the process complete.
        while not self._is_complete:
            pass
        # Bug fix: the original returned ``self._stdout``, an attribute that
        # was never assigned (the stream is stored as ``stdout``), so
        # communicate() always raised AttributeError.  Return the stdout
        # *data* plus empty stderr, matching Popen.communicate's contract.
        return self.stdout.read(), b""

    # Fake interface
    def is_running(self):
        return self._is_started and not self._is_complete

    # Fake interface
    def start(self):
        self._is_started = True

    # Fake interface
    def complete(self):
        self._is_complete = True

    # Real interface
    def poll(self):
        # Like Popen.poll: exit code once complete, otherwise None.
        self.returncode = self._exit_code if self._is_complete else None
        return self.returncode

    # Real interface
    def wait(self, timeout=None):
        # ``timeout`` is accepted for API compatibility but ignored;
        # this busy-waits until complete() is called from another thread.
        while not self._is_complete:
            pass
        self.returncode = self._exit_code
        return self.returncode
class FakePopenClass:
    """Callable factory that records Popen invocations and serves canned fakes.

    Tests register pre-built FakePopen objects per script path in
    ``canned_objects``; every call is logged in ``opened``.
    """

    def __init__(self):
        self.canned_objects = {}
        self.opened = []

    def __call__(self, args, **kwargs):
        # The spelling of args is intentional. We expect a sequence.
        # See standard library documentation for subprocess.Popen
        self.opened.append({"args": args, "kwargs": kwargs})
        if len(args) > 1 and args[1] in self.canned_objects:
            canned = self.canned_objects[args[1]]
            canned.start()
            return canned
        # No canned object for this script: hand back an already-finished fake.
        return FakePopen(complete=True)

    def scripts_executed(self):
        """Return the script path (argv[1]) of every recorded invocation."""
        return [call["args"][1] for call in self.opened]
class TestRunAll(TestCase):
    """Behavioral tests for run_all: suite discovery, parallel execution,
    worker limits, exit codes, and output capture.

    Popen is replaced with FakePopenClass via dependency injection, so no
    real subprocesses are launched; the filesystem, environment, and logging
    are faked through open_dependency_context.
    """
    def setUp(self):
        self.context = open_dependency_context(supply_fs=True, supply_env=True, supply_logging=True)
        self.context.set_env(DELAY_BETWEEN_CHECKS_FOR_PARALLEL_SUITE_COMPLETION=0, MAX_PARALLEL_SUITES=5)
        self.popen_class = FakePopenClass()
        self.context.inject(Popen, self.popen_class)
    def tearDown(self):
        self.context.close()
    def fake_file(self, filename):
        # Create an empty file in the faked filesystem.
        self.context.create_file(filename, content="")
    def test_runs_python_script_buried_in_specified_directory(self):
        path = "spinach"
        filename = self.context.os.path.join(path, "spam", "eggs", "sausage.py")
        self.fake_file(filename=filename)
        run_all(path)
        expect(self.popen_class.scripts_executed()).to(contain(filename))
    def test_uses_same_python_executable_as_self(self):
        expected = "/path/to/python"
        self.context.inject(sys.executable, expected)
        self.fake_file(filename="./whatever.py")
        run_all(".")
        opened = self.popen_class.opened
        expect(opened).to(have_length(1))
        expect(opened[0]["args"][0]).to(equal(expected))
    def test_does_not_run_non_python_script(self):
        path = "the"
        unexpected = os.path.join(path, "spinach", "imposition")
        self.fake_file(filename=unexpected)
        run_all(path)
        expect(self.popen_class.scripts_executed()).not_to(contain(unexpected))
    def test_does_not_run_script_with_leading_underscore(self):
        path = "the"
        unexpected = os.path.join(path, "spinach", "_imposition.py")
        self.fake_file(filename=unexpected)
        run_all(path)
        expect(self.popen_class.scripts_executed()).not_to(contain(unexpected))
    def test_does_not_run_script_outside_specified_directory(self):
        self.fake_file(filename="/sir/not/appearing/in/this/film.py")
        run_all("/my_tests")
        expect(self.popen_class.scripts_executed()).to(be_empty)
    def test_runs_scripts_in_parallel(self):
        path = "somewhere"
        first_script = os.path.join(path, "script1.py")
        self.fake_file(first_script)
        p1 = FakePopen(complete=False)
        self.popen_class.canned_objects[first_script] = p1
        second_script = os.path.join(path, "script2.py")
        self.fake_file(second_script)
        p2 = FakePopen(complete=False)
        self.popen_class.canned_objects[second_script] = p2
        t = self.context.create_time_controller(target=partial(run_all, path))
        t.start()
        # Poll until both fakes report running, or give up after 0.1 s.
        expiry = datetime.now() + timedelta(seconds=0.1)
        while not ((p1.is_running() and p2.is_running()) or datetime.now() > expiry):
            sleep(0.01)
        try:
            assert p1.is_running(), "First process is not running"
            assert p2.is_running(), "Second process is not running"
        finally:
            # clean-up
            p1.complete()
            p2.complete()
            t.join()
    def set_limit(self, limit):
        # Helper: cap the number of concurrently running suites.
        self.context.set_env(MAX_PARALLEL_SUITES=limit)
    def test_limits_process_count_to_config(self):
        path = "my_test_suites"
        limit = 3
        self.set_limit(limit)
        # Register more suites than the limit allows.
        procs = {os.path.join(path, "suite_%d.py" % n): FakePopen(complete=False) for n in range(limit + 2)}
        for filename, proc in procs.items():
            self.fake_file(filename)
            self.popen_class.canned_objects[filename] = proc
        t = self.context.create_time_controller(target=partial(run_all, path))
        t.start()
        sleep(0.1)
        running = 0
        for proc in procs.values():
            if proc.is_running():
                running += 1
        try:
            expect(running).to(equal(limit))
        finally:
            for proc in procs.values():
                proc.complete()
            t.join()
    def test_executes_next_script_when_worker_is_ready(self):
        self.set_limit(1)
        path = "somewhere"
        script1 = os.path.join(path, "one.py")
        p1 = FakePopen(complete=False)
        self.fake_file(script1)
        self.popen_class.canned_objects[script1] = p1
        script2 = os.path.join(path, "two.py")
        p2 = FakePopen(complete=False)
        self.fake_file(script2)
        self.popen_class.canned_objects[script2] = p2
        t = self.context.create_time_controller(target=partial(run_all, path))
        t.start()
        sleep(0.1)
        try:
            assert p1.is_running(), "P1 is not running"
            assert not p2.is_running(), "P2 ran while P1 was still running"
            p1.complete()
            sleep(0.1)
            assert not p1.is_running(), "P1 did not stop"
            assert p2.is_running(), "P2 did not start"
        finally:
            p1.complete()
            p2.complete()
            t.join()
    def test_returns_0_when_all_exit_0(self):
        path = "things/and/stuff"
        procs = {os.path.join(path, "suite_%d.py" % n): FakePopen(complete=True, exit_code=0) for n in range(5)}
        for filename, proc in procs.items():
            self.fake_file(filename)
            self.popen_class.canned_objects[filename] = proc
        expect(run_all(path)).to(equal(0))
    def test_returns_non_zero_when_one_exits_non_zero(self):
        path = "things/and/stuff"
        procs = {os.path.join(path, "suite_%d.py" % n): FakePopen(complete=True, exit_code=0) for n in range(4)}
        procs[os.path.join(path, "oops.py")] = FakePopen(complete=True, exit_code=1)
        for filename, proc in procs.items():
            self.fake_file(filename)
            self.popen_class.canned_objects[filename] = proc
        expect(run_all(path)).not_to(equal(0))
    def test_sends_stdout_to_pipe(self):
        path = "things"
        filename = os.path.join(path, "thing.py")
        self.fake_file(filename)
        run_all(path)
        opened = self.popen_class.opened
        expect(opened).to(have_length(1))
        expect(opened[0]["kwargs"]).to(contain_key_with_value("stdout", PIPE))
    def test_pipes_stderr_to_stdout(self):
        path = "things"
        filename = os.path.join(path, "thing.py")
        self.fake_file(filename)
        run_all(path)
        opened = self.popen_class.opened
        expect(opened).to(have_length(1))
        expect(opened[0]["kwargs"]).to(contain_key_with_value("stderr", STDOUT))
    def test_outputs_captured_stdout_on_proc_exit(self):
        # Output produced while the proc runs must be shown only after exit.
        expected = "Our chief weapons are suprise, blah blah\n"
        unexpected = "wrong!\n"
        spy = StringIO()
        self.context.inject(sys.stdout, spy)
        path = "ximinez"
        proc = FakePopen(complete=False, exit_code=0)
        proc.fake_stdout(unexpected)
        filename = os.path.join(path, "something.py")
        self.fake_file(filename)
        self.popen_class.canned_objects[filename] = proc
        t = self.context.create_time_controller(target=partial(run_all, path))
        t.start()
        sleep(0.05)
        proc.fake_stdout(expected)
        proc.complete()
        t.join()
        actual = spy.getvalue()
        expect(actual).not_to(contain(unexpected))
        expect(actual).to(contain(expected))
    def test_waits_for_all_to_exit(self):
        path = "spamwhere"
        proc = FakePopen(complete=False)
        filename = os.path.join(path, "spamthing.py")
        self.fake_file(filename)
        self.popen_class.canned_objects[filename] = proc
        t = self.context.create_time_controller(target=partial(run_all, path))
        t.start()
        sleep(0.1)
        try:
            assert t.is_alive(), "Thread exited prematurely"
        finally:
            proc.complete()
            t.join()
    def test_logs_script_name_on_start(self):
        path = "the"
        filename = os.path.join(path, "rainbow.py")
        self.fake_file(filename)
        proc = FakePopen(complete=False)
        self.popen_class.canned_objects[filename] = proc
        t = self.context.create_time_controller(target=partial(run_all, path))
        t.start()
        sleep(0.1)
        messages = [rec.msg for rec in self.context.logging.stored_records]
        try:
            expect(messages).to(contain("Executing %s\n" % filename))
        finally:
            proc.complete()
            t.join()
if __name__ == "__main__":
    main()
| StarcoderdataPython |
3384796 | <reponame>Pat-Lafon/auto-selfcontrol
import setuptools
setuptools.setup(
    name='auto-selfcontrol',
    version='1.0',
    description='Small utility to schedule start and stop times of SelfControl',
    # A bare "github.com/..." is not a valid URL for package metadata; include the scheme.
    url='https://github.com/andreasgrill/auto-selfcontrol',
    long_description=open('README.md').read(),
    # Tell indexes how to render the long description (the README is Markdown).
    long_description_content_type='text/markdown',
    packages=setuptools.find_packages()
)
3227557 | <gh_stars>0
import unittest
def fib_num(n):
    """Return the n-th Fibonacci number (1-indexed: fib_num(1) == fib_num(2) == 1).

    Defined with ``def`` rather than a lambda assigned to a name (PEP 8 E731)
    so tracebacks show a useful function name.
    """
    return fib_num(n - 1) + fib_num(n - 2) if n > 2 else 1
class TestFibNum(unittest.TestCase):
    """Sanity checks for fib_num on small inputs."""

    def test_simple_1(self):
        third = fib_num(3)
        self.assertEqual(third, 2)

    def test_simple_2(self):
        fifth = fib_num(5)
        self.assertTrue(fifth > 3)

    def test_simple_3(self):
        fifth = fib_num(5)
        self.assertFalse(fifth < 3)
| StarcoderdataPython |
3355888 | <reponame>timtyree/bgmc
import numpy as np, pandas as pd
from ..utils.projection_func import get_subtract_pbc
#Programmer: <NAME>
#Date: 5.10.2021
#Group: Rappel
############
#Conventions
############
# $$
# \text{Let } \varphi_1\equiv\sin^{-1}\big(\widehat{\mathbf{x}_2-\mathbf{x}_1}\;\times\;\hat{\mathbf{a}}_1\big),
# $$
# $$
# \text{and let } \varphi_2\equiv\sin^{-1}\big(\widehat{\mathbf{x}_1-\mathbf{x}_2}\;\times\;\hat{\mathbf{a}}_2\big).
# $$
#######
#Module
#######
def get_compute_displacements_between(width,height):
    """Return a closure computing displacements (pixels) between two tip tracks
    on a periodic domain of the given width and height."""
    subtract_pbc = get_subtract_pbc(width=width, height=height)

    def compute_displacements_between(d1, d2, t_col='t', **kwargs):
        '''computes the displacements between particle 1 and particle 2 in units of pixels.
        supposes the index indexes time.'''
        # align locations by time index; rows missing in either track are dropped
        dd = d1.set_index(t_col)[['x', 'y']].copy()
        dd[['xx', 'yy']] = d2.set_index(t_col)[['x', 'y']]
        dd.dropna(inplace=True)
        t_values = dd.index.values
        xy1_values = np.array(list(zip(dd['x'], dd['y'])))
        xy2_values = np.array(list(zip(dd['xx'], dd['yy'])))
        num_rows = min(xy1_values.shape[0], xy2_values.shape[0])
        displacements = np.zeros((num_rows, 2))
        # subtract under periodic boundary conditions, row by row
        for row in range(num_rows):
            displacements[row] = subtract_pbc(xy2_values[row], xy1_values[row])
        return displacements, t_values

    return compute_displacements_between
def get_compute_ranges_between(width,height):
    '''Example Usage:
    compute_ranges_between=get_compute_ranges_between(width=width,height=height)
    range_values,t_values=compute_ranges_between(d1,d2,t_col='t',**kwargs)
    '''
    compute_displacements_between = get_compute_displacements_between(width=width, height=height)

    def compute_ranges_between(d1, d2, t_col='t', **kwargs):
        '''computes the phases between particle 1 and particle 2 in units of radians.
        returns range between particles in units of pixels'''
        displacements, t_values = compute_displacements_between(d1, d2, t_col=t_col, **kwargs)
        # Euclidean norm of each displacement row gives the separation.
        return np.linalg.norm(displacements, axis=1), t_values

    return compute_ranges_between
def compute_phases_between(d1,d2,dict_activation_front,field='lesser_xy_values'):
    '''computes the phases between particle 1 and particle 2 in units of radians.
    returns range between particles in units of pixels

    Parameters
    ----------
    d1, d2 : pandas.DataFrame
        Tip trajectories with 'x' and 'y' columns, row-aligned in time.
    dict_activation_front : dict
        Activation-front record; must hold 't' and `field`, whose entries
        (after the first, which is skipped) correspond row-for-row to d1/d2.
    field : str
        Key in dict_activation_front holding the per-time contour points.

    Returns
    -------
    (range_values, phi1_values, phi2_values) : numpy arrays
        Separation in pixels and the angle each tip makes with the front.
    '''
    # compute displacement unit vector from tip 1 to tip 2
    xy1_values=np.array(list(zip(d1['x'],d1['y'])))
    xy2_values=np.array(list(zip(d2['x'],d2['y'])))
    xy2_minus_xy1_values=xy2_values-xy1_values
    range_values=np.linalg.norm(xy2_minus_xy1_values, axis=1)
    x2_minus_x1_hat_values=xy2_minus_xy1_values[:,0]/range_values
    y2_minus_y1_hat_values=xy2_minus_xy1_values[:,1]/range_values
    xy2_minus_xy1_hat_values=np.array(list(zip(x2_minus_x1_hat_values,y2_minus_y1_hat_values)))
    #compute directions of activation fronts
    daf=dict_activation_front
    t_values=np.array(daf['t'])[1:]
    xy_values_lst=daf[field][1:]
    phi1_lst=[];phi2_lst=[]
    for i in range(len(xy_values_lst)):
        dx1dx2_hat=xy2_minus_xy1_hat_values[i]
        xy_values=xy_values_lst[i]
        # unit tangents of the activation front at its two ends
        a1=xy_values[1]-xy_values[0]
        a1_hat=a1/np.linalg.norm(a1)
        a2=xy_values[-2]-xy_values[-1]
        a2_hat=a2/np.linalg.norm(a2)
        #TODO(later, to scale method): convert all subtraction operations to explicitely enforce pbc...
        # BUGFIX: use the *unit* tangents in the cross product.  The original
        # passed the raw (unnormalized) a1/a2, so |cross| could exceed 1 and
        # arcsin returned nan whenever a tangent segment was longer than 1 px.
        phi1=np.arcsin(np.cross(dx1dx2_hat,a1_hat))
        phi2=np.arcsin(np.cross(-1.*dx1dx2_hat,a2_hat))
        phi1_lst.append(phi1)
        phi2_lst.append(phi2)
    phi1_values=np.array(phi1_lst)
    phi2_values=np.array(phi2_lst)
    #decide to make 90 degrees positive (nan -> +pi/2 by convention)
    boo=np.isnan(phi1_values)
    phi1_values[boo]=np.pi/2.
    boo=np.isnan(phi2_values)
    phi2_values[boo]=np.pi/2.
    return range_values,phi1_values,phi2_values
def comp_relative_phase(phi1_values,phi2_values):
    """Return (phi2+phi1, phi2-phi1) for a pair of phase arrays."""
    total = phi2_values + phi1_values
    difference = phi2_values - phi1_values
    return total, difference
def compute_phi_values(dict_tips):
    '''compute phase angle using lesser contour
    Example Usage:
    phi_lst=compute_phi_values(dict_tips)
    dict_tips['phi']=phi_lst
    '''
    phi_lst = []
    for pid, pid_mate in zip(dict_tips['pid'], dict_tips['lesser_pid']):
        tip_xy = np.array((dict_tips['x'][pid], dict_tips['y'][pid]))
        mate_xy = np.array((dict_tips['x'][pid_mate], dict_tips['y'][pid_mate]))
        front_xy = dict_tips['lesser_xy_values'][pid]
        phi1, _phi2 = compute_phases_between_kernel(tip_xy, mate_xy, front_xy)
        # An undefined (nan) angle is mapped to +90 degrees by convention.
        if np.isnan(phi1):
            phi1 = np.pi / 2.
        phi_lst.append(phi1)
    return phi_lst
def compute_phases_between_kernel(xy1_value,xy2_value,xy_values_activation_front):
    '''computes the phases between particle 1 and particle 2 in units of radians.

    ``xy1_value``/``xy2_value`` are (x, y) positions of the two tips;
    ``xy_values_activation_front`` is the ordered (N, 2) array of contour
    points of the activation front joining them.  Returns (phi1, phi2),
    the angle each tip's line-of-sight makes with the front's end tangent.
    #TODO(later, to scale method): convert all subtraction operations to explicitely enforce pbc...
    '''
    # compute displacement unit vector from tip 1 to tip 2
    xy2_minus_xy1_value = xy2_value - xy1_value
    range_value = np.linalg.norm(xy2_minus_xy1_value)
    dx1dx2_hat = xy2_minus_xy1_value / range_value
    xy_values = xy_values_activation_front
    # unit tangents of the activation front at its two ends
    a1 = xy_values[1] - xy_values[0]
    a1_hat = a1 / np.linalg.norm(a1)
    a2 = xy_values[-2] - xy_values[-1]
    a2_hat = a2 / np.linalg.norm(a2)
    # BUGFIX: the original passed the raw (unnormalized) a1/a2 to
    # arcsin(cross(...)), so |cross| could exceed 1 and yield nan; use the
    # unit tangents so the argument is a true sine.
    phi1 = np.arcsin(np.cross(dx1dx2_hat, a1_hat))
    phi2 = np.arcsin(np.cross(-1. * dx1dx2_hat, a2_hat))
    return phi1, phi2
##############
#Example Usage
##############
def simulate_pdict_example(dt=0.001,V_threshold=-50.):
    """Run a short tip-tracking simulation from a cached initial condition and
    return the resulting ParticlePBCDict.

    NOTE(review): this function references several names that are not defined
    in this module (nb_dir, load_buffer, unstack_txt, stack_txt, init_methods,
    ParticlePBCDict, kwargs) — it appears to have been extracted from a
    notebook; confirm the required imports/globals before calling.
    """
    txt_fn=f'{nb_dir}/Data/test_data/ic008.33_t_218.8.npz'
    t=218.8;ds=5.;
    txt=load_buffer(txt_fn)
    inVc,outVc,inmhjdfx,outmhjdfx,dVcdt=unstack_txt(txt)
    width,height=txt.shape[:2]
    print(txt.shape)
    one_step,comp_distance,comp_dict_tips=init_methods(width,height,ds,dt,nb_dir,V_threshold=V_threshold,jump_threshold=40,**kwargs)
    comp_dict_topo_full_color=comp_dict_tips
    #reidentify the tips to be tracked
    img=inVc[...,0];dimgdt=dVcdt[...,0]
    dict_tips=comp_dict_tips(img, dimgdt, t, txt)
    pdict=ParticlePBCDict(dict_tips=dict_tips, width=width, height=width)#, **kwargs)
    t_prev=t;txt_prev=txt.copy()
    #visualize token death system
    x_values=np.array(dict_tips['x'])[:-2]
    y_values=np.array(dict_tips['y'])[:-2]
    c_values=np.array(dict_tips['pid'])[:-2]
    # fig = ShowDomain(img,dimgdt,x_values,y_values,c_values,V_threshold,t,inch=6,
    #             fontsize=16,vmin_img=-85.,vmax_img=35.,area=25,
    #             frameno=None,#frameno,
    #             save_fn=None,#save_fn,
    #             save_folder=None,#save_folder,
    #             save=False,#True,
    #             annotating=True,
    #             axis=[0,img.shape[0],0,img.shape[1]])
    #better method: take more data! (after condensing data to a simple readout)
    #DONE/DONT(later): look for more reliable way to identify activation fronts... use consistency of position over time??
    # HINT: consider looking at mean gating variables from the comp_dict_topo_full_color
    #TODO: linearly record data for ^these spiral tips at a high sampling rate (and fine spatial resolution)
    #TODO(brainwarmer): check Slides for whether tips move along dVdt levelsets or along V levelsets
    #TODO: load/plot system
    #TODO: compute each of the the final scalar values needed for the following...
    #TODO: test angular difference between cartesion acceleration and acceleration in the direction of the activation front versus time
    #TODO: linearly track lesser_arclen of these two death events on a dt=0.001 ms timescale fixed at the basic subpixel resolution
    #TODO: test proposition that lesser_arclen always drops shortly annihilating, perhaps on the 0.01~0.02 (ms?) timescale.
    # ^This would support the mechanism of annihilation involving the connection of activation fronts/strings with some tension to contract
    # ^This would support using a model of spiral tip dynamics along an activation front to inform our reaction rate calculations
    # HINT: consider rate = 1/expected_time_to_death, where the arclength behaves in a predictable way, i.e.
    #       dsigma_max_dt=foo(sigma_max;relative phase?)
    # from inspect import getsource
    # print ( getsource(pdict.record_tips_return_txt))
    # pdict.record_tips_return_txt?
    ntips=len(dict_tips['x'])
    assert(ntips>0)
    inVc,outVc,inmhjdfx,outmhjdfx,dVcdt=unstack_txt(txt)
    frameno=0
    change_time=0.
    duration =0.03
    save_every_n_frames=1
    # advance the PDE solver, sampling tip positions every save_every_n_frames
    while change_time<=duration:
        # while ntips>0:
        frameno+=1
        t+=dt
        change_time+=dt
        one_step(inVc,outVc,inmhjdfx,outmhjdfx,dVcdt)
        if frameno % save_every_n_frames == 0:
            dict_tips=comp_dict_tips(img, dimgdt, t, txt)
            pdict.merge_dict(dict_tips)
            ntips=len(dict_tips['x'])
            print(f"saved at time {t:.3f} ms.",end='\r')
    txt=stack_txt(inVc,outVc,inmhjdfx,outmhjdfx,dVcdt)
    return pdict
if __name__=='__main__':
    # NOTE(review): this demo block references names that are never defined at
    # module scope (pid_pair, phi1, phi2, phi_sum, phi_diff, t_values), so
    # running it as-is raises NameError; it also unpacks compute_phases_between
    # in a different order than the function returns — confirm against the
    # notebook this was extracted from.
    pdict=simulate_pdict_example()#dt=0.001,V_threshold=-50.)
    #compute the pdict
    df, dict_greater_dict, dict_lesser_dict=pdict.separate_data_to_pandas()
    #extract the relevant particles
    d1=df[df.pid==float(pid_pair[0])].copy()
    d2=df[df.pid==float(pid_pair[1])].copy()
    dict_activation_front=dict_lesser_dict[pid_pair[0]]
    #compute the relative phases of spiral tips
    phi1_values,phi2_values,range_values=compute_phases_between(d1,d2,dict_activation_front)
    phi_sum_values, phi_diff_values=comp_relative_phase(phi1_values,phi2_values)
    i=27
    #print results
    print(f"phi1  ,  phi2   = {phi1:.3f},{phi2:.3f} at time {t_values[i]:.3f}.")
    print(f"phi_sum, phi_diff = {phi_sum:.3f},{phi_diff:.3f} at time {t_values[i]:.3f}.")
| StarcoderdataPython |
3237740 | <filename>mayiutils/datasets/feature_selector_wrapper.py
#!/usr/bin/python
# encoding: utf-8
"""
@author: Ian
@file: feature_selector_wrapper.py
@time: 2019-04-22 18:41
https://github.com/WillKoehrsen/feature-selector
从5个维度削减特征
There are five methods used to identify features to remove:
Missing Values
Single Unique Values
Collinear Features
Zero Importance Features
Low Importance Features
使用:
拿到数据后,
1、要把日期数据处理掉,如转换为天数等数值变量;
2、把与业务无关的变量删掉,如客户编号等;
再用feature-selector处理
"""
import pandas as pd
import numpy as np
from feature_selector import FeatureSelector
if __name__ == '__main__':
    # mode selects the workflow below; only mode == 0 (feature selection
    # walk-through) has a body — mode = 1 just loads and preprocesses.
    mode = 1
    df = pd.read_excel('/Users/luoyonggui/Documents/work/dataset/0/data.xlsx')
    # print(df.info())
    """
    RangeIndex: 562 entries, 0 to 561
    Data columns (total 42 columns):
    Unnamed: 0 562 non-null int64
    平台流水号 562 non-null int64
    保单管理机构 562 non-null int64
    保单号 562 non-null int64
    指定受益人标识 562 non-null object
    受益人与被保险人关系 538 non-null object
    交费方式 562 non-null object
    交费期限 562 non-null int64
    核保标识 562 non-null object
    核保结论 544 non-null object
    投保时年龄 562 non-null int64
    基本保额与体检保额起点比例 0 non-null float64
    生调保额起点 0 non-null float64
    投保保额临近核保体检临界点标识 0 non-null float64
    投保保额 562 non-null float64
    临近核保生调临界点标识 0 non-null float64
    理赔金额 562 non-null float64
    累计已交保费 562 non-null float64
    理赔结论 562 non-null object
    Unnamed: 19 562 non-null int64
    生效日期 562 non-null datetime64[ns]
    出险前最后一次复效日期 6 non-null datetime64[ns]
    承保后最小借款日期 2 non-null datetime64[ns]
    出险日期 562 non-null datetime64[ns]
    报案时间 119 non-null datetime64[ns]
    申请日期 562 non-null datetime64[ns]
    出险减生效天数 562 non-null int64
    出险减最后一次复效天数 6 non-null float64
    重疾保单借款减生效日期天数 0 non-null float64
    申请时间减出险时间 562 non-null int64
    报案时间减出险时间 119 non-null float64
    出险原因1 562 non-null object
    出险原因2 0 non-null float64
    出险原因3 0 non-null float64
    出险结果 552 non-null object
    保单借款展期未还次数 0 non-null float64
    失复效记录次数 562 non-null int64
    销售渠道 562 non-null object
    (SELECTDISTINCTLJ.AGENTCODEFRO销售人员工号 562 non-null int64
    被保人核心客户号 562 non-null int64
    保人归并客户号 562 non-null int64
    被保人归并客户号 562 non-null int64
    dtypes: datetime64[ns](6), float64(13), int64(14), object(9)
    """
    #提取出标签数据 (binary label: 1 = abnormal payout, 0 = normal payout)
    label = df['理赔结论']
    label[label != '正常给付'] = int(1)
    label[label == '正常给付'] = int(0)
    label = np.array(list(label))
    # drop the label column and business-irrelevant / date columns before selection
    df = df.drop(columns=['理赔结论'])
    df = df.drop(columns=['Unnamed: 0', '平台流水号', 'Unnamed: 19', '生效日期', '出险日期', '报案时间', '申请日期', '出险前最后一次复效日期', '承保后最小借款日期'])
    if mode == 0:
        """
        标准的data explore步骤
        """
        # print(df.info())# 查看df字段和缺失值信息
        # train_col.remove('平台流水号')
        # train_col.remove('Unnamed: 19')
        # train_col.remove('生效日期')
        # train_col.remove('出险日期')
        # train_col.remove('报案时间')
        # train_col.remove('申请日期')
        # train_col.remove('出险减最后一次复效天数')
        # train_col.remove('报案时间减出险时间')
        # train_col.remove('出险前最后一次复效日期')
        # train_col.remove('承保后最小借款日期')
        fs = FeatureSelector(data=df, labels=label)
        # 缺失值处理
        """
        查找缺失率大于0.6的特征
        """
        fs.identify_missing(missing_threshold=0.6)
        """
        13 features with greater than 0.60 missing values.
        """
        missing_features = fs.ops['missing']
        # 查看缺失特征
        print(missing_features[:10])
        """
        ['基本保额与体检保额起点比例', '生调保额起点', '投保保额临近核保体检临界点标识', '临近核保生调临界点标识', '出险前最后一次复效日期', '承保后最小借款日期', '报案时间', '出险减最后一次复效天数', '重疾保单借款减生效日期天数', '报案时间减出险时间']
        """
        # fs.plot_missing()
        # 查看每个特征的缺失率
        print(fs.missing_stats)
        # 单一值
        fs.identify_single_unique()
        """
        0 features with a single unique value.
        """
        single_unique = fs.ops['single_unique']
        print(single_unique)
        # fs.plot_unique()
        # Collinear (highly correlated) Features
        fs.identify_collinear(correlation_threshold=0.975)
        """
        2 features with a correlation magnitude greater than 0.97.
        """
        correlated_features = fs.ops['collinear']
        print(correlated_features[:5])
        """
        ['报案时间减出险时间', '被保人归并客户号']
        """
        # fs.plot_collinear()
        # fs.plot_collinear(plot_all=True)
        print(fs.record_collinear.head())
        """
          drop_feature corr_feature  corr_value
        0    报案时间减出险时间    申请时间减出险时间    0.985089
        1     被保人归并客户号     被保人核心客户号    1.000000
        """
        # 4. Zero Importance Features
        fs.identify_zero_importance(task='classification', eval_metric='auc',
                                    n_iterations=10, early_stopping=True)
        one_hot_features = fs.one_hot_features
        base_features = fs.base_features
        print('There are %d original features' % len(base_features))
        print('There are %d one-hot features' % len(one_hot_features))
        """
        There are 33 original features
        There are 212 one-hot features
        """
        print(fs.one_hot_features[:20])
        # print(fs.data_all.head(10))
        print(fs.data_all.shape)
        zero_importance_features = fs.ops['zero_importance']
        print(zero_importance_features[:5])
        # fs.plot_feature_importances(threshold=0.99, plot_n=12)
        print(fs.feature_importances.head(10))
class MusicBand:
    """A band with a title, a record label, an optional musician, and albums."""

    def __init__(self, title, label, musician=None):
        self.title = title
        self.label = label
        self.musician = musician
        # Albums are stored in the order they were written.
        self.album = []

    def write_album(self, album):
        """Append *album* to the band's discography."""
        self.album.append(album)
class Musician:
    """A named musician and the instrument they play."""

    def __init__(self, name, instrument):
        self.name = name
        self.instrument = instrument

    def __str__(self):
        # "name instrument", space separated.
        return "{} {}".format(self.name, self.instrument)
class Album:
    """A song together with its genre."""

    def __init__(self, song, genre):
        self.genre = genre
        self.song = song
# Demo: build a band, write three albums, and show its state.
band1 = MusicBand("The Beatles", "Parlophone", "beatles")
for album_title in ("White Album", "Hard Day's Night", "Help"):
    band1.write_album(album_title)
print(band1.title, band1.musician, band1.label, band1.album)
| StarcoderdataPython |
78585 | #!/usr/bin/env python3
#
# Copyright Soramitsu Co., Ltd. All Rights Reserved.
# SPDX-License-Identifier: Apache-2.0
#
import csv
import rst
import glossary
import os
# Generate develop/api/permissions.rst from the permissions matrix CSV.
# Rows are grouped by Type -> Category -> Permission; each permission gets
# its description, notes, related API links, and examples rendered as RST.
perm_type = category = perm = ""
MATRIX_PATH = 'permissions/matrix.csv'
result = ['.. DON\'T MODIFY THE CONTENTS MANUALLY.',
          '   THIS IS AUTOGENERATED FILE.',
          '   All the changes will be lost in case of manual modification.',
          '   For modification change the files in docs/source/permissions.'
          '   Then do "make permissions" before "make html".', '']
result.extend(rst.header("Permissions", 0))
# Map glossary titles to links so terms in descriptions become references.
glossary_links = glossary.titles_to_links(glossary.read_titles())
with open('permissions/introduction.txt') as intro:
    intro_lines = intro.readlines()
    for line in intro_lines:
        result.append(line.strip())
result.append('')
result.extend(rst.header("List of Permissions", 0))
result.extend(rst.permissions_list(MATRIX_PATH))
with open(MATRIX_PATH, newline='') as csvfile:
    reader = csv.DictReader(csvfile)
    for row in reader:
        grantable = False
        # Emit a new section header whenever Type/Category/Permission changes
        # (the CSV is assumed sorted by these columns).
        if row['Type'] != perm_type:
            perm_type = row['Type']
            result.extend(
                rst.header("{}-related permissions".format(row['Type']), 1))
        if row['Category'] != category:
            category = row['Category']
            result.extend(rst.header(category, 2))
        if row['Permission'] != perm:
            perm = row['Permission']
            result.extend(rst.header(perm, 3))
        if row['Grantable'].strip() == 'TRUE':
            grantable = True
            hint = rst.hint('This is a grantable permission.')
            result.extend(hint)
        descr_lines = row['Description'].split('\n')
        descr_lines = list(map(lambda x: x.strip(), descr_lines))
        descr_lines.append('')
        if row['Additional Information'].strip():
            ainfo = row['Additional Information'].split('\n')
            ainfo = list(map(lambda x: x.strip(), ainfo))
            ainfo.append('')
            descr_lines.extend(ainfo)
        links_dict = dict(glossary_links)
        descr_lines_linkified = []
        # Linkify glossary terms in the description, skipping text inside
        # inline literals (between backticks).
        for line in descr_lines:
            tokens = line.split(' ')
            tokens_linkified = []
            skip = False
            for token in tokens:
                if skip:
                    tokens_linkified.append(token)
                if '`' in token:
                    if not skip:
                        tokens_linkified.append(token)
                    if token.count('`') % 2 == 1:
                        skip = not skip
                    continue
                tokens_linkified.append(rst.linkify(token, links_dict, pop=True))
            descr_lines_linkified.append(' '.join(tokens_linkified))
        result.extend(descr_lines_linkified)
        if row['Note'].strip():
            result.extend(rst.note(row['Note']))
        if row['Related Command'].strip():
            rc = row['Related Command'].split('\n')
            rc = map(lambda x: x.strip(), rc)
            rc = filter(lambda x: len(x) > 0, rc)
            rc = list(rc)
            links = []
            related = 'Related API method' + ('s' if len(rc) > 1 else '')
            for link in rc:
                try:
                    links.append(rst.reference(link))
                except Exception:
                    # "TBD" entries are not valid references; anything else is a data error.
                    if (row['Related Command'].strip().lower().startswith('tbd')):
                        links.append('To be done')
                    else:
                        print(row['Related Command'])
                        raise
            result.append('| {}: {}'.format(related, ', '.join(links)))
            result.append('')
        if row['Example'].strip():
            result.extend(rst.example(row['Example']))
        result.extend(rst.excerpt(perm))
result.extend(rst.header('Supplementary Sources', 1))
commons_path = [os.path.pardir] * 2 + ['example', 'python', 'permissions', 'commons.py']
result.extend(rst.listing(commons_path, 'commons.py'))
with open('develop/api/permissions.rst', 'w') as output:
    content = "\n".join(result)
    output.write(content)
    output.flush()
print('done')
| StarcoderdataPython |
1644511 |
# soma = 1/1 + 3/2 + 5/3 + 7/4 + ... + 99/50
soma = 0.0
for denominator in range(1, 51):
    # numerator is the k-th odd number, 2k - 1
    soma += (2 * denominator - 1) / denominator
print(f"{soma:.2f}")
| StarcoderdataPython |
3300299 | from . import *
def create_session_from_user_id(user_id):
    """Insert and return a new Session row for *user_id*.

    Rolls back and raises Exception if the commit fails.
    """
    new_session = Session(user_id=user_id)
    db.session.add(new_session)
    try:
        db.session.commit()
    except Exception:
        db.session.rollback()
        raise Exception('Failure creating session from user_id')
    return new_session
def get_or_create_session(user_id):
    """Return the existing Session for *user_id*, creating one if none exists."""
    existing = Session.query.filter_by(user_id=user_id).first()
    if existing is not None:
        return existing
    return create_session_from_user_id(user_id)
def activate_session(session):
    """Mark *session* active and persist the change.

    Rolls back and raises Exception if the commit fails.
    """
    session.is_active = True
    try:
        db.session.commit()
        return session
    except Exception:
        db.session.rollback()
        # BUGFIX: message previously read "Could activate session".
        raise Exception('Could not activate session')
1767192 | <gh_stars>0
from lib import rpclib
import json
import time
import re
import sys
import pickle
import platform
import os
import subprocess
import signal
from slickrpc import Proxy
from binascii import hexlify
from binascii import unhexlify
from functools import partial
from shutil import copy
operating_system = platform.system()
# readline is POSIX-only; importing it enables line editing and history
# for the input() prompts used throughout this TUI.
if operating_system != 'Win64' and operating_system != 'Windows':
    import readline
def colorize(string, color):
    """Wrap *string* in the ANSI escape sequence for *color*.

    Unknown color names return the string unchanged.  'pink' is accepted as
    an alias for magenta because several callers in this module pass "pink",
    which previously matched nothing and silently produced uncolored output.
    """
    colors = {
        'blue': '\033[94m',
        'magenta': '\033[95m',
        'pink': '\033[95m',  # alias used by existing call sites
        'green': '\033[92m',
        'red': '\033[91m'
    }
    if color in colors:
        return colors[color] + string + '\033[0m'
    return string
def rpc_connection_tui():
    """Interactively collect RPC credentials (or reuse cached ones) and connect.

    Manually entered credentials are cached in connection.json for reuse.
    BUGFIX: the cached-credentials branch contained a redacted "<PASSWORD>"
    token (a syntax error); it now reads the rpc_password variable.  The
    missing-cache path also loops back for new input instead of falling
    through without a connection.
    # TODO: possible to save multiply entries from successfull sessions and ask user to choose then
    """
    while True:
        restore_choice = input("Do you want to use connection details from previous session? [y/n]: ")
        if restore_choice == "y":
            try:
                with open("connection.json", "r") as file:
                    connection_json = json.load(file)
            except FileNotFoundError:
                print(colorize("You do not have cached connection details. Please select n for connection setup", "red"))
                continue  # ask again so the user can choose "n"
            rpc_user = connection_json["rpc_user"]
            rpc_password = connection_json["rpc_password"]
            rpc_port = connection_json["rpc_port"]
            rpc_connection = rpclib.rpc_connect(rpc_user, rpc_password, int(rpc_port))
            break
        elif restore_choice == "n":
            rpc_user = input("Input your rpc user: ")
            rpc_password = input("Input your rpc password: ")
            rpc_port = input("Input your rpc port: ")
            connection_details = {"rpc_user": rpc_user,
                                  "rpc_password": rpc_password,
                                  "rpc_port": rpc_port}
            connection_json = json.dumps(connection_details)
            with open("connection.json", "w+") as file:
                file.write(connection_json)
            rpc_connection = rpclib.rpc_connect(rpc_user, rpc_password, int(rpc_port))
            break
        else:
            print(colorize("Please input y or n", "red"))
    return rpc_connection
def def_credentials(chain):
    """Read rpcuser/rpcpassword/rpcport from the coin's .conf file and return
    a slickrpc Proxy connected to the local daemon.

    The per-OS Komodo data directory is located first; KMD uses komodo.conf,
    any other chain uses <chain>/<chain>.conf.  Exits the process when a
    non-KMD conf has no rpcport line.

    NOTE(review): if the conf file lacks rpcuser/rpcpassword lines, the Proxy
    call below raises NameError — confirm the conf always contains them.
    """
    rpcport ='';
    operating_system = platform.system()
    if operating_system == 'Darwin':
        ac_dir = os.environ['HOME'] + '/Library/Application Support/Komodo'
    elif operating_system == 'Linux':
        ac_dir = os.environ['HOME'] + '/.komodo'
    elif operating_system == 'Win64' or operating_system == 'Windows':
        ac_dir = '%s/komodo/' % os.environ['APPDATA']
    if chain == 'KMD':
        coin_config_file = str(ac_dir + '/komodo.conf')
    else:
        coin_config_file = str(ac_dir + '/' + chain + '/' + chain + '.conf')
    with open(coin_config_file, 'r') as f:
        # Scan the conf line by line for the three rpc settings.
        for line in f:
            l = line.rstrip()
            if re.search('rpcuser', l):
                rpcuser = l.replace('rpcuser=', '')
            elif re.search('rpcpassword', l):
                rpcpassword = l.replace('rpcpassword=', '')
            elif re.search('rpcport', l):
                rpcport = l.replace('rpcport=', '')
    if len(rpcport) == 0:
        if chain == 'KMD':
            rpcport = 7771
        else:
            print("rpcport not in conf file, exiting")
            print("check "+coin_config_file)
            exit(1)
    return(Proxy("http://%s:%s@127.0.0.1:%d"%(rpcuser, rpcpassword, int(rpcport))))
def getinfo_tui(rpc_connection):
    """Print the daemon's getinfo response key by key, then wait for Enter."""
    info_raw = rpclib.getinfo(rpc_connection)
    if not isinstance(info_raw, dict):
        # Anything other than a dict is treated as an error payload.
        print("Error!\n")
        print(info_raw)
        input("\nPress [Enter] to continue...")
        return
    for key, value in info_raw.items():
        print("{}: {}".format(key, value))
    input("Press [Enter] to continue...")
def token_create_tui(rpc_connection):
    """Interactively create a token: prompt for name/supply/description,
    broadcast the creation transaction, and append the txid to tokens_list.

    Ctrl-C at any prompt aborts without broadcasting.
    """
    while True:
        try:
            name = input("Set your token name: ")
            supply = input("Set your token supply: ")
            description = input("Set your token description: ")
        except KeyboardInterrupt:
            break
        else:
            token_hex = rpclib.token_create(rpc_connection, name, supply, description)
            if token_hex['result'] == "error":
                print(colorize("\nSomething went wrong!\n", "pink"))
                print(token_hex)
                print("\n")
                input("Press [Enter] to continue...")
                break
            else:
                try:
                    token_txid = rpclib.sendrawtransaction(rpc_connection,
                                                       token_hex['hex'])
                except KeyError:
                    # NOTE(review): if sendrawtransaction itself raised,
                    # token_txid is unbound here and this print raises
                    # NameError; the finally block below also runs on this
                    # error path — confirm the intended flow.
                    print(token_txid)
                    print("Error")
                    input("Press [Enter] to continue...")
                    break
                finally:
                    print(colorize("Token creation transaction broadcasted: " + token_txid, "green"))
                    file = open("tokens_list", "a")
                    file.writelines(token_txid + "\n")
                    file.close()
                    print(colorize("Entry added to tokens_list file!\n", "green"))
                    input("Press [Enter] to continue...")
                    break
def oracle_create_tui(rpc_connection):
    """Interactively create an oracle: show the supported data-type codes,
    prompt for name/description/type, broadcast the creation transaction,
    and append the txid to oracles_list.

    Ctrl-C at any prompt aborts without broadcasting.
    """
    print(colorize("\nAvailiable data types:\n", "blue"))
    oracles_data_types = ["Ihh -> height, blockhash, merkleroot\ns -> <256 char string\nS -> <65536 char string\nd -> <256 binary data\nD -> <65536 binary data",
                      "c -> 1 byte signed little endian number, C unsigned\nt -> 2 byte signed little endian number, T unsigned",
                      "i -> 4 byte signed little endian number, I unsigned\nl -> 8 byte signed little endian number, L unsigned",
                      "h -> 32 byte hash\n"]
    for oracles_type in oracles_data_types:
        print(str(oracles_type))
    while True:
        try:
            name = input("Set your oracle name: ")
            description = input("Set your oracle description: ")
            oracle_data_type = input("Set your oracle type (e.g. Ihh): ")
        except KeyboardInterrupt:
            break
        else:
            oracle_hex = rpclib.oracles_create(rpc_connection, name, description, oracle_data_type)
            if oracle_hex['result'] == "error":
                print(colorize("\nSomething went wrong!\n", "pink"))
                print(oracle_hex)
                print("\n")
                input("Press [Enter] to continue...")
                break
            else:
                try:
                    oracle_txid = rpclib.sendrawtransaction(rpc_connection, oracle_hex['hex'])
                except KeyError:
                    # NOTE(review): oracle_txid is unbound if the call above
                    # raised, so this print raises NameError; the finally
                    # block also runs on this error path — confirm intent.
                    print(oracle_txid)
                    print("Error")
                    input("Press [Enter] to continue...")
                    break
                finally:
                    print(colorize("Oracle creation transaction broadcasted: " + oracle_txid, "green"))
                    file = open("oracles_list", "a")
                    file.writelines(oracle_txid + "\n")
                    file.close()
                    print(colorize("Entry added to oracles_list file!\n", "green"))
                    input("Press [Enter] to continue...")
                    break
def oracle_register_tui(rpc_connection):
    """Interactively register this node as a publisher for an oracle.

    Lists oracles previously created through this TUI (from oracles_list),
    prompts for the oracle txid and datafee, and broadcasts the registration.
    """
    #TODO: have an idea since blackjoker new RPC call
    #grab all list and printout only or which owner match with node pubkey
    try:
        print(colorize("Oracles created from this instance by TUI: \n", "blue"))
        with open("oracles_list", "r") as file:
            for oracle in file:
                print(oracle)
        print(colorize('_' * 65, "blue"))
        print("\n")
    except FileNotFoundError:
        print("Seems like a no oracles created from this instance yet\n")
        pass
    while True:
        try:
            oracle_id = input("Input txid of oracle you want to register to: ")
            data_fee = input("Set publisher datafee (in satoshis): ")
        except KeyboardInterrupt:
            break
        oracle_register_hex = rpclib.oracles_register(rpc_connection, oracle_id, data_fee)
        if oracle_register_hex['result'] == "error":
            print(colorize("\nSomething went wrong!\n", "pink"))
            print(oracle_register_hex)
            print("\n")
            input("Press [Enter] to continue...")
            break
        else:
            try:
                oracle_register_txid = rpclib.sendrawtransaction(rpc_connection, oracle_register_hex['hex'])
            except KeyError:
                print(oracle_register_hex)
                print("Error")
                input("Press [Enter] to continue...")
                break
            else:
                print(colorize("Oracle registration transaction broadcasted: " + oracle_register_txid, "green"))
                input("Press [Enter] to continue...")
                break
def oracle_subscription_utxogen(rpc_connection):
    """Interactive prompt: subscribe to an oracle publisher N times.

    Each ``oraclessubscribe`` transaction deposits the chosen fee and creates
    a UTXO on the oracle; broadcasting several of them pre-funds multiple
    future ``oraclesdata`` publications.  Every broadcast is re-tried until
    its txid is observed in the mempool.
    """
    # TODO: have an idea since blackjoker new RPC call
    # grab all list and printout only or which owner match with node pubkey
    try:
        print(colorize("Oracles created from this instance by TUI: \n", "blue"))
        with open("oracles_list", "r") as file:
            for oracle in file:
                print(oracle)
        print(colorize('_' * 65, "blue"))
        print("\n")
    except FileNotFoundError:
        print("Seems like a no oracles created from this instance yet\n")
        pass
    while True:
        try:
            oracle_id = input("Input oracle ID you want to subscribe to: ")
            #printout to fast copypaste publisher id
            oracle_info = rpclib.oracles_info(rpc_connection, oracle_id)
            publishers = 0
            print(colorize("\nPublishers registered for a selected oracle: \n", "blue"))
            try:
                for entry in oracle_info["registered"]:
                    publisher = entry["publisher"]
                    print(publisher + "\n")
                    publishers = publishers + 1
                print("Total publishers:{}".format(publishers))
            except (KeyError, ConnectionResetError):
                # oracleinfo did not return a 'registered' list -- bad txid
                print(colorize("Please re-check your input. Oracle txid seems not valid.", "red"))
                pass
            print(colorize('_' * 65, "blue"))
            print("\n")
            if publishers == 0:
                print(colorize("This oracle have no publishers to subscribe.\n"
                               "Please register as an oracle publisher first and/or wait since registration transaciton mined!", "red"))
                input("Press [Enter] to continue...")
                break
            publisher_id = input("Input oracle publisher id you want to subscribe to: ")
            data_fee = input("Input subscription fee (in COINS!): ")
            utxo_num = int(input("Input how many transactions you want to broadcast: "))
        except KeyboardInterrupt:
            break
        while utxo_num > 0:
            # re-broadcast until the subscription tx shows up in the mempool
            while True:
                oracle_subscription_hex = rpclib.oracles_subscribe(rpc_connection, oracle_id, publisher_id, data_fee)
                oracle_subscription_txid = rpclib.sendrawtransaction(rpc_connection, oracle_subscription_hex['hex'])
                mempool = rpclib.get_rawmempool(rpc_connection)
                if oracle_subscription_txid in mempool:
                    break
                else:
                    pass
            print(colorize("Oracle subscription transaction broadcasted: " + oracle_subscription_txid, "green"))
            utxo_num = utxo_num - 1
        input("Press [Enter] to continue...")
        break
def gateways_bind_tui(rpc_connection):
    """Interactive prompt: bind a token and an oracle into a new gateway.

    Walks the operator through selecting a token, an oracle, the external
    coin ticker (all three names must match), the M-of-N multisig pubkey
    set and the external coin's address prefixes, then broadcasts the
    ``gatewaysbind`` transaction and appends its txid to ``gateways_list``.
    """
    # main loop with keyboard interrupt handling
    while True:
        try:
            while True:
                try:
                    print(colorize("Tokens created from this instance by TUI: \n", "blue"))
                    with open("tokens_list", "r") as file:
                        for oracle in file:
                            print(oracle)
                    print(colorize('_' * 65, "blue"))
                    print("\n")
                except FileNotFoundError:
                    print("Seems like a no oracles created from this instance yet\n")
                    pass
                token_id = input("Input id of token you want to use in gw bind: ")
                try:
                    token_name = rpclib.token_info(rpc_connection, token_id)["name"]
                except KeyError:
                    # NOTE(review): this path does not restart the prompt --
                    # the tokeninfo call below will fail again on a bad id.
                    print(colorize("Not valid tokenid. Please try again.", "red"))
                    input("Press [Enter] to continue...")
                token_info = rpclib.token_info(rpc_connection, token_id)
                print(colorize("\n{} token total supply: {}\n".format(token_id, token_info["supply"]), "blue"))
                token_supply = input("Input supply for token binding: ")
                try:
                    print(colorize("\nOracles created from this instance by TUI: \n", "blue"))
                    with open("oracles_list", "r") as file:
                        for oracle in file:
                            print(oracle)
                    print(colorize('_' * 65, "blue"))
                    print("\n")
                except FileNotFoundError:
                    print("Seems like a no oracles created from this instance yet\n")
                    pass
                oracle_id = input("Input id of oracle you want to use in gw bind: ")
                try:
                    oracle_name = rpclib.oracles_info(rpc_connection, oracle_id)["name"]
                except KeyError:
                    # NOTE(review): same caveat as the tokenid branch above
                    print(colorize("Not valid oracleid. Please try again.", "red"))
                    input("Press [Enter] to continue...")
                # gatewaysbind requires token name == oracle name == coin ticker
                while True:
                    coin_name = input("Input external coin ticker (binded oracle and token need to have same name!): ")
                    if token_name == oracle_name and token_name == coin_name:
                        break
                    else:
                        print(colorize("Token name, oracle name and external coin ticker should match!", "red"))
                # M-of-N multisig parameters for the gateway
                while True:
                    M = input("Input minimal amount of pubkeys needed for transaction confirmation (1 for non-multisig gw): ")
                    N = input("Input maximal amount of pubkeys needed for transaction confirmation (1 for non-multisig gw): ")
                    if (int(N) >= int(M)):
                        break
                    else:
                        print("Maximal amount of pubkeys should be more or equal than minimal. Please try again.")
                pubkeys = []
                for i in range(int(N)):
                    pubkeys.append(input("Input pubkey {}: ".format(i+1)))
                # address-encoding constants of the external chain
                pubtype = input("Input pubtype of external coin: ")
                p2shtype = input("Input p2shtype of external coin: ")
                wiftype = input("Input wiftype of external coin: ")
                args = [rpc_connection, token_id, oracle_id, coin_name, token_supply, M, N]
                new_args = [str(pubtype), str(p2shtype), wiftype]
                args = args + pubkeys + new_args
                # broadcasting block
                try:
                    gateways_bind_hex = rpclib.gateways_bind(*args)
                except Exception as e:
                    print(e)
                    input("Press [Enter] to continue...")
                    break
                try:
                    gateways_bind_txid = rpclib.sendrawtransaction(rpc_connection, gateways_bind_hex["hex"])
                except Exception as e:
                    print(e)
                    print(gateways_bind_hex)
                    input("Press [Enter] to continue...")
                    break
                else:
                    print(colorize("Gateway bind transaction broadcasted: " + gateways_bind_txid, "green"))
                    file = open("gateways_list", "a")
                    file.writelines(gateways_bind_txid + "\n")
                    file.close()
                    print(colorize("Entry added to gateways_list file!\n", "green"))
                    input("Press [Enter] to continue...")
                    break
            break
        except KeyboardInterrupt:
            break
# Temporary helper: establishes a connection to the KMD daemon and caches the
# connection details in a separate file ("connection_kmd.json").
def rpc_kmd_connection_tui():
while True:
restore_choice = input("Do you want to use KMD daemon connection details from previous session? [y/n]: ")
if restore_choice == "y":
try:
with open("connection_kmd.json", "r") as file:
connection_json = json.load(file)
rpc_user = connection_json["rpc_user"]
rpc_password = connection_json["rpc_password"]
rpc_port = connection_json["rpc_port"]
rpc_connection_kmd = rpclib.rpc_connect(rpc_user, rpc_password, int(rpc_port))
try:
print(rpc_connection_kmd.getinfo())
print(colorize("Successfully connected!\n", "green"))
input("Press [Enter] to continue...")
break
except Exception as e:
print(e)
print(colorize("NOT CONNECTED!\n", "red"))
input("Press [Enter] to continue...")
break
except FileNotFoundError:
print(colorize("You do not have cached KMD daemon connection details."
" Please select n for connection setup", "red"))
input("Press [Enter] to continue...")
elif restore_choice == "n":
rpc_user = input("Input your rpc user: ")
rpc_password = input("Input your rpc password: ")
rpc_port = input("Input your rpc port: ")
connection_details = {"rpc_user": rpc_user,
"rpc_password": <PASSWORD>,
"rpc_port": rpc_port}
connection_json = json.dumps(connection_details)
with open("connection_kmd.json", "w+") as file:
file.write(connection_json)
rpc_connection_kmd = rpclib.rpc_connect(rpc_user, rpc_password, int(rpc_port))
try:
print(rpc_connection_kmd.getinfo())
print(colorize("Successfully connected!\n", "green"))
input("Press [Enter] to continue...")
break
except Exception as e:
print(e)
print(colorize("NOT CONNECTED!\n", "red"))
input("Press [Enter] to continue...")
break
else:
print(colorize("Please input y or n", "red"))
return rpc_connection_kmd
def z_sendmany_twoaddresses(rpc_connection, sendaddress, recepient1, amount1, recepient2, amount2):
    """Start a z_sendmany paying two recipients; return the RPC operation id."""
    # Build the payload textually so amounts given as strings (e.g. from
    # input()) are parsed into JSON numbers by json.loads.
    entry_tpl = '{{"address":"{}","amount":{}}}'
    payload = "[" + entry_tpl.format(recepient1, amount1) + "," + entry_tpl.format(recepient2, amount2) + "]"
    return rpc_connection.z_sendmany(sendaddress, json.loads(payload))
def operationstatus_to_txid(rpc_connection, zstatus):
    """Resolve a z_sendmany operation id to the txid it produced.

    Raises KeyError if the operation has no 'result' yet (still executing
    or failed).  The previous implementation round-tripped the already
    parsed RPC response through json.dumps/json.loads, which was needless
    and could fail on non-JSON-serializable values (e.g. Decimal amounts).
    """
    # z_getoperationstatus takes a list of opids and returns one entry each
    status_list = rpc_connection.z_getoperationstatus([zstatus])
    return status_list[0]['result']['txid']
def gateways_send_kmd(rpc_connection):
    """Interactive prompt: deposit real KMD onto a gateway address.

    Sends a z_sendmany with two outputs: a 0.0001 marker to the address of
    the pubkey that will later claim tokens, and the operator-chosen amount
    to the gateway deposit address.  The resulting txid is appended to the
    ``deposits_list`` file.
    """
    # TODO: have to handle CTRL+C on text input
    print(colorize("Please be carefull when input wallet addresses and amounts since all transactions doing in real KMD!", "pink"))
    print("Your addresses with balances: ")
    list_address_groupings = rpc_connection.listaddressgroupings()
    for address in list_address_groupings:
        print(str(address) + "\n")
    sendaddress = input("Input address from which you transfer KMD: ")
    recepient1 = input("Input address which belongs to pubkey which will receive tokens: ")
    # fixed marker output so the claiming pubkey appears in the tx
    amount1 = 0.0001
    recepient2 = input("Input gateway deposit address: ")
    # NOTE(review): file handle is opened early and held across the prompts
    file = open("deposits_list", "a")
    #have to show here deposit addresses for gateways created by user
    amount2 = input("Input how many KMD you want to deposit on this gateway: ")
    operation = z_sendmany_twoaddresses(rpc_connection, sendaddress, recepient1, amount1, recepient2, amount2)
    print("Operation proceed! " + str(operation) + " Let's wait 2 seconds to get txid")
    # trying to avoid pending status of operation
    time.sleep(2)
    txid = operationstatus_to_txid(rpc_connection, operation)
    file.writelines(txid + "\n")
    file.close()
    print(colorize("KMD Transaction ID: " + str(txid) + " Entry added to deposits_list file", "green"))
    input("Press [Enter] to continue...")
def gateways_deposit_tui(rpc_connection_assetchain, rpc_connection_komodo):
    """Interactive prompt: prove a KMD deposit to the asset-chain gateway.

    Fetches the deposit transaction's height, raw hex and merkle proof from
    the KMD node, then builds and broadcasts a ``gatewaysdeposit`` tx on the
    asset chain so the depositor can later claim tokens.
    """
    while True:
        bind_txid = input("Input your gateway bind txid: ")
        coin_name = input("Input your external coin ticker (e.g. KMD): ")
        coin_txid = input("Input your deposit txid: ")
        dest_pub = input("Input pubkey which claim deposit: ")
        amount = input("Input amount of your deposit: ")
        height = rpc_connection_komodo.getrawtransaction(coin_txid, 1)["height"]
        deposit_hex = rpc_connection_komodo.getrawtransaction(coin_txid, 1)["hex"]
        claim_vout = "0"
        proof_sending_block = "[\"{}\"]".format(coin_txid)
        # merkle branch proving the deposit tx is in a KMD block
        proof = rpc_connection_komodo.gettxoutproof(json.loads(proof_sending_block))
        # note: deposit_hex is reused -- raw KMD tx hex goes in, the
        # gatewaysdeposit RPC response comes out
        deposit_hex = rpclib.gateways_deposit(rpc_connection_assetchain, bind_txid, str(height), coin_name, \
                                 coin_txid, claim_vout, deposit_hex, proof, dest_pub, amount)
        print(deposit_hex)
        deposit_txid = rpclib.sendrawtransaction(rpc_connection_assetchain, deposit_hex["hex"])
        print("Done! Gateways deposit txid is: " + deposit_txid + " Please not forget to claim your deposit!")
        input("Press [Enter] to continue...")
        break
def gateways_claim_tui(rpc_connection):
    """Interactive prompt: claim gateway tokens for a confirmed deposit.

    Builds a ``gatewaysclaim`` transaction from the bind txid, coin ticker,
    deposit txid, claiming pubkey and amount, then broadcasts it.
    """
    while True:
        bind_txid = input("Input your gateway bind txid: ")
        coin_name = input("Input your external coin ticker (e.g. KMD): ")
        deposit_txid = input("Input your gatewaysdeposit txid: ")
        dest_pub = input("Input pubkey which claim deposit: ")
        amount = input("Input amount of your deposit: ")
        claim_hex = rpclib.gateways_claim(rpc_connection, bind_txid, coin_name, deposit_txid, dest_pub, amount)
        try:
            claim_txid = rpclib.sendrawtransaction(rpc_connection, claim_hex["hex"])
        except Exception as e:
            # show both the exception and the raw RPC response for debugging
            print(e)
            print(claim_hex)
            input("Press [Enter] to continue...")
            break
        else:
            print("Succesfully claimed! Claim transaction id: " + claim_txid)
            input("Press [Enter] to continue...")
            break
def gateways_withdrawal_tui(rpc_connection):
    """Interactive prompt: withdraw tokens back through a gateway.

    Builds and broadcasts a ``gatewayswithdraw`` transaction for the given
    bind txid, coin ticker, destination pubkey and amount, then prints the
    resulting txid.
    """
    while True:
        bind_txid = input("Input your gateway bind txid: ")
        coin_name = input("Input your external coin ticker (e.g. KMD): ")
        withdraw_pub = input("Input pubkey to which you want to withdraw: ")
        amount = input("Input amount of withdrawal: ")
        withdraw_hex = rpclib.gateways_withdraw(rpc_connection, bind_txid, coin_name, withdraw_pub, amount)
        withdraw_txid = rpclib.sendrawtransaction(rpc_connection, withdraw_hex["hex"])
        print(withdraw_txid)
        input("Press [Enter] to continue...")
        break
def print_mempool(rpc_connection):
    """Interactively display the node's mempool; R refreshes, E exits."""
    while True:
        txids = rpclib.get_rawmempool(rpc_connection)
        print(colorize("Transactions in mempool: \n", "magenta"))
        shown = 0
        for txid in txids:
            print(txid + "\n")
            shown += 1
        print("Total: " + str(shown) + " transactions\n")
        print("R + Enter to refresh list. E + Enter to exit menu." + "\n")
        choice = input("Choose your destiny: ")
        if choice == "E":
            print("\n")
            break
        if choice == "R":
            print("\n")
            continue
        print("\nPlease choose R or E\n")
def print_tokens_list(rpc_connection):
    """Placeholder: print all tokens on the chain (not implemented yet)."""
    # TODO: have to print it with tokeninfo to have sense
    pass
def print_tokens_balances(rpc_connection):
    """Placeholder: print this node's non-zero token balances (not implemented)."""
    # TODO: checking tokenbalance for each token from tokenlist and reflect non zero ones
    pass
def hexdump(filename, chunk_size=1<<15):
    """Return the contents of *filename* as a lowercase hex string.

    Reads the file in blocks of *chunk_size* bytes, hex-encodes each block
    and joins them once at the end (the previous version concatenated with
    ``+=`` which is quadratic on large files).
    """
    parts = []
    with open(filename, 'rb') as file:
        for chunk in iter(partial(file.read, chunk_size), b''):
            parts.append(hexlify(chunk).decode())
    return "".join(parts)
def convert_file_oracle_d(rpc_connection):
    """Upload a small file (<= 256 bytes) to a new type-'d' oracle.

    Hex-encodes the file, prefixes the 1-byte length required by the 'd'
    data type, then creates/registers/subscribes a throwaway oracle named
    "tonyconvert" and publishes the payload with ``oraclesdata``.

    Fix vs. previous version: on a failed ``sendrawtransaction`` the success
    messages were still printed and referenced the unbound
    ``oracle_data_txid`` (NameError); the success path now lives in ``else``.
    """
    while True:
        path = input("Input path to file you want to upload to oracle: ")
        try:
            # hexdump(path, 1) returns the whole file as hex; [2:] drops the
            # first encoded byte (kept for compatibility with the original)
            hex_data = (hexdump(path, 1))[2:]
        except Exception as e:
            print(e)
            print("Seems something goes wrong (I guess you've specified wrong path)!")
            input("Press [Enter] to continue...")
            break
        else:
            length = round(len(hex_data) / 2)
            if length > 256:
                # 'd' type carries a single-byte length prefix -> 256 max
                print("Length: " + str(length) + " bytes")
                print("File is too big for this app")
                input("Press [Enter] to continue...")
                break
            else:
                # 1-byte big-endian length prefix, then the payload
                hex_length = format(length, '#04x')[2:]
                data_for_oracle = str(hex_length) + hex_data
                print("File hex representation: \n")
                print(data_for_oracle + "\n")
                print("Length: " + str(length) + " bytes")
                print("File converted!")
                # create -> register -> subscribe, waiting for the
                # subscription to be mined so the oracle has funds
                new_oracle_hex = rpclib.oracles_create(rpc_connection, "tonyconvert", path, "d")
                new_oracle_txid = rpclib.sendrawtransaction(rpc_connection, new_oracle_hex["hex"])
                time.sleep(0.5)
                oracle_register_hex = rpclib.oracles_register(rpc_connection, new_oracle_txid, "10000")
                oracle_register_txid = rpclib.sendrawtransaction(rpc_connection, oracle_register_hex["hex"])
                time.sleep(0.5)
                oracle_subscribe_hex = rpclib.oracles_subscribe(rpc_connection, new_oracle_txid, rpclib.getinfo(rpc_connection)["pubkey"], "0.001")
                oracle_subscribe_txid = rpclib.sendrawtransaction(rpc_connection, oracle_subscribe_hex["hex"])
                time.sleep(0.5)
                while True:
                    mempool = rpclib.get_rawmempool(rpc_connection)
                    if oracle_subscribe_txid in mempool:
                        print("Waiting for oracle subscribtion tx to be mined" + "\n")
                        time.sleep(6)
                        pass
                    else:
                        break
                oracles_data_hex = rpclib.oracles_data(rpc_connection, new_oracle_txid, data_for_oracle)
                try:
                    oracle_data_txid = rpclib.sendrawtransaction(rpc_connection, oracles_data_hex["hex"])
                except Exception as e:
                    print(oracles_data_hex)
                    print(e)
                else:
                    print("Oracle created: " + str(new_oracle_txid))
                    print("Data published: " + str(oracle_data_txid))
                input("Press [Enter] to continue...")
                break
def convert_file_oracle_D(rpc_connection):
    """Upload a file to a new type-'D' oracle, chunked for large files.

    Files over 8000 bytes are split into <= 8000-byte chunks; a throwaway
    oracle named ``tonyconvert_<chunks>`` is created, registered and
    subscribed to (one subscription UTXO per chunk, max ~100 per block),
    then every chunk is published with ``oraclesdata`` with a 2-byte
    little-endian length prefix, throttled to ~100 txs per block.
    """
    while True:
        path = input("Input path to file you want to upload to oracle: ")
        try:
            hex_data = (hexdump(path, 1))
        except Exception as e:
            print(e)
            print("Seems something goes wrong (I guess you've specified wrong path)!")
            input("Press [Enter] to continue...")
            break
        else:
            length = round(len(hex_data) / 2)
            # if length > 800000:
            #     print("Too big file size to upload for this version of program. Maximum size is 800KB.")
            #     input("Press [Enter] to continue...")
            #     break
            if length > 8000:
                # if file is more than 8000 bytes - slicing it to <= 8000 bytes chunks (16000 symbols = 8000 bytes)
                data = [hex_data[i:i + 16000] for i in range(0, len(hex_data), 16000)]
                chunks_amount = len(data)
                # TODO: have to create oracle but subscribe this time chunks amount times to send whole file in same block
                # TODO: 2 - on some point file will not fit block - have to find this point
                # TODO: 3 way how I want to implement it first will keep whole file in RAM - have to implement some way to stream chunks to oracle before whole file readed
                # TODO: have to "optimise" registration fee
                # Maybe just check size first by something like a du ?
                print("Length: " + str(length) + " bytes.\n Chunks amount: " + str(chunks_amount))
                new_oracle_hex = rpclib.oracles_create(rpc_connection, "tonyconvert_" + str(chunks_amount), path, "D")
                new_oracle_txid = rpclib.sendrawtransaction(rpc_connection, new_oracle_hex["hex"])
                time.sleep(0.5)
                oracle_register_hex = rpclib.oracles_register(rpc_connection, new_oracle_txid, "10000")
                oracle_register_txid = rpclib.sendrawtransaction(rpc_connection, oracle_register_hex["hex"])
                # subscribe chunks_amount + 1 times, but lets limit our broadcasting 100 tx per block (800KB/block)
                if chunks_amount > 100:
                    utxo_num = 101
                else:
                    utxo_num = chunks_amount
                while utxo_num > 0:
                    # re-broadcast each subscription until it reaches mempool
                    while True:
                        oracle_subscription_hex = rpclib.oracles_subscribe(rpc_connection, new_oracle_txid, rpclib.getinfo(rpc_connection)["pubkey"], "0.001")
                        oracle_subscription_txid = rpclib.sendrawtransaction(rpc_connection,
                                                                             oracle_subscription_hex['hex'])
                        mempool = rpclib.get_rawmempool(rpc_connection)
                        if oracle_subscription_txid in mempool:
                            break
                        else:
                            pass
                    print(colorize("Oracle subscription transaction broadcasted: " + oracle_subscription_txid, "green"))
                    utxo_num = utxo_num - 1
                # waiting for last broadcasted subscribtion transaction to be mined to be sure that money are on oracle balance
                while True:
                    mempool = rpclib.get_rawmempool(rpc_connection)
                    if oracle_subscription_txid in mempool:
                        print("Waiting for oracle subscribtion tx to be mined" + "\n")
                        time.sleep(6)
                        pass
                    else:
                        break
                print("Oracle preparation is finished. Oracle txid: " + new_oracle_txid)
                # can publish data now
                counter = 0
                for chunk in data:
                    # 2-byte length prefix, byte-swapped to little endian
                    hex_length_bigendian = format(round(len(chunk) / 2), '#06x')[2:]
                    # swap to get little endian length
                    a = hex_length_bigendian[2:]
                    b = hex_length_bigendian[:2]
                    hex_length = a + b
                    data_for_oracle = str(hex_length) + chunk
                    counter = counter + 1
                    # print("Chunk number: " + str(counter) + "\n")
                    # print(data_for_oracle)
                    try:
                        oracles_data_hex = rpclib.oracles_data(rpc_connection, new_oracle_txid, data_for_oracle)
                    except Exception as e:
                        # NOTE(review): this break only exits the for loop;
                        # execution then reaches the "Last baton" print where
                        # oracle_data_txid may be unbound -- TODO confirm/fix
                        print(data_for_oracle)
                        print(e)
                        input("Press [Enter] to continue...")
                        break
                    # on broadcasting ensuring that previous one reached mempool before blast next one
                    while True:
                        mempool = rpclib.get_rawmempool(rpc_connection)
                        oracle_data_txid = rpclib.sendrawtransaction(rpc_connection, oracles_data_hex["hex"])
                        #time.sleep(0.1)
                        if oracle_data_txid in mempool:
                            break
                        else:
                            pass
                    # blasting not more than 100 at once (so maximum capacity per block can be changed here)
                    # but keep in mind that registration UTXOs amount needs to be changed too !
                    if counter % 100 == 0 and chunks_amount > 100:
                        while True:
                            mempool = rpclib.get_rawmempool(rpc_connection)
                            if oracle_data_txid in mempool:
                                print("Waiting for previous data chunks to be mined before send new ones" + "\n")
                                print("Sent " + str(counter) + " chunks from " + str(chunks_amount))
                                time.sleep(6)
                                pass
                            else:
                                break
                print("Last baton: " + oracle_data_txid)
                input("Press [Enter] to continue...")
                break
            # if file suits single oraclesdata just broadcasting it straight without any slicing
            else:
                hex_length_bigendian = format(length, '#06x')[2:]
                # swap to get little endian length
                a = hex_length_bigendian[2:]
                b = hex_length_bigendian[:2]
                hex_length = a + b
                data_for_oracle = str(hex_length) + hex_data
                print("File hex representation: \n")
                print(data_for_oracle + "\n")
                print("Length: " + str(length) + " bytes")
                print("File converted!")
                new_oracle_hex = rpclib.oracles_create(rpc_connection, "tonyconvert_" + "1", path, "D")
                new_oracle_txid = rpclib.sendrawtransaction(rpc_connection, new_oracle_hex["hex"])
                time.sleep(0.5)
                oracle_register_hex = rpclib.oracles_register(rpc_connection, new_oracle_txid, "10000")
                oracle_register_txid = rpclib.sendrawtransaction(rpc_connection, oracle_register_hex["hex"])
                time.sleep(0.5)
                oracle_subscribe_hex = rpclib.oracles_subscribe(rpc_connection, new_oracle_txid, rpclib.getinfo(rpc_connection)["pubkey"], "0.001")
                oracle_subscribe_txid = rpclib.sendrawtransaction(rpc_connection, oracle_subscribe_hex["hex"])
                time.sleep(0.5)
                while True:
                    mempool = rpclib.get_rawmempool(rpc_connection)
                    if oracle_subscribe_txid in mempool:
                        print("Waiting for oracle subscribtion tx to be mined" + "\n")
                        time.sleep(6)
                        pass
                    else:
                        break
                oracles_data_hex = rpclib.oracles_data(rpc_connection, new_oracle_txid, data_for_oracle)
                try:
                    oracle_data_txid = rpclib.sendrawtransaction(rpc_connection, oracles_data_hex["hex"])
                except Exception as e:
                    print(oracles_data_hex)
                    print(e)
                    input("Press [Enter] to continue...")
                    break
                else:
                    print("Oracle created: " + str(new_oracle_txid))
                    print("Data published: " + str(oracle_data_txid))
                    input("Press [Enter] to continue...")
                    break
def get_files_list(rpc_connection):
    """Scan all oracles and return display strings for file-upload oracles.

    A file oracle is recognized by its name starting with 'tonyconvert_'
    (as created by the convert_file_oracle_* helpers).
    """
    start_time = time.time()
    files_list = []
    for oracle_txid in rpclib.oracles_list(rpc_connection):
        info = rpclib.oracles_info(rpc_connection, oracle_txid)
        oracle_name = info['name']
        if oracle_name.startswith('tonyconvert_'):
            files_list.append('[' + oracle_name + ': ' + info['description'] + ']: ' + oracle_txid)
    print("--- %s seconds ---" % (time.time() - start_time))
    return files_list
def display_files_list(rpc_connection):
    """Print every file-upload oracle found on chain, then wait for Enter."""
    print("Scanning oracles. Please wait...")
    for entry in get_files_list(rpc_connection):
        print(entry + "\n")
    input("Press [Enter] to continue...")
def files_downloader(rpc_connection):
    """Interactive prompt: download a file stored in a 'tonyconvert_' oracle.

    Reads the chunk count from the oracle name suffix, pulls that many
    samples starting at the latest baton txid, and appends the unhexlified
    chunks (in reverse sample order, i.e. original file order) to the
    chosen output path.
    """
    while True:
        display_files_list(rpc_connection)
        print("\n")
        oracle_id = input("Input oracle ID you want to download file from: ")
        output_path = input("Input output path for downloaded file (name included) e.g. /home/test.txt: ")
        oracle_info = rpclib.oracles_info(rpc_connection, oracle_id)
        name = oracle_info['name']
        latest_baton_txid = oracle_info['registered'][0]['batontxid']
        if name[0:12] == 'tonyconvert_':
            # downloading process here
            # the oracle name encodes the number of published chunks
            chunks_amount = int(name[12:])
            data = rpclib.oracles_samples(rpc_connection, oracle_id, latest_baton_txid, str(chunks_amount))["samples"]
            # samples come newest-first; reverse to rebuild the file in order
            for chunk in reversed(data):
                with open(output_path, 'ab+') as file:
                    file.write(unhexlify(chunk[0]))
            print("I hope that file saved to " + output_path + "\n")
            input("Press [Enter] to continue...")
            break
        else:
            print("I cant recognize file inside this oracle. I'm very sorry, boss.")
            input("Press [Enter] to continue...")
            break
def marmara_receive_tui(rpc_connection):
    """Interactive prompt: create and broadcast a marmarareceive request.

    Loops (re-prompting on failure) until a request is broadcast; the txid
    and full RPC response are appended to ``receive_txids.txt``.

    Fix vs. previous version: when ``marmarareceive`` itself raised, the
    except branch printed the unbound ``marmara_receive_txinfo`` and died
    with NameError; the diagnostic print is now guarded.
    """
    while True:
        issuer_pubkey = input("Input pubkey of person who do you want to receive MARMARA from: ")
        issuance_sum = input("Input amount of MARMARA you want to receive: ")
        blocks_valid = input("Input amount of blocks for cheque matures: ")
        marmara_receive_txinfo = None
        try:
            marmara_receive_txinfo = rpc_connection.marmarareceive(issuer_pubkey, issuance_sum, "MARMARA", blocks_valid)
            marmara_receive_txid = rpc_connection.sendrawtransaction(marmara_receive_txinfo["hex"])
            print("Marmara receive txid broadcasted: " + marmara_receive_txid + "\n")
            print(json.dumps(marmara_receive_txinfo, indent=4, sort_keys=True) + "\n")
            with open("receive_txids.txt", 'a+') as file:
                file.write(marmara_receive_txid + "\n")
                file.write(json.dumps(marmara_receive_txinfo, indent=4, sort_keys=True) + "\n")
            print("Transaction id is saved to receive_txids.txt file.")
            input("Press [Enter] to continue...")
            break
        except Exception as e:
            # only show the RPC response when the first call actually returned
            if marmara_receive_txinfo is not None:
                print(marmara_receive_txinfo)
            print(e)
            print("Something went wrong. Please check your input")
def marmara_issue_tui(rpc_connection):
    """Interactive prompt: issue MARMARA against an approved receive request.

    Loops (re-prompting on failure) until an issuance is broadcast; the txid
    and full RPC response are appended to ``issue_txids.txt``.

    Fix vs. previous version: when ``marmaraissue`` itself raised, the
    except branch printed the unbound ``marmara_issue_txinfo`` and died
    with NameError; the diagnostic print is now guarded.
    """
    while True:
        receiver_pubkey = input("Input pubkey of person who do you want to issue MARMARA: ")
        issuance_sum = input("Input amount of MARMARA you want to issue: ")
        maturing_block = input("Input number of block on which issuance mature: ")
        approval_txid = input("Input receiving request transaction id: ")
        marmara_issue_txinfo = None
        try:
            marmara_issue_txinfo = rpc_connection.marmaraissue(receiver_pubkey, issuance_sum, "MARMARA", maturing_block, approval_txid)
            marmara_issue_txid = rpc_connection.sendrawtransaction(marmara_issue_txinfo["hex"])
            print("Marmara issuance txid broadcasted: " + marmara_issue_txid + "\n")
            print(json.dumps(marmara_issue_txinfo, indent=4, sort_keys=True) + "\n")
            with open("issue_txids.txt", "a+") as file:
                file.write(marmara_issue_txid + "\n")
                file.write(json.dumps(marmara_issue_txinfo, indent=4, sort_keys=True) + "\n")
            print("Transaction id is saved to issue_txids.txt file.")
            input("Press [Enter] to continue...")
            break
        except Exception as e:
            if marmara_issue_txinfo is not None:
                print(marmara_issue_txinfo)
            print(e)
            print("Something went wrong. Please check your input")
def marmara_creditloop_tui(rpc_connection):
    """Interactive prompt: pretty-print info about a MARMARA credit loop.

    Fix vs. previous version: when ``marmaracreditloop`` itself raised, the
    except branch printed the unbound ``marmara_creditloop_info`` and died
    with NameError; the diagnostic print is now guarded.
    """
    while True:
        loop_txid = input("Input transaction ID of credit loop you want to get info about: ")
        marmara_creditloop_info = None
        try:
            marmara_creditloop_info = rpc_connection.marmaracreditloop(loop_txid)
            print(json.dumps(marmara_creditloop_info, indent=4, sort_keys=True) + "\n")
            input("Press [Enter] to continue...")
            break
        except Exception as e:
            if marmara_creditloop_info is not None:
                print(marmara_creditloop_info)
            print(e)
            print("Something went wrong. Please check your input")
def marmara_settlement_tui(rpc_connection):
    """Interactive prompt: settle a MARMARA credit loop.

    Broadcasts the ``marmarasettlement`` transaction and appends its txid
    plus the full RPC response to ``settlement_txids.txt``.

    Fix vs. previous version: when ``marmarasettlement`` itself raised, the
    except branch printed the unbound ``marmara_settlement_info`` and died
    with NameError; the diagnostic print is now guarded.
    """
    while True:
        loop_txid = input("Input transaction ID of credit loop to make settlement: ")
        marmara_settlement_info = None
        try:
            marmara_settlement_info = rpc_connection.marmarasettlement(loop_txid)
            marmara_settlement_txid = rpc_connection.sendrawtransaction(marmara_settlement_info["hex"])
            print("Loop " + loop_txid + " succesfully settled!\nSettlement txid: " + marmara_settlement_txid)
            with open("settlement_txids.txt", "a+") as file:
                file.write(marmara_settlement_txid + "\n")
                file.write(json.dumps(marmara_settlement_info, indent=4, sort_keys=True) + "\n")
            print("Transaction id is saved to settlement_txids.txt file.")
            input("Press [Enter] to continue...")
            break
        except Exception as e:
            if marmara_settlement_info is not None:
                print(marmara_settlement_info)
            print(e)
            print("Something went wrong. Please check your input")
            input("Press [Enter] to continue...")
            break
def marmara_lock_tui(rpc_connection):
    """Interactive prompt: lock coins for MARMARA settlement and staking.

    Broadcasts a ``marmaralock`` transaction for the chosen amount and
    unlock height; the txid and full RPC response are appended to
    ``lock_txids.txt``.
    """
    while True:
        amount = input("Input amount of coins you want to lock for settlement and staking: ")
        unlock_height = input("Input height on which coins should be unlocked: ")
        try:
            marmara_lock_info = rpc_connection.marmaralock(amount, unlock_height)
            marmara_lock_txid = rpc_connection.sendrawtransaction(marmara_lock_info["hex"])
            with open("lock_txids.txt", "a+") as file:
                file.write(marmara_lock_txid + "\n")
                file.write(json.dumps(marmara_lock_info, indent=4, sort_keys=True) + "\n")
            print("Transaction id is saved to lock_txids.txt file.")
            input("Press [Enter] to continue...")
            break
        except Exception as e:
            print(e)
            print("Something went wrong. Please check your input")
            input("Press [Enter] to continue...")
            break
def marmara_info_tui(rpc_connection):
    """Interactive prompt: query and pretty-print ``marmarainfo``.

    Empty inputs default to "0"; the issuer pubkey is optional and, when
    given, narrows the query.

    Fix vs. previous version: when ``marmarainfo`` itself raised, the
    except branch printed the unbound ``marmara_info`` and died with
    NameError; the diagnostic print is now guarded.
    """
    while True:
        firstheight = input("Input first height (default 0): ")
        if not firstheight:
            firstheight = "0"
        lastheight = input("Input last height (default current (0) ): ")
        if not lastheight:
            lastheight = "0"
        minamount = input("Input min amount (default 0): ")
        if not minamount:
            minamount = "0"
        maxamount = input("Input max amount (default 0): ")
        if not maxamount:
            maxamount = "0"
        issuerpk = input("Optional. Input issuer public key: ")
        marmara_info = None
        try:
            if issuerpk:
                marmara_info = rpc_connection.marmarainfo(firstheight, lastheight, minamount, maxamount, "MARMARA", issuerpk)
            else:
                marmara_info = rpc_connection.marmarainfo(firstheight, lastheight, minamount, maxamount)
            print(json.dumps(marmara_info, indent=4, sort_keys=True) + "\n")
            input("Press [Enter] to continue...")
            break
        except Exception as e:
            if marmara_info is not None:
                print(marmara_info)
            print(e)
            print("Something went wrong. Please check your input")
            input("Press [Enter] to continue...")
            break
def rogue_game_info(rpc_connection, game_txid):
    """Fetch on-chain info for a rogue game via the cclib 'gameinfo' call."""
    # cclib takes a quoted JSON-ish array with %22-escaped inner quotes
    payload = '"[%22{}%22]"'.format(game_txid)
    return rpc_connection.cclib("gameinfo", "17", payload)
def rogue_game_register(rpc_connection, game_txid, player_txid = False):
    """Register for a rogue game, optionally re-using an existing player."""
    parts = [game_txid]
    if player_txid:
        parts.append(player_txid)
    # one or two %22-quoted entries, comma separated, inside a quoted array
    payload = '"[' + ",".join("%22{}%22".format(p) for p in parts) + ']"'
    return rpc_connection.cclib("register", "17", payload)
def rogue_pending(rpc_connection):
    """Return the list of pending (joinable) rogue games."""
    return rpc_connection.cclib("pending", "17")
def rogue_bailout(rpc_connection, game_txid):
    """Request a bailout transaction for the given rogue game via cclib."""
    payload = '"[%22{}%22]"'.format(game_txid)
    return rpc_connection.cclib("bailout", "17", payload)
def rogue_highlander(rpc_connection, game_txid):
    """Request a highlander (winner-takes-all) claim for a rogue game."""
    payload = '"[%22{}%22]"'.format(game_txid)
    return rpc_connection.cclib("highlander", "17", payload)
def rogue_players_list(rpc_connection):
    """Return this node's rogue players via the cclib 'players' call."""
    return rpc_connection.cclib("players", "17")
def rogue_player_info(rpc_connection, playertxid):
    """Fetch info for a single rogue player via the cclib 'playerinfo' call."""
    payload = '"[%22{}%22]"'.format(playertxid)
    return rpc_connection.cclib("playerinfo", "17", payload)
def rogue_extract(rpc_connection, game_txid, pubkey):
    """Extract the game state for *pubkey* via the cclib 'extract' call."""
    payload = '"[%22{}%22,%22{}%22]"'.format(game_txid, pubkey)
    return rpc_connection.cclib("extract", "17", payload)
def rogue_keystrokes(rpc_connection, game_txid, keystroke):
    """Broadcast a keystroke batch for a rogue game via cclib 'keystrokes'."""
    payload = '"[%22{}%22,%22{}%22]"'.format(game_txid, keystroke)
    return rpc_connection.cclib("keystrokes", "17", payload)
def print_multiplayer_games_list(rpc_connection):
    """Interactively list joinable multiplayer rogue games; R refreshes, E exits.

    Filters the pending games to those with ``maxplayers > 1`` and prints
    each game's parameters and registered player slots.
    """
    while True:
        pending_list = rogue_pending(rpc_connection)
        multiplayer_pending_list = []
        # keep only games that allow more than one player
        for game in pending_list["pending"]:
            if rogue_game_info(rpc_connection, game)["maxplayers"] > 1:
                multiplayer_pending_list.append(game)
        print("Multiplayer games availiable to join: \n")
        for active_multiplayer_game in multiplayer_pending_list:
            game_info = rogue_game_info(rpc_connection, active_multiplayer_game)
            print(colorize("\n================================\n", "green"))
            print("Game txid: " + game_info["gametxid"])
            print("Game buyin: " + str(game_info["buyin"]))
            print("Game height: " + str(game_info["gameheight"]))
            print("Start height: " + str(game_info["start"]))
            print("Alive players: " + str(game_info["alive"]))
            print("Registered players: " + str(game_info["numplayers"]))
            print("Max players: " + str(game_info["maxplayers"]))
            print(colorize("\n***\n", "blue"))
            print("Players in game:")
            for player in game_info["players"]:
                print("Slot: " + str(player["slot"]))
                # baton/tokenid are only present for some player states
                if "baton" in player.keys():
                    print("Baton: " + str(player["baton"]))
                if "tokenid" in player.keys():
                    print("Tokenid: " + str(player["tokenid"]))
                print("Is mine?: " + str(player["ismine"]))
        print(colorize("\nR + Enter - refresh list.\nE + Enter - to the game choice.\nCTRL + C - back to main menu", "blue"))
        is_refresh = input("Choose your destiny: ")
        if is_refresh == "R":
            print("\n")
            pass
        elif is_refresh == "E":
            print("\n")
            break
        else:
            print("\nPlease choose R or E\n")
def rogue_newgame_singleplayer(rpc_connection):
try:
new_game_txid = rpc_connection.cclib("newgame", "17", "[1]")["txid"]
print("New singleplayer training game succesfully created. txid: " + new_game_txid)
while True:
mempool = rpc_connection.getrawmempool()
if new_game_txid in mempool:
print(colorize("Waiting for game transaction to be mined", "blue"))
time.sleep(5)
else:
print(colorize("Game transaction is mined", "green"))
break
players_list = rogue_players_list(rpc_connection)
if len(players_list["playerdata"]) > 0:
print_players_list(rpc_connection)
while True:
is_choice_needed = input("Do you want to choose a player for this game? [y/n] ")
if is_choice_needed == "y":
player_txid = input("Please input player txid: ")
newgame_regisration_txid = rogue_game_register(rpc_connection, new_game_txid, player_txid)["txid"]
break
elif is_choice_needed == "n":
set_warriors_name(rpc_connection)
newgame_regisration_txid = rogue_game_register(rpc_connection, new_game_txid)["txid"]
break
else:
print("Please choose y or n !")
else:
print("No players available to select")
input("Press [Enter] to continue...")
newgame_regisration_txid = rogue_game_register(rpc_connection, new_game_txid)["txid"]
while True:
mempool = rpc_connection.getrawmempool()
if newgame_regisration_txid in mempool:
print(colorize("Waiting for registration transaction to be mined", "blue"))
time.sleep(5)
else:
print(colorize("Registration transaction is mined", "green"))
break
game_info = rogue_game_info(rpc_connection, new_game_txid)
start_time = time.time()
while True:
subprocess.call(["cc/rogue/rogue", str(game_info["seed"]), str(game_info["gametxid"])])
time_elapsed = time.time() - start_time
if time_elapsed > 1:
break
else:
print("Game less than 1 second. Trying to start again")
time.sleep(1)
game_end_height = int(rpc_connection.getinfo()["blocks"])
while True:
current_height = int(rpc_connection.getinfo()["blocks"])
height_difference = current_height - game_end_height
if height_difference == 0:
print(current_height)
print(game_end_height)
print(colorize("Waiting for next block before bailout", "blue"))
time.sleep(5)
else:
break
#print("\nKeystrokes of this game:\n")
#time.sleep(0.5)
while True:
keystrokes_rpc_responses = find_game_keystrokes_in_log(new_game_txid)[1::2]
if len(keystrokes_rpc_responses) < 1:
print("No keystrokes broadcasted yet. Let's wait 5 seconds")
time.sleep(5)
else:
break
#print(keystrokes_rpc_responses)
for keystroke in keystrokes_rpc_responses:
json_keystroke = json.loads(keystroke)["result"]
if "status" in json_keystroke.keys() and json_keystroke["status"] == "error":
while True:
print("Trying to re-brodcast keystroke")
keystroke_rebroadcast = rogue_keystrokes(rpc_connection, json_keystroke["gametxid"], json_keystroke["keystrokes"])
if "txid" in keystroke_rebroadcast.keys():
print("Keystroke broadcasted! txid: " + keystroke_rebroadcast["txid"])
break
else:
print("Let's try again in 5 seconds")
time.sleep(5)
# waiting for last keystroke confirmation here
last_keystroke_json = json.loads(keystrokes_rpc_responses[-1])
while True:
while True:
try:
rpc_connection.sendrawtransaction(last_keystroke_json["result"]["hex"])
except Exception as e:
pass
try:
confirmations_amount = rpc_connection.getrawtransaction(last_keystroke_json["result"]["txid"], 1)["confirmations"]
break
except Exception as e:
print(e)
print("Let's wait a little bit more")
time.sleep(5)
pass
if confirmations_amount < 2:
print("Last keystroke not confirmed yet! Let's wait a little")
time.sleep(10)
else:
print("Last keystroke confirmed!")
break
while True:
print("\nExtraction info:\n")
extraction_info = rogue_extract(rpc_connection, new_game_txid, rpc_connection.getinfo()["pubkey"])
if extraction_info["status"] == "error":
print(colorize("Your warrior died or no any information about game was saved on blockchain", "red"))
print("If warrior was alive - try to wait a little (choose n to wait for a next block). If he is dead - you can bailout now (choose y).")
else:
print("Current game state:")
print("Game txid: " + extraction_info["gametxid"])
print("Information about game saved on chain: " + extraction_info["extracted"])
print("\n")
is_bailout_needed = input("Do you want to make bailout now [y] or wait for one more block [n]? [y/n]: ")
if is_bailout_needed == "y":
while True:
bailout_info = rogue_bailout(rpc_connection, new_game_txid)
if "hex" in bailout_info.keys():
break
else:
print("bailout not broadcasted yet by some reason. Let's wait...")
time.sleep(5)
break
elif is_bailout_needed == "n":
game_end_height = int(rpc_connection.getinfo()["blocks"])
while True:
current_height = int(rpc_connection.getinfo()["blocks"])
height_difference = current_height - game_end_height
if height_difference == 0:
print(current_height)
print(game_end_height)
print(colorize("Waiting for next block before bailout", "blue"))
time.sleep(5)
else:
break
else:
print("Please choose y or n !")
print(bailout_info)
print("\nGame is finished!\n")
bailout_txid = bailout_info["txid"]
input("Press [Enter] to continue...")
except Exception as e:
print("Something went wrong.")
print(e)
input("Press [Enter] to continue...")
def play_multiplayer_game(rpc_connection):
    """Interactive flow for starting and finishing a multiplayer ROGUE game.

    Lists the wallet's pending multiplayer games, optionally launches the
    chosen one via the local rogue binary, re-broadcasts failed keystroke
    transactions, waits for on-chain confirmation, and finally settles the
    game with bailout or highlander.
    """
    # printing list of user active multiplayer games
    active_games_list = rpc_connection.cclib("games", "17")["games"]
    active_multiplayer_games_list = []
    for game in active_games_list:
        gameinfo = rogue_game_info(rpc_connection, game)
        if gameinfo["maxplayers"] > 1:
            active_multiplayer_games_list.append(gameinfo)
    games_counter = 0
    for active_multiplayer_game in active_multiplayer_games_list:
        games_counter = games_counter + 1
        # A game only exposes a "seed" once its start height is reached.
        is_ready_to_start = False
        try:
            active_multiplayer_game["seed"]
            is_ready_to_start = True
        except Exception as e:
            pass
        print(colorize("\n================================\n", "green"))
        print("Game txid: " + active_multiplayer_game["gametxid"])
        print("Game buyin: " + str(active_multiplayer_game["buyin"]))
        if is_ready_to_start:
            print(colorize("Ready for start!", "green"))
        else:
            print(colorize("Not ready for start yet, wait until start height!", "red"))
        print("Game height: " + str(active_multiplayer_game["gameheight"]))
        print("Start height: " + str(active_multiplayer_game["start"]))
        print("Alive players: " + str(active_multiplayer_game["alive"]))
        print("Registered players: " + str(active_multiplayer_game["numplayers"]))
        print("Max players: " + str(active_multiplayer_game["maxplayers"]))
        print(colorize("\n***\n", "blue"))
        print("Players in game:")
        for player in active_multiplayer_game["players"]:
            print("Slot: " + str(player["slot"]))
            print("Baton: " + str(player["baton"]))
            print("Tokenid: " + str(player["tokenid"]))
            print("Is mine?: " + str(player["ismine"]))
    # asking user if he want to start any of them
    while True:
        start_game = input("\nDo you want to start any of your pendning multiplayer games?[y/n]: ")
        if start_game == "y":
            new_game_txid = input("Input txid of game which you want to start: ")
            game_info = rogue_game_info(rpc_connection, new_game_txid)
            try:
                # Relaunch the client if it exits in under a second — that
                # usually means the game was not actually playable yet.
                start_time = time.time()
                while True:
                    subprocess.call(["cc/rogue/rogue", str(game_info["seed"]), str(game_info["gametxid"])])
                    time_elapsed = time.time() - start_time
                    if time_elapsed > 1:
                        break
                    else:
                        print("Game less than 1 second. Trying to start again")
                        time.sleep(1)
            except Exception as e:
                print("Maybe game isn't ready for start yet or your input was not correct, sorry.")
                input("Press [Enter] to continue...")
                break
            # Wait for at least one new block before trying to settle.
            game_end_height = int(rpc_connection.getinfo()["blocks"])
            while True:
                current_height = int(rpc_connection.getinfo()["blocks"])
                height_difference = current_height - game_end_height
                if height_difference == 0:
                    print(current_height)
                    print(game_end_height)
                    print(colorize("Waiting for next block before bailout or highlander", "blue"))
                    time.sleep(5)
                else:
                    break
            # keystrokes.log interleaves requests/responses; [1::2] keeps responses.
            while True:
                keystrokes_rpc_responses = find_game_keystrokes_in_log(new_game_txid)[1::2]
                if len(keystrokes_rpc_responses) < 1:
                    print("No keystrokes broadcasted yet. Let's wait 5 seconds")
                    time.sleep(5)
                else:
                    break
            # Re-broadcast any keystroke bundle whose original send errored.
            for keystroke in keystrokes_rpc_responses:
                json_keystroke = json.loads(keystroke)["result"]
                if "status" in json_keystroke.keys() and json_keystroke["status"] == "error":
                    while True:
                        print("Trying to re-brodcast keystroke")
                        keystroke_rebroadcast = rogue_keystrokes(rpc_connection, json_keystroke["gametxid"], json_keystroke["keystrokes"])
                        if "txid" in keystroke_rebroadcast.keys():
                            print("Keystroke broadcasted! txid: " + keystroke_rebroadcast["txid"])
                            break
                        else:
                            print("Let's try again in 5 seconds")
                            time.sleep(5)
            # Block until the last keystroke tx has at least 2 confirmations.
            last_keystroke_json = json.loads(keystrokes_rpc_responses[-1])
            while True:
                while True:
                    try:
                        confirmations_amount = rpc_connection.getrawtransaction(last_keystroke_json["result"]["txid"], 1)["confirmations"]
                        break
                    except Exception as e:
                        print(e)
                        print("Let's wait a little bit more")
                        # Push the raw tx again in case it never reached the mempool.
                        rpc_connection.sendrawtransaction(last_keystroke_json["result"]["hex"])
                        time.sleep(5)
                        pass
                if confirmations_amount < 2:
                    print("Last keystroke not confirmed yet! Let's wait a little")
                    time.sleep(10)
                else:
                    print("Last keystroke confirmed!")
                    break
            while True:
                print("\nExtraction info:\n")
                extraction_info = rogue_extract(rpc_connection, new_game_txid, rpc_connection.getinfo()["pubkey"])
                if extraction_info["status"] == "error":
                    print(colorize("Your warrior died or no any information about game was saved on blockchain", "red"))
                    print("If warrior was alive - try to wait a little (choose n to wait for a next block). If he is dead - you can bailout now (choose y).")
                else:
                    print("Current game state:")
                    print("Game txid: " + extraction_info["gametxid"])
                    print("Information about game saved on chain: " + extraction_info["extracted"])
                print("\n")
                is_bailout_needed = input("Do you want to make bailout now [y] or wait for one more block [n]? [y/n]: ")
                if is_bailout_needed == "y":
                    if game_info["alive"] > 1:
                        # More than one player alive: bailout should work,
                        # but fall back to highlander if it returns no txid.
                        bailout_info = rogue_bailout(rpc_connection, new_game_txid)
                        try:
                            bailout_txid = bailout_info["txid"]
                            print(bailout_info)
                            print("\nGame is finished!\n")
                            input("Press [Enter] to continue...")
                            break
                        except Exception:
                            highlander_info = rogue_highlander(rpc_connection, new_game_txid)
                            highlander_info = highlander_info["txid"]
                            print(highlander_info)
                            print("\nGame is finished!\n")
                            input("Press [Enter] to continue...")
                            break
                    else:
                        # Last player standing: try highlander first, fall back
                        # to bailout when the game never filled all slots.
                        highlander_info = rogue_highlander(rpc_connection, new_game_txid)
                        if 'error' in highlander_info.keys() and highlander_info["error"] == 'numplayers != maxplayers':
                            bailout_info = rogue_bailout(rpc_connection, new_game_txid)
                            print(bailout_info)
                            print("\nGame is finished!\n")
                            input("Press [Enter] to continue...")
                            break
                        else:
                            print(highlander_info)
                            print("\nGame is finished!\n")
                            input("Press [Enter] to continue...")
                            break
                elif is_bailout_needed == "n":
                    game_end_height = int(rpc_connection.getinfo()["blocks"])
                    while True:
                        current_height = int(rpc_connection.getinfo()["blocks"])
                        height_difference = current_height - game_end_height
                        if height_difference == 0:
                            print(current_height)
                            print(game_end_height)
                            print(colorize("Waiting for next block before bailout", "blue"))
                            time.sleep(5)
                        else:
                            break
                    break
            break
        if start_game == "n":
            print("As you wish!")
            input("Press [Enter] to continue...")
            break
        else:
            print(colorize("Choose y or n!", "red"))
def rogue_newgame_multiplayer(rpc_connection):
    """Interactively create a new multiplayer ROGUE game.

    Prompts for the player limit (> 1) and the buyin (> 0.001), then
    broadcasts a ``newgame`` cclib call and reports the resulting txid.
    """
    # Keep asking until a valid player count is supplied.
    while True:
        max_players = input("Input game max. players (>1): ")
        if int(max_players) > 1:
            break
        print("Please re-check your input")
        input("Press [Enter] to continue...")
    # Keep asking until a valid buyin is supplied.
    while True:
        buyin = input("Input game buyin (>0.001): ")
        if float(buyin) > 0.001:
            break
        print("Please re-check your input")
        input("Press [Enter] to continue...")
    try:
        # cclib expects the params as a quoted JSON array string.
        new_game_txid = rpc_connection.cclib("newgame", "17", '"[' + max_players + "," + buyin + ']"')["txid"]
        print(colorize("New multiplayer game succesfully created. txid: " + new_game_txid, "green"))
        input("Press [Enter] to continue...")
    except Exception as e:
        print("Something went wrong.")
        print(e)
        input("Press [Enter] to continue...")
def rogue_join_multiplayer_game(rpc_connection):
    """Interactively register into an existing multiplayer ROGUE game.

    Shows the joinable games, optionally lets the user pick one of their
    existing warriors (or names a fresh one), broadcasts the registration
    transaction and waits until it leaves the mempool.
    Ctrl-C aborts the whole flow.
    """
    while True:
        try:
            print_multiplayer_games_list(rpc_connection)
            # TODO: optional player data txid (print players you have and ask if you want to choose one)
            game_txid = input("Input txid of game you want to join: ")
            try:
                while True:
                    print_players_list(rpc_connection)
                    is_choice_needed = input("Do you want to choose a player for this game? [y/n] ")
                    if is_choice_needed == "y":
                        player_txid = input("Please input player txid: ")
                        newgame_regisration_txid = rogue_game_register(rpc_connection, game_txid, player_txid)["txid"]
                        break
                    elif is_choice_needed == "n":
                        # No existing warrior: name a new one and register bare.
                        set_warriors_name(rpc_connection)
                        newgame_regisration_txid = rogue_game_register(rpc_connection, game_txid)["txid"]
                        break
                    else:
                        print("Please choose y or n !")
            except Exception as e:
                # Registration can fail on double-register or insufficient funds.
                print("Something went wrong. Maybe you're trying to register on game twice or don't have enough funds to pay buyin.")
                print(e)
                input("Press [Enter] to continue...")
                break
            print(colorize("Succesfully registered.", "green"))
            # Poll the mempool until the registration tx is mined.
            while True:
                mempool = rpc_connection.getrawmempool()
                if newgame_regisration_txid in mempool:
                    print(colorize("Waiting for registration transaction to be mined", "blue"))
                    time.sleep(5)
                else:
                    print(colorize("Registration transaction is mined", "green"))
                    break
            print(newgame_regisration_txid)
            input("Press [Enter] to continue...")
            break
        except KeyboardInterrupt:
            break
def print_players_list(rpc_connection):
    """Print a numbered summary of every warrior owned by this wallet."""
    owned = rogue_players_list(rpc_connection)
    print(colorize("\nYou own " + str(owned["numplayerdata"]) + " warriors\n", "blue"))
    for number, player_txid in enumerate(owned["playerdata"], start=1):
        data = rogue_player_info(rpc_connection, player_txid)["player"]
        print(colorize("\n================================\n", "green"))
        print(f"Warrior {number}")
        print("Name: " + data["pname"] + "\n")
        print("Player txid: " + data["playertxid"])
        print("Token txid: " + data["tokenid"])
        print(f"Hitpoints: {data['hitpoints']}")
        print(f"Strength: {data['strength']}")
        print(f"Level: {data['level']}")
        print(f"Experience: {data['experience']}")
        print(f"Dungeon Level: {data['dungeonlevel']}")
        print("Chain: " + data["chain"])
        print(colorize("\nInventory:\n", "blue"))
        for item in data["pack"]:
            print(item)
        print(f"\nTotal packsize: {data['packsize']}\n")
    input("Press [Enter] to continue...")
def sell_warrior(rpc_connection):
    """Interactively place a token ask (sell order) for one of the user's warriors."""
    print(colorize("Your brave warriors: \n", "blue"))
    print_players_list(rpc_connection)
    print("\n")
    while True:
        answer = input("Do you want to place order to sell any? [y/n]: ")
        if answer == "y":
            player_txid = input("Input playertxid of warrior you want to sell: ")
            asking_price = input("Input price (in ROGUE coins) you want to sell warrior for: ")
            # Resolve the warrior's token; a bad playertxid raises here.
            try:
                warrior_tokenid = rogue_player_info(rpc_connection, player_txid)["player"]["tokenid"]
            except Exception as e:
                print(e)
                print("Something went wrong. Be careful with input next time.")
                input("Press [Enter] to continue...")
                break
            ask_raw = rpc_connection.tokenask("1", warrior_tokenid, asking_price)
            try:
                ask_txid = rpc_connection.sendrawtransaction(ask_raw["hex"])
            except Exception as e:
                print(e)
                print(ask_raw)
                print("Something went wrong. Be careful with input next time.")
                input("Press [Enter] to continue...")
                break
            print(colorize("Ask succesfully placed. Ask txid is: " + ask_txid, "green"))
            input("Press [Enter] to continue...")
            break
        elif answer == "n":
            print("As you wish!")
            input("Press [Enter] to continue...")
            break
        else:
            print(colorize("Choose y or n!", "red"))
#TODO: have to combine into single scanner with different cases
def is_warrior_alive(rpc_connection, warrior_txid):
    """Return True if the warrior's 1-satoshi marker output is still unspent.

    A live warrior is represented by an unspent vout of exactly 0.00000001
    on its player transaction; ``gettxout`` returns a falsy value once spent.
    """
    tx = rpc_connection.getrawtransaction(warrior_txid, 1)
    return any(
        vout["value"] == 0.00000001 and rpc_connection.gettxout(tx["txid"], vout["n"])
        for vout in tx["vout"]
    )
def warriors_scanner(rpc_connection):
    """Scan all chain tokens and return alive warriors NOT owned by this wallet.

    Returns a dict mapping tokenid -> player data, and prints how long the
    scan took.
    """
    started = time.time()
    mine = rogue_players_list(rpc_connection)
    found = {}
    for token in rpc_connection.tokenlist():
        info = rogue_player_info(rpc_connection, token)
        # Skip tokens that are not warriors at all.
        if "status" in info and info["status"] == "error":
            continue
        playertxid = info["player"]["playertxid"]
        # Skip warriors this wallet already owns.
        if playertxid in mine["playerdata"]:
            continue
        # Skip warriors whose marker output has been spent (dead).
        if not is_warrior_alive(rpc_connection, playertxid):
            continue
        found[token] = info["player"]
    print("--- %s seconds ---" % (time.time() - started))
    return found
def warriors_scanner_for_rating(rpc_connection):
    """Scan the chain and return every *alive*, *current* warrior for ranking.

    For each warrior token the baton chain is followed to its tip (the
    latest state of the player), then dead warriors are filtered out.
    Returns a dict mapping playertxid -> player data.
    """
    print("It can take some time")
    token_list = rpc_connection.tokenlist()
    my_warriors_list = rogue_players_list(rpc_connection)
    actual_playerids = []
    warriors_list = {}
    for token in token_list:
        player_info = rogue_player_info(rpc_connection, token)
        if "status" in player_info and player_info["status"] == "error":
            pass
        else:
            # Follow batontxid links until we reach the newest player record.
            while True:
                if "batontxid" in player_info["player"].keys():
                    player_info = rogue_player_info(rpc_connection, player_info["player"]["batontxid"])
                else:
                    actual_playerids.append(player_info["player"]["playertxid"])
                    break
    for player_id in actual_playerids:
        player_info = rogue_player_info(rpc_connection, player_id)
        if not is_warrior_alive(rpc_connection, player_info["player"]["playertxid"]):
            pass
        else:
            warriors_list[player_id] = player_info["player"]
    return warriors_list
def warriors_scanner_for_dex(rpc_connection):
    """Scan all chain tokens and return warriors eligible for the marketplace view.

    Returns a dict mapping tokenid -> player data, and prints how long the
    scan took. Dead warriors are intentionally kept (their asks may still exist).
    """
    started = time.time()
    mine = rogue_players_list(rpc_connection)
    found = {}
    for token in rpc_connection.tokenlist():
        info = rogue_player_info(rpc_connection, token)
        # Not a warrior token at all.
        if "status" in info and info["status"] == "error":
            continue
        # NOTE(review): compares tokenid against "playerdata" (playertxids in the
        # sibling scanner) — confirm which identifier "playerdata" really holds.
        if info["player"]["tokenid"] in mine["playerdata"]:
            continue
        found[token] = info["player"]
    print("--- %s seconds ---" % (time.time() - started))
    return found
def print_warrior_list(rpc_connection):
    """Print a numbered summary of every (foreign, alive) warrior on the chain."""
    scanned = warriors_scanner(rpc_connection)
    print(colorize("All warriors on ROGUE chain: \n", "blue"))
    for number, player_txid in enumerate(scanned, start=1):
        data = rogue_player_info(rpc_connection, player_txid)["player"]
        print(colorize("\n================================\n", "green"))
        print(f"Warrior {number}")
        print("Name: " + data["pname"] + "\n")
        print("Player txid: " + data["playertxid"])
        print("Token txid: " + data["tokenid"])
        print(f"Hitpoints: {data['hitpoints']}")
        print(f"Strength: {data['strength']}")
        print(f"Level: {data['level']}")
        print(f"Experience: {data['experience']}")
        print(f"Dungeon Level: {data['dungeonlevel']}")
        print("Chain: " + data["chain"])
        print(colorize("\nInventory:\n", "blue"))
        for item in data["pack"]:
            print(item)
        print(f"\nTotal packsize: {data['packsize']}\n")
    input("Press [Enter] to continue...")
def place_bid_on_warriror(rpc_connection):
    """Interactively place a token bid (buy order) for a warrior on the chain.

    Shows the list of foreign warriors, asks for a playertxid and a price,
    then broadcasts a tokenbid transaction.
    """
    warriors_list = print_warrior_list(rpc_connection)
    # TODO: have to drop my warriors or at least print my warriors ids
    while True:
        need_buy = input("Do you want to place order to buy some warrior? [y/n]: ")
        if need_buy == "y":
            playertxid = input("Input playertxid of warrior you want to place bid for: ")
            price = input("Input price (in ROGUE coins) you want to buy warrior for: ")
            # Guard the lookup like sell_warrior does: a mistyped playertxid
            # previously crashed the whole TUI with an uncaught exception.
            try:
                tokenid = rogue_player_info(rpc_connection, playertxid)["player"]["tokenid"]
            except Exception as e:
                print(e)
                print("Something went wrong. Be careful with input next time.")
                input("Press [Enter] to continue...")
                break
            token_bid_raw = rpc_connection.tokenbid("1", tokenid, price)
            try:
                token_bid_txid = rpc_connection.sendrawtransaction(token_bid_raw["hex"])
            except Exception as e:
                print(e)
                print(token_bid_raw)
                print("Something went wrong. Be careful with input next time.")
                input("Press [Enter] to continue...")
                break
            print(colorize("Bid succesfully placed. Bid txid is: " + token_bid_txid, "green"))
            input("Press [Enter] to continue...")
            break
        if need_buy == "n":
            print("As you wish!")
            input("Press [Enter] to continue...")
            break
        else:
            print(colorize("Choose y or n!", "red"))
def check_incoming_bids(rpc_connection):
    """Return all open bid ("b") orders placed on this wallet's warriors."""
    # TODO: have to scan for warriors which are in asks as well
    bids = []
    for player_txid in rogue_players_list(rpc_connection)["playerdata"]:
        token_id = rogue_player_info(rpc_connection, player_txid)["player"]["tokenid"]
        bids.extend(
            order
            for order in rpc_connection.tokenorders(token_id)
            if order["funcid"] == "b"
        )
    return bids
def print_icoming_bids(rpc_connection):
    """Print incoming bids on the user's warriors and optionally fill one."""
    incoming_bids = check_incoming_bids(rpc_connection)
    for bid in incoming_bids:
        print("Recieved bid for warrior " + bid["tokenid"])
        player_data = rogue_player_info(rpc_connection, bid["tokenid"])["player"]
        print(colorize("\n================================\n", "green"))
        print("Name: " + player_data["pname"] + "\n")
        print("Player txid: " + player_data["playertxid"])
        print("Token txid: " + player_data["tokenid"])
        print("Hitpoints: " + str(player_data["hitpoints"]))
        print("Strength: " + str(player_data["strength"]))
        print("Level: " + str(player_data["level"]))
        print("Experience: " + str(player_data["experience"]))
        print("Dungeon Level: " + str(player_data["dungeonlevel"]))
        print("Chain: " + player_data["chain"])
        print(colorize("\nInventory:\n", "blue"))
        for item in player_data["pack"]:
            print(item)
        print("\nTotal packsize: " + str(player_data["packsize"]) + "\n")
        print(colorize("\n================================\n", "blue"))
        print("Order info: \n")
        print("Bid txid: " + bid["txid"])
        print("Price: " + str(bid["price"]) + "\n")
    if len(incoming_bids) == 0:
        print(colorize("There is no any incoming orders!", "blue"))
        input("Press [Enter] to continue...")
    else:
        while True:
            want_to_sell = input("Do you want to fill any incoming bid? [y/n]: ")
            if want_to_sell == "y":
                bid_txid = input("Input bid txid you want to fill: ")
                for bid in incoming_bids:
                    if bid_txid == bid["txid"]:
                        tokenid = bid["tokenid"]
                        fill_sum = bid["totalrequired"]
                        fillbid_hex = rpc_connection.tokenfillbid(tokenid, bid_txid, str(fill_sum))
                        try:
                            fillbid_txid = rpc_connection.sendrawtransaction(fillbid_hex["hex"])
                        except Exception as e:
                            print(e)
                            print(fillbid_hex)
                            print("Something went wrong. Be careful with input next time.")
                            input("Press [Enter] to continue...")
                            break
                        print(colorize("Warrior succesfully sold. Txid is: " + fillbid_txid, "green"))
                        input("Press [Enter] to continue...")
                        break
            # NOTE(review): after a successful fill control falls through to this
            # check and re-prompts until the user enters "n" — confirm intended.
            if want_to_sell == "n":
                print("As you wish!")
                input("Press [Enter] to continue...")
                break
            else:
                print(colorize("Choose y or n!", "red"))
def find_warriors_asks(rpc_connection):
    """List every warrior ask (sell order) on the chain and optionally fill one."""
    warriors_list = warriors_scanner_for_dex(rpc_connection)
    warriors_asks = []
    # Collect all open sell ("s") orders across scanned warriors.
    for player in warriors_list:
        orders = rpc_connection.tokenorders(player)
        if len(orders) > 0:
            for order in orders:
                if order["funcid"] == "s":
                    warriors_asks.append(order)
    for ask in warriors_asks:
        print(colorize("\n================================\n", "green"))
        print("Warrior selling on marketplace: " + ask["tokenid"])
        player_data = rogue_player_info(rpc_connection, ask["tokenid"])["player"]
        print("Name: " + player_data["pname"] + "\n")
        print("Player txid: " + player_data["playertxid"])
        print("Token txid: " + player_data["tokenid"])
        print("Hitpoints: " + str(player_data["hitpoints"]))
        print("Strength: " + str(player_data["strength"]))
        print("Level: " + str(player_data["level"]))
        print("Experience: " + str(player_data["experience"]))
        print("Dungeon Level: " + str(player_data["dungeonlevel"]))
        print("Chain: " + player_data["chain"])
        print(colorize("\nInventory:\n", "blue"))
        for item in player_data["pack"]:
            print(item)
        print("\nTotal packsize: " + str(player_data["packsize"]) + "\n")
        print(colorize("Order info: \n", "red"))
        print("Ask txid: " + ask["txid"])
        print("Price: " + str(ask["price"]) + "\n")
    while True:
        want_to_buy = input("Do you want to buy any warrior? [y/n]: ")
        if want_to_buy == "y":
            ask_txid = input("Input asktxid which you want to fill: ")
            for ask in warriors_asks:
                if ask_txid == ask["txid"]:
                    tokenid = ask["tokenid"]
                    try:
                        fillask_raw = rpc_connection.tokenfillask(tokenid, ask_txid, "1")
                    except Exception as e:
                        print("Something went wrong. Be careful with input next time.")
                        input("Press [Enter] to continue...")
                        break
                    try:
                        fillask_txid = rpc_connection.sendrawtransaction(fillask_raw["hex"])
                    except Exception as e:
                        print(e)
                        print(fillask_raw)
                        print("Something went wrong. Be careful with input next time.")
                        input("Press [Enter] to continue...")
                        break
                    print(colorize("Warrior succesfully bought. Txid is: " + fillask_txid, "green"))
                    input("Press [Enter] to continue...")
                    break
        # NOTE(review): after a successful purchase control falls through here
        # and re-prompts until the user enters "n" — confirm intended.
        if want_to_buy == "n":
            print("As you wish!")
            input("Press [Enter] to continue...")
            break
        else:
            print(colorize("Choose y or n!", "red"))
def warriors_orders_check(rpc_connection):
    """Print this wallet's open warrior asks and bids and optionally cancel one.

    Fetches all of the wallet's token orders, keeps only those that belong to
    warrior tokens, prints both order books with full warrior details, then
    offers interactive cancellation of a chosen ask or bid.
    """
    my_orders_list = rpc_connection.mytokenorders("17")
    warriors_orders = {}
    for order in my_orders_list:
        player_info = rogue_player_info(rpc_connection, order["tokenid"])
        # Non-warrior tokens come back with an error status — skip them.
        if "status" in player_info and player_info["status"] == "error":
            pass
        else:
            warriors_orders[order["tokenid"]] = order
    bids_list = []
    asks_list = []
    for order in warriors_orders:
        if warriors_orders[order]["funcid"] == "s":
            asks_list.append(warriors_orders[order])
        else:
            # Fix: previously appended the dict *key* (tokenid string), which
            # made the bid-printing loop below crash on bid["txid"].
            bids_list.append(warriors_orders[order])
    print(colorize("\nYour asks:\n", "blue"))
    print(colorize("\n********************************\n", "red"))
    for ask in asks_list:
        print("txid: " + ask["txid"])
        print("Price: " + ask["price"])
        print("Warrior tokenid: " + ask["tokenid"])
        print(colorize("\n================================\n", "green"))
        print("Warrior selling on marketplace: " + ask["tokenid"])
        player_data = rogue_player_info(rpc_connection, ask["tokenid"])["player"]
        print("Name: " + player_data["pname"] + "\n")
        print("Player txid: " + player_data["playertxid"])
        print("Token txid: " + player_data["tokenid"])
        print("Hitpoints: " + str(player_data["hitpoints"]))
        print("Strength: " + str(player_data["strength"]))
        print("Level: " + str(player_data["level"]))
        print("Experience: " + str(player_data["experience"]))
        print("Dungeon Level: " + str(player_data["dungeonlevel"]))
        print("Chain: " + player_data["chain"])
        print(colorize("\nInventory:\n", "blue"))
        for item in player_data["pack"]:
            print(item)
        print("\nTotal packsize: " + str(player_data["packsize"]) + "\n")
        print(colorize("\n================================\n", "green"))
    print(colorize("\nYour bids:\n", "blue"))
    print(colorize("\n********************************\n", "red"))
    for bid in bids_list:
        print("txid: " + bid["txid"])
        print("Price: " + bid["price"])
        print("Warrior tokenid: " + bid["tokenid"])
        print(colorize("\n================================\n", "green"))
        print("Warrior selling on marketplace: " + bid["tokenid"])
        player_data = rogue_player_info(rpc_connection, bid["tokenid"])["player"]
        print("Name: " + player_data["pname"] + "\n")
        print("Player txid: " + player_data["playertxid"])
        print("Token txid: " + player_data["tokenid"])
        print("Hitpoints: " + str(player_data["hitpoints"]))
        print("Strength: " + str(player_data["strength"]))
        print("Level: " + str(player_data["level"]))
        print("Experience: " + str(player_data["experience"]))
        print("Dungeon Level: " + str(player_data["dungeonlevel"]))
        print("Chain: " + player_data["chain"])
        print(colorize("\nInventory:\n", "blue"))
        for item in player_data["pack"]:
            print(item)
        print("\nTotal packsize: " + str(player_data["packsize"]) + "\n")
        print(colorize("\n================================\n", "green"))
    while True:
        need_order_change = input("Do you want to cancel any of your orders? [y/n]: ")
        if need_order_change == "y":
            while True:
                ask_or_bid = input("Do you want cancel ask or bid? [a/b]: ")
                if ask_or_bid == "a":
                    ask_txid = input("Input txid of ask you want to cancel: ")
                    warrior_tokenid = input("Input warrior token id for this ask: ")
                    try:
                        ask_cancellation_hex = rpc_connection.tokencancelask(warrior_tokenid, ask_txid)
                        ask_cancellation_txid = rpc_connection.sendrawtransaction(ask_cancellation_hex["hex"])
                    except Exception as e:
                        print(colorize("Please re-check your input!", "red"))
                    else:
                        # Fix: success message previously ran after the except
                        # branch too, raising NameError on the unbound txid.
                        print(colorize("Ask succefully cancelled. Cancellation txid: " + ask_cancellation_txid, "green"))
                    break
                if ask_or_bid == "b":
                    bid_txid = input("Input txid of bid you want to cancel: ")
                    warrior_tokenid = input("Input warrior token id for this bid: ")
                    try:
                        bid_cancellation_hex = rpc_connection.tokencancelbid(warrior_tokenid, bid_txid)
                        bid_cancellation_txid = rpc_connection.sendrawtransaction(bid_cancellation_hex["hex"])
                    except Exception as e:
                        print(colorize("Please re-check your input!", "red"))
                    else:
                        print(colorize("Bid succefully cancelled. Cancellation txid: " + bid_cancellation_txid, "green"))
                    break
                else:
                    print(colorize("Choose a or b!", "red"))
            input("Press [Enter] to continue...")
            break
        if need_order_change == "n":
            print("As you wish!")
            input("Press [Enter] to continue...")
            break
        else:
            print(colorize("Choose y or n!", "red"))
def set_warriors_name(rpc_connection):
    """Prompt for a warrior name and broadcast a cclib ``setname`` call."""
    name = input("What warrior name do you want for legends and tales about your brave adventures?: ")
    # cclib wants the JSON array URL-encoded and wrapped in literal quotes: "[%22<name>%22]"
    encoded_arg = f'"[%22{name}%22]"'
    status = rpc_connection.cclib("setname", "17", encoded_arg)
    print(colorize("Warrior name succesfully set", "green"))
    print("Result: " + status["result"])
    print("Name: " + status["pname"])
    input("Press [Enter] to continue...")
def top_warriors_rating(rpc_connection):
    """Print the top-20 warriors on the chain, ranked by experience."""
    started = time.time()
    scanned = warriors_scanner_for_rating(rpc_connection)
    experience_by_id = {txid: data["experience"] for txid, data in scanned.items()}
    # Highest experience first; only the first 20 places are shown.
    ranking = sorted(experience_by_id, key=experience_by_id.get, reverse=True)
    for place, warrior_txid in enumerate(ranking[:20], start=1):
        print("\n" + str(place) + " place.")
        print(colorize("\n================================\n", "blue"))
        player_data = rogue_player_info(rpc_connection, warrior_txid)["player"]
        print("Name: " + player_data["pname"] + "\n")
        print("Player txid: " + player_data["playertxid"])
        print("Token txid: " + player_data["tokenid"])
        print(f"Hitpoints: {player_data['hitpoints']}")
        print(f"Strength: {player_data['strength']}")
        print(f"Level: {player_data['level']}")
        print(f"Experience: {player_data['experience']}")
        print(f"Dungeon Level: {player_data['dungeonlevel']}")
        print("Chain: " + player_data["chain"])
    print("--- %s seconds ---" % (time.time() - started))
    input("Press [Enter] to continue...")
def exit():
    # Terminate the TUI. NOTE: intentionally shadows the builtin exit().
    raise SystemExit
def warrior_trasnfer(rpc_connection):
    """Interactively transfer one of the user's warrior tokens to another pubkey."""
    print(colorize("Your brave warriors: \n", "blue"))
    print_players_list(rpc_connection)
    print("\n")
    while True:
        answer = input("Do you want to transfer any warrior? [y/n]: ")
        if answer == "y":
            tokenid = input("Input warrior tokenid: ")
            pubkey = input("Input recepient pubkey: ")
            try:
                transfer_raw = rpc_connection.tokentransfer(tokenid, pubkey, "1")
                transfer_txid = rpc_connection.sendrawtransaction(transfer_raw["hex"])
            except Exception as e:
                print(e)
                print("Something went wrong. Please be careful with your input next time!")
                input("Press [Enter] to continue...")
                break
            print(colorize("Warrior succesfully transferred! Transfer txid: " + transfer_txid, "green"))
            input("Press [Enter] to continue...")
            break
        elif answer == "n":
            print("As you wish!")
            input("Press [Enter] to continue...")
            break
        else:
            print(colorize("Choose y or n!", "red"))
def check_if_config_is_here(rpc_connection):
    """Ensure ROGUE.conf is in the current directory, copying it from the
    platform-default Komodo data dir when missing.

    ``rpc_connection`` is unused but kept for signature compatibility with
    the other TUI setup helpers.
    """
    if os.path.exists("ROGUE.conf"):
        print(colorize("Config is already in daemon folder", "green"))
        return
    # Resolve the OS locally (like find_game_keystrokes_in_log does) instead
    # of relying on a module-level `operating_system` global.
    operating_system = platform.system()
    if operating_system == 'Darwin':
        path_to_config = os.environ['HOME'] + '/Library/Application Support/Komodo/ROGUE/ROGUE.conf'
    elif operating_system == 'Linux':
        path_to_config = os.environ['HOME'] + '/.komodo/ROGUE/ROGUE.conf'
    elif operating_system == 'Win64' or operating_system == 'Windows':
        path_to_config = '%s/komodo/ROGUE/ROGUE.conf' % os.environ['APPDATA']
    else:
        # Previously an unknown OS crashed with NameError on path_to_config.
        print("Unsupported operating system - can't locate ROGUE.conf automatically.")
        return
    try:
        copy(path_to_config, os.getcwd())
    except Exception as e:
        print(e)
        print("Can't copy config to current daemon directory automatically by some reason.")
        print("Please copy it manually. It's locating here: " + path_to_config)
def find_game_keystrokes_in_log(gametxid):
    """Return all lines of keystrokes.log that mention *gametxid*.

    Pipes the log through the platform's grep equivalent and returns the
    decoded output split on newlines (so a trailing empty string is normal).
    """
    if platform.system() in ("Win64", "Windows"):
        reader = subprocess.Popen(["type", "keystrokes.log"], stdout=subprocess.PIPE, shell=True)
        matcher = subprocess.Popen(["findstr", gametxid], stdin=reader.stdout, stdout=subprocess.PIPE, shell=True)
    else:
        reader = subprocess.Popen(["cat", "keystrokes.log"], stdout=subprocess.PIPE)
        matcher = subprocess.Popen(["grep", gametxid], stdin=reader.stdout, stdout=subprocess.PIPE)
    # Close our copy of the read end so the reader sees SIGPIPE if grep exits.
    reader.stdout.close()
    raw = matcher.communicate()[0]
    return raw.decode().split("\n")
def check_if_tx_in_mempool(rpc_connection, txid):
    """Block until *txid* leaves the mempool (i.e. has been mined)."""
    # Poll every 5 seconds while the transaction is still unconfirmed.
    while txid in rpc_connection.getrawmempool():
        print(colorize("Waiting for " + txid + " transaction to be mined", "blue"))
        time.sleep(5)
    print(colorize("Transaction is mined", "green"))
| StarcoderdataPython |
61608 | from bento_meta.objects import Node
from bento_meta_shim.models.mdbproperty import MDBproperty
class MDBnode():
__node = None
"""give proper life"""
def __init__(self, node):
self.__node = node
self.kind = node.mapspec_['label']
self.name = node.handle
self.handle = node.handle
self.model = node.model
self.nanoid = node.nanoid
self.props = self.__convert_props()
def old(self):
return self.__node
def __convert_props(self):
mdbprops = []
for tuple_key in self.__node.props:
_prop = self.__node.props[tuple_key]
mdbprops.append(MDBproperty(property=_prop, key=tuple_key))
return mdbprops
def __str__(self):
return 'a {}: {} called {}'.format(self.kind, self.nanoid, self.name)
def __repr__(self):
return '{}:{}:{}'.format(self.kind, self.nanoid, self.name) | StarcoderdataPython |
1656236 | import argparse
import serial
import serial.tools.list_ports
import requests
import re
import os
import signal
import esptool
# Local cache directory for downloaded firmwares, plus the two boot images
# bundled with this tool.
TARGET_DIR = os.path.dirname(__file__) + "/firmwares"
read_file_one = os.path.dirname(__file__) + "/data/boot_app0.bin"
read_file_two = os.path.dirname(__file__) + "/data/bootloader_qio_80m.bin"
# Argument template handed to esptool.main(); the placeholder entries
# ("port...", "file_1...", "file_2...") are patched in before flashing.
commands = [
    "--chip",
    "esp32",
    "--port",
    "port...",
    "--baud",
    "921600",
    "--before",
    "default_reset",
    "--after",
    "hard_reset",
    "write_flash",
    "-z",
    "--flash_mode",
    "dio",
    "--flash_freq",
    "80m",
    "--flash_size",
    "detect",
    "0xe000",
    read_file_one,
    "0x1000",
    read_file_two,
    "0x10000",
    "file_1...",
    "0x8000",
    "file_2...",
]
# Indices into `commands` for the entries patched at runtime.
port = 3        # serial port placeholder
baud = 5        # baudrate value
file_one = -3   # application firmware image path
file_two = -1   # partition image path
# Remote firmware repository and the regex extracting directory entries
# from its Apache-style index pages.
remote_url = "https://www.elephantrobotics.com/software/mystudio/apps-2.0/myCobot/"
item_re = re.compile(r"alt=\"\[DIR\]\"><\/td><td><a href=\"(.*?)\/\">")
def get_port():
    """Interactively pick a serial port.

    Lists all detected serial ports, prompts for a 1-based index (falling
    back to the last entry on empty/invalid input) and returns the chosen
    device name, e.g. "/dev/ttyUSB0".
    """
    print("====================================================================")
    plist = list(serial.tools.list_ports.comports())
    max_ = len(plist)
    idx = 1
    for port in plist:
        print("{} : {}".format(idx, port))
        idx += 1
    _in = input("\nPlease input 1 - {} to choice(default {}):".format(idx - 1, max_))
    try:
        _in = int(_in)
        _in = max_ if _in > max_ else _in
    except Exception:
        _in = max_
    print('choice: {}'.format(_in))
    # A port entry prints as "<device> - <description>"; keep the device part.
    port = str(plist[(_in) - 1]).split(" - ")[0].strip()
    print(port)
    return port
# ====================================================================
# remote option functions
# ====================================================================
def get_remote_firmwares():
    """Fetch the remote index page and return the firmware directory names.

    Exits the program when the repository is unreachable.
    """
    try:
        resp = requests.get(remote_url)
    except Exception:
        print("Please check your network!!!")
        raise SystemExit(0)
    content = resp.content.decode()
    # Directory entries ("[DIR]" icon) on the Apache-style index page.
    items = item_re.findall(content, re.S)
    return items
def get_remote_versions(name: str):
    """Return the version sub-directories listed remotely for firmware *name*."""
    resp = requests.get(remote_url + name)
    content = resp.content.decode()
    versions = item_re.findall(content, re.S)
    return versions
def ensure_path(path):
    """Create *path* (and any missing parents) if it does not already exist.

    ``os.makedirs(..., exist_ok=True)`` is already idempotent, so the
    previous ``os.path.exists`` pre-check was a redundant TOCTOU race.
    """
    os.makedirs(path, exist_ok=True)
def wether_exist(paths):
    """Return True only when every file in *paths* is already present.

    The previous implementation looked at just the first entry, so a
    download interrupted after the first file was wrongly treated as
    complete.  (The misspelled name is kept for caller compatibility.)
    """
    return all(os.path.exists(path) for path in paths)
def download_firmware(name, version):
    """Download the firmware images for *name*/*version* into TARGET_DIR.

    Skips the download (unless the user opts to re-download) when the files
    are already cached locally, streams each file with a simple progress
    bar, and returns the list of local file paths.
    """
    url = remote_url + f"{name}/{version}/"
    resp = requests.get(url)
    content = resp.content.decode()
    # File entries ("[   ]" icon) on the Apache index page for this version.
    firmware_re = re.compile(r"alt=\"\[\s+\]\"><\/td><td><a href=\"(.*?)\">")
    firmwares = firmware_re.findall(content, re.S)
    headers = {
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:57.0) Gecko/20100101 Firefox/57.0",
        "Accept-Encoding": "identity",
    }
    # print(firmwares)
    download_path = "/".join([TARGET_DIR, name, version])
    ensure_path(download_path)
    download_urls = [url + firmware for firmware in firmwares]
    pathes = [download_path + "/" + firmware for firmware in firmwares]
    has_downloaded = wether_exist(pathes)
    if has_downloaded:
        re_download = input("Do you want to download it again?[Y/n] (default: no):")
        if re_download in ["n", "no", "N", "NO", ""]:
            print("Flash from localtion.")
            return pathes
    print(f"Download path: {download_path}")
    for name, download_url, path in zip(firmwares, download_urls, pathes):
        resp_stream = requests.get(download_url, headers=headers, stream=True)
        total_size = int(resp_stream.headers["content-length"])
        sum = 0
        with open(path, "wb") as f:
            for chunk in resp_stream.iter_content(chunk_size=1024):
                if chunk:
                    f.write(chunk)
                    sum += len(chunk)
                    print(
                        "\r {} downloaded: [{}{}] {}".format(
                            name,
                            int(sum / total_size * 100) * "#",
                            int((1 - sum / total_size) * 100) * "*",
                            sum,
                        ),
                        end="",
                    )
        print("")
    return pathes
def remote_option():
    """Interactively choose a remote firmware and version, download it,
    and patch the two image paths into the esptool command template."""
    global commands
    print("====================================================================")
    remote_items = get_remote_firmwares()
    for idx, item_name in enumerate(remote_items):
        print(f"{idx}: {item_name}")
    item_idx = input("Please choice one firmware, input id(default: 0):")
    try:
        item_idx = int(item_idx)
    except Exception:
        item_idx = 0
    choice_item = remote_items[item_idx]
    print(f"choice: {item_idx}")
    print("====================================================================")
    versions = get_remote_versions(choice_item)
    for idx, version in enumerate(versions):
        print(f"{idx}: {version}")
    version_idx = input("Please choice one version, input id(default: 0):")
    try:
        version_idx = int(version_idx)
    except Exception:
        version_idx = 0
    choice_version = versions[version_idx]
    print(f"choice: {version_idx}")
    pathes = download_firmware(choice_item, choice_version)
    # Patch the downloaded image paths into the flash command.
    commands[file_one] = pathes[0]
    commands[file_two] = pathes[1]
# ====================================================================
# local option functions
# ====================================================================
def get_local_items():
    """Return the firmware names cached under TARGET_DIR."""
    return os.listdir(TARGET_DIR)
def get_local_versions(name):
    """Return the cached version directories for firmware *name*."""
    dir_ = TARGET_DIR + "/" + name
    return os.listdir(dir_)
def local_option():
    """Interactively pick a cached firmware + version from TARGET_DIR and
    patch its two image paths into the esptool command template."""
    global commands
    print("====================================================================")
    try:
        local_items = get_local_items()
    except FileNotFoundError:
        print("No local firmware, try to download remotely!!!")
        raise SystemExit(0)
    # check if has local firmware.
    if not local_items:
        print("No local firmware, try to download remotely!!!")
        raise SystemExit(0)
    for idx, item in enumerate(local_items):
        print(f"{idx}: {item}")
    item_idx = input("Please choice one(default: 0):")
    try:
        item_idx = int(item_idx)
    except Exception:
        item_idx = 0
    choice_item = local_items[item_idx]
    print(f"choice: {item_idx}")
    print("====================================================================")
    versions = get_local_versions(choice_item)
    for idx, version in enumerate(versions):
        print(f"{idx}: {version}")
    version_idx = input("Please choice one(default: 0):")
    try:
        version_idx = int(version_idx)
    except Exception:
        version_idx = 0
    choice_version = versions[version_idx]
    print(f"choice: {version_idx}")
    dir_path = f"{TARGET_DIR}/{choice_item}/{choice_version}/"
    firewares = os.listdir(dir_path)
    print(firewares)
    # Patch the two cached image paths into the flash command.
    commands[file_one] = dir_path + firewares[0]
    commands[file_two] = dir_path + firewares[1]
def exit_(*args):
    """Signal handler that terminates the program with exit status 0."""
    raise SystemExit(0)
# main
def main():
    """Entry point: gather port/baud/firmware choices, then run esptool."""
    try:
        signal.signal(signal.SIGINT, exit_)
    except Exception:
        pass
    args = argparse.ArgumentParser()
    args.add_argument("-b", "--baudrate", help="Port baudrate.")
    stdargs = args.parse_args()
    if stdargs.baudrate:
        commands[baud] = stdargs.baudrate
    else:
        # No explicit baudrate: derive it from the chosen board type.
        print("====================================================================")
        print("0: basic")
        print("1: atom")
        board = input("Please choice board(default: 0):")
        try:
            board = int(board)
        except Exception:
            board = 0
        if board == 1:
            commands[baud] = "1500000"
    port_str = get_port()
    commands[port] = port_str
    print("====================================================================")
    print("0: choice from local.")
    print("1: choice from remote.")
    c = input("Please firmware localtion(default: 0):")
    try:
        c = int(c)
    except Exception:
        c = 0
    print(f"choice: {c}")
    if c == 0:
        local_option()
    elif c == 1:
        remote_option()
    try:
        esptool.main(commands)
    except OSError as e:
        print(f"Error encountered! {e}")
        print()
        if "Permission denied" in str(e):
            print("Please ensure you part of the `dialout` group. See README "
                "for more details")
        else:
            print("Please do not disconnect from the device.")
    except esptool.FatalError as e:
        print(e)
if __name__ == "__main__":
    main()
| StarcoderdataPython |
1742612 | #https://leetcode.com/problems/next-greater-element-i/submissions/
# Approach :
# We need to find the next greater element than the current element which is in the right side of the array
# Since Nums1 is a subset of Nums2 , we will be finding the next greater element in Nums2 and keep storing it in a Hashmap
# And return only those values which are are present in Nums1
# since we need the immediate next element greater than the current element
# Create a stack to store an element from the nums , compare the top of the stack with the elements from nums2
# IF the element from nums is greater than the top of the stack , then store this pair in a hashmap
# If the element from the nums is lesser than the top of the stack or if the stack is empty , append the element in the stack
# After the nums2 is iterated , check if there is any element remaining in the stack
# If there is any element remaining , it means that , these elements dont have any other element greater than it (to the right side of the array)
# So append these elements to the hashmap as keys with values -1
# Lastly , return the list of elements from the hashmap which are present in nums1 i.e the required output
class Solution(object):
    def nextGreaterElement(self, nums1, nums2):
        """
        For each value in nums1 (a subset of nums2), return the first value
        to its right in nums2 that is strictly greater, or -1 if none.

        Precomputes every answer with a monotonic stack over nums2 in O(n).

        :type nums1: List[int]
        :type nums2: List[int]
        :rtype: List[int]
        """
        next_greater = {}
        pending = []
        for value in nums2:
            # Resolve every pending element smaller than the current value.
            while pending and pending[-1] < value:
                next_greater[pending.pop()] = value
            if not pending or pending[-1] > value:
                pending.append(value)
        # Anything still pending has no greater element to its right.
        while pending:
            next_greater[pending.pop()] = -1
        return [next_greater[value] for value in nums1]
162061 | <filename>tests/test_dtypes.py
import os
import unittest
from datetime import datetime
from painlessdb import Schema, PainlessDB
class TestDatabaseDataTypes(unittest.TestCase):
    """Exercise every Schema type for both model fields and static values."""

    def __init__(self, *args, **kwargs):
        super(TestDatabaseDataTypes, self).__init__(*args, **kwargs)
        # Scratch database file in the CWD; removed again by tearDown.
        self.db_path = os.path.join(os.getcwd(), 'test.pldb')
        # One model with a field of each type, plus one static per type.
        self.schema_data = {
            'TestModels': {
                'field_1': Schema.types.text(),
                'field_2': Schema.types.int(),
                'field_3': Schema.types.bool(),
                'field_4': Schema.types.float(),
                'field_5': Schema.types.list(),
                'field_6': Schema.types.dict(),
                'field_7': Schema.types.datetime(),
                'field_8': Schema.types.text(),
            },
            'static_1': Schema.types.text(),
            'static_2': Schema.types.int(),
            'static_3': Schema.types.bool(),
            'static_4': Schema.types.float(),
            'static_5': Schema.types.list(),
            'static_6': Schema.types.dict(),
            'static_7': Schema.types.datetime()
        }
    def tearDown(self):
        """Delete the scratch database between tests."""
        if os.path.exists(self.db_path):
            os.remove(self.db_path)
    def test_type_returns(self):
        """Values written via create()/update() must round-trip unchanged."""
        database = PainlessDB(file_path_name=self.db_path, schema_data=self.schema_data)
        dt_obj = datetime.now()
        database.create('TestModels', fields=database.fields(
            field_1='This is a text',
            field_2=78,
            field_3=True,
            field_4=12.6812,
            field_5=['apple', 'berry', 'banana'],
            field_6={'a': 1, 'b': [1, 2, '3'], 'c': True},
            field_7=dt_obj,
        ))
        data = database.get('TestModels', where=database.where(id=1), multiple=False)
        self.assertEqual(data.field_1, 'This is a text')
        self.assertEqual(data.field_2, 78)
        self.assertEqual(data.field_3, True)
        self.assertEqual(data.field_4, 12.6812)
        self.assertEqual(data.field_5, ['apple', 'berry', 'banana'])
        self.assertEqual(data.field_6, {'a': 1, 'b': [1, 2, '3'], 'c': True})
        self.assertEqual(data.field_7, dt_obj)
        dt_obj = datetime.now()
        database.update('static_1', value='just a text')
        database.update('static_2', value=120)
        database.update('static_3', value=True)
        database.update('static_4', value=3.8129)
        database.update('static_5', value=['120B', '129D', 12, False, {'as': [1, 2, 'b']}])
        database.update('static_6', value={'a1': 1, 'a2': False, 'l': ['b', {'a': [1, 2, 3]}]})
        database.update('static_7', value=dt_obj)
        self.assertEqual(database.get('static_1').value, 'just a text')
        self.assertEqual(database.get('static_2').value, 120)
        self.assertEqual(database.get('static_3').value, True)
        self.assertEqual(database.get('static_4').value, 3.8129)
        self.assertEqual(database.get('static_5').value, ['120B', '129D', 12, False, {'as': [1, 2, 'b']}])
        self.assertEqual(database.get('static_6').value, {'a1': 1, 'a2': False, 'l': ['b', {'a': [1, 2, 3]}]})
        self.assertEqual(database.get('static_7').value, dt_obj)
    def test_field_defaults(self):
        """Unset fields and statics must come back as their type defaults."""
        database = PainlessDB(file_path_name=self.db_path, schema_data=self.schema_data)
        database.create('TestModels', fields=database.fields(field_8='This is a dummy field 8 text'))
        data = database.get('TestModels', where=database.where(id=1), multiple=False)
        self.assertEqual(data.field_1, '')
        self.assertEqual(data.field_2, 0)
        self.assertEqual(data.field_3, False)
        self.assertEqual(data.field_4, 0.0)
        self.assertEqual(data.field_5, [])
        self.assertEqual(data.field_6, {})
        self.assertEqual(data.field_7, None)
        self.assertEqual(database.get('static_1').value, '')
        self.assertEqual(database.get('static_2').value, 0)
        self.assertEqual(database.get('static_3').value, False)
        self.assertEqual(database.get('static_4').value, 0.0)
        self.assertEqual(database.get('static_5').value, [])
        self.assertEqual(database.get('static_6').value, {})
        self.assertEqual(database.get('static_7').value, None)
if __name__ == '__main__':
    unittest.main()
| StarcoderdataPython |
65426 | <gh_stars>10-100
# Copyright (c) 2020 NVIDIA Corporation. All rights reserved.
# This work is licensed under the NVIDIA Source Code License-NC
# See LICENSE.txt for details
#
# Author: <NAME> (<EMAIL>)
from __future__ import absolute_import
from __future__ import division
import torch
import torch.nn as nn
class CrossEntropyLabelSmooth(nn.Module):
    """Cross-entropy loss with label-smoothing regularization.

    Reference:
        Szegedy et al. Rethinking the Inception Architecture for Computer
        Vision. CVPR 2016.

    The hard one-hot target is softened as y = (1 - epsilon) * y + epsilon / K.

    Args:
        num_classes (int): number of classes K.
        epsilon (float): smoothing weight.
        use_gpu (bool): move the smoothed targets to CUDA before use.
    """

    def __init__(self, num_classes, epsilon=0.1, use_gpu=True):
        super(CrossEntropyLabelSmooth, self).__init__()
        self.num_classes = num_classes
        self.epsilon = epsilon
        self.use_gpu = use_gpu
        self.logsoftmax = nn.LogSoftmax(dim=1)

    def forward(self, inputs, targets):
        """Compute the smoothed cross-entropy.

        Args:
            inputs: raw prediction logits, shape (batch_size, num_classes).
            targets: ground-truth class indices, shape (batch_size,).
        """
        log_probs = self.logsoftmax(inputs)
        # Build the one-hot matrix on CPU, as the original implementation did.
        one_hot = torch.zeros(log_probs.size())
        one_hot.scatter_(1, targets.unsqueeze(1).data.cpu(), 1)
        if self.use_gpu:
            one_hot = one_hot.cuda()
        smoothed = (1 - self.epsilon) * one_hot + self.epsilon / self.num_classes
        return (-smoothed * log_probs).mean(0).sum()
| StarcoderdataPython |
187236 | import numpy as np
def sigmoid(x):
    """Logistic sigmoid: 1 / (1 + e^-x)."""
    return 1.0 / (1.0 + np.exp(-x))
def sigmoid_der(x):
    """Derivative of the sigmoid, given x = sigmoid(z): s * (1 - s).

    Fixes the previous implementation, which divided instead of multiplying
    (x / (1 - x) is not the sigmoid derivative).
    """
    return x * (1 - x)
def relu(x):
    """Rectified linear unit: x for positive inputs, otherwise 0."""
    return x if x > 0 else 0
def relu_der(x):
    """Derivative of ReLU: 1 on the positive side, 0 elsewhere."""
    return 1 if x > 0 else 0
def leaky_relu(x):
    """Leaky ReLU with negative-side slope 0.01."""
    return x if x >= 0 else 0.01 * x
def leaky_relu_der(x):
    """Derivative of leaky ReLU: 0.01 on the negative side, 1 elsewhere."""
    return 0.01 if x < 0 else 1
def swish(x):
    """Swish activation: x * sigmoid(x)."""
    return x * (1.0 / (1.0 + np.exp(-x)))
def swish_der(x):
    """Derivative of swish: e^x * (e^x + x + 1) / (e^x + 1)^2."""
    ex = np.exp(x)
    return ex * (ex + x + 1) / ((ex + 1) ** 2)
def identity(x):
    """Identity activation: return the input unchanged."""
    return x
3378504 | <gh_stars>0
"""
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
import numpy as np
import oneflow as flow
@unittest.skipIf(
    not flow.unittest.env.eager_execution_enabled(),
    ".numpy() doesn't work in lazy mode",
)
class TestEmbedding(flow.unittest.TestCase):
    """Check nn.Embedding lookup against a precomputed weight table."""

    def test_embedding(test_case):
        # 10 x 3 embedding table used to seed the layer deterministically.
        weight = np.array(
            [
                [0.68258786, 0.6957856, 1.1829041],
                [1.0154, -1.0616943, 0.50303376],
                [0.29679507, 0.65562993, 1.0424724],
                [-0.42980736, -0.35347632, -0.15600166],
                [0.6763601, -0.24286619, -2.0873115],
                [-0.13371214, -0.5589277, 1.9173933],
                [0.08762296, 1.0264007, -0.67938024],
                [0.32019204, -0.26137325, -1.3534237],
                [-1.1555519, -0.67776406, 0.27372134],
                [1.0615997, -0.59715784, 1.9855849],
            ],
            dtype=np.float32,
        )
        # Expected gathered rows for the two index sequences below.
        output = np.array(
            [
                [
                    [1.0154, -1.0616943, 0.50303376],
                    [0.29679507, 0.65562993, 1.0424724],
                    [0.6763601, -0.24286619, -2.0873115],
                    [-0.13371214, -0.5589277, 1.9173933],
                ],
                [
                    [0.6763601, -0.24286619, -2.0873115],
                    [-0.42980736, -0.35347632, -0.15600166],
                    [0.29679507, 0.65562993, 1.0424724],
                    [1.0615997, -0.59715784, 1.9855849],
                ],
            ],
            dtype=np.float32,
        )
        indices = flow.Tensor([[1, 2, 4, 5], [4, 3, 2, 9]], dtype=flow.int)
        m = flow.nn.Embedding(10, 3, _weight=flow.Tensor(weight))
        y = m(indices)
        test_case.assertTrue(np.array_equal(y.numpy(), output))
if __name__ == "__main__":
    unittest.main()
| StarcoderdataPython |
121955 | <reponame>MalikJordan/pyPOM1D
"""
This file is included in all files
"""
# THIS FILE IS INCLUDED IN ALL FILES
# THIS FILE IS INCLUDED IN ALL FILES
RELEASE = 'Created by BFM v. 5.1'
PATH_MAX = 255
# Fortran-style logical unit numbers kept for parity with the original code.
stderr = 0
stdout = 6
# HANDY FOR WRITING
def STDOUT(text):
    """Print *text* to standard output."""
    print(text)
def STDERR(text):
    # NOTE(review): currently prints to stdout, not sys.stderr — confirm
    # whether real stderr output is intended.
    print(text)
# STANDARD OUTPUT FOR PARALLEL COMPUTATION
def LEVEL0():
    STDERR('')
def LEVEL1():
STDERR(' ')
def LEVEL2():
STDERR(' ')
def LEVEL3():
STDERR(' ')
def LEVEL4():
STDERR(' ')
def FATAL():
STDERR('FATAL ERROR: ')
def LINE():
print('------------------------------------------------------------------------')
# SHAPE OF VARIABLES
POINT = 0
Z_SHAPE = 1
T_SHAPE = 2
XY_SHAPE = 3
XYT_SHAPE = 4
XYZT_SHAPE = 5
OCET_SHAPE = 6
SURFT_SHAPE = 7
BOTT_SHAPE = 8
G_SHAPE = 9
XYZ_SHAPE = 10
# CONSTANTS FOR AVERAGE COMPUTATIONS
INIT = 0
MEAN = 1
RESET = 2
ACCUMULATE = 10
# TO AVOID DIVIDING BY ZERO
SMALL = 1E-08
# WHAT PRECISION WE WILL USE IN THIS COMPILATION
_ZERO_ = 0.0
_ONE_ = 1.0
| StarcoderdataPython |
170963 | <filename>case/case.py
# Copyright 2021 Ringgaard Research ApS
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http:#www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""SLING case system"""
import requests
import datetime
import socket
import time
import urllib.parse
import sling
import sling.net
import sling.util
import sling.flags as flags
import sling.log as log
import services
import imgcache
import wikibase
flags.define("--port",
help="HTTP port",
default=8080,
type=int,
metavar="PORT")
flags.define("--number",
help="Checkpoint file for keeping track of new case numbers",
default=None,
metavar="FILE")
flags.define("--number_service",
help="Endpoint for assigning new case numbers",
default="https://ringgaard.com/case/new",
metavar="URL")
flags.define("--xrefs",
help="Cross-reference configuration file",
default="data/wiki/xrefs.sling",
metavar="FILE")
flags.define("--casedb",
help="database for shared cases",
default="case",
metavar="DB")
# Load services before parsing flags to allow services to define flags.
services.load()
flags.parse()
# Convert an ISO 8601 timestamp to unix epoch seconds.
def iso2ts(t):
  """Parse ISO 8601 string *t* into an int unix timestamp (None passes through).

  A trailing "Z" suffix is rewritten as "+00:00" for fromisoformat.
  """
  if t is None:
    return None
  if t.endswith("Z"):
    t = t[:-1] + "+00:00"
  parsed = datetime.datetime.fromisoformat(t)
  return int(parsed.timestamp())
# Convert unix epoch seconds to an RFC 7231 HTTP-date string.
def ts2rfc(t):
  """Format unix timestamp *t* as e.g. "Thu, 01 Jan 1970 00:00:00 GMT"."""
  parts = time.gmtime(t)
  return time.strftime("%a, %d %b %Y %H:%M:%S GMT", parts)
# Connect to case database.
casedb = sling.Database(flags.arg.casedb, "case.py")
# Initialize HTTP server.
app = sling.net.HTTPServer(flags.arg.port)
app.redirect("/", "/c")
# Add static files.
app.static("/common", "app", internal=True)
app.static("/case/app", "case/app")
app.static("/case/plugin", "case/plugin")
# Commons store.
commons = sling.Store()
# Frame role names used when reading/writing shared case files.
n_caseid = commons["caseid"]
n_modified = commons["modified"]
n_share = commons["share"]
xrefs = commons.load(flags.arg.xrefs)
commons.freeze()
# Checkpoint with next case number.
numbering = None
if flags.arg.number:
  numbering = sling.util.Checkpoint(flags.arg.number)
# Template for main page.
main_page_template = """<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<meta name=viewport content="width=device-width, initial-scale=1">
<link rel="icon" href="/common/image/appicon.ico" type="image/x-icon" />
<script type="module" src="/case/app/main.js"></script>
</head>
<body style="display: none;">
</body>
</html>""";
@app.route("/c")
def main_page(request):
  """Serve the single-page app shell for the case editor."""
  return main_page_template
@app.route("/case/new")
def new_case(request):
  """Allocate and return a new case number, or redirect to the remote
  numbering service when this instance does not assign numbers itself."""
  if numbering:
    # Get new case number.
    client = request["X-Forwarded-For"]
    caseid = numbering.checkpoint
    numbering.commit(caseid + 1)
    # Log new case requests with IP address.
    log.info("Assign case #%d to client %s" % (caseid, client))
    # Return the newly assigned case number.
    store = sling.Store(commons)
    return store.frame([(n_caseid, caseid)])
  elif flags.arg.number_service:
    # Redirect to remote case numbering service.
    return sling.net.HTTPRedirect(flags.arg.number_service)
  else:
    return 500
class CaseFile:
  """Value object pairing raw case-file content with its modification time."""

  def __init__(self, content, modified):
    self.modified = modified
    self.content = content
@sling.net.response(CaseFile)
def case_reponse(value, request, response):
  """Marshal a CaseFile value into the HTTP response.

  NOTE(review): the function name is misspelled ("reponse"); confirm
  nothing references it by name before renaming.
  """
  response.ct = "application/sling"
  response["Last-Modified"] = ts2rfc(value.modified)
  response.body = value.content
@app.route("/case/fetch")
def fetch_case(request):
  """Return a shared case file by its numeric id (404 when unknown)."""
  # Get case id.
  caseid = int(request.params()["id"][0])
  # Fetch case file from database.
  rec, ts = casedb.get(str(caseid))
  if rec is None: return 404;
  # Return case file.
  return CaseFile(rec, ts)
@app.route("/case/share", method="POST")
def share_case(request):
  """Store (share) or delete (unshare) a case in the shared-case database,
  depending on the "share" flag inside the posted case frame."""
  # Get shared case.
  client = request["X-Forwarded-For"]
  store = sling.Store(commons)
  casefile = request.frame(store);
  # Get case id.
  caseid = casefile[n_caseid]
  if caseid is None: return 400;
  # Get modification time.
  modified = casefile[n_modified];
  ts = iso2ts(modified)
  if ts is None: return 400;
  # Share or unshare.
  if casefile[n_share]:
    # Store case in database.
    casedb.put(str(caseid), request.body, version=ts)
    # Log case updates with IP address.
    log.info("Share case #%d version %d for client %s" % (caseid, ts, client))
  else:
    # Delete case from database.
    key = str(caseid)
    if key in casedb:
      casedb.delete(key)
    # Log case delete with IP address.
    log.info("Unshare case #%d version %d for client %s" %
            (caseid, ts, client))
@app.route("/case/service")
def service_request(request):
  """Dispatch /case/service/<name>/... requests to the named plugin service.

  NOTE(review): the /case/proxy handler below reuses the name
  `service_request`, shadowing this function at module level; both routes
  still work because registration happens at decoration time, but one of
  them should be renamed.
  """
  # Get service name.
  service = request.path
  if service.startswith("/"): service = service[1:]
  if "/" in service: service = service[:service.find("/")]
  # Let service process request.
  log.info(service, "request", request.path)
  return services.process(service, request)
# Response headers that must not be forwarded verbatim by the proxy.
non_proxy_headers = set([
  "connection",
  "content-length",
  "content-encoding",
  "content-security-policy",
  "transfer-encoding",
])
# Hostnames already vetted as non-private; avoids re-resolving per request.
checked_hostnames = set()
@app.route("/case/proxy")
def service_request(request):
  """Fetch an external URL on behalf of the client and relay the response.

  NOTE(review): this redefines `service_request` (also used for the
  /case/service handler above); rename one of them.  The private-network
  guard below rejects 10.*, 192.168.* and 127.* targets only; it does not
  cover other private ranges (e.g. 172.16/12) — confirm that is acceptable.
  """
  # Get URL.
  url = request.params()["url"][0]
  # Check that request is not for local network.
  addr = urllib.parse.urlsplit(url)
  if addr.hostname not in checked_hostnames:
    ipaddr = socket.gethostbyname(addr.hostname)
    if ipaddr.startswith("10."): return 403
    if ipaddr.startswith("192.168."): return 403
    if ipaddr.startswith("127."): return 403
    checked_hostnames.add(addr.hostname)
  # Set up request headers.
  headers = {}
  ua = request["XUser-Agent"]
  if ua: headers["User-Agent"] = ua
  cookies = None
  cookie = request["XCookie"]
  if cookie:
    delim = cookie.find("=")
    if delim != -1:
      cookies = {cookie[:delim]: cookie[delim + 1:]}
  # Forward request.
  log.info("Proxy request for", url, headers, cookies)
  r = requests.get(url, headers=headers, cookies=cookies)
  # Relay back response.
  response = sling.net.HTTPResponse()
  response.status = r.status_code
  response.body = r.content
  response.headers = []
  for key, value in r.headers.items():
    if key.lower() in non_proxy_headers: continue
    if key == "Set-Cookie": key = "XSet-Cookie"
    response.headers.append((key, value))
  log.info("Return", len(response.body), "bytes")
  return response
@app.route("/case/xrefs")
def xrefs_request(request):
  """Return the preloaded cross-reference configuration frame."""
  return xrefs
@app.route("/media")
def media_request(request):
  """Dummy media service: always redirect to the original (decoded) URL."""
  # Dummy media service that always redirects to the original url.
  return sling.net.HTTPRedirect(urllib.parse.unquote(request.path[1:]))
@app.route("/case/cacheimg")
def cache_images_request(request):
  """Kick off caching of the images referenced by a shared case (404 when
  the case id is unknown)."""
  # Get case id.
  caseid = int(request.params()["id"][0])
  # Fetch case file from database.
  rec, ts = casedb.get(str(caseid))
  if rec is None: return 404;
  store = sling.Store(commons)
  casefile = store.parse(rec)
  # Start image caching.
  print("Start image caching for case", caseid)
  return imgcache.start_image_caching(casefile)
@app.route("/case/wikibase", methods=["GET", "POST"])
def wikibase_request(request):
  """Delegate wikibase requests to the wikibase module."""
  return wikibase.handle(request)
# Initialize services.
services.init()
# Run HTTP server.
log.info("HTTP server listening on port", flags.arg.port)
# app.run() blocks until shutdown is requested.
app.run()
log.info("Shutdown.")
class DummyMapping:
    """
    Minimal mapping facade: every key lookup yields the same fixed value.
    """

    def __init__(self, value):
        self._value = value

    def __getitem__(self, key):
        # The key is deliberately ignored; all lookups return the constant.
        return self._value
1747732 | <filename>app/lib/message_bus.py
#! /usr/bin/env python
# _*_coding:utf-8 -*_
from queue import Queue, Empty
from threading import *
def singleton(cls):
    """Class decorator caching one instance per decorated class.

    The first call's arguments construct the instance; later calls return
    the cached object and ignore their arguments.
    """
    instances = {}

    def get_instance(*args, **kwargs):
        if cls not in instances:
            instances[cls] = cls(*args, **kwargs)
        return instances[cls]

    return get_instance
@singleton
class MessageBus(object):
    """Thread-backed publish/subscribe message bus (process-wide singleton)."""

    def __init__(self):
        """Initialize the event manager."""
        # Queue of pending messages.
        self.__msg_queue = Queue()
        # Whether the dispatch loop is running.
        self.__active = False
        self.__queue_lock = Lock()
        # Background dispatch thread.
        self.__thread = Thread(target=self.___run)
        # Maps a message subject to the list of handler callables
        # subscribed to it (one subject, many handlers).
        self.__handlers = {}
    def ___run(self):
        """Dispatch loop: pull messages off the queue while active."""
        while self.__active:
            try:
                # self.__queue_lock.acquire(timeout=0.1)
                # Block up to 0.1 s waiting for the next message, so the
                # loop can notice a stop() request promptly.
                msg = self.__msg_queue.get(block=True, timeout=0.1)
                # self.__queue_lock.release()
                self.___msg_process(msg)
            except Empty:
                pass
    def ___msg_process(self, msg):
        """Deliver *msg* to every handler subscribed to its subject."""
        # Check whether any handler is listening for this subject.
        if msg.subject in self.__handlers:
            # Hand the message to each handler on its own thread.
            for handler in self.__handlers[msg.subject]:
                Thread(target=handler, args=(msg, )).start()
    def start(self):
        """Start the dispatch thread."""
        # Mark the manager active before launching the loop.
        self.__active = True
        self.__thread.daemon = True
        # Launch the dispatch thread.
        self.__thread.start()
    def stop(self):
        """Stop the dispatch loop (the thread exits after its next timeout)."""
        # Mark the manager inactive.
        self.__active = False
        # Joining the dispatch thread is intentionally skipped:
        # self.__thread.join()
    def add_msg_listener(self, subject, handler):
        """
        Subscribe *handler* to messages with the given *subject*.
        :param subject: message subject, a string
        :param handler: callable invoked with the message
        :return:
        """
        # Fetch (or lazily create) the handler list for this subject.
        try:
            handler_list = self.__handlers[subject]
        except KeyError:
            handler_list = []
            self.__handlers[subject] = handler_list
        # Register the handler only once per subject.
        if handler not in handler_list:
            handler_list.append(handler)
    def remove_msg_listener(self, subject, handler):
        """
        Unsubscribe *handler* from *subject*; missing entries are ignored.
        :param subject: message subject, a string
        :param handler: callable previously registered
        :return:
        """
        try:
            self.__handlers[subject].remove(handler)
        except:
            pass
    def send_msg(self, msg):
        """
        Publish a message by enqueueing it for the dispatch thread.
        """
        self.__msg_queue.put(msg)
class Message:
    """
    Envelope passed through the MessageBus.
    """

    def __init__(self, subject=None):
        # Subject string used by the bus to route to handlers.
        self.subject = subject
        # Free-form payload dictionary.
        self.data = {}
if __name__ == '__main__':
    # Smoke test: constructing the singleton bus.
    msg_bus = MessageBus()
3353751 | <filename>tests/test_fastapi/test_m2m_forwardref.py
from typing import List, Optional
import databases
import pytest
import sqlalchemy
from fastapi import FastAPI
from pydantic.schema import ForwardRef
from starlette import status
from starlette.testclient import TestClient
import ormar
app = FastAPI()
from tests.settings import DATABASE_URL
database = databases.Database(DATABASE_URL, force_rollback=True)
metadata = sqlalchemy.MetaData()
app.state.database = database
@app.on_event("startup")
async def startup() -> None:
    """Connect the shared database when the app starts."""
    database_ = app.state.database
    if not database_.is_connected:
        await database_.connect()
@app.on_event("shutdown")
async def shutdown() -> None:
    """Disconnect the shared database on app shutdown."""
    database_ = app.state.database
    if database_.is_connected:
        await database_.disconnect()
class BaseMeta(ormar.ModelMeta):
    # Shared Meta base wiring all models to one database/metadata pair.
    database = database
    metadata = metadata
# Forward references let Country refer to City (and to itself) before those
# classes exist; resolved below via Country.update_forward_refs().
CityRef = ForwardRef("City")
CountryRef = ForwardRef("Country")
# models.py
class Country(ormar.Model):
    class Meta(BaseMeta):
        tablename = "countries"
    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=128, unique=True)
    iso2: str = ormar.String(max_length=3)
    iso3: str = ormar.String(max_length=4, unique=True)
    population: int = ormar.Integer(maximum=10000000000)
    demonym: str = ormar.String(max_length=128)
    native_name: str = ormar.String(max_length=128)
    capital: Optional[CityRef] = ormar.ForeignKey(  # type: ignore
        CityRef, related_name="capital_city", nullable=True
    )
    borders: List[Optional[CountryRef]] = ormar.ManyToMany(  # type: ignore
        CountryRef, nullable=True, skip_reverse=True
    )
class City(ormar.Model):
    class Meta(BaseMeta):
        tablename = "cities"
    id: int = ormar.Integer(primary_key=True)
    name: str = ormar.String(max_length=128)
    country: Country = ormar.ForeignKey(
        Country, related_name="cities", skip_reverse=True
    )
Country.update_forward_refs()
@pytest.fixture(autouse=True, scope="module")
def create_test_database():
    """Create all tables for the module and drop them afterwards."""
    engine = sqlalchemy.create_engine(DATABASE_URL)
    metadata.create_all(engine)
    yield
    metadata.drop_all(engine)
@app.post("/", response_model=Country, status_code=status.HTTP_201_CREATED)
async def create_country(country: Country):  # if this is ormar
    """Insert (or update) the posted country and return it."""
    result = await country.upsert()  # it's already initialized as ormar model
    return result
def test_payload():
    """POST a country payload and verify the 201 response round-trips."""
    client = TestClient(app)
    with client as client:
        payload = {
            "name": "Thailand",
            "iso2": "TH",
            "iso3": "THA",
            "population": 23123123,
            "demonym": "Thai",
            "native_name": "Thailand",
        }
        resp = client.post("/", json=payload, headers={"application-type": "json"})
        # print(resp.content)
        assert resp.status_code == 201
        resp_country = Country(**resp.json())
        assert resp_country.name == "Thailand"
1600652 | """
Tasks Executor
==============
Responsible for executing tasks code, serialization, process separation, privileges management, passing the
already prepared context, processing the results, processing declarative code
Responsibilities are split into separate sub-layers.
"""
| StarcoderdataPython |
3325457 | <reponame>tatuanb/monai_V1<filename>tests/test_prepare_batch_extra_input.py
# Copyright 2020 - 2021 MONAI Consortium
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import torch
from parameterized import parameterized
from monai.engines import PrepareBatchExtraInput, SupervisedEvaluator
from tests.utils import assert_allclose
# Each case pairs PrepareBatchExtraInput kwargs with the pred dict expected
# back from the echoing TestNet below.
TEST_CASE_0 = [
    {"extra_keys": "extra1"},
    {"x": torch.tensor([1, 2]), "t1": torch.tensor([5, 6]), "t2": None, "t3": None},
]
# A list of extra keys maps them positionally onto t1, t2, ...
TEST_CASE_1 = [
    {"extra_keys": ["extra1", "extra3"]},
    {"x": torch.tensor([1, 2]), "t1": torch.tensor([5, 6]), "t2": "test", "t3": None},
]
# A dict maps network kwarg name -> batch key explicitly.
TEST_CASE_2 = [
    {"extra_keys": {"t1": "extra2", "t2": "extra3", "t3": "extra1"}},
    {"x": torch.tensor([1, 2]), "t1": 16, "t2": "test", "t3": torch.tensor([5, 6])},
]
class TestNet(torch.nn.Module):
    """Identity network: echoes its input and extras back as a dict."""

    def forward(self, x: torch.Tensor, t1=None, t2=None, t3=None):
        result = {"x": x}
        result["t1"] = t1
        result["t2"] = t2
        result["t3"] = t3
        return result
class TestPrepareBatchExtraInput(unittest.TestCase):
    """Verify PrepareBatchExtraInput feeds extra batch entries to the net."""

    @parameterized.expand([TEST_CASE_0, TEST_CASE_1, TEST_CASE_2])
    def test_content(self, input_args, expected_value):
        """Run one evaluator step and compare the echoed pred dict."""
        device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
        dataloader = [
            {
                "image": torch.tensor([1, 2]),
                "label": torch.tensor([3, 4]),
                "extra1": torch.tensor([5, 6]),
                "extra2": 16,
                "extra3": "test",
            }
        ]
        # set up engine
        evaluator = SupervisedEvaluator(
            device=device,
            val_data_loader=dataloader,
            epoch_length=1,
            network=TestNet(),
            non_blocking=True,
            prepare_batch=PrepareBatchExtraInput(**input_args),
            decollate=False,
        )
        evaluator.run()
        output = evaluator.state.output
        assert_allclose(output["image"], torch.tensor([1, 2], device=device))
        assert_allclose(output["label"], torch.tensor([3, 4], device=device))
        # TestNet echoes its inputs, so pred mirrors the expected mapping.
        for k, v in output["pred"].items():
            if isinstance(v, torch.Tensor):
                assert_allclose(v, expected_value[k].to(device))
            else:
                self.assertEqual(v, expected_value[k])
if __name__ == "__main__":
    unittest.main()
| StarcoderdataPython |
26091 | <reponame>Emrys-Merlin/monitor_airquality<filename>setup.py
from setuptools import find_packages, setup

# NOTE(review): removed `from importlib.metadata import entry_points` -- it was
# never used (the `entry_points` below is a plain keyword argument of setup()).
setup(
    name='monitor_airquality',
    version='0.1',
    url='',
    author='<NAME>',
    author_email='<EMAIL>',
    description='Measure airquality using some sensors connected to a raspberry pi',
    packages=find_packages(),
    # No runtime dependencies declared yet.
    install_requires=[],
    # Installs a `monitor_airquality` console command mapped to main().
    entry_points={
        'console_scripts': ['monitor_airquality=monitor_airquality.main:main']
    }
)
| StarcoderdataPython |
1721962 | #!/usr/bin/env python
#-*- coding: utf-8 -*-
import math
def equal(x, y, tol=0.000000001):
    """Return True when x and y differ by less than tol.

    Bug fix: the original returned a ``(bool, message)`` tuple, which is always
    truthy, so every boolean use of this function (including point_equal below)
    silently passed.
    """
    return math.fabs(x - y) < tol


def point_equal(p1, p2, msg=None):
    """Assert that two points share the same dimension and coordinates.

    Raises AssertionError on any mismatch (the original referenced
    ``self.failureException`` at module level, which was a NameError).
    Returns True when the points are equal within tolerance.
    """
    if p1.dim != p2.dim:
        raise AssertionError(msg or 'points are not equal')
    for a, b in zip(p1.coords, p2.coords):
        if not equal(a, b):
            raise AssertionError(msg or 'points are not equal')
    return True
| StarcoderdataPython |
1731026 | <gh_stars>1-10
# Demonstrates local vs. global variable scope across functions.
A = 10  # module-level, readable from every function


def kirim():
    """Create a local B and publish C as a global, then print all three."""
    B = 15  # local: shadows the module-level B inside this function only
    global C
    C = 25
    # Bug fix: the label previously read "Nilai A,B,B" while printing A, B, C.
    print("1. Modul kirim : Nilai A,B,C :", A, B, C)


def terima():
    """Read A, B and C from module scope and print them."""
    print("2. Modul terima : Nilai A,B,C :", A, B, C)


B = 10  # module-level B: seen by terima(), shadowed inside kirim()
kirim()
terima()
89145 | <filename>module_utils/oracle/oci_wait_utils.py
# Copyright (c) 2019 Oracle and/or its affiliates.
# This software is made available to you under the terms of the GPL 3.0 license or the Apache 2.0 license.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Apache License v2.0
# See LICENSE.TXT for details.
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible.module_utils.oracle import oci_common_utils
try:
import oci
from oci.util import to_dict
HAS_OCI_PY_SDK = True
except ImportError:
HAS_OCI_PY_SDK = False
LIFECYCLE_STATE_WAITER_KEY = "LIFECYCLE_STATE_WAITER"
WORK_REQUEST_WAITER_KEY = "WORK_REQUEST_WAITER"
NONE_WAITER_KEY = "NONE_WAITER_KEY"
class Waiter:
    """Abstract interface: concrete waiters implement wait()."""

    def wait(self):
        # Subclasses must override; this base carries no waiting logic.
        message = "Expected to be implemented by the specific waiter classes."
        raise NotImplementedError(message)
class BaseWaiter(Waiter):
    """Shared wait() skeleton: subclasses supply the initial response, the
    completion predicate, and the final resource extraction."""

    def __init__(self, client, resource_helper, operation_response, wait_for_states):
        self.client = client
        self.operation_response = operation_response
        self.wait_for_states = wait_for_states
        self.resource_helper = resource_helper

    def get_initial_response(self):
        # Hook: the response object oci.wait_until() should start polling from.
        raise NotImplementedError(
            "Expected to be implemented by the specific waiter classes."
        )

    def get_evaluate_response_lambda(self):
        # Hook: predicate deciding when the wait is complete.
        raise NotImplementedError(
            "Expected to be implemented by the specific waiter classes."
        )

    def wait(self):
        """Poll until done and return the final resource.

        Skips waiting entirely (returning the raw operation response) when the
        Ansible module's ``wait`` parameter is falsy.
        """
        if not self.resource_helper.module.params.get("wait"):
            return self.operation_response
        wait_response = oci.wait_until(
            self.client,
            self.get_initial_response(),
            evaluate_response=self.get_evaluate_response_lambda(),
            max_wait_seconds=self.resource_helper.module.params.get(
                "wait_timeout", oci_common_utils.MAX_WAIT_TIMEOUT_IN_SECONDS
            ),
        )
        return self.get_resource_from_wait_response(wait_response)

    def get_resource_from_wait_response(self, wait_response):
        # Hook: map the final wait response to the value returned to the caller.
        raise NotImplementedError(
            "Expected to be implemented by the specific waiter classes."
        )
class LifecycleStateWaiterBase(BaseWaiter):
    """Waits by polling the resource until its lifecycle_state matches one of
    wait_for_states (case-insensitive)."""

    def __init__(self, client, resource_helper, operation_response, wait_for_states):
        self.client = client
        self.operation_response = operation_response
        self.wait_for_states = wait_for_states
        self.resource_helper = resource_helper

    def get_initial_response(self):
        # Poll the resource itself, not the original operation response.
        return self.resource_helper.get_resource()

    def get_evaluate_response_lambda(self):
        lowered_wait_for_states = [state.lower() for state in self.wait_for_states]
        return (
            lambda r: getattr(r.data, "lifecycle_state")
            and getattr(r.data, "lifecycle_state").lower() in lowered_wait_for_states
        )

    def get_resource_from_wait_response(self, wait_response):
        return wait_response.data


class LifecycleStateWaiter(LifecycleStateWaiterBase):
    """Waiter which waits on the lifecycle state of the resource"""

    # NOTE(review): this __init__ only forwards to the base class and could be
    # removed without changing behavior.
    def __init__(self, client, resource_helper, operation_response, wait_for_states):
        super(LifecycleStateWaiter, self).__init__(
            client, resource_helper, operation_response, wait_for_states
        )


class CreateOperationLifecycleStateWaiter(LifecycleStateWaiterBase):
    """Lifecycle-state waiter for create calls: the new resource's id comes
    from the create response rather than from the module parameters."""

    def __init__(self, client, resource_helper, operation_response, wait_for_states):
        super(CreateOperationLifecycleStateWaiter, self).__init__(
            client, resource_helper, operation_response, wait_for_states
        )

    def get_initial_response(self):
        """Fetch the newly created resource using the id from the create response."""
        identifier = self.operation_response.data.id
        if not identifier:
            self.resource_helper.module.fail_json(
                "Error getting the resource identifier."
            )
        try:
            id_orig = self.resource_helper.module.params[
                self.resource_helper.get_module_resource_id_param()
            ]
        except NotImplementedError:
            # Helper has no id parameter at all; fall back to a plain fetch.
            return self.resource_helper.get_resource()
        # Temporarily swap the module's id parameter to the new resource's id,
        # fetch the resource, then restore the original parameter value.
        self.resource_helper.module.params[
            self.resource_helper.get_module_resource_id_param()
        ] = identifier
        get_response = self.resource_helper.get_resource()
        self.resource_helper.module.params[
            self.resource_helper.get_module_resource_id_param()
        ] = id_orig
        return get_response
class WorkRequestWaiter(BaseWaiter):
    """Waits on the asynchronous work request referenced by the operation's
    ``opc-work-request-id`` response header."""

    def __init__(self, client, resource_helper, operation_response, wait_for_states):
        self.client = client
        self.resource_helper = resource_helper
        self.operation_response = operation_response
        self.wait_for_states = wait_for_states

    def get_initial_response(self):
        return self.client.get_work_request(
            self.operation_response.headers["opc-work-request-id"]
        )

    def get_evaluate_response_lambda(self):
        # Work requests expose "status" rather than "lifecycle_state".
        lowered_wait_for_states = [state.lower() for state in self.wait_for_states]
        return (
            lambda r: getattr(r.data, "status")
            and getattr(r.data, "status").lower() in lowered_wait_for_states
        )

    def get_resource_from_wait_response(self, wait_response):
        # The work request itself is not the resource; re-fetch the resource.
        get_response = self.resource_helper.get_resource()
        return get_response.data


class CreateOperationWorkRequestWaiter(WorkRequestWaiter):
    """Work-request waiter for create calls: extracts the new resource's id
    from the completed work request's affected-resources list."""

    def __init__(self, client, resource_helper, operation_response, wait_for_states):
        super(CreateOperationWorkRequestWaiter, self).__init__(
            client, resource_helper, operation_response, wait_for_states
        )

    def get_resource_from_wait_response(self, wait_response):
        entity_type = oci_common_utils.get_entity_type(
            self.resource_helper.resource_type
        )
        identifier = None
        # No break: the identifier of the *last* matching entity wins.
        for resource in wait_response.data.resources:
            if (
                hasattr(resource, "entity_type")
                and getattr(resource, "entity_type") == entity_type
            ):
                identifier = resource.identifier
        if not identifier:
            self.resource_helper.module.fail_json(
                msg="Could not get the resource identifier from work request response {0}".format(
                    to_dict(wait_response.data)
                )
            )
        get_response = self.resource_helper.get_get_fn()(identifier)
        return get_response.data


class NoneWaiter(Waiter):
    """Waiter which does not wait: returns the operation response data as-is."""

    def __init__(self, client, resource_helper, operation_response, wait_for_states):
        self.client = client
        self.resource_helper = resource_helper
        self.operation_response = operation_response
        self.wait_for_states = wait_for_states

    def wait(self):
        return self.operation_response.data
class AuditConfigurationLifecycleStateWaiter(LifecycleStateWaiter):
    """Custom waiter for the audit configuration resource, which cannot be
    waited on via lifecycle state or work request."""

    def __init__(self, client, resource_helper, operation_response, wait_for_states):
        super(AuditConfigurationLifecycleStateWaiter, self).__init__(
            client, resource_helper, operation_response, wait_for_states
        )

    def get_evaluate_response_lambda(self):
        # The update operation currently returns a work request id but the AuditClient currently does not support
        # waiting for the work request. So wait until the configuration is updated by checking the value.
        return (
            lambda r: r.data.retention_period_days
            == self.resource_helper.module.params.get("retention_period_days")
        )


# A map specifying the overrides for the default waiters.
# Key is a tuple consisting spec name, resource type and the operation and the value is the waiter class.
# For ex: ("waas", "waas_policy", oci_common_utils.UPDATE_OPERATION_KEY) -> CustomWaasWaiterClass
_WAITER_OVERRIDE_MAP = {
    # The audit update operation currently returns a work request id but the AuditClient currently does not support
    # waiting for the work request. So inject NoneWaiter and customize it to manually wait on the update condition.
    ("audit", "configuration", oci_common_utils.UPDATE_OPERATION_KEY): NoneWaiter
}


def get_waiter_override(namespace, resource_type, operation):
    """Return the custom waiter class if any for the resource and operation. Else return None."""
    waiter_override_key = (namespace, resource_type, operation)
    if waiter_override_key in _WAITER_OVERRIDE_MAP:
        return _WAITER_OVERRIDE_MAP.get(waiter_override_key)
    # check if an override exists for ANY_OPERATION_KEY. This is helpful if we need a custom waiter for all(any)
    # resource operations
    waiter_override_key = (namespace, resource_type, oci_common_utils.ANY_OPERATION_KEY)
    if waiter_override_key in _WAITER_OVERRIDE_MAP:
        return _WAITER_OVERRIDE_MAP.get(waiter_override_key)
    return None
def get_waiter(
    waiter_type, operation, client, resource_helper, operation_response, wait_for_states
):
    """Return appropriate waiter object based on type and the operation."""
    # First check if there is any custom override for the waiter class. If exists, use it.
    waiter_override_class = get_waiter_override(
        resource_helper.namespace, resource_helper.resource_type, operation
    )
    if waiter_override_class:
        return waiter_override_class(
            client, resource_helper, operation_response, wait_for_states
        )
    if waiter_type == LIFECYCLE_STATE_WAITER_KEY:
        # Create operations need the id from the create response to poll.
        if operation == oci_common_utils.CREATE_OPERATION_KEY:
            return CreateOperationLifecycleStateWaiter(
                client, resource_helper, operation_response, wait_for_states
            )
        return LifecycleStateWaiter(
            client, resource_helper, operation_response, wait_for_states
        )
    elif waiter_type == WORK_REQUEST_WAITER_KEY:
        if operation == oci_common_utils.CREATE_OPERATION_KEY:
            return CreateOperationWorkRequestWaiter(
                client, resource_helper, operation_response, wait_for_states
            )
        return WorkRequestWaiter(
            client, resource_helper, operation_response, wait_for_states
        )
    # Unknown waiter type: do not wait at all.
    return NoneWaiter(client, resource_helper, operation_response, wait_for_states)


def call_and_wait(
    call_fn,
    call_fn_args,
    call_fn_kwargs,
    waiter_type,
    operation,
    waiter_client,
    resource_helper,
    wait_for_states,
):
    """Call the given function and wait until the operation is completed and return the resource."""
    # The API call itself is retried with backoff; waiting happens afterwards.
    operation_response = oci_common_utils.call_with_backoff(
        call_fn, *call_fn_args, **call_fn_kwargs
    )
    waiter = get_waiter(
        waiter_type,
        operation,
        waiter_client,
        resource_helper,
        operation_response=operation_response,
        wait_for_states=wait_for_states,
    )
    return waiter.wait()
| StarcoderdataPython |
3271076 | import unittest
from logic.cidr_calculator import CIDRCalculator
from models.CustomExceptions import *
class TestIPRangeExceptions(unittest.TestCase):
    """Invalid-input tests for CIDRCalculator.calculateIPRange.

    Malformed IP strings must raise BadArgumentExcepton; empty/zero arguments
    must raise NullArgumentException.  The original repeated an identical
    arrange/act/assert block in all fourteen tests; the shared logic now lives
    in one helper so each case is a single line (test names are unchanged).
    """

    def _assert_ip_range_raises(self, exception_type, ip, cidr_range):
        """Assert calculateIPRange(ip, cidr_range) raises exception_type."""
        cidr = CIDRCalculator()
        self.assertRaises(exception_type, cidr.calculateIPRange, ip, cidr_range)

    # --- malformed IP strings -> BadArgumentExcepton ---

    def test_calculate_ip_range_when_ip_over_255_all(self):
        self._assert_ip_range_raises(BadArgumentExcepton, '256.256.256.256', 32)

    def test_calculate_ip_range_when_ip_over_255_middle(self):
        self._assert_ip_range_raises(BadArgumentExcepton, '10.256.150.4', 32)

    def test_calculate_ip_range_when_ip_over_negative_number(self):
        self._assert_ip_range_raises(BadArgumentExcepton, '74.192.-2.0', 32)

    def test_calculate_ip_range_when_ip_decimal_number(self):
        self._assert_ip_range_raises(BadArgumentExcepton, '74.192.0.5.0', 32)

    def test_calculate_ip_range_when_ip_has_empty(self):
        self._assert_ip_range_raises(BadArgumentExcepton, '74.192..0', 32)

    def test_calculate_ip_range_when_ip_is_incomplete(self):
        self._assert_ip_range_raises(BadArgumentExcepton, '74.192.0', 32)

    def test_calculate_ip_range_when_ip_has_additional_dot(self):
        self._assert_ip_range_raises(BadArgumentExcepton, '256.150.120.4.', 32)

    def test_calculate_ip_range_when_ip_has_more_than_4_parts(self):
        self._assert_ip_range_raises(BadArgumentExcepton, '256.150.120.4.0', 32)

    def test_calculate_ip_range_when_ip_over_255_start(self):
        self._assert_ip_range_raises(BadArgumentExcepton, '256.150.120.4', 32)

    def test_calculate_ip_range_when_ip_over_255_end(self):
        self._assert_ip_range_raises(BadArgumentExcepton, '74.150.120.256', 32)

    # --- empty/zero arguments -> NullArgumentException ---

    def test_calculate_ip_range_when_ip_is_empty(self):
        self._assert_ip_range_raises(NullArgumentException, '', 32)

    def test_calculate_ip_range_when_cidr_is_0(self):
        self._assert_ip_range_raises(NullArgumentException, '192.168.3.11', 0)

    def test_calculate_ip_range_when_ip_and_cidr_are_empty_and_0(self):
        self._assert_ip_range_raises(NullArgumentException, '', 0)

    def test_calculate_ip_range_when_cidrRange_0(self):
        self._assert_ip_range_raises(NullArgumentException, '192.168.3.11', 0)
# Bug fix: the guard compared against "main", so the suite never ran as a script.
if __name__ == "__main__":
    unittest.main()
| StarcoderdataPython |
1683067 | <reponame>Joshua-Barawa/My-Photos
from django.contrib.postgres.fields import ArrayField
from django.db.models import Subquery
from django.utils.functional import cached_property
class ArraySubquery(Subquery):
    """Subquery expression wrapped in ARRAY(...) so a multi-row subquery can be
    selected as a single PostgreSQL array value."""

    template = "ARRAY(%(subquery)s)"

    # NOTE(review): this __init__ only forwards to Subquery and could be removed.
    def __init__(self, queryset, **kwargs):
        super().__init__(queryset, **kwargs)

    @cached_property
    def output_field(self):
        # Element type is the subquery's own output field, wrapped in ArrayField.
        return ArrayField(self.query.output_field)
| StarcoderdataPython |
29283 |
# Bug fix: removed a stray editor command (":q") that made this file a
# SyntaxError, and a bare `print` name reference left over from Python 2.
print('This is Naveen')
| StarcoderdataPython |
1682716 | import textwrap
import sys
from datetime import datetime
# Preamble emitted at the top of every generated zoneinfo module.
HEADER = """\
from zic.classes import *
from datetime import *
"""

# The tz-database source files compiled by the `--all` command-line mode.
RAW_FILES = [
    'africa', 'antarctica', 'asia', 'australasia',
    'europe', 'northamerica', 'southamerica'
]
def lines(input):
    """Yield the meaningful lines of *input*: stripped, with blank lines,
    full-line comments and trailing comments removed."""
    stripped = (raw_line.strip() for raw_line in input)
    return (
        strip_comments(line)
        for line in stripped
        if line and not line.startswith('#')
    )


def strip_comments(line):
    """Drop an unquoted trailing '#' comment from *line*; a '#' inside double
    quotes is preserved."""
    in_quotes = False
    for index, char in enumerate(line):
        if char == '"':
            in_quotes = not in_quotes
        elif char == "#" and not in_quotes:
            return line[:index].strip()
    return line
# Code template for one Observance(...) entry in a generated Zone definition;
# filled in by format_observance() below.
OBSERVANCE_TEMPLATE = """\
Observance(gmtoff={},
rules={},
format='{}',
until={}),
"""
def compile(infile, outfile=None):
    """Compile tz source *infile* to Python, writing to *outfile* or stdout.

    NOTE: intentionally shadows the ``compile`` builtin within this module.
    """
    with open(infile) as source:
        if outfile is None:
            compile_stream(source, sys.stdout)
            return
        with open(outfile, 'w') as target:
            compile_stream(source, target)
def compile_stream(input, output, header=HEADER):
    """Parse tz-database lines from *input*, write generated Python to *output*.

    'Rule' lines are grouped by rule name; a 'Zone' line opens a zone whose
    continuation lines are collected until the line without an UNTIL field.
    """
    output.write(header)
    observances = state = None
    zones = {}
    rules = {}
    for line in lines(input):
        fields = line.split()
        if fields[0] == 'Zone':
            # Zone names like 'America/New_York' become nested dict levels.
            names = fields[1].split('/')
            z = zones
            for name in names:
                z = z.setdefault(name, {})
            observances = z.setdefault('observances', [])
            state = 'Zone'
            del fields[:2]
        elif fields[0] == 'Rule':
            rules.setdefault(fields[1], []).append(fields[2:])
        if state == 'Zone':
            gmtoff, zone_rules, format = fields[:3]
            until = format_until(fields[3:])
            if until is None:
                # No UNTIL field: this is the zone's final observance line.
                state = None
            observances.append(
                format_observance(gmtoff, zone_rules, format, until))
    print_rules(rules, file=output)
    print_zones(zones, file=output)


# Code template for one Rule(...) entry; filled in by format_rule() below.
RULE_TEMPLATE = ('Rule({}, {}, {}, {}, {},\n'
                 ' at={},\n'
                 ' save={}, letters={!r})')
def format_rule(begin, end, type, in_month, on, at, save, letters):
    """Render one tz Rule line's fields as a Rule(...) source snippet."""
    begin = int(begin)
    # tz TO field: 'only' means a single year, 'max' means open-ended
    # (represented as year 10000); the end year is made exclusive.
    if end == 'only':
        end = begin + 1
    elif end == 'max':
        end = 10000
    else:
        end = int(end) + 1
    if type == '-':
        type = None
    if letters == '-':
        letters = ''
    at = format_at(at)
    save = format_time(save)
    return RULE_TEMPLATE.format(begin, end, type, in_month,
                                on, at, save, letters)
# strptime formats indexed by the number of ':' separators in the time string.
TIME_FORMATS = ['%H', '%H:%M', "%H:%M:%S"]

# tz time-suffix letters -> timekeeping interpretation (used by format_at).
TIME_TYPES = {
    'w': 'wall',
    'u': 'utc',
    'g': 'utc',
    'z': 'utc',
    's': 'std',
}


def format_time(t):
    """Render a tz time string ('-', '2', '2:30', '2:30:15', '24:00', ...) as
    a timedelta(...) source snippet.

    Bug fix: the seconds template entry read ``'seconds={0.second)'`` with a
    mismatched brace, so any time carrying a seconds component raised during
    ``str.format``.
    """
    if t == '-':
        return 'timedelta(0)'
    if t.startswith('24'):
        # '24:00' denotes the end of the day.
        return 'timedelta(1)'
    n = t.count(':')
    fmt = TIME_FORMATS[n]
    t = datetime.strptime(t, fmt).time()
    args = ['hours={0.hour}', 'minutes={0.minute}', 'seconds={0.second}']
    template = 'timedelta(%s)' % ', '.join(args[:n+1])
    return template.format(t)
def format_at(at):
    """Render a tz AT field as a '(timedelta, kind)' tuple snippet, where kind
    is 'wall', 'std' or 'utc' per the optional one-letter suffix."""
    try:
        time_type = TIME_TYPES[at[-1]]
    except KeyError:
        # No recognised suffix letter: default to wall-clock time.
        time_type = 'wall'
    else:
        at = at[:-1]
    return '(%s, %r)' % (format_time(at), time_type)


def print_rules(rules, file):
    """Write one Rules subclass (name + rule list) per rule name to *file*."""
    prefix = ' ' * 8
    for name, lines in rules.items():
        file.write('class %s(Rules):\n'
                   ' name ="%s"\n'
                   ' rules = [\n' % (rules_name(name), name))
        for args in lines:
            rule = format_rule(*args)
            file.write(textwrap.indent(rule, prefix) + ',\n')
        file.write(' ]\n\n')
# timedelta keyword names matching positions in an H[:M[:S]] string.
TIME_UNITS = 'hours', 'minutes', 'seconds'


def format_until(until):
    """Render a zone UNTIL field: None when absent, a bare year as int,
    otherwise a tuple-literal snippet of the raw tokens."""
    if not until:
        return None
    if len(until) == 1:
        return int(until[0])
    return '(%s)' % ', '.join(repr(token) for token in until)


def format_delta(delta):
    """Render a (possibly negative) H[:M[:S]] offset as a timedelta snippet."""
    negative = delta.startswith('-')
    if negative:
        delta = delta[1:]
    parts = [
        '%s=%s' % (unit, int(value))
        for unit, value in zip(TIME_UNITS, delta.split(':'))
    ]
    prefix = '-' if negative else ''
    return '%stimedelta(%s)' % (prefix, ', '.join(parts))
def format_observance(gmtoff, rules, format, until):
    """Fill OBSERVANCE_TEMPLATE for one zone observance line."""
    if rules == '-':
        rules = None
    elif ':' in rules:
        # A literal time (e.g. '1:00') stays a quoted string, not a class name.
        rules = "'%s'" % rules
    else:
        rules = rules_name(rules)
    return OBSERVANCE_TEMPLATE.format(format_delta(gmtoff),
                                      rules, format, until)


def print_zones(zones, file, indent=0):
    """Write Zone classes to *file*, recursing through nested name components
    ('America/New_York') as nested plain namespace classes."""
    for name, info in sorted(zones.items()):
        try:
            observances = info['observances']
        except KeyError:
            # No observances at this level: it is a grouping namespace.
            file.write(indent * ' ' + 'class %s:\n' % name)
            print_zones(info, file, indent + 4)
        else:
            prefix = indent * ' '
            file.write(prefix + 'class %s(Zone):\n' % zone_name(name))
            file.write(prefix + ' name = %r\n' % name)
            file.write(prefix + ' observances = [\n')
            for observance in observances:
                file.write(textwrap.indent(observance, prefix + 8 * ' '))
            file.write(prefix + '%s]\n' % (4 * ' '))
def rules_name(name):
    """Turn a tz rule/zone name into a valid Python identifier ('-' -> '_')."""
    return '_'.join(name.split('-'))


# Zone names use the same sanitisation.
zone_name = rules_name
if __name__ == '__main__':
    # CLI: `zic --all` compiles every file in RAW_FILES from the raw/
    # directory; otherwise `zic infile [outfile]` compiles a single file.
    if len(sys.argv) < 2:
        print("Usage: zic infile [outfile]")
        sys.exit(1)
    if sys.argv[1] == '--all':
        for f in RAW_FILES:
            compile('raw/' + f, f + '.py')
    else:
        compile(*sys.argv[1:])
| StarcoderdataPython |
3285166 | <reponame>allexvip/bot
import logging
from aiogram import executor
from create_bot import dp
from db import sqlite_db
logging.basicConfig(level=logging.INFO)


async def on_startup(_):
    """Runs once when polling starts: announce readiness and open the DB."""
    print('Bot online')
    sqlite_db.sql_start()


# Handlers registered client -> admin -> other; presumably `other` is last so
# its catch-all handlers do not shadow the specific ones -- confirm in handlers/.
from handlers import client, admin, other

client.register_handlers_client(dp)
admin.register_handlers_admin(dp)
other.register_handlers_other(dp)

executor.start_polling(dp, skip_updates=True, on_startup=on_startup)
| StarcoderdataPython |
1691265 | <filename>TSIS_5/cc.py
import os
import os.path
import time
import shutil
from pathlib import Path
path = os.getcwd()
def Menu():
    """Print the top-level menu."""
    for option in ("1. Files", "2. Directories", "3. Exit"):
        print(option)


def DirMenuChoices():
    """Print the directory-operations submenu."""
    options = (
        "0. Back",
        "1. Content",
        "2. Rename Directory",
        "3. Number of files",
        "4. Number of directories",
        "5. Add file",
        "6. Add new directory",
    )
    for option in options:
        print(option)


def FileMenuChoices():
    """Print the file-operations submenu."""
    options = (
        "0. Back",
        "1. Delete file",
        "2. Rename File",
        "3. Add content",
        "4. Rewrite content",
        "5. Return to the parent directory",
    )
    for option in options:
        print(option)
def ContentOfDir():
    """List the entries of the directory currently selected by the main loop."""
    # curDir is a module-level global refreshed on every pass of the REPL loop.
    content = os.listdir(curDir)
    for i in content:
        print(i)


def delete():
    """Prompt for a file name and delete it from the working directory."""
    print('Write the name of file')
    try:
        file = str(input())
        os.remove(file)
        print('File deleted')
    except FileNotFoundError:
        print('File not found')


def rename():
    """Prompt for an existing file name and a new name, then rename it."""
    print("Enter name of file")
    try:
        source = str(input())
        print('Enter the new name')
        dest = str(input())
        os.rename(source, dest)
    except FileNotFoundError:
        print("File not found")


def return_to_parent():
    """Prompt for a full path and print its parent directory."""
    print("Enter full path")
    try:
        file = str(input())
        path = Path(file).parent  # local name; does not touch the global `path`
        print(path)
    except FileNotFoundError:
        print("File not found")


def add():
    """Prompt for a file name, then append one line of input to that file."""
    print('Enter the name of file to add data')
    try:
        file = str(input())
        # NOTE(review): `as file` rebinds the prompt variable to the handle.
        with open(file, 'a') as file:
            file.write(str(input()))
        print('Data added successfully')
        os.system("cls")  # NOTE(review): Windows-only screen clear
    except FileNotFoundError:
        print('File not found')


def rewrite():
    """Prompt for a file name, then overwrite it with one line of input."""
    print('Enter the name of file to rewrite data')
    try:
        file = str(input())
        with open(file, 'w') as file:
            file.write(str(input()))
        print('Data rewrited successfully')
    except FileNotFoundError:
        print('File not found')


def rename_dir():
    """Prompt for a directory name and a new name, then rename it."""
    print('Enter the name of directory')
    try:
        name = str(input())
        print('Enter the new name of directory')
        new = str(input())
        os.rename(name, new)
        print('Directory renamed successfully')
    except FileNotFoundError:
        print('Directory not found')
def number_of_files():
    """Print the number of regular files in `path`.

    NOTE(review): counts in the module-global `path` captured at program start,
    not in curDir -- confirm this is intended.
    """
    cnt = 0
    with os.scandir(path) as entries:
        for entry in entries:
            if entry.is_file():
                cnt += 1
    print('Number of files in directory = {}'.format(cnt))


def number_of_dir():
    """Print the number of sub-directories in `path` (same caveat as above)."""
    with os.scandir(path) as entries:
        cnt = 0
        for entry in entries:
            if entry.is_dir():
                cnt += 1
    print('Number of directories in directory = {}'.format(cnt))


def list_content():
    """Print every file and sub-directory name found in `path`."""
    with os.scandir(path) as entries:
        for entry in entries:
            if entry.is_file():
                print(entry.name)
            if entry.is_dir():
                print(entry.name)


def create_dir():
    """Prompt for a name and create that directory in the working directory."""
    print('Enter the name of new directory')
    dir_name = str(input())
    os.mkdir(dir_name)
    print('Directory created successfully')
def create_file():
    """Prompt for a name and create an empty file with it.

    Fix: the original called ``file.close()`` after the ``with`` block had
    already closed the handle; the redundant call (and the unused ``as file``
    binding) are removed.
    """
    print('Enter the name of new file')
    name = str(input())
    with open(name, "w"):
        print('File created successfully')
print("FileMan v1.0")
# Interactive REPL: show the menus, read choices, dispatch to the helpers above.
while(True):
    curDir = os.getcwd()  # global: read by ContentOfDir()
    Menu()
    print(f'Current working directory is: {curDir}')
    Fchoice = int(input("Input: "))
    if Fchoice == 1:
        FileMenuChoices()
        Schoice = int(input("Input: "))
        if Schoice == 1:
            delete()
        if Schoice == 2:
            rename()
        if Schoice == 3:
            add()
        if Schoice == 4:
            rewrite()
        if Schoice == 5:
            return_to_parent()
        else:
            # NOTE(review): this `else` binds only to the `Schoice == 5` test,
            # so every choice other than 5 restarts the loop here.
            continue
    if Fchoice == 2:
        DirMenuChoices()
        Schoice = int(input("Input: "))
        if Schoice == 1:
            list_content()
        if Schoice == 2:
            rename_dir()
        if Schoice == 3:
            number_of_files()
        if Schoice == 4:
            number_of_dir()
        if Schoice == 5:
            create_file()
        if Schoice == 6:
            create_dir()
        else:
            # Same quirk as above: binds to the `Schoice == 6` test only.
            continue
    if Fchoice == 3:
        exit()
    # davai#gg
1682382 | <gh_stars>0
import numpy as np
import pandas as pd
import math
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
def ABX(A, B, X, A_dictionary, B_dictionary, X_dictionary):
    """Goldschmidt tolerance factor t = (rA + rX) / (sqrt(2) * (rB + rX)) for
    a single-cation, single-anion ABX3 composition.

    A, B, X are one-element sequences of ion names; radii come from the
    corresponding lookup dictionaries.
    """
    r_a = A_dictionary.get(A[0])
    r_b = B_dictionary.get(B[0])
    r_x = X_dictionary.get(X[0])
    return (r_a + r_x) / (math.sqrt(2) * (r_b + r_x))
def ABX2(A, B, X, X_ratio, A_dictionary, B_dictionary, X_dictionary):
    """Tolerance factor for a two-halide ABX3 alloy.

    With X_ratio=None, sweeps the halide mixing fraction 0..1 in steps of 0.1,
    plots t against it, and returns a DataFrame of the sweep; with a two-entry
    X_ratio summing to 1, returns the scalar tolerance factor.
    """
    if X_ratio == None:
        X_radii = []
        for ions in X:
            X_radii.append(X_dictionary.get(ions))
        A_radii = A_dictionary.get(A[0])
        B_radii = B_dictionary.get(B[0])
        ratio = np.linspace(0, 1, num=11)
        # Effective anion radius: linear mix of the two halide radii.
        r_effective = ratio * X_radii[0] + (1 - ratio) * X_radii[1]
        t_effective = (A_radii + r_effective) / (math.sqrt(2) * (B_radii + r_effective))
        ones = np.ones(ratio.shape)
        eights = ones*0.8
        nines = ones*0.9
        plt.plot(ratio, t_effective, color='red', lw=2)
        plt.plot(ratio, ones, c='black')
        plt.plot(ratio, eights, c='black')
        plt.ylim(0.6, 1.2)
        plt.xlim(0,1)
        title = plt.title("$%s%s_x%s_{3x}%s_{3(1-x)}$ tolerance factor as a function of %s molar fraction" % (A[0], B[0], X[0], X[1], X[0]))
        title.set_position([0.5, 1.05])
        plt.ylabel("Tolerance Factor t", fontsize=14)
        plt.xlabel("%s Molar Ratio" % X[0], fontsize=14)
        # Shade the 0.8-0.9 and 0.9-1.0 tolerance bands.
        plt.fill_between(ratio, nines, eights, color='yellow', alpha=0.5)
        plt.fill_between(ratio, nines, ones, color='green', alpha=0.5)
        plt.show()
        df = pd.DataFrame(np.round(ratio, 2), columns=['%s Ratio' % X[0]])
        df['%s Ratio' % X[1]] = np.round(1-ratio, 2)
        df['Tolerance Factor'] = t_effective
        return df
    else:
        if sum(X_ratio) == 1:
            X_radii = []
            for ions in X:
                X_radii.append(X_dictionary.get(ions))
            A_radii = A_dictionary.get(A[0])
            B_radii = B_dictionary.get(B[0])
            r_effective = X_ratio[0] * X_radii[0] + X_ratio[1] * X_radii[1]
            t_effective = (A_radii + r_effective) / (math.sqrt(2) * (B_radii + r_effective))
            return t_effective
        else:
            print('Error: The sum of X_ratio is not equal to 1.')


def ABX3(A, B, X, X_ratio, A_dictionary, B_dictionary, X_dictionary):
    """Tolerance factor for a three-halide ABX3 alloy.

    With X_ratio=None, sweeps all valid (non-negative, summing to 1) halide
    compositions on an 11x11 grid, 3D-scatter-plots t, and returns a DataFrame;
    with a three-entry X_ratio summing to 1, returns the scalar value.
    """
    if X_ratio == None:
        X_radii = []
        for ions in X:
            X_radii.append(X_dictionary.get(ions))
        A_radii = A_dictionary.get(A[0])
        B_radii = B_dictionary.get(B[0])
        x_ratio = []
        y_ratio = []
        z_ratio = []
        x = np.linspace(0,1,11)
        y = np.linspace(0,1,11)
        xx, yy = np.meshgrid(x, y)
        z = -xx -yy +1
        # Keep only compositions where the third fraction is non-negative.
        for i in range(len(x)):
            for j in range(len(y)):
                if z[i][j] >= 0:
                    x_ratio.append(x[i])
                    y_ratio.append(y[j])
                    z_ratio.append(z[i][j])
                else:
                    continue
        x_ratio = np.array(x_ratio)
        y_ratio = np.array(y_ratio)
        z_ratio = np.array(z_ratio)
        r_effective = x_ratio * X_radii[0] + y_ratio * X_radii[1] + z_ratio * X_radii[2]
        t_effective = (A_radii + r_effective) / (math.sqrt(2) * (B_radii + r_effective))
        fig = plt.figure()
        ax = fig.add_subplot(111, projection='3d')
        img = ax.scatter(x_ratio, y_ratio, z_ratio, c=t_effective, cmap=plt.jet())
        fig.colorbar(img)
        ax.set_xlabel("%s Molar Ratio" % X[0])
        ax.set_ylabel("%s Molar Ratio" % X[1])
        ax.set_zlabel("%s Molar Ratio" % X[2])
        title = plt.title("$%s%s%s_x%s_y%s_z$ tolerance factor as a function of halide composition" % (A[0], B[0], X[0], X[1], X[2]))
        title.set_position([0.5,1.05])
        plt.show()
        df = pd.DataFrame(x_ratio, columns =['%s Ratio' % X[0]])
        df['%s Ratio' % X[1]] = y_ratio
        df['%s Ratio' % X[2]] = z_ratio
        df['Tolerance Factor'] = t_effective
        return df
    else:
        if sum(X_ratio) == 1:
            X_radii = []
            for ions in X:
                X_radii.append(X_dictionary.get(ions))
            A_radii = A_dictionary.get(A[0])
            B_radii = B_dictionary.get(B[0])
            r_effective = X_ratio[0] * X_radii[0] + X_ratio[1] * X_radii[1] + X_ratio[2] * X_radii[2]
            t_effective = (A_radii + r_effective) / (math.sqrt(2) * (B_radii + r_effective))
            return t_effective
        else:
            print('Error: The sum of X_ratio is not equal to 1.')
def AB2X(A, B, X, B_ratio, A_dictionary, B_dictionary, X_dictionary):
    """Tolerance factor for a double B-site-cation perovskite A(B,B')X3.

    With B_ratio=None, sweeps/plots the B-site mixing fraction and returns a
    DataFrame; with a two-entry B_ratio summing to 1, returns the scalar value.
    """
    if B_ratio == None:
        B_radii = []
        for ions in B:
            B_radii.append(B_dictionary.get(ions))
        A_radii = A_dictionary.get(A[0])
        X_radii = X_dictionary.get(X[0])
        ratio = np.linspace(0, 1, num=11)
        # Effective B-site radius: linear mix of the two cation radii.
        r_effective = ratio * B_radii[0] + (1 - ratio) * B_radii[1]
        t_effective = (A_radii + X_radii) / (math.sqrt(2) * (r_effective + X_radii))
        ones = np.ones(ratio.shape)
        eights = ones*0.8
        nines = ones*0.9
        plt.plot(ratio, t_effective, color='red', lw=2)
        plt.plot(ratio, ones, c='black')
        plt.plot(ratio, eights, c='black')
        plt.ylim(0.6, 1.2)
        plt.xlim(0,1)
        title = plt.title("$%s%s_x%s_{1-x}%s_3$ tolerance factor as a function of %s molar fraction" % (A[0], B[0], B[1], X[0], B[0]))
        title.set_position([0.5, 1.05])
        plt.ylabel("Tolerance Factor t", fontsize=14)
        plt.xlabel("%s Molar Ratio" % B[0], fontsize=14)
        plt.fill_between(ratio, nines, eights, color='yellow', alpha=0.5)
        plt.fill_between(ratio, nines, ones, color='green', alpha=0.5)
        plt.show()
        df = pd.DataFrame(np.round(ratio, 2), columns=['%s Ratio' % B[0]])
        df['%s Ratio' % B[1]] = np.round(1-ratio, 2)
        df['Tolerance Factor'] = t_effective
        return df
    else:
        if sum(B_ratio) == 1:
            B_radii = []
            for ions in B:
                B_radii.append(B_dictionary.get(ions))
            A_radii = A_dictionary.get(A[0])
            X_radii = X_dictionary.get(X[0])
            r_effective = B_ratio[0] * B_radii[0] + B_ratio[1] * B_radii[1]
            t_effective = (A_radii + X_radii) / (math.sqrt(2) * (r_effective + X_radii))
            return t_effective
        else:
            print('Error: The sum of B_ratio is not equal to 1.')


def A2BX(A, B, X, A_ratio, A_dictionary, B_dictionary, X_dictionary):
    """Tolerance factor for a double A-site-cation perovskite (A,A')BX3.

    With A_ratio=None, sweeps/plots the A-site mixing fraction and returns a
    DataFrame; with a two-entry A_ratio summing to 1, returns the scalar value.
    """
    if A_ratio == None:
        A_radii = []
        for ions in A:
            A_radii.append(A_dictionary.get(ions))
        B_radii = B_dictionary.get(B[0])
        X_radii = X_dictionary.get(X[0])
        ratio = np.linspace(0, 1, num=11)
        # Effective A-site radius: linear mix of the two cation radii.
        r_effective = ratio * A_radii[0] + (1 - ratio) * A_radii[1]
        t_effective = (r_effective + X_radii) / (math.sqrt(2) * (B_radii + X_radii))
        ones = np.ones(ratio.shape)
        eights = ones*0.8
        nines = ones*0.9
        plt.plot(ratio, t_effective, color='red', lw=2)
        plt.plot(ratio, ones, c='black')
        plt.plot(ratio, eights, c='black')
        plt.ylim(0.6, 1.2)
        plt.xlim(0,1)
        title = plt.title("$%s_x%s_{1-x}%s%s_3$ tolerance factor as a function of %s molar fraction" % (A[0], A[1], B[0], X[0], A[0]))
        title.set_position([0.5, 1.05])
        plt.ylabel("Tolerance Factor t", fontsize=14)
        plt.xlabel("%s Molar Ratio" % A[0], fontsize=14)
        plt.fill_between(ratio, nines, eights, color='yellow', alpha=0.5)
        plt.fill_between(ratio, nines, ones, color='green', alpha=0.5)
        plt.show()
        df = pd.DataFrame(np.round(ratio, 2), columns=['%s Ratio' % A[0]])
        df['%s Ratio' % A[1]] = np.round(1-ratio, 2)
        df['Tolerance Factor'] = t_effective
        return df
    else:
        if sum(A_ratio) == 1:
            A_radii = []
            for ions in A:
                A_radii.append(A_dictionary.get(ions))
            B_radii = B_dictionary.get(B[0])
            X_radii = X_dictionary.get(X[0])
            r_effective = A_ratio[0] * A_radii[0] + A_ratio[1] * A_radii[1]
            t_effective = (r_effective + X_radii) / (math.sqrt(2) * (B_radii + X_radii))
            return t_effective
        else:
            print('Error: The sum of A_ratio is not equal to 1.')


def A3BX(A, B, X, A_ratio, A_dictionary, B_dictionary, X_dictionary):
    """Tolerance factor for a triple A-site-cation perovskite (A,A',A'')BX3.

    With A_ratio=None, sweeps all valid A-site compositions on an 11x11 grid,
    3D-scatter-plots t, and returns a DataFrame; with a three-entry A_ratio
    summing to 1, returns the scalar value.
    """
    if A_ratio == None:
        A_radii = []
        for ions in A:
            A_radii.append(A_dictionary.get(ions))
        X_radii = X_dictionary.get(X[0])
        B_radii = B_dictionary.get(B[0])
        x_ratio = []
        y_ratio = []
        z_ratio = []
        x = np.linspace(0,1,11)
        y = np.linspace(0,1,11)
        xx, yy = np.meshgrid(x, y)
        z = -xx -yy +1
        # Keep only compositions where the third fraction is non-negative.
        for i in range(len(x)):
            for j in range(len(y)):
                if z[i][j] >= 0:
                    x_ratio.append(x[i])
                    y_ratio.append(y[j])
                    z_ratio.append(z[i][j])
                else:
                    continue
        x_ratio = np.array(x_ratio)
        y_ratio = np.array(y_ratio)
        z_ratio = np.array(z_ratio)
        r_effective = x_ratio * A_radii[0] + y_ratio * A_radii[1] + z_ratio * A_radii[2]
        t_effective = (r_effective + X_radii) / (math.sqrt(2) * (B_radii + X_radii))
        fig = plt.figure()
        ax = fig.add_subplot(111, projection='3d')
        img = ax.scatter(x_ratio, y_ratio, z_ratio, c=t_effective, cmap=plt.jet())
        fig.colorbar(img)
        ax.set_xlabel("%s Molar Ratio" % A[0])
        ax.set_ylabel("%s Molar Ratio" % A[1])
        ax.set_zlabel("%s Molar Ratio" % A[2])
        title = plt.title("$%s%s%s_x%s_y%s_z$ tolerance factor as a function of A-site cation composition" % (A[0], A[1], A[2], B[0], X[0]))
        title.set_position([0.5,1.05])
        plt.show()
        df = pd.DataFrame(x_ratio, columns =['%s Ratio' % A[0]])
        df['%s Ratio' % A[1]] = y_ratio
        df['%s Ratio' % A[2]] = z_ratio
        df['Tolerance Factor'] = t_effective
        return df
    else:
        if sum(A_ratio) == 1:
            A_radii = []
            for ions in A:
                A_radii.append(A_dictionary.get(ions))
            X_radii = X_dictionary.get(X[0])
            B_radii = B_dictionary.get(B[0])
            r_effective = A_ratio[0] * A_radii[0] + A_ratio[1] * A_radii[1] + A_ratio[2] * A_radii[2]
            t_effective = (r_effective + X_radii) / (math.sqrt(2) * (B_radii + X_radii))
            return t_effective
        else:
            print('Error: The sum of A_ratio is not equal to 1.')
def A2BX2(A, B, X, A_ratio, X_ratio, A_dictionary, B_dictionary, X_dictionary):
    """Tolerance factor for a double A-site, double-halide perovskite (A,A')B(X,X')3.

    When both A_ratio and X_ratio are given (each a two-entry list summing to 1)
    the scalar tolerance factor is returned.  Otherwise an 11x11 sweep of both
    mixing fractions (0..1, step 0.1) is returned as a DataFrame; supplying only
    one of the two ratio lists prints a warning and still returns the sweep.

    Bug fix: the validation branch previously read ``sum(X_ratio_ ==1)``
    (typo'd name plus misplaced parenthesis), which raised NameError whenever
    both ratio lists were supplied.
    """
    if A_ratio is None and X_ratio is None:
        return _A2BX2_sweep(A, B, X, A_dictionary, B_dictionary, X_dictionary)
    elif A_ratio is None or X_ratio is None:
        print('Warning: Insert a list of ratios for both A_ratio and X_ratio to calculate a specifice tolerance factor')
        return _A2BX2_sweep(A, B, X, A_dictionary, B_dictionary, X_dictionary)
    else:
        if sum(A_ratio) == 1 and sum(X_ratio) == 1:
            A_radii = [A_dictionary.get(ion) for ion in A]
            X_radii = [X_dictionary.get(ion) for ion in X]
            B_radii = B_dictionary.get(B[0])
            A_effective = A_ratio[0] * A_radii[0] + A_ratio[1] * A_radii[1]
            X_effective = X_ratio[0] * X_radii[0] + X_ratio[1] * X_radii[1]
            return (A_effective + X_effective) / (math.sqrt(2) * (B_radii + X_effective))
        else:
            print('Error: Either the sum of A_ratio or X_ratio is not equal to 1.')


def _A2BX2_sweep(A, B, X, A_dictionary, B_dictionary, X_dictionary):
    """Build the sweep DataFrame: one row per A-mixing value, one tolerance
    column per X-mixing value (plus the two A-ratio columns)."""
    A_radii = [A_dictionary.get(ion) for ion in A]
    X_radii = [X_dictionary.get(ion) for ion in X]
    B_radii = B_dictionary.get(B[0])
    ratio = np.linspace(0, 1, num=11)
    A_effective = ratio * A_radii[0] + (1 - ratio) * A_radii[1]
    X_effective = ratio * X_radii[0] + (1 - ratio) * X_radii[1]
    # One array of tolerance factors (over the X sweep) per A-mixing value.
    t_effective = [
        (a_eff + X_effective) / (math.sqrt(2) * (B_radii + X_effective))
        for a_eff in A_effective
    ]
    df = pd.DataFrame(ratio, columns=['%s Ratio' % A[0]])
    df['%s Ratio' % A[1]] = 1 - ratio
    # Column labels use the rounded ratios so headers read '0.3' not '0.30000...4'.
    for column_ratio, row in zip(np.round(ratio, decimals=2), t_effective):
        df['%s' % column_ratio] = row
    df = df.rename(columns={'0.0': '%s Ratio : 0.0' % X[0]})
    return df
def _A3BX2_survey(A, B, X, A_dictionary, B_dictionary, X_dictionary):
    """Survey tolerance factors over ternary A-site / binary X-site mixes.

    Enumerates every (x, y, z) A-site composition with x + y + z == 1 on a
    0.1-step grid, sweeps the binary X-site ratio over the same grid, and
    returns a DataFrame of tolerance factors (one column per X[0] ratio).
    """
    A_radii = [A_dictionary.get(ion) for ion in A]
    X_radii = [X_dictionary.get(ion) for ion in X]
    B_radii = B_dictionary.get(B[0])
    ratio = np.linspace(0, 1, num=11)
    x_ratio = []
    y_ratio = []
    z_ratio = []
    x = np.linspace(0, 1, 11)
    y = np.linspace(0, 1, 11)
    xx, yy = np.meshgrid(x, y)
    z = -xx - yy + 1
    # Keep only grid points where the third (dependent) ratio is >= 0.
    for i in range(len(x)):
        for j in range(len(y)):
            if z[i][j] >= 0:
                x_ratio.append(x[i])
                y_ratio.append(y[j])
                z_ratio.append(z[i][j])
    x_ratio = np.array(x_ratio)
    y_ratio = np.array(y_ratio)
    z_ratio = np.array(z_ratio)
    A_effective = x_ratio * A_radii[0] + y_ratio * A_radii[1] + z_ratio * A_radii[2]
    X_effective = ratio * X_radii[0] + (1 - ratio) * X_radii[1]
    t_effective = [(a + X_effective) / (math.sqrt(2) * (B_radii + X_effective))
                   for a in A_effective]
    df = pd.DataFrame(x_ratio, columns=['%s Ratio' % A[0]])
    df['%s Ratio' % A[1]] = y_ratio
    df['%s Ratio' % A[2]] = z_ratio
    df['A_effective'] = A_effective
    df2 = pd.DataFrame(t_effective, columns=np.round(ratio, 2))
    df_merged = pd.merge(df, df2, left_index=True, right_index=True)
    df_merged = df_merged.rename(columns={0.0: '%s Ratio : 0.0' % X[0]})
    return df_merged


def A3BX2(A, B, X, A_ratio, X_ratio, A_dictionary, B_dictionary, X_dictionary):
    """Tolerance factor for a perovskite with three A-site and two X-site species.

    With A_ratio (3 entries) and X_ratio (2 entries) both given and each
    summing to 1, returns the single tolerance factor.  Otherwise returns
    the full composition-survey DataFrame (with a warning when only one
    ratio list was supplied).  The duplicated survey code of the original
    is factored into _A3BX2_survey.
    """
    if A_ratio is None and X_ratio is None:
        return _A3BX2_survey(A, B, X, A_dictionary, B_dictionary, X_dictionary)
    elif A_ratio is None or X_ratio is None:
        print('Warning: Insert a list of ratios for both A_ratio and X_ratio to calculate a specifice tolerance factor')
        return _A3BX2_survey(A, B, X, A_dictionary, B_dictionary, X_dictionary)
    else:
        if sum(A_ratio) == 1 and sum(X_ratio) == 1:
            A_radii = [A_dictionary.get(ion) for ion in A]
            X_radii = [X_dictionary.get(ion) for ion in X]
            B_radii = B_dictionary.get(B[0])
            A_effective = A_ratio[0] * A_radii[0] + A_ratio[1] * A_radii[1] + A_ratio[2] * A_radii[2]
            X_effective = X_ratio[0] * X_radii[0] + X_ratio[1] * X_radii[1]
            return (A_effective + X_effective) / (math.sqrt(2) * (B_radii + X_effective))
        else:
            print('Error: Either the sum of A_ratio or X_ratio is not equal to 1.')
def _A2BX3_survey(A, B, X, A_dictionary, B_dictionary, X_dictionary):
    """Survey tolerance factors over binary A-site / ternary X-site mixes.

    Enumerates every (x, y, z) X-site composition with x + y + z == 1 on a
    0.1-step grid, sweeps the binary A-site ratio over the same grid, and
    returns a DataFrame of tolerance factors (one column per A[0] ratio).
    """
    A_radii = [A_dictionary.get(ion) for ion in A]
    X_radii = [X_dictionary.get(ion) for ion in X]
    B_radii = B_dictionary.get(B[0])
    ratio = np.linspace(0, 1, num=11)
    x_ratio = []
    y_ratio = []
    z_ratio = []
    x = np.linspace(0, 1, 11)
    y = np.linspace(0, 1, 11)
    xx, yy = np.meshgrid(x, y)
    z = -xx - yy + 1
    # Keep only grid points where the third (dependent) ratio is >= 0.
    for i in range(len(x)):
        for j in range(len(y)):
            if z[i][j] >= 0:
                x_ratio.append(x[i])
                y_ratio.append(y[j])
                z_ratio.append(z[i][j])
    x_ratio = np.array(x_ratio)
    y_ratio = np.array(y_ratio)
    z_ratio = np.array(z_ratio)
    X_effective = x_ratio * X_radii[0] + y_ratio * X_radii[1] + z_ratio * X_radii[2]
    A_effective = ratio * A_radii[0] + (1 - ratio) * A_radii[1]
    t_effective = [(A_effective + x_eff) / (math.sqrt(2) * (B_radii + x_eff))
                   for x_eff in X_effective]
    df = pd.DataFrame(x_ratio, columns=['%s Ratio' % X[0]])
    df['%s Ratio' % X[1]] = y_ratio
    df['%s Ratio' % X[2]] = z_ratio
    df['X_effective'] = X_effective
    df2 = pd.DataFrame(t_effective, columns=np.round(ratio, 2))
    df_merged = pd.merge(df, df2, left_index=True, right_index=True)
    df_merged = df_merged.rename(columns={0.0: '%s Ratio : 0.0' % A[0]})
    return df_merged


def A2BX3(A, B, X, A_ratio, X_ratio, A_dictionary, B_dictionary, X_dictionary):
    """Tolerance factor for a perovskite with two A-site and three X-site species.

    With A_ratio (2 entries) and X_ratio (3 entries) both given and each
    summing to 1, returns the single tolerance factor.  Otherwise returns
    the full composition-survey DataFrame (with a warning when only one
    ratio list was supplied).  The duplicated survey code of the original
    is factored into _A2BX3_survey.
    """
    if A_ratio is None and X_ratio is None:
        return _A2BX3_survey(A, B, X, A_dictionary, B_dictionary, X_dictionary)
    elif A_ratio is None or X_ratio is None:
        print('Warning: Insert a list of ratios for both A_ratio and X_ratio to calculate a specifice tolerance factor')
        return _A2BX3_survey(A, B, X, A_dictionary, B_dictionary, X_dictionary)
    else:
        if sum(A_ratio) == 1 and sum(X_ratio) == 1:
            A_radii = [A_dictionary.get(ion) for ion in A]
            X_radii = [X_dictionary.get(ion) for ion in X]
            B_radii = B_dictionary.get(B[0])
            A_effective = A_ratio[0] * A_radii[0] + A_ratio[1] * A_radii[1]
            X_effective = X_ratio[0] * X_radii[0] + X_ratio[1] * X_radii[1] + X_ratio[2] * X_radii[2]
            return (A_effective + X_effective) / (math.sqrt(2) * (B_radii + X_effective))
        else:
            print('Error: Either the sum of A_ratio or X_ratio is not equal to 1.')
def _A3BX3_survey(A, B, X, A_dictionary, B_dictionary, X_dictionary):
    """Survey tolerance factors over ternary A-site x ternary X-site mixes.

    Both sites share the same 0.1-step ternary composition grid.  Returns a
    DataFrame whose rows are X-site compositions, whose columns are A-site
    compositions, and whose cells are the tolerance factors.
    """
    A_radii = [A_dictionary.get(ion) for ion in A]
    X_radii = [X_dictionary.get(ion) for ion in X]
    B_radii = B_dictionary.get(B[0])
    x_ratio = []
    y_ratio = []
    z_ratio = []
    x = np.linspace(0, 1, 11)
    y = np.linspace(0, 1, 11)
    xx, yy = np.meshgrid(x, y)
    z = -xx - yy + 1
    # Keep only grid points where the third (dependent) ratio is >= 0.
    for i in range(len(x)):
        for j in range(len(y)):
            if z[i][j] >= 0:
                x_ratio.append(x[i])
                y_ratio.append(y[j])
                z_ratio.append(z[i][j])
    x_ratio = np.array(x_ratio)
    y_ratio = np.array(y_ratio)
    z_ratio = np.array(z_ratio)
    X_effective = x_ratio * X_radii[0] + y_ratio * X_radii[1] + z_ratio * X_radii[2]
    A_effective = x_ratio * A_radii[0] + y_ratio * A_radii[1] + z_ratio * A_radii[2]
    t_effective = np.zeros(shape=[len(X_effective), len(A_effective)])
    for i_count, x_eff in enumerate(X_effective):
        for j_count, a_eff in enumerate(A_effective):
            t_effective[i_count][j_count] = (a_eff + x_eff) / (math.sqrt(2) * (B_radii + x_eff))
    X_labels = []
    A_labels = []
    for i in range(len(x_ratio)):
        X_labels.append("%s: %s, %s: %s, %s: %s" % (X[0], np.round(x_ratio[i], 2), X[1], np.round(y_ratio[i], 2), X[2], np.round(z_ratio[i], 2)))
        A_labels.append("%s: %s, %s: %s, %s: %s" % (A[0], np.round(x_ratio[i], 2), A[1], np.round(y_ratio[i], 2), A[2], np.round(z_ratio[i], 2)))
    df = pd.DataFrame(t_effective, columns=A_labels)
    df['X Ratio Index'] = X_labels
    df = df.set_index('X Ratio Index')
    return df


def A3BX3(A, B, X, A_ratio, X_ratio, A_dictionary, B_dictionary, X_dictionary):
    """Tolerance factor for a perovskite with three A-site and three X-site species.

    With A_ratio and X_ratio (3 entries each) both given and each summing to
    1, returns the single tolerance factor.  Otherwise returns the full
    composition-survey DataFrame (with a warning when only one ratio list
    was supplied).  The duplicated survey code of the original is factored
    into _A3BX3_survey.
    """
    if A_ratio is None and X_ratio is None:
        return _A3BX3_survey(A, B, X, A_dictionary, B_dictionary, X_dictionary)
    elif A_ratio is None or X_ratio is None:
        print('Warning: Insert a list of ratios for both A_ratio and X_ratio to calculate a specifice tolerance factor')
        return _A3BX3_survey(A, B, X, A_dictionary, B_dictionary, X_dictionary)
    else:
        if sum(A_ratio) == 1 and sum(X_ratio) == 1:
            A_radii = [A_dictionary.get(ion) for ion in A]
            X_radii = [X_dictionary.get(ion) for ion in X]
            B_radii = B_dictionary.get(B[0])
            A_effective = A_ratio[0] * A_radii[0] + A_ratio[1] * A_radii[1] + A_ratio[2] * A_radii[2]
            X_effective = X_ratio[0] * X_radii[0] + X_ratio[1] * X_radii[1] + X_ratio[2] * X_radii[2]
            return (A_effective + X_effective) / (math.sqrt(2) * (B_radii + X_effective))
        else:
            print('Error: Either the sum of A_ratio or X_ratio is not equal to 1.')
| StarcoderdataPython |
1610120 | <reponame>acdh-oeaw/exploreAT-questionnaireexplorer
from rest_framework import generics
from rest_framework.renderers import TemplateHTMLRenderer
from SPARQLWrapper import SPARQLWrapper, JSON
from django.views import generic
from rest_framework.response import Response
from rest_framework.views import APIView
import json
import ast
import os, os.path
from .pagination import PostLimitOffsetPagination, PostPageNumberPagination
from django.core.paginator import Paginator
from django.conf import settings
dataset="http://fuseki:3030/dboe/query"
def generateSortCode(currentLemma, sortEncodingDict):
    """Build a sortable code string for a lemma.

    The lemma is first normalised with prepareLemmaInOrder (head word first,
    hyphenated compounds inverted), then each character is mapped through
    sortEncodingDict.  Characters without an encoding (e.g. +, ?) are
    dropped.
    """
    lemmaSortCode = ""
    currentLemma = prepareLemmaInOrder(currentLemma)
    for letter in currentLemma:
        # BUG FIX: the original test `letter != ' ' or letter != '-'` was
        # always true; `and` makes the intended separator skip effective.
        # (Behaviour only differs if the encoding table maps ' ' or '-'.)
        if letter != ' ' and letter != '-':
            if letter in sortEncodingDict:
                lemmaSortCode += sortEncodingDict[letter]
            # unknown / special characters contribute nothing
    return lemmaSortCode
def _invert_compounds(segment):
    """Reverse the order of hyphen-separated compounds, dropping the hyphens.

    A segment with no hyphen, or whose only hyphen is leading (index 0), is
    returned unchanged, matching the original per-segment logic.
    """
    if '-' in segment and segment.find('-') > 0:
        compounds = segment.split('-')
        inverted = ""
        for compound in compounds:
            inverted = compound + inverted
        return inverted
    return segment


def prepareLemmaInOrder(rawLemma):
    """Normalise a raw lemma into sorting order.

    A raw lemma such as "(ein-hin)passen" is rearranged so the head word
    comes first and each hyphenated compound group is inverted
    (right-to-left), yielding "passenhinein".  Brackets and hyphens do not
    appear in the result.
    """
    inBracket = ""
    beforeBracket = ""
    if '(' in rawLemma and ')' in rawLemma:
        indexStart = rawLemma.find('(')
        indexEnd = rawLemma.find(')')
        inBracket = rawLemma[indexStart + 1: indexEnd]
        withBracket = rawLemma[indexStart: indexEnd + 1]
        beforeBracket = rawLemma[0:indexStart]
        head = rawLemma.replace(beforeBracket + withBracket, "")
    else:
        # BUG FIX: head previously stayed "" when the lemma had no bracket
        # pair, so every plain lemma produced an empty sort string.
        head = rawLemma
    return (_invert_compounds(head)
            + _invert_compounds(inBracket)
            + _invert_compounds(beforeBracket))
class IndexView(generic.TemplateView):
    """Render the application landing page."""
    template_name = "index.html"
class OntologyView(generic.TemplateView):
    """Serve the OWL ontology file through the template machinery."""
    template_name = "Ontology.owl"
class DetailedQuestionnaireHtmlView(generic.TemplateView):
    """Render the questionnaire HTML page.

    BUG FIX: the original overrode ``get`` and only assigned a *local*
    variable ``template_name`` before implicitly returning None, so the view
    produced no response.  Declaring ``template_name`` as a class attribute
    lets TemplateView's inherited ``get`` render the page.
    """
    template_name = "questionnaire.html"
class DetailedQuestionHtmlView(APIView):
    """Render a single question, addressed by numeric id, via question.html."""
    renderer_classes = [TemplateHTMLRenderer]
    template_name='question.html'
    def get(self, request, pk):
        # Build the subject URI for the requested question id.
        # NOTE(review): this view queries the <http://localhost/questions>
        # graph while every other view uses exploreat.adaptcentre.ie graphs
        # -- confirm this is intentional.
        subj="<http://localhost/oldca/frage/" +pk +">"
        sparql = SPARQLWrapper(dataset)
        sparql.setQuery("""
        SELECT *
        From named <http://localhost/questions>
        WHERE {
        Graph <http://localhost/questions> {""" +subj + """ ?p ?o}
        } Limit 50
        """)
        sparql.setReturnFormat(JSON)
        results = sparql.query().convert()
        return Response(results)
# class QuestionnaireView(generics.GenericAPIView):
# pagination_class = PostPageNumberPagination
#
# def get(self, request):
# sparql = SPARQLWrapper(dataset)
# sparql.setQuery("""
#
# SELECT *
# From named <http://exploreat.adaptcentre.ie/Questionnaire_graph>
# WHERE {
# Graph <http://exploreat.adaptcentre.ie/Questionnaire_graph> {?s ?p ?o}
# } Limit 100
# """)
# sparql.setReturnFormat(JSON)
# results = sparql.query().convert()
# results = results['results']['bindings']
# page=Paginator(results, 100)
#
# if page is not None:
# serializer_class = self..get_serializer(page, many=True)
# return self.get_paginated_response(serializer_class.data)
#
# serializer = self.get_serializer(results, many=True)
# return Response(serializer.data)
class QuestionnaireView(APIView):
    """Return up to 100 triples from the Questionnaire named graph."""

    def get(self, request):
        endpoint = SPARQLWrapper(dataset)
        query = """
        SELECT *
        From named <http://exploreat.adaptcentre.ie/Questionnaire_graph>
        WHERE {
        Graph <http://exploreat.adaptcentre.ie/Questionnaire_graph> {?s ?p ?o}
        } Limit 100
        """
        endpoint.setQuery(query)
        endpoint.setReturnFormat(JSON)
        payload = endpoint.query().convert()
        # Strip the SPARQL JSON envelope down to the result bindings.
        return Response(payload['results']['bindings'])
class QuestionView(APIView):
    """Return every triple in the Question named graph."""
    def get(self, request):
        sparql = SPARQLWrapper(dataset)
        sparql.setQuery("""
        SELECT *
        From named <http://exploreat.adaptcentre.ie/Question_graph>
        WHERE {
        Graph <http://exploreat.adaptcentre.ie/Question_graph> {?s ?p ?o}
        }
        """)
        sparql.setReturnFormat(JSON)
        results = sparql.query().convert()
        return Response(results)
class LemmaView(APIView):
    """Return every triple in the Lemma named graph."""
    def get(self, request):
        sparql = SPARQLWrapper(dataset)
        sparql.setQuery("""
        SELECT *
        From named <http://exploreat.adaptcentre.ie/Lemma_graph>
        WHERE {
        Graph <http://exploreat.adaptcentre.ie/Lemma_graph> {?s ?p ?o}
        }
        """)
        sparql.setReturnFormat(JSON)
        results = sparql.query().convert()
        return Response(results)
class SourceView(APIView):
    """Return every triple in the Source named graph."""
    def get(self, request):
        sparql = SPARQLWrapper(dataset)
        sparql.setQuery("""
        SELECT *
        From named <http://exploreat.adaptcentre.ie/Source_graph>
        WHERE {
        Graph <http://exploreat.adaptcentre.ie/Source_graph> {?s ?p ?o}
        }
        """)
        sparql.setReturnFormat(JSON)
        results = sparql.query().convert()
        return Response(results)
class PaperSlipView(APIView):
    """Return every triple in the PaperSlip named graph."""
    def get(self, request):
        sparql = SPARQLWrapper(dataset)
        sparql.setQuery("""
        SELECT *
        From named <http://exploreat.adaptcentre.ie/PaperSlip_graph>
        WHERE {
        Graph <http://exploreat.adaptcentre.ie/PaperSlip_graph> {?s ?p ?o}
        }
        """)
        sparql.setReturnFormat(JSON)
        results = sparql.query().convert()
        return Response(results)
class PaperSlipRecordView(APIView):
    """Return every triple in the PaperSlipRecord named graph."""
    def get(self, request):
        sparql = SPARQLWrapper(dataset)
        sparql.setQuery("""
        SELECT *
        From named <http://exploreat.adaptcentre.ie/PaperSlipRecord_graph>
        WHERE {
        Graph <http://exploreat.adaptcentre.ie/PaperSlipRecord_graph> {?s ?p ?o}
        }
        """)
        sparql.setReturnFormat(JSON)
        results = sparql.query().convert()
        return Response(results)
class MultimediaView(APIView):
    """Return every triple in the Multimedia named graph."""
    def get(self, request):
        sparql = SPARQLWrapper(dataset)
        sparql.setQuery("""
        SELECT *
        From named <http://exploreat.adaptcentre.ie/Multimedia_graph>
        WHERE {
        Graph <http://exploreat.adaptcentre.ie/Multimedia_graph> {?s ?p ?o}
        }
        """)
        sparql.setReturnFormat(JSON)
        results = sparql.query().convert()
        return Response(results)
class PersonView(APIView):
    """Return every triple in the Person named graph."""
    def get(self, request):
        sparql = SPARQLWrapper(dataset)
        sparql.setQuery("""
        SELECT *
        From named <http://exploreat.adaptcentre.ie/Person_graph>
        WHERE {
        Graph <http://exploreat.adaptcentre.ie/Person_graph> {?s ?p ?o}
        }
        """)
        sparql.setReturnFormat(JSON)
        results = sparql.query().convert()
        return Response(results)
class DetailedQuestionnaireView(APIView):
    """Return all triples for one questionnaire, addressed by numeric id.

    NOTE(review): ``pk`` is spliced directly into the query string; safe
    only if URL routing restricts it to digits -- verify the urlconf.
    """
    def get(self, request,pk):
        # Build the subject URI for the requested questionnaire id.
        subj = "<http://exploreat.adaptcentre.ie/Questionnaire/" + pk + ">"
        sparql = SPARQLWrapper(dataset)
        sparql.setQuery("""
        SELECT *
        From named <http://exploreat.adaptcentre.ie/Questionnaire_graph>
        WHERE {
        Graph <http://exploreat.adaptcentre.ie/Questionnaire_graph> {""" +subj + """ ?p ?o}
        }
        """)
        sparql.setReturnFormat(JSON)
        results = sparql.query().convert()
        return Response(results)
class DetailedQuestionnaireViewLimit(APIView):
    """Return triples for a consecutive range of questionnaires.

    ``limit`` is clamped to 100 and ``offset`` floored at 1; the view then
    enumerates the questionnaire URIs offset..offset+limit-1 into a SPARQL
    FILTER ... IN clause.
    """
    def get(self, request,limit,offset):
        if int(limit) > 100: #Set the max Limit to 100
            limit=100
        if int(offset) <= 0: #set the offset to 1
            offset=1
        # Comma-separated URI list for the FILTER clause (trailing comma
        # stripped below).
        uris=''
        for i in range(int(offset),int(limit)+int(offset)):
            uris+='<http://exploreat.adaptcentre.ie/Questionnaire/'+str(i)+">,"
        uris=uris[:-1]
        sparql = SPARQLWrapper(dataset)
        sparql.setQuery("""
        SELECT *
        From named <http://exploreat.adaptcentre.ie/Questionnaire_graph>
        WHERE {
        Graph <http://exploreat.adaptcentre.ie/Questionnaire_graph> {?s ?p ?o}
        Filter (?s IN(""" + uris +"""
        ))
        }
        """
        )
        sparql.setReturnFormat(JSON)
        results = sparql.query().convert()
        return Response(results)
class DetailedQuestionView(APIView):
    """Return all triples for one question, addressed by numeric id."""
    def get(self, request,pk):
        # Build the subject URI for the requested question id.
        subj="<http://exploreat.adaptcentre.ie/Question/" +pk +">"
        sparql = SPARQLWrapper(dataset)
        sparql.setQuery("""
        SELECT *
        From named <http://exploreat.adaptcentre.ie/Question_graph>
        WHERE {
        Graph <http://exploreat.adaptcentre.ie/Question_graph> {""" +subj + """ ?p ?o}
        }
        """)
        sparql.setReturnFormat(JSON)
        results = sparql.query().convert()
        # if request.accepted_renderer.format == 'html':
        #     # TemplateHTMLRenderer takes a context dict,
        #     # and additionally requires a 'template_name'.
        #     # It does not require serialization.
        #     return Response(results, template_name='question.html')
        return Response(results)
class DetailedQuestionViewLimit(APIView):
    """Return triples for a consecutive range of questions.

    ``limit`` is clamped to 100 and ``offset`` floored at 1 before the
    question URIs are enumerated into a FILTER ... IN clause.
    """
    def get(self, request,limit,offset):
        if int(limit) > 100: #Set the max Limit to 100
            limit=100
        if int(offset) <= 0: #set the offset to 1
            offset=1
        # Comma-separated URI list (trailing comma stripped below).
        uris=''
        for i in range(int(offset),int(limit)+int(offset)):
            uris+='<http://exploreat.adaptcentre.ie/Question/'+str(i)+">,"
        uris=uris[:-1]
        sparql = SPARQLWrapper(dataset)
        sparql.setQuery("""
        SELECT *
        From named <http://exploreat.adaptcentre.ie/Question_graph>
        WHERE {
        Graph <http://exploreat.adaptcentre.ie/Question_graph> {?s ?p ?o}
        Filter (?s IN(""" + uris +"""
        ))
        }
        """
        )
        sparql.setReturnFormat(JSON)
        results = sparql.query().convert()
        return Response(results)
class DetailedPaperSlipRecordView(APIView):
    """Return all triples for one paper-slip record, addressed by numeric id."""
    def get(self, request,pk):
        # Build the subject URI for the requested record id.
        subj="<http://exploreat.adaptcentre.ie/PaperSlipRecord/" +pk +">"
        sparql = SPARQLWrapper(dataset)
        sparql.setQuery("""
        SELECT *
        From named <http://exploreat.adaptcentre.ie/PaperSlipRecord_graph>
        WHERE {
        Graph <http://exploreat.adaptcentre.ie/PaperSlipRecord_graph> {""" +subj + """ ?p ?o}
        }
        """)
        sparql.setReturnFormat(JSON)
        results = sparql.query().convert()
        return Response(results)
class DetailedPaperSlipRecordViewLimit(APIView):
    """Return triples for a consecutive range of paper-slip records.

    ``limit`` is clamped to 100.  Offsets are shifted by 124301 --
    apparently the first PaperSlipRecord id in the dataset; TODO confirm.
    """
    def get(self, request,limit,offset):
        if int(limit) > 100: #Set the max Limit to 100
            limit=100
        if int(offset) < 124301: # shift offset into the record id range
            offset= int(offset)+124301
        # Comma-separated URI list (trailing comma stripped below).
        uris=''
        for i in range(int(offset),int(limit)+int(offset)):
            uris+='<http://exploreat.adaptcentre.ie/PaperSlipRecord/'+str(i)+">,"
        uris=uris[:-1]
        sparql = SPARQLWrapper(dataset)
        sparql.setQuery("""
        SELECT *
        From named <http://exploreat.adaptcentre.ie/PaperSlipRecord_graph>
        WHERE {
        Graph <http://exploreat.adaptcentre.ie/PaperSlipRecord_graph> {?s ?p ?o}
        Filter (?s IN(""" + uris +"""
        ))
        }
        """
        )
        sparql.setReturnFormat(JSON)
        results = sparql.query().convert()
        return Response(results)
class DetailedLemmaView(APIView):
    """Return all triples for one lemma, addressed by numeric id."""
    def get(self, request,pk):
        # Build the subject URI for the requested lemma id.
        subj="<http://exploreat.adaptcentre.ie/Lemma/" +pk +">"
        sparql = SPARQLWrapper(dataset)
        sparql.setQuery("""
        SELECT *
        From named <http://exploreat.adaptcentre.ie/Lemma_graph>
        WHERE {
        Graph <http://exploreat.adaptcentre.ie/Lemma_graph> {""" +subj + """ ?p ?o}
        }
        """)
        sparql.setReturnFormat(JSON)
        results = sparql.query().convert()
        return Response(results)
class DetailedLemmaViewLimit(APIView):
    """Return triples for a consecutive range of lemmas.

    ``limit`` is clamped to 100 and ``offset`` floored at 1 before the
    lemma URIs are enumerated into a FILTER ... IN clause.
    """
    def get(self, request,limit,offset):
        if int(limit) > 100: #Set the max Limit to 100
            limit=100
        if int(offset) <= 0: #set the offset to 1
            offset=1
        # Comma-separated URI list (trailing comma stripped below).
        uris=''
        for i in range(int(offset),int(limit)+int(offset)):
            uris+='<http://exploreat.adaptcentre.ie/Lemma/'+str(i)+">,"
        uris=uris[:-1]
        sparql = SPARQLWrapper(dataset)
        sparql.setQuery("""
        SELECT *
        From named <http://exploreat.adaptcentre.ie/Lemma_graph>
        WHERE {
        Graph <http://exploreat.adaptcentre.ie/Lemma_graph> {?s ?p ?o}
        Filter (?s IN(""" + uris +"""
        ))
        }
        """
        )
        sparql.setReturnFormat(JSON)
        results = sparql.query().convert()
        return Response(results)
class DetailedSourceView(APIView):
    """Return all triples for one source, addressed by numeric id."""
    def get(self, request,pk):
        # Build the subject URI for the requested source id.
        subj="<http://exploreat.adaptcentre.ie/Source/" +pk +">"
        sparql = SPARQLWrapper(dataset)
        sparql.setQuery("""
        SELECT *
        From named <http://exploreat.adaptcentre.ie/Source_graph>
        WHERE {
        Graph <http://exploreat.adaptcentre.ie/Source_graph> {""" +subj + """ ?p ?o}
        }
        """)
        sparql.setReturnFormat(JSON)
        results = sparql.query().convert()
        return Response(results)
class DetailedSourceViewLimit(APIView):
    """Return triples for a consecutive range of sources.

    ``limit`` is clamped to 100 and ``offset`` floored at 1 before the
    source URIs are enumerated into a FILTER ... IN clause.
    """
    def get(self, request,limit,offset):
        if int(limit) > 100: #Set the max Limit to 100
            limit=100
        if int(offset) <= 0: #set the offset to 1
            offset=1
        # Comma-separated URI list (trailing comma stripped below).
        uris=''
        for i in range(int(offset),int(limit)+int(offset)):
            uris+='<http://exploreat.adaptcentre.ie/Source/'+str(i)+">,"
        uris=uris[:-1]
        sparql = SPARQLWrapper(dataset)
        sparql.setQuery("""
        SELECT *
        From named <http://exploreat.adaptcentre.ie/Source_graph>
        WHERE {
        Graph <http://exploreat.adaptcentre.ie/Source_graph> {?s ?p ?o}
        Filter (?s IN(""" + uris +"""
        ))
        }
        """
        )
        sparql.setReturnFormat(JSON)
        results = sparql.query().convert()
        return Response(results)
class DetailedPaperSlipView(APIView):
    """Return all triples for one paper slip, addressed by numeric id."""
    def get(self, request,pk):
        # Build the subject URI for the requested paper slip id.
        subj="<http://exploreat.adaptcentre.ie/PaperSlip/" +pk +">"
        sparql = SPARQLWrapper(dataset)
        sparql.setQuery("""
        SELECT *
        From named <http://exploreat.adaptcentre.ie/PaperSlip_graph>
        WHERE {
        Graph <http://exploreat.adaptcentre.ie/PaperSlip_graph> {""" +subj + """ ?p ?o}
        }
        """)
        sparql.setReturnFormat(JSON)
        results = sparql.query().convert()
        return Response(results)
class DetailedPaperSlipViewLimit(APIView):
    """Return triples for a consecutive range of paper slips.

    ``limit`` is clamped to 100 and ``offset`` floored at 1 before the
    paper-slip URIs are enumerated into a FILTER ... IN clause.
    """
    def get(self, request,limit,offset):
        if int(limit) > 100: #Set the max Limit to 100
            limit=100
        if int(offset) <= 0: #set the offset to 1
            offset=1
        # Comma-separated URI list (trailing comma stripped below).
        uris=''
        for i in range(int(offset),int(limit)+int(offset)):
            uris+='<http://exploreat.adaptcentre.ie/PaperSlip/'+str(i)+">,"
        uris=uris[:-1]
        sparql = SPARQLWrapper(dataset)
        sparql.setQuery("""
        SELECT *
        From named <http://exploreat.adaptcentre.ie/PaperSlip_graph>
        WHERE {
        Graph <http://exploreat.adaptcentre.ie/PaperSlip_graph> {?s ?p ?o}
        Filter (?s IN(""" + uris +"""
        ))
        }
        """
        )
        sparql.setReturnFormat(JSON)
        results = sparql.query().convert()
        return Response(results)
class DetailedMultimediaView(APIView):
    """Return all triples for one multimedia item, addressed by numeric id."""
    def get(self, request,pk):
        # Build the subject URI for the requested multimedia id.
        subj="<http://exploreat.adaptcentre.ie/Multimedia/" +pk +">"
        sparql = SPARQLWrapper(dataset)
        sparql.setQuery("""
        SELECT *
        From named <http://exploreat.adaptcentre.ie/Multimedia_graph>
        WHERE {
        Graph <http://exploreat.adaptcentre.ie/Multimedia_graph> {""" +subj + """ ?p ?o}
        }
        """)
        sparql.setReturnFormat(JSON)
        results = sparql.query().convert()
        return Response(results)
class DetailedMultimediaViewLimit(APIView):
    """Return triples for a consecutive range of multimedia items.

    ``limit`` is clamped to 100 and ``offset`` floored at 1 before the
    multimedia URIs are enumerated into a FILTER ... IN clause.
    """
    def get(self, request,limit,offset):
        if int(limit) > 100: #Set the max Limit to 100
            limit=100
        if int(offset) <= 0: #set the offset to 1
            offset=1
        # Comma-separated URI list (trailing comma stripped below).
        uris=''
        for i in range(int(offset),int(limit)+int(offset)):
            uris+='<http://exploreat.adaptcentre.ie/Multimedia/'+str(i)+">,"
        uris=uris[:-1]
        sparql = SPARQLWrapper(dataset)
        sparql.setQuery("""
        SELECT *
        From named <http://exploreat.adaptcentre.ie/Multimedia_graph>
        WHERE {
        Graph <http://exploreat.adaptcentre.ie/Multimedia_graph> {?s ?p ?o}
        Filter (?s IN(""" + uris +"""
        ))
        }
        """
        )
        sparql.setReturnFormat(JSON)
        results = sparql.query().convert()
        return Response(results)
class DetailedPersonView(APIView):
    """Return all triples for one person, addressed by numeric id."""
    def get(self, request,pk):
        # Build the subject URI for the requested person id.
        subj="<http://exploreat.adaptcentre.ie/Person/" +pk +">"
        sparql = SPARQLWrapper(dataset)
        sparql.setQuery("""
        SELECT *
        From named <http://exploreat.adaptcentre.ie/Person_graph>
        WHERE {
        Graph <http://exploreat.adaptcentre.ie/Person_graph> {""" +subj + """ ?p ?o}
        }
        """)
        sparql.setReturnFormat(JSON)
        results = sparql.query().convert()
        return Response(results)
class DetailedPersonViewLimit(APIView):
    """Return triples for a consecutive range of persons.

    ``limit`` is clamped to 100.  Offsets are shifted by 7259 -- apparently
    the first Person id in the dataset; TODO confirm.
    """
    def get(self, request,limit,offset):
        if int(limit) > 100: #Set the max Limit to 100
            limit=100
        if int(offset) <7259: # shift offset into the person id range
            offset=int(offset)+7259
        # Comma-separated URI list (trailing comma stripped below).
        uris=''
        for i in range(int(offset),int(limit)+int(offset)):
            uris+='<http://exploreat.adaptcentre.ie/Person/'+str(i)+">,"
        uris=uris[:-1]
        sparql = SPARQLWrapper(dataset)
        sparql.setQuery("""
        SELECT *
        From named <http://exploreat.adaptcentre.ie/Person_graph>
        WHERE {
        Graph <http://exploreat.adaptcentre.ie/Person_graph> {?s ?p ?o}
        Filter (?s IN(""" + uris +"""
        ))
        }
        """
        )
        sparql.setReturnFormat(JSON)
        results = sparql.query().convert()
        return Response(results)
class LemmaSortCode(APIView):
    """Compute the collation sort code for a lemma entry.

    ``entry`` is "lemma[<TAB>originalSortCode]".  The character-to-code
    table is read from static/sortEncoding.txt (a Python dict literal) and
    the recomputed code is returned as {'lemma': ..., 'sortCode': ...}.
    """

    def get(self, request, entry):
        lemmaRec = entry
        # Load the character encoding table used for collation.
        print(settings.STATIC_URL)
        staticFileDir = os.path.join(settings.BASE_DIR, 'static')
        sortFiledir = os.path.join(staticFileDir, 'sortEncoding.txt')
        # BUG FIX: the file handle was previously never closed.
        with open(sortFiledir, 'r') as sortFile:
            sortEncoding = sortFile.read()
        sortEncodingDict = ast.literal_eval(sortEncoding)
        # BUG FIX: sortNew was undefined for an empty entry, raising
        # UnboundLocalError when building the response; default to "".
        sortNew = ""
        if lemmaRec != "":
            content = str.split(lemmaRec.strip(), "\t")
            lemma = content[0]
            if len(content) > 1:
                sortOriginal = content[1]
            else:
                sortOriginal = "Empty"
            print("\n\nRaw Input", lemmaRec)
            sortNew = generateSortCode(lemma.replace(" © 2008-2080 <NAME>", ""), sortEncodingDict)
            print("==>", lemma, sortOriginal, " \tnew\t", sortNew, sortOriginal == sortNew)
        result = {'lemma': lemmaRec, 'sortCode': sortNew}
        return Response(result)
| StarcoderdataPython |
190421 | # from flask import Flask, render_template
# from flask_cors import CORS
# def create_app():
# app = Flask(__name__, static_folder='../client/build/static', template_folder='../client/build')
# CORS(app)
# @app.route("/", defaults = {'path': ''})
# @app.route("/<path:path>")
# def index(path):
# """
# Default route: returns the React app
# """
# return render_template('index.html')
# @app.route("/route2")
# def test():
# """
# Test route to contrast with server route
# """
# return "Route 2"
# return app
"""
Embed bokeh server session into a flask framework
Adapted from bokeh-master/examples/howto/serve_embed/flask_gunicorn_embed.py
"""
import asyncio
import logging
from threading import Thread
import time
from bokeh import __version__ as ver
from bokeh.embed import server_document
from flask import Flask, render_template, request
from flask.wrappers import Response
from flask_cors import CORS, cross_origin
import requests
from tornado.ioloop import IOLoop
from tornado.web import Application, FallbackHandler
from tornado.wsgi import WSGIContainer
from server.bkapp import bokeh_cdn_resources
from server.config import (
BOKEH_PATH,
BOKEH_URL,
BOKEH_WS_PATH,
FLASK_PATH,
FLASK_PORT,
FLASK_URL,
get_bokeh_port,
)
from server.wsproxy import WebSocketProxy
from server.predict import predict_symptoms
logging.basicConfig(level=logging.INFO)
log = logging.getLogger(__name__)
app = Flask(__name__, static_folder='templates/static')
CORS(app)
app.config["CORS_HEADERS"] = "Content-Type"
app.config["SECRET_KEY"] = "secret!"
@app.route("/", defaults = {'path': ''})
@app.route("/<path:path>")
def index(path):
"""
Default route: returns the React app
"""
return render_template('index.html')
@app.route("/predict-symptoms", methods=['POST'])
def predict():
data = request.json
symptoms = predict_symptoms(data)
return { "symptoms": symptoms, "demographics": data }
@app.route("/route2")
def test():
"""
Test route to contrast with server route
"""
return "Route 2"
@app.route("/seagraph", methods=["GET"])
def seagraph_test():
"""Index"""
resources = bokeh_cdn_resources()
script = server_document(FLASK_URL + BOKEH_PATH, resources=None)
return render_template("embed.html", script=script, resources=resources)
@app.route("/bokeh/<path:path>", methods=["GET"])
@cross_origin(origins="*")
def proxy(path):
"""HTTP Proxy"""
# print(request.__dict__)
path = "bokeh/" + path
print("path", path)
query = ""
if request.query_string is not None:
query = "?" + request.query_string.decode("utf-8")
bokeh_url = BOKEH_URL.replace("$PORT", get_bokeh_port())
request_url = f"{bokeh_url}/{path}{query}"
resp = requests.get(request_url)
excluded_headers = ["content-length", "connection"]
headers = [
(name, value)
for (name, value) in resp.raw.headers.items()
if name.lower() not in excluded_headers
]
response = Response(resp.content, resp.status_code, headers)
return response
def start_tornado():
    """Run the Flask app inside a Tornado event loop.

    The Tornado application routes Bokeh websocket traffic to
    ``WebSocketProxy`` and falls back to the Flask WSGI container for
    everything else.  Blocks on ``IOLoop.instance().start()``.
    """
    # This runs on a worker thread, which has no event loop by default.
    asyncio.set_event_loop(asyncio.new_event_loop())
    container = WSGIContainer(app)
    server = Application(
        [
            (BOKEH_WS_PATH, WebSocketProxy),
            (r".*", FallbackHandler, dict(fallback=container)),
        ],
        **{"use_xheaders": True},
    )
    server.listen(port=FLASK_PORT)
    IOLoop.instance().start()
if __name__ == "__main__":
t = Thread(target=start_tornado, daemon=True)
t.start()
log.info("Flask + Bokeh Server App Running at %s", FLASK_URL + FLASK_PATH)
while True:
time.sleep(0.05)
| StarcoderdataPython |
3282313 | <reponame>brown170/fudge<filename>numericalFunctions/ptwXY/Python/Test/UnitTesting/integrate/groupTestAll1.py
# <<BEGIN-copyright>>
# Copyright 2021, Lawrence Livermore National Security, LLC.
# See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: BSD-3-Clause
# <<END-copyright>>
import os
from numericalFunctions import pointwiseXY_C
# Echo this file's name when the test harness asks for it via CHECKOPTIONS.
if( 'CHECKOPTIONS' in os.environ ) :
    options = os.environ['CHECKOPTIONS'].split( )
    if( '-e' in options ) : print( __file__ )
# Build and run the C-side reference executable; its .dat outputs are the
# expected values compared against below.
CPATH = '../../../../Test/UnitTesting/integrate'
os.system( 'cd %s; make -s clean; ./groupTestAll1 -v' % CPATH )
def getIntegerValue( name, ls ) :
    """Parse a '# <name> = <int>' header line from the line list *ls*.

    Returns ``(remaining_lines, value)``.  Raises Exception when the
    first line does not carry the expected header.
    """

    s = "# %s = " % name
    n = len( s )
    if( ls[0][:n] != s ) :
        # The original format string had four placeholders but only three
        # arguments, so raising this error failed with a TypeError instead
        # of the intended message.
        raise Exception( '%s: does not contain %s info: "%s"' % ( __file__, name, ls[0][:-1] ) )
    value = int( ls[0].split( '=' )[1] )
    return( ls[1:], value )
def compareValues( label, i, v1, v2 ) :
    """Compare two floats after rounding both to 8 significant digits.

    Prints the rounded strings and raises Exception when they differ;
    returns None when they agree.
    """

    rounded1 = '%.7e' % float( '%.12e' % v1 )
    rounded2 = '%.7e' % float( '%.12e' % v2 )
    if( rounded1 != rounded2 ) :
        print( '<%s> <%s>' % ( rounded1, rounded2 ) )
        raise Exception( '%s: values %e and %e diff by %e at %d for label = %s' % ( __file__, v1, v2, v2 - v1, i, label ) )
def compareGroups( fileName, norm, g1 ) :
    """Compare computed group values *g1* against the reference data
    stored in '<fileName>_<norm>.dat'; raises on any mismatch."""

    label = fileName + '_' + norm
    g2 = getXData( label )
    if( len( g1 ) != len( g2 ) ) : raise Exception( '%s: for %s len( g1 ) = %d != len( g2 ) = %d' %( __file__, label, len( g1 ), len( g2 ) ) )
    for i , g1X in enumerate( g1 ) : compareValues( label, i, g1X, g2[i] )
def getXData( fileName ) :
    """Read '<CPATH>/<fileName>.dat': a '# length = N' header followed by
    N float lines.  Returns the floats as a list."""

    fileName_ = os.path.join( CPATH, fileName + '.dat' )
    f = open( fileName_ )
    ls = f.readlines( )
    f.close( )
    ls, length = getIntegerValue( 'length', ls )
    if( len( ls ) != length ) :
        # The original format string had four placeholders but only three
        # arguments; add __file__ so raising works and matches the style
        # of the other error messages in this script.
        raise Exception( '%s: len( ls ) = %s != length = %d for file %s' % ( __file__, len( ls ), length, fileName ) )
    data = [ float( l ) for l in ls ]
    return( data )
def getXYData( fileName ) :
    """Read '<CPATH>/<fileName>' as (x, y) pairs into a pointwiseXY_C object."""

    fileName_ = os.path.join( CPATH, fileName )
    f = open( fileName_ )
    ls = f.readlines( )
    f.close( )
    # NOTE(review): unlike getXData, `length` is parsed but never checked
    # against len( ls ) here — presumably intentional, but verify.
    ls, length = getIntegerValue( 'length', ls )
    data = [ list( map( float, l.split( ) ) ) for l in ls ]
    data = pointwiseXY_C.pointwiseXY_C( data, initialSize = len( data ), overflowSize = 10 )
    return( data )
def checkOneFunctionGrouping( fileName, groupBoundaries ) :
    """Group a single function (the flux) with no norm, 'dx' norm and an
    explicit norm, comparing each result against the C reference output.

    Returns the flux and its un-normed group integrals for reuse by the
    two- and three-function checks.
    """

    flux = getXYData( fileName + '.dat' )
    flux_None = flux.groupOneFunction( groupBoundaries )
    compareGroups( fileName, 'None', flux_None )
    flux_dx = flux.groupOneFunction( groupBoundaries, norm = 'dx' )
    compareGroups( fileName, 'dx', flux_dx )
    flux_norm = flux.groupOneFunction( groupBoundaries, norm = flux_None )
    compareGroups( fileName, 'norm', flux_norm )
    return( flux, flux_None )
def checkTwoFunctionGrouping( fileName, groupBoundaries, flux, flux_None ) :
    """Group the product of two functions (cross section x flux) under the
    three norm options and compare against the C reference output.

    Returns the cross section for reuse by the three-function check.
    """

    crossSection = getXYData( fileName + '.dat' )
    crossSection_None = crossSection.groupTwoFunctions( groupBoundaries, flux )
    compareGroups( fileName, 'None', crossSection_None )
    crossSection_dx = crossSection.groupTwoFunctions( groupBoundaries, flux, norm = 'dx' )
    compareGroups( fileName, 'dx', crossSection_dx )
    crossSection_norm = crossSection.groupTwoFunctions( groupBoundaries, flux, norm = flux_None )
    compareGroups( fileName, 'norm', crossSection_norm )
    return( crossSection )
def checkThreeFunctionGrouping( fileName, groupBoundaries, flux, crossSection, flux_None ) :
    """Group the product of three functions (multiplicity x flux x cross
    section) under the three norm options and compare against the C
    reference output."""

    multiplicity = getXYData( fileName + '.dat' )
    multiplicity_None = multiplicity.groupThreeFunctions( groupBoundaries, flux, crossSection )
    compareGroups( fileName, 'None', multiplicity_None )
    multiplicity_dx = multiplicity.groupThreeFunctions( groupBoundaries, flux, crossSection, norm = 'dx' )
    compareGroups( fileName, 'dx', multiplicity_dx )
    multiplicity_norm = multiplicity.groupThreeFunctions( groupBoundaries, flux, crossSection, norm = flux_None )
    compareGroups( fileName, 'norm', multiplicity_norm )
# Driver: run the one-, two- and three-function grouping checks in order,
# feeding each stage's results into the next.
groupBoundaries = getXData( 'groupBoundaries' )
flux, flux_None = checkOneFunctionGrouping( 'flux', groupBoundaries )
crossSection = checkTwoFunctionGrouping( 'crossSection', groupBoundaries, flux, flux_None )
checkThreeFunctionGrouping( 'multiplicity', groupBoundaries, flux, crossSection, flux_None )
| StarcoderdataPython |
174821 | from locators import *
from utilities import *
from browserSetUp import *
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
class TestSetUp:
    """Selenium test harness: wires the driver, locators and target URL
    from environment variables (BROWSER, URL)."""

    def __init__(self):
        # NOTE(review): `os` is assumed to come in via one of the
        # wildcard imports at the top of the file — confirm.
        self.utilities = Utilities()
        self.driver = DriverSetup(os.getenv('BROWSER'))
        self.locator = TemperatureLocators
        self.url = os.getenv('URL')

    def testRun(self):
        """Open the page, switch into the temperature frame and locate
        the temperature value element."""
        self.driver.openWebPage(os.getenv('URL'))
        self.driver.switchFrame(str(self.locator.TEMPERATURE_FRAME))
        self.driver.find_element_by_id(str(self.locator.TEMP_VALUE))

    def getText(self,element):
        """Return the visible text of a located WebElement."""
        return element.text
| StarcoderdataPython |
129884 | <gh_stars>0
from django.apps import AppConfig
class MahTagsConfig(AppConfig):
    """Django app configuration for the ``mah_tags`` application."""
    # Use 64-bit auto-incrementing primary keys by default.
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'mah_tags'
| StarcoderdataPython |
3390173 | <gh_stars>0
from __future__ import annotations
import pytest
import numpy as np
from tools.numpy_numerology import (
repeated_indices,
make_outer_indices,
flattened_ranges,
)
# ELECTRONS TABLE
# event_idx electron_idx
# -----------------------
# 0 0
# 0 1
# ------------------------
# ------------------------
# 2 0
# 2 1
# 2 2
# ------------------------
# ------------------------
# ELECTRONS_HITS TABLE
# event_idx electron_idx hit_idx
# ---------------------------
# 0 0 0
# 0 0 1
# ............................
# 0 1 0
# 0 1 1
# 0 1 2
# 0 1 3
# ----------------------------
# ----------------------------
# 2 0 0
# 2 0 1
# 2 0 2
# ............................
# 2 1 0
# 2 1 1
# ............................
# 2 2 0
# ---------------------------
@pytest.fixture()
def num_events() -> int:
    # Total number of events in the example tables sketched above.
    return 3
@pytest.fixture()
def electrons_per_event() -> list[int]:
    # Electron count per event (event 1 has none), matching the table above.
    return [2, 0, 3]
@pytest.fixture()
def hits_per_electron() -> list[int]:
    # Hit count per electron, in table order across all events.
    return [2, 4, 3, 2, 1]
def test_repeated_indices():
    # Each event index is repeated once per electron it owns.
    assert repeated_indices([2, 0, 3]) == [0, 0, 2, 2, 2]
def test_make_outer_indices():
    # Outer (electron) index repeated once per hit of that electron.
    assert make_outer_indices([1, 2], [2, 1, 2]) == [0, 0, 0, 1, 1]
def test_hit_index_generation(hits_per_electron):
    """Hit indices restart at 0 for each electron (one range per count)."""
    hit_idx = np.array(flattened_ranges(hits_per_electron))
    assert isinstance(hit_idx, np.ndarray)
    num_hits = len(hit_idx)
    assert num_hits == sum(hits_per_electron)
    assert np.array_equal(hit_idx, np.array([0, 1, 0, 1, 2, 3, 0, 1, 2, 0, 1, 0]))
def test_electron_index_generation(num_events, electrons_per_event, hits_per_electron):
    """Per-hit electron indices match the ELECTRONS_HITS table above."""
    electrons_idx = np.array(make_outer_indices(electrons_per_event, hits_per_electron))
    assert isinstance(electrons_idx, np.ndarray)
    assert len(electrons_idx) == sum(hits_per_electron)
    assert np.array_equal(electrons_idx, np.array([0, 0, 1, 1, 1, 1, 0, 0, 0, 1, 1, 2]))
| StarcoderdataPython |
1726892 | #!/usr/bin/env python
import numpy as np
import rospy
import time
from sensor_msgs.msg import LaserScan
from nav_msgs.msg import OccupancyGrid
import tf
# p(x) = 1 - \frac{1}{1 + e^l(x)}
def l2p(l):
    """Convert log-odds to probability: p(x) = 1 - 1/(1 + e^l(x))."""
    odds = np.exp(l)
    return 1 - 1 / (1 + odds)
# l(x) = log(\frac{p(x)}{1 - p(x)})
def p2l(p):
    """Convert probability to log-odds: l(x) = log(p/(1-p))."""
    odds = p / (1 - p)
    return np.log(odds)
class GridMapping:
    """Log-odds occupancy-grid mapper.

    Cells store log-odds values; each laser ray marks traversed cells as
    free (via Bresenham) and the endpoint cell as occupied.
    """

    def __init__(self, map_center_x, map_center_y, map_size_x, map_size_y, map_resolution, laser_min_angle, laser_max_angle, laser_resolution, laser_max_dist, sensor_model_p_occ, sensor_model_p_free, sensor_model_p_prior):
        self.map_center_x = map_center_x #meter
        self.map_center_y = map_center_y #meter
        self.map_size_x = map_size_x #meter
        self.map_size_y = map_size_y #meter
        self.map_resolution = map_resolution #meter/cell
        self.laser_min_angle = laser_min_angle #radian
        self.laser_max_angle = laser_max_angle #radian
        self.laser_resolution = laser_resolution #radian
        self.laser_max_dist = laser_max_dist #meter
        # Sensor model probabilities are stored as log-odds so updates
        # become simple additions.
        self.sensor_model_l_occ = p2l(sensor_model_p_occ)
        self.sensor_model_l_free = p2l(sensor_model_p_free)
        self.sensor_model_l_prior = p2l(sensor_model_p_prior)
        map_rows = int(map_size_y / map_resolution)
        map_cols = int(map_size_x / map_resolution)
        # Every cell starts at the prior log-odds.
        self.gridmap = self.sensor_model_l_prior * np.ones((map_rows, map_cols))

    def to_xy (self, i, j):
        """Grid cell (i, j) -> world coordinates (x, y) in meters."""
        x = j * self.map_resolution + self.map_center_x
        y = i * self.map_resolution + self.map_center_y
        return x, y

    def to_ij (self, x, y):
        """World coordinates (x, y) -> fractional grid indices (i, j)."""
        i = (y-self.map_center_y) / self.map_resolution
        j = (x-self.map_center_x) / self.map_resolution
        return i, j

    def is_inside (self, i, j):
        """True when cell (i, j) lies within the grid bounds."""
        return i<self.gridmap.shape[0] and j<self.gridmap.shape[1] and i>=0 and j>=0

    def raycast_update(self, x0, y0, theta, d):
        """Update the grid along one laser ray from (x0, y0) at angle theta.

        Free cells along the ray are updated inside bresenham(); the
        endpoint cell is marked occupied only for a genuine (finite,
        in-range) detection.
        """
        # see: https://www.ros.org/reps/rep-0117.html
        # Detections that are too close to the sensor to quantify shall be represented by -Inf.
        # Erroneous detections shall be represented by quiet (non-signaling) NaNs.
        # Finally, out of range detections will be represented by +Inf.
        if np.isinf(d) and np.sign(d) == +1:
            d = self.laser_max_dist
        elif np.isinf(d) or np.isnan(d):
            return
        x1 = x0 + d*np.cos(theta)
        y1 = y0 + d*np.sin(theta)
        i0, j0 = self.to_ij(x0, y0)
        i1, j1 = self.to_ij(x1, y1)
        d_cells = d / self.map_resolution
        ip, jp, is_hit = self.bresenham(i0, j0, i1, j1, d_cells)
        if not np.isnan(d) and d != self.laser_max_dist and self.is_inside(int(ip),int(jp)):
            # Hit!
            self.gridmap[int(ip),int(jp)] += self.sensor_model_l_occ - self.sensor_model_l_prior
        return

    #bresenham method is used to plot the lines
    def bresenham (self, i0, j0, i1, j1, d, debug=False): # i0, j0 (starting point)
        """Walk cells from (i0, j0) toward (i1, j1) up to distance d,
        marking each traversed in-bounds cell as free.

        Returns (ip, jp, is_hit) where (ip, jp) is the last cell reached.
        NOTE(review): the is_hit path compares against the raw value 100,
        which log-odds cells never reach in this class — confirm intent.
        """
        dx = np.absolute(j1-j0)
        dy = -1 * np.absolute(i1-i0)
        sx = -1
        if j0<j1:
            sx = 1
        sy = -1
        if i0<i1:
            sy = 1
        jp, ip = j0, i0
        err = dx+dy # error value e_xy
        while True: # loop
            # Stop at the target cell, at the ray length, or at the map edge.
            if (jp == j1 and ip == i1) or (np.sqrt((jp-j0)**2+(ip-i0)**2) >= d) or not self.is_inside(ip, jp):
                return ip, jp, False
            elif self.gridmap[int(ip),int(jp)]==100:
                return ip, jp, True
            if self.is_inside(ip, jp):
                # miss:
                self.gridmap[int(ip),int(jp)] += self.sensor_model_l_free - self.sensor_model_l_prior
            e2 = 2*err
            if e2 >= dy: # e_xy+e_x > 0
                err += dy
                jp += sx
            if e2 <= dx: # e_xy+e_y < 0
                err += dx
                ip += sy

    def update(self, x, y, theta, scan):
        """Integrate a full laser scan taken at robot pose (x, y, theta);
        returns the updated log-odds grid."""
        # test by printing robot trajectory
        #i,j = self.to_ij(x,y)
        #self.gridmap[int(i), int(j)] = 100
        for i, z in enumerate(scan):
            self.raycast_update(x, y, (theta + self.laser_min_angle + i*self.laser_resolution), z)
        return self.gridmap
class GridMappingROS:
    """ROS node wrapper: feeds LaserScan messages into GridMapping and
    publishes the result as a nav_msgs/OccupancyGrid."""

    def __init__(self):
        rospy.init_node('RosGridMapping', anonymous=True)
        self.is_gridmapping_initialized = False
        self.map_last_publish = rospy.Time()
        # Sentinel "far away" previous pose so the first scan always
        # triggers a map update.
        self.prev_robot_x = -99999999
        self.prev_robot_y = -99999999
        self.sensor_model_p_occ = rospy.get_param('~sensor_model_p_occ', 0.75)
        self.sensor_model_p_free = rospy.get_param('~sensor_model_p_free', 0.45)
        self.sensor_model_p_prior = rospy.get_param('~sensor_model_p_prior', 0.5)
        self.robot_frame = rospy.get_param('~robot_frame', 'base_link')
        self.map_frame = rospy.get_param('~map_frame', 'map')
        self.map_center_x = rospy.get_param('~map_center_x', -1.0)
        self.map_center_y = rospy.get_param('~map_center_y', -1.0)
        self.map_size_x = rospy.get_param('~map_size_x', 32.0)
        self.map_size_y = rospy.get_param('~map_size_y', 12.0)
        self.map_resolution = rospy.get_param('~map_resolution', 0.1)
        self.map_publish_freq = rospy.get_param('~map_publish_freq', 1.0)
        self.update_movement = rospy.get_param('~update_movement', 0.1)

        # Create an OccupancyGrid message template
        self.map_msg = OccupancyGrid()
        self.map_msg.header.frame_id = self.map_frame
        self.map_msg.info.resolution = self.map_resolution
        self.map_msg.info.width = int(self.map_size_x / self.map_resolution)
        self.map_msg.info.height = int(self.map_size_y / self.map_resolution)
        self.map_msg.info.origin.position.x = self.map_center_x
        self.map_msg.info.origin.position.y = self.map_center_y

        self.laser_sub = rospy.Subscriber("scan", LaserScan, self.laserscan_callback, queue_size=2)
        self.map_pub = rospy.Publisher('map', OccupancyGrid, queue_size=2)
        self.tf_sub = tf.TransformListener()

    def init_gridmapping(self, laser_min_angle, laser_max_angle, laser_resolution, laser_max_dist):
        """Lazily build the GridMapping backend from the first scan's
        laser geometry."""
        self.gridmapping = GridMapping(self.map_center_x, self.map_center_y, self.map_size_x, self.map_size_y, self.map_resolution, laser_min_angle, laser_max_angle, laser_resolution, laser_max_dist, self.sensor_model_p_occ, self.sensor_model_p_free, self.sensor_model_p_prior)
        self.is_gridmapping_initialized = True

    # https://en.wikipedia.org/wiki/Conversion_between_quaternions_and_Euler_angles#Quaternion_to_Euler_angles_conversion
    def quarternion_to_yaw(self, qx, qy, qz, qw):
        """Extract the yaw angle (rotation about z) from a quaternion."""
        siny_cosp = 2 * (qw * qz + qx * qy)
        cosy_cosp = 1 - 2 * (qy * qy + qz * qz)
        return np.arctan2(siny_cosp, cosy_cosp)

    def publish_occupancygrid(self, gridmap, stamp):
        """Convert a log-odds grid to 0-100 occupancy values and publish it."""
        # Convert gridmap to ROS supported data type : int8[]
        # http://docs.ros.org/en/melodic/api/nav_msgs/html/msg/OccupancyGrid.html
        # The map data, in row-major order, starting with (0,0). Occupancy probabilities are in the range [0,100]. Unknown is -1.
        gridmap_p = l2p(gridmap)
        #unknown_mask = (gridmap_p == self.sensor_model_p_prior) # for setting unknown cells to -1
        gridmap_int8 = (gridmap_p*100).astype(dtype=np.int8)
        #gridmap_int8[unknown_mask] = -1 # for setting unknown cells to -1

        # Publish map
        self.map_msg.data = gridmap_int8
        self.map_msg.header.stamp = stamp
        self.map_pub.publish(self.map_msg)
        rospy.loginfo_once("Published map!")

    def laserscan_callback(self, data):
        """Per-scan callback: look up the robot pose at scan time, update
        the map if the robot moved enough, and publish at the configured
        rate."""
        if not self.is_gridmapping_initialized:
            self.init_gridmapping(data.angle_min, data.angle_max, data.angle_increment, data.range_max)

        self.tf_sub.waitForTransform(self.map_frame, self.robot_frame, data.header.stamp, rospy.Duration(1.0))
        try:
            # get the robot position associated with the current laserscan
            (x, y, _),(qx, qy, qz, qw) = self.tf_sub.lookupTransform(self.map_frame, self.robot_frame, data.header.stamp)
            theta = self.quarternion_to_yaw(qx, qy, qz, qw)

            # check the movement if update is needed
            if ( (x-self.prev_robot_x)**2 + (y-self.prev_robot_y)**2 >= self.update_movement**2 ):
                gridmap = self.gridmapping.update(x, y, theta, data.ranges).flatten() # update map
                self.prev_robot_x = x
                self.prev_robot_y = y

                # publish map (with the specified frequency)
                if (self.map_last_publish.to_sec() + 1.0/self.map_publish_freq < rospy.Time.now().to_sec() ):
                    self.map_last_publish = rospy.Time.now()
                    self.publish_occupancygrid(gridmap, data.header.stamp)
        except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException) as e:
            rospy.logerr(e)
# Instantiate the node and block until shutdown.  NOTE(review): there is
# no `if __name__ == "__main__":` guard, so importing this module starts
# the node; also rospy.spin() already blocks until shutdown, so the while
# loop effectively runs a single iteration — confirm intent.
gm = GridMappingROS()
while not rospy.is_shutdown():
    rospy.spin()
| StarcoderdataPython |
4839268 | import re
import requests
from bs4 import BeautifulSoup
from botutils.constants import IS_URL_REGEX
def get_ffn_url_from_query(query):
    """Google-search *query* + "fanfiction" and return the first
    fanfiction.net story URL found, or None.

    Returns None both when no result matches and when the matching link
    does not contain a well-formed URL.
    """
    url = 'https://www.google.com/search?q=' + query + "+fanfiction"
    page = requests.get(url)
    soup = BeautifulSoup(page.content, 'html.parser')
    for link in soup.find_all('a'):
        # .get() skips anchors without an href instead of raising KeyError.
        href = link.get('href')
        # Dots escaped so e.g. "fanfictionXnet" can no longer match.
        if href and re.search(r"fanfiction\.net/s/", href):
            match = re.search(IS_URL_REGEX, href)
            # Guard against a non-URL match instead of AttributeError.
            return match.group(0) if match else None
    return None
def get_ao3_url_from_query(query):
    """Google-search *query* + "archiveofourown" and return the first
    archiveofourown.org URL found, preferring /works/ links over
    /chapters/ links.  Returns None when nothing matches.
    """
    url = 'https://www.google.com/search?q=' + query + "+archiveofourown"
    page = requests.get(url)
    soup = BeautifulSoup(page.content, 'html.parser')
    # .get() skips anchors without an href instead of raising KeyError.
    hrefs = [link.get('href') for link in soup.find_all('a') if link.get('href')]
    # Two passes preserve the original priority: all /works/ links are
    # considered before any /chapters/ link.
    for pattern in (r"\barchiveofourown\.org/works/\b",
                    r"\barchiveofourown\.org/chapters/\b"):
        for href in hrefs:
            if re.search(pattern, href):
                match = re.search(IS_URL_REGEX, href)
                # Guard against a non-URL match instead of AttributeError.
                return match.group(0) if match else None
    return None
| StarcoderdataPython |
176441 | from datetime import datetime
from Myna import db
from werkzeug.security import generate_password_hash, check_password_hash, new_hash
from Myna import login
from flask_login import UserMixin
from .Hornbill import IMGresizer
from Myna.config import Config
import os
from Myna import photos
# Self-referential association table for the User follower graph:
# each row records one (follower -> followed) edge.
followers = db.Table('followers',
    db.Column('follower_id', db.Integer, db.ForeignKey('user.id')),
    db.Column('followed_id', db.Integer, db.ForeignKey('user.id'))
)
class User(UserMixin, db.Model):
    """Application user: credentials, avatar, posts and follower graph."""
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    username = db.Column(db.String(64), index=True, unique=True)
    email = db.Column(db.String(120), index=True, unique=True)
    stakeholder = db.Column(db.String(64))
    password_hash = db.Column(db.String(128))
    # NOTE(review): datetime.utcnow yields a naive timestamp — confirm the
    # rest of the app treats last_seen as UTC.
    last_seen = db.Column(db.DateTime, index=True, default=datetime.utcnow)
    posts = db.relationship('Post', backref='author', lazy='dynamic')
    avatar_img=db.Column(db.String(128))
    # Self-referential many-to-many through the `followers` table:
    # `followed` is who this user follows; the backref `followers` is who
    # follows this user.
    followed = db.relationship(
        'User', secondary=followers,
        primaryjoin=(followers.c.follower_id == id),
        secondaryjoin=(followers.c.followed_id == id),
        backref=db.backref('followers', lazy='dynamic'), lazy='dynamic')

    def __repr__(self):
        return '<User {}>'.format(self.username)

    def set_password(self, password):
        """Store only a salted hash of the given password."""
        self.password_hash = generate_password_hash(password)

    def check_password(self, password):
        """True if the given password matches the stored hash."""
        return check_password_hash(self.password_hash, password)

    def update_avatar_img(self,filepath):
        """Record the path of the uploaded avatar image."""
        self.avatar_img=filepath

    def avatarIMG(self,size):
        """Return the URL of this user's avatar resized to *size*."""
        filename=self.avatar_img.split('/')[-1]
        print (filename)
        imgloc=os.path.join(Config.UPLOADED_PHOTOS_DEST,filename)
        print (imgloc)
        img=IMGresizer(size,imgloc,'')
        return photos.url(img.GetIMG())

    def follow(self, user):
        """Add *user* to the followed set (no-op if already followed)."""
        if not self.is_following(user):
            self.followed.append(user)

    def unfollow(self, user):
        """Remove *user* from the followed set (no-op if not followed)."""
        if self.is_following(user):
            self.followed.remove(user)

    def is_following(self, user):
        """True when this user already follows *user*."""
        return self.followed.filter(
            followers.c.followed_id == user.id).count() > 0

    def followed_posts(self):
        """Posts by followed users, newest first."""
        return Post.query.join(
            followers, (followers.c.followed_id == Post.user_id)).filter(
            followers.c.follower_id == self.id).order_by(Post.timestamp.desc())
class Post(db.Model):
    """A short (max 140 chars) user post, linked back to its author."""
    id = db.Column(db.Integer, primary_key=True)
    body = db.Column(db.String(140))
    # NOTE(review): naive UTC timestamp (datetime.utcnow) — confirm.
    timestamp = db.Column(db.DateTime, index=True, default=datetime.utcnow)
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'))

    def __repr__(self):
        return '<Post {}>'.format(self.body)
@login.user_loader
def load_user(id):
    """Flask-Login user loader: resolve a session user id to a User.

    Flask-Login stores the id in the session as a string, so cast it
    back to int before the integer primary-key lookup.
    """
    return User.query.get(int(id))
161632 | #!/usr/bin/python
import os
from pynvml import *
def choose_gpu():
    """Pin CUDA_VISIBLE_DEVICES to the GPU with the lowest memory-usage ratio.

    Falls back to GPU 0 when no devices are reported (matching the
    original default).  Always releases the NVML handle, even when a
    query raises.
    """
    nvmlInit()
    try:
        n_gpus = nvmlDeviceGetCount()
        gpu_memusage = []
        for gpu_index in range(n_gpus):
            device_handle = nvmlDeviceGetHandleByIndex(gpu_index)
            meminfo = nvmlDeviceGetMemoryInfo(device_handle)
            gpu_memusage.append(float(meminfo.used) / float(meminfo.total))
        # First index with the smallest used/total ratio (ties keep the
        # lower index, as before).
        mostfree_index = min(range(n_gpus), key=gpu_memusage.__getitem__) if n_gpus else 0
        os.environ['CUDA_VISIBLE_DEVICES'] = str(mostfree_index)
    finally:
        # The original leaked the NVML session if any query above raised.
        nvmlShutdown()
    return None
| StarcoderdataPython |
4807134 | <reponame>JiaqiYao/dynamic_multi_label<gh_stars>1-10
import tensorflow as tf
import numpy as np
from gensim.models.keyedvectors import KeyedVectors
import pickle
import json
import os
class DataProcessor(object):
    """Base class for data converters for sequence classification data sets.

    Builds a vocabulary and multi-hot label map from JSON text/label
    files, converts texts to padded id sequences, and pickles the
    resulting features plus word embeddings to ``data_dir``.
    """

    def __init__(self, data_dir, word2vec_path, max_sentence_length):
        self.data_dir = data_dir
        self.word2vec_path = word2vec_path
        self.max_sentence_length = max_sentence_length
        self.labels = set()
        self.num_class = 0
        self.label_map = dict()
        self.tokenizer = None

    def _build_vocabulary(self, train_texts, oov_token='UNK', filters='', lower=True):
        """Fit the Keras tokenizer on the training texts, reserve id 0 for
        padding, and dump the word-embedding matrix."""
        self.tokenizer = tf.keras.preprocessing.text.Tokenizer(
            oov_token=oov_token,
            filters=filters,
            lower=lower)
        self.tokenizer.fit_on_texts(train_texts)
        # add PAD
        self.tokenizer.word_index['<PAD>'] = 0
        self.tokenizer.index_word[0] = '<PAD>'
        self.tokenizer.word_counts['<PAD>'] = 0
        self.tokenizer.word_docs['<PAD>'] = 0
        # get word embedding
        self.dump_word_embedding(self.tokenizer.word_index)
        print("Build the vocabulary done")

    def build_label_map(self, train_labels_name, valid_labels_name, test_labels_name):
        """Collect the label set over train/valid/test files and assign
        each label a fixed integer index."""
        train_labels_path = os.path.join(self.data_dir, train_labels_name)
        valid_labels_path = os.path.join(self.data_dir, valid_labels_name)
        test_labels_path = os.path.join(self.data_dir, test_labels_name)
        with open(train_labels_path, 'rt') as fin:
            train_labels = json.load(fin)
        with open(valid_labels_path, 'rt') as fin:
            valid_labels = json.load(fin)
        with open(test_labels_path, 'rt') as fin:
            test_labels = json.load(fin)
        for train_label in train_labels+valid_labels+test_labels:
            self.labels = self.labels.union(train_label)
        self.num_class = len(self.labels)
        self.label_map = dict(zip(self.labels, range(self.num_class)))

    def _transform_label(self, label):
        """Encode a list of label names as a multi-hot vector.

        Returns None when any label is unknown (callers may filter such
        examples out via _filter_examples).
        """
        label_id = np.zeros(self.num_class, dtype=np.int64)
        for item in label:
            if item in self.label_map:
                label_id[self.label_map[item]] = 1
            else:
                return None
        return label_id

    def dump_train_features(self, text_name, label_name):
        """Build the vocabulary from the training texts, convert them to
        padded id sequences, and pickle texts/labels to data_dir."""
        text_path = os.path.join(self.data_dir, text_name)
        label_path = os.path.join(self.data_dir, label_name)
        texts, labels = self._get_data_from_json(text_path, label_path)
        self._build_vocabulary(texts)
        # self._build_label_map(labels)
        texts_ids = self.tokenizer.texts_to_sequences(texts)
        # Shrink the configured maximum to the longest observed sentence.
        max_sentence_length = max(len(x) for x in texts_ids)
        if max_sentence_length < self.max_sentence_length:
            self.max_sentence_length = max_sentence_length
        print("max sentence length is {}".format(self.max_sentence_length))
        # padding
        texts_ids = tf.keras.preprocessing.sequence.pad_sequences(texts_ids,
                                                                  maxlen=self.max_sentence_length,
                                                                  padding='post',
                                                                  truncating='post')
        labels_ids = np.array([self._transform_label(label) for label in labels])
        with open(os.path.join(self.data_dir, 'train_texts_ids.dat'), 'wb') as fout:
            pickle.dump(texts_ids, fout)
        with open(os.path.join(self.data_dir, 'train_labels_ids.dat'), 'wb') as fout:
            pickle.dump(labels_ids, fout)
        print("Train Data Done {}".format(len(labels_ids)))

    def dump_eval_features(self, text_name, label_name):
        """Convert the validation set with the already-fitted tokenizer
        and pickle the padded features."""
        text_path = os.path.join(self.data_dir, text_name)
        label_path = os.path.join(self.data_dir, label_name)
        texts, labels = self._get_data_from_json(text_path, label_path)
        texts_ids = self.tokenizer.texts_to_sequences(texts)
        # padding
        texts_ids = tf.keras.preprocessing.sequence.pad_sequences(texts_ids,
                                                                  maxlen=self.max_sentence_length,
                                                                  padding='post',
                                                                  truncating='post')
        labels_ids = np.array([self._transform_label(label) for label in labels])
        # texts_ids, labels_ids = self._filter_examples(texts_ids, labels_ids)
        with open(os.path.join(self.data_dir, 'valid_texts_ids.dat'), 'wb') as fout:
            pickle.dump(texts_ids, fout)
        with open(os.path.join(self.data_dir, 'valid_labels_ids.dat'), 'wb') as fout:
            pickle.dump(labels_ids, fout)
        print("Valid Data Done {}".format(len(labels_ids)))

    def dump_test_features(self, text_name, label_name):
        """Convert the test set with the already-fitted tokenizer and
        pickle the padded features."""
        text_path = os.path.join(self.data_dir, text_name)
        label_path = os.path.join(self.data_dir, label_name)
        texts, labels = self._get_data_from_json(text_path, label_path)
        texts_ids = self.tokenizer.texts_to_sequences(texts)
        # padding
        texts_ids = tf.keras.preprocessing.sequence.pad_sequences(texts_ids,
                                                                  maxlen=self.max_sentence_length,
                                                                  padding='post',
                                                                  truncating='post')
        labels_ids = np.array([self._transform_label(label) for label in labels])
        # texts_ids, labels_ids = self._filter_examples(texts_ids, labels_ids)
        with open(os.path.join(self.data_dir, 'test_texts_ids.dat'), 'wb') as fout:
            pickle.dump(texts_ids, fout)
        with open(os.path.join(self.data_dir, 'test_labels_ids.dat'), 'wb') as fout:
            pickle.dump(labels_ids, fout)
        print("Test Data Done {}".format(len(labels_ids)))

    def dump_word_embedding(self, vocabulary):
        """Build and pickle a (vocab_size+1, embed_size) embedding matrix.

        Words found in the word2vec model get their pretrained vector;
        the rest keep a uniform random initialization.
        """
        vocab_size = len(vocabulary)
        print("vocabulary size is {}".format(vocab_size))
        word_vectors = KeyedVectors.load_word2vec_format(self.word2vec_path, binary=True)
        embed_size = word_vectors.vector_size
        bound = np.sqrt(6.0 / embed_size)
        word_embeddings = np.random.uniform(-bound, bound, [vocab_size+1, embed_size])
        for word in vocabulary:
            if word in word_vectors:
                word_embeddings[vocabulary[word], :] = word_vectors[word]
        with open(os.path.join(self.data_dir, 'word_embeddings.dat'), 'wb') as fout:
            pickle.dump(word_embeddings, fout)

    def dump_meta_data(self):
        """Pickle the tokenizer, label map and final sentence length."""
        with open(os.path.join(self.data_dir, "tokenizer.dat"), 'wb') as fout:
            pickle.dump(self.tokenizer, fout)
        with open(os.path.join(self.data_dir, "label_map.dat"), 'wb') as fout:
            pickle.dump(self.label_map, fout)
        with open(os.path.join(self.data_dir, "max_sentence_length.dat"), 'wb') as fout:
            pickle.dump(self.max_sentence_length, fout)

    def get_labels(self):
        """Gets the set of labels for this data set."""
        # Fixed: the original did `raise self.labels`, which raised a
        # TypeError (a set is not an exception) instead of returning.
        return self.labels

    @classmethod
    def _get_data_from_json(cls, text_path, label_path):
        """Load parallel JSON lists of texts and label lists."""
        with open(text_path, 'rt') as fin:
            texts = json.load(fin)
        with open(label_path, 'rt') as fin:
            labels = json.load(fin)
        return texts, labels

    @classmethod
    def _filter_examples(cls, text_ids, label_ids):
        """Drop examples whose label encoding is None (unknown label)."""
        output_text_ids = list()
        output_label_ids = list()
        count = 0
        for text_id, label_id in zip(text_ids, label_ids):
            if label_id is not None:
                output_label_ids.append(label_id)
                output_text_ids.append(text_id)
            else:
                count += 1
        print("Filter {} examples".format(count))
        return np.array(output_text_ids), np.array(output_label_ids)
if __name__ == "__main__":
data_dir = r'/home/yaojq/data/text/reuters'
word2vec_path = r'/home/yaojq/data/word_embedding/GoogleNews-vectors-negative300.bin'
print(data_dir)
max_seq_length = 512
processor = DataProcessor(data_dir, word2vec_path, max_seq_length)
processor.build_label_map("train_labels.txt", "valid_labels.txt", "test_labels.txt")
processor.dump_train_features("train_texts.txt", "train_labels.txt")
processor.dump_eval_features("valid_texts.txt", "valid_labels.txt")
processor.dump_test_features("test_texts.txt", "test_labels.txt")
processor.dump_meta_data()
| StarcoderdataPython |
3242594 | <reponame>robotcaresystems/roboticslanguage
#
# This is the Robotics Language compiler
#
# Parameters.py: Definition of the parameters for this package
#
# Created on: 17 August, 2018
# Author: <NAME>
# Author: <NAME>
# Licence: Apache 2.0
# Copyright: 2014-2017 Robot Care Systems BV, The Hague, The Netherlands. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from RoboticsLanguage.Base import Utilities
from RoboticsLanguage.Tools import Serialise
def transform(code, parameters):
    """Tag assign-function variables in the XML code tree, then serialise
    the tree for every requested output language.

    Returns the (possibly modified) code tree and parameters.
    NOTE(review): ``.iteritems()`` is Python-2-only — confirm the target
    interpreter, or whether `variables` is a custom mapping providing it.
    """
    # look for all variables with an assign function
    for variable, value in parameters['Transformers']['Base']['variables'].iteritems():
        if 'assign' in value.keys():
            # Mark every assignment to this variable so later stages emit
            # the assign function instead of a plain assignment.
            for assignment in code.xpath('//assign/variable[@name="' + variable + '"]/..'):
                assignment.attrib['assignFunction'] = 'true'

    # # find all relevant outputs
    # package_parents = []
    # for element in Utilities.ensureList(parameters['globals']['output']):
    #   package_parents += Utilities.getPackageOutputParents(parameters, element)
    #
    # # make them unique
    # package_parents = list(set(package_parents))
    #
    # # serialize for each output
    # for language in package_parents:
    #   for xml_child in code.getchildren():
    #     Serialise.serialise(xml_child, parameters, parameters['language'], language)

    # Serialise each top-level element once per requested output language.
    for language in Utilities.ensureList(parameters['globals']['output']):
        for xml_child in code.getchildren():
            Serialise.serialise(xml_child, parameters, parameters['language'], language)

    return code, parameters
| StarcoderdataPython |
1757686 | <gh_stars>10-100
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright (c) 2016 <NAME> (http://www.jdhp.org)
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# See also: http://effbot.org/tkinterbook/listbox.htm
import tkinter as tk
root = tk.Tk()

# LISTBOX #############################

# The "selectmode" can be:
# - SINGLE: just a single choice
# - BROWSE: same, but the selection can be moved using the mouse
# - MULTIPLE: multiple items can be chosen, by clicking at them one at a
#   time
# - EXTENDED: multiple ranges of items can be chosen, using the Shift and
#   Control keyboard modifiers
listbox = tk.Listbox(root, selectmode=tk.EXTENDED)
listbox.pack()

items = ["banana", "apple", "mango", "orange"]

for item in items:
    listbox.insert(tk.END, item)

# BUTTON ##############################

def print_selection():
    """Print the indices and labels of the currently selected items."""
    selection_id_tuple = listbox.curselection()
    selection_label_tuple = tuple(listbox.get(item) for item in selection_id_tuple)
    print(selection_id_tuple)
    print(selection_label_tuple)

button = tk.Button(root, text="Print selection", width=15, command=print_selection)
button.pack()

# MAIN LOOP ###########################

root.mainloop()
| StarcoderdataPython |
1647526 | <reponame>commonism/acmetk<filename>acmetk/models/order.py<gh_stars>1-10
import enum
import typing
import uuid
from datetime import datetime, timezone, timedelta
import acme.messages
from cryptography import x509
from cryptography.hazmat.primitives import serialization
from sqlalchemy import (
Column,
Enum,
DateTime,
String,
ForeignKey,
LargeBinary,
TypeDecorator,
Integer,
)
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import relationship
from .authorization import AuthorizationStatus, Authorization
from .base import Serializer, Entity, AcmeErrorType
from .challenge import Challenge
from .identifier import Identifier
from ..util import url_for, names_of
class CSRType(TypeDecorator):
    """Column type storing an x509 CertificateSigningRequest as PEM bytes."""

    impl = LargeBinary

    def process_bind_param(self, value, dialect):
        # Serialize the CSR object to PEM on the way into the database;
        # falsy values (None) pass through untouched.
        if not value:
            return value
        return value.public_bytes(encoding=serialization.Encoding.PEM)

    def process_result_value(self, value, dialect):
        # Parse PEM bytes back into a CSR object on the way out of the
        # database; falsy values (None) pass through untouched.
        if not value:
            return value
        return x509.load_pem_x509_csr(value)
class OrderStatus(str, enum.Enum):
    """Lifecycle states of an ACME order (see RFC 8555)."""

    # subclassing str simplifies json serialization using json.dumps
    PENDING = "pending"
    READY = "ready"
    PROCESSING = "processing"
    VALID = "valid"
    INVALID = "invalid"
class Order(Entity, Serializer):
    """Database model for ACME order objects.
    `7.1.3. Order Objects <https://tools.ietf.org/html/rfc8555#section-7.1.3>`_
    """
    __tablename__ = "orders"
    # Fields included when serializing the order to JSON.
    __serialize__ = frozenset(["status", "expires", "notBefore", "notAfter"])
    # Fields considered when computing a diff between two order snapshots.
    __diff__ = frozenset(
        ["status", "expires", "notBefore", "notAfter", "proxied_url", "proxied_error"]
    )
    __mapper_args__ = {
        "polymorphic_identity": "order",
    }
    _entity = Column(Integer, ForeignKey("entities.entity"), nullable=False, index=True)
    order_id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    """The order's ID."""
    proxied_url = Column(String, nullable=True, unique=False)
    """The order's URL at the remote CA."""
    proxied_error = Column(AcmeErrorType, nullable=True)
    """The error that occured at the remote CA while processing the order."""
    status = Column("status", Enum(OrderStatus), nullable=False)
    """The order's status."""
    expires = Column(DateTime(timezone=True), nullable=False)
    """The :class:`datetime.datetime` from which the order is considered expired."""
    identifiers = relationship(
        "Identifier",
        cascade="all, delete",
        lazy="noload",
        foreign_keys="Identifier.order_id",
    )
    """List of identifiers (:class:`~acmetk.models.identifier.Identifier`) associated with the order."""
    notBefore = Column(DateTime(timezone=True))
    """The requested *notBefore* field in the certificate."""
    notAfter = Column(DateTime(timezone=True))
    """The requested *notAfter* field in the certificate."""
    account_id = Column(
        UUID(as_uuid=True), ForeignKey("accounts.account_id"), nullable=False
    )
    account = relationship(
        "Account", back_populates="orders", lazy="noload", foreign_keys=account_id
    )
    """The :class:`~acmetk.models.account.Account` that created the order."""
    certificate = relationship(
        "Certificate",
        uselist=False,
        single_parent=True,
        back_populates="order",
        lazy="noload",
        foreign_keys="Certificate.order_id",
    )
    """The :class:`~acmetk.models.certificate.Certificate` that was generated as a result of the order."""
    csr = Column(CSRType)
    """The :class:`cryptography.x509.CertificateSigningRequest` that was submitted by the client."""
    def url(self, request) -> str:
        """Returns the order's URL.
        :param request: The client request needed to build the URL.
        :return: The order's URL.
        """
        return url_for(request, "orders", id=str(self.order_id))
    def finalize_url(self, request) -> str:
        """Returns the order's *finalize* URL.
        :param request: The client request needed to build the URL.
        :return: The URL at which the client may request the order to be finalized.
        """
        return url_for(request, "finalize-order", id=str(self.order_id))
    def certificate_url(self, request) -> str:
        """Returns the order's *certificate* URL.
        :param request: The client request needed to build the URL.
        :return: The URL at which the client may download the certificate that was generated as a result of the order.
        """
        return url_for(request, "certificate", id=str(self.certificate.certificate_id))
    def validate_csr(self, csr: "cryptography.x509.CertificateSigningRequest") -> bool:
        """Validates whether the given CSR's names equal the order's identifiers.
        Accounts for different capitalizations.
        :param csr: The CSR to validate.
        :return: *True* iff the set of names in the CSR equals the order's set of identifiers.
        """
        identifiers = set(identifier.value.lower() for identifier in self.identifiers)
        return identifiers == names_of(csr, lower=True)
    async def validate(self) -> OrderStatus:
        """Validates the order.
        This method is usually not called directly. Rather,
        :func:`acmetk.models.authorization.Authorization.validate` calls it as an authorization that corresponds
        to the order is being validated.
        :return: The order's status after validation.
        """
        # Only pending orders can transition; everything else is final here.
        if self.status != OrderStatus.PENDING:
            return self.status
        if datetime.now(timezone.utc) > self.expires:
            self.status = OrderStatus.INVALID
            return self.status
        for identifier in self.identifiers:
            if identifier.authorization.status == AuthorizationStatus.INVALID:
                self.status = OrderStatus.INVALID
                break
            if not identifier.authorization.is_valid():
                break
        else:
            # for/else: only reached when no break occurred, i.e. every
            # authorization is valid, so the order becomes ready.
            self.status = OrderStatus.READY
        return self.status
    def serialize(self, request=None) -> dict:
        """Serialize the order into an ACME order JSON object."""
        d = super().serialize(request)
        d["identifiers"] = super().serialize_list(self.identifiers)
        # Section on which authorizations to include:
        # https://tools.ietf.org/html/rfc8555#section-7.1.3
        def show_authz(authorization) -> bool:
            if self.status in (OrderStatus.VALID, OrderStatus.INVALID):
                return authorization.is_valid()
            else:  # self.status in (OrderStatus.PENDING, OrderStatus.PROCESSING, OrderStatus.READY):
                return (
                    authorization.status == AuthorizationStatus.PENDING
                    or authorization.is_valid()
                )
        d["authorizations"] = [
            identifier.authorization.url(request)
            for identifier in self.identifiers
            if show_authz(identifier.authorization)
        ]
        d["finalize"] = self.finalize_url(request)
        if self.status == OrderStatus.VALID:
            d["certificate"] = self.certificate_url(request)
        if self.proxied_error:
            d["error"] = self.proxied_error.to_partial_json()
        return d
    @classmethod
    def from_obj(
        cls,
        account: "acmetk.models.account.Account",
        obj: acme.messages.NewOrder,
        challenge_types: typing.Iterable["acmetk.models.challenge.ChallengeType"],
    ) -> "Order":
        """A factory that constructs a new :class:`Order` from a message object.
        The field *expires* is set to 7 days in the future from the time this method is called and
        the *status* is initially set to *pending*.
        Furthermore, the order object is automatically associated with the given account and all
        :class:`~acmetk.models.identifier.Identifier`, :class:`~acmetk.models.authorization.Authorization`,
        and :class:`~acmetk.models.challenge.Challenge` objects are created as well as associated with the order.
        :param account: The account that places the order.
        :param obj: The registration message object.
        :param challenge_types: The types of challenges to create.
        :return: The constructed order.
        """
        identifiers = [
            Identifier.from_obj(identifier) for identifier in obj.identifiers
        ]
        for identifier in identifiers:
            identifier.authorization = Authorization.for_identifier(identifier)
            identifier.authorization.challenges = Challenge.create_types(
                challenge_types
            )
        order = Order(
            expires=datetime.now(timezone.utc) + timedelta(days=7),
            status=OrderStatus.PENDING,
            account=account,
            identifiers=identifiers,
        )
        return order
    @property
    def account_of(self):
        # The account that owns this order.
        return self.account
    @property
    def order_of(self):
        # An order's owning order is itself.
        return self
| StarcoderdataPython |
#!/usr/bin/env python
"""Download a tarball, verify its checksum, and extract it to --out-path.

Emulates tar's --strip-components by extracting into a temporary directory
and moving the nested entries into the destination.
"""
from __future__ import print_function
import argparse
import hashlib
import io
import os
import shutil
import tarfile
import tempfile
from glob import glob
import requests
parser = argparse.ArgumentParser()
parser.add_argument('url')
parser.add_argument('--checksum', '-c', required=True)
parser.add_argument('--checksum-type', '-t', default='sha256')
parser.add_argument('--out-path', '-p', default='.')
parser.add_argument('--strip-components', type=int, default=0)
args = parser.parse_args()
# should be possible to do this in one pass, but requires "tee"ing the file to
# both GzipFile and hashlib, so whatever.
chunksize = 32 * 2**20
tar_fn = 'shogun-gpl.tar.gz'
# NOTE(review): verify=False disables TLS certificate checks; integrity is
# enforced by the checksum comparison below, but confirm this is intentional.
response = requests.get(args.url, verify=False, stream=True)
response.raise_for_status()
with open(tar_fn, 'wb') as f:
    for block in response.iter_content(chunksize):
        f.write(block)
try:
    # Hash the downloaded file in chunks to bound memory use.
    digest = hashlib.new(args.checksum_type)
    with io.open(tar_fn, 'rb') as f:
        while True:
            x = f.read(chunksize)
            digest.update(x)
            if not x:
                break
    d = digest.hexdigest()
    if d != args.checksum:
        parser.error("Bad digest: expected {}, got {}".format(args.checksum, d))
    if not os.path.exists(args.out_path):
        os.makedirs(args.out_path)
    with tarfile.open(tar_fn, 'r') as tar:
        if not args.strip_components:
            tar.extractall(args.out_path)
        else:
            # hacky way to do this...
            # Extract into a sibling temp dir, then move the entries found
            # `strip_components` levels deep into the real out path.
            tmpdir = tempfile.mkdtemp(dir=os.path.dirname(args.out_path))
            try:
                tar.extractall(tmpdir)
                print('extracted to {}'.format(tmpdir))
                # One '*' per stripped level, plus one for the entries to keep.
                tup = ('*',) * (args.strip_components + 1)
                for fn in glob(os.path.join(tmpdir, *tup)):
                    target = os.path.join(args.out_path, os.path.basename(fn))
                    os.rename(fn, target)
            finally:
                shutil.rmtree(tmpdir)
finally:
    # Always remove the downloaded tarball, even on failure.
    os.remove(tar_fn)
| StarcoderdataPython |
3383119 | from django import forms
from .models import UserProfile,Neighborhood,Business,Update,Health
class UpdateForm(forms.ModelForm):
    # Form for Update posts; `writer` is excluded so it can be filled in
    # programmatically (presumably the logged-in user) — confirm in the view.
    class Meta:
        model = Update
        exclude = ['writer']
class BusinessForm(forms.ModelForm):
    # Form exposing every field of the Business model.
    class Meta:
        model = Business
        fields = '__all__'
class HealthForm(forms.ModelForm):
    # Form exposing every field of the Health model.
    class Meta:
        model = Health
        fields = '__all__'
class ProfileForm(forms.ModelForm):
    # Form for editing a UserProfile; `user` is excluded so it can be set
    # programmatically rather than by the submitter.
    class Meta:
        model = UserProfile
        exclude = ['user']
class NeighborhoodForm(forms.ModelForm):
    # NOTE(review): despite its name, this form is bound to UserProfile (not
    # the imported Neighborhood model) and excludes UserProfile fields —
    # confirm whether `model = Neighborhood` was intended.
    class Meta:
        model = UserProfile
        exclude = ['user','user_image','user_email']
| StarcoderdataPython |
1624840 | import logging
from contextlib import contextmanager
from metadata_provider import MetadataProvider
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
# Declarative base shared by the ORM models whose tables generateSchema() creates.
DbBase = declarative_base()
class DbProvider(object):
    """Wrapper around a SQLAlchemy MySQL engine with per-operation sessions.

    Connection credentials are looked up in a :class:`MetadataProvider`,
    which stores them as UTF-8 encoded bytes.
    """

    def __init__(self, metadata: MetadataProvider):
        self.metadata = metadata

    def connect(self):
        """Build the engine and session factory from stored credentials."""
        logging.info("Connecting to mysql...")
        sql_host = self.metadata.get('sql_host').decode('utf-8')
        sql_user = self.metadata.get('sql_user').decode('utf-8')
        sql_pass = self.metadata.get('sql_pass').decode('utf-8')
        sql_db = self.metadata.get('sql_db').decode('utf-8')
        connect_str = 'mysql://{}:{}@{}/{}'.format(sql_user, sql_pass, sql_host, sql_db)
        # Lazy %-formatting: the URL (which contains the password) is only
        # rendered when DEBUG logging is actually enabled.
        logging.debug("Connecting to db: %s", connect_str)
        self.engine = create_engine(connect_str)
        self.session_factory = sessionmaker(bind=self.engine)

    def generateSchema(self):
        """Create all tables registered on DbBase's metadata (idempotent)."""
        logging.info("Generating mysql schema")
        DbBase.metadata.create_all(self.engine)

    @staticmethod
    def _match_state_model(year):
        """Return the year-specific MatchState model class.

        Raises ValueError for unsupported years; previously the inline
        dispatch left `MatchState` unbound and failed with a NameError.
        """
        if year == 2017:
            from db.match_state_2017 import MatchState2017 as MatchState
        elif year == 2018:
            from db.match_state_2018 import MatchState2018 as MatchState
        else:
            raise ValueError("No MatchState model for year {}".format(year))
        return MatchState

    def deleteMatchData(self, match_key):
        """Delete all rows for one match.

        The key encodes year (first 4 chars), event, and match id,
        e.g. '2018xxxx_qm1'.
        """
        year = int(match_key[:4])
        event_key = match_key.split("_")[0]
        match_id = match_key.split("_")[1]
        MatchState = self._match_state_model(year)
        with self.session() as session:
            logging.info("clearing all match data for {}".format(match_key))
            session.query(MatchState).filter(
                MatchState.event_key == event_key).filter(
                MatchState.match_id == match_id).delete()

    def deleteEventData(self, event_key):
        """Delete all rows belonging to the given event key."""
        year = int(event_key[:4])
        MatchState = self._match_state_model(year)
        with self.session() as session:
            logging.info("clearing all event data for {}".format(event_key))
            session.query(MatchState).filter(
                MatchState.event_key == event_key).delete()

    @contextmanager
    def session(self):
        """Yield a session; commit on success, roll back on error, always close."""
        session = self.session_factory()
        try:
            yield session
            session.commit()
        except BaseException:
            session.rollback()
            raise
        finally:
            session.close()
| StarcoderdataPython |
3288140 | # This file is part of spot_motion_monitor.
#
# Developed for LSST System Integration, Test and Commissioning.
#
# See the LICENSE file at the top-level directory of this distribution
# for details of code ownership.
#
# Use of this source code is governed by a 3-clause BSD-style
# license that can be found in the LICENSE file.
from PyQt5.QtCore import QObject, pyqtSignal
# Public API of this module.
__all__ = ['InformationUpdater']
class InformationUpdater(QObject):
    """Small class to allow any object to update the main application
    or other controllers.
    Attributes
    ----------
    acquireRoiState : pyqtSignal
        Signal used to update data controller on acquire ROI state changes.
    bufferSizeChanged : pyqtSignal
        Signal used to update data controller with a new buffer size.
    cameraState : pyqtSignal
        Signal used to update application UI based on camera state.
    displayStatus : pyqtSignal
        Signal used for updating the main application status bar.
    roiFpsChanged : pyqtSignal
        Signal used to update controllers with a new ROI FPS.
    takeScreenshotState : pyqtSignal
        Signal used to take a screenshot of the CCD plot.
    """
    acquireRoiState = pyqtSignal(bool)  # payload: ROI acquisition on/off
    bufferSizeChanged = pyqtSignal(int)  # payload: new buffer size
    cameraState = pyqtSignal(bool)  # payload: camera running on/off
    roiFpsChanged = pyqtSignal(int)  # payload: new ROI frames-per-second
    displayStatus = pyqtSignal(str, int)  # payload: message, duration (presumably ms — confirm at emit site)
    takeScreenshotState = pyqtSignal()  # no payload
| StarcoderdataPython |
#!/usr/bin/python
"""
make some analysis on the contents of the DB
"""
# NOTE: Python 2 script (uses print statements).
import db
from config import *
# toggle per-concept detail output
PRINT_DETAILS=True
# simple: connect to the sqlite DB
get_db()
conn, model = conf['db'], conf['model']
# get models used
dbc = conn.cursor()
# list the distinct models present in the data table
# NOTE(review): queries below interpolate values into SQL strings; safe only
# because model names come from this same DB — prefer parameterized queries.
dbc.execute("select distinct model from data")
models = [c[0] for c in dbc.fetchall()]
for model in models:
    # see if max_stage == 3, else continue
    dbc.execute("SELECT MAX(step) FROM data WHERE model='%s'" % (model,))
    if (dbc.fetchone()[0] != 3): continue
    # total number of concepts:
    dbc.execute("SELECT COUNT(*) FROM data WHERE model='%s' AND step IN (1,2,3)" % (model,))
    tot_num = dbc.fetchone()[0]
    # sanity check: a complete model is expected to have exactly 75 concepts
    if tot_num != 75: raise Exception
    # number of relevant concepts
    dbc.execute("SELECT COUNT(*) FROM data WHERE model='%s' AND step IN (1,2,3) AND relevant=1" % (model,))
    rel_num = dbc.fetchone()[0]
    print "\n\nNext model:", model, " -- Total number of concepts:", tot_num, "Num: relevant", rel_num , "Perc. relevant", rel_num / float(tot_num)
    if PRINT_DETAILS:
        for step in [1,2,3]:
            # relevant terms for this step (25 concepts per step)
            dbc.execute("SELECT term FROM data WHERE model='%s' AND step IN (%d) AND relevant=1" % (model, step))
            rel = [t[0] for t in dbc.fetchall()]
            print "\n\tStep: %d -- Number of relevant concepts: %d -- Percent: %f" % (step, len(rel), len(rel)/25.0)
            for t in rel:
                print "\t\t+ ", t
            # non-relevant terms for this step
            dbc.execute("SELECT term FROM data WHERE model='%s' AND step IN (%d) AND relevant=0" % (model, step))
            nrel = [t[0] for t in dbc.fetchall()]
            for t in nrel:
                print "\t\t- ", t
| StarcoderdataPython |
"""Cut a large image into square map tiles at multiple zoom levels."""
import math, time, os, argparse, logging, json
from wand.image import Image

parser = argparse.ArgumentParser(
    prog='tile_cutter',
    description='Cuts large images into tiles.')
parser.add_argument('--tile-size', metavar='SIZE', type=int, default=512,
                    help='Tile size (width and height)')
parser.add_argument('-v', '--verbose', action='store_true',
                    help='Log debugging information')
parser.add_argument('image', type=argparse.FileType('rb'),
                    help='Source image')
args = parser.parse_args()

if args.verbose:
    logging.basicConfig(level=logging.DEBUG)
else:
    logging.basicConfig(level=logging.INFO)

layers = []
tile_size = args.tile_size
logging.info("tile size: %dx%d", tile_size, tile_size)

with Image(file=args.image) as source:
    logging.info("image size: %dx%d", source.width, source.height)
    # every zoom level has 2x more tiles
    max_zoom = math.ceil(math.log(max(source.size) / args.tile_size, 2))
    logging.info("zoom levels: 1-%d", max_zoom)
    # Side of the padded square canvas that holds the whole image at max zoom.
    image_size = args.tile_size * (2 ** max_zoom)
    offset_x, offset_y = tuple((image_size - orig) // 2 for orig in source.size)
    logging.info("tiled size: %dx%d-%d-%d", image_size, image_size, offset_x, offset_y)
    layers.append({
        "name": "???",
        "URL": os.path.basename(args.image.name),
        "width": source.width,
        "height": source.height,
        "tileSize": args.tile_size,
        "imageSize": image_size
    })
    # Center the original image on the square canvas.
    square_source = Image(width=image_size, height=image_size)
    square_source.composite(source,
                            (square_source.width - source.width) // 2,
                            (square_source.height - source.height) // 2)
    for z in range(1, max_zoom + 1):
        # Side length (in canvas pixels) of the square each tile covers.
        source_size = int(args.tile_size * (2 ** (max_zoom - z)))
        logging.info("zoom level %d: source %dx%d", z, source_size, source_size)
        current_image = 0
        total_images = (image_size // source_size) ** 2
        # FIX: time.clock() was removed in Python 3.8; time.perf_counter()
        # is the documented replacement for measuring elapsed time.
        start_time = last_report_time = time.perf_counter()
        for y in range(0, image_size // source_size):
            for x in range(0, image_size // source_size):
                crop_x, crop_y = x * source_size, y * source_size
                path = "%s-tiles/%d/%d/%d.png" % (args.image.name, z, x, y)
                logging.debug("tile %s: source %dx%d%+d%+d",
                              path, source_size, source_size, crop_x, crop_y)
                with square_source.clone() as tile:
                    tile.crop(crop_x, crop_y, width=source_size, height=source_size)
                    tile.resize(tile_size, tile_size)
                    os.makedirs(os.path.dirname(path), exist_ok=True)
                    tile.save(filename=path)
                current_image += 1
                # Report progress and ETA at most once per second.
                if time.perf_counter() - last_report_time > 1:
                    last_report_time = time.perf_counter()
                    eta = (last_report_time - start_time) / current_image * \
                        (total_images - current_image)
                    logging.info("completion: %.2f%% (ETA: %dh%dm%ds)",
                                 current_image / total_images * 100,
                                 eta // 3600, (eta % 3600) // 60, eta % 60)

with open("%s.json" % args.image.name, "w") as descr:
    descr.write(json.dumps({
        "name": "???",
        "scale": None,
        "layers": layers
    }))
    logging.info("image description written to: %s" % descr.name)

logging.info("done")
| StarcoderdataPython |
1720785 | # Generated by Django 3.1 on 2020-08-16 05:23
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration: adds optional start/end dates to Lesson.
    dependencies = [
        ('lesson_planner', '0018_auto_20200815_1745'),
    ]
    operations = [
        # Both fields are nullable, so existing Lesson rows remain valid.
        migrations.AddField(
            model_name='lesson',
            name='date_end',
            field=models.DateField(null=True),
        ),
        migrations.AddField(
            model_name='lesson',
            name='date_start',
            field=models.DateField(null=True),
        ),
    ]
| StarcoderdataPython |
1721643 | from flask import Blueprint
from flask import request
from ..utils.responses import response_with
from ..utils import responses as resp
from ..models.books import Book, BookSchema
from ..utils.database import db
# Blueprint grouping the book CRUD endpoints defined below.
book_routes = Blueprint('book_routes', __name__)
@book_routes.route('/', methods=['POST'])
def create_book():
    """Create a new book from the JSON request body.

    Returns 201 with the serialized book on success, 422 on any failure.
    """
    try:
        payload = request.get_json()
        schema = BookSchema()
        created = schema.dump(schema.load(payload).create())
        return response_with(resp.SUCCESS_201, value={'book': created})
    except Exception as exc:
        print(exc)
        return response_with(resp.INVALID_INPUT_422)
@book_routes.route('/', methods=['GET'])
def get_book_list():
    """Return every book, serialized with a reduced field set."""
    schema = BookSchema(many=True, only=['author_id', 'title', 'year'])
    all_books = schema.dump(Book.query.all())
    return response_with(resp.SUCCESS_200, value={"books": all_books})
@book_routes.route('/<int:id>', methods=['GET'])
def get_book_detail(id):
    """Return a single book by id, or 404 if it does not exist."""
    found = Book.query.get_or_404(id)
    serialized = BookSchema().dump(found)
    return response_with(resp.SUCCESS_200, value={'book': serialized})
@book_routes.route('/<int:id>', methods=['PUT'])
def update_book_detail(id):
    """Replace a book's title and year (full update via PUT).

    Returns 422 instead of crashing with a KeyError (HTTP 500) when the
    JSON body is missing or lacks the required 'title'/'year' fields.
    """
    data = request.get_json()
    if not data or 'title' not in data or 'year' not in data:
        return response_with(resp.INVALID_INPUT_422)
    get_book = Book.query.get_or_404(id)
    get_book.title = data['title']
    get_book.year = data['year']
    db.session.add(get_book)
    db.session.commit()
    book_schema = BookSchema()
    book = book_schema.dump(get_book)
    return response_with(resp.SUCCESS_200, value={'book': book})
@book_routes.route('/<int:id>', methods=['PATCH'])
def modify_book_detail(id):
    """Partially update a book; only supplied fields are changed.

    NOTE(review): truthiness checks mean falsy values (e.g. year=0, empty
    title) are silently ignored — confirm this is intended.
    """
    data = request.get_json()
    target = Book.query.get_or_404(id)
    new_title = data.get('title')
    if new_title:
        target.title = new_title
    new_year = data.get('year')
    if new_year:
        target.year = new_year
    db.session.add(target)
    db.session.commit()
    serialized = BookSchema().dump(target)
    return response_with(resp.SUCCESS_200, value={'book': serialized})
@book_routes.route('/<int:id>', methods=['DELETE'])
def delete_book(id):
    """Delete the book with the given id; 404 if it does not exist."""
    doomed = Book.query.get_or_404(id)
    db.session.delete(doomed)
    db.session.commit()
    return response_with(resp.SUCCESS_204)
| StarcoderdataPython |
82070 | import dataclasses
import click
import datetime
import neuro_extras
from collections import defaultdict
from graphviz import Digraph
from neuro_cli import __version__ as cli_version
from neuro_sdk import Client, ResourceNotFound, __version__ as sdk_version
from operator import attrgetter
from rich import box
from rich.console import Console
from rich.panel import Panel
from rich.table import Table
from types import TracebackType
from typing import (
AbstractSet,
Dict,
Iterable,
List,
Mapping,
Optional,
Sequence,
Set,
Tuple,
Type,
Union,
cast,
)
from typing_extensions import AsyncContextManager, AsyncIterator
from yarl import URL
import neuro_flow
from . import ast
from .batch_executor import BatchExecutor, LocalsBatchExecutor, get_running_flow
from .colored_topo_sorter import ColoredTopoSorter
from .commands import CmdProcessor
from .config_loader import BatchLocalCL
from .context import (
EMPTY_ROOT,
EarlyBatch,
EarlyLocalCall,
ProjectCtx,
RunningBatchFlow,
setup_project_ctx,
)
from .expr import EvalError, MultiError
from .parser import ConfigDir
from .storage.base import (
Attempt,
Bake,
BakeImage,
BakeMeta,
BakeStorage,
ProjectStorage,
Storage,
)
from .types import FullID, LocalPath, TaskStatus
from .utils import (
CommandRunner,
GlobalOptions,
collect_git_info,
encode_global_options,
fmt_datetime,
fmt_timedelta,
make_cmd_exec,
)
# Image used when launching the remote batch executor job; pinned to the
# currently installed neuro-flow version.
EXECUTOR_IMAGE = f"ghcr.io/neuro-inc/neuro-flow:{neuro_flow.__version__}"
# Color per task status — presumably for graphviz graph rendering (Digraph is
# imported above); confirm at the use site.
GRAPH_COLORS = {
    TaskStatus.PENDING: "skyblue",
    TaskStatus.RUNNING: "steelblue",
    TaskStatus.SUCCEEDED: "limegreen",
    TaskStatus.CANCELLED: "orange",
    TaskStatus.SKIPPED: "magenta",
    TaskStatus.CACHED: "yellowgreen",
    TaskStatus.FAILED: "orangered",
    TaskStatus.UNKNOWN: "crimson",
}
class BakeFailedError(Exception):
    """Raised when a bake finishes in a non-successful terminal state.

    The terminal status is kept on :attr:`status` for callers that need it.
    """

    def __init__(self, status: TaskStatus):
        # Pass a readable message to Exception so str()/logging show the
        # status; previously str(exc) was empty.
        super().__init__(f"Bake failed with status {status}")
        self.status = status
async def iter_flows(top_flow: EarlyBatch) -> AsyncIterator[Tuple[FullID, EarlyBatch]]:
    """Yield (prefix, flow) pairs for *top_flow* and every nested action flow.

    Traversal is breadth-first, starting with the empty prefix for the
    top-level flow itself.
    """
    queue: List[Tuple[FullID, EarlyBatch]] = [((), top_flow)]
    while queue:
        prefix, current = queue.pop(0)
        yield prefix, current
        for task_id in current.graph:
            if not await current.is_action(task_id):
                continue
            nested = await current.get_action_early(task_id)
            queue.append((prefix + (task_id,), nested))
async def check_no_cycles(top_flow: EarlyBatch) -> None:
    """Raise if any (sub)flow graph contains a dependency cycle.

    ColoredTopoSorter detects cycles in its constructor; the instance
    itself is discarded.
    """
    async for _prefix, sub_flow in iter_flows(top_flow):
        ColoredTopoSorter(sub_flow.graph)
async def check_local_deps(top_flow: EarlyBatch) -> None:
    """Fail if any local action depends on a remotely-executed task.

    All local actions must succeed before the remote executor starts, so a
    local -> remote dependency can never be satisfied.
    """
    # This method works in O(kn^3), where:
    # - n is number of tasks in the flow
    # - k is maximal depth of actions
    # This complexity is because:
    # For each task (n) for task's each dependency (n) and for each remote task (n)
    # do prefix check (k). Note that tasks often have only a few dependencies,
    # so in real cases one of those n is effectively const.
    #
    # If performance becomes a problem, it can be replaced
    # with Trie (prefix tree) to reduce time complexity to O(kn^2)
    # (for each task (n) for each task's dependency (n) do Trie check (k))
    runs_on_remote: Set[FullID] = set()
    # First pass: collect the full ids of every task that executes remotely.
    async for prefix, flow in iter_flows(top_flow):
        runs_on_remote.update(
            {prefix + (tid,) for tid in flow.graph if await flow.is_task(tid)}
        )
    def _is_prefix(item: FullID, prefix: FullID) -> bool:
        # True iff `prefix` is a (possibly equal) leading segment of `item`.
        if len(item) < len(prefix):
            return False
        return all(x == y for (x, y) in zip(item, prefix))
    def _remote_deps(prefix: FullID, deps: Iterable[str]) -> Iterable[FullID]:
        # Remote tasks that fall under any of the given dependency ids.
        return (
            remote
            for dep in deps
            for remote in runs_on_remote
            if _is_prefix(remote, prefix + (dep,))
        )
    # Second pass: inspect every local action's dependencies.
    async for prefix, flow in iter_flows(top_flow):
        early_locals = cast(
            AsyncIterator[EarlyLocalCall],
            (
                await flow.get_local_early(tid)
                for tid in flow.graph
                if await flow.is_local(tid)
            ),
        )
        with_bad_deps = (
            (early_local, remote)
            async for early_local in early_locals
            for remote in _remote_deps(prefix, early_local.needs)
        )
        # The first offending pair (if any) aborts the whole check.
        async for early_local, remote in with_bad_deps:
            early_local_str = ".".join(prefix + (early_local.real_id,))
            remote_str = ".".join(remote)
            raise Exception(
                f"Local action '{early_local_str}' depends on remote "
                f"task '{remote_str}'. This is not supported because "
                "all local action should succeed before "
                "remote executor starts."
            )
async def check_expressions(top_flow: RunningBatchFlow) -> None:
    """Collect expression errors from every flow and raise them together."""
    all_errors: List[EvalError] = []
    async for _prefix, sub_flow in iter_flows(top_flow):
        all_errors.extend(sub_flow.validate_expressions())
    if all_errors:
        raise MultiError(all_errors)
class ImageRefNotUniqueError(Exception):
    """Raised when a single image ref is declared with conflicting params."""

    @dataclasses.dataclass
    class ImageInfo:
        # Build parameters and AST node of one `image:` definition.
        context: Optional[Union[URL, LocalPath]]
        dockerfile: Optional[Union[URL, LocalPath]]
        ast: ast.Image

    def __init__(self, ref: str, images: Sequence[ImageInfo]) -> None:
        self._ref = ref
        self._images = images

    def __str__(self) -> str:
        # One detail paragraph per conflicting definition site.
        details = []
        for image in self._images:
            details.append(
                f"at {EvalError.format_pos(image.ast._start)} with params:\n"
                f"  context: {image.context or '<empty>'}\n"
                f"  dockerfile: {image.dockerfile or '<empty>'}"
            )
        return (
            f"Image with ref '{self._ref}' defined multiple times "
            f"with different attributes:\n" + "\n".join(details)
        )
async def check_image_refs_unique(top_flow: RunningBatchFlow) -> None:
    """Ensure every 'image:' ref maps to a single (context, dockerfile) pair.

    Raises a MultiError of ImageRefNotUniqueError for refs defined several
    times with conflicting build parameters.
    """
    by_ref: Dict[str, List[ImageRefNotUniqueError.ImageInfo]] = defaultdict(list)
    async for _prefix, sub_flow in iter_flows(top_flow):
        for image in sub_flow.early_images.values():
            if not image.ref.startswith("image:"):
                continue
            by_ref[image.ref].append(
                ImageRefNotUniqueError.ImageInfo(
                    context=image.context,
                    dockerfile=image.dockerfile,
                    ast=sub_flow.get_image_ast(image.id),
                )
            )
    problems = []
    for ref, infos in by_ref.items():
        distinct_contexts = {info.context for info in infos}
        distinct_dockerfiles = {info.dockerfile for info in infos}
        if len(distinct_contexts) > 1 or len(distinct_dockerfiles) > 1:
            problems.append(ImageRefNotUniqueError(ref, infos))
    if problems:
        raise MultiError(problems)
async def build_graphs(
    top_flow: RunningBatchFlow,
) -> Mapping[FullID, Mapping[FullID, AbstractSet[FullID]]]:
    """Return per-flow dependency graphs with task ids expanded to full ids."""
    result = {}
    async for prefix, sub_flow in iter_flows(top_flow):
        expanded = {}
        for task_id, deps in sub_flow.graph.items():
            expanded[prefix + (task_id,)] = {prefix + (dep,) for dep in deps}
        result[prefix] = expanded
    return result
async def upload_image_data(
    top_flow: RunningBatchFlow,
    neuro_runner: CommandRunner,
    storage: BakeStorage,
) -> List[BakeImage]:
    """Upload local image build contexts and register one BakeImage per ref.

    Local (filesystem) contexts are copied to a per-project storage folder
    via the neuro CLI; contexts already on storage are used as-is.
    """
    @dataclasses.dataclass
    class _TmpData:
        # Accumulated data for a single image ref before persisting it.
        context_on_storage: Optional[URL]
        dockerfile_rel: Optional[str]
        yaml_defs: List[FullID]
    _tmp: Dict[str, _TmpData] = {}
    async for prefix, flow in iter_flows(top_flow):
        for image in flow.early_images.values():
            if isinstance(image.context, LocalPath):
                # Reusing image ref between bakes introduces
                # race condition anyway, so we can safely use it
                # as remote context dir name
                storage_context_dir: Optional[URL] = URL(
                    f"storage:.flow/{top_flow.project_id}/{image.ref.replace(':', '/')}"
                )
            else:
                storage_context_dir = image.context
            dockerfile_rel = None
            if image.dockerfile_rel:
                dockerfile_rel = str(image.dockerfile_rel.as_posix())
            prev_entry = _tmp.get(image.ref)
            if prev_entry is not None:
                # Validation is done before (check_image_refs_unique), so a
                # repeated ref only records one more definition site.
                prev_entry.yaml_defs.append(prefix + (image.id,))
            else:
                if isinstance(image.context, LocalPath):
                    # Copy the local context directory up to storage.
                    await neuro_runner(
                        "mkdir",
                        "--parents",
                        str(storage_context_dir),
                    )
                    await neuro_runner(
                        "cp",
                        "--recursive",
                        "--update",
                        "--no-target-directory",
                        str(image.context),
                        str(storage_context_dir),
                    )
                _tmp[image.ref] = _TmpData(
                    yaml_defs=[prefix + (image.id,)],
                    context_on_storage=storage_context_dir,
                    dockerfile_rel=dockerfile_rel,
                )
    return [
        await storage.create_bake_image(
            ref=ref,
            yaml_defs=entry.yaml_defs,
            context_on_storage=entry.context_on_storage,
            dockerfile_rel=entry.dockerfile_rel,
        )
        for ref, entry in _tmp.items()
    ]
class BatchRunner(AsyncContextManager["BatchRunner"]):
def __init__(
self,
config_dir: ConfigDir,
console: Console,
client: Client,
storage: Storage,
global_options: GlobalOptions,
run_neuro_cli: Optional[CommandRunner] = None,
) -> None:
self._config_dir = config_dir
self._console = console
self._client = client
self._storage = storage
self._project_storage: Optional[ProjectStorage] = None
self._config_loader: Optional[BatchLocalCL] = None
self._project: Optional[ProjectCtx] = None
self._run_neuro_cli = run_neuro_cli or make_cmd_exec(
"neuro", global_options=encode_global_options(global_options)
)
self._global_options = global_options
@property
def project_id(self) -> str:
assert self._project is not None
return self._project.id
@property
def project_role(self) -> Optional[str]:
assert self._project is not None
return self._project.role
@property
def config_loader(self) -> BatchLocalCL:
assert self._config_loader is not None
return self._config_loader
@property
def storage(self) -> ProjectStorage:
assert self._project_storage is not None
return self._project_storage
async def close(self) -> None:
if self._config_loader is not None:
await self._config_loader.close()
async def __aenter__(self) -> "BatchRunner":
self._config_loader = BatchLocalCL(self._config_dir, self._client)
self._project = await setup_project_ctx(EMPTY_ROOT, self._config_loader)
project = await self._storage.get_or_create_project(self._project.id)
self._project_storage = self._storage.project(id=project.id)
return self
async def __aexit__(
self,
exc_typ: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> None:
await self.close()
# Next function is also used in tests:
async def _setup_bake(
self,
batch_name: str,
params: Optional[Mapping[str, str]] = None,
name: Optional[str] = None,
tags: Sequence[str] = (),
) -> Tuple[Bake, RunningBatchFlow]:
# batch_name is a name of yaml config inside self._workspace / .neuro
# folder without the file extension
self._console.log(f"[bright_black]neuro_sdk=={sdk_version}")
self._console.log(f"[bright_black]neuro_cli=={cli_version}")
self._console.log(f"[bright_black]neuro-extras=={neuro_extras.__version__}")
self._console.log(f"[bright_black]neuro-flow=={neuro_flow.__version__}")
self._console.log(f"Use config file {self.config_loader.flow_path(batch_name)}")
# Check that the yaml is parseable
flow = await RunningBatchFlow.create(
self.config_loader, batch_name, "fake-bake-id", params
)
for volume in flow.volumes.values():
if volume.local is not None:
# TODO: sync volumes if needed
raise NotImplementedError("Volumes sync is not supported")
await check_no_cycles(flow)
await check_local_deps(flow)
await check_expressions(flow)
await check_image_refs_unique(flow)
graphs = await build_graphs(flow)
self._console.log(
"Check config... [green]ok[/green]",
)
self._console.log("Create bake...")
bake = await self.storage.create_bake(
batch=batch_name,
graphs=graphs,
params=flow.params,
name=name,
tags=tags,
meta=BakeMeta(
git_info=await collect_git_info(),
),
)
bake_storage = self.storage.bake(id=bake.id)
config_meta = await self.config_loader.collect_configs(batch_name, bake_storage)
await bake_storage.create_attempt(number=1, configs_meta=config_meta)
self._console.log(
f"Bake [b]{bake.name or bake.id}[/b] of "
f"project [b]{self.project_id}[/b] is created"
)
self._console.log("Uploading image contexts/dockerfiles...")
await upload_image_data(flow, self._run_neuro_cli, bake_storage)
return bake, flow
# Next function is also used in tests:
async def _run_locals(
self,
bake_id: str,
) -> TaskStatus:
async with LocalsBatchExecutor.create(
self._console,
bake_id,
self._client,
self._storage,
project_role=self.project_role,
) as executor:
return await executor.run()
async def bake(
self,
batch_name: str,
local_executor: bool = False,
params: Optional[Mapping[str, str]] = None,
name: Optional[str] = None,
tags: Sequence[str] = (),
) -> None:
self._console.print(
Panel(f"[bright_blue]Bake [b]{batch_name}[/b]", padding=1),
justify="center",
)
bake, flow = await self._setup_bake(batch_name, params, name, tags)
await self._run_bake(bake, flow, local_executor)
async def _run_bake(
    self,
    bake: Bake,
    flow: RunningBatchFlow,
    local_executor: bool,
) -> None:
    """Run a bake: local actions first, then the main actions.

    Main actions are executed either in-process (``local_executor=True``)
    or by submitting a remote executor job through the CLI.

    Args:
        bake: The bake record created by ``_setup_bake``.
        flow: The resolved flow the bake was created from.
        local_executor: Whether to run the executor in this process.
    """
    self._console.rule("Run local actions")
    locals_result = await self._run_locals(bake.id)
    if locals_result != TaskStatus.SUCCEEDED:
        # Local actions failed or were cancelled; do not start main actions.
        return
    self._console.rule("Run main actions")
    if local_executor:
        self._console.log("[bright_black]Using local executor")
        await self.process(bake.id)
    else:
        self._console.log("[bright_black]Starting remote executor")
        if flow.life_span:
            life_span = fmt_timedelta(flow.life_span)
        else:
            life_span = "7d"  # default lifetime for the executor job
        run_args = [
            "run",
            "--pass-config",
            # BUG FIX: a comma was missing after the --volume argument, so
            # Python implicitly concatenated this f-string with the next one
            # into a single malformed CLI argument
            # ("--volume=...logs--life-span=7d").
            f"--volume=storage:.flow/logs/{bake.id}/:/root/.neuro/logs",
            f"--life-span={life_span}",
            f"--tag=project:{self.project_id}",
            f"--tag=flow:{bake.batch}",
            f"--tag=bake_id:{bake.id}",
            "--tag=remote_executor",
        ]
        project_role = self.project_role
        if project_role is not None:
            run_args.append(f"--share={project_role}")
        run_args += [
            EXECUTOR_IMAGE,
            "--",
            "neuro-flow",
            *encode_global_options(self._global_options),
            "--fake-workspace",
            "execute",
            bake.id,
        ]
        await self._run_neuro_cli(*run_args)
async def process(
    self,
    bake_id: str,
) -> None:
    """Execute the bake's main actions in this process.

    Raises:
        BakeFailedError: If the executor finishes with any status other
            than ``TaskStatus.SUCCEEDED``.
    """
    executor_cm = BatchExecutor.create(
        self._console,
        bake_id,
        self._client,
        self._storage,
        project_role=self.project_role,
    )
    async with executor_cm as batch_exec:
        final_status = await batch_exec.run()
    if final_status != TaskStatus.SUCCEEDED:
        raise BakeFailedError(final_status)
def get_bakes(self) -> AsyncIterator[Bake]:
    """Return an async iterator over all bakes known to the storage."""
    return self.storage.list_bakes()
async def get_bake_attempt(self, bake_id: str, *, attempt_no: int = -1) -> Attempt:
    """Fetch one attempt of a bake; ``attempt_no=-1`` selects the latest.

    NOTE(review): this goes through ``self._storage`` while sibling methods
    use ``self.storage`` — presumably a property over the same backend;
    confirm they are equivalent.
    """
    return await self._storage.bake(id=bake_id).attempt(number=attempt_no).get()
async def list_bakes(
    self,
    tags: AbstractSet[str] = frozenset(),
    since: Optional[datetime.datetime] = None,
    until: Optional[datetime.datetime] = None,
    recent_first: bool = False,
) -> None:
    """Print a table of bakes matching the given filters.

    The header is printed once up front; each bake is then printed as its
    own headerless single-row table so output appears incrementally while
    the async listing streams in.
    """

    def _new_table() -> Table:
        tbl = Table(box=box.MINIMAL_HEAVY_HEAD)
        tbl.add_column(
            "ID",
            style="bold",
            width=len("bake-f6bd815b-3a3b-4ea1-b5ec-e8ab13678e3e"),
        )
        tbl.add_column("NAME", min_width=12)
        tbl.add_column("BATCH", min_width=20)
        tbl.add_column(
            "EXECUTOR", width=len("job-f6bd815b-3a3b-4ea1-b5ec-e8ab13678e3e")
        )
        tbl.add_column("STATUS", width=9)
        tbl.add_column("WHEN", min_width=10)
        tbl.show_edge = False
        return tbl

    self._console.print(_new_table())
    async for bake in self.storage.list_bakes(
        tags=tags,
        since=since,
        until=until,
        recent_first=recent_first,
    ):
        last = bake.last_attempt
        if last is None:
            # A bake without any attempt cannot be rendered meaningfully.
            self._console.print(
                f"[yellow]Bake [b]{bake.id}[/b] is malformed, skipping"
            )
            continue
        row = _new_table()
        row.show_header = False
        row.add_row(
            bake.id,
            bake.name or "",
            bake.batch,
            last.executor_id or "",
            last.result,
            fmt_datetime(last.created_at),
        )
        self._console.print(row)
async def inspect(
    self,
    bake_id: str,
    *,
    attempt_no: int = -1,
    output: Optional[LocalPath] = None,
    save_dot: bool = False,
    save_pdf: bool = False,
    view_pdf: bool = False,
) -> None:
    """Show the state of a bake attempt and render its task graph.

    Prints the attempt and executor status, a table of tasks, a table of
    images, then builds a Graphviz digraph of the execution graph and
    optionally saves and/or renders it.

    Args:
        bake_id: Bake to inspect.
        attempt_no: Attempt number; ``-1`` selects the latest attempt.
        output: Base path for the generated ``.gv`` file; defaults to
            ``<bake-id>_<attempt-number>.gv``.
        save_dot: Save the Graphviz source file.
        save_pdf: Render the graph to PDF.
        view_pdf: Open the rendered PDF in a viewer.
    """
    bake_storage = self.storage.bake(id=bake_id)
    try:
        bake = await bake_storage.get()
    except ResourceNotFound:
        self._console.print("[yellow]Bake not found")
        self._console.print(
            f"Please make sure that the bake [b]{bake_id}[/b] and "
            f"project [b]{self.project_id}[/b] are correct."
        )
        exit(1)
        # NOTE(review): relies on the `site` builtin `exit`; `sys.exit(1)`
        # is the conventional choice.  The assert tells type checkers the
        # branch never falls through.
        assert False, "unreachable"
    attempt_storage = bake_storage.attempt(number=attempt_no)
    attempt = await attempt_storage.get()
    self._console.print(f"[b]Bake id: {bake_id}[/b]")
    self._console.print(f"[b]Attempt #{attempt.number}[/b]", attempt.result)
    if attempt.executor_id:
        # Also report the live status of the remote executor job, if any.
        info = await self._client.jobs.status(attempt.executor_id)
        self._console.print(
            f"[b]Executor {attempt.executor_id}[/b]", TaskStatus(info.status)
        )
    # Per-task status table, ordered by creation time.
    task_table = Table(box=box.MINIMAL_HEAVY_HEAD)
    task_table.add_column("ID", style="bold")
    task_table.add_column("STATUS")
    task_table.add_column("RAW ID", style="bright_black")
    task_table.add_column("STARTED")
    task_table.add_column("FINISHED")
    tasks = [task async for task in attempt_storage.list_tasks()]
    for task in sorted(tasks, key=attrgetter("created_at")):
        task_table.add_row(
            ".".join(task.yaml_id),
            task.status,
            task.raw_id,
            fmt_datetime(task.created_at),
            fmt_datetime(task.finished_at),
        )
    self._console.print("Tasks:")
    self._console.print(task_table)
    # Image build status table; only printed when at least one image exists.
    image_table = Table(box=box.MINIMAL_HEAVY_HEAD)
    image_table.add_column("REF", style="bold")
    image_table.add_column("STATUS")
    image_table.add_column("BUILDER ID", style="bright_black")
    async for image in bake_storage.list_bake_images():
        image_table.add_row(
            image.ref,
            image.status,
            image.builder_job_id or "",
        )
    if image_table.rows:
        self._console.print("Images:")
        self._console.print(image_table)
    if output is None:
        output = LocalPath(f"{bake.id}_{attempt.number}").with_suffix(".gv")
    graphs = bake.graphs
    # Build the execution graph; task statuses drive the node colors.
    dot = Digraph(bake.batch, filename=str(output), strict=True, engine="dot")
    dot.attr(compound="true")
    dot.node_attr = {"style": "filled"}
    await self._subgraph(
        dot, graphs, (), {}, {task.yaml_id: task.status for task in tasks}
    )
    if save_dot:
        self._console.print(f"Saving file {dot.filename}")
        dot.save()
    if save_pdf:
        self._console.print(f"Rendering {dot.filename}.pdf")
        dot.render(view=view_pdf)
    elif view_pdf:
        self._console.print(f"Opening {dot.filename}.pdf")
        dot.view()
async def _subgraph(
    self,
    dot: Digraph,
    graphs: Mapping[FullID, Mapping[FullID, AbstractSet[FullID]]],
    prefix: FullID,
    anchors: Dict[str, str],
    statuses: Dict[FullID, TaskStatus],
) -> None:
    """Recursively add the tasks under *prefix* to the Graphviz graph.

    Tasks that themselves have a graph in *graphs* are rendered as nested
    clusters.  *anchors* maps a cluster's dotted id to one node inside it,
    so edges can attach to the cluster boundary via ``lhead``/``ltail``.

    Args:
        dot: Graph (or subgraph) currently being populated.
        graphs: Full mapping of task prefixes to their dependency graphs.
        prefix: The task id whose subgraph is rendered; ``()`` is the root.
        anchors: Shared dotted-id -> anchor-node mapping, filled in place.
        statuses: Task statuses used to pick node/cluster fill colors.
    """
    lhead: Optional[str]
    ltail: Optional[str]
    color: Optional[str]
    first = True
    graph = graphs[prefix]
    for task_id, deps in graph.items():
        tgt = ".".join(task_id)
        name = task_id[-1]
        if first:
            # Record the first node as this (sub)graph's anchor; edges that
            # point at the enclosing cluster will attach to it.
            anchors[".".join(prefix)] = tgt
            first = False
        if task_id in statuses:
            color = GRAPH_COLORS.get(statuses[task_id])
        else:
            color = None
        if task_id in graphs:
            # Composite task: render as a nested cluster and recurse.
            lhead = "cluster_" + tgt
            with dot.subgraph(name=lhead) as subdot:
                subdot.attr(label=f"{name}")
                subdot.attr(compound="true")
                subdot.attr(color=color)
                await self._subgraph(
                    subdot,
                    graphs,
                    task_id,
                    anchors,
                    statuses,
                )
            # Edges into the cluster must target its anchor node.
            tgt = anchors[tgt]
        else:
            dot.node(tgt, name, color=color)
            lhead = None
        for dep in deps:
            src = ".".join(dep)
            if src in anchors:
                # src is a subgraph
                ltail = "cluster_" + src
                src = anchors[src]
            else:
                ltail = None
            dot.edge(src, tgt, ltail=ltail, lhead=lhead)
async def logs(
    self, bake_id: str, task_id: str, *, attempt_no: int = -1, raw: bool = False
) -> None:
    """Print the logs of a finished task of a bake attempt.

    Args:
        bake_id: Bake to look up.
        task_id: Dotted task id (e.g. ``"job.post"``).
        attempt_no: Attempt number; ``-1`` selects the latest attempt.
        raw: Print the raw byte stream instead of feeding it through
            the command processor.

    Raises:
        click.BadArgumentUsage: If the task is unknown or not finished.
    """
    attempt_storage = self.storage.bake(id=bake_id).attempt(number=attempt_no)
    attempt = await attempt_storage.get()
    yaml_id = tuple(task_id.split("."))
    try:
        task = await attempt_storage.task(yaml_id=yaml_id).get()
    except ResourceNotFound:
        raise click.BadArgumentUsage(f"Unknown task {task_id}")
    if not task.status.is_finished:
        raise click.BadArgumentUsage(f"Task {task_id} is not finished")
    self._console.print(f"[b]Attempt #{attempt.number}[/b]", attempt.result)
    self._console.print(f"Task [b]{task_id}[/b]", task.status)
    if not task.raw_id:
        # No raw job id recorded for this task; nothing to stream.
        return

    def _echo(data: bytes) -> None:
        self._console.print(data.decode("utf-8", "replace"), end="")

    if raw:
        async for piece in self._client.jobs.monitor(task.raw_id):
            _echo(piece)
    else:
        async with CmdProcessor() as processor:
            async for piece in self._client.jobs.monitor(task.raw_id):
                async for line in processor.feed_chunk(piece):
                    _echo(line)
            async for line in processor.feed_eof():
                _echo(line)
async def cancel(self, bake_id: str, *, attempt_no: int = -1) -> None:
    """Mark a running bake attempt as cancelled.

    Raises:
        click.BadArgumentUsage: If the attempt has already finished.
    """
    storage = self.storage.bake(id=bake_id).attempt(number=attempt_no)
    attempt = await storage.get()
    if not attempt.result.is_finished:
        await storage.update(result=TaskStatus.CANCELLED)
        self._console.print(
            f"[b]Attempt #{attempt.number}[/b] of bake "
            f"[b]{attempt.bake_id}[/b] was cancelled."
        )
        return
    raise click.BadArgumentUsage(
        f"Attempt #{attempt.number} of {attempt.bake_id} is already stopped."
    )
async def clear_cache(
    self, batch: Optional[str] = None, task_id: Optional[str] = None
) -> None:
    """Drop cached task results, optionally scoped to a batch and task."""
    target: Optional[FullID] = tuple(task_id.split(".")) if task_id else None
    await self.storage.delete_cache_entries(batch, target)
async def restart(
    self,
    bake_id: str,
    *,
    attempt_no: int = -1,
    from_failed: bool = True,
    local_executor: bool = False,
) -> None:
    """Create a new attempt for an existing bake and run it.

    Args:
        bake_id: Bake to restart.
        attempt_no: Attempt to base the restart on; ``-1`` is the latest.
        from_failed: Carry over the results of tasks that already succeeded.
        local_executor: Run the main actions in-process.
    """
    restarted_bake, running_flow = await self._restart(
        bake_id, attempt_no=attempt_no, from_failed=from_failed
    )
    await self._run_bake(restarted_bake, running_flow, local_executor)
async def _restart(
    self,
    bake_id: str,
    *,
    attempt_no: int = -1,
    from_failed: bool = True,
) -> Tuple[Bake, RunningBatchFlow]:
    """Create a fresh attempt for an existing bake.

    When *from_failed* is set, tasks that succeeded in the base attempt
    (and whose dependencies all succeeded too) are copied into the new
    attempt so only the failed part of the graph re-runs.

    Args:
        bake_id: Bake to restart.
        attempt_no: Attempt to base the restart on; ``-1`` is the latest.
        from_failed: Carry over already-succeeded tasks.

    Returns:
        The bake record and the running flow for the new attempt.

    Raises:
        click.BadArgumentUsage: If the base or last attempt is still
            running, a successful attempt is restarted with *from_failed*,
            or the attempt limit (99) is exceeded.
    """
    bake_storage = self.storage.bake(id=bake_id)
    bake = await bake_storage.get()
    if bake.last_attempt and attempt_no == -1:
        last_attempt = attempt = bake.last_attempt
    else:
        attempt = await bake_storage.attempt(number=attempt_no).get()
        last_attempt = await bake_storage.last_attempt().get()
    if not attempt.result.is_finished:
        raise click.BadArgumentUsage(
            f"Cannot re-run still running attempt #{attempt.number} "
            f"of {bake.id}."
        )
    if not last_attempt.result.is_finished:
        raise click.BadArgumentUsage(
            f"Cannot re-run bake when last attempt #{last_attempt.number} "
            f"of {bake.id} is still running."
        )
    if attempt.result == TaskStatus.SUCCEEDED and from_failed:
        raise click.BadArgumentUsage(
            f"Cannot re-run successful attempt #{attempt.number} "
            f"of {bake.id} with `--from-failed` flag set.\n"
            "Hint: Try adding --no-from-failed to restart bake from the beginning."
        )
    if attempt.number >= 99:
        raise click.BadArgumentUsage(
            f"Cannot re-run {bake.id}, the number of attempts exceeded."
        )
    new_attempt = await bake_storage.create_attempt(
        number=last_attempt.number + 1,
        configs_meta=attempt.configs_meta,
    )
    if from_failed:
        new_attempt_storage = bake_storage.attempt(id=new_attempt.id)
        graphs = bake.graphs
        # yaml ids of successfully finished (and not cached) tasks
        handled = set()
        tasks = {
            task.yaml_id: task
            async for task in bake_storage.attempt(id=attempt.id).list_tasks()
        }
        for task in sorted(tasks.values(), key=attrgetter("created_at")):
            if task.status == TaskStatus.SUCCEEDED:
                # Check deps so that post-actions with an always()
                # precondition are not carried over incorrectly.
                prefix = task.yaml_id[:-1]
                graph = graphs[prefix]
                deps = graph[task.yaml_id]
                if not deps or all(dep in handled for dep in deps):
                    if (
                        prefix in tasks
                        and tasks[prefix].status != TaskStatus.SUCCEEDED
                    ):
                        # The enclosing action did not succeed, so its
                        # placeholder task must be created manually.
                        await new_attempt_storage.create_task(
                            yaml_id=prefix,
                            status=TaskStatus.PENDING,
                            raw_id=None,
                        )
                    # TODO allow to create task with multiple statuses
                    # and copy them from old task
                    await new_attempt_storage.create_task(
                        yaml_id=task.yaml_id,
                        status=TaskStatus.SUCCEEDED,
                        raw_id=task.raw_id,
                        outputs=task.outputs,
                        state=task.state,
                    )
                    # BUG FIX: was `handled.add(task.id)`.  `deps` are yaml
                    # FullID tuples (see the `graphs` typing in `_subgraph`),
                    # so membership tests against storage ids could never
                    # match and dependent tasks were never carried over.
                    handled.add(task.yaml_id)
    self._console.print(f"[b]Attempt #{new_attempt.number}[/b] is created")
    flow = await get_running_flow(
        bake, self._client, bake_storage, new_attempt.configs_meta
    )
    return bake, flow
| StarcoderdataPython |
176913 | <gh_stars>1-10
#!/usr/bin/env python3
import tornado.ioloop
import tornado.options
import tornado.web
# Constants
PORT = 9999  # TCP port the HTTP server listens on
# Handlers
class HelloHandler(tornado.web.RequestHandler):
    """Request handler that answers GET with greetings and a logo image."""

    def get(self):
        """Handle HTTP GET: emit plain text, HTML, and an embedded image."""
        self.write('Hello, World!')
        self.write('<h1>Hello, World!</h1>')
        self.write('''
<center>
<img src="https://www.tornadoweb.org/en/stable/_images/tornado.png">
</center>
''')
# Main Execution
def main():
    """Build the Tornado application and serve it forever on PORT."""
    routes = [
        (r'/', HelloHandler),  # root URL -> greeting handler
    ]
    app = tornado.web.Application(routes)
    app.listen(PORT)
    tornado.options.parse_command_line()
    tornado.ioloop.IOLoop.current().start()


if __name__ == '__main__':
    main()
| StarcoderdataPython |
1717698 | from TestAsynchSource import TestAsynchSource
from TestAsynchSink import TestAsynchSink
from TestMemory import TestMemory
| StarcoderdataPython |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.