index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
13,600 | 81855abd45882f76ab9127f81e26e7368140014e | ## 2. ReLU Activation Function ##
import matplotlib.pyplot as plt
%matplotlib inline
import numpy as np
x = np.linspace(-2, 2, 20)
def relu(x):
    """Element-wise rectified linear unit: returns max(0, x)."""
    return np.maximum(0, x)
relu_y = relu(x)
print(x, relu_y)
plt.plot(x,relu_y)
## 3. Trigonometric Functions ##
x = np.linspace(-2*np.pi, 2*np.pi, 100)
tan_y = np.tan(x)
print(x,tan_y)
plt.plot(x,tan_y)
## 5. Hyperbolic Tangent Function ##
x = np.linspace(-40, 40, 100)
tanh_y = np.tanh(x)
plt.plot(x,tanh_y) |
13,601 | 66e35cf958e187ca4a0abef383e41ee4ff5f7b59 | # Base on stock_pred.py
from datetime import time
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.pylab import rcParams
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import MinMaxScaler
from keras.models import Sequential
from keras.layers import LSTM, Dropout, Dense, SimpleRNN
from xgboost import XGBRegressor
import joblib
# check GPU
# from tensorflow.python.client import device_lib
# print(device_lib.list_local_devices())
TIME_STEP = 60
def create_train_dataset(df, features=("Close",), time_step=60, split_index=987):
    """Scale features to [0, 1] and build sliding-window train/test arrays.

    Parameters
    ----------
    df : pandas.DataFrame with a "Date" column (YYYY-MM-DD strings) and one
        column per requested feature. The first feature after sorting is the
        prediction target.
    features : iterable of feature column names (sorted internally so the
        target column is deterministic).
    time_step : look-back window length in rows.
    split_index : row index separating train from test. Default keeps the
        original hard-coded 987-row train split; the test slice starts
        `time_step` rows earlier so the first test sample has full history.

    Returns
    -------
    X_train, y_train, X_test, y_test, dct_scaler (per-feature MinMaxScaler,
    needed to invert predictions later), train_df_root, test_df_root.
    """
    features = sorted(features)
    # Order rows chronologically, then go back to a clean positional index.
    df["Date"] = pd.to_datetime(df.Date, format="%Y-%m-%d")
    df.index = df["Date"]
    df = df.sort_index(ascending=True, axis=0)
    df = df.reset_index(drop=True)
    # One independent scaler per feature so each can be inverse-transformed.
    dct_scaler = {}
    for feat in features:
        dct_scaler[feat] = MinMaxScaler(feature_range=(0, 1))
        df[feat] = dct_scaler[feat].fit_transform(df[feat].values.reshape(-1, 1)).reshape(-1)
    train_df_root = df.iloc[0:split_index, ]
    test_df_root = df.iloc[split_index - time_step:, ]
    train_df, test_df = train_df_root[features], test_df_root[features]

    def _windows(frame):
        # Each sample: `time_step` consecutive rows of all features.
        # Label: the next row's value of the first (target) feature.
        n = frame.shape[0] - time_step
        X = np.zeros((n, time_step, len(features)))
        y = np.zeros((n,))
        for i in range(time_step, frame.shape[0]):
            X[i - time_step] = frame.values[i - time_step:i, :]
            y[i - time_step] = frame.values[i, 0]
        return X, y

    X_train, y_train = _windows(train_df)
    X_test, y_test = _windows(test_df)
    return X_train, y_train, X_test, y_test, dct_scaler, train_df_root, test_df_root
def build_model_lstm(input_shape):
    """Three stacked 50-unit LSTM layers plus a scalar head, MSE/Adam."""
    layers = [
        LSTM(units=50, return_sequences=True, input_shape=input_shape),
        LSTM(units=50, return_sequences=True),
        LSTM(units=50),
        Dense(units=1),
    ]
    model = Sequential(layers)
    model.compile(loss="mean_squared_error", optimizer="adam")
    return model
def train_model_lstm(X_train, y_train, output_path="saved_lstm_model.h5"):
    """Fit the LSTM for 10 epochs (batch 32) and persist it to `output_path`."""
    model = build_model_lstm(input_shape=(X_train.shape[1], X_train.shape[2]))
    model.fit(X_train, y_train, epochs=10, batch_size=32, verbose=2)
    model.save(output_path)
    return model
def build_model_rnn(input_shape):
    """Three stacked 50-unit SimpleRNN layers plus a scalar head, MSE/Adam."""
    layers = [
        SimpleRNN(units=50, return_sequences=True, input_shape=input_shape),
        SimpleRNN(units=50, return_sequences=True),
        SimpleRNN(units=50),
        Dense(units=1),
    ]
    model = Sequential(layers)
    model.compile(loss="mean_squared_error", optimizer="adam")
    return model
def train_model_rnn(X_train, y_train, output_path="saved_rnn_model.h5"):
    """Fit the SimpleRNN for 10 epochs (batch 32) and persist it to `output_path`."""
    model = build_model_rnn(input_shape=(X_train.shape[1], X_train.shape[2]))
    model.fit(X_train, y_train, epochs=10, batch_size=32, verbose=2)
    model.save(output_path)
    return model
def build_model_xgboost():
    """XGBoost regressor with the project's fixed hyper-parameters."""
    params = dict(
        n_estimators=100,
        objective="reg:squarederror",
        gamma=0.01,
        learning_rate=0.01,
        max_depth=4,
        random_state=42,
        subsample=1,
        verbosity=2,
        seed=132,
    )
    return XGBRegressor(**params)
def train_model_xgboost(X_train, y_train, output_path="saved_xgboost_model.joblib"):
    """Fit XGBoost on flattened (samples, time*features) windows, save via joblib."""
    model = build_model_xgboost()
    print(model)
    flat_windows = X_train.reshape((X_train.shape[0], -1))
    model.fit(flat_windows, y_train)
    joblib.dump(model, output_path)
    return model
def train(X_train, y_train, method="LSTM", output_path=None):
    """Train and persist a model of the requested kind.

    Parameters
    ----------
    method : "LSTM", "RNN" or "XGBOOST"; anything else raises ValueError
        (the original silently returned None).
    output_path : destination file; when None a per-method default under
        model_output/ is used. The original ignored an explicit path for
        the RNN and XGBOOST branches -- it is now honored for every method.

    Returns the fitted model.
    """
    defaults = {
        "LSTM": "model_output/saved_lstm_model.h5",
        "RNN": "model_output/saved_rnn_model.h5",
        "XGBOOST": "model_output/saved_xgboost_model.joblib",
    }
    trainers = {
        "LSTM": train_model_lstm,
        "RNN": train_model_rnn,
        "XGBOOST": train_model_xgboost,
    }
    labels = {"LSTM": "LSTM", "RNN": "RNN", "XGBOOST": "Xgboost"}
    if method not in trainers:
        raise ValueError("unknown method: {!r}".format(method))
    if output_path is None:
        output_path = defaults[method]
    print("Training model {} ...".format(labels[method]))
    return trainers[method](X_train, y_train, output_path)
if __name__ == "__main__":
df = pd.read_csv("NSE-TATA.csv")
features = ['Close']
X_train, y_train, X_test, y_test, dct_scaler, train_df, test_df = create_train_dataset(df, features=features, time_step=TIME_STEP)
lstm = train(X_train, y_train, method="LSTM", output_path="model_output/saved_lstm_model.h5")
rnn = train(X_train, y_train, method="RNN", output_path="model_output/saved_rnn_model.h5")
xgboost = train(X_train, y_train, method="XGBOOST", output_path="model_output/saved_xgboost_model.joblib") |
13,602 | 51d60b06186c34f0a814bee2c3a5aa0854185510 | # -*- coding: utf-8 -*-
"""
Created on Sat Apr 14 19:42:45 2018
@author: singh.shivam
"""
# Fit degree-6 polynomials to monthly max/min temperature samples and plot
# both the raw points and the fitted curves.
import numpy as np
import matplotlib.pyplot as plt
# Months 1..12 as the x axis.
l = [i for i in range(1,13)]
x = np.array(l)
print(x)
# Monthly maximum / minimum temperatures (degrees C).
maxy=np.array([39,41,43,47,49,51,45,38,37,29,27,25])
miny=np.array([21,23,27,28,32,35,31,28,21,19,17,18])
#calculate polynomial coefficients (degree 6) for each series
Z=np.polyfit(x,maxy,6)
V = np.polyfit(x,miny,6)
print(Z)
# Wrap the coefficients as callable polynomials.
f=np.poly1d(Z)
f1=np.poly1d(V)
#calculate new x, y samples along the fitted curves
x_new=np.linspace(x[0],x[-1],12)
y_new=f(x_new)
x1_new=np.linspace(x[0],x[-1],12)
y1_new=f1(x1_new)
plt.plot(x,maxy,'o',x_new,y_new,"blue")
plt.plot(x,miny,'o',x1_new,y1_new,"orange")
# NOTE(review): plt.xlim() with no arguments only queries the current limits;
# it does not change them.
plt.xlim()
plt.ylabel('Temperature (degree C)')
plt.xlabel('Months')
plt.show() |
def to_bebisspråket(text):
    """Convert *text* to Swedish baby talk ("bebisspråket").

    Each word is reduced to its leading characters up to and including the
    first vowel, and that fragment is repeated three times,
    e.g. "katt" -> "kakaka". Words without a vowel contribute an empty
    fragment. Fragments are joined with single spaces and the result is
    stripped.

    Bug fix: the original called word.find(any(...)); any() returns a bool,
    and str.find() raises TypeError for non-string arguments, so every call
    crashed. The first-vowel index is now computed explicitly.
    """
    vowels = "aeiouyåäö"
    fragments = []
    for word in text.split():
        # Index of the first vowel (case-insensitive); -1 when absent so the
        # slice below degrades to word[:0] == "".
        first_vowel = next(
            (i for i, ch in enumerate(word.lower()) if ch in vowels), -1
        )
        fragments.append(word[:first_vowel + 1] * 3)
    return " ".join(fragments).strip()
|
13,604 | 6ea55426fcd24db36847baa5656080052f234554 | #!/usr/bin/env python3
"""
Given a WAD, it'll detect if it's for DOOM1 or 2 (based on map names) and
run GZDoom with the right iwad arg
"""
import argparse
import os
import wad
import subprocess
import dero_config
parser = argparse.ArgumentParser()
parser.add_argument('--pwad', type=str, required=True, help='Path to the PWAD to play')
parser.add_argument('--nomons', action='store_true', default=False)
parser.add_argument('--voxels', action=argparse.BooleanOptionalAction, default=True)
parser.add_argument('--map', type=str, default=None)
parser.add_argument('--savegame', type=str, default=None)
args = parser.parse_args()
def main():
    """Infer the matching IWAD for args.pwad, collect add-on files from its
    directory, and launch GZDoom with the assembled command line."""
    # Inspect only the first map name: ExMy-style names mean DOOM 1.
    iwad = dero_config.DOOM2_WAD_PATH
    for name in wad.enum_map_names(args.pwad):
        print(f'First map name: {name}')
        if name[0] == 'E' and name[2] == 'M':
            print('..looks like DOOM 1')
            iwad = dero_config.DOOM1_WAD_PATH
        else:
            print('..looks like DOOM 2')
        break
    print(f'Assumed IWAD: {os.path.basename(iwad)}')

    pwad_dir = os.path.dirname(args.pwad)
    # Load every WAD/PK3 that sits next to the PWAD.
    wadpaths = [
        os.path.join(pwad_dir, entry)
        for entry in os.listdir(pwad_dir)
        if entry.lower().endswith(('.wad', '.pk3'))
    ]
    if args.voxels:
        wadpaths.append('/Users/stevenan/dooming/wads/cheello_voxels.zip')

    gzdoom_path = '/Applications/GZDoom.app/Contents/MacOS/gzdoom'
    call_args = [gzdoom_path, *wadpaths,
                 '-iwad', iwad,
                 '-savedir', pwad_dir,
                 '-shotdir', pwad_dir]
    if args.nomons:
        call_args.append('-nomonsters')
    if args.map:
        call_args.extend(['+map', args.map, '+skill', '1'])
    if args.savegame:
        call_args.extend(['-loadgame', args.savegame])
    print('final args:', call_args)
    subprocess.check_call(call_args)
if __name__ == '__main__':
main()
|
13,605 | 2b72778600c7936d1cf3098e13bbc7bc5816a91b | from django.urls import path
from .views import Event_List, Event_Detail, get_price, checkout_view
from django.conf.urls import url
app_name = 'event'
# Routes for the event app: listing, detail (id + slug), an AJAX price
# endpoint, and checkout.
# NOTE(review): django.conf.urls.url() is deprecated since Django 2.0 and was
# removed in Django 4.0 -- the ajax route below should migrate to
# django.urls.re_path with the same regex.
urlpatterns = [path('event_list', Event_List.as_view(), name='event_list'),
               path('<int:id>/<slug:slug>/', Event_Detail.as_view(), name='event_detail'),
               url(r'ajax/get_price/$', get_price, name='hall_price'),
               path('checkout/<str:event>/', checkout_view, name='checkout')
               ]
|
13,606 | ddb4a15bc37378805d062a78f6489508e8f303b8 | '''
Permutation Experiments
Usage: python3 03_excitation_02_permutation.py
NOTE The parameter DIMS_TO_PERMUTE (per comparison type as stored in variable MODE) encodes which event-to-dimension associations to permute. It results from empirical observations of results from "03_excitation_01_effects.py". Also, note ANALYSIS_PERIOD_START and ANALYSIS_PERIOD_END can be set to cover only a period of interest.
'''
import functools
import json
import multiprocessing
import numpy as np
from tick.inference import HawkesExpKern
import constants
###################
# CONSTANTS
###################
NUMBER_PROCESSES = constants.NUMBER_OF_PROCESSES
TEST_DATA_PATH_ORIGIN = constants.PATH_TO_DESTINATION_DATASET
ANALYSIS_PERIOD = "m_3"
ANALYSIS_PERIOD_DICT = constants.ANALYSIS_PERIOD_DICT
ANALYSIS_PERIOD_OFFSET = ANALYSIS_PERIOD_DICT[ANALYSIS_PERIOD[:ANALYSIS_PERIOD.find("_")]] \
* int(ANALYSIS_PERIOD[ANALYSIS_PERIOD.find("_")+1:])
ANALYSIS_PERIOD_END = 12 # end analysis after ANALYSIS_PERIOD_END quarters
ANALYSIS_PERIOD_START = 8 # should be 1 for pcpa perm! if analysis period is 2, then it skips timetamps of 1st quarter
NUMBER_OF_DIMENSIONS = constants.NUMBER_OF_DIMENSIONS
PERMUTED_FITTING_REPETITIONS = 100
FIT_TYPE = "cqpa" # "cqca", "cqpa", "pqpa", "all". notation: c - casuals, p - power users, q - questions, a - answers, all - permute all dimensions
DIMS_TO_PERMUTE = [1, 2] # grow_vs_dec - cqca: [1, 3], cqpa: [1, 2], pqpa: [0, 2]; stem_vs_human - all: list(range(4)), cqpa: [1, 2], cqca: [1, 3].
MODE = "STEM_VS_HUMAN" # "STEM_VS_HUMAN", "GROW_VS_DEC"
MODE_TO_DATASETS = constants.MODE_TO_DATASETS
DATASET_LIST = MODE_TO_DATASETS[MODE]
ALL_DATASET_LIST = []
for key in DATASET_LIST:
ALL_DATASET_LIST += DATASET_LIST[key]
MIN_NUMBER_OF_EVENTS = constants.MIN_NUMBER_OF_EVENTS
DIMENSION_NAMES = constants.DIMENSION_NAMES
assert len(DIMENSION_NAMES) == NUMBER_OF_DIMENSIONS
#EXEC_TIME = datetime.datetime.today().strftime('%Y%m%d-%H%M%S')
def __normalization_function(value_of_list, centering_value, min_of_scale, max_of_scale):
    """Shift by `centering_value` and scale by the width of [min, max]."""
    scale_width = max_of_scale - min_of_scale
    return (value_of_list - centering_value) / scale_width
# previously determined beta values
FITTED_BETA = 2.288 if MODE == "GROW_VS_DEC" else (2.067 if MODE == "STEM_VS_HUMAN" else "unknown")
if FITTED_BETA == "unknown":
raise Exception("unknown MODE")
print("beta: {}".format(FITTED_BETA))
###################
# MODELLING OVER TIME
###################
# Event to label permutation
def __eventlabel_permutation(list_of_events_per_dim):
    """Randomly permute the dimension labels of events, restricted to the
    dimensions in DIMS_TO_PERMUTE; event timestamps are left untouched.

    Input/output: list with one array of timestamps per dimension.
    """
    # Flatten to (dim, time) pairs ordered by time across all dimensions.
    long_dim_array = sorted([(dim, event) for dim, event_list in enumerate(list_of_events_per_dim) for event in event_list], key=lambda e: e[1])
    # Collect only the labels eligible for permutation and shuffle them.
    event_dims = [event_dim for event_dim, event_time in long_dim_array if event_dim in DIMS_TO_PERMUTE]
    np.random.shuffle(event_dims)
    event_dim_shuffled_index = 0
    shuffled_long_dim_array = []
    # Walk events in time order, replacing eligible labels with the next
    # shuffled label; other labels are kept as-is.
    for event_dim, event_time in long_dim_array:
        if event_dim in DIMS_TO_PERMUTE:
            new_event_dim = event_dims[event_dim_shuffled_index]
            event_dim_shuffled_index += 1
        else:
            new_event_dim = event_dim
        shuffled_long_dim_array.append((new_event_dim, event_time))
    # Regroup timestamps per (new) dimension; time order is preserved.
    result = [[] for i in range(NUMBER_OF_DIMENSIONS)]
    [result[dim].append(timestamp) for dim, timestamp in shuffled_long_dim_array]
    return [np.array(i) for i in result]
# Reading datasets
def __read_dataset_window(some_dataset, window_index):
    """Load one dataset's per-dimension timestamps, permute event labels, and
    slice out analysis window `window_index` (1-based, ANALYSIS_PERIOD_OFFSET
    wide, anchored at each dimension's first event).

    Returns the windowed per-dimension arrays normalized so the window's
    earliest event maps to 0, or None (implicitly) when any dimension is
    empty or has <= MIN_NUMBER_OF_EVENTS events; callers filter the Nones.
    """
    # Bug fix: np.float was deprecated in NumPy 1.20 and removed in 1.24;
    # the builtin `float` is the documented replacement.
    timestamp_list = [np.genfromtxt(TEST_DATA_PATH_ORIGIN + some_dataset + "/" + some_dataset + dim_name + ".csv", dtype=float, delimiter=",") for dim_name in DIMENSION_NAMES]
    timestamp_list = __eventlabel_permutation(timestamp_list)
    potential_window_start = [np.where(timestamp_list[dim] > timestamp_list[dim][0] + (window_index - 1) * ANALYSIS_PERIOD_OFFSET)
                              for dim in range(NUMBER_OF_DIMENSIONS)]
    potential_window_end = [np.where(timestamp_list[dim] <= timestamp_list[dim][0] + window_index * ANALYSIS_PERIOD_OFFSET)
                            for dim in range(NUMBER_OF_DIMENSIONS)]
    # check if all dimensions have events
    # np.where returns an (array,) tuple, hence the [0] indexing "hack"
    if all(map(len, [potential_window_start[dim][0] for dim in range(NUMBER_OF_DIMENSIONS)])):
        window_start = [np.min(potential_window_start[dim]) for dim in range(NUMBER_OF_DIMENSIONS)]
        window_end = [np.max(potential_window_end[dim]) for dim in range(NUMBER_OF_DIMENSIONS)]
        timestamp_list = [timestamp_list[dim][window_start[dim] : window_end[dim]]
                          for dim in range(NUMBER_OF_DIMENSIONS)]
        # check if all dimensions have enough events
        if all([len(timestamp_dim) > MIN_NUMBER_OF_EVENTS for timestamp_dim in timestamp_list]):
            begin_and_end_timestamp_list = [{"first": timestamp_dim[0], "last": timestamp_dim[-1]}
                                            for timestamp_dim in timestamp_list]
            first_timestamp = min([begin_and_end_timestamps["first"]
                                   for begin_and_end_timestamps in begin_and_end_timestamp_list])
            return [__normalization_function(timestamp_list[dim_i], first_timestamp, 0, 1)
                    for dim_i in range(NUMBER_OF_DIMENSIONS)]
POOL = multiprocessing.Pool(processes=NUMBER_PROCESSES)
for dataset_type in DATASET_LIST.keys():
PERIOD_RESULTS = []
for time_span in range(ANALYSIS_PERIOD_START, ANALYSIS_PERIOD_END + 1):
print("PROCESSING Q{}".format(time_span))
parameter_results = {"mus": [], "alphas": [], "betas": []}
for _ in range(PERMUTED_FITTING_REPETITIONS):
__read_dataset_bound_window = functools.partial(__read_dataset_window, window_index=time_span)
EVENT_TIMES = POOL.map(__read_dataset_bound_window, DATASET_LIST[dataset_type])
EVENT_TIMES = [events for events in EVENT_TIMES if events is not None]
learner = HawkesExpKern([[FITTED_BETA] * NUMBER_OF_DIMENSIONS] * NUMBER_OF_DIMENSIONS)
learner.fit(EVENT_TIMES)
parameter_results["mus"].append(np.array(learner.baseline).tolist())
parameter_results["alphas"].append((learner.adjacency * np.array(learner.decays)).tolist())
parameter_results["betas"].append(np.array(learner.decays).tolist())
PERIOD_RESULTS.append({"mu": parameter_results["mus"],
"alpha": parameter_results["alphas"],
"beta": parameter_results["betas"],
"#datasets": len(EVENT_TIMES),
"quarter": time_span})
EVENT_TIMES = None
with open("quarter_permutation_{}_{}.json".format(dataset_type, FIT_TYPE), "w") as f:
json.dump(PERIOD_RESULTS, f)
|
13,607 | 9a584ed7cbcbeb5305dc36de56b1a33f0a4e8353 | '''Module to generate test graphs'''
from random import randint
from random import sample
from subprocess import call
import sys
filename = "test"
def printUsage():
print "Enter 1 n to generate star graph with n nodes"
print "Enter 2 n to generate line graph with n nodes"
print "Enter 3 n to generate a random tree with n nodes"
print "Enter 4 n e to generate a graph with n nodes, and"
print "each is added to the graph with probability e/nC2"
def star(n):
    '''Write a star graph on n nodes (node 0 as the hub) to <filename>.txt.

    First line: "<nodes> <edges>"; each subsequent line is an edge "0 i".
    NOTE(review): sys.stdout is redirected to the file and then closed,
    leaving sys.stdout unusable afterwards.
    '''
    sys.stdout = open(filename+'.txt','w')
    print n, n-1
    for i in range(1,n):
        print 0, i
    sys.stdout.close()
def line(n):
    '''Write a path graph on n nodes (edges i -- i+1) to <filename>.txt.

    Same file format and sys.stdout-redirection caveat as star().
    '''
    sys.stdout = open(filename+'.txt','w')
    print n, n-1
    for i in range(0,n-1):
        print i, i+1
    sys.stdout.close()
def randTree(n):
    '''Write a uniform random recursive tree on n nodes to <filename>.txt.

    Each node i (1..n-1) attaches to a uniformly random earlier node, which
    guarantees the result is a tree with n-1 edges.
    '''
    sys.stdout = open(filename+'.txt','w')
    print n, n-1
    for i in range(1, n):
        print i, randint(0,i-1)
    sys.stdout.close()
def randGraph(n, e):
    '''Write a random simple graph: e edges sampled uniformly (without
    replacement) from all nC2 possible edges on n nodes.
    '''
    sys.stdout = open(filename+'.txt','w')
    edges = []
    # nc2 is computed for documentation/validation purposes only; the caller
    # already asserts e <= nC2 before invoking this function.
    nc2 = (n*(n-1))/2
    # Enumerate every possible undirected edge (i < j).
    for i in range(0, n):
        for j in range(i+1, n):
            edges.append((i,j))
    graphEdges = sample(edges, e)
    print n, e
    for edge in graphEdges:
        print edge[0], edge[1]
    sys.stdout.close()
def doDotty():
    '''Render <filename>.txt as a PNG via Graphviz `dot` and open the image.

    NOTE(review): sys.stdout is left pointing at a closed file after this
    function runs, so later prints in the process would fail.
    '''
    f = open(filename+".txt", 'r')
    sys.stdout = open(filename+'.dot','w')
    print "graph G"
    print "{"
    # Skip the first line (node/edge counts); every other line is one edge.
    for line in f.readlines()[1:]:
        a,b = map(int, line.split(" "))
        print "node" + str(a) + " -- " + "node" + str(b)
    print "}"
    sys.stdout.close()
    f.close()
    call(["dot","-Tpng",filename+".dot","-o",filename+".png"])
    call(["gnome-open",filename+".png"])
if __name__ == '__main__':
printUsage()
try:
inp = map(int, raw_input().split(" "))
if inp[0] == 1:
assert(inp[1] > 1)
star(inp[1])
elif inp[0] == 2:
assert(inp[1] > 1)
line(inp[1])
elif inp[0] == 3:
assert(inp[1] > 1)
randTree(inp[1])
elif inp[0] == 4:
assert(inp[1] > 1)
assert(inp[2] > 0 and inp[2] <= (inp[1]*(inp[1]-1)/2))
randGraph(inp[1], inp[2])
else:
assert 0
doDotty()
except Exception, e:
print e
print "Wrong input format"
printUsage()
|
13,608 | 52c8ab928074af1aeb3cda29e03d0c76f698c63b |
from flask import render_template, request
from app import app
from app.logic import generate_text
@app.route('/')
def index():
    """Render the landing page with a freshly generated troll text."""
    return render_template('index.html', troll_text=generate_text())
@app.route('/_get_text')
def get_troll_text():
    """Return troll text for the optional `subject` query parameter.

    Bug fix: the original tested isinstance(subject, basestring); basestring
    only exists in Python 2, so this view raised NameError under Python 3.
    """
    subject = request.args.get('subject')
    # Clean the string: strip whitespace and treat an empty value as absent.
    if isinstance(subject, str):
        subject = subject.strip()
        if len(subject) <= 0:
            subject = None
    return generate_text(subject)
|
13,609 | 383629fe5a15f07d541c475f80af6c21dffc5349 | import math
import time
from misc import plot_vline
from matplotlib.figure import Figure
from pylab import cm
from matplotlib.gridspec import GridSpec
from matplotlib.backends.backend_pdf import FigureCanvasPdf as FigureCanvas
import pylab as plt
def calculate_mean_bo_b_images(dwi_file, bval_file=False, bvec_file=False):
    '''Split a DWI volume into a mean-B0 image and the diffusion-weighted
    volumes, saving both next to the working directory.

    dwi_file is expected to be a one-element list holding the path to a 4D
    NIfTI file; bval/bvec paths feed dipy's read_bvals_bvecs. Volumes with
    bval == 0 are averaged into "<base>_mean_bo.nii.gz"; the remaining
    volumes are stacked into "<base>_b_images.nii.gz".

    Returns (True, mean_bo_path, b_images_path) on success, or
    (False, 'not acquired', 'not acquired') when zero or multiple DWI files
    were supplied.

    NOTE(review): this is Python 2 code; nb.get_affine()/get_data() are
    deprecated nibabel accessors (affine/get_fdata in modern nibabel).
    '''
    from dipy.io import read_bvals_bvecs
    from nipype.utils.filemanip import split_filename
    import nibabel as nb
    import numpy as np
    import os
    print len(dwi_file)
    print bval_file
    if (len(dwi_file)==1 and os.path.isfile(dwi_file[0])):
        bvals,bvecs = read_bvals_bvecs(bval_file,bvec_file)
        print dwi_file[0]
        dwi = nb.load(dwi_file[0])
        print dwi.get_affine()
        dwi_data = dwi.get_data()
        #create average bo image from all volumes whose bval is exactly 0
        bo_id=bvals==0
        print np.shape(dwi_data[:,:,:,bo_id])
        # The acquisition protocol is assumed to contain exactly 7 B0s --
        # this only warns, it does not abort.
        if np.shape(dwi_data[:,:,:,bo_id])[3] != 7:
            print "why there are not 7 B0s"
        mean_bo=np.mean(dwi_data[:,:,:,bo_id],3)
        # Diffusion-weighted volumes: everything with a non-zero bval.
        b_id=bvals!=0
        b_images=dwi_data[:,:,:,b_id]
        print np.shape(b_images)
        # Likewise, 60 gradient directions are expected; warning only.
        if np.shape(b_images)[3]!=60:
            print "why there are not 60 directions?"
        # Save the mean B0 with the source affine/header, forced to float32.
        mean_bo_nii = nb.Nifti1Image(mean_bo, dwi.get_affine(), dwi.get_header())
        mean_bo_nii.set_data_dtype(np.float32)
        _, base, _ = split_filename(dwi_file[0])
        nb.save(mean_bo_nii, base + "_mean_bo.nii.gz")
        b_images_nii = nb.Nifti1Image(b_images, dwi.get_affine(), dwi.get_header())
        b_images_nii.set_data_dtype(np.float32)
        _, base, _ = split_filename(dwi_file[0])
        print base
        nb.save(b_images_nii, base + "_b_images.nii.gz")
        print os.path.abspath(base + "_mean_bo.nii.gz")
        return True, str(os.path.abspath(base + "_mean_bo.nii.gz")), str(os.path.abspath(base + "_b_images.nii.gz"))
    else:
        print "no dti or more than 1 dti acquired"
        return False, str('not acquired'), str('not acquired')
|
13,610 | c5928907ecc51b1e708a13bc15040b90e8f7916f | import numpy as np
import pandas as pd
import googlemaps
import json
from datetime import datetime
# Choose starting and ending times
start_time = "2020-09-01T04:00:00.464Z"
end_time = "2020-09-03T22:00:00.464Z"
YOUR_API_KEY = ""
starttime = datetime.strptime(start_time,'%Y-%m-%dT%H:%M:%S.%fZ')
endtime = datetime.strptime(end_time,'%Y-%m-%dT%H:%M:%S.%fZ')
# gmaps = googlemaps.Client(key=YOUR_API_KEY)
#
# # Look up an address with reverse geocoding
# origin = gmaps.reverse_geocode((51.4913,-0.08168))
# destination = gmaps.reverse_geocode((51.490469,-0.080686))
#
#
# # Request directions
# directions_result = gmaps.directions("Sydney Town Hall",
# "Parramatta, NSW",
# mode="driving",
# units="metric",
# traffic_model="best_guess",
# departure_time=starttime)
|
13,611 | 6221644c0a134556df821e7b356c314ccb60ed35 | # Generated by Django 3.2 on 2021-05-27 18:38
import django.core.validators
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    """Initial, auto-generated migration (Django 3.2): creates the Gasto
    (expense) model.

    Generated code -- keep changes to comments only so future
    makemigrations diffs stay clean.
    """

    initial = True

    dependencies = [
        # Needs presupuestos.Actividad for the id_actividad foreign key below.
        ('presupuestos', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Gasto',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('descripcion', models.CharField(max_length=256, verbose_name='Descripción')),
                ('proveedor', models.CharField(max_length=200, verbose_name='Proveedor')),
                ('precio_unitario', models.FloatField(max_length=9, verbose_name='Precio Unitario')),
                ('cantidad', models.PositiveIntegerField(validators=[django.core.validators.MaxValueValidator(100000), django.core.validators.MinValueValidator(1)], verbose_name='Cantidad')),
                ('precio_total', models.FloatField(editable=False, validators=[django.core.validators.MaxValueValidator(1000000), django.core.validators.MinValueValidator(0)], verbose_name='Total')),
                ('fecha', models.DateField(default=django.utils.timezone.now, verbose_name='Fecha')),
                ('factura', models.FileField(blank=True, max_length=254, null=True, upload_to='facturas', verbose_name='Factura')),
                ('id_actividad', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='presupuestos.actividad')),
            ],
        ),
    ]
|
13,612 | c655e004966fb4320bf0319651a0ca3dc67b0cd5 | from mcpi.minecraft import Minecraft
mc = Minecraft.create()
# Anchor the construction at the player's current tile position.
x,y,z = mc.player.getTilePos()
a = 0
# Build 20 slabs of block id 19 (sponge in standard Minecraft ids -- confirm),
# each 41 blocks wide and 10 blocks deep below the player, stepping 5 blocks
# along the z axis per iteration.
while a<20:
    mc.setBlocks(x-20,y-1,z,x+20,y-10,z,19)
    z=z+5
    a=a+1
|
13,613 | 26d07d7c231d2cfd7eb7dcda4a8d4574483c0317 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkemr.endpoint import endpoint_data
class ListEmrAvailableResourceRequest(RpcRequest):
    """RPC request for the EMR `ListEmrAvailableResource` API (version
    2016-04-08).

    Auto-generated SDK style: each query parameter gets a get_/set_ pair
    that proxies to the underlying RpcRequest query-parameter dict.
    """

    def __init__(self):
        RpcRequest.__init__(self, 'Emr', '2016-04-08', 'ListEmrAvailableResource')
        self.set_method('POST')
        # Adopt endpoint-resolution data when the SDK core supports it.
        if hasattr(self, "endpoint_map"):
            setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
        if hasattr(self, "endpoint_regional"):
            setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())

    def get_ResourceOwnerId(self):
        return self.get_query_params().get('ResourceOwnerId')

    def set_ResourceOwnerId(self,ResourceOwnerId):
        self.add_query_param('ResourceOwnerId',ResourceOwnerId)

    def get_DepositType(self):
        return self.get_query_params().get('DepositType')

    def set_DepositType(self,DepositType):
        self.add_query_param('DepositType',DepositType)

    def get_SystemDiskType(self):
        return self.get_query_params().get('SystemDiskType')

    def set_SystemDiskType(self,SystemDiskType):
        self.add_query_param('SystemDiskType',SystemDiskType)

    def get_ResourceGroupId(self):
        return self.get_query_params().get('ResourceGroupId')

    def set_ResourceGroupId(self,ResourceGroupId):
        self.add_query_param('ResourceGroupId',ResourceGroupId)

    def get_InstanceType(self):
        return self.get_query_params().get('InstanceType')

    def set_InstanceType(self,InstanceType):
        self.add_query_param('InstanceType',InstanceType)

    def get_EmrVersion(self):
        return self.get_query_params().get('EmrVersion')

    def set_EmrVersion(self,EmrVersion):
        self.add_query_param('EmrVersion',EmrVersion)

    def get_InstanceChargeType(self):
        return self.get_query_params().get('InstanceChargeType')

    def set_InstanceChargeType(self,InstanceChargeType):
        self.add_query_param('InstanceChargeType',InstanceChargeType)

    def get_ClusterId(self):
        return self.get_query_params().get('ClusterId')

    def set_ClusterId(self,ClusterId):
        self.add_query_param('ClusterId',ClusterId)

    def get_DestinationResource(self):
        return self.get_query_params().get('DestinationResource')

    def set_DestinationResource(self,DestinationResource):
        self.add_query_param('DestinationResource',DestinationResource)

    def get_ClusterType(self):
        return self.get_query_params().get('ClusterType')

    def set_ClusterType(self,ClusterType):
        self.add_query_param('ClusterType',ClusterType)

    def get_SpotStrategy(self):
        return self.get_query_params().get('SpotStrategy')

    def set_SpotStrategy(self,SpotStrategy):
        self.add_query_param('SpotStrategy',SpotStrategy)

    def get_NetType(self):
        return self.get_query_params().get('NetType')

    def set_NetType(self,NetType):
        self.add_query_param('NetType',NetType)

    def get_ZoneId(self):
        return self.get_query_params().get('ZoneId')

    def set_ZoneId(self,ZoneId):
        self.add_query_param('ZoneId',ZoneId)

    def get_DataDiskType(self):
        return self.get_query_params().get('DataDiskType')

    def set_DataDiskType(self,DataDiskType):
        self.add_query_param('DataDiskType',DataDiskType)
13,614 | 3e689c0f840ccc425934b2c65492366ee2b59b79 | """Test suite for fmbiopy.io."""
from uuid import uuid4
import pandas as pd
from pytest import fixture, mark
from fmbiopy.io import *
@fixture
def list_data():
return [["n1", "n2", "n3"], ["a", "b", "c"], ["1", "2", "3"]]
@mark.parametrize("delimiter", [(","), ("\t")])
def test_list_to_csv(list_data, sandbox, delimiter):
    # Round-trip check: write rows with list_to_csv, read them back with
    # pandas, and verify every data row survives (list_data row 0 becomes
    # the header, hence the i + 1 offset).
    if delimiter == ",":
        suffix = ".csv"
    else:
        suffix = ".tsv"
    # Random file name avoids collisions between the two parametrized runs.
    output_file = sandbox / (uuid4().hex + suffix)
    list_to_csv(list_data, output_file, delimiter)
    df = pd.read_csv(output_file, sep=delimiter)
    for i, row in enumerate(df.itertuples()):
        # itertuples() prepends the index, hence the [1:] slice.
        assert tuple(row)[1:] == tuple(list_data[i + 1])
@fixture(params=["#", "/"])
def file_with_header(sandbox, request):
    # Fixture parametrized over two comment characters: writes a file whose
    # header is one comment line plus a blank line, followed by body text.
    comment_char = request.param
    filename = sandbox / "file_with_header.txt"
    with filename.open("w") as f:
        f.write("{} foo\n".format(comment_char))
        f.write("\n")
        f.write("bar")
    # Returned dict matches read_header's keyword arguments.
    return {"filename": filename, "comment_char": comment_char}
def test_read_header(file_with_header):
    # read_header should return the comment line and the blank line only,
    # stopping before the "bar" body line.
    expected = ["{} foo\n".format(file_with_header["comment_char"]), "\n"]
    assert read_header(**file_with_header) == expected
def test_write_table_with_header(sandbox, dataframe, dataframe_header):
    # The header lines must precede the CSV rendering of the dataframe.
    tmpfile = sandbox / "write_table_with_header" / "table.csv"
    expected = ["# foo\n", "\n", "A,B\n", "0,0\n", "0,0\n"]
    write_table_with_header(dataframe, dataframe_header, tmpfile, sep=",")
    with tmpfile.open("r") as f:
        lines = f.readlines()
    assert lines == expected
|
13,615 | 47b9870beada4a0dfdfa44dbeaa830abbcd87972 | import json
def lambda_handler(event, context):
    """AWS Lambda echo handler: acknowledge with the value of event['key'].

    event['key'] must be a string (it is concatenated directly).
    """
    #print("Received event: " + json.dumps(event, indent=2))
    value = event['key']
    res = "test complete !! your value = " + value + "\n"
    return res # Echo back the first key value
|
13,616 | d8178b6e9c0ac862e929e09428270c17958b46cb | from socket import *
from logger import _logging as Log
import threading
import time
import binascii
import struct
import codecs
import re
from socket import error as sckerror
class Mysocket():
    """Thin client for a PLC "upper link" ASCII protocol over a socket.

    Commands are CR-terminated ASCII lines: RD/RDS read one/many registers,
    WR/WRS write one/many. `bit` selects the value-format suffix
    ('' -> none, '1' -> .U, '2' -> .S, '3' -> .D, '4' -> .L); see the
    legend at the bottom of this module.

    Fixes vs. the original:
    * Gets() parsed a SECOND recv() inside a list comprehension, discarding
      the reply already stored in `result` (usually timing out); the stored
      reply is parsed now.
    * The default bit='' had no entry in the suffix table, so every call
      that omitted `bit` raised KeyError; '' now maps to no suffix.
    * The number regex uses a raw string (non-raw "\\d" is a deprecated
      escape in Python 3).
    """

    def __init__(self, host, port):
        self.k = Log()
        self.logger = self.k.Getlogger(__name__)
        self.HOST = host
        self.PORT = port
        # NOTE(review): __init__ opens SOCK_DGRAM but Reconn() opens
        # SOCK_STREAM -- confirm which transport the PLC expects.
        # self.conn = socket(AF_INET, SOCK_STREAM)
        self.conn = socket(AF_INET,SOCK_DGRAM)
        self.conn.connect((self.HOST, self.PORT))
        self.conn.settimeout(10)
        self.is_number = re.compile(r"\d+")
        # '' added so the default bit='' means "no suffix" instead of KeyError.
        self.bit = {'':"",'0':"",'1':".U",'2':".S",'3':".D",'4':".L"}
        # Example commands: "RD ZF100.U" / "WR ZF100.U 150"

    def Reconn(self):
        """Recreate the connection (best-effort; connect errors are ignored)."""
        self.conn = socket(AF_INET, SOCK_STREAM)
        try:
            self.conn.connect((self.HOST, self.PORT))
        except:
            pass
        self.conn.settimeout(1)
        return self.conn

    def Send(self, register, value, bit=''):
        """Write `value` to one register (WR); return the PLC reply string,
        or False when the reply times out."""
        cmd = "WR " + str(register) + self.bit[bit] + " " + str(value) + "\x0D"
        self.conn.sendall(cmd.encode())
        try:
            result = self.conn.recv(1024).decode().strip()
        except:
            return False
        return result

    def Sends(self, register, num, datas, bit=''):
        """Write `num` consecutive values from `datas` starting at `register`
        (WRS); return the PLC reply string or False on timeout."""
        self.__data = ""
        for x in datas:
            self.__data += " " + str(x)
        cmd = "WRS " + str(register) + self.bit[bit] + " " + str(num) + self.__data + "\x0D"
        self.conn.sendall(cmd.encode())
        try:
            result = self.conn.recv(1024).decode().strip()
        except:
            return False
        return result

    def Get(self, register, bit='', logout= False):
        """Read one register (RD); return its integer value, or False when
        the reply times out or contains no digits. `logout` echoes the value."""
        cmd = "RD " + register + self.bit[bit] + "\x0D"
        self.conn.sendall(cmd.encode())
        try:
            result = self.conn.recv(1024).decode().strip()
        except:
            return False
        if len(self.is_number.findall(result)) > 0:
            result = int(result)
        else:
            return False
        if logout == True:
            print(result)
        return result

    def Gets(self, register, nums, bit=''):
        """Read `nums` consecutive registers (RDS); return a list of ints,
        or False when the reply times out or contains no digits."""
        cmd = "RDS " + register + self.bit[bit] + ' ' + str(nums) + "\x0D"
        self.conn.sendall(cmd.encode())
        self.__list = []
        try:
            result = self.conn.recv(1024).decode().strip()
        except:
            return False
        if len(self.is_number.findall(result)) > 0:
            # Parse the reply we already received (the original issued a
            # second recv() here and threw `result` away).
            self.__list = [int(x) for x in result.split(" ")]
        else:
            return False
        return self.__list
if __name__ == '__main__':
a = Mysocket("192.168.10.10",8501)
while True:
time.sleep(1)
a.Send("W308","100","3")
time.sleep(1)
a.Send("W308","200","3")
# .U : 16-bit unsigned decimal
# .S : 16-bit signed decimal
# .D : 32-bit unsigned decimal
# .L : 32-bit signed decimal
# .H : 16-bit hexadecimal
|
13,617 | 48496ba26f0eeac18b97886f5a4f51737d6d022c | #python为了将语意变得更加明确,就引入了async和awit关键字用于定义原声协程
#from collections import Awaitable
import types
# async def downloader(url):
# return "TT"
@types.coroutine
def downloader(url):
    """Legacy generator-based coroutine yielding a canned payload ("TT")."""
    payload = "TT"
    yield payload
async def download_url(url):
    """Native coroutine: await the generator-based downloader and return
    whatever it produced."""
    return await downloader(url)
if __name__ == "__main__":
coro = download_url("http://www.baidu.com")
coro.send(None) |
13,618 | 2b3b70271c13a28e4457ec6458b45415dab5b6d1 | from datetime import timedelta, datetime
def filter_dict(obj, val=None):
    """Return a copy of *obj* without the items whose value IS *val*.

    Identity comparison is intentional: only the exact sentinel object
    (default None) is dropped, not values that merely compare equal.
    """
    # TODO: We should not always remove all None items (maybe!?)
    return {key: value for key, value in obj.items() if value is not val}
def get_season_tag_name(key):
    """Abbreviated season-tag code for *key*, or None for unknown tags."""
    season_codes = {
        "Clas Ohlson": "COB",
        "Matkasse": "MK",
        "Grillartiklar": "G",
        "Halloweenartiklar": "H",
        "Julartiklar": "J",
        "Artiklar som säljs året runt, men mest runt jul": "JB",
        "Midsommarartiklar": "M",
        "Artiklar som säljs året runt, men mest runt midsommar": "MB",
        "Nyårsartiklar": "N",
        "Artiklar som säljs året runt, men mest runt nyår": "NB",
        "Påskartiklar": "P",
        "Artiklar som säljs året runt, men mest runt påsk": "PB",
        "Sommarartiklar": "S",
        "Sommartorget": "ST",
    }
    return season_codes.get(key)
def convert_season_tags(product):
    """Season-tag abbreviations for every tag on *product*; unknown tags
    (those mapping to None) are dropped."""
    abbreviations = (get_season_tag_name(tag) for tag in product.tags.all())
    return [abbr for abbr in abbreviations if abbr is not None]
def convert_order_route_from_product_type(key):
    """Order-route code for a product type; None for unmapped types."""
    routes = {
        "Crossdocking": "X",
        "Nightorder": "A",
    }
    return routes.get(key)
def get_attribute_id(key):
    """Numeric attribute id for *key*, or None when the name is unknown."""
    # data from prefilledautomaten.attribute
    attribute_ids = {
        'Ekonomipack': 1,
        'Nyckelhålsmärkt': 1736,
        'Ekologisk': 2167,
        'Glutenfri': 2168,
        'Laktosfri': 2169,
        'Låglaktos': 2170,
        'Premiumkvalité': 2171,
        'Mjölkproteinfri': 2172,
        # 'Nyhet': 2173,
        '18Åldersgräns': 2174,
        'Fairtrade': 2175,
        'Svanenmärkt': 2176,
        'Kravmärkt': 2177,
        'Video': 2178,
        'Äkta vara': 2181,
        'Astma- och Allergiförbundet': 2184,
        'test': 2187,
        'Rosa bandet': 2190,
        'Svenskt sigill': 2191,
        '3+ dagar': 2194,
        '5+ dagar': 2197,
        '7+ dagar': 2200,
        '10+ dagar': 2203,
        '30+ dagar': 2206,
        'Svenskt ursprung': 2209,
        'Svensk fågel': 2212,
        '4+ dagar': 2215,
        'Vegansk': 2218,
        'MSC': 2219,
        'Strategisk produkt': 2222,
        'Svenskt sigill klimatcertifierad': 2224,
        'ASC': 2227,
        'Från Sverige': 2230,
        'Kött från Sverige': 2233,
        'Mjölk från Sverige': 2236,
        'Faroklass brandfarligt': 2239,
        'Faroklass miljöfarligt': 2242,
        'Faroklass skadligt': 2245,
        'Faroklass Warning': 2248,
        'Energiklass A+': 2251,
        'Energiklass C': 2254,
        'Energiklass D': 2257,
        'Energiklass E': 2260,
        'Energiklass A++': 2263,
        'Energiklass A': 2266,
        'Energiklass B': 2269,
    }
    return attribute_ids.get(key)
def get_dynamic_property_id(key):
    """Map a dynamic property name to its numeric property id, or None."""
    table = {
        'Volume': 1,
        'Weight': 2,
        'KfpDfp': 3,
        'LastSalesDay': 4,
        'LastReceiptDay': 5,
        'OldPz1': 6,
        'OldPz2': 7,
        'OldPz3': 8,
        'MaxStock': 9,
        'Season': 10,
        'OrderFactor': 11,
        'MinStock': 12,
        'DfpLengthMM': 13,
        'DfpWidthMM': 14,
        'DfpHeightMM': 15,
        'DfpWeightG': 16,
        'DfpType': 17,
        'SupplierArticleNumber': 18,
        'AxfoodArticleId': 19,
        'TruckrouteOptimizationProd3': 20,
        'KfpHeightMM': 21,
        'KfpLengthtMM': 22,
        'KfpWidthMM': 23,
        'IsFakeStockBalance': 24,
        'ExternalImageUrl': 25,
        'ProductSupplier': 26,
        'ValdioDFPWidthMM': 27,
        'ValdioDFPHeightMM': 28,
        'ValidoDFPLengthtMM': 29,
        'ValdioDFPWeightG': 30,
        'DFPEANCode': 31,
        'SafetyStock': 33,
        'KfpDfpPurchaseOrder': 36,
        'NoNutritionsNeeded': 38,
        'NoIngredientsNeeded': 41,
        'NoAllergensNeeded': 44,
        'DeliveredUnitConversionFactor': 45,
        'HandlingUnitQuantity': 46,
        'BDMaterialNumber': 49,
        'ProductSegment': 55,
        'StandardUnitKfp': 56,
        'StandardUnitGtin': 59,
        'LimitedOfferProduct': 61,
        'QLPricing': 64,
        'QLMatching': 67,
        'FirstSalesDate': 70,
        'CategoryManager': 73,
    }
    # dict.get avoids the double lookup of "key in table" + "table[key]".
    return table.get(key)
def get_origin_id(key):
    """Map a source-system origin code to the target origin id, or None.

    Only a few origins are mapped so far; the commented entries document
    the remaining target ids awaiting a source-side mapping.
    """
    table = {
        752: 1,  # Svensk
        249: 2,  # Fransk
        # TODO: MAP THIS ?: 3, # Afrika
        # TODO: MAP THIS ?: 4, # Grekiskt
        # TODO: MAP THIS ?: 5, # Indien
        # TODO: MAP THIS ?: 6, # Nordamerika
        # TODO: MAP THIS ?: 7, # Latinamerika
        # TODO: MAP THIS ?: 8, # Orienten
        # TODO: MAP THIS ?: 9, # Japan
        # TODO: MAP THIS ?: 10, # Italienskt
        # TODO: MAP THIS ?: 11, # Sydostasien
        # TODO: MAP THIS ?: 12, # Spansk
        # TODO: MAP THIS ?: 13, # Tyskland
        # TODO: MAP THIS ?: 14, # "Ryssland och Östeuropa"
        # TODO: MAP THIS ?: 15, # Internationellt
        # TODO: MAP THIS ?: 16, # Övriga
        # TODO: MAP THIS ?: 73, # Sverige
        # TODO: MAP THIS ?: 74, # Norge
        # TODO: MAP THIS ?: 75, # Kanada
        # TODO: MAP THIS ?: 76, # Frankrike
        # TODO: MAP THIS ?: 77, # Grekland
        # TODO: MAP THIS ?: 78, # Portugal
        # TODO: MAP THIS ?: 79, # Danmark
        # TODO: MAP THIS ?: 80, # Italien
        # TODO: MAP THIS ?: 81, # Finland
        # TODO: MAP THIS ?: 82, # Kalifornien
        # TODO: MAP THIS ?: 83, # Thailand
        # TODO: MAP THIS ?: 84, # Kina
        # TODO: MAP THIS ?: 85, # Belgien
        # TODO: MAP THIS ?: 86, # Europa
        # TODO: MAP THIS ?: 87, # Turkiet
        # TODO: MAP THIS ?: 88, # Holland
        # TODO: MAP THIS ?: 89, # England
        # TODO: MAP THIS ?: 90, # Spanien
        # TODO: MAP THIS ?: 91, # Nederländerna
        # TODO: MAP THIS ?: 92, # Polen
        # TODO: MAP THIS ?: 93, # "Blandat: EG och icke EG"
        # TODO: MAP THIS ?: 94, # Ungern
        # TODO: MAP THIS ?: 95, # Bulgarien
        # TODO: MAP THIS ?: 96, # Kroatien
        # TODO: MAP THIS ?: 98, # India
        # TODO: MAP THIS ?: 99, # Uruguay
        # TODO: MAP THIS ?: 100, # Irland
        # TODO: MAP THIS ?: 101, # "Nya Zeeland"
        # TODO: MAP THIS ?: 102, # Sverige/England
        # TODO: MAP THIS ?: 103, # Sverige/Danmark
        # TODO: MAP THIS ?: 104, # China
        # TODO: MAP THIS ?: 105, # Holland/Frankrike
        # TODO: MAP THIS ?: 106, # "Costa Rica"
        # TODO: MAP THIS ?: 107, # Zaire
        # TODO: MAP THIS ?: 108, # Israel/USA
        # TODO: MAP THIS ?: 109, # Mexico
        # TODO: MAP THIS ?: 110, # Holland/Belgien
        # TODO: MAP THIS ?: 111, # Frankrike/Italien
        # TODO: MAP THIS ?: 112, # Sverge
        # TODO: MAP THIS ?: 113, # Centralamerika
        # TODO: MAP THIS ?: 114, # Brasilien
        # TODO: MAP THIS ?: 115, # Israel/Indien
        # TODO: MAP THIS ?: 116, # "Italien/Nya Zeeland"
        # TODO: MAP THIS ?: 117, # Sydafrika
        # TODO: MAP THIS ?: 118, # Argentina
        # TODO: MAP THIS ?: 119, # China/Thailand
        # TODO: MAP THIS ?: 120, # USA
        # TODO: MAP THIS ?: 121, # Kenya
        # TODO: MAP THIS ?: 122, # Israel
        # TODO: MAP THIS ?: 123, # Malaysia
        # TODO: MAP THIS ?: 124, # Nordostatlanten
        # TODO: MAP THIS ?: 125, # Vietnam
        # TODO: MAP THIS ?: 126, # Norden
        # TODO: MAP THIS ?: 127, # Litauen
        # TODO: MAP THIS ?: 131, # Roslagen
        # TODO: MAP THIS ?: 135, # U.S.A.
        # TODO: MAP THIS ?: 136, # DK
        # TODO: MAP THIS ?: 137, # Egypten
        # TODO: MAP THIS ?: 138, # Marocko
        # TODO: MAP THIS ?: 139, # Chile
        # TODO: MAP THIS ?: 140, # "Dominikanska Republiken"
        # TODO: MAP THIS ?: 141, # Iran
        # TODO: MAP THIS ?: 142, # Colombia
        # TODO: MAP THIS ?: 143, # Peru
        # TODO: MAP THIS ?: 144, # Zimbabwe
    }
    # dict.get avoids the double lookup of "key in table" + "table[key]".
    return table.get(key)
def convert_attributes(product, detail=None):
    """Build the attribute payload list for *product* from its tags.

    Tags mapping to a known attribute id become ``{'AttributeId': id}``
    entries. A time-boxed "Nyhet" attribute (id 2173) is also emitted,
    valid for 30 days from the detail row's first_enabled date.
    """
    attribute_ids = (get_attribute_id(tag.name) for tag in product.tags.all())
    result = [{'AttributeId': attr_id} for attr_id in attribute_ids if attr_id is not None]
    # Special case for "Nyhet": prefer store 10's detail row, else any row.
    if not detail and product.product_detail:
        detail = product.product_detail.filter(store=10).first()
        if detail is None:
            detail = product.product_detail.first()
    if detail:
        # Missing first_enabled falls back to 60 days ago — presumably so
        # the 30-day "Nyhet" window is already closed; confirm with owners.
        first_enabled = detail.first_enabled if detail.first_enabled else datetime.now() - \
            timedelta(days=60)
        result.append({
            'AttributeId': 2173,
            'FromDate': first_enabled,
            'ToDate': first_enabled + timedelta(days=30),
        })
    return result
def create_dynamic_property(key, value, store=None):
    """Build one dynamic-property dict; StoreId is set only for store rows."""
    prop = {
        'PropertyId': get_dynamic_property_id(key),
        'PropertyName': key,
        'PropertyValue': value,
    }
    if store is None:
        return prop
    prop['StoreId'] = store
    return prop
def convert_dynamic_properties(product):
    """Build the dynamic-property list for *product*.

    Product-wide properties come first, followed by one group of
    store-level overrides per row in ``product.product_detail``.
    """
    result = [
        create_dynamic_property('Volume', product.volume_dm3),
        create_dynamic_property('Weight', product.weight_g),
        create_dynamic_property('KfpHeightMM', product.height_mm),
        create_dynamic_property('KfpLengthtMM', product.length_mm),
        create_dynamic_property('KfpWidthMM', product.width_mm),
        create_dynamic_property('Season', '.'.join(
            convert_season_tags(product))),
        create_dynamic_property('LastReceiptDay', product.last_receipt_day),
        create_dynamic_property('LastSalesDay', product.last_sales_day),
        create_dynamic_property('TruckrouteOptimizationProd3',
                                convert_order_route_from_product_type(product.product_type)),
        create_dynamic_property('BDMaterialNumber',
                                product.prefered_merchantarticle.external_id if product.prefered_merchantarticle else None),
        create_dynamic_property('SupplierArticleNumber',
                                product.prefered_merchantarticle.external_id if product.prefered_merchantarticle else None),
    ]
    base_unit_quantity = get_base_unit_quantity(product, product.article.gtin)
    if base_unit_quantity is not None:
        # BUG FIX: the created property was previously discarded instead of
        # being appended to the result.
        result.append(create_dynamic_property('KfpDfp', base_unit_quantity))
    for detail in product.product_detail.all():
        result.append(create_dynamic_property(
            'OrderFactor', 1 if detail.orderfactor else 0, detail.store))
        result.append(create_dynamic_property(
            'BDMaterialNumber', detail.prefered_merchantarticle.external_id if detail.prefered_merchantarticle else None, detail.store))
        result.append(create_dynamic_property(
            'SupplierArticleNumber', detail.prefered_merchantarticle.external_id if detail.prefered_merchantarticle else None, detail.store))
        base_unit_quantity = get_base_unit_quantity(
            detail, product.article.gtin)
        if base_unit_quantity is not None:
            # BUG FIX: same discarded-result bug at store level.
            result.append(create_dynamic_property(
                'KfpDfp', base_unit_quantity, detail.store))
    return result
def get_base_unit_quantity(product, base_unit_gtin):
    """Resolve how many base units (KFP) one distribution unit (DFP) holds.

    Walks at most one level down the article hierarchy via the preferred
    merchant article; returns None when no relation can be established.
    """
    merchant_article = product.prefered_merchantarticle
    if merchant_article is None:
        return None
    if merchant_article.article.child_gtin == base_unit_gtin:
        # Direct parent of the base unit.
        return merchant_article.article.quantity_of_lower_layer
    upper_quantity = merchant_article.article.quantity_of_lower_layer
    next_lower_article = Article.objects.filter(
        gtin=merchant_article.article.child_gtin).first()
    if next_lower_article is not None and next_lower_article.child_gtin == product.article.gtin:
        # Two levels: multiply the quantities of both layers.
        return next_lower_article.quantity_of_lower_layer * upper_quantity
    return None
def convert_unit(validoo_unit):
    """Map a Validoo (GS1) unit code to the prefilledautomaten unit id.

    Returns None for unmapped codes.
    """
    # data from prefilledautomaten.unit
    unit_table = {
        "H87": 1,   # st, PIECES
        "GRM": 2,   # g, WEIGHT
        "KGM": 3,   # kg, WEIGHT
        "DLT": 6,   # dl, VOLUME
        "LTR": 7,   # L, VOLUME
        "MLT": 10,  # ml, VOLUME
        "CLT": 11,  # cl, VOLUME
        "HGM": 12,  # hg, WEIGHT
        "G24": 13,  # msk, VOLUME
        "G25": 14,  # tsk, VOLUME
        # "???": 16, # st tekoppar, VOLUME
        # "???": 17, # st kaffekoppar, VOLUME
        # "???": 18, # glas, VOLUME
        "MGM": 25,  # mg, WEIGHT,
        # "???": 26, # krm, VOLUME
        # "???": 27, # st klyftor, PARTS,
        # "???": 28, # st krukor, PIECES
        # "???": 29, # st tärningar, PIECES
        # "???": 30, # knippe, PIECES
    }
    # dict.get replaces the non-idiomatic "if(x in table): return table[x]".
    return unit_table.get(validoo_unit)
def convert_tags(product):
    """Return ids of tags that are neither season tags nor attribute tags."""
    return [
        tag.id
        for tag in product.tags.all()
        if get_season_tag_name(tag.name) is None and get_attribute_id(tag.name) is None
    ]
def convert_product(product):
    """Build the product-level export payload dict.

    Fields set to None are stripped by filter_dict; the commented-out keys
    document payload fields that are not (yet) populated by this system.
    """
    # Imported locally — presumably to avoid a circular import with the
    # api package; confirm before hoisting to module level.
    from api.serializers import ProductSerializer
    serializer = ProductSerializer(product)
    article = product.article
    image = product.productimage_set.first()
    unit_id = convert_unit(serializer.data['net_content_unit_code'])
    return filter_dict({
        "ProductId": product.product_id, # int
        "ProductName": serializer.data['name'], # string
        "Quantity": serializer.data['net_content'], # float
        # int
        "UnitId": unit_id,
        "DisplayUnitId": unit_id, # int
        "CategoryId": product.product_category.id if product.product_category else None, # int
        # "ProductGroupId": ???, # int
        # "CalculatedWeight": ???, # float
        # "RecommendedPrice": ???, # float
        "VatRate": article.vat, # float
        "EanCode": article.gtin, # string
        # string
        "ImageUrl": image.filename if image else None,
        # "ProductUrl": ???, # string
        # "SupplierId": ???, # int
        # "MaximumOrder": ???, # float
        "ProductDescription": serializer.data['description'], # string
        # "UsageDescription": ???, # string
        # string
        "IngredientsDescription": serializer.data['ingredient_description'],
        # string
        "NutritionDescription": serializer.data['nutrition_description'],
        # "StorageDescription": ???, # string
        # "StoreVarmColdFrozen": ???, # string
        # "PossibleToBuy": ???, # bool
        # "IsOffer": ???, # bool
        "RecycleFee": product.recycle_fee, # double
        # "AmountInPackage": ???, # int
        # "TempMostBought": ???, # int
        # "ExternalComment": ???, # string
        # "InternalComment": ???, # string
        # "IsPickingCostIncluded": ???, # bool
        # "IsDeliveryCostIncluded": ???, # bool
        # "RatesSum": ???, # int
        # "RatesCount": ???, # int
        "OriginId": get_origin_id(product.origin), # int?
        # "IsWine": ???, # bool
        # "AxfoodSAPId": ???, # string
        # "IsEcological": ???, # bool
        # "RelatedProductIDs": ???, # string
        "IsAdultProduct": product.adult_product, # bool
        # "AutomaticSubscription": ???, # bool
        # "IsAlreadyRenamed": ???, # bool
        # "OriginalAfterRenameFileSize": ???, # string
        # "OriginalCurrentFileSize": ???, # string
        # "CreationDate": ???, # DateTime?
        # "LastModifiedDate": ???, # DateTime?
        # "LastUpdatedByUserId": ???, # int
        # "RemovedDate": ???, # DateTime?
    })
def convert_product_store(detail, product):
    """Build one store-level export payload dict from a product detail row.

    None values are stripped by filter_dict; commented-out keys document
    payload fields that this system does not populate.
    """
    return filter_dict({
        # "ProductStoreId": ???, # int
        "ProductId": product.product_id, # int
        "StoreId": detail.store, # int
        # "LocalEancode": ???, # string
        "CalculatedCustomerPrice": detail.price, # decimal
        # "CalculatedCustomerPrice_Per_Unit": ???, # decimal
        # status 2 = out of stock, status 3 = replacement product (as used
        # below); other status values map to neither flag.
        "IsOutOfStock": detail.status == 2, # bool
        # "OutOfStockDate": ???, # DateTime
        # "StockBackDate": ???, # DateTime
        "IsReplacementProduct": detail.status == 3 # bool
        # "IsApproximateWeight": ???, # bool
        # "IsShowPricePerUnit": ???, # bool
        # "PriceValidFrom": ???, # DateTime
        # "PriceValidTo": ???, # DateTime
        # "PriceIn": ???, # decimal
        # "PercentageAddon": ???, # decimal
        # "FixedAddon": ???, # decimal
        # "PickingZone1": ???, # string
        # "PickingZone2": ???, # string
        # "PickingZone3": ???, # string
        # "SoldCount": ???, # int
        # "IsForeCastPriorityProduct": ???, # bool
        # "DontShowAsMissedProduct": ???, # bool
        # "StoreLevelOriginId": ???, # int?
        # "PickingNote": ???, # string
        # "AdvanceDeliveryMinimumOrder": ???, # int
        # "MinimumRequiredDeliveryDays": ???, # byte
        # "DeliverableWeekDays": ???, # string
        # "DeliveryDaysAhead": ???, # int
        # "CancelDaysBefore": ???, # int
        # "StorePriceIn": ???, # decimal
        # "CreationDate": ???, # DateTime?
        # "LastModifiedDate": ???, # DateTime?
        # "RemovedDate": ???, # DateTime?
        # "CanSendAdvanceDeliveryEmail": ???, # bool
        # "OldCalculatedCustomerPrice": ???, # decimal
    })
def convert_product_stores(product):
    """Convert every store-level detail row of *product* into its payload."""
    return [convert_product_store(detail, product) for detail in product.product_detail.all()]
|
13,619 | 5f5fbefb79b6164ec56bd5b1ffaf97d1832914e3 | from django.conf.urls import include, patterns
urlpatterns = patterns(
    '',
    # Mount the django_messages app under /messages/.
    # NOTE(review): patterns() was deprecated in Django 1.8 and removed in
    # 1.10 — replace with a plain list of url()/path() if Django is upgraded.
    (r'^messages/', include('django_messages.urls')),
)
|
13,620 | facb557ba15deaed25c967e33021f6e934cc4fc6 | from collections import Counter
import time
def greedy(money):
    """Count the minimum number of US coins (25/10/5/1 cents) for *money*.

    *money* arrives as a string; while it is alphabetic or negative the
    user is re-prompted on stdin. The dollar amount is then converted to
    integer cents and the coins are applied greedily, largest first.
    """
    # Re-prompt until the entry is numeric and non-negative.
    while money.isalpha() or float(money) < 0:
        money = input("Change: ")
    cents = int(100 * float(money))
    # Greedy pass: quarters, then dimes, nickels, and pennies on the rest.
    quarters, remainder = divmod(cents, 25)
    dimes, remainder = divmod(remainder, 10)
    nickels, pennies = divmod(remainder, 5)
    return quarters + dimes + nickels + pennies
# Prompt once; greedy() keeps re-prompting if the entry is invalid.
money = input("Change: ")
# NOTE(review): `coins` and `coins_list` are never used below — greedy()
# hard-codes the denominations; confirm they are dead before removing.
coins = [25,10,5,1]
coins_list = [] #stores the list of coins (currently unused)
# Time only the computation, not the input prompt.
t1 = time.time()
result = greedy(money)
t2 = time.time()
print("Total Coins: %s" %result)
print(t2-t1)
|
13,621 | 09476ef95b54ba7e083c05e80df1be6a82fce2d9 | # NOTE: Generated By HttpRunner v3.1.4
# FROM: opsLogin.har
from httprunner import HttpRunner, Config, Step, RunRequest, RunTestCase, Parameters
import pytest
import ast
class TestCaseOpslogin(HttpRunner):
    """Generated HttpRunner testcase (from opsLogin.har).

    Step 1 fetches a captcha and extracts ``verifyId``; step 2 logs in to
    the ops environment and exports ``token`` and ``sessionId`` for use by
    downstream testcases. Credentials are parameterized from common.csv.
    """
    @pytest.mark.parametrize(
        "param",
        Parameters({
            "userName-password-verifyCode1-regType": "${parameterize(common.csv)}",
        })
    )
    def test_start(self, param) -> "HttpRunner":
        # Delegate to HttpRunner's driver with one CSV parameter row.
        super().test_start(param)
    config = (
        Config("登录ops环境")
        .base_url("${get_base_url()}")
        .variables(**{
            "x_tenant_id": "2",  # tenant id; default "2", required, sent in headers
            "x_app_id": "200",
        })
        .export(*["token", "sessionId"])
        .verify(False)
    )
    teststeps = [
        Step(
            # Step 1: GET the verification code; keep verifyId for login.
            RunRequest("获取验证码")
            .get("/ops/api/web/getVerificationCode?")
            .with_headers(
                **{
                    "x-app-id": "200",
                    "x-tenant-id": "2",
                    "sso_sessionid": "",
                    "Token": "",
                }
            )
            .with_cookies(
                **{
                    "sessionId": "",
                    "token": "",
                }
            )
            .extract()
            .with_jmespath('body.data.verifyId', "verifyId")
            .validate()
            .assert_equal("status_code", 200)
            .assert_equal('headers."Content-Type"', "application/json;charset=UTF-8")
            .assert_equal("body.code", "000000")
            .assert_equal("body.msg", "Success")
        ),
        Step(
            # Step 2: POST the login form, then extract token/sessionId.
            RunRequest("ops登录")
            .post("/ops/api/web/login")
            .with_headers(
                **{
                    "x-app-id": "$x_app_id",
                    "x-tenant-id": "$x_tenant_id",
                    "sso_sessionid": "",
                    "Token": "",
                }
            )
            .with_cookies(
                **{
                    "sessionId": "",
                    "token": "",
                }
            )
            .with_json(
                {
                    "userName": "$userName",
                    "password": "$password",
                    "verifyCode": "$verifyCode1",
                    "verifyId": "$verifyId",
                    "regType": "$regType",
                }
            )
            # .teardown_hook("${teardown_hook_sleep_N_secs($response, 7)}")
            .extract()
            .with_jmespath('body.data.token', "token")
            .with_jmespath('body.data.sessionId', "sessionId")
            .validate()
            .assert_equal("status_code", 200)
            .assert_equal('headers."Content-Type"', "application/json;charset=UTF-8")
            .assert_equal("body.code", "000000")
            .assert_equal("body.msg", "Success")
        ),
    ]
# Allow running this generated testcase directly (outside pytest).
if __name__ == "__main__":
    TestCaseOpslogin().test_start()
|
13,622 | 1429dcdf5515bb5ea6f48f7c120d5c4fdc2f4677 | #!/usr/bin/env python3
import pandas as pd
import sys
path_tbl = str(sys.argv[1])
tbl = pd.read_csv(path_tbl, sep = '\t')
print("Method", "SNP_threshold", "Comparison", "Number_isolate_pairs", sep = '\t')
for snp_threshold in range(1,21):
for comparison in [ 'different_carrier', 'same_carrier_same_timepoint' ]:
isolate_pairs = tbl.query('comparison == @comparison & SNPs_corrected < @snp_threshold').shape[0]
print("Pairwise_corrected", snp_threshold, comparison, isolate_pairs, sep = '\t')
for snp_threshold in range(1,21):
for comparison in [ 'different_carrier', 'same_carrier_same_timepoint' ]:
isolate_pairs = tbl.query('comparison == @comparison & SNPs_not_corrected < @snp_threshold').shape[0]
print("Pairwise_not_corrected", snp_threshold, comparison, isolate_pairs, sep = '\t')
for snp_threshold in range(1,21):
for comparison in [ 'different_carrier', 'same_carrier_same_timepoint' ]:
isolate_pairs = tbl.query('comparison == @comparison & SNPs_no_gaps < @snp_threshold').shape[0]
print("Core_genome_nogaps", snp_threshold, comparison, isolate_pairs, sep = '\t')
|
13,623 | 576712d3ac1abb7f44e6989343bef8982f27f24e | #created by Christos Kagkelidis
import socket
from api.database import Database
from datetime import datetime
import psycopg2
import msvcrt
import pickle
import sys
# def add_record_to_db(data, cursor):
# print('Adding record to database...')
# try:
# cursor.execute("""INSERT INTO "sensor_data" (s_id, name, value, date) VALUES (%s,%s,%s,%s)""", (data['s_id'], data['name'], data['value'], datetime.now()))
# except psycopg2.Error as e:
# print(f"Error: {e}")
# return
# cursor.commit()
# print(f"Success. {data} added to database.")
UDP_IP = socket.gethostname()
UDP_PORT = 5005
print(UDP_IP)
# initialize UDP socket
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
# bind socket to all interfaces on UDP_PORT
sock.bind(('', UDP_PORT))
print("waiting for incoming messages...")
print("press CTRL+C to exit")
db = Database()
con, cursor = db.connect()
while True:
    # recvfrom blocks, so the keyboard check below only runs after a
    # datagram arrives.
    data, addr = sock.recvfrom(120) #receive data with certain buffer size
    # SECURITY: pickle.loads on data received from the network can execute
    # arbitrary code — only safe if the senders are fully trusted.
    data = pickle.loads(data)
    # print(f"received following data: {data} from {addr}. duration: {datetime.now()}\n") # decode incoming message
    print(data) #dict format
    #add_record_to_db(data, cursor)
    # msvcrt is Windows-only; any keypress ends the loop.
    if msvcrt.kbhit():
        print("Key interruption. Program closing...")
        break
# NOTE(review): cursor is usually closed before the connection — confirm
# what Database.connect returns before reordering.
con.close()
cursor.close()
|
13,624 | e639f3e3aecf9c429c11bcb80e3549feb190a41a | #!/usr/bin/env python
# -*- coding: utf-8 -*-
__author__ = 'han'
import torch
import torch.nn.functional as F
class MyNLLLoss(torch.nn.modules.loss._Loss):
    """Negative log likelihood summed over the two answer-span boundaries.

    Expects probabilities (not log-probabilities) for the start and end
    positions and returns the sum of both NLL terms.

    Shape:
        - y_pred: (batch, answer_len, prob)
        - y_true: (batch, answer_len)
        - output: scalar loss
    """
    def __init__(self):
        super(MyNLLLoss, self).__init__()

    def forward(self, y_pred, y_true):
        # nll_loss expects log-probabilities, hence the explicit log.
        log_probs = torch.log(y_pred)
        span_losses = [
            F.nll_loss(log_probs[:, position, :], y_true[:, position])
            for position in (0, 1)  # 0 = span start, 1 = span end
        ]
        return span_losses[0] + span_losses[1]
|
13,625 | 9ea537619f8d9b76474c89938d8f2abe6808dfdd | #-*- coding:utf-8 -*-
import sys
import os
import random
import time
import numpy as np
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import datasets
from tensorflow.keras import layers
# Silence TensorFlow C++ logging (2 = suppress INFO and WARNING messages).
os.environ["TF_CPP_MIN_LOG_LEVEL"]='2'
def printUsages():
    """Print command-line usage for the training script.

    Converted from Python 2 print statements — they are a SyntaxError on
    the Python 3 interpreter the TF2/keras imports above require. The
    emitted text is unchanged.
    """
    print("Usage: python wsabie_model_train.py [options] train_file")
    print("options:")
    print(" -val validation_file (default None)")
    print(" -save directory for saving the trained model (default None)")
    print(" -em embedding_size: set the size of embedding layer (default 32)")
    print(" -ep epoch_num: set the epoch num (default 50)")
    print(" -al alpha: set the learning rate (default 0.001)")
    print(" -b batch_size: set the batch size (default 128)")
    print(" -v verbose: print runing log (default True)")
def parseParameter(argv):
    """Parse the script's command-line arguments into a dict.

    The last argument is always the training file; options come in
    "-flag value" pairs. Unknown flags are silently ignored, matching the
    original behaviour.
    """
    if len(argv) < 2:  # at least 2 parameters: train.py train_file
        printUsages()
        exit(1)
    # flag -> (result key, value converter)
    converters = {
        '-val': ('val', str),
        '-save': ('save', str),
        '-em': ('em', int),
        '-ep': ('ep', int),
        '-al': ('al', float),
        '-b': ('b', int),
        '-v': ('v', lambda s: s in ['True', 'true', '1']),
    }
    parameters = {'train_file': argv[-1]}
    for i in range(1, len(argv) - 2, 2):
        flag = argv[i]
        if flag in converters:
            key, convert = converters[flag]
            parameters[key] = convert(argv[i + 1])
    return parameters
class DataLoader():
    """Loads WSABIE training data from a file.

    File layout (tab separated):
      * First line: ``m \\t n \\t b`` — m is the left bag-of-words
        vocabulary size, n the right vocabulary size, b the number of rows
        forming one training group ("one batch").
      * Each following line is one sample:
        ``label \\t left_bow_vec \\t right_bow_vec`` with bow vectors as
        space-separated word indices. In each group of b rows the first is
        the positive sample, the remaining b-1 are negatives. Left and
        right vocabularies are independent feature spaces.

    Converted from Python 2 ``print >> sys.stderr`` statements, which are
    a SyntaxError on the Python 3 interpreter TF2/keras requires.
    """
    def __init__(self, filename):
        self.filename = filename
        self.left_bow_size = 0
        self.right_bow_size = 0
        self.one_batch = 0
        self.left_vec_size = 0
        self.right_vec_size = 0
        self.lX = []
        self.rX = []
        self.X = None
        self.y = []

    def load_data(self):
        """Read samples from ``self.filename`` into lX, rX and y."""
        cnt = 0
        fin = open(self.filename, 'r')
        for line in fin:
            content = line.strip().split('\t')
            if cnt == 0:
                # Header line: vocabulary sizes and group size.
                if len(content) < 3:
                    print("第一行的组织方式必须为:「left_bow_size \\t right_bow_size \\t one_batch_size」", file=sys.stderr)
                    return
                try:
                    self.left_bow_size, self.right_bow_size, self.one_batch = [int(v) for v in content[:3]]
                except Exception as e:
                    print(e, file=sys.stderr)
                    return
            else:
                # Malformed sample lines are skipped, not fatal.
                if len(content) != 3:
                    continue
                label, left_feas, right_feas = content
                label = int(label)
                try:
                    left_feas = [int(v) for v in left_feas.split(' ')]
                    right_feas = [int(v) for v in right_feas.split(' ')]
                except Exception as e:
                    print(e, file=sys.stderr)
                    continue
                self.lX.append(left_feas)
                self.rX.append(right_feas)
                self.y.append(label)
            cnt += 1

    def preprocess_data(self, left_vec_size=None, right_vec_size=None):
        """Pad the bow vectors and build the concatenated input matrix X.

        Pass explicit sizes to align validation data with training data.
        """
        if left_vec_size is None:
            self.left_vec_size = max([len(ins) for ins in self.lX])
        else:
            self.left_vec_size = left_vec_size
        if right_vec_size is None:
            self.right_vec_size = max([len(ins) for ins in self.rX])
        else:
            self.right_vec_size = right_vec_size
        # NOTE: padding with 0 is questionable — 0 is also the default index
        # for features missing from the vocabulary.
        self.lX = keras.preprocessing.sequence.pad_sequences(self.lX, value=0, padding='post', maxlen=self.left_vec_size)
        self.rX = keras.preprocessing.sequence.pad_sequences(self.rX, value=0, padding='post', maxlen=self.right_vec_size)
        self.X = tf.concat([self.lX, self.rX], axis=1)
class WSABIE(keras.Model):
    """WSABIE model.

    A variant of the original WSABIE model for user-item recommendation;
    equivalently a DSSM that has only embedding layers.

    Args:
        left_bow_size: vocabulary size of the left bag-of-words.
        right_bow_size: vocabulary size of the right bag-of-words.
        left_vec_size: length of the (padded) left feature vector — used to
            split the concatenated input back into left/right halves.
        embedding_size: width of both embedding layers.
    """
    def __init__(self, left_bow_size, right_bow_size, left_vec_size, embedding_size=32):
        super(WSABIE, self).__init__(self)
        self.lwn = left_vec_size
        self.left_embedding = layers.Embedding(left_bow_size, embedding_size)
        self.right_embedding = layers.Embedding(right_bow_size, embedding_size)
        self.pooling = keras.layers.GlobalAveragePooling1D()
        # self.left_dense = layers.Dense(16, activation='relu')
        # self.right_dense = layers.Dense(16, activation='relu')
    def call(self, inputs, training=None):
        # First lwn columns are the left features, the rest are right ones.
        lx = self.left_embedding(inputs[:, :self.lwn])
        rx = self.right_embedding(inputs[:, self.lwn:])
        lx = self.pooling(lx)
        rx = self.pooling(rx)
        # lx = self.left_dense(lx)
        # rx = self.right_dense(rx)
        # Score = dot product of the two pooled embeddings.
        x = lx * rx
        x = tf.reduce_sum(x, axis=1)
        return x
    def left_fea_map(self, inputs):
        """Embed and pool only left-side features (e.g. for indexing)."""
        x = self.left_embedding(inputs)
        x = self.pooling(x)
        return x
    def right_fea_map(self, inputs):
        """Embed and pool only right-side features (e.g. for indexing)."""
        x = self.right_embedding(inputs)
        x = self.pooling(x)
        return x
def pairwise_hinge_loss(out, one_batch):
    """Pairwise hinge loss over groups of ``one_batch`` scores.

    Each group starts with the positive sample's score followed by
    ``one_batch - 1`` negative scores; each negative contributes
    ``max(0, 1 - pos + neg)``.

    NOTE: the per-group Python loop with repeated tf.concat is a likely
    training bottleneck (as the original author noted); kept as-is for
    behavioural parity.
    """
    loss = tf.constant([], tf.float32)
    # Removed the dead pre-loop assignments of pos/neg — they were
    # immediately overwritten on the first iteration.
    for i in range(0, len(out), one_batch):
        pos = out[i]
        neg = out[i+1:i+one_batch]
        zeros = tf.zeros_like(neg)
        group_loss = tf.reduce_max(tf.stack([zeros, 1 - pos + neg], axis=0), axis=0)
        loss = tf.concat([loss, group_loss], axis=0)
    return loss
def train(train_filename, val_filename=None, save_model_dir=None, embedding_size=32, epoch_num=50, alpha=0.001, batch_size=128, verbose=True):
    """Train the WSABIE model.

    Args:
        train_filename: training data file (see DataLoader for format).
        val_filename: optional validation file; ignored when its
            vocabulary sizes do not match the training data.
        save_model_dir: directory prefix to save the trained weights into.
        embedding_size, epoch_num, alpha, batch_size: hyper-parameters.
        verbose: print progress to stderr.

    Converted the py2 ``print >>`` statements to print(); TF2/keras
    requires Python 3, where the old form is a SyntaxError.
    """
    # Load training data.
    train_dl = DataLoader(train_filename)
    train_dl.load_data()
    train_dl.preprocess_data()
    # batch_size counts groups; scale to rows.
    batch_size *= train_dl.one_batch
    if verbose:
        print("train_data: left_bow_size[%d], right_bow_size[%d], left_vec_size[%d], right_vec_size[%d]" %
              (train_dl.left_bow_size, train_dl.right_bow_size, train_dl.left_vec_size, train_dl.right_vec_size), file=sys.stderr)
    # Load validation data.
    if val_filename:
        # BUG FIX: this previously loaded train_filename again, so
        # "validation" metrics were computed on the training set.
        val_dl = DataLoader(val_filename)
        val_dl.load_data()
        val_dl.preprocess_data(train_dl.left_vec_size, train_dl.right_vec_size)
        if verbose:
            print("val_data: left_bow_size[%d], right_bow_size[%d], left_vec_size[%d], right_vec_size[%d]" %
                  (val_dl.left_bow_size, val_dl.right_bow_size, val_dl.left_vec_size, val_dl.right_vec_size), file=sys.stderr)
        if val_dl.left_bow_size != train_dl.left_bow_size or val_dl.right_bow_size != train_dl.right_bow_size:
            val_filename = None
    # Build the model.
    model = WSABIE(train_dl.left_bow_size, train_dl.right_bow_size, train_dl.left_vec_size, embedding_size)
    model.build(input_shape=(None, train_dl.left_vec_size + train_dl.right_vec_size))
    if verbose:
        model.summary()
    optimizer = tf.keras.optimizers.Adam(alpha)
    train_loss_results = []
    train_auc_results = []
    # Training loop.
    for epoch in range(epoch_num):
        epoch_loss_avg = tf.keras.metrics.Mean()
        epoch_auc = tf.keras.metrics.AUC()
        for step in range(0, len(train_dl.y), batch_size):
            input_data = train_dl.X[step:step+batch_size]
            with tf.GradientTape() as tape:
                out = model(input_data)
                loss = pairwise_hinge_loss(out, train_dl.one_batch)
            grads = tape.gradient(loss, model.trainable_variables)
            optimizer.apply_gradients(zip(grads, model.trainable_variables))
            epoch_loss_avg(loss)
            gt = train_dl.y[step:step+batch_size]
            # AUC requires scores in [0, 1], hence the sigmoid.
            epoch_auc(gt, tf.nn.sigmoid(out))
        train_loss_results.append(epoch_loss_avg.result())
        train_auc_results.append(epoch_auc.result())
        if verbose:
            print("Epoch {:03d}: Loss: {:.3f}, AUC: {:.3f}".format(epoch, epoch_loss_avg.result(), epoch_auc.result()), file=sys.stderr)
        # Validation after every epoch.
        if val_filename and epoch % 1 == 0:
            val_auc = tf.keras.metrics.AUC()
            val_out = model(val_dl.X)
            val_auc(val_dl.y, tf.nn.sigmoid(val_out))
            if verbose:
                print("Epoch {:03d}: Validation AUC: {:.3f}".format(epoch, val_auc.result()), file=sys.stderr)
    # Save the trained weights (dated file name).
    if save_model_dir is not None:
        model.save_weights(save_model_dir + 'wsabie_' + time.strftime("%Y%m%d", time.localtime()))
if __name__ == '__main__':
    parameters = parseParameter(sys.argv)
    train_filename = parameters['train_file']
    # dict.get with defaults collapses the chain of
    # "if key in parameters" checks into one line each.
    val_filename = parameters.get('val')
    save_model_dir = parameters.get('save')
    embedding_size = parameters.get('em', 32)
    epoch_num = parameters.get('ep', 50)
    alpha = parameters.get('al', 0.001)
    batch_size = parameters.get('b', 128)
    verbose = parameters.get('v', True)
    train(train_filename, val_filename, save_model_dir=save_model_dir,
          embedding_size=embedding_size, epoch_num=epoch_num, alpha=alpha,
          batch_size=batch_size, verbose=verbose)
|
13,626 | 25099380b4bf9d4672ba9b59c2da917a7db0722f | import argparse
import os
import settings
from libs.utils import classifier_factory
from libs.bbc import train_bbc
from libs.cnn import train_cnn
from libs.rz import train_rz
def main():
    """CLI entry point: derive the embedding vector length from the
    word2vec model file, then dispatch training for the chosen dataset."""
    parser = argparse.ArgumentParser("model training")
    parser.add_argument("--dataset", type=str, choices=["bbc", "cnn", "rz"], required=True, help="Dataset name")
    parser.add_argument("--nn_type", type=str, choices=["simple", "conv", "lstm"], required=True, help="Neural Network type")
    parser.add_argument("--monitor", type=str, choices=["loss", "acc", "val_loss", "val_acc"], required=True, help="Quantity to monitor")
    parser.add_argument("--model", type=str, required=True, help="Neural Net model file name")
    parser.add_argument("--w2v", type=str, required=True, help="Word2Vec model filename")
    parser.add_argument("--batch_size", type=int, default=128, help="Batch size")
    parser.add_argument("--epochs", type=int, default=100, help="Number of epochs")
    parser.add_argument("--length", type=int, default=400, help="Articles length (characters)")
    args = parser.parse_args()
    # A gensim-style text model starts with a "<vocab> <dim>" header line;
    # otherwise fall back to counting the columns of the first vector line.
    with open(os.path.join(settings.MODELS_PATH, args.w2v), "r") as model_file:
        header = model_file.readline().split(" ")
    try:
        vector_length = int(header[1])
        assert type(vector_length) == int
    except (AssertionError, ValueError):
        vector_length = len(header) - 1
    print(f"Word embeddings vector length: {vector_length}")
    train_args = (classifier_factory[args.nn_type], args.monitor, args.model,
                  args.w2v, vector_length, args.batch_size, args.epochs, args.length)
    if args.dataset == "bbc":
        train_bbc(*train_args)
    elif args.dataset == "cnn":
        train_cnn(*train_args)
    else:
        train_rz(*train_args)
# Script entry point.
if __name__ == '__main__':
    main()
|
13,627 | d5aa59215067072f1e354392a38e51bdfba48c12 |
class Tree:
    """A binary tree node holding ``data`` with optional left/right children."""

    def __init__(self, data, left=None, right=None):
        self.left = left
        self.right = right
        self.data = data

    def printInOrder(self):
        """Print the subtree's values via in-order (left, node, right) traversal."""
        # print(x) with one argument produces identical output on Python 2
        # and 3; the original `print x` statement is a py3 SyntaxError.
        if self.left is not None:
            self.left.printInOrder()
        print(self.data)
        if self.right is not None:
            self.right.printInOrder()
if __name__ == "__main__":
    # Build a small BST by hand and print it in sorted order.
    root = Tree(15,
                Tree(10,
                    Tree(5),
                    Tree(12)),
                Tree(20,
                    Tree(17),
                    Tree(22)))
    root.printInOrder()
    # raw_input keeps the console window open; Python 2 only (py3: input).
    raw_input()
13,628 | 6eb1501db132cc6c1aaf64201559e74bf2c38a3b | import os
import json
import pickle
def get_dictionary(file_entry, file_mode):
    """Load a key->count dictionary from *file_entry*.

    Args:
        file_entry: path to the dictionary file.
        file_mode: 'json', 'pickle', or anything else for the plain-text
            one-entry-per-line "key -> value" format.

    Returns:
        dict; text mode yields int values, json/pickle keep whatever the
        file stored.
    """
    if file_mode == 'json':
        with open(file_entry, 'r') as f:
            return json.load(f)
    if file_mode == 'pickle':
        # SECURITY: pickle.load can execute arbitrary code — only use on
        # trusted files.
        with open(file_entry, 'rb') as f:
            return pickle.load(f)
    # Plain-text fallback: "key -> value" per line.
    with open(file_entry, 'r') as f:
        lines = f.readlines()
    dictionary = dict()
    for entry in lines:
        # Removed the manual `del` statements — locals are reclaimed when
        # the function returns anyway.
        parts = entry.strip('\n').split(' -> ')
        dictionary[parts[0]] = int(parts[1])
    return dictionary
|
13,629 | 684c10b9038291ad388c6f58cee051389fcdb914 | import logging
import json
import azure.functions as func
def main(req: func.HttpRequest) -> func.HttpResponse:
    """HTTP trigger: reshape the incoming risk-analysis JSON payload.

    Converts the flat attribute/parameter mappings into code/value pair
    lists and returns the reformatted payload when attributes are present.
    """
    logging.info('Python HTTP trigger function processed a request.')
    body = req.get_json()
    # Pull the expected top-level fields (KeyError propagates if absent).
    threat = body["threat"]
    analysis_year = body["analysisYear"]
    parameters = body["parameters"]
    attributes = body["attributes"]
    print(attributes)
    formatted_input_json = {
        "threat" : threat,
        "analysisYear" : analysis_year,
        "attributes" : format_attribute_for_input(attributes),
        "parameters" : format_attribute_for_input(parameters),
        "failureScenarios" : [],
        "frequencies" : []
    }
    if attributes:
        return func.HttpResponse(json.dumps(formatted_input_json))
    return func.HttpResponse(
        "This HTTP triggered function executed successfully. Pass a name in the query string or in the request body for a personalized response.",
        status_code=200
    )
def format_input(attribute):
    """Serialize a list of {'code', 'value'} pairs into a JSON object string.

    Args:
        attribute: iterable of dicts, each with 'code' and 'value' keys.

    Returns:
        JSON string mapping each code to its value.
    """
    # Dict comprehension replaces the manual accumulate-then-dump loop.
    return json.dumps({pair['code']: pair['value'] for pair in attribute})
def format_attribute_for_input(input_json):
    """Expand a {code: value} mapping into a list of {'code','value'} dicts."""
    # List comprehension replaces the manual append loop.
    return [{"code": code, "value": value} for code, value in input_json.items()]
13,630 | 0f6be2529cbbf4d1a6c3bf1b172c8e28c046367b | '''
Copyright (C) 2014 Jacques Lucke
mail@jlucke.com
Created by Jacques Lucke
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import sys, os, bpy

# Make the add-on's own directory importable so its bundled modules
# (target_camera, sniper_utils) resolve wherever the add-on is installed.
sys.path.append(os.path.dirname(__file__))
import target_camera
from sniper_utils import *

# Blender add-on metadata shown in the Add-ons preferences list.
bl_info = {
    "name": "Sniper",
    "description": "Professional camera animations for motion graphics.",
    "author": "Jacques Lucke",
    "version": (1, 3, 2),
    "blender": (2, 80, 0),
    "location": "View 3D > Tool Shelf > Animation/Sniper",
    "category": "Animation"
}
# interface
# interface
class CameraToolsPanel(bpy.types.Panel):
    """Tool-shelf panel (View 3D > Animation tab) exposing the Sniper operators."""
    bl_space_type = "VIEW_3D"
    bl_region_type = "UI"
    bl_category = "Animation"
    bl_label = "Sniper"
    bl_context = "objectmode"
    def draw(self, context):
        # Operator buttons; a hint is shown once a target camera exists.
        layout = self.layout
        col = layout.column(align = True)
        col.operator("sniper.insert_target_camera", icon = "OUTLINER_DATA_CAMERA")
        if target_camera.targetCameraSetupExists(): col.label(text="Settings are in 'Sniper' tab.", icon = "INFO")
        col = layout.column(align = True)
        col.operator("sniper.seperate_text")
        col.operator("sniper.text_to_name")
# operators
class TextToNameOperator(bpy.types.Operator):
    """Operator: rename every text object after its body text (textToName from sniper_utils)."""
    bl_idname = "sniper.text_to_name"
    bl_label = "Text to Name"
    bl_description = "Rename all text objects to their content."
    def execute(self, context):
        textToName()
        return{"FINISHED"}
class SeperateTextOperator(bpy.types.Operator):
    """Operator: split the active text object into one object per line, then delete the original."""
    bl_idname = "sniper.seperate_text"
    bl_label = "Seperate Text"
    bl_description = "Create new text object for every line in active text object."
    def execute(self, context):
        active = getActive()
        # Only act when the active object is actually a text object; otherwise no-op.
        if isTextObject(active):
            seperateTextObject(active)
            delete(active)
        return{"FINISHED"}
#registration
# Panel/operator classes registered with Blender, in registration order.
classes = (
    CameraToolsPanel,
    TextToNameOperator,
    SeperateTextOperator,
)
def register():
    """Register add-on classes and the target_camera sub-module with Blender."""
    for c in classes:
        bpy.utils.register_class(c)
    target_camera.register()
def unregister():
    """Unregister in reverse order to respect any inter-class dependencies."""
    for c in reversed(classes):
        bpy.utils.unregister_class(c)
    target_camera.unregister()
# Allows running the file directly from Blender's text editor.
if __name__ == "__main__":
    register()
|
13,631 | 7a2b65e341327aeee1c928557aa505eba3a53c5d | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
from django.utils.timezone import utc
class Migration(migrations.Migration):
    """Auto-generated migration: adds per-priority tweet-frequency counters
    (freq_hp / freq_lp / freq_np) and rewrites the last_tweeted defaults.

    NOTE(review): the year-1900 datetime defaults look like "never tweeted"
    sentinels captured at generation time; auto-generated migrations should
    not be edited by hand.
    """
    dependencies = [
        ('foodpantry', '0006_auto_20150414_0325'),
    ]
    operations = [
        migrations.AddField(
            model_name='tweetsettings',
            name='freq_hp',
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name='tweetsettings',
            name='freq_lp',
            field=models.IntegerField(default=0),
        ),
        migrations.AddField(
            model_name='tweetsettings',
            name='freq_np',
            field=models.IntegerField(default=0),
        ),
        migrations.AlterField(
            model_name='drives',
            name='last_tweeted',
            field=models.DateTimeField(default=datetime.datetime(1900, 4, 14, 3, 48, 32, 653795, tzinfo=utc), verbose_name=b'last tweet'),
        ),
        migrations.AlterField(
            model_name='fooditem',
            name='last_tweeted',
            field=models.DateTimeField(default=datetime.datetime(1900, 4, 14, 3, 48, 32, 648068, tzinfo=utc), verbose_name=b'last tweet'),
        ),
    ]
|
13,632 | d190ebed84e90430d172ebc3f4c0c1514a2bccdb | # -*- coding: utf-8 -*-
"""
CATS ANALYSIS
"""
# Basic Libraries
import pandas as pd
import numpy as np
# Pull in data
test = pd.read_csv('./test.csv')
train = pd.read_csv('./train.csv')
df = train.copy()
df['logy'] = np.log(df['SalePrice'])
# function for CATS
def cda(colname):
    """Quick categorical EDA for one column: train/test level counts side by
    side, a SalePrice boxplot per level, and per-level SalePrice means.

    Relies on the module-level ``df`` (train copy) and ``test`` DataFrames.
    """
    print(pd.concat([df[colname].value_counts(), test[colname].value_counts()], axis = 1, sort = False))
    df.boxplot('SalePrice', colname)
    print(df.groupby(colname).mean()['SalePrice'])
### function to get metrics for CAT cols - fits 2xLR: dummy & one.v.all (one is top/mode)
def catdf(df,y,tot):
    """Profile every categorical column and suggest an encoding strategy.

    For each non-numeric column, records cardinality, mode statistics and NA
    counts, then (train mode only) scores three linear regressions against
    *y*: full dummy encoding, one-vs-all on the mode level, and an ordinal
    "quantified" encoding for the known quality columns.  A suggested action
    (ignore / binary / 1vA / quantify / dummify) is derived from the scores.

    Parameters:
        df: input frame; numeric columns are dropped internally.
        y: target Series (SalePrice or its log); unused when tot != "train".
        tot: "train" to fit the scoring regressions, anything else to emit
            "test" placeholders.

    Side effect: writes the summary table to FeatureSuggestion.csv.
    Returns: the summary DataFrame (one row per categorical column).
    """
    # keep only the categorical columns
    df = df.drop(df._get_numeric_data().columns, axis = 1)
    dfdf = pd.DataFrame(columns = ["unique", "set",
                                   "mode", "mode%", "NAs",
                                   "dummyLRscore", "ovaLRscore",
                                   "quantLRscore", "suggest"])
    for col in df:
        # NOTE(review): describe() is recomputed for every column -- loop-invariant.
        temp = df.describe()
        # Ordinal quality columns that admit a Po..Ex (or int) encoding.
        quantcol = ['BsmtQual', 'BsmtCond', 'KitchenQual', 'ExterQual', 'ExterCond',
                    'GarageQual', 'GarageCond', 'HeatingQC', 'FireplaceQu', 'PoolQC', 'OverallCond', 'OverallQual']
        xunique = temp.loc['unique', col]
        xset = df[col].unique()
        xmode = temp.loc['top', col]
        xmodep = round((temp.loc['freq', col] / df.shape[0]) *100, 2)
        xnas = df.shape[0] - temp.loc['count', col]
        if tot == "train":
            from sklearn import linear_model
            # Score 1: full dummy (one-hot, first level dropped) regression.
            xdummy = pd.get_dummies(df[col], drop_first=True)
            lrdummy = linear_model.LinearRegression()
            lrdummy.fit(xdummy, y)
            # Score 2: one-vs-all indicator on the modal level.
            xova = df[col].eq(xmode).mul(1).values.reshape(-1, 1)
            lrova = linear_model.LinearRegression()
            lrova.fit(xova, y)
            xcorr = round(lrdummy.score(xdummy,y),4)
            xcorr2 = round(lrova.score(xova, y),4)
            xcorr3 = 0
            # only if in QUANTABLE columns
            if col in quantcol:
                if col in ['OverallCond', 'OverallQual']:
                    # already numeric 1..10 ratings
                    xquant = df[col].astype(int).values.reshape(-1, 1)
                else:
                    # map the quality ladder None/Po/Fa/TA/Gd/Ex onto 0..5
                    xquant = df[col].fillna(0).replace('None', 0).replace('Po', 1).replace('Fa',2).replace('TA', 3).replace('Gd',4).replace('Ex',5).values.reshape(-1, 1)
                lrquant = linear_model.LinearRegression()
                lrquant.fit(xquant, y)
                xcorr3 = round(lrquant.score(xquant, y),4)
            # determine action based on metrics (simpler encodings win ties within 0.01)
            if xnas >= df.shape[0]*.9:
                xaction = "ignore"
            elif xunique == 2:
                xaction = "binary"
            elif (xcorr2 + .01) > xcorr:
                xaction = "1vA"
            elif (xcorr3 + .01) > xcorr:
                xaction = "quantify"
            else:
                xaction = "dummify"
        else:
            # test mode: no target available, emit placeholders
            xcorr = "test"
            xcorr2 = "test"
            xcorr3 = "test"
            xaction = "test"
        dfdf.loc[col] = [xunique, xset, xmode, xmodep, xnas, xcorr, xcorr2, xcorr3, xaction]
    # save results so can duplicate on test set later
    dfdf.to_csv("FeatureSuggestion.csv", index = True)
    return dfdf
# Get all cats cols
#cat_df = df.drop(df._get_numeric_data().columns, axis = 1)
# Create cat df
#train_catdf = catdf(cat_df, df['SalePrice'], 'train')
#train_logy_catdf = catdf(cat_df, df['logy'], 'train')
#test_catdf = catdf(test.drop(test._get_numeric_data().columns, axis = 1), "na", 'test')
# compare test/train catdfs - tops are same, some differences in NA/diversity
#ttcatdf = pd.concat([train_catdf, test_catdf], axis = 1, keys = ['train', 'test']).swaplevel(axis='columns')[train_catdf.columns[:5]]
# compare y/loy catdfs - logy is better estimator for most
#import matplotlib.pyplot as plt
#plt.plot(train_logy_catdf.dummyLRscore - train_catdf.dummyLRscore)
#plt.plot(train_logy_catdf.ovaLRscore - train_catdf.ovaLRscore)
#plt.axhline(y=0, color='r', linestyle='-')
#plt.show()
# Top 10 CATs to use:
#top10Ccol = list(train_catdf.sort_values('dummyLRscore', ascending = False)[:10].index.values)
|
13,633 | a927732b156da2f3a5e464d0a7d8315eb5f17638 | D=int(input())
C=list(map(int,input().split()))
S=[list(map(int,input().split())) for i in range(D)]
T=[int(input()) for i in range(D)]
l=[0]*26
v=0
for d in range(D):
x=[S[d][i]-sum([(d+1-l[j])*C[j] for j in range(26) if i!=j]) for i in range(26)]
l[T[d]-1]=d+1
v+=x[T[d]-1]
print(v) |
13,634 | 2a1826df25eb0521f45ef79a158aafa24ba05af0 | import socket
# import cv2
import numpy as np
import pickle
#Upload image
# img = cv2.imread('/path/to/image', 0)
#Turn image into numpy-array
# Demo payload: a small 2x3 integer array (stands in for real image data).
arr = np.array([[1,2,3],[4,5,6]])
#Receiver ip (this host -- sender and receiver run on the same machine)
ip = socket.gethostname()
#Set up socket and stuff (UDP datagram socket)
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
#Loop through each array (5 for test)
for each in range(2):
    #Encode each array
    # msg = pickle.dumps(arr[each])
    # NOTE(review): a fresh random (68, 1) array is pickled instead of `arr`,
    # so the payload defined above is never sent -- confirm which is intended.
    msg = pickle.dumps(np.random.random((68, 1)))
    #Send msg to ip with port
    s.sendto(msg, (ip, 50000))
s.close()
13,635 | ed6b20c7afac51e01a8fb7508be62a9fa348cacc | from collections import deque as Queue
def slider(arr,k):
    """Print the minimum of every size-k window of arr.

    Window minimums are space-separated; the last one ends with a newline.
    Maintains a deque of indices whose values are strictly increasing, so the
    front is always the current window's minimum (O(n) total work).
    """
    n = len(arr)
    window = Queue()
    # Seed the deque with the first k elements, keeping it increasing.
    for idx in range(k):
        while window and arr[idx] <= arr[window[-1]]:
            window.pop()
        window.append(idx)
    for idx in range(k, n):
        # Front of the deque is the minimum of the window ending at idx-1.
        print(arr[window[0]], end=" ")
        # Drop indices that have slid out of the window ending at idx.
        while window and window[0] <= idx - k:
            window.popleft()
        # Drop values >= the incoming one; they can never be a future minimum.
        newcomer = arr[idx]
        while window and newcomer <= arr[window[-1]]:
            window.pop()
        window.append(idx)
    # Minimum of the final window.
    print(arr[window[0]])
# Demo runs: print window minimums for three sample arrays.
slider([10, 5, 2, 7, 8, 7],3)
slider([2, 10, 5, 7, 7, 8],3)
slider([10,0,3,2,5],2)
13,636 | 3acd966117f296b2805cd8d91c19f67b8481703d | from time import sleep
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
# init driver (path is machine-specific; with chromedriver on PATH, plain
# webdriver.Chrome() would be portable)
driver = webdriver.Chrome(executable_path=r"C:\Users\yordi\Automation\python-selenium-automation\chromedriver.exe")
driver.maximize_window()
# open the url and type the query
driver.get('https://www.google.com/')
search = driver.find_element(By.NAME, 'q')
search.clear()
search.send_keys('Dress')
# Explicitly wait (up to 2 s) for the search button to become clickable.
driver.wait = WebDriverWait(driver, 2)
# BUG FIX: the button's name attribute is 'btnK' (capital K), not 'btnk' --
# the lowercase locator always timed out.
e = driver.wait.until(EC.element_to_be_clickable((By.NAME, 'btnK')))
# BUG FIX: `e.click` was a bare attribute access and never clicked; call it.
e.click()
# verify the query made it into the result URL
assert 'dress' in driver.current_url.lower(), f"Expected query not in {driver.current_url.lower()}"
print('Test Passed')
driver.quit()
|
13,637 | 2566a27c7ee27ce269e210d4c482d7c30e62bbd1 | from django.apps import AppConfig
class MytodoConfig(AppConfig):
    """Django application configuration for the MyTodo app."""
    name = 'MyTodo'
|
13,638 | 374ca3769ca1423dcbe5a7237d253a0a1b6fdde2 | import torch
from torch import nn
from pyg_graph_models import GCN, GraphAttentionPooling, ResNetBlock, TensorNetworkModule
from utils import construct_graph_batch, pad_tensor
import numpy as np
from torch_geometric.utils import to_dense_batch
from torch.distributions import Categorical
from sinkhorn import Sinkhorn
from ged_ppo_bihyb_model import CriticNet
class GraphEncoder(torch.nn.Module):
    """Siamese GCN encoder for graph-matching / graph-edit-distance RL.

    Encodes two batches of graphs with a shared GCN, builds a Sinkhorn-
    normalized cross-graph node-similarity matrix constrained by the partial
    matching found so far, and returns per-node difference features plus
    attention-pooled global features for both graphs.
    """
    def __init__(
            self,
            node_feature_dim,
            node_output_size,
            batch_norm,
            one_hot_degree,
            num_layers=10
    ):
        super(GraphEncoder, self).__init__()
        self.node_feature_dim = node_feature_dim
        self.node_output_size = node_output_size
        self.batch_norm = batch_norm
        self.one_hot_degree = one_hot_degree
        self.num_layers = num_layers
        # When enabled, a degree one-hot (0..one_hot_degree) is appended to the raw node features.
        one_hot_dim = self.one_hot_degree + 1 if self.one_hot_degree > 0 else 0
        self.siamese_gcn = GCN(self.node_feature_dim + one_hot_dim, self.node_output_size, num_layers=self.num_layers,
                               batch_norm=self.batch_norm)
        self.sinkhorn = Sinkhorn(max_iter=20, tau=0.005)
        self.att = GraphAttentionPooling(self.node_output_size)
    @property
    def device(self):
        # Device of the module parameters (assumes all parameters share one device).
        return next(self.parameters()).device
    def forward(self, input_graphs_1, input_graphs_2, partial_x):
        """Encode both graph lists and fuse them through the partial matching.

        Args:
            input_graphs_1, input_graphs_2: lists of graphs to be batched.
            partial_x: per-pair partial assignment matrices; the last row and
                column are stripped before use (presumably dummy/virtual node
                slots -- TODO confirm against the environment code).

        Returns:
            (diff_feat, node_feat_2, global_feat_1, global_feat_2)
        """
        # construct graph batches
        batched_graphs_1 = construct_graph_batch(input_graphs_1, self.one_hot_degree, self.device)
        batched_graphs_2 = construct_graph_batch(input_graphs_2, self.one_hot_degree, self.device)
        # forward pass through the shared (siamese) GCN
        batched_node_feat_1 = self.siamese_gcn(batched_graphs_1)
        batched_node_feat_2 = self.siamese_gcn(batched_graphs_2)
        # compute cross-graph similarity on dense, padded per-graph tensors
        node_feat_1, node_indicator_1 = to_dense_batch(batched_node_feat_1, batched_graphs_1.batch)
        node_feat_2, node_indicator_2 = to_dense_batch(batched_node_feat_2, batched_graphs_2.batch)
        num_nodes_1 = node_indicator_1.sum(-1)
        num_nodes_2 = node_indicator_2.sum(-1)
        # detach(): no gradient flows through the soft matching itself
        sim_mat = torch.bmm(node_feat_1, node_feat_2.transpose(1, 2)).detach()
        sim_mat = self.sinkhorn(sim_mat, num_nodes_1, num_nodes_2)
        # Strip the last row/column of each partial assignment and pad to a batch.
        partial_x = torch.stack(pad_tensor([px[:-1, :-1] for px in partial_x]))
        for b, px in enumerate(partial_x):
            # Zero the rows/columns already matched, then pin the known matches to 1.
            graph_1_mask = px.sum(dim=-1).to(dtype=torch.bool)
            graph_2_mask = px.sum(dim=-2).to(dtype=torch.bool)
            sim_mat[b, graph_1_mask, :] = 0
            sim_mat[b, :, graph_2_mask] = 0
            sim_mat[b] = sim_mat[b] + px
        # compute cross-graph difference features (graph 1 minus its soft match in graph 2)
        diff_feat = node_feat_1 - torch.bmm(sim_mat, node_feat_2)
        global_feat_1 = self.att(batched_node_feat_1, batched_graphs_1.batch)
        global_feat_2 = self.att(batched_node_feat_2, batched_graphs_2.batch)
        return diff_feat, node_feat_2, global_feat_1, global_feat_2
class ActorNet(torch.nn.Module):
    """Two-step policy head: pick a node in graph 1, then a node in graph 2.

    Action 1 scores graph-1 nodes via a ResNet block over the state features;
    action 2 scores graph-2 nodes by a tanh-query attention against the chosen
    graph-1 node.  Masks restrict choices to nodes not yet used in the partial
    assignment; the last index acts as an extra "summary/dummy" slot --
    presumably a virtual node, confirm against the environment code.
    """
    def __init__(
            self,
            state_feature_size,
            batch_norm,
    ):
        super(ActorNet, self).__init__()
        self.state_feature_size = state_feature_size
        self.batch_norm = batch_norm
        # Scores act-1 candidates -> one scalar per node.
        self.act1_resnet = ResNetBlock(self.state_feature_size, 1, batch_norm=self.batch_norm)
        # Projects the chosen act-1 node feature into a query for act 2.
        self.act2_query = nn.Linear(self.state_feature_size, self.state_feature_size, bias=False)
    @property
    def device(self):
        # Device of the module parameters.
        return next(self.parameters()).device
    def forward(self, input_feat1, input_feat2, partial_x, known_action=None):
        """Sample a (act1, act2) pair, or re-score *known_action* (PPO update path)."""
        return self._act(input_feat1, input_feat2, partial_x, known_action)
    def _act(self, input_feat1, input_feat2, partial_x, known_action=None):
        if known_action is None:
            known_action = (None, None)
        # roll-out 2 acts
        mask1 = self._get_mask1(partial_x)
        act1, log_prob1, entropy1 = self._select_node(input_feat1, input_feat2, mask1, known_action[0])
        mask2 = self._get_mask2(partial_x, act1)
        act2, log_prob2, entropy2 = self._select_node(input_feat1, input_feat2, mask2, known_action[1], act1)
        # stacked actions/log-probs have a leading dim of 2 (one per sub-action)
        return torch.stack((act1, act2)), torch.stack((log_prob1, log_prob2)), entropy1 + entropy2
    def _select_node(self, node_feat1, node_feat2, mask, known_cur_act=None, prev_act=None, greedy_sel_num=0):
        """Score candidate nodes, apply the -inf mask, and sample/evaluate one."""
        # Append a max-pooled "summary" row so the extra (dummy) action has a feature.
        node_feat1 = torch.cat((node_feat1, node_feat1.max(dim=1, keepdim=True).values), dim=1)
        # neural net prediction
        if prev_act is None:  # for act 1
            act_scores = self.act1_resnet(node_feat1).squeeze(-1)
        else:  # for act 2
            node_feat2 = torch.cat((node_feat2, node_feat2.max(dim=1, keepdim=True).values), dim=1)
            prev_node_feat = node_feat1[torch.arange(len(prev_act)), prev_act, :]
            act_query = torch.tanh(self.act2_query(prev_node_feat))
            act_scores = (act_query.unsqueeze(1) * node_feat2).sum(dim=-1)
        # select action; mask holds -inf for forbidden nodes, 0 for allowed ones
        act_probs = nn.functional.softmax(act_scores + mask, dim=1)
        if greedy_sel_num > 0:
            # deterministic top-k selection (used for greedy/beam evaluation)
            argsort_prob = torch.argsort(act_probs, dim=-1, descending=True)
            acts = argsort_prob[:, :greedy_sel_num]
            return acts, act_probs[torch.arange(acts.shape[0]).unsqueeze(-1), acts]
        else:
            dist = Categorical(probs=act_probs)
            if known_cur_act is None:
                act = dist.sample()
                return act, dist.log_prob(act), dist.entropy()
            else:
                # PPO re-evaluation: score a previously taken action under the current policy
                return known_cur_act, dist.log_prob(known_cur_act), dist.entropy()
    def _get_mask1(self, partial_x):
        """-inf/0 mask over graph-1 rows: only unmatched rows (plus the last/dummy slot) allowed."""
        batch_num = len(partial_x)
        act_num = max([px.shape[0] for px in partial_x])
        mask = torch.full((batch_num, act_num), -float('inf'), device=self.device)
        for b in range(batch_num):
            # rows of partial_x (last row excluded) with zero sum are still unmatched
            for available_act in (1-partial_x[b][:-1, :].sum(dim=-1)).nonzero():
                mask[b, available_act] = 0
            mask[b, -1] = 0
        return mask
    def _get_mask2(self, partial_x, prev_act):
        """-inf/0 mask over graph-2 columns given the act-1 choice.

        The last (dummy) column is allowed only when act 1 picked a real node.
        """
        batch_num = len(partial_x)
        act1_num = max([px.shape[0] for px in partial_x])
        act2_num = max([px.shape[1] for px in partial_x])
        mask = torch.full((batch_num, act2_num), -float('inf'), device=self.device)
        for b in range(batch_num):
            # columns of partial_x (last column excluded) with zero sum are unmatched
            for available_act in (1-partial_x[b][:, :-1].sum(dim=-2)).nonzero():
                mask[b, available_act] = 0
            if prev_act[b] != act1_num - 1:
                mask[b, -1] = 0
        return mask
|
13,639 | 651301f923de9606a71975193f851196a6a5ce7c | # Uses python3
import sys
def get_fibonacci_last_digit_naive(n):
    """Return the last decimal digit of the n-th Fibonacci number.

    Exploits the periodicity of Fibonacci numbers modulo 10 (the Pisano
    period): digits are generated only until the leading "0, 1" pattern
    reappears, after which the answer is read from the cached cycle.
    """
    if n <= 1:
        return n
    digits = [0, 1]
    prev, curr = 0, 1
    for _ in range(n - 1):
        prev, curr = curr, (prev + curr) % 10
        if len(digits) > 2 and digits[-1] == 0 and curr == 1:
            # The cycle restarts here: drop the trailing 0 so `digits`
            # holds exactly one full period, then stop generating.
            del digits[-1]
            break
        digits.append(curr)
    return digits[n % len(digits)]
if __name__ == '__main__':
    # Read the whole of stdin as one integer.
    # FIX: the local used to be named `input`, shadowing the builtin.
    raw = sys.stdin.read()
    n = int(raw)
    print(get_fibonacci_last_digit_naive(n))
|
13,640 | 95ef3cc7d25eeb8e72eff4bf5f5726f77de0a716 | # _*_ coding.utf-8 _*_
# 开发人员 : leehm
# 开发时间 : 2020/7/7 22:59
# 文件名称 : 44.py
# 开发工具 : PyCharm
s1 = input()
|
13,641 | 27ccffb2db2e07e26f325ed28561b76806cab669 | import os.path
from glob import glob
from torch.utils import Dataset
from PIL import Image
class DatasetFolder(Dataset):
    """Dataset built from the files matching a glob pattern.

    Each sample entry may be either a bare path string (what ``glob``
    produces) or a ``(path, target)`` pair; bare paths get a ``None`` target.

    Args:
        root: kept for API compatibility; not used for sample discovery.
        pattern: glob pattern used to collect sample file paths.
        transform: optional callable applied to the loaded sample.
        target_transform: optional callable applied to the target.
        loader: callable mapping a path to a sample.  Defaults to
            ``pil_loader``.  (Fixes the original NameError: an undefined
            global ``loader`` was referenced.)
    """
    def __init__(self, root, pattern, transform=None, target_transform=None, loader=None):
        # classes, class_to_idx = find_classes(root)
        samples = glob(pattern)
        # Resolve the default at call time so pil_loader (defined later in
        # this module) is in scope; an explicit loader always wins.
        self.loader = loader if loader is not None else pil_loader
        self.samples = samples
        self.transform = transform
        self.target_transform = target_transform
    def __getitem__(self, index):
        """
        Args:
            index (int): Index
        Returns:
            tuple: (sample, target) where target is class_index of the target
            class, or None when the entry is a bare path.
        """
        entry = self.samples[index]
        # FIX: glob() yields bare path strings, so the original unconditional
        # `path, target = entry` unpack raised ValueError for every item.
        if isinstance(entry, tuple):
            path, target = entry
        else:
            path, target = entry, None
        sample = self.loader(path)
        if self.transform is not None:
            sample = self.transform(sample)
        if self.target_transform is not None:
            target = self.target_transform(target)
        return sample, target
    def __len__(self):
        return len(self.samples)
def pil_loader(path):
    """Load the image at *path* and return it as an RGB PIL Image."""
    # open path as file to avoid ResourceWarning (https://github.com/python-pillow/Pillow/issues/835)
    with open(path, 'rb') as f:
        img = Image.open(f)
        # convert() forces the pixel data to be read while the file is still open
        return img.convert('RGB')
|
13,642 | 7c174405dac6ea7d664a7988c5b137920366521b | # -*- coding: utf-8 -*-
# Created by li huayong on 2019/10/9
import numpy as np
import torch
import random
def set_seed(args):
    """Seed Python, NumPy and PyTorch RNGs for reproducible runs.

    Expects ``args.seed`` (int) and ``args.n_gpu`` (int); the CUDA RNGs are
    seeded only when at least one GPU is configured.
    """
    seed = args.seed
    random.seed(seed)
    np.random.seed(seed)
    torch.manual_seed(seed)
    if args.n_gpu > 0:
        torch.cuda.manual_seed_all(seed)
if __name__ == '__main__':
pass
|
13,643 | e777af91cdff564970170156e896712587dc0957 | from sklearn.linear_model import LinearRegression
# Fit a multi-core linear regression on the training data.
# NOTE(review): TRAIN_INPUT and TRAIN_OUTPUT are not defined anywhere in this
# file -- running it as-is raises NameError.  Presumably they are meant to be
# loaded before this snippet; confirm with the author.
predictor = LinearRegression(n_jobs=-1)
predictor.fit(X=TRAIN_INPUT, y=TRAIN_OUTPUT)
13,644 | 007d71bc68830ef3b08985ed0d93d5ba4abc447a | # -*- coding: utf-8 -*-
from django import template
from django.utils.safestring import mark_safe
from easy_thumbnails.files import get_thumbnailer
from djangocms_address import settings
register = template.Library()
@register.filter()
def render_logo(item):
    """Template filter: render *item*'s logo as an <img> thumbnail tag.

    Returns an empty string when the item has no logo.  Thumbnail geometry
    comes from the djangocms_address IMG_OPTIONS_LOGO setting.
    """
    image = item.logo
    if not image:
        return u''
    thumb_url = get_thumbnailer(image).get_thumbnail(settings.IMG_OPTIONS_LOGO).url
    # NOTE(review): item.name is interpolated unescaped before mark_safe --
    # fine for trusted names, but confirm the data source.
    return mark_safe('<img src="%s" alt="%s" />' % (thumb_url, item.name))
@register.simple_tag()
def gmaps_api_key():
    """Template tag: return the Google Maps API key URL fragment, or '' when unconfigured."""
    if settings.GEOCODING_KEY:
        return settings.GEOCODING_KEY_URL
    return ''
@register.simple_tag()
def filter_via_ajax():
    """Template tag: return the 'ajax_filter' CSS hook when AJAX filtering is enabled."""
    return 'ajax_filter' if settings.FILTER_USING_AJAX else ''
|
13,645 | 564a13be0241e222a21778f2c663d8c7b49b5eca | # # ЗАДАЧА 1
# #
# # Реализовать класс Person, у которого должно быть два публичных поля: age и name.
# # Также у него должен быть следующий набор методов: know(person),
# # который позволяет добавить другого человека в список знакомых.
# # И метод is_known(person), который возвращает знакомы ли два человека
#
#
# class Person(object):
#
# def __init__(self,age,name):
# self.age=age
# self.name=name
# self.lst_knows = []
#
#
#
# def know_add(self,person):
#
# if person in self.lst_knows:
# print('{} уже знает по имени {}'.format(self.name,person))
# else:
# self.lst_knows.append(person)
# print(self.lst_knows)
#
#
# def is_know(self,person):
#
# knowing_state=person in self.lst_knows
# if knowing_state:
# print('{} знаком с {}'.format(self.name,person))
# else:
# print('{} Не знаком с {}'.format(self.name, person))
#
#
# p=Person(14,'Misha')
# p.know_add('Misha')
# p.know_add('Саша')
# p.know_add('Джон')
# p.know_add('Саша')
# p.is_know('Джон')
#
#
# # ЗАДАЧА 2
# #
# # Есть класс, который выводит информацию в консоль: Printer,
# # у него есть метод: log(*values).
# # Написать класс FormattedPrinter, который выводит в консоль информацию, окружая ее строками из *
#
# class Printer(object):
# def log(self,*values):
# p=values
# print(*p)
#
# class FormattedPrinter(Printer):
#
# def format_print(self,*values):
# v=values
# for val in v:
# print('*'*10)
# self.log(val)
# print('*' * 10)
#
#
#
#
# p=Printer()
# #p.log(10,33,15,66,'john','alisa')
#
# f=FormattedPrinter()
# f.format_print(10,33,15,66,'john','alisa')
#
# ЗАДАЧА 3
#
# Написать класс Animal и Human,
# сделать так, чтобы некоторые животные были опасны для человека (хищники, ядовитые).
# Другие - нет. За что будет отвечать метод is_dangerous(animal)
# Слегка дополнил задачу:
# Человек наследуется от животного.
# И у животных и у людей добавлен параметр агрессии.
# У животного и у человека есть метод Атаковать человека.
# Если параметр агрессии у нападающего и жертвы совпадает считается,
# что жертва отбилась и не считает нападавшего опасным.
# В противном случае жертва добавляет нападающего в перечень опасных для себя существ
import random
class Animal(object):
    """A creature with an aggression level, a type label and raw power."""

    def __init__(self, aggressive, type, power):
        self.aggressive = aggressive
        self.type = type
        self.power = power

    def attak_power(self):
        """Print and return this animal's attack strength (aggression * power)."""
        strength = self.aggressive * self.power
        print('Power of {} is {}'.format(self.type, strength))
        return strength
class Attak(object):
    """Simulates repeated attacker-vs-defender exchanges with random luck rolls."""
    def __init__(self,animalOne_pow,animalTwo_pow):
        # attack strengths of side one (attacker) and side two (defender)
        self.animal_one_k=animalOne_pow
        self.animal_two_k=animalTwo_pow
        # NOTE(review): result_list is never used anywhere in the class.
        self.result_list=[]
    def attak(self):
        """Run one exchange, print its outcome banner and return the result label."""
        result_attak=self.attak_result()
        self.print_result_attak(result_attak)
        return result_attak
    def attak_result(self):
        """Return 'Succes!' / 'Draw!' / 'Lose!' from one pair of luck-scaled rolls."""
        # Each side's strength is scaled by an independent 1-10 luck roll.
        attak_result=self.animal_one_k*self.luck() - self.animal_two_k*self.luck()
        # NOTE(review): 'Succes!' is returned when the SECOND side out-rolls the
        # first (difference < 0); the caller counts those to label the defender
        # "Danger".  Confirm the intended perspective -- and keep the 'Succes!'
        # spelling, since the caller's count() matches it literally.
        if attak_result < 0:
            return 'Succes!'
        elif attak_result == 0:
            return 'Draw!'
        else:
            return 'Lose!'
    def print_result_attak(self,result_to_print):
        """Print the outcome framed by asterisk rows."""
        print('*' * 20)
        print('Attack was', result_to_print)
        print('*' * 20)
        pass
    def luck(self):
        """Return a uniform luck multiplier in [1, 10]."""
        luck = random.randint(1, 10)
        return luck
# Demo: pit a human against each animal ten times and record which animals win
# more than half of the exchanges ("Danger") versus the rest ("Nyasha").
Human=Animal(5,'Human',3)
Wolf=Animal(7,'Wolf',2)
Horse=Animal(2,'Horse',4)
Crocodile = Animal(9,'Croco',3)
Gippo=Animal(7,'Gippo',10)
Yojick=Animal(1,'Yojick',1)
animal_list=[Horse,Wolf,Crocodile,Gippo,Yojick]
human_attak_power=Human.attak_power()
is_dangeros=[]
for animal in animal_list:
    animal_attak_power=animal.attak_power()
    battle = Attak(human_attak_power,animal_attak_power)
    table_result = []
    # Ten independent exchanges per animal.
    for i in range(10):
        table_result.append(battle.attak())
    # 'Succes!' means the animal out-rolled the human (see Attak.attak_result).
    if table_result.count('Succes!') >5:
        is_dangeros.append({animal.type:'Danger'})
    else:
        is_dangeros.append({animal.type: 'Nyasha'})
print(is_dangeros)
13,646 | 1f02ddb59b3ce388e85e9a607f955c8ebd9ad2e3 | str1=str(input())
str2=str1[::-1]
if(str1==str2):
print("True")
else:
print("False") |
13,647 | 97a60a28a7646d7a71fa0480b9734c816ca03fa2 | import os
import cv2
import time
from moviepy.editor import AudioFileClip
import tqdm
import glob
from PyPDF2 import PdfFileMerger, PdfFileReader
from modules.tape import TapeFrame, Tapes, PDF
class VideoReader:
    """Turns slide-show style .mp4 recordings into PDFs of distinct slides.

    Samples the video every ``dt`` seconds, starts a new "tape" (slide)
    whenever the frame changes, optionally attaches the audio spoken over
    each slide, and renders everything through the project's Tapes/PDF
    helpers (modules.tape).
    """
    def __init__(self, dt: float = 1.0, take_speech: bool = True, verbose: bool = True, **kwargs):
        # dt: sampling interval in seconds between inspected frames
        self.dt = dt
        # take_speech: attach per-slide audio clips when True
        self.take_speech = take_speech
        self.verbose = verbose
        self.saving_folder = kwargs.get("saving_folder", 'resultsdata')
        self.kwargs = kwargs
        # paths of every PDF produced so far (filled by make_pdf_from_mp4)
        self.pdf_paths = []
        os.makedirs("tempdata", exist_ok=True)
        os.makedirs("resultsdata", exist_ok=True)
    def make_pdf_from_mp4(self, mp4_filename: str) -> str:
        """Convert one .mp4 into a PDF of its distinct frames; return the PDF path."""
        vidcap = cv2.VideoCapture(mp4_filename)
        audioclip = AudioFileClip(mp4_filename) if self.take_speech else None
        pdf = PDF(os.path.basename(mp4_filename).replace(".mp4", ''),
                  take_text=self.take_speech,
                  saving_folder=self.saving_folder,)
        tapes = Tapes()
        cv2.startWindowThread()
        t_i = time.time()
        frame_start_time = 0
        frame_end_time = 0
        time_s = 0
        print(f"Reading of the video ...") if self.verbose else None
        while True:
            # Seek to the next sample position (milliseconds).
            vidcap.set(cv2.CAP_PROP_POS_MSEC, time_s * 1_000)
            success, image = vidcap.read()
            if not success:
                break
            # NOTE(review): cv2.resize expects (width, height) but this passes
            # shape[0] (rows/height) first -- confirm the intended geometry.
            image = cv2.resize(image, (min(980, image.shape[0]), min(750, image.shape[1])))
            # A frame differing from the last stored tape starts a new slide.
            if not tapes.has_image_at(image, -1):
                frame_end_time = time_s
                if self.take_speech and len(tapes) > 0:
                    # Audio spoken while the previous slide was on screen.
                    subaudio = audioclip.subclip(frame_start_time, frame_end_time)
                    # print("duration audio cut: ", time.strftime('%H:%M:%S', time.gmtime(subaudio.duration)),
                    #       " [h:m:s] ", (frame_start_time, frame_end_time))
                    tapes[-1].audioclip = subaudio
                if len(tapes) > 0:
                    frame_start_time = frame_end_time
                    tapes[-1].times = (frame_start_time, frame_end_time)
                tapes.add_tape(TapeFrame(image, **self.kwargs))
            time_s += self.dt
        cv2.destroyAllWindows()
        t_f = time.time()
        vidcap.release()
        print(f"Reading of the video done") if self.verbose else None
        print(f"Making the pdf...") if self.verbose else None
        pdf.add_diapos(tapes)
        pdf.save()
        print(f"Making pdf done") if self.verbose else None
        print(f"elapse time: {t_f - t_i:.2f} [s]") if self.verbose else None
        self.pdf_paths.append(pdf.path)
        return pdf.path
    def get_sort_pdf_paths(self) -> list:
        """Return the produced PDF paths in lexicographic order (copy; no mutation)."""
        sorted_paths = self.pdf_paths.copy()
        sorted_paths.sort()
        return sorted_paths
    def make_pdf_from_folder(self, dir_path: str) -> str:
        """Convert every .mp4 in *dir_path*, then merge all PDFs into one file."""
        self.saving_folder = dir_path
        for mp4_file_path in tqdm.tqdm(glob.glob(os.path.join(self.saving_folder, '*.mp4')), unit="mp4_file"):
            self.make_pdf_from_mp4(mp4_file_path)
        # Call the PdfFileMerger
        merged_pdf = PdfFileMerger()
        # Loop through all of pdf and append their pages
        for pdf_path in tqdm.tqdm(self.get_sort_pdf_paths(), unit="pdf_file"):
            # NOTE(review): PdfFileReader's second positional arg is `strict`,
            # not a file mode -- 'rb' is truthy and merely enables strict mode.
            merged_pdf.append(PdfFileReader(pdf_path, 'rb'))
        # Write all the files into a file which is named as shown below
        merged_pdf_path = f"{self.saving_folder}/{os.path.basename(dir_path)}.pdf"
        merged_pdf.write(merged_pdf_path)
        return merged_pdf_path
13,648 | bd152ef06d569fa062812ca373b5865eaec7bc01 | #
# @lc app=leetcode.cn id=209 lang=python3
#
# [209] 长度最小的子数组
#
# @lc code=start
from typing import List
class Solution:
    """LC 209: length of the shortest contiguous subarray with sum >= s (0 if none).

    FIX: the original defined ``minSubArrayLen`` three times; only the last
    definition survived, silently shadowing the first two.  The alternative
    approaches are kept under distinct names and the O(n) sliding-window
    version keeps the public name, so external callers are unaffected.
    """

    def minSubArrayLen_bruteforce(self, s: int, nums: List[int]) -> int:
        """O(n^2): for every start index, extend until the running sum reaches s."""
        n = len(nums)
        res = n + 1  # sentinel: no window of length <= n found yet
        for i in range(n):
            total = 0
            for length in range(1, n + 1):
                if i + length - 1 < n:
                    total += nums[i + length - 1]
                    if total >= s:
                        res = min(res, length)
                        break
        return res if res < n + 1 else 0

    def minSubArrayLen_prefix_bisect(self, s: int, nums: List[int]) -> int:
        """O(n log n): binary-search prefix sums (valid because nums are non-negative)."""
        from bisect import bisect_left  # hoisted: was re-imported on every loop iteration
        n = len(nums)
        sums = [0]
        for i in range(n):
            sums.append(sums[-1] + nums[i])
        res = n + 1
        for i in range(n):
            # smallest j with sums[j] >= s + sums[i] gives window [i, j)
            target = s + sums[i]
            index = bisect_left(sums, target)
            if index != n + 1:
                res = min(res, index - i)
        return res if res != n + 1 else 0

    def minSubArrayLen(self, s: int, nums: List[int]) -> int:
        """O(n) sliding window: grow the right edge, shrink from the left while sum >= s."""
        n = len(nums)
        left = 0
        res = n + 1
        total = 0
        for right in range(n):
            total += nums[right]
            while total >= s:
                res = min(res, right - left + 1)
                total -= nums[left]
                left += 1
        return res if res != n + 1 else 0
# @lc code=end
|
13,649 | 8d409647853193c0f19b5b010fa3819127f8278b | import africastalking
import os
class SMS:
    """Thin wrapper around the Africa's Talking SMS service.

    Credentials are read from the AFRICASTALKING_USERNAME / AFRICASTALKING_API_KEY
    environment variables at construction time.
    """
    def __init__(self):
        # Set your app credentials
        self.username = os.getenv("AFRICASTALKING_USERNAME")
        self.api_key = os.getenv("AFRICASTALKING_API_KEY")
        # Initialize the SDK
        africastalking.initialize(self.username, self.api_key)
        # Get the SMS service
        self.sms = africastalking.SMS
    def send(self, *recipients):
        """Send a hard-coded demo message to *recipients*.

        NOTE(review): the demo call below passes a LIST as one positional
        argument, so ``recipients`` becomes a 1-tuple containing that list --
        verify against the SDK's expected recipients format.
        """
        # Set the numbers you want to send to in international format
        recipients = recipients
        # Set your message
        message = "I'm a lumberjack and it's ok, I sleep all night and I work all day"
        # Set your shortCode or senderId
        sender = "shortCode or senderId"
        # hit send.
        try:
            response = self.sms.send(message, recipients, sender)
            print (response)
        except Exception as e:
            print ('Encountered an error while sending: %s' % str(e))
if __name__ == '__main__':
    # Demo invocation: replace the placeholders with real E.164 numbers.
    SMS().send(["+254713YYYZZZ", "+254733YYYZZZ"])
13,650 | 65c89351e2f575359f5cd8aef43c0c9b90a56b3e | # FUNCTIONAL TESTS INVOLVING USER STORIES
import os
from django.test import LiveServerTestCase
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
from django.contrib.auth.hashers import make_password
from users.models import UserAuth
class NewUser(LiveServerTestCase):
    """User-story functional test: a brand-new visitor signs up, logs in,
    uploads a photo and sees the moderation notice."""
    # LiveServerTestCase creates it's own development server
    def setUp(self):
        """Start a Chrome session pointed at the live test server."""
        self.BASE_URL = self.live_server_url
        # For Mac/Linux, chrome webdriver should be in PATH or in usr/local/bin
        # For Windows, you can pass the path of the driver
        # using executable_path=<path>/chromedriver.exe
        self.browser = webdriver.Chrome()
        self.wait = WebDriverWait(self.browser, 10)
    def tearDown(self):
        """Close the browser after each test."""
        self.browser.quit()
    def test_new_user(self):
        '''
        Tests a new user functionality for signup and login
        '''
        # Adam had attended Joe's wedding.
        # Joe told him that he had created a website which
        # allows people to upload and share the wedding photos.
        # Intrigued, he goes to the url Joe mentioned.
        self.browser.get(self.BASE_URL)
        # He is greeted by a welcome page.
        # The page says: "Welcome to Jane and Joe's Wedding Album"
        tag = self.browser.find_element_by_tag_name('body')
        self.assertIn("Welcome to Jane and Joe's Wedding Album",tag.text)
        # He also notices the tab title "J&J's Wedding Roll"
        title = self.browser.title
        self.assertEqual("J&J's Wedding Roll",title)
        # Below, there is a login form which asks for an email
        # address and password and a submit button below it.
        login_email_field = self.browser.find_element_by_id('id_email')
        login_password_field = self.browser.find_element_by_id('id_password')
        login_submit_button = self.browser.find_element_by_id('id_submit_button')
        self.assertEqual('Login',login_submit_button.get_attribute("innerHTML"))
        # He also sees a link for new user signup
        signup_link = self.browser.find_element_by_link_text('Signup')
        # Since, he has never used it before, he clicks on Signup
        signup_link.click()
        # He is redirected to a registration page
        curr_url = self.browser.current_url
        self.assertEqual(curr_url,self.BASE_URL+'/signup/')
        # He sees three form fields this time:
        # One asks for his email, while the other two are
        # password and password confirmation fields along with a
        # Signup Button
        signup_email_field = self.browser.find_element_by_id('id_email')
        signup_password1_field = self.browser.find_element_by_id('id_password1')
        signup_password2_field = self.browser.find_element_by_id('id_password2')
        signup_submit_button = self.browser.find_element_by_id('id_submit_button')
        self.assertEqual('SignUp',signup_submit_button.get_attribute("innerHTML"))
        # He starts filling out the form
        signup_email_field.send_keys('adam2000@gmail.com')
        signup_password1_field.send_keys('password123')
        signup_password2_field.send_keys('password123')
        signup_submit_button.click()
        ## Alternate method
        # signup_password2_field.send_keys(Keys.ENTER)
        ##
        # He is redirected to the login page
        curr_url = self.browser.current_url
        self.assertEqual(curr_url,self.BASE_URL+'/login/')
        # A message is displayed that his account has been created
        tag = self.browser.find_element_by_tag_name('body')
        self.assertIn('Your Account Has Been Created! Go Ahead and Log in!!',tag.text)
        # Happy that his account was created he proceeds to log in
        login_email_field = self.browser.find_element_by_id('id_email')
        login_password_field = self.browser.find_element_by_id('id_password')
        login_submit_button = self.browser.find_element_by_id('id_submit_button')
        login_email_field.send_keys('adam2000@gmail.com')
        login_password_field.send_keys('password123')
        login_submit_button.click()
        # Voila! he is logged in and is presented with a page.
        curr_url = self.browser.current_url
        self.assertEqual(curr_url,self.BASE_URL+'/roll/')
        # He Sees "Oops! No Photos Have Been Uploaded Yet!!".
        tag = self.browser.find_element_by_tag_name('body')
        self.assertIn('Oops! No Photos Have Been Uploaded Yet!!',tag.text)
        # Explicit wait for the upload tab to render.
        # NOTE(review): the bare except converts ANY wait failure into a
        # message-less self.fail() -- the underlying TimeoutException would be
        # more informative.
        try:
            upload_button =WebDriverWait(self.browser,10).until(
                EC.presence_of_element_located((By.ID,"id_upload"))
            )
        except:
            self.fail()
        # He sees a tab "Upload A Photo"
        #upload_button = self.browser.find_element_by_id('id_upload')
        # Since he has a few pictures to upload, he clicks on it
        upload_button.click()
        # Now he is directed to aother page.
        # Is says 'roll/upload' in the URL bar
        self.assertEqual(self.browser.current_url,self.BASE_URL+'/roll/upload/')
        # He finds a field with a button which says "Upload Files" and a description
        # He chooses a file from his computer
        upload_photo_button = self.browser.find_element_by_id('id_photo_url')
        upload_photo_button.send_keys(os.path.join(os.getcwd(),"image.jpeg"))
        # He then fills out the description
        description_field = self.browser.find_element_by_id('id_description')
        description_field.send_keys("Adam's First Photo Upload")
        submit_button = self.browser.find_element_by_id('id_submit')
        submit_button.click()
        # He is redirected to the home page which again says "Oops! No Photos Have Been Uploaded Yet!!".
        # He is confused
        # Aha!, a message says "Your Photo Has Been Uploaded But Will NOT Be Visible Untill Jane Or Joe Approve It."
        self.assertEqual(self.browser.current_url,self.BASE_URL+'/roll/')
        tag = self.browser.find_element_by_tag_name('body')
        self.assertIn('Oops! No Photos Have Been Uploaded Yet!!',tag.text)
        self.assertIn('Your Photo Has Been Uploaded But Will NOT Be Visible Untill Jane Or Joe Approve It.',tag.text)
        # He has no choice but to wait for either Joe or Jane to approve his photos
        # So, He logs out (there is a logout button to the top left)
        logout_button = self.browser.find_element_by_id('id_logout')
        logout_button.click()
        self.assertEqual(self.browser.current_url,self.BASE_URL+'/login/')
class JoeAndJaneNewImage(LiveServerTestCase):
    """Functional test: a photo uploaded by a site owner is auto-approved
    and immediately visible to the other owner."""

    def setUp(self):
        self.BASE_URL = self.live_server_url
        self.browser = webdriver.Chrome()
        # Jane and Joe are the owners of the website with extra privileges;
        # the site admin has created their accounts for them.
        user_joe = UserAuth.objects.create_user(email='joe123@email.com')
        user_joe.set_password('password123')
        user_joe.is_owner = True
        user_joe.save()
        user_jane = UserAuth.objects.create_user(email='jane456@email.com')
        user_jane.set_password('password123')
        user_jane.is_owner = True
        user_jane.save()

    def tearDown(self):
        self.browser.quit()

    def _login(self, email, password):
        """Fill in and submit the login form currently on screen."""
        self.browser.find_element_by_id('id_email').send_keys(email)
        self.browser.find_element_by_id('id_password').send_keys(password)
        self.browser.find_element_by_id('id_submit_button').click()

    def test_admin(self):
        # Joe visits his site and logs in with the credentials the admin gave him.
        self.browser.get(self.BASE_URL)
        self._login('joe123@email.com', 'password123')
        # He is directed to the (empty) rolls page and proceeds to the upload page.
        self.browser.find_element_by_id('id_upload').click()
        self.browser.find_element_by_id('id_photo_url').send_keys(
            os.path.join(os.getcwd(), "image.jpeg"))
        # He then fills out the description and submits.
        self.browser.find_element_by_id('id_description').send_keys("Joe's First Photo Upload")
        self.browser.find_element_by_id('id_submit').click()
        # He is redirected to the rolls page; since he is an owner, the photo
        # is auto-approved and must appear on the page.
        # BUG FIX: find_elements_by_tag_name returns a (possibly empty) list
        # and never raises, so the original try/except could never fail.
        if not self.browser.find_elements_by_tag_name('img'):
            self.fail('Image did not appear after posting')
        # Happy, he logs off.
        self.browser.find_element_by_id('id_logout').click()
        # Now Jane logs in (logout leaves the browser on the login page).
        self._login('jane456@email.com', 'password123')
        # She sees the photo that Joe uploaded.
        try:
            self.browser.find_element_by_link_text("Joe's First Photo Upload")
            img = self.browser.find_element_by_tag_name('img')
            self.assertIn('uploaded_files/image', img.get_attribute('src'))
        except Exception:
            self.fail('Jane could not see image uploaded by Joe')
class JoeAndUserApprovals(LiveServerTestCase):
    """Functional test: a standard user's upload stays hidden until an owner
    approves it from the manage page.

    Assumes users have already created accounts.
    """

    def setUp(self):
        self.BASE_URL = self.live_server_url
        self.browser = webdriver.Chrome()
        # Owner account.
        user_joe = UserAuth.objects.create_user(email='joe123@email.com')
        user_joe.set_password('password123')
        user_joe.is_owner = True
        user_joe.save()
        # Standard (non-owner) account.
        user_adam = UserAuth.objects.create_user(email='adam2000@email.com')
        user_adam.set_password('password123')
        user_adam.save()

    def tearDown(self):
        self.browser.quit()

    def _login(self, email, password):
        """Open the site root and submit the login form."""
        self.browser.get(self.BASE_URL)
        self.browser.find_element_by_id('id_email').send_keys(email)
        self.browser.find_element_by_id('id_password').send_keys(password)
        self.browser.find_element_by_id('id_submit_button').click()

    def _assert_photo_visible(self, description, failure_message):
        """Assert that the uploaded photo with the given description is shown."""
        try:
            self.browser.find_element_by_link_text(description)
            img = self.browser.find_element_by_tag_name('img')
            self.assertIn('uploaded_files/image', img.get_attribute('src'))
        except Exception:
            self.fail(failure_message)

    def test_approvals(self):
        # Adam logs in first and uploads a photo.
        self._login('adam2000@email.com', 'password123')
        self.browser.find_element_by_id('id_upload').click()
        self.browser.find_element_by_id('id_photo_url').send_keys(
            os.path.join(os.getcwd(), "image.jpeg"))
        self.browser.find_element_by_id('id_description').send_keys("Adam's First Photo Upload")
        self.browser.find_element_by_id('id_submit').click()
        # He then logs out. Previous tests verified the photo did not appear.
        self.browser.find_element_by_id('id_logout').click()
        # Now Joe logs in.
        self._login('joe123@email.com', 'password123')
        # Verify that the photo is indeed not present yet.
        # BUG FIX: the original wrapped find_elements_by_tag_name (which never
        # raises) in a try whose self.fail was swallowed by a bare except,
        # making this check a no-op. Assert directly on the element list.
        self.assertFalse(self.browser.find_elements_by_tag_name('img'),
                         "Image appeared when it shouldn't have")
        # Joe sees a '(1)' next to Manage Pending Requests, telling him one
        # upload request is pending, and clicks it.
        requests_button = self.browser.find_element_by_id('id_manage')
        self.assertIn('(1)', requests_button.get_attribute('innerHTML'))
        requests_button.click()
        # He is taken to the manage page.
        self.assertEqual(self.browser.current_url, self.BASE_URL + '/roll/manage/')
        # The photo pending approval is visible with a checkbox next to it.
        self._assert_photo_visible("Adam's First Photo Upload",
                                   'Joe could not see image uploaded by Adam')
        try:
            for checkbox in self.browser.find_elements_by_tag_name('input'):
                if checkbox.get_attribute('name') == 'adam2000@email.com':
                    checkbox.click()
        except Exception:
            self.fail("The Checkbox did not have Adam's email address.")
        # Joe presses Approve; he is redirected to the rolls page, which now
        # shows the approved photo.
        self.browser.find_element_by_id('id_approve').click()
        self._assert_photo_visible("Adam's First Photo Upload",
                                   'Joe could not see the approved image')
        # The manage-pending counter drops back to zero.
        requests_button = self.browser.find_element_by_id('id_manage')
        self.assertIn('(0)', requests_button.get_attribute('innerHTML'))
        # Happy, he logs out.
        self.browser.find_element_by_id('id_logout').click()
|
13,651 | ab56892cc17d387cd0a9b595ccaa6e9de52c7482 | #!/usr/bin/python
import sys
import os
import shlex

# Backup one MySQL database to backup.sql.gz.
# Usage: script.py <usuario> <clave>; the database name is asked interactively.
if len(sys.argv) < 3:
    sys.exit("Uso: {0} <usuario> <clave>".format(sys.argv[0]))
usuario = sys.argv[1]
clave = sys.argv[2]
basedatos = input ("¿A que base de datos quieres hacerle una copia de seguridad?")
print ("Vas a copiar la base de datos:" + basedatos)
# SECURITY FIX: quote every user-supplied value with shlex.quote so a crafted
# database name (or credential) cannot inject extra shell commands.
# NOTE: the password still appears on the command line and is echoed below;
# consider a MySQL option file for real deployments.
cadena = ("mysqldump -u " + shlex.quote(usuario) + " -p" + shlex.quote(clave)
          + " " + shlex.quote(basedatos) + " | gzip > backup.sql.gz")
print (cadena)
os.system (cadena)
#mysqldump -u usuario -pclave basedatos | gzip > backup.sql.gz
|
13,652 | 2a431dfaeff82093e43b487b9da4745491bcdb83 | import os
import sys
import pickle
import subprocess
import numpy as np
import glob ####
def dispatch(script_path, dataset_name, data_dir, boundaries_path, names, out_pattern_base, memory, fails_only=False):
    """Build and submit one sbatch read-counting job per sample directory.

    Args:
        script_path: worker script passed to sbatch.
        dataset_name: dataset label forwarded to the worker.
        data_dir: directory containing one sub-directory per sample.
        boundaries_path: gene-boundary annotation file for the worker.
        names: sample directory names to consider.
        out_pattern_base: output path template, formatted with the sample name.
        memory: sbatch --mem value.
        fails_only: when True, resubmit only samples whose status file does
            not read "Complete"; when False, reset every status file first.
    """
    timeout_msg = "sbatch: error: Batch job submission failed: Socket timed out on send/recv operation"
    pending = []
    for sample in names:
        bam_path = os.path.join(data_dir, sample, "{0}Aligned.sortedByCoord.out.bam".format(sample))
        # Skip samples whose BAM is missing or suspiciously small (< 100 kB).
        if not os.path.isfile(bam_path) or os.path.getsize(bam_path) < 1e5:
            continue
        status_path = os.path.join(data_dir, sample, "countstatus.txt")
        if fails_only:
            if os.path.isfile(status_path):
                with open(status_path) as status_file:
                    if status_file.read() == "Complete":
                        continue
        else:
            # Reset the marker so a stale "Complete" cannot mask this rerun.
            with open(status_path, "w") as status_file:
                status_file.write("")
        log_path = os.path.join(data_dir, sample, "count_%j.out")
        command = [
            "sbatch", "--mem={0}".format(memory), "-J", sample, "-o", log_path, "-x", "node02",
            script_path, dataset_name, bam_path, boundaries_path,
            out_pattern_base.format(sample), status_path,
        ]
        print(" ".join(command))
        pending.append(command)
    for command in pending:
        # Retry indefinitely on the scheduler's transient socket timeout.
        while True:
            try:
                submitted = subprocess.run(command, check=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
                print(str(submitted.stdout, 'utf-8').rstrip())
                break
            except subprocess.CalledProcessError as exc:
                error_text = str(exc.stderr, 'utf-8').rstrip()
                print(error_text)
                if error_text == timeout_msg:
                    print("Retrying Submit")
                    continue
                raise
if __name__ == '__main__':
    # Worker script lives next to this dispatcher.
    curr_path = os.path.abspath(os.path.dirname(__file__))
    script_path = os.path.join(curr_path, "count_reads.py")
    # Shared gene-boundary annotation used by every dataset below.
    boundaries_path = "/agusevlab/DATA/ANNOTATIONS/gencode.v26lift37.annotation.patched_contigs.gtf"
    # Ye lab (except "flare" bams)
    data_path_ye = "/agusevlab/awang/sc_le"
    bam_path_ye = os.path.join(data_path_ye, "processed")
    names_ye = os.listdir(bam_path_ye)
    out_pattern_base_ye = os.path.join(data_path_ye, "genes/{{0}}/bamdata/{{0}}_{0}.pickle")
    # dispatch(script_path, "Ye", bam_path_ye, boundaries_path, names_ye, out_pattern_base_ye, 2000)
    # dispatch(script_path, "Ye", bam_path_ye, boundaries_path, names_ye, out_pattern_base_ye, 2000, fails_only=True)
    # Kellis 48
    data_path_kellis = "/agusevlab/awang/sc_kellis"
    bam_path_kellis = os.path.join(data_path_kellis, "processed")
    names_kellis = os.listdir(bam_path_kellis)
    out_pattern_base_kellis = os.path.join(data_path_kellis, "genes/{{0}}/bamdata/{{0}}_{0}.pickle")
    # dispatch(script_path, "Kellis", bam_path_kellis, boundaries_path, names_kellis, out_pattern_base_kellis, 2000)
    # dispatch(script_path, "Kellis", bam_path_kellis, boundaries_path, names_kellis, out_pattern_base_kellis, 10000, fails_only=True)
    # Kellis 429
    data_path_kellis = "/agusevlab/awang/sc_kellis"
    bam_path_kellis = os.path.join(data_path_kellis, "processed_429")
    names_kellis = os.listdir(bam_path_kellis)
    out_pattern_base_kellis = os.path.join(data_path_kellis, "genes_429/{{0}}/bamdata/{{0}}_{0}.pickle")
    # dispatch(script_path, "Kellis_429", bam_path_kellis, boundaries_path, names_kellis, out_pattern_base_kellis, 2000)
    # dispatch(script_path, "Kellis_429", bam_path_kellis, boundaries_path, names_kellis, out_pattern_base_kellis, 5000, fails_only=True)
    # Kellis 429 Partitioned: only these contigs were split out.
    data_path_kellis = "/agusevlab/awang/sc_kellis"
    contigs = ["9", "10", "11", "12", "13", "14", "15", "17"]
    bam_path_kellis = os.path.join(data_path_kellis, "partitioned_429")
    names_kellis = os.listdir(bam_path_kellis)
    print(names_kellis) ####
    out_pattern_base_kellis = os.path.join(data_path_kellis, "genes_429/{{0}}/bamdata/{{0}}_{0}.pickle")
    # dispatch(script_path, "Kellis_429", bam_path_kellis, boundaries_path, names_kellis, out_pattern_base_kellis, 2000)
    # dispatch(script_path, "Kellis_429", bam_path_kellis, boundaries_path, names_kellis, out_pattern_base_kellis, 5000, fails_only=True)
|
13,653 | 85692cf4a4fe3d088dc6aca36318f0ef54cf00b5 | """
Created on Mar 2, 2012
@author: Alex Hansen
"""
# local import
# Short label shown when this experiment type is parsed/selected.
parse_line = "A simple CEST experiment"

# Free-form help text template for this experiment module.
description = \
''' HELP NOT YET IMPLEMENTED
Use 4-space indentation in description.
Add measured spin to parse_line. ie)
15N - one line experiment description
Parameters below are fine'''

# Placeholder literature citation for the pulse sequence; update with the
# real journal/year/volume/pages for the implemented experiment.
reference = {'journal': 'Journal',
             'year': 1900,
             'volume': 0,
             'pages': '1-10'
             }
|
13,654 | f3db338a83c3dc93ad198e730cbf7b90a538f5a0 | from tkinter import *
# Main application window.
ventana = Tk()
ventana.title("Calculadora")
# Next insertion position inside the display Entry; button callbacks below
# keep it in sync with what has been typed.
indice = 0
# Callback functions
def click_boton(valor):
    """Insert the pressed button's value into the display at the cursor index."""
    # `global` lets this callback update the module-level insertion index.
    global indice
    pantalla.insert(indice, valor)
    indice += 1
def limpiar_pantalla():
    """Clear the display and reset the insertion index."""
    # BUG FIX: without this `global`, `indice = 0` bound a new local and the
    # module-level insertion index was never actually reset.
    global indice
    # Clear the display from position 0 to the end.
    pantalla.delete(0, END)  # END <=> tkinter.END
    indice = 0
def operacion():
    """Evaluate the expression on the display and show the result."""
    # BUG FIX: declare `indice` global so the reset below takes effect instead
    # of creating an unused local variable.
    global indice
    ecuacion = pantalla.get()
    # NOTE(security): eval() executes arbitrary Python; tolerable here only
    # because input is assembled from the calculator's own buttons.
    resultado = eval(ecuacion)
    pantalla.delete(0, END)        # clear the display
    pantalla.insert(0, resultado)  # show the result
    indice = 0
# Display widget spanning all four button columns.
pantalla = Entry(ventana, font=("Arial", 15))
pantalla.grid(row=0, column=0, columnspan=4, padx=10, pady=(10, 5), ipady=10, sticky="WE")
# sticky="WE" stretches the entry horizontally; ipady adds inner height.
# Buttons: digits, parentheses, decimal point and the four operators.
boton1 = Button(ventana, text="1", width=5, height=2, command=lambda:click_boton(1))
boton2 = Button(ventana, text="2", width=5, height=2, command=lambda:click_boton(2))
boton3 = Button(ventana, text="3", width=5, height=2, command=lambda:click_boton(3))
boton4 = Button(ventana, text="4", width=5, height=2, command=lambda:click_boton(4))
boton5 = Button(ventana, text="5", width=5, height=2, command=lambda:click_boton(5))
boton6 = Button(ventana, text="6", width=5, height=2, command=lambda:click_boton(6))
boton7 = Button(ventana, text="7", width=5, height=2, command=lambda:click_boton(7))
boton8 = Button(ventana, text="8", width=5, height=2, command=lambda:click_boton(8))
boton9 = Button(ventana, text="9", width=5, height=2, command=lambda:click_boton(9))
boton0 = Button(ventana, text="0", width=5, height=2, command=lambda:click_boton(0))
borrar = Button(ventana, text="AC", width=5, height=2, command=limpiar_pantalla)
parentesis_1 = Button(ventana, text="(", width=5, height=2, command=lambda:click_boton("("))
parentesis_2 = Button(ventana, text=")", width=5, height=2, command=lambda:click_boton(")"))
punto = Button(ventana, text=".", width=5, height=2, command=lambda:click_boton("."))
# The ÷ and × glyphs are display-only; the callbacks insert '/' and '*'.
division = Button(ventana, text="÷", width=5, height=2, command=lambda:click_boton("/"))
multiplicacion = Button(ventana, text="×", width=5, height=2, command=lambda:click_boton("*"))
suma = Button(ventana, text="+", width=5, height=2, command=lambda:click_boton("+"))
resta = Button(ventana, text="-", width=5, height=2, command=lambda:click_boton("-"))
igual = Button(ventana, text="=", width=5, height=2, command=operacion)
# Grid layout: 4 columns; row 1 = AC ( ) ÷, rows 2-4 = digits + operator,
# row 5 = wide 0, point, equals.
borrar.grid(row=1, column=0, padx=(10, 5), pady=5)
parentesis_1.grid(row=1, column=1, padx=5, pady=5)
parentesis_2.grid(row=1, column=2, padx=5, pady=5)
division.grid(row=1, column=3, padx=(5, 10), pady=5)
boton7.grid(row=2, column=0, padx=(10, 5), pady=5)
boton8.grid(row=2, column=1, padx=5, pady=5)
boton9.grid(row=2, column=2, padx=5, pady=5)
multiplicacion.grid(row=2, column=3, padx=(5, 10), pady=5)
boton4.grid(row=3, column=0, padx=(10, 5), pady=5)
boton5.grid(row=3, column=1, padx=5, pady=5)
boton6.grid(row=3, column=2, padx=5, pady=5)
suma.grid(row=3, column=3, padx=(5, 10), pady=5)
boton1.grid(row=4, column=0, padx=(10, 5), pady=5)
boton2.grid(row=4, column=1, padx=5, pady=5)
boton3.grid(row=4, column=2, padx=5, pady=5)
resta.grid(row=4, column=3, padx=(5, 10), pady=5)
boton0.grid(row=5, column=0, columnspan=2, padx=(10, 5), pady=(5, 10), sticky="WE")
punto.grid(row=5, column=2, padx=5, pady=(5, 10))
igual.grid(row=5, column=3, padx=(5, 10), pady=(5, 10))
# Start the Tk event loop (blocks until the window is closed).
ventana.mainloop()
|
13,655 | 079a712fa8b759cafbc90329e513167b883014c3 | # -*- coding: utf-8 -*-
import os

# Guard: refuse to load unless one of the studio hq_toolbox shares is
# reachable (primary server or the XMFTDYPROJECT mirror).
if not (os.path.exists(r'\\10.99.1.6\Digital\Library\hq_toolbox')
        or os.path.exists(r'\\XMFTDYPROJECT\digital\film_project\Tool\hq_toolbox')):
    # Include a message so the failure is diagnosable from the script editor.
    raise IOError('hq_toolbox network share is not reachable')
#####################################################################################
import maya.cmds as cmds
import os
#---------w09 Start
class w09_playBlastWin(object):
    """Camera playblast window.

    Lists the scene's cameras and playblasts each selected one to a JPEG
    image sequence, clamping the capture resolution to the active viewport
    so the viewport never has to be resized.
    """
    # Menu registration metadata consumed by the toolbox loader (kept verbatim).
    _menuStr = '''{'path':'Window/w09_playBlastWin()',
                'icon':':/timeplay.png',
                'tip' : '拍屏,自动绽放防止切屏',
                'html':True,
                'usage':'$fun()',
                }
    '''
    def __init__(self):
        """Build (or rebuild) the window and populate the camera list."""
        windowName = 'w09_pb'
        # Rebuild from scratch if a previous instance is still open.
        if cmds.window( windowName, exists=True):
            cmds.deleteUI( windowName)
        sceneName = cmds.file(q=True,sn=True,shortName=True).split('.')[0]
        cmds.window(windowName, title="w09_playBlastWin",w=450, sizeable=1)
        cmds.columnLayout("w09_L01", p=windowName, adj=True)
        # Multi-select list of camera transforms; enabled once populated.
        cmds.textScrollList( 'w09_uiCameras', p="w09_L01", numberOfRows=8, allowMultiSelection=True, en=0, h=300, bgc=[.2,.2,.2], showIndexedItem=4)
        cmds.button( 'w09_uiGetCameras', p="w09_L01", label="Get Cameras", h=40, c=self.w09_getCameras_cmds )
        cmds.separator( p="w09_L01", st="double", h=15)
        cmds.button( 'w09_uiPbutton', p="w09_L01", label="Playblast", h=40, c=self.w09_playblast_cmd )
        cmds.showWindow(windowName)
        self.w09_getCameras_cmds()

    def w09_playblast_cmd(self, *args):
        """Playblast every selected camera, clamped to the viewport size."""
        # Clamp the render resolution to the active viewport, preserving the
        # render aspect ratio.
        import maya.OpenMayaUI as omui
        curView = omui.M3dView.active3dView()
        portWidth = curView.portWidth()
        portHeight = curView.portHeight()
        resWidth = cmds.getAttr( 'defaultResolution.width' )
        resHeight = cmds.getAttr( 'defaultResolution.height' )
        resAspect = float(resWidth)/resHeight
        if resWidth>portWidth or resHeight>portHeight:
            if portWidth<portHeight:
                # BUG FIX: was the undefined name `portWdith` (NameError on
                # portrait-shaped viewports).
                resWidth, resHeight = portWidth, int(portWidth/resAspect)
            else: #portHeight<portWidth
                resWidth, resHeight = int(portHeight*resAspect), portHeight
        # Find the first visible model panel to playblast through.
        for mPanel in cmds.getPanel(vis=True):
            if cmds.modelPanel(mPanel, exists=True):
                break
        else:
            raise IOError( 'No found modelPanel!' )
        sceneName = cmds.file(q=True,sn=True,shortName=True).split('.')[0]
        for ca in cmds.textScrollList( 'w09_uiCameras', q=True, si=True):
            camShortName = ca.split('|')[-1].replace(":", '_')
            # Point the model panel at this camera and hide gate overlays.
            cameraShape = cmds.listRelatives( ca, shapes=True, typ='camera', f=True)[0]
            cmds.modelPanel(mPanel, e=True, camera=cameraShape)
            cmds.camera(cameraShape, e=True, displayResolution=False, displayGateMask=False, displayFilmGate=False)
            filenameV = 'playblast/%s/%s/%s'%(sceneName,camShortName, camShortName);
            cmds.playblast( format='iff', filename=filenameV, sequenceTime=False, viewer=False, clearCache=True, showOrnaments=True, fp=4, percent=100, compression="jpg", quality=100, wh=[resWidth, resHeight] )
        # Open the output folder in Explorer when it exists.
        imDir = cmds.workspace(q=True,rd=True)
        imDir = os.path.join( imDir, 'images/playblast/%s'%(sceneName) )
        if os.path.exists(imDir):
            os.startfile( imDir )

    def w09_getCameras_cmds(self, *args):
        """Refresh the scroll list with every camera transform in the scene."""
        cmds.textScrollList('w09_uiCameras', e=True, en=True, removeAll=True)
        for ca in cmds.ls(cameras=True):
            # List the camera's transform (full path) rather than its shape.
            camPapa = cmds.listRelatives( ca, parent=True, f=True)
            cmds.textScrollList('w09_uiCameras', e=True, append=camPapa)
#---------------w09 End
|
13,656 | fe2a1e1fea20a1e43b0673e95170f1358f861dac | #Avery Tan(altan:1392212), Canopus Tong(canopus:1412275)
#
#
#
#Requires queue python library
#
#
#
import p1
from queue import PriorityQueue
class Node(object):
    """Search-tree node wrapping a grid coordinate for use in a PriorityQueue.

    Attributes:
        coor: (x, y) tuple for this cell.
        g: path cost from the start node.
        h: heuristic estimate to the goal.
        f: total priority, g + h (the queue orders by this).
        parent: Node expanded to reach this one (None for the root).
    """
    def __init__(self,coor):
        self.coor = coor
        self.g=0
        self.h=0
        self.f=0
        self.parent = None
    def __lt__(self,other):
        # BUG FIX: the original returned None (falsy) instead of False when
        # self.f >= other.f; rich comparisons must return a bool.
        return self.f < other.f
def find_valid_moves(curr_coor, grid_coors):
    """Return the empty neighbouring cells reachable from curr_coor.

    Args:
        curr_coor: (x, y) tuple of the current position.
        grid_coors: two lists — [0] holds tuples of all empty cells,
            [1] holds tuples of all obstacle cells.

    Returns:
        List of reachable neighbour tuples, in up/down/left/right order.
    """
    x, y = curr_coor
    empty_cells = grid_coors[0]
    # Candidate neighbours in the fixed order: up, down, left, right.
    candidates = ((x, y - 1), (x, y + 1), (x - 1, y), (x + 1, y))
    return [cell for cell in candidates if cell in empty_cells]
def a_star(start, goal, grid, Htype):
    """
    A* search over the grid, tracking Open/Closed peak sizes for reporting.

    inputs: start = tuple representing starting coordinates
            goal  = tuple representing goal coordinates
            grid  = list containing the lists of free cells and obstacles
            Htype = '0' for h=0 (uniform-cost) or 'M' for Manhattan distance
    returns the formatted result string specified in the assignment spec;
    cost is -1 when no path exists.
    """
    Open = PriorityQueue()
    Closed = dict()
    maxOpen = 1 #we already have one element in Open, the start state
    maxClosed = 0
    def get_sol(cn):
        '''
        Reconstruct the move string by walking parent links from the goal
        node `cn` back to the start, then format the full result line
        (heuristic, cost, peak Open/Closed sizes, endpoints, moves).
        '''
        list_of_moves = ''
        # Maps the (dx, dy) step from parent to child onto its move letter.
        moves = {(0,1):'D', (0,-1):'U', (1,0):'R', (-1,0):'L'}
        while cn.coor != start:
            parent_node = cn.parent
            x = cn.coor[0]-parent_node.coor[0]
            y = cn.coor[1]-parent_node.coor[1]
            ultimate_action = moves[(x,y)]
            # Prepend: we are walking goal -> start but report start -> goal.
            list_of_moves = ultimate_action + list_of_moves
            cn = parent_node
        cost = len(list_of_moves)
        result_string = 'h='+Htype+' '+str(cost)+' '+str(maxOpen)+' '+str(maxClosed)+' '+\
        str(start[0])+' '+str(start[1])+' '+str(goal[0])+' '+str(goal[1])+' '+list_of_moves
        return result_string
    s = Node(start) #create a Node object and place it into the priority Q
    Open.put(s)
    while not Open.empty():
        curr_node = Open.get()
        # Skip duplicates: a cheaper copy of this cell was already expanded.
        if curr_node.coor in Closed:
            continue
        else:
            Closed[curr_node.coor]=curr_node
        if len(Closed)>maxClosed: #update the max size of Closed
            maxClosed = len(Closed)
        # Goal test happens on expansion, which is required for optimality.
        if curr_node.coor[0] == goal[0] and curr_node.coor[1]== goal[1]:
            solution = get_sol(curr_node)
            return solution
        # Expand the search.
        curr_node_children= find_valid_moves(curr_node.coor,grid)
        for i in curr_node_children: #reachable next states as tuples
            successor=Node(i)
            successor.parent= curr_node
            successor.g = curr_node.g+1
            if Htype == 'M':
                # Manhattan distance heuristic.
                successor.h = abs(successor.coor[0]-goal[0])+abs(successor.coor[1]-goal[1])
            else:
                successor.h = 0
            successor.f = successor.h+successor.g
            if successor.coor not in Closed:
                Open.put(successor)
            if Open.qsize()>maxOpen: #keep track of max size of Open
                maxOpen = Open.qsize()
    # FAILURE: Open exhausted without reaching the goal; cost reported as -1.
    result_string = 'h='+Htype+' '+'-1'+' '+str(maxOpen)+' '+str(maxClosed)+' '+\
    str(start[0])+' '+str(start[1])+' '+str(goal[0])+' '+str(goal[1])
    return result_string
def main():
    """Read the grid and problem list from stdin and run A* twice per problem
    (once with h=0, once with the Manhattan heuristic), printing each result."""
    N = int(input()) # Get the size of a NxN grid from stdin.
    grid = list() #where we will store each row of the grid as a list of strings
    sg = [] #where we will store start and goal states as a list of tuples (s,g)
    for i in range(1, N + 1): # Get the grid from stdin and put them into a list.
        grid.append(list(input()))
    # Exactly one line follows the grid: the number of problems P.
    for i in range(N + 1, N + 2):
        P = int(input())
    for i in range(N + 2, N + 2 + P): # Read the problems
        input_sg = input().split()
        s = (int(input_sg[0]), int(input_sg[1]))
        g = (int(input_sg[2]), int(input_sg[3]))
        input_sg = (s,g)
        sg.append(input_sg)
    grid_coors = p1.read(grid) # Calls read() and create data.
    # Go through each (start, goal) pair and run A* with both heuristics.
    for i in range(len(sg)):
        ans=a_star(sg[i][0],sg[i][1],grid_coors, '0') #no heuristics
        print(ans)
        ans=a_star(sg[i][0],sg[i][1],grid_coors, 'M') # manhattan distance heuristics
        print(ans)
if __name__ == "__main__":
    x=main()
13,657 | 3067bef87f622d3acfb727157a811e99a17a3b27 | from random import randrange
# Rock-paper-scissors (kamen-nuzky-papir) against the computer, Czech UI.
pokracovani_hry = "hra"
# Each move beats exactly one other move.
vitezi_nad = {"kamen": "nuzky", "nuzky": "papir", "papir": "kamen"}
while pokracovani_hry == "hra":
    # Computer picks uniformly among the three moves.
    tah_pocitace = ("kamen", "nuzky", "papir")[randrange(0, 3)]
    tah_hrace = input ("zvol kamen, nuzky nebo papir ")
    print ("tah pocitace je:", tah_pocitace)
    if tah_hrace not in vitezi_nad:
        # Invalid input can never equal the computer's move, so checking it
        # first preserves the original behaviour.
        print("špatně zadaný tah hráče ")
    elif tah_hrace == tah_pocitace:
        print ("plichta")
    elif vitezi_nad[tah_hrace] == tah_pocitace:
        print ("vyhrál jsi")
    else:
        # BUG FIX: the original printed the typo "prohrál jis" on one branch.
        print ("prohrál jsi")
    pokracovani_hry = input ( "Pokud chcete hrát dál napište - hra, pokud nechcte dál hrát napište - konec ")
print ("Tak si zahrajeme zase příště")
13,658 | 30d7be1c038a5b424b728594c550ce801ee65d70 | import numpy as np
from sklearn.cluster import KMeans
def generate_points():
    """Generate 2500 random 2-D integer points in four cluster-like regions.

    Points are assigned round-robin (index mod 4) to one of four rectangular
    regions, so roughly a quarter of the points falls in each region.

    Returns:
        numpy integer array of shape (2500, 2).
    """
    N = 50
    # Seed grid only fixes the array shape/dtype; every row is overwritten.
    points = np.asarray([(x, y) for x in range(N) for y in range(N)])
    # PORTABILITY FIX: use range instead of the Python-2-only xrange; also
    # use elif since the i%4 branches are mutually exclusive.
    for i in range(len(points)):
        if i % 4 == 0:
            a = np.random.randint(10, 100)
            b = np.random.randint(10, 100) * 2
        elif i % 4 == 1:
            a = np.random.randint(150, 200) * 2
            b = np.random.randint(150, 200)
        elif i % 4 == 2:
            a = np.random.randint(500, 1000)
            b = np.random.randint(1, 1000)
        else:  # i % 4 == 3
            a = np.random.randint(200, 300)
            b = np.random.randint(200, 300) * 3
        points[i, 0] = a
        points[i, 1] = b
    return points
# NOTE: Python 2 script (uses the print statement); run under python2.
if __name__ =='__main__':
    points = generate_points()
    # Fixed seed for KMeans centroid initialisation, echoed in the title.
    random_state = 80
    y_pred = KMeans(n_clusters=4, random_state=random_state).fit_predict(points)
    # First 16 labels: with round-robin generation these cycle the 4 regions.
    print y_pred[0:16]
    import matplotlib.pyplot as plt
    #plt.subplot(211)
    #plt.scatter(points[:, 0], points[:, 1])
    plt.scatter(points[:, 0], points[:, 1], c=y_pred)
    plt.title("K-Means CLustering random number "+str(random_state))
    plt.show()
13,659 | dc0b4f6aa2b2818dcf6ff78068892e5a0a1812b3 | from django.shortcuts import render, redirect, reverse
# Create your views here.
from django.contrib.auth.decorators import login_required
from django.views.decorators.http import require_POST
from shopcart.models import ShopCart
from goods.models import Goods
from users.models import Address
from . import models
@require_POST
def confirm(req):
    """Render the order-confirmation page for the cart rows the user selected."""
    selected_ids = req.POST.getlist('s_id')
    selected_carts = ShopCart.objects.filter(pk__in=selected_ids)
    user_addresses = Address.objects.filter(user=req.user)
    context = {'shopCarts': selected_carts, 'addresses': user_addresses}
    return render(req, 'orders/confirm.html', context)
def pay(req):
    """Payment entry point (not implemented)."""
    # Third-party payment integrations (Alipay, WeChat Pay, online banking,
    # etc.) would be wired in here.
    pass
@require_POST
def done(req):
    """Create an Order plus one OrderItem per selected cart row, then
    redirect to the order list."""
    cart_ids = req.POST.getlist('s_id')
    remark = req.POST['remark']
    selected_carts = ShopCart.objects.filter(pk__in=cart_ids)
    address = Address.objects.get(pk=req.POST['address'])
    # Flatten the delivery address into a single '|'-separated string.
    full_address = '|'.join([address.province, address.city, address.area,
                             address.street, address.desc])
    # Save the order first so the items below can reference its primary key;
    # the real total is filled in after the items are created.
    order = models.Order(recv_address=full_address, user=req.user,
                         recv_name=address.recv_name, recv_tel=address.recv_tel,
                         allPrice=0, remark=remark)
    order.save()
    running_total = 0
    for cart_row in selected_carts:
        goods = cart_row.goods
        models.OrderItem(goods_id=goods.id,
                         goods_img=goods.goodsimage_set.all().first().path,
                         goods_name=goods.name, goods_price=goods.price,
                         goods_count=cart_row.count,
                         goods_allprice=cart_row.allTotal,
                         order=order).save()
        running_total += cart_row.allTotal
    order.allPrice = running_total
    order.save()
    return redirect(reverse('orders:list'))
@login_required
def list(req):
    """Show the logged-in user's orders.

    NOTE(review): the name shadows the builtin ``list``; kept as-is because
    the URLconf references this view by name.
    """
    orders = models.Order.objects.filter(user=req.user)
    return render(req, 'orders/list.html', {'orders': orders,})
@login_required
def delete(req, oid):
    """Delete one of the logged-in user's own orders and return to the list.

    SECURITY FIX: the original deleted any order by primary key with no
    authentication or ownership check, letting any visitor delete any
    user's order. Require login and scope the lookup to the requester.
    """
    order = models.Order.objects.get(pk=oid, user=req.user)
    order.delete()
    return redirect(reverse('orders:list'))
def detail(req, o_id):
    """Order detail page (not implemented yet)."""
    pass
|
13,660 | b0a051ebe1b86ae35e6b0da58c491ed274958021 | from datetime import datetime as dt
def None_check(key, value):
    """Return value, normalising the empty string to None.

    Logs a message naming the field when the value is empty.
    """
    is_empty = value is None or value == ''
    if not is_empty:
        return value
    print(f'{key} is empty.')
    return None
def str_check(key, value):
    """Return value unchanged; raise TypeError unless it is exactly a str."""
    if type(value) is str:  # exact-type check on purpose (no subclasses)
        return value
    raise TypeError(f'{key} must be str type')
def int_check(key, value):
    """Return value unchanged; raise TypeError unless it is exactly an int.

    The exact-type check deliberately rejects bool (a subclass of int);
    bool fields go through bool_check instead.
    """
    if type(value) is int:
        return value
    raise TypeError(f'{key} must be int type')
def bool_check(key, value):
    """Return value unchanged; raise TypeError unless it is exactly a bool."""
    if type(value) is bool:
        return value
    raise TypeError(f'{key} must be bool type')
def datetime_check(key, value):
    """Parse value as a '%Y%m%d%H%M' timestamp string.

    Args:
        key: field name, used only in the error message.
        value: string such as '202001021530'.

    Returns:
        datetime parsed from value.

    Raises:
        ValueError: if value does not match the expected format; the
            original strptime error is chained as the cause.
    """
    try:
        return dt.strptime(value, '%Y%m%d%H%M')
    except ValueError as e:
        # IMPROVEMENT: chain the underlying error (`from e`) so the root
        # cause stays visible in tracebacks instead of being discarded.
        raise ValueError(f'{key} must be datetime format like %Y%m%d%H%M') from e
13,661 | 2dddb7ca2fc22ca95409ea48aeb8aca65cdf430c | # GENERATED BY KOMAND SDK - DO NOT EDIT
import insightconnect_plugin_runtime
import json
class Component:
    """Human-readable action description (generated by the Komand SDK)."""
    DESCRIPTION = "Returns a risk list of URLs matching a filtration"
class Input:
    """Input parameter name constants for the download_url_risk_list action."""
    LIST = "list"
class Output:
    """Output parameter name constants for the download_url_risk_list action."""
    RISK_LIST = "risk_list"
class DownloadUrlRiskListInput(insightconnect_plugin_runtime.Input):
    """JSON-schema-validated input for the action (SDK-generated; the schema
    string below must stay byte-identical to the generator's output)."""
    schema = json.loads("""
   {
  "type": "object",
  "title": "Variables",
  "properties": {
    "list": {
      "type": "string",
      "title": "List",
      "description": "The risk list to retrieve, left this field blank to retrieve default risk list",
      "enum": [
        "Historically Reported by Insikt Group",
        "C\\u0026C URL",
        "Compromised URL",
        "Historically Reported as a Defanged URL",
        "Historically Reported by DHS AIS",
        "Historically Reported Fraudulent Content",
        "Historically Reported in Threat List",
        "Large",
        "Historically Detected Malicious Browser Exploits",
        "Historically Detected Malware Distribution",
        "Historically Detected Cryptocurrency Mining Techniques",
        "Historically Detected Phishing Techniques",
        "Active Phishing URL",
        "Positive Malware Verdict",
        "Ransomware Distribution URL",
        "Recently Reported by Insikt Group",
        "Recently Reported as a Defanged URL",
        "Recently Reported by DHS AIS",
        "Recently Reported Fraudulent Content",
        "Recently Detected Malicious Browser Exploits",
        "Recently Detected Malware Distribution",
        "Recently Detected Cryptocurrency Mining Techniques",
        "Recently Detected Phishing Techniques",
        "Recent Ransomware Distribution URL",
        "Recently Referenced by Insikt Group",
        "Recently Reported Spam or Unwanted Content",
        "Recently Detected Suspicious Content",
        "Recently Active URL on Weaponized Domain",
        "Historically Referenced by Insikt Group",
        "Historically Reported Spam or Unwanted Content",
        "Historically Detected Suspicious Content"
      ],
      "order": 1
    }
  }
}
    """)

    def __init__(self):
        super(self.__class__, self).__init__(self.schema)
class DownloadUrlRiskListOutput(insightconnect_plugin_runtime.Output):
    """JSON-schema-validated output for the action (SDK-generated; the schema
    string below must stay byte-identical to the generator's output)."""
    schema = json.loads("""
   {
  "type": "object",
  "title": "Variables",
  "properties": {
    "risk_list": {
      "type": "object",
      "title": "Risk List",
      "description": "Risk list of matching URLs",
      "order": 1
    }
  },
  "required": [
    "risk_list"
  ]
}
    """)

    def __init__(self):
        super(self.__class__, self).__init__(self.schema)
|
13,662 | d22845051ec9d63aa9a92c6696656a09c911cc77 | # -*- coding: utf-8 -*-
"""
DECAWAVE DW1000 ANTENNA DELAY CALIBRATION SOFTWARE (x64)
NOTES:
-This program uses the DecaWave DW1000 Arduino adapter board made by Wayne
Holder to determine the anntena delay value of one anchor and one receiver
-Note that this script is very preliminary; it currently requires the user
to manually set the antenna delay to zero in DW1000.cpp (line:
writeValueToBytes(antennaDelayBytes, 16384, LEN_STAMP);)
-The calibration method involves the following steps:
1. Set the antenna delay to zero in DW1000.cpp
2. Fix the anchor and tag 1 meter apart
3. Run this script to determine the antenna delay values
Created: Mon June 5 15:37 2017
Last updated: Fri Aug 18 10:44 2017
Author: Alex Naylor
FUTURE ADDITIONS:
-Make antenna delay calibration automated
-Means updating Arduino code
-Update the test cancellation feature to handle mid-test stoppage and to save
data on quit
-------------------------------------------------------------------------------
Executable: N/A
DW1000gui: V0.5.0
DW1000test: V0.5.0
DW1000serial: V0.4.0
-------------------------------------------------------------------------------
CHANGELOG (V0.5.0):
AN:
-When browsing for files, the browser now opens to the last opened directory
-Changed the way the start and stop distance spinboxes work
"""
#==========================================================================
# IMPORTS
#==========================================================================
import ast
import DW1000test
import os
import sys
from PyQt5 import (QtGui,QtCore,QtWidgets)
from scipy.stats import norm
#==========================================================================
# ANTENNA CALIBRATION THREAD
#==========================================================================
class distMeasThread(QtCore.QObject):
"""
Must derive from QObject in order to emit signals, connect slots to other signals, and operate in a QThread.
"""
sig_done = QtCore.pyqtSignal() # ask the thread to end on completion
sig_msg = QtCore.pyqtSignal(str, str) # GUI field, GUI string
    def __init__(self, id : int, testInfoDict, plotInfoDict):
        """Set up the worker's DW1000 test driver and per-distance result stores.

        Args:
            id: numeric worker identifier assigned by the GUI.
            testInfoDict: general test configuration (start distance, delays, ...).
            plotInfoDict: plotting configuration (file source, file name, ...).
        """
        super().__init__()
        self.__id = id
        self.__abort = False  # set True to request early termination
        self.DW1000 = DW1000test.DW1000test(testInfoDict=testInfoDict) #make an instance of DW1000test
        self.plotInfoDict = plotInfoDict #plot information
        self.testInfoDict = testInfoDict #general test information
        self.anchorDict = {} #holds anchor distance readings at each distance
        self.tagDict = {} #holds tag distance readings at each distance
        self.loopTimeDict = {} #holds loop time at each distance
        #test variables
        self.curDist = self.testInfoDict["startDist"]
def setup(self):
"""
Pretend this worker method does work that takes a long time. During this time, the thread's
event loop is blocked, except if the application's processEvents() is called: this gives every
thread (incl. main) a chance to process events, which in this sample means processing signals
received from GUI (such as abort).
"""
if self.plotInfoDict["useFile"] == False:
self.DW1000.clearBuffers() #Clear all the buffers in case they aren't empty
anchorResult = self.DW1000.deviceConnect("anchor")
#If there was an issue connecting to the anchor, throw a warning and return
if not (anchorResult == True):
self.sig_msg.emit("errGeneralMsgBox","Could not connect to {0}.".format(anchorResult))
self.sig_done.emit()
return
tagResult = self.DW1000.deviceConnect("tag")
#If there was an issue connecting to the devices, throw a warning and return
if not (tagResult == True):
self.sig_msg.emit("errGeneralMsgBox","Could not connect to {0}.".format(tagResult))
self.sig_done.emit()
return
if not (self.DW1000.anchor.setAntennaDelay(self.testInfoDict["anchorAntDelayDec"])):
if not (self.DW1000.anchor.setAntennaDelay(self.testInfoDict["anchorAntDelayDec"])):
self.sig_msg.emit("errGeneralMsgBox","Error setting anchor\n"\
"antenna delay value")
self.sig_done.emit()
return
if not (self.DW1000.tag.setAntennaDelay(self.testInfoDict["tagAntDelayDec"])):
if not (self.DW1000.tag.setAntennaDelay(self.testInfoDict["tagAntDelayDec"])):
self.sig_msg.emit("errGeneralMsgBox","Error setting tag\n"\
"antenna delay value")
self.sig_done.emit()
return
self.sig_msg.emit("infoThreadMsgBox","Please move the device to {0} cm\n"\
"and press 'OK' to continue.".format(self.testInfoDict["startDist"]))
else:
result = self.DW1000.fileRead(self.plotInfoDict["fileName"])
self.sig_msg.emit("statusBar","STATUS: Reading CSV file...")
if (result == None):
self.sig_msg.emit("errGeneralMsgBox","Unexpected value in .csv file!")
self.sig_done.emit()
return
try: distDict = ast.literal_eval(result["distDict"])
except:
self.sig_msg.emit("errGeneralMsgBox","Unexpected value in .csv file!")
self.sig_done.emit()
return
try: testInfoDict = ast.literal_eval(result["testInfoDict"])
except:
self.sig_msg.emit("errGeneralMsgBox","Unexpected value in .csv file!")
self.sig_done.emit()
return
self.DW1000.testInfoDict = testInfoDict.copy()
self.sig_msg.emit("statusBar","STATUS: Plotting data...")
self.DW1000.makeErrorPlotDist(distDict.copy(),self.plotInfoDict.copy())
self.DW1000.makeGaussianPlotDist(distDict.copy(),self.plotInfoDict.copy())
if self.plotInfoDict["scaleData"] == True:
self.plotInfoDict["scaleData"] = False
self.DW1000.makeErrorPlotDist(distDict.copy(),self.plotInfoDict.copy())
self.DW1000.makeGaussianPlotDist(distDict.copy(),self.plotInfoDict.copy())
self.sig_msg.emit("infoGeneralMsgBox","Data plotting complete.")
self.sig_done.emit()
def innerLoop(self):
self.sig_msg.emit("statusBar","STATUS: Collecting data...")
while (len(self.DW1000.anchorRangeBuffer) < self.testInfoDict["numSamples"]):
if not (self.DW1000.distMeasLoop()):
self.sig_msg.emit("errGeneralMsgBox","Lost device connection, please try again.")
self.sig_msg.emit("testProgressBar",str(0))
self.sig_msg.emit("loopProgressBar",str(0))
self.sig_msg.emit("loopProgressBar_Label","Loop time remaining: N/A")
self.DW1000.clearBuffers()
self.sig_done.emit()
return
loopProgressVal = int(len(self.DW1000.anchorRangeBuffer)*100/self.testInfoDict["numSamples"])
totalNumSamples = (self.testInfoDict["numSteps"]+1)*self.testInfoDict["numSamples"]
cumulativeSamples = (self.curDist - self.testInfoDict["startDist"])*self.testInfoDict["numSamples"]/self.testInfoDict["stepDist"]
testProgressVal = int((len(self.DW1000.anchorRangeBuffer) + cumulativeSamples)*100/totalNumSamples)
self.sig_msg.emit("testProgressBar",str(testProgressVal))
self.sig_msg.emit("loopProgressBar",str(loopProgressVal))
self.sig_msg.emit("loopProgressBar_Label","Loop time remaining: {0}".format(self.DW1000.remainTimeStr))
self.anchorDict["{0} cm".format(self.curDist)] = list(self.DW1000.anchorRangeBuffer)
self.tagDict["{0} cm".format(self.curDist)] = list(self.DW1000.tagRangeBuffer)
self.loopTimeDict["{0} cm".format(self.curDist)] = list(self.DW1000.loopTimeBuffer)
if self.testInfoDict["testType"] == "distMeas":
anchorMu, anchorSigma = norm.fit(sorted(self.anchorDict["{0} cm".format(self.curDist)]))
tagMu, tagSigma = norm.fit(sorted(self.tagDict["{0} cm".format(self.curDist)]))
self.sig_msg.emit("infoGeneralMsgBox","At {0} cm:\n"\
"Anchor average: {1:.3f} cm\n"\
"Anchor std dev: {2:.3f} cm\n"\
"Tag average: {3:.3f} cm\n"\
"Tag std dev: {4:.3f} cm\n".format(self.curDist,
anchorMu,
anchorSigma,
tagMu,
tagSigma))
self.DW1000.clearBuffers() #clear buffers for next loop
self.curDist += self.testInfoDict["stepDist"] #increase distance
if not (self.curDist > self.testInfoDict["stopDist"]): #If we're at the last distance, don't print a message
self.sig_msg.emit("statusBar","STATUS: Data collection at {0} cm complete.".format(self.curDist))
self.sig_msg.emit("infoThreadMsgBox","Please move the device to {0} cm\n"\
"and press 'OK' to continue.".format(self.curDist))
else:
self.outerLoop()
def outerLoop(self):
if (self.curDist <= self.testInfoDict["stopDist"]):
self.innerLoop()
else:
if (self.testInfoDict["testType"] == "antDelayCal"):
anchorAntDelayDec,tagAntDelayDec = self.DW1000.getAntDelay((self.testInfoDict["startDist"]/100),
self.anchorDict["{0} cm".format(self.testInfoDict["startDist"])],
self.tagDict["{0} cm".format(self.testInfoDict["startDist"])])
self.sig_msg.emit("infoGeneralMsgBox","Press 'OK' to find optimal \n"\
"antenna delay value...")
self.sig_msg.emit("testProgressBar",str(0))
self.sig_msg.emit("loopProgressBar",str(0))
self.sig_msg.emit("loopProgressBar_Label","Loop time remaining: N/A")
self.sig_msg.emit("statusBar","STATUS: Finding optimal antenna delay value...")
anchorAntDelayDec = self.DW1000.antDelayCalLoop(anchorAntDelayDec)
if (anchorAntDelayDec == None):
self.sig_msg.emit("errGeneralMsgBox","Error calibrating\n"\
"antenna delay")
self.sig_done.emit()
return
else:
self.sig_msg.emit("infoGeneralMsgBox","Calibration complete.\n"\
"Anchor antenna delay: {0}\n"\
"Tag antenna delay: {1}\n".format(anchorAntDelayDec,tagAntDelayDec))
self.sig_msg.emit("anchorDelaySpinBox",str(anchorAntDelayDec))
self.sig_msg.emit("tagDelaySpinBox",str(tagAntDelayDec))
self.testInfoDict["anchorAntDelayDec"] = anchorAntDelayDec
self.testInfoDict["tagAntDelayDec"] = tagAntDelayDec
self.DW1000.testInfoDict["device"] = "anchor"
if self.testInfoDict["testType"] == "distMeas":
self.sig_msg.emit("statusBar","STATUS: Plotting anchor data...")
self.DW1000.makeErrorPlotDist(self.anchorDict.copy(),self.plotInfoDict.copy())
self.DW1000.makeGaussianPlotDist(self.anchorDict.copy(),self.plotInfoDict.copy())
if self.plotInfoDict["scaleData"] == True:
self.plotInfoDict["scaleData"] = False
self.DW1000.makeErrorPlotDist(self.anchorDict.copy(),self.plotInfoDict.copy())
self.DW1000.makeGaussianPlotDist(self.anchorDict.copy(),self.plotInfoDict.copy())
self.plotInfoDict["scaleData"] = True
self.DW1000.fileWrite(self.anchorDict,
self.loopTimeDict)
self.DW1000.testInfoDict["device"] = "tag"
if self.testInfoDict["testType"] == "distMeas":
self.sig_msg.emit("statusBar","STATUS: Plotting tag data...")
self.DW1000.makeErrorPlotDist(self.tagDict.copy(),self.plotInfoDict.copy())
self.DW1000.makeGaussianPlotDist(self.tagDict.copy(),self.plotInfoDict.copy())
if self.plotInfoDict["scaleData"] == True:
self.plotInfoDict["scaleData"] = False
self.DW1000.makeErrorPlotDist(self.tagDict.copy(),self.plotInfoDict.copy())
self.DW1000.makeGaussianPlotDist(self.tagDict.copy(),self.plotInfoDict.copy())
self.DW1000.fileWrite(self.tagDict,
self.loopTimeDict)
self.DW1000.deviceDisconnect("anchor")
self.DW1000.deviceDisconnect("tag")
if (self.testInfoDict["testType"] == "distMeas"):
self.sig_msg.emit("infoGeneralMsgBox","Distance data collection complete.\n")
self.sig_done.emit()
def abort(self):
self.__abort = True
#==========================================================================
# GUI CLASS
#==========================================================================
class DW1000testGUI(QtWidgets.QWidget):
#==========================================================================
# CLASS VARIABLES
#==========================================================================
verNum = "0.5.0"
sig_abort_workers = QtCore.pyqtSignal() #Signal used to abort worker threads
NUM_THREADS = 1 #Maximum number of threads
#Initialize GUI parent
    #Initialize GUI parent
    def __init__(self):
        """Set defaults and test/plot configuration, then build the widgets.

        Note: instance attributes are assigned before super().__init__()
        on purpose — initWidgets() (called last) needs them all in place.
        """
        #======================================================================
        # INSTANCE VARIABLES
        #======================================================================
        #Initializations
        self.remainTimeStr = "N/A"
        self.DW1000serial = DW1000test.DW1000serial.DW1000() #so we can query COM ports and populate comboboxes
        #Supported serial baud rates, in ascending order (combobox entries)
        self.baudRates = ["110",
                          "300",
                          "600",
                          "1200",
                          "2400",
                          "4800",
                          "9600",
                          "14400",
                          "19200",
                          "38400",
                          "57600",
                          "115200",
                          "230400",
                          "460800",
                          "921600"]
        #this is here more as a placeholder to show all of the available keys
        self.testInfoDict = {"testType":"antDelayCal",
                             "numSamples":100, #number of samples to take
                             "startDist":5, #measurement start distance
                             "stopDist":100, #measurement stop distance
                             "stepDist":5, #measurement step distance
                             "device":None, #which device the test was for
                             "anchorPort":"COM14", #COM port for anchor (add to GUI)
                             "tagPort":"COM12", #COM port for tag (add to GUI)
                             "anchorBaud":115200, #baud rate for anchor (add to GUI)
                             "tagBaud":115200, #baud rate for tag (add to GUI)
                             "anchorAntDelayDec":32900, #anchor antenna delay in decimal
                             "tagAntDelayDec":0, #tag antenna delay in decimal
                             "enableDebug":False} #Whether or not to enable debug mode
        self.plotInfoDict = {"makeGaussPlot":True, #whether or not to make the gaussian part of the average plot
                             "makeHistPlot":True, #whether or not to make the histogram part of the average plot
                             "makeRefPlot":True, #whether or not to add the curve fit line to the plot
                             "scaleData":False, #Whether or not to scale data based on reference curve
                             "truncateData":False, #whether or not to truncate distance array when making graphs
                             "fileName":"", #Name of the file to read data from
                             "useFile":False, #Whether or not to use a file for data plotting
                             "show":False, #Whether or not to display the plot
                             "minTruncDist":5, #lower limit for truncation
                             "maxTruncDist":5} #upper limit for truncation
        #Number of distance steps; true division, so this is a float
        self.testInfoDict["numSteps"] = (self.testInfoDict["stopDist"] - self.testInfoDict["startDist"])/self.testInfoDict["stepDist"]
        #spinbox values
        self.distMin = 0 #minimum distance in cm
        self.stepDistMin = 1 #obviously can't have a step distance of 0
        self.distMax = 2000 #maximum distance in cm
        self.startDistDef = 5 #default start distance in cm
        self.stopDistDef = 100 #default stop distance in cm
        self.stepDistDef = 5 #default step distance in cm
        self.calDistDef = 5 #default calibration distance in cm
        self.numSamplesMax = 10000 #maximum number of samples at each distance
        self.numSamplesMin = 1 #minimum number of samples at each distance
        self.numSamplesDef = 100 #default number of samples
        self.antDelayMin = 0 #minimum antenna delay value
        self.antDelayMax = 2**16-1 #maximum antenna delay value (16-bit register)
        self.antDelayDef = 32900 #reasonable default value for antenna delay
        super().__init__()
        if getattr(sys, 'frozen', False):
            # we are running in a |PyInstaller| bundle
            self.basedir = sys._MEIPASS #Temp directory
        else:
            # we are running in a normal Python environment
            self.basedir = os.path.dirname(__file__) #Temp directory
        self.guiOnly = False #Set true if you want to test the GUI without the DW1000
        #Initialize threads
        QtCore.QThread.currentThread().setObjectName('mainThread') # threads can be named, useful for log output
        self.__workers_done = None
        self.__threads = None
        self.initWidgets()
        self.refreshComPorts()
#==========================================================================
    # GUI-RELATED INITIALIZATION FUNCTIONS
#==========================================================================
#Initialize GUI widgets
    #Initialize GUI widgets
    def initWidgets(self):
        """Create every widget, wire up its signals, and lay out the window.

        The window is a single QGridLayout with three columns of controls
        separated by vertical frames: device setup / test progress (left),
        calibration + distance-test configuration (middle), and plot
        settings (right).  Construction order matters: labels/spinboxes are
        created before the lambdas and layout code that reference them.
        """
        #FRAMES (plain line frames used as visual separators)
        self.Vframe1 = QtWidgets.QFrame(self)
        self.Vframe1.setFrameStyle(QtWidgets.QFrame.VLine)
        self.Vframe2 = QtWidgets.QFrame(self)
        self.Vframe2.setFrameStyle(QtWidgets.QFrame.VLine)
        self.mainHframe = QtWidgets.QFrame(self)
        self.mainHframe.setFrameStyle(QtWidgets.QFrame.HLine)
        self.deviceSetupHframe = QtWidgets.QFrame(self)
        self.deviceSetupHframe.setFrameStyle(QtWidgets.QFrame.HLine)
        self.calConfigHframe = QtWidgets.QFrame(self)
        self.calConfigHframe.setFrameStyle(QtWidgets.QFrame.HLine)
        #Information widgets
        self.guiStatusBar = QtWidgets.QStatusBar(self)
        #Need separate message boxes as the thread box triggers the thread's outer loop
        self.generalMsgBox = QtWidgets.QMessageBox(self)
        #Deals with messages involving the thread; clicking any button resumes the worker
        self.threadMsgBox = QtWidgets.QMessageBox(self)
        self.threadMsgBox.buttonClicked.connect(self.workerLoop)
        self.threadMsgBox.closeEvent = self.msgBoxCloseEvent
        #Labels (bold section headers)
        self.setupSec_Label = QtWidgets.QLabel("<b>Device setup</b>", self)
        self.setupSec_Label.setObjectName('setupSec_Label')
        self.testSec_Label = QtWidgets.QLabel("<b>Test Progress</b>", self)
        self.testSec_Label.setObjectName('testSec_Label')
        self.calConfigSec_Label = QtWidgets.QLabel("<b>Calibration Configuration</b>", self)
        self.calConfigSec_Label.setObjectName('calConfigSec_Label')
        self.distConfigSec_Label = QtWidgets.QLabel("<b>Distance Test Configuration</b>", self)
        self.distConfigSec_Label.setObjectName('distConfigSec_Label')
        self.deviceConfigSec_Label = QtWidgets.QLabel("<b>Device Configuration</b>", self)
        self.deviceConfigSec_Label.setObjectName('deviceConfigSec_Label')
        self.plotSettingsSec_Label = QtWidgets.QLabel("<b>Plot settings</b>", self)
        self.plotSettingsSec_Label.setObjectName('plotSettingsSec_Label')
        self.filePlotSec_Label = QtWidgets.QLabel("<b>Plot from file</b>", self)
        self.filePlotSec_Label.setObjectName('filePlotSec_Label')
        #Buttons
        #Refresh icon to use
        self.refreshIconDir = os.path.join(self.basedir,'refresh.ico')
        self.refreshIcon = QtGui.QIcon(self.refreshIconDir)
        self.refreshIconSizes = self.refreshIcon.availableSizes() #Get all .ico sizes
        self.refreshIconWidth = self.refreshIconSizes[0].width() #Choose the smallest size
        self.refreshIconHeight = self.refreshIconSizes[0].height() #Choose the smallest size
        #Button to refresh COM ports
        self.refreshComPorts_PushButton = QtWidgets.QPushButton()
        self.refreshComPorts_PushButton.clicked.connect(self.refreshComPorts)
        self.refreshComPorts_PushButton.setObjectName("refreshComPorts_PushButton")
        self.refreshComPorts_PushButton.setIcon(QtGui.QIcon(self.refreshIconDir))
        self.refreshComPorts_PushButton.setIconSize(self.refreshIconSizes[0])
        self.refreshComPorts_PushButton.setFixedWidth(int(round(self.refreshIconHeight*1.1))) #add a little border around the icon
        self.refreshComPorts_PushButton.setFixedHeight(int(round(self.refreshIconHeight*1.1))) #add a little border around the icon
        #Button to calibrate the antenna delay
        self.antDelayCal_PushButton = QtWidgets.QPushButton("Calibrate")
        self.antDelayCal_PushButton.setFixedWidth(75)
        self.antDelayCal_PushButton.clicked.connect(self.startThread)
        self.antDelayCal_PushButton.setObjectName("antDelayCal_PushButton")
        #Button to run a distance test
        self.distMeas_PushButton = QtWidgets.QPushButton("Measure")
        self.distMeas_PushButton.setFixedWidth(75)
        self.distMeas_PushButton.clicked.connect(self.startThread)
        self.distMeas_PushButton.setObjectName("distMeas_PushButton")
        #Button to select distance file to plot from
        self.filePlot_PushButton = QtWidgets.QPushButton("Browse")
        self.filePlot_PushButton.setFixedWidth(75)
        self.filePlot_PushButton.setObjectName("filePlot_PushButton")
        self.filePlot_PushButton.clicked.connect(self.startThread)
        #Check boxes
        #Check box to make gaussian plot
        self.makeGauss_CheckBox = QtWidgets.QCheckBox()
        self.makeGauss_CheckBox.setObjectName('makeGauss_CheckBox')
        self.makeGauss_CheckBox.setChecked(True)
        self.makeGauss_CheckBox.setToolTip("Sets whether or not to make a\n"\
                                           "gaussian plot when running the\n"\
                                           "distance test.")
        #Make gaussian plot check box label (clickable label toggles the box)
        self.makeGauss_CheckBox_Label = ExtendedQLabel("Make gaussian plot", self)
        self.makeGauss_CheckBox_Label.setObjectName('makeGauss_CheckBox_Label')
        self.makeGauss_CheckBox_Label.clicked.connect(lambda: self.configureWidgets({self.makeGauss_CheckBox_Label.objectName():None}))
        #Check box to make histogram plot
        self.makeHist_CheckBox = QtWidgets.QCheckBox()
        self.makeHist_CheckBox.setObjectName('makeHist_CheckBox')
        self.makeHist_CheckBox.setChecked(True)
        self.makeHist_CheckBox.setToolTip("Sets whether or not to make a\n"\
                                          "histogram plot when running the\n"\
                                          "distance test.")
        #Make histogram plot check box label
        self.makeHist_CheckBox_Label = ExtendedQLabel("Make histogram plot", self)
        self.makeHist_CheckBox_Label.setObjectName('makeHist_CheckBox_Label')
        self.makeHist_CheckBox_Label.clicked.connect(lambda: self.configureWidgets({self.makeHist_CheckBox_Label.objectName():None}))
        #Check box to plot the reference curve
        self.makeRef_CheckBox = QtWidgets.QCheckBox()
        self.makeRef_CheckBox.setObjectName('makeRef_CheckBox')
        self.makeRef_CheckBox.setChecked(True)
        self.makeRef_CheckBox.setToolTip("Sets whether or not to plot the\n"\
                                         "reference curve on the distance\n"\
                                         "test plot.")
        #Make reference plot check box label
        self.makeRef_CheckBox_Label = ExtendedQLabel("Make reference plot", self)
        self.makeRef_CheckBox_Label.setObjectName('makeRef_CheckBox_Label')
        self.makeRef_CheckBox_Label.clicked.connect(lambda: self.configureWidgets({self.makeRef_CheckBox_Label.objectName():None}))
        #Check box to scale the data to a reference curve
        self.scaleData_CheckBox = QtWidgets.QCheckBox()
        self.scaleData_CheckBox.setObjectName('scaleData_CheckBox')
        self.scaleData_CheckBox.setChecked(True)
        self.scaleData_CheckBox.setToolTip("Sets whether or not to make a plot\n"\
                                           "with the data scaled to a reference\n"\
                                           "curve.")
        #Scale data check box label
        self.scaleData_CheckBox_Label = ExtendedQLabel("Scale data", self)
        self.scaleData_CheckBox_Label.setObjectName('scaleData_CheckBox_Label')
        self.scaleData_CheckBox_Label.clicked.connect(lambda: self.configureWidgets({self.scaleData_CheckBox_Label.objectName():None}))
        #Check box to truncate plot data; enables/disables the truncation spinboxes
        self.truncData_CheckBox = QtWidgets.QCheckBox()
        self.truncData_CheckBox.setObjectName('truncData_CheckBox')
        self.truncData_CheckBox.stateChanged.connect(lambda: self.configureWidgets({self.minTruncDist_SpinBox.objectName():self.truncData_CheckBox.isChecked(),
                                                                                    self.maxTruncDist_SpinBox.objectName():self.truncData_CheckBox.isChecked(),
                                                                                    self.minTruncDist_SpinBox_Label.objectName():self.truncData_CheckBox.isChecked(),
                                                                                    self.maxTruncDist_SpinBox_Label.objectName():self.truncData_CheckBox.isChecked()}))
        self.truncData_CheckBox.setToolTip("Sets whether or not to truncate\n"\
                                           "the dataset when making a plot.")
        #Truncate plot data check box label
        self.truncData_CheckBox_Label = ExtendedQLabel("Truncate data", self)
        self.truncData_CheckBox_Label.setObjectName('truncData_CheckBox_Label')
        self.truncData_CheckBox_Label.clicked.connect(lambda: self.configureWidgets({self.truncData_CheckBox_Label.objectName():None}))
        #Progress bars
        #Loop progress bar (progress within the current distance)
        self.loopProgressBar = QtWidgets.QProgressBar(self)
        self.loopProgressBar.setFixedWidth(150)
        self.loopProgressBar.setValue(0)
        self.loopProgressBar.setObjectName("loopProgressBar")
        #Loop progress bar labels
        self.loopProgressBar_Label = QtWidgets.QLabel("Loop time remaining: {0}".format(self.remainTimeStr), self)
        self.loopProgressBar_Label.setFixedWidth(175)
        self.loopProgressBar_Label.setObjectName("loopProgressBar_Label")
        #Test progress bar (progress across the whole test)
        self.testProgressBar = QtWidgets.QProgressBar(self)
        self.testProgressBar.setFixedWidth(150)
        self.testProgressBar.setObjectName("testProgressBar")
        self.testProgressBar.setValue(0)
        #Comboboxes
        #Combobox for anchor COM port (populated by refreshComPorts)
        self.anchorComPort_ComboBox = QtWidgets.QComboBox(self)
        self.anchorComPort_ComboBox.setObjectName("anchorComPort_ComboBox")
        #Anchor COM port combobox label
        self.anchorComPort_ComboBox_Label = QtWidgets.QLabel("Anchor Port:", self)
        self.anchorComPort_ComboBox_Label.setObjectName("anchorComPort_ComboBox_Label")
        #Combobox for tag COM port
        self.tagComPort_ComboBox = QtWidgets.QComboBox(self)
        self.tagComPort_ComboBox.setObjectName("tagComPort_ComboBox")
        #Tag COM port combobox label
        self.tagComPort_ComboBox_Label = QtWidgets.QLabel("Tag Port:", self)
        self.tagComPort_ComboBox_Label.setObjectName("tagComPort_ComboBox_Label")
        #Combobox for baud rate; index 6 selects "9600" from self.baudRates
        self.baudRate_ComboBox = QtWidgets.QComboBox(self)
        self.baudRate_ComboBox.addItems(self.baudRates)
        self.baudRate_ComboBox.setCurrentIndex(6)
        self.baudRate_ComboBox.setObjectName("baudRate_ComboBox")
        self.baudRate_ComboBox.setEnabled(False) #disabled because everything breaks if incorrect baud rate is chosen
        #Baud rate combobox label
        self.baudRate_ComboBox_Label = QtWidgets.QLabel("Baud rate:", self)
        self.baudRate_ComboBox_Label.setObjectName("baudRate_ComboBox_Label")
        self.baudRate_ComboBox_Label.setToolTip("Sets baud rate for both the\n"\
                                                "anchor and the tag.")
        #Spinboxes
        #Spinbox for calibration distance
        self.calDist_SpinBox = QtWidgets.QSpinBox(self)
        self.calDist_SpinBox.setMaximumHeight(25)
        self.calDist_SpinBox.setMaximumWidth(60)
        #NOTE(review): objectName is 'stepDist_SpinBox', not 'calDist_SpinBox' —
        #looks like a copy-paste slip; confirm nothing looks widgets up by this name
        self.calDist_SpinBox.setObjectName('stepDist_SpinBox')
        self.calDist_SpinBox.setRange(self.stepDistMin,self.distMax)
        self.calDist_SpinBox.setValue(self.calDistDef)
        self.calDist_SpinBox.setToolTip("Sets distance between the anchor\n"\
                                        "and tag to be used for antenna\n"\
                                        "delay calibration. Make sure to\n"\
                                        "set the antenna delay value on\n"\
                                        "the DecaWave boards to zero\n"\
                                        "before running this procedure.")
        #calibration distance spinbox label
        self.calDist_SpinBox_Label = QtWidgets.QLabel("Cal distance (cm):", self)
        self.calDist_SpinBox_Label.setObjectName('calDist_SpinBox_Label')
        #Spinbox for calibration number of samples per distance
        self.calNumSamples_SpinBox = QtWidgets.QSpinBox(self)
        self.calNumSamples_SpinBox.setMaximumHeight(25)
        self.calNumSamples_SpinBox.setMaximumWidth(60)
        self.calNumSamples_SpinBox.setObjectName('calNumSamples_SpinBox')
        self.calNumSamples_SpinBox.setRange(self.numSamplesMin,self.numSamplesMax)
        self.calNumSamples_SpinBox.setValue(self.numSamplesDef)
        self.calNumSamples_SpinBox.setToolTip("Sets the number of samples to\n"\
                                              "take when performing antenna\n"\
                                              "delay calibration. Make sure to\n"\
                                              "set the antenna delay value on\n"\
                                              "the DecaWave boards to zero\n"\
                                              "before running this procedure.")
        #calibration samples spinbox label
        self.calNumSamples_SpinBox_Label = QtWidgets.QLabel("Number of samples:", self)
        self.calNumSamples_SpinBox_Label.setObjectName('calNumSamples_SpinBox_Label')
        #Spinbox for start distance (kept consistent with stop/step by spinboxChecker)
        self.startDist_SpinBox = QtWidgets.QSpinBox(self)
        self.startDist_SpinBox.setMaximumHeight(25)
        self.startDist_SpinBox.setMaximumWidth(60)
        self.startDist_SpinBox.setObjectName('startDist_SpinBox')
        self.startDist_SpinBox.setRange(self.distMin,self.distMax)
        self.startDist_SpinBox.setSingleStep(self.stepDistDef)
        self.startDist_SpinBox.setValue(self.startDistDef)
        self.startDist_SpinBox.valueChanged.connect(self.spinboxChecker)
        self.startDist_SpinBox.setToolTip("Sets the start distance for a\n"\
                                          "distance measurement test in\n"\
                                          "centimeters.")
        #start distance spinbox label
        self.startDist_SpinBox_Label = QtWidgets.QLabel("Start distance (cm):", self)
        self.startDist_SpinBox_Label.setObjectName('startDist_SpinBox_Label')
        #Spinbox for stop distance
        self.stopDist_SpinBox = QtWidgets.QSpinBox(self)
        self.stopDist_SpinBox.setMaximumHeight(25)
        self.stopDist_SpinBox.setMaximumWidth(60)
        self.stopDist_SpinBox.setObjectName('stopDist_SpinBox')
        self.stopDist_SpinBox.setRange(self.distMin,self.distMax)
        self.stopDist_SpinBox.setSingleStep(self.stepDistDef)
        self.stopDist_SpinBox.setValue(self.stopDistDef)
        self.stopDist_SpinBox.valueChanged.connect(self.spinboxChecker)
        self.stopDist_SpinBox.setToolTip("Sets the stop distance for a\n"\
                                         "distance measurement test in\n"\
                                         "centimeters.")
        #stop distance spinbox label
        self.stopDist_SpinBox_Label = QtWidgets.QLabel("Stop distance (cm):", self)
        self.stopDist_SpinBox_Label.setObjectName('stopDist_SpinBox_Label')
        #Spinbox for step distance
        self.stepDist_SpinBox = QtWidgets.QSpinBox(self)
        self.stepDist_SpinBox.setMaximumHeight(25)
        self.stepDist_SpinBox.setMaximumWidth(60)
        self.stepDist_SpinBox.setObjectName('stepDist_SpinBox')
        self.stepDist_SpinBox.setRange(self.stepDistMin,self.distMax)
        self.stepDist_SpinBox.setValue(self.stepDistDef)
        self.stepDist_SpinBox.valueChanged.connect(self.spinboxChecker)
        self.stepDist_SpinBox.setToolTip("Sets the step distance for a\n"\
                                         "distance measurement test in\n"\
                                         "centimeters.")
        #step distance spinbox label
        self.stepDist_SpinBox_Label = QtWidgets.QLabel("Step distance (cm):", self)
        self.stepDist_SpinBox_Label.setObjectName('stepDist_SpinBox_Label')
        #Spinbox for distance test number of samples per distance
        self.distNumSamples_SpinBox = QtWidgets.QSpinBox(self)
        self.distNumSamples_SpinBox.setMaximumHeight(25)
        self.distNumSamples_SpinBox.setMaximumWidth(60)
        self.distNumSamples_SpinBox.setObjectName('distNumSamples_SpinBox')
        self.distNumSamples_SpinBox.setRange(self.numSamplesMin,self.numSamplesMax)
        self.distNumSamples_SpinBox.setValue(self.numSamplesDef)
        self.distNumSamples_SpinBox.setToolTip("Sets the number of samples to\n"\
                                               "take at each distance in the\n"\
                                               "distance measurement test.\n")
        #distance test samples spinbox label
        self.distNumSamples_SpinBox_Label = QtWidgets.QLabel("Number of samples:", self)
        self.distNumSamples_SpinBox_Label.setObjectName('distNumSamples_SpinBox_Label')
        #Spinbox for minimum distance to truncate plot data to (enabled by truncData_CheckBox)
        self.minTruncDist_SpinBox = QtWidgets.QSpinBox(self)
        self.minTruncDist_SpinBox.setMaximumHeight(25)
        self.minTruncDist_SpinBox.setMaximumWidth(60)
        self.minTruncDist_SpinBox.setObjectName('minTruncDist_SpinBox')
        self.minTruncDist_SpinBox.setRange(self.distMin,self.distMax)
        self.minTruncDist_SpinBox.setValue(self.startDistDef)
        self.minTruncDist_SpinBox.setToolTip("Sets the minimum distance to\n"\
                                             "truncate plot data\n")
        self.minTruncDist_SpinBox.setEnabled(False)
        #minimum truncation distance spinbox label
        self.minTruncDist_SpinBox_Label = QtWidgets.QLabel("Minimum distance (cm):", self)
        self.minTruncDist_SpinBox_Label.setObjectName('minTruncDist_SpinBox_Label')
        self.minTruncDist_SpinBox_Label.setEnabled(False)
        #Spinbox for maximum distance to truncate plot data to
        self.maxTruncDist_SpinBox = QtWidgets.QSpinBox(self)
        self.maxTruncDist_SpinBox.setMaximumHeight(25)
        self.maxTruncDist_SpinBox.setMaximumWidth(60)
        self.maxTruncDist_SpinBox.setObjectName('maxTruncDist_SpinBox')
        self.maxTruncDist_SpinBox.setRange(self.distMin,self.distMax)
        self.maxTruncDist_SpinBox.setValue(self.stopDistDef)
        self.maxTruncDist_SpinBox.setToolTip("Sets the maximum distance to\n"\
                                             "truncate plot data\n")
        self.maxTruncDist_SpinBox.setEnabled(False)
        #maximum truncation distance spinbox label
        self.maxTruncDist_SpinBox_Label = QtWidgets.QLabel("Maximum distance (cm):", self)
        self.maxTruncDist_SpinBox_Label.setObjectName('maxTruncDist_SpinBox_Label')
        self.maxTruncDist_SpinBox_Label.setEnabled(False)
        #Spinbox for anchor antenna delay
        self.anchorDelay_SpinBox = QtWidgets.QSpinBox(self)
        self.anchorDelay_SpinBox.setMaximumHeight(25)
        self.anchorDelay_SpinBox.setMaximumWidth(60)
        self.anchorDelay_SpinBox.setObjectName('anchorDelay_SpinBox')
        self.anchorDelay_SpinBox.setRange(self.antDelayMin,self.antDelayMax)
        self.anchorDelay_SpinBox.setValue(self.antDelayDef)
        self.anchorDelay_SpinBox.setToolTip("Sets the anchor antenna delay\n"\
                                            "value\n")
        #anchor antenna delay spinbox label
        self.anchorDelay_SpinBox_Label = QtWidgets.QLabel("Anchor delay:", self)
        self.anchorDelay_SpinBox_Label.setObjectName('anchorDelay_SpinBox_Label')
        #Spinbox for tag antenna delay
        self.tagDelay_SpinBox = QtWidgets.QSpinBox(self)
        self.tagDelay_SpinBox.setMaximumHeight(25)
        self.tagDelay_SpinBox.setMaximumWidth(60)
        self.tagDelay_SpinBox.setObjectName('tagDelay_SpinBox')
        self.tagDelay_SpinBox.setRange(self.antDelayMin,self.antDelayMax)
        self.tagDelay_SpinBox.setValue(self.antDelayMin)
        self.tagDelay_SpinBox.setToolTip("Sets the tag antenna delay\n"\
                                         "value\n")
        #tag antenna delay spinbox label
        self.tagDelay_SpinBox_Label = QtWidgets.QLabel("Tag delay:", self)
        self.tagDelay_SpinBox_Label.setObjectName('tagDelay_SpinBox_Label')
        #Initial Stuff: grid layout with vertical separators at columns 2 and 5
        self.Main = QtWidgets.QGridLayout()
        self.Main.addWidget(self.Vframe1,0,2,12,1)
        self.Main.addWidget(self.Vframe2,0,5,12,1)
        #Setup and test section
        #######################################################################
        self.Main.addWidget(self.setupSec_Label,0,0,1,2)
        self.Main.addWidget(self.refreshComPorts_PushButton,0,1,
                            alignment = QtCore.Qt.AlignRight | QtCore.Qt.AlignHCenter)
        self.Main.addWidget(self.anchorComPort_ComboBox_Label,1,0)
        self.Main.addWidget(self.anchorComPort_ComboBox,1,1)
        self.Main.addWidget(self.tagComPort_ComboBox_Label,2,0)
        self.Main.addWidget(self.tagComPort_ComboBox,2,1)
        self.Main.addWidget(self.baudRate_ComboBox_Label,3,0)
        self.Main.addWidget(self.baudRate_ComboBox,3,1)
        self.Main.addWidget(self.deviceSetupHframe,4,0,1,2)
        self.Main.addWidget(self.testSec_Label,5,0,1,2)
        self.Main.addWidget(self.testProgressBar,6,0,1,2)
        self.Main.addWidget(self.loopProgressBar,7,0,1,2)
        self.Main.addWidget(self.loopProgressBar_Label,8,0,1,2)
        self.Main.addWidget(self.antDelayCal_PushButton,9,0)
        self.Main.addWidget(self.distMeas_PushButton,9,1)
        #Configure section
        #######################################################################
        self.Main.addWidget(self.calConfigSec_Label,0,3,1,2)
        self.Main.addWidget(self.calDist_SpinBox_Label,1,3)
        self.Main.addWidget(self.calDist_SpinBox,1,4)
        self.Main.addWidget(self.calNumSamples_SpinBox_Label,2,3)
        self.Main.addWidget(self.calNumSamples_SpinBox,2,4)
        self.Main.addWidget(self.calConfigHframe,3,3,1,2)
        self.Main.addWidget(self.distConfigSec_Label,4,3,1,2)
        self.Main.addWidget(self.startDist_SpinBox_Label,5,3)
        self.Main.addWidget(self.startDist_SpinBox,5,4)
        self.Main.addWidget(self.stopDist_SpinBox_Label,6,3)
        self.Main.addWidget(self.stopDist_SpinBox,6,4)
        self.Main.addWidget(self.stepDist_SpinBox_Label,7,3)
        self.Main.addWidget(self.stepDist_SpinBox,7,4)
        self.Main.addWidget(self.distNumSamples_SpinBox_Label,8,3)
        self.Main.addWidget(self.distNumSamples_SpinBox,8,4)
        self.Main.addWidget(self.deviceConfigSec_Label,9,3,1,2)
        self.Main.addWidget(self.anchorDelay_SpinBox_Label,10,3)
        self.Main.addWidget(self.anchorDelay_SpinBox,10,4)
        self.Main.addWidget(self.tagDelay_SpinBox_Label,11,3)
        self.Main.addWidget(self.tagDelay_SpinBox,11,4)
        #Plot settings section
        #######################################################################
        self.Main.addWidget(self.plotSettingsSec_Label,0,6,1,2)
        self.Main.addWidget(self.makeGauss_CheckBox,1,6)
        self.Main.addWidget(self.makeGauss_CheckBox_Label,1,7,1,2)
        self.Main.addWidget(self.makeHist_CheckBox,2,6)
        self.Main.addWidget(self.makeHist_CheckBox_Label,2,7,1,2)
        self.Main.addWidget(self.makeRef_CheckBox,3,6)
        self.Main.addWidget(self.makeRef_CheckBox_Label,3,7,1,2)
        self.Main.addWidget(self.scaleData_CheckBox,4,6)
        self.Main.addWidget(self.scaleData_CheckBox_Label,4,7,1,2)
        self.Main.addWidget(self.truncData_CheckBox,5,6)
        self.Main.addWidget(self.truncData_CheckBox_Label,5,7,1,2)
        self.Main.addWidget(self.minTruncDist_SpinBox_Label,6,7)
        self.Main.addWidget(self.minTruncDist_SpinBox,6,8)
        self.Main.addWidget(self.maxTruncDist_SpinBox_Label,7,7)
        self.Main.addWidget(self.maxTruncDist_SpinBox,7,8)
        self.Main.addWidget(self.filePlotSec_Label,8,6,1,2)
        self.Main.addWidget(self.filePlot_PushButton,9,6,1,2)
        self.Main.addWidget(self.mainHframe,12,0,1,9)
        #Status bar
        self.Main.addWidget(self.guiStatusBar,13,0,1,11,alignment = QtCore.Qt.AlignBottom)
        if self.guiOnly == True:
            self.guiStatusBar.showMessage("***NOTE: DW1000 control disabled***")
        elif self.guiOnly == False:
            self.guiStatusBar.showMessage("STATUS: Idle.")
        #Instantiate main widget
        self.setLayout(self.Main)
#==========================================================================
# GUI-RELATED SUPPORTING FUNCTIONS
#==========================================================================
#Error-checking for spinboxes
    def spinboxChecker(self):
        """Keep the distance spin boxes (start/stop/step) mutually consistent.

        Connected to the value-changed paths of the distance spin boxes.
        Changing the step snaps start/stop to multiples of the step; changing
        start or stop enforces start < stop within [distMin, distMax].
        """
        widgetInfo = self.widgetInfo()
        startValue = self.startDist_SpinBox.value()
        stopValue = self.stopDist_SpinBox.value()
        stepValue = self.stepDist_SpinBox.value()
        if (widgetInfo["widgetName"] == "stepDist"):
            # New step size: make start/stop advance by it, then snap both
            # to the nearest multiple of the step.
            newStepValue = self.stepDist_SpinBox.value()
            self.startDist_SpinBox.setSingleStep(newStepValue)
            self.stopDist_SpinBox.setSingleStep(newStepValue)
            newStartValue = int(stepValue*round(float(startValue)/stepValue))
            newStopValue = int(stepValue*round(float(stopValue)/stepValue))
            self.startDist_SpinBox.setValue(newStartValue)
            self.stopDist_SpinBox.setValue(newStopValue)
        elif ((widgetInfo["widgetName"] == "startDist") or
              (widgetInfo["widgetName"] == "minTruncDist")):
            # Start moved past (or onto) stop: push stop up one step, or if
            # start already hit the maximum, pull start back instead.
            if (startValue >= stopValue) and (startValue < self.distMax):
                self.stopDist_SpinBox.setValue(startValue+stepValue)
            elif (startValue >= stopValue) and (startValue >= self.distMax):
                self.startDist_SpinBox.setValue(stopValue-stepValue)
            stopValue = self.stopDist_SpinBox.value()
            startValue = self.startDist_SpinBox.value()
            # if not ((stopValue - startValue) == stepValue):
            #     newStopValue = int(stepValue*round(float(stopValue)/stepValue))
            #     self.stopDist_SpinBox.setValue(newStopValue)
        elif ((widgetInfo["widgetName"] == "stopDist") or
              (widgetInfo["widgetName"] == "maxTruncDist")):
            # Mirror image of the branch above for the stop/max spin boxes.
            if (stopValue <= startValue) and (stopValue > self.distMin):
                self.startDist_SpinBox.setValue(stopValue-stepValue)
            elif (stopValue <= startValue) and (stopValue <= self.distMin):
                self.stopDist_SpinBox.setValue(startValue+stepValue)
            stopValue = self.stopDist_SpinBox.value()
            startValue = self.startDist_SpinBox.value()
            # if not ((stopValue - startValue) == stepValue):
            #     newStartValue = int(stepValue*round(float(startValue)/stepValue))
            #     self.startDist_SpinBox.setValue(newStartValue)
#Update various GUI-related widgets
    def updateGui(self,field,value):
        """Update one GUI element identified by *field* with *value*.

        Dispatch point used both directly and as the slot for worker-thread
        signals, so all widget mutation funnels through the GUI thread.
        *value* is a string for message boxes / labels / status bar, and an
        int-convertible string for progress bars and spin boxes.
        """
        if field == "errThreadMsgBox":
            # Non-modal message boxes are used for worker threads so the
            # thread is not blocked waiting on user input.
            self.threadMsgBox.setIcon(QtWidgets.QMessageBox.Warning)
            self.threadMsgBox.setWindowTitle("Error")
            self.threadMsgBox.setText(value)
            self.threadMsgBox.show()
        elif field == "infoThreadMsgBox":
            self.threadMsgBox.setIcon(QtWidgets.QMessageBox.Information)
            self.threadMsgBox.setWindowTitle("Information")
            self.threadMsgBox.setText(value)
            self.threadMsgBox.show()
        elif field == "confirmMsgBox":
            self.confirmMsgBox.show()
        elif field == "errGeneralMsgBox":
            self.generalMsgBox.warning(self,"Error",value)
        elif field == "infoGeneralMsgBox":
            self.generalMsgBox.information(self,"Information",value)
        elif field == "infoGeneralMsgBoxNoBtns":
            # Informational box without an OK button; dismissed later via
            # the "closeGeneralMsgBox" field.
            self.generalMsgBox.setIcon(QtWidgets.QMessageBox.Information)
            self.generalMsgBox.setWindowTitle("Information")
            self.generalMsgBox.setText(value)
            self.generalMsgBox.removeButton(QtWidgets.QMessageBox.Ok)
            self.generalMsgBox.show()
        elif field == "closeGeneralMsgBox":
            self.generalMsgBox.done(1)
        elif field == "testProgressBar":
            self.testProgressBar.setValue(int(value))
        elif field == "loopProgressBar":
            self.loopProgressBar.setValue(int(value))
        elif field == "loopProgressBar_Label":
            self.loopProgressBar_Label.setText(value)
        elif field == "anchorDelaySpinBox":
            self.anchorDelay_SpinBox.setValue(int(value))
        elif field == "tagDelaySpinBox":
            self.tagDelay_SpinBox.setValue(int(value))
        elif field == "statusBar":
            self.guiStatusBar.showMessage(value)
#Configure various GUI-related widgets
def configureWidgets(self,widgetDict):
for widget,state in widgetDict.items():
widgetInfo = self.widgetInfo(widget=widget)
#Note that clickable labels are currently only connected to check
#boxes and radio buttons and either check or uncheck them. Whenever
#the state argument is "None", the attached widget will change to the
#opposite state. However, if a state is specified, the "state" variable
#sets whether or not the label is enabled
if (widgetInfo["widgetType"] == "Label"):
if (state == None):
checkState = eval("self.{0}.isChecked()".format(widget.rstrip("_Label")))
eval("self.{0}.setChecked({1})".format(widget.rstrip("_Label"),(not checkState))) # change the check box to its opposite state
else:
eval("self.{0}.setEnabled({1})".format(widget,state))
else:
eval("self.{0}.setEnabled({1})".format(widget,state))
#Get the name and type of the most recently clicked widget
def widgetInfo(self,widget=None):
if (widget == None):
widget = self.sender().objectName()
widgetList = widget.split("_")
widgetName = widgetList[0]
widgetType = widgetList[-1]
if (widgetType == "Label") and (len(widgetList) >= 3):
connWidgetType = widgetList[-2] #Some labels are connected to
else:
connWidgetType = None
return {"widget":widget,
"widgetType":widgetType,
"connWidgeType":connWidgetType,
"widgetName":widgetName}
#Update the testInfoDict being sent to the backend with the most recent values
    def updateTestInfoDict(self):
        """Refresh testInfoDict/plotInfoDict from the current widget values.

        Called from the button that starts a run; the clicked button's name
        ("antDelayCal", "distMeas" or "filePlot") selects which group of
        widgets populates the dictionaries handed to the backend thread.
        """
        widgetInfo = self.widgetInfo()
        #Test type (calibration or distance measurement)
        self.testInfoDict["testType"] = widgetInfo["widgetName"]
        #Device variables
        self.testInfoDict["anchorPort"] = self.anchorComPort_ComboBox.currentText()
        self.testInfoDict["anchorBaud"] = self.baudRate_ComboBox.currentText()
        self.testInfoDict["tagPort"] = self.tagComPort_ComboBox.currentText()
        self.testInfoDict["tagBaud"] = self.baudRate_ComboBox.currentText()
        #Plot variables
        self.plotInfoDict["makeGaussPlot"] = self.makeGauss_CheckBox.isChecked()
        self.plotInfoDict["makeHistPlot"] = self.makeHist_CheckBox.isChecked()
        self.plotInfoDict["scaleData"] = self.scaleData_CheckBox.isChecked()
        self.plotInfoDict["truncateData"] = self.truncData_CheckBox.isChecked()
        self.plotInfoDict["minTruncDist"] = self.minTruncDist_SpinBox.value()
        self.plotInfoDict["maxTruncDist"] = self.maxTruncDist_SpinBox.value()
        # NOTE(review): the three addWidget calls below look like layout code
        # accidentally pasted into this method — they re-add the calibration
        # widgets to the grid every time a test starts. Confirm and remove.
        self.Main.addWidget(self.calDist_SpinBox,1,4)
        self.Main.addWidget(self.calNumSamples_SpinBox_Label,2,3)
        self.Main.addWidget(self.calNumSamples_SpinBox,2,4)
        if (widgetInfo["widgetName"] == "antDelayCal"):
            # Calibration runs at a single fixed distance with zero antenna
            # delays, so start == stop and numSteps evaluates to 0.
            self.testInfoDict["numSamples"] = self.calNumSamples_SpinBox.value()
            self.testInfoDict["startDist"] = self.calDist_SpinBox.value()
            self.testInfoDict["stopDist"] = self.calDist_SpinBox.value()
            self.testInfoDict["stepDist"] = 1
            self.testInfoDict["numSteps"] = (self.testInfoDict["stopDist"] - self.testInfoDict["startDist"])/self.testInfoDict["stepDist"]
            self.testInfoDict["anchorAntDelayDec"] = 0
            self.testInfoDict["tagAntDelayDec"] = 0
            self.plotInfoDict["useFile"] = False
        elif (widgetInfo["widgetName"] == "distMeas"):
            self.testInfoDict["numSamples"] = self.distNumSamples_SpinBox.value()
            self.testInfoDict["startDist"] = self.startDist_SpinBox.value()
            self.testInfoDict["stopDist"] = self.stopDist_SpinBox.value()
            self.testInfoDict["stepDist"] = self.stepDist_SpinBox.value()
            self.testInfoDict["numSteps"] = (self.testInfoDict["stopDist"] - self.testInfoDict["startDist"])/self.testInfoDict["stepDist"]
            self.testInfoDict["anchorAntDelayDec"] = self.anchorDelay_SpinBox.value()
            self.testInfoDict["tagAntDelayDec"] = self.tagDelay_SpinBox.value()
            self.plotInfoDict["useFile"] = False
            print("Done")  # NOTE(review): leftover debug print — consider removing
        elif (widgetInfo["widgetName"] == "filePlot"):
            # Plot from a previously saved CSV: open the file dialog in the
            # directory of the last-used file (or next to this script).
            if (self.plotInfoDict["fileName"] == ""):
                curDir = os.path.dirname(os.path.realpath(__file__))
            else:
                csvName = self.plotInfoDict["fileName"].split("/")[-1]
                curDir = self.plotInfoDict["fileName"].rstrip(csvName)
            fileName = QtWidgets.QFileDialog.getOpenFileName(self,
                                                             "Select data file",
                                                             curDir,
                                                             "CSV files (*.csv)")
            self.plotInfoDict["useFile"] = True
            self.plotInfoDict["fileName"] = fileName[0]
            print(self.plotInfoDict["fileName"])
#==========================================================================
# THREAD-RELATED FUNCTIONS
#==========================================================================
#Start thread based on button clicked
    def startThread(self):
        """Spin up the worker thread(s) that run the selected test.

        Snapshots the widget values into the info dicts, disables the
        controls that could corrupt a running test, then creates
        NUM_THREADS QThread/worker pairs wired to the GUI via signals.
        """
        self.updateTestInfoDict()
        # if self.plotInfoDict["fileName"] == "": #why is this here?
        #     return
        self.configureWidgets({self.antDelayCal_PushButton.objectName():False,
                               self.distMeas_PushButton.objectName():False,
                               self.filePlot_PushButton.objectName():False,
                               self.anchorComPort_ComboBox.objectName():False,
                               self.anchorComPort_ComboBox_Label.objectName():False,
                               self.tagComPort_ComboBox.objectName():False,
                               self.tagComPort_ComboBox_Label.objectName():False,
                               self.baudRate_ComboBox.objectName():False,
                               # self.baudRate_ComboBox_Label.objectName():False,
                               self.anchorDelay_SpinBox.objectName():False,
                               self.anchorDelay_SpinBox_Label.objectName():False,
                               self.tagDelay_SpinBox.objectName():False,
                               self.tagDelay_SpinBox_Label.objectName():False}) #Disable widgets to avoid errors
        self.__workers_done = 0
        self.__threads = []
        for idx in range(self.NUM_THREADS):
            thread = QtCore.QThread()
            thread.setObjectName(self.testInfoDict["testType"])
            worker = distMeasThread(idx,self.testInfoDict,self.plotInfoDict)
            self.__threads.append((thread, worker))  # need to store worker too otherwise will be gc'd
            worker.moveToThread(thread)
            # get progress messages from worker:
            worker.sig_done.connect(self.abortWorkers) #For now, exit all threads when one is finished; we only use one at a time for now
            worker.sig_msg.connect(self.updateGui)
            # control worker:
            self.sig_abort_workers.connect(worker.abort)
            # get ready to start worker:
            thread.started.connect(worker.setup)
            thread.start()  # this will emit 'started' and start thread's event loop
def workerLoop(self,button):
for thread, worker in self.__threads: # note nice unpacking by Python, avoids indexing
worker.outerLoop()
#Ask all threads to end
    def abortWorkers(self):
        """Signal every worker to stop, join its thread, and reset the GUI.

        Also used as the normal completion path (connected to sig_done in
        startThread), so it re-enables the controls and clears progress.
        """
        self.sig_abort_workers.emit()
        for thread, worker in self.__threads:  # note nice unpacking by Python, avoids indexing
            thread.quit()  # this will quit **as soon as thread event loop unblocks**
            thread.wait()  # <- so you need to wait for it to *actually* quit
        # Re-enable everything startThread disabled.
        self.configureWidgets({self.antDelayCal_PushButton.objectName():True,
                               self.distMeas_PushButton.objectName():True,
                               self.filePlot_PushButton.objectName():True,
                               self.anchorComPort_ComboBox.objectName():True,
                               self.anchorComPort_ComboBox_Label.objectName():True,
                               self.tagComPort_ComboBox.objectName():True,
                               self.tagComPort_ComboBox_Label.objectName():True,
                               self.baudRate_ComboBox.objectName():True,
                               # self.baudRate_ComboBox_Label.objectName():True,
                               self.anchorDelay_SpinBox.objectName():True,
                               self.anchorDelay_SpinBox_Label.objectName():True,
                               self.tagDelay_SpinBox.objectName():True,
                               self.tagDelay_SpinBox_Label.objectName():True}) #Re-enable widgets after the run
        # Reset progress indicators and status to the idle state.
        self.updateGui("testProgressBar",str(0))
        self.updateGui("loopProgressBar",str(0))
        self.updateGui("loopProgressBar_Label","Loop time remaining: N/A")
        self.updateGui("statusBar","STATUS: Idle.")
#==========================================================================
# SUPPORTING FUNCTIONS
#==========================================================================
#Populates combox with COM ports
    def refreshComPorts(self):
        """Probe USB serial ports and repopulate the anchor/tag combo boxes.

        Each discovered port is opened at the selected baud rate and asked
        for its device type ("anchor" or "tag"); ports that fail to connect
        or identify are recorded as None and left out of the combo boxes.
        """
        comPortListTypes = {}
        comPortList = sorted(self.DW1000serial.getSerialUSBPorts())
        baudrate = self.baudRate_ComboBox.currentText()
        self.anchorComPort_ComboBox.clear()
        self.tagComPort_ComboBox.clear()
        #If there are no COM ports detected or only one is found
        if len(comPortList) == 0:
            self.updateGui("errGeneralMsgBox","No USB serial COM ports detected!\n"\
                           "Testing requires two USB serial COM ports!")
        else:
            for port in comPortList:
                # NOTE(review): bare excepts below swallow all errors,
                # including KeyboardInterrupt — consider narrowing to the
                # serial library's exception type.
                try: self.DW1000serial.connectToDUT(selPort=port,baudrate=baudrate)
                except:
                    comPortListTypes[port] = None
                    continue
                deviceType = self.DW1000serial.getDeviceType()
                #Close the serial port; no longer needed
                try:
                    self.DW1000serial.ser.isOpen()
                    self.DW1000serial.closeDW1000port()
                except:
                    pass
                if deviceType == None:
                    comPortListTypes[port] = None
                    continue
                else:
                    comPortListTypes[port] = deviceType
            numAnchors = sum(1 for x in comPortListTypes.values() if x == "anchor")
            numTags = sum(1 for x in comPortListTypes.values() if x == "tag")
            if (numAnchors == 0):
                self.updateGui("errGeneralMsgBox","No anchor COM ports discovered.\n"\
                               "Please check devices, refresh\n"\
                               "the COM port list, and check\n"\
                               "the baud rate.")
            if (numTags == 0):
                self.updateGui("errGeneralMsgBox","No tag COM ports discovered.\n"\
                               "Please check devices, refresh\n"\
                               "the COM port list, and check\n"\
                               "the baud rate.")
            if not (numAnchors == 0) and not (numTags == 0):
                self.updateGui("infoGeneralMsgBox","{0} anchor COM port(s) and\n"\
                               "{1} tag COM port(s) discovered.".format(numAnchors,numTags))
            # Route each identified port into the matching combo box; the
            # eval builds e.g. self.anchorComPort_ComboBox.addItem(port) and
            # the except skips ports whose deviceType is None.
            for port,deviceType in comPortListTypes.items():
                try: eval("self.{0}ComPort_ComboBox.addItem('{1}')".format(deviceType,port))
                except: continue
    def msgBoxCloseEvent(self,event):
        """Confirm before aborting data collection when a dialog is closed.

        On OK, all worker threads are aborted and the close event is
        accepted; otherwise the close is ignored and collection continues.
        """
        reply = QtWidgets.QMessageBox.question(self,
                                               "Confirm",
                                               "Are you sure you want to\n"\
                                               "quit data collection? Data\n"\
                                               "will NOT be saved!",
                                               QtWidgets.QMessageBox.Ok,
                                               QtWidgets.QMessageBox.Cancel)
        if reply == QtWidgets.QMessageBox.Ok:
            self.abortWorkers()
            event.accept()
        else:
            event.ignore()
#==========================================================================
# CLICKABLE QLABEL CLASS
#==========================================================================
class ExtendedQLabel(QtWidgets.QLabel):
    """QLabel that emits a ``clicked`` signal on mouse release.

    Used so that clicking a label can toggle the check box / radio button
    it is attached to.
    """
    clicked = QtCore.pyqtSignal()
    def __init__(self, parent=None):
        # BUG FIX: the original defined "__init" (missing the trailing
        # underscores), so this method was never invoked as the constructor.
        # parent defaults to None to stay compatible with ExtendedQLabel().
        QtWidgets.QLabel.__init__(self, parent)
    def mouseReleaseEvent(self, event):
        self.clicked.emit()
#==========================================================================
# SCROLLBAR GUI CLASS
#==========================================================================
class Scroll(QtWidgets.QScrollArea):
    """Top-level scrollable window that hosts the DW1000 test GUI widget."""
    def __init__(self):
        super().__init__()
        if getattr(sys, 'frozen', False):
            # we are running in a |PyInstaller| bundle
            self.basedir = sys._MEIPASS
        else:
            # we are running in a normal Python environment
            self.basedir = os.path.dirname(__file__)
        # Window icon shipped alongside the script/bundle.
        self.tglIconDir = os.path.join(self.basedir,'menrva.ico')
        self.showMaximized()
        self.setWindowTitle("DW1000 Test GUI")
        self.setWindowIcon(QtGui.QIcon(self.tglIconDir))
        # The actual GUI lives in DW1000testGUI; this class only scrolls it.
        self.ex = DW1000testGUI()
        self.setWidget(self.ex)
        self.setEnabled(True)
    def closeEvent(self, event):
        # Delegate to QScrollArea's default close handling.
        super(Scroll, self).closeEvent(event)
#==========================================================================
# MAIN CODE
#==========================================================================
if __name__ == '__main__':
    # Launch the Qt application and block until the window is closed.
    app = QtWidgets.QApplication(sys.argv)
    ex = Scroll()
    sys.exit(app.exec_())
13,663 | 6d84017b9c946e33c4ee063992d8f5db0fbdd7be | /Users/karljo/anaconda3/lib/python3.6/copy.py |
13,664 | a4a2044305fd3184a13e2d100e79755bee35a358 | from pprint import pprint
import re
extract_x = re.compile('x=((?P<spread>\d+\.\.\d+)|(?P<single>(\d+)))')
extract_y = re.compile('y=((?P<spread>\d+\.\.\d+)|(?P<single>(\d+)))')
def x_vals(line):
    """Return the inclusive (min, max) x-range parsed from *line*."""
    return match_var(extract_x, line)
def y_vals(line):
    """Return the inclusive (min, max) y-range parsed from *line*."""
    return match_var(extract_y, line)
def match_var(extract, line):
    """Apply pattern *extract* to *line*; return an inclusive (low, high) pair.

    The pattern must define a 'single' group (one coordinate) and a
    'spread' group of the form "a..b". A single value v yields (v, v);
    a spread is returned with its endpoints in ascending order.
    """
    m = re.search(extract, line)
    if m.group('single'):
        value = int(m.group('single'))
        return (value, value)
    low, high = sorted(int(part) for part in m.group('spread').split('..'))
    return (low, high)
#with open('simple.txt', 'r') as f:
with open('input.txt', 'r') as f:
    ranges = [(x_vals(line), y_vals(line)) for line in f.readlines()]
# Compute the bounding box of all clay veins.
x_min = 1000000000
x_max = -1000000000
y_min = 1000000000
y_max = -1000000000
for r in ranges:
    ((x1, x2), (y1, y2)) = r
    x_min = min(x1, x_min)
    x_max = max(x2, x_max)
    y_min = min(y1, y_min)
    y_max = max(y2, y_max)
# Grid is padded by one cell on every side so water can flow past the edges.
grid = [['.' for _ in range(x_max - x_min + 3)] for _ in range(y_max - y_min + 3)]
# Shift coordinates so the bounding box starts at (1, 1) inside the padding.
corrected_ranges = [((x1 - x_min + 1, x2 - x_min + 1), (y1 - y_min + 1, y2 - y_min + 1)) for ((x1, x2), (y1, y2)) in ranges]
# Stamp every clay vein ('#') into the grid.
for bound in corrected_ranges:
    ((x1, x2), (y1, y2)) = bound
    for x in range(x1, x2 + 1):
        for y in range(y1, y2 + 1):
            grid[y][x] = '#'
def fill(x, y):
    """Mark (x, y) as water ('~') when it lies strictly inside the grid."""
    in_bounds = 0 < x < x_max - x_min + 3 and 0 < y < y_max - y_min + 3
    if in_bounds:
        grid[y][x] = '~'
def is_bounded_grid(x, y):
    """True when the tile at (x, y) is settled water or clay."""
    return grid[y][x] in ('~', '#')
def is_clay_grid(x, y):
    """True when the tile at (x, y) is clay ('#')."""
    return grid[y][x] == '#'
def is_bounded(sym):
    """True for tile symbols that block downward flow: water or clay."""
    return sym in ('~', '#')
def spread_water(x_pts, next_pts, x, y):
    """Fill water sideways along row *y*, returning new active tiles.

    *x_pts* walks outward from x; *next_pts* is the same walk shifted one
    step further, used to look ahead for a clay wall. Returns [(y, pos)]
    if the water spills over an unsupported edge, [(y - 1, x)] if it hits
    a wall (water rises one row), or [] if it spreads to the grid edge.
    Mutates the module-level grid via fill().
    """
    for pos, next_pos in zip(x_pts, next_pts):
        fill(pos, y)
        if not is_bounded_grid(pos, y + 1) :
            # Nothing below this tile: the water spills down from here.
            return [(y, pos)]
        if is_clay_grid(next_pos, y):
            # Wall ahead: this row is contained, so water rises one row.
            return [(y - 1, x)]
    return []
def flow_out_left(x, y):
    """Spread water leftward from (x, y); see spread_water for the result."""
    return spread_water(range(x, 1, -1), range(x - 1, 0, -1), x, y)
def flow_out_right(x, y):
    """Spread water rightward from (x, y); see spread_water for the result."""
    return spread_water(range(x, len(grid[0]) - 1), range(x + 1, len(grid[0])), x, y)
def flow_out(x, y):
    """Spread water sideways from (x, y) and return the next active tiles.

    Of all overflow points reported by the left and right sweeps, only
    those at the greatest depth (largest y) remain active.
    """
    overflows = flow_out_left(x, y) + flow_out_right(x, y)
    if not overflows:
        return []
    deepest = max(row for row, _ in overflows)
    return [(row, col) for row, col in overflows if row == deepest]
def flow(water):
    """Advance one active water tile (y, x) one step.

    If the tile below is water/clay the water spreads sideways via
    flow_out(); otherwise it falls straight down. Either way the current
    tile is marked as water, and the successor active tiles are returned.
    """
    y, x = water
    if is_bounded(grid[y+1][x]):
        fill(x, y)
        return flow_out(x, y)
    else:
        fill(x, y)
        return [(y+1, x)]
def flow_all_water(water):
    """Advance every active water tile one step; collect all successors."""
    return [successor for tile in water for successor in flow(tile)]
# The spring is at x=500 in puzzle coordinates; start one active tile there.
active_water = [(0, 500 - x_min + 1)]
while len(active_water) > 0:
    # Deduplicate and drop tiles that have fallen past the bottom boundary.
    filtered_water = set([(y, x) for y, x in active_water if y < y_max - y_min + 2])
    active_water = flow_all_water(filtered_water)
    #print(active_water)
    #pprint(grid)
    #print()
# Count every tile the water reached.
print('Part 1: ', len([tile for row in grid for tile in row if tile == '~']))
|
13,665 | c74e1e44728427135d53f4fb397bbdf20a221a28 | from database_classes import StorageSystem, StorageSystemException
# Simple interactive login prompt against the storage system.
storage_reader = StorageSystem()
system = True
while system:
    username = input("What is your username? ")
    password = input("What is your password? ")
    # Query once instead of twice: the original issued the identical lookup
    # for the print and again for the None check, doubling the round trips.
    record = storage_reader.get_by_username_pw(username, password)
    print(record)
    # NOTE(review): a *found* record re-prompts and a missing one exits —
    # this mirrors the original control flow but looks inverted; confirm.
    if record is not None:
        continue
    system = False
|
13,666 | 9ee59db613194128ec097f50fb0681796254b457 | # encoding:utf-8
import matplotlib.pyplot as plt
import numpy as np
from sklearn.decomposition import PCA
def doPCA(data):
    """Fit a 2-component PCA model to *data* and return the fitted model."""
    model = PCA(n_components=2)
    model.fit(data)
    return model
if __name__ == "__main__":
    # Demo: fit PCA on six 2-D points and plot each point alongside its
    # projections onto the two principal components.
    X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]])
    pca = PCA(n_components=2)
    pca.fit(X)
    # Fraction of total variance captured by each principal component.
    print("explained_variance_ratio_: %s" % pca.explained_variance_ratio_)
    first_pc = pca.components_[0]  # first principal component as a vector, e.g. [-0.838 -0.545]
    print("first_pc: %s" % first_pc)
    second_pc = pca.components_[1]  # second principal component, e.g. [ 0.545 -0.838]
    print("second_pc: %s" % second_pc)
    transformed_data = pca.transform(X)
    # BUG FIX: "print transformed_data" was Python 2 statement syntax and a
    # SyntaxError under Python 3; print(...) behaves the same on both.
    print(transformed_data)
    for ii, jj in zip(transformed_data, X):
        print(first_pc[0]*ii[0], first_pc[1]*ii[0])
        # Point projected onto the first principal component (dot product), in red.
        plt.scatter(first_pc[0]*ii[0], first_pc[1]*ii[0], color='r')
        # Point projected onto the second principal component, in cyan.
        plt.scatter(second_pc[0]*ii[1], second_pc[1]*ii[1], color='c')
        # Original data point, in blue.
        plt.scatter(jj[0], jj[1], color='b')
    plt.xlabel("x")
    plt.ylabel("y")
    plt.show()
|
13,667 | 4c633ca84646e32b5168d2743d1caada014438e2 | """Python/Flask prgram to integrate recipe_program.py with GUI (homepage.html)"""
from flask import Flask, render_template, request, jsonify
import urllib # urlencode function
import urllib2 # urlopen function (better than urllib version)
import json
from pickle import dump, load
from os.path import exists
from recipe_program import *
import ast
# Flask app plus module-level state shared between routes.
flask_recipes = Flask(__name__)
# NOTE(review): time_global is initialised to the *type* int, not a value;
# timed_recipes() must run before food_page() or get_timed_recipes receives
# the int type itself — confirm this ordering is guaranteed by the UI.
time_global = int
current_user = None
########################### MONGOLAB ##############
# Sets up server
# SECURITY NOTE(review): database credentials are hard-coded in source;
# consider moving them to environment variables or a config file.
server = 'ds061661.mongolab.com'
port = 61661
db_name = 'recipe_program_db'
username = 'anisha'
password = 'recipe'
import pymongo
from pymongo import MongoClient
url = "mongodb://"+username+":"+password+"@"+server+":"+str(port)+"/"+db_name
# Initializes Database
client = MongoClient(url)
db = client[db_name]  # Get the database
db.authenticate(username, password)  # Authenticate
posts = db.posts  # Get the things in the db
@flask_recipes.route('/')
def homepage():
    """Render the initial HTML page."""
    return render_template('homepage.html')
@flask_recipes.route('/_make_user') # make name
def make_user():
    """Create a User from the HTML form input.

    Returns JSON with the user's name and a human-readable summary of
    what is currently in their pantry (or " No Pantry" when empty).
    """
    names = request.args.get('names', 1, type=str)  # raw text input from HTML page
    global db
    global current_user
    current_user = User(names, db)
    # Adding the user to the db occurs in the user class,
    # only in the get_pantry method
    str_pantry = current_user.get_pantry()
    if str_pantry == "":  # current user has no stored pantry yet
        return jsonify(name=current_user.name, pantry=" No Pantry")
    # The pantry is stored as the repr of a Python list of strings.
    list_ingredients = ast.literal_eval(str_pantry)
    # str.join replaces the original quadratic concatenation loop and also
    # handles an empty list without raising IndexError.
    str_pantry = " Pantry: " + ", ".join(list_ingredients)
    return jsonify(name=current_user.name, pantry=str_pantry)
@flask_recipes.route('/_update_pantry') # add ingredients
def update_pantry():
    """Add the submitted ingredients to the current user's pantry and persist it."""
    pantry_ingredients = request.args.get('pantry', '', type=str)  # raw input from HTML page of ingredients
    global current_user
    current_user.pantry.make_pantry(pantry_ingredients)  # calls recipe_program function make_pantry()
    current_user.pantry.save_pantry()
    return jsonify(pantry = pantry_ingredients);  # echo the new pantry ingredients back to the HTML page
@flask_recipes.route('/_timed_recipes') # make name
def timed_recipes():
    """Store the max total cook time for later searches and confirm it."""
    time = request.args.get('time', 0, type=int)  # raw input from HTML page
    global time_global
    time_global = time  # consumed by food_page() when searching recipes
    return jsonify(cooktime=time_global)  # confirm the received time
@flask_recipes.route('/_food_page') # add ingredients
def food_page():
    """Search for recipes cookable with the submitted ingredients.

    Returns JSON lists (names, ids, picture URLs, cook times in minutes)
    used by the front end to build the results page. Relies on
    time_global having been set by the timed_recipes() route.
    """
    fridge_ingredients = request.args.get('b', 0, type=str)  # raw input from HTML form
    global current_user
    current_user.fridge.make_fridge(fridge_ingredients)
    recipe_dictionaries = current_user.get_timed_recipes(time_global)
    recipe_names = []
    recipe_ids = []
    recipe_pics = []
    cooktimes = []
    # Iterate the recipe dicts directly instead of indexing via range(len(...)).
    for recipe in recipe_dictionaries:
        recipe_names.append(recipe['recipeName'].encode('ascii', 'ignore'))
        recipe_ids.append(recipe['id'].encode('ascii', 'ignore'))  # used to generate links
        recipe_pics.append(recipe['imageUrlsBySize']['90'].encode('ascii', 'ignore'))
        cooktimes.append(int(recipe['totalTimeInSeconds'] / 60.0))
    # Swap the 90px-thumbnail URL suffix for the 250px cropped variant.
    new_pics = [pic[:-4] + '250-c' for pic in recipe_pics]
    return jsonify(names=recipe_names, ids=recipe_ids, pics=new_pics, times=cooktimes)
if __name__ == '__main__':
    # Serve on all interfaces, port 8081, with the debugger enabled.
    flask_recipes.run(host="0.0.0.0",port=int("8081"),debug=True)
|
13,668 | 076849b44c06c17bee7cf5549c28c34a0980ad36 | from .ygritte import Ygritte |
13,669 | 5a57d0c13b0b9d8abdeffbabe3b40c2883da03e1 | """Run through the Picket Fence image bank."""
from unittest import TestCase
from pylinac import PicketFence
from tests_basic.utils import DataBankMixin
def run_pf(path):
    """Process one picket fence image; return "Success" or a failure message.

    Passed to the process-pool executor. A ValueError from the first
    analysis triggers a single retry with filter=3.
    """
    try:
        _analyze_pf(path)
        return "Success"
    except ValueError:
        try:
            _analyze_pf(path, filter=3)
            return "Success"
        except Exception as e:
            # BUG FIX: the retry originally caught only (ValueError,), so a
            # max-error Exception raised on the filtered pass escaped run_pf
            # instead of being reported as a failure string.
            return f"Failure: {e} @ {path}"
    except Exception as e:
        return f"Failure: {e} @ {path}"


def _analyze_pf(path, **kwargs):
    """Run one picket fence analysis; raise if the peak error exceeds 1.2mm."""
    mypf = PicketFence(path, **kwargs)
    mypf.analyze()
    if mypf.max_error > 1.2:
        raise Exception("Max MLC peak error > 1.2mm")
class PicketFenceTestBank(DataBankMixin, TestCase):
    """Run run_pf over every DICOM file in the Picket Fences data bank."""
    DATA_DIR = ["Picket Fences"]
    write_failures_to_file = True

    def file_should_be_processed(self, filepath):
        # Only DICOM files in the bank are picket fence images.
        return filepath.endswith(".dcm")

    def test_all(self):
        super().test_all(run_pf)
|
13,670 | 67559cf4b4626d4a1fbe61fe32f2998ccf6ff832 | import logging
import os
import configparser
import copy
from enum import Enum
from generic_organization_service.interfaces.responses.generic_response import Description
from antidote import register
from generic_organization.settings import BASE_CODE_FOLDER, TEMPLATE_FOLDER_LIST
logger = logging.getLogger(__name__)
class DescriptionMessagesCodes:
    """Numeric codes identifying localized status/error descriptions.

    NOTE(review): deliberately plain int class attributes rather than an
    Enum — DescriptionHandler keys its lookup table with ``str(code)``,
    so each value must stringify as a bare integer.
    """
    UNDEFINED = -1
    VERIFY_EXECUTED_SUCCESSFULLY = 1
    PROCESSING_ERROR = 2
    ERROR_DURING_ISSUE_CONTACT_SUPPORT = 97
    ERROR_DURING_VERIFY_CONTACT_SUPPORT = 98
    ERROR_DURING_VERIFY = 99
@register(singleton=True)
class DescriptionHandler:
    """Singleton that loads localized message descriptions from .lang files.

    Descriptions are read from the directory named by the I18N_PATH
    environment variable plus every non-template "i18n" folder under
    BASE_CODE_FOLDER. The table maps a string message code to a list of
    Description(language, message) objects.
    """
    def __init__(self):
        self.descriptions = dict()
        # Fallback returned when a code has no loaded descriptions.
        # (Attribute name "unaivalable" is a long-standing typo, kept as-is.)
        self.unaivalable_description = list()
        self.unaivalable_description.append(Description("it", "Descrizione errore non disponibile"))
        self.unaivalable_description.append(Description("en", "Error description not available"))
        descriptions_path = os.getenv('I18N_PATH', None)
        self.__load_descriptions(descriptions_path)
        for root, dirs, files in os.walk(BASE_CODE_FOLDER, topdown=False,
                                         followlinks=False):
            if root.endswith("i18n") and not self.__is_template(root):
                logger.info("Add description from folder: %s", root)
                self.__load_descriptions(root)
    def __is_template(self, folder):
        # Folders under any of the configured template roots are skipped.
        for template in TEMPLATE_FOLDER_LIST:
            if template in folder:
                return True
        return False
    def __load_descriptions(self, descriptions_path):
        # Each .lang file is an INI file: sections are language codes and
        # each key is a message code mapped to its localized text.
        if descriptions_path:
            for filename in os.listdir(descriptions_path):
                config = configparser.ConfigParser()
                if filename.endswith(".lang"):
                    path_join = os.path.join(descriptions_path, filename)
                    config.read(path_join, encoding='utf-8')
                    for section in config.sections():
                        for key in config[section]:
                            description_elements = self.descriptions.get(key, None)
                            if not description_elements:
                                description_elements = list()
                            description_element = self.descriptions.get(key, None)
                            if not description_element:
                                self.descriptions[key] = description_elements
                            else:
                                # NOTE(review): this warns that a duplicate key
                                # "will not be added", yet the append below runs
                                # unconditionally, so the description IS added.
                                # Confirm intent and fix the warning or the append.
                                logger.error("The key: %s is already present and it will not be added", key)
                            desc = Description(section, config[section][key])
                            description_elements.append(desc)
    def get_descriptions(self, description_message_code: DescriptionMessagesCodes, *args):
        """Return the Description list for a code, formatting with *args*.

        A deep copy is formatted when args are given so the cached table is
        never mutated; unknown codes fall back to the generic descriptions.
        """
        description_list = self.descriptions.get(str(description_message_code), None)
        if args:
            description_list = copy.deepcopy(description_list)
        if description_list:
            for description in description_list:
                try:
                    description.message = description.message.format(*args)
                except IndexError as ie:
                    logger.error("String formatting error for description_message_code: %s - "
                                 "description: %s, provided format list values: %s",
                                 str(description_message_code), description, *args)
        else:
            logger.warning("Description not found for error code: %s", str(description_message_code))
            description_list = self.unaivalable_description
        return description_list
13,671 | 1801e002dcc5cf71b5d7f96980fd1ec042c1df1f |
def convert(filepath,savepath):
content = "@relation train_file\n"
with open(filepath,mode="r") as file:
lines = file.readlines()
for i in range(lines.__len__()):
if i == 0:
tmp = lines[0].split(",")
for j in range(tmp.__len__()-1):
tmp_string = tmp[j]
content+= "@attribute "+tmp_string+" numeric\n"
content+="@attribute class {0,1}\n@data \n"
else:
content+=str(lines[i])[:-1]+"\n"
with open(savepath,mode="w") as file:
file.write(content)
convert("/home/czb/workspace/Summarizor/src1/train.csv","/home/czb/workspace/Summarizor/src1/train.arff")
|
13,672 | 7b7ad917b04f948ebae174eb6e34eaff25cdfeae | #!/usr/bin/env python3
"""
concatenating of two matrices with specific axis
"""
def cat_matrices2D(mat1, mat2, axis=0):
"""
enter a matrix
and Returns a list of concatenated matrices
"""
if (len(mat1[0]) == len(mat2[0])) and (axis == 0):
concat = [ele.copy() for ele in mat1]
concat += [ele.copy() for ele in mat2]
return concat
elif (len(mat1) == len(mat2)) and (axis == 1):
concat = [mat1[j] + mat2[j] for j in range(len(mat1))]
return concat
else:
return None
|
13,673 | dab9cd3fce7fe8d0da2f3883b487007c396bdb87 | """
============================
Author:赵健
Date:2019-09-01
Time:16:28
E-mail:948883947@qq.com
File:constants.py
============================
"""
import os
# 项目路径
OB_DIR = os.path.dirname(os.path.dirname(__file__))
# 测试用例表格存储路径
DATA_DIR = os.path.join(OB_DIR, 'data')
# 配置文件存储路径
CF_DIR = os.path.join(OB_DIR, 'configs')
# 日志存储路径
LOGS_DIR = os.path.join(OB_DIR, 'logs')
# 测试报告存储路径
REPORT_DIR = os.path.join(OB_DIR, 'report')
# 测试用例类存储路径
CASES_DIR = os.path.join(OB_DIR, 'testcases') |
13,674 | 9321af9002df719d383a7f4ad6335c77b1cd6127 | import time
from enum import IntEnum
from typing import Any, ClassVar, Dict, List, TypeVar, Union
from pymilvus.exceptions import (
AutoIDException,
ExceptionsMessage,
InvalidConsistencyLevel,
)
from pymilvus.grpc_gen import common_pb2
from pymilvus.grpc_gen import milvus_pb2 as milvus_types
Status = TypeVar("Status")
ConsistencyLevel = common_pb2.ConsistencyLevel
class Status:
    """Response status returned by Milvus operations.

    :attribute code: int (optional) numeric status code, default SUCCESS
    :attribute message: str (optional) current status message

    NOTE(review): __eq__ is defined without __hash__, which makes Status
    instances unhashable in Python 3 — confirm they are never used as
    dict keys or set members.
    """
    SUCCESS = common_pb2.Success
    UNEXPECTED_ERROR = common_pb2.UnexpectedError
    CONNECT_FAILED = 2
    PERMISSION_DENIED = 3
    COLLECTION_NOT_EXISTS = 4
    ILLEGAL_ARGUMENT = 5
    ILLEGAL_RANGE = 6
    ILLEGAL_DIMENSION = 7
    ILLEGAL_INDEX_TYPE = 8
    ILLEGAL_COLLECTION_NAME = 9
    ILLEGAL_TOPK = 10
    ILLEGAL_ROWRECORD = 11
    ILLEGAL_VECTOR_ID = 12
    ILLEGAL_SEARCH_RESULT = 13
    FILE_NOT_FOUND = 14
    META_FAILED = 15
    CACHE_FAILED = 16
    CANNOT_CREATE_FOLDER = 17
    CANNOT_CREATE_FILE = 18
    CANNOT_DELETE_FOLDER = 19
    CANNOT_DELETE_FILE = 20
    BUILD_INDEX_ERROR = 21
    ILLEGAL_NLIST = 22
    ILLEGAL_METRIC_TYPE = 23
    OUT_OF_MEMORY = 24
    INDEX_NOT_EXIST = 25
    EMPTY_COLLECTION = 26

    def __init__(self, code: int = SUCCESS, message: str = "Success") -> None:
        self.code = code
        self.message = message

    def __repr__(self) -> str:
        attr_list = [f"{key}={value}" for key, value in self.__dict__.items()]
        return f"{self.__class__.__name__}({', '.join(attr_list)})"

    def __eq__(self, other: Union[int, Status]):
        """Make Status comparable with self by code"""
        if isinstance(other, int):
            return self.code == other
        return isinstance(other, self.__class__) and self.code == other.code

    def OK(self):
        # True when the operation succeeded.
        return self.code == Status.SUCCESS
class DataType(IntEnum):
    """Numeric codes for Milvus field data types."""
    NONE = 0
    BOOL = 1
    INT8 = 2
    INT16 = 3
    INT32 = 4
    INT64 = 5
    FLOAT = 10
    DOUBLE = 11
    STRING = 20
    VARCHAR = 21
    JSON = 23
    BINARY_VECTOR = 100
    FLOAT_VECTOR = 101
    UNKNOWN = 999
class RangeType(IntEnum):
    """Comparison operators used in range queries."""
    LT = 0   # less than
    LTE = 1  # less than or equal
    EQ = 2   # equal
    GT = 3   # greater than
    GTE = 4  # greater than or equal
    NE = 5   # not equal
class IndexType(IntEnum):
    """Numeric codes for vector index types."""
    INVALID = 0
    FLAT = 1
    IVFLAT = 2
    IVF_SQ8 = 3
    RNSG = 4
    IVF_SQ8H = 5
    IVF_PQ = 6
    HNSW = 11
    ANNOY = 12
    # alternative name (enum aliases sharing the same value)
    IVF_FLAT = IVFLAT
    IVF_SQ8_H = IVF_SQ8H
    def __repr__(self) -> str:
        return f"<{self.__class__.__name__}: {self._name_}>"
    def __str__(self) -> str:
        return self._name_
class MetricType(IntEnum):
    """Numeric codes for vector distance metrics."""
    INVALID = 0
    L2 = 1
    IP = 2
    # Only supported for byte vectors
    HAMMING = 3
    JACCARD = 4
    TANIMOTO = 5
    # Structure-match metrics
    SUBSTRUCTURE = 6
    SUPERSTRUCTURE = 7
    def __repr__(self) -> str:
        return f"<{self.__class__.__name__}: {self._name_}>"
    def __str__(self) -> str:
        return self._name_
class IndexState(IntEnum):
    """Integer codes for the state of an index-build task."""

    IndexStateNone = 0
    Unissued = 1
    InProgress = 2
    Finished = 3
    Failed = 4
    Deleted = 5
class PlaceholderType(IntEnum):
    """Integer codes for placeholder value types (vector payload kinds)."""

    NoneType = 0
    BinaryVector = 100
    FloatVector = 101
class State(IntEnum):
    """Compaction job state.

    UndefiedState: unknown (historical spelling kept for compatibility).
    Executing: this compaction still has undone plans.
    Completed: every plan of this compaction is done, successful or not.
    """

    UndefiedState = 0
    Executing = 1
    Completed = 2

    @staticmethod
    def new(s: int):
        """Map a raw integer onto a member, defaulting to ``UndefiedState``."""
        for member in (State.Executing, State.Completed):
            if s == member:
                return member
        return State.UndefiedState

    def __repr__(self) -> str:
        """Return ``<State: NAME>``."""
        return f"<{type(self).__name__}: {self.name}>"

    def __str__(self) -> str:
        """Return just the member name."""
        return self.name
class LoadState(IntEnum):
    """Load status of a collection or partition.

    NotExist: the collection or partition does not exist
    NotLoad: it exists but is not loaded
    Loading: it is currently being loaded
    Loaded: it is fully loaded
    """

    NotExist = 0
    NotLoad = 1
    Loading = 2
    Loaded = 3

    def __repr__(self) -> str:
        """Return ``<LoadState: NAME>``."""
        return f"<{type(self).__name__}: {self.name}>"

    def __str__(self) -> str:
        """Return just the member name."""
        return self.name
class CompactionState:
    """Progress snapshot of a compaction job.

    Attributes:
        compaction_id: identifier of the compaction job.
        state: overall job state.
        in_executing: number of plans still executing.
        in_timeout: number of plans that failed with a timeout.
        completed: number of plans that finished successfully.
    """

    def __init__(
        self,
        compaction_id: int,
        state: State,
        in_executing: int,
        in_timeout: int,
        completed: int,
    ) -> None:
        # Store every counter under its public attribute name.
        for field, value in (
            ("compaction_id", compaction_id),
            ("state", state),
            ("in_executing", in_executing),
            ("in_timeout", in_timeout),
            ("completed", completed),
        ):
            setattr(self, field, value)

    def __repr__(self) -> str:
        return f"""
CompactionState
 - compaction id: {self.compaction_id}
 - State: {self.state}
 - executing plan number: {self.in_executing}
 - timeout plan number: {self.in_timeout}
 - complete plan number: {self.completed}
"""
class Plan:
    """One merge step of a compaction: combine ``sources`` into ``target``."""

    def __init__(self, sources: list, target: int) -> None:
        self.sources, self.target = sources, target

    def __repr__(self) -> str:
        description = f"""
Plan:
- sources: {self.sources}
- target: {self.target}
"""
        return description
class CompactionPlans:
    """Aggregate of merge plans produced by one compaction job."""

    def __init__(self, compaction_id: int, state: int) -> None:
        self.compaction_id = compaction_id
        # Normalize the raw integer into a State member.
        self.state = State.new(state)
        self.plans = []

    def __repr__(self) -> str:
        return f"""
Compaction Plans:
 - compaction id: {self.compaction_id}
 - state: {self.state}
 - plans: {self.plans}
"""
def cmp_consistency_level(l1: Union[str, int], l2: Union[str, int]):
    """Return True iff the two consistency levels denote the same level.

    Each argument may be a level name (str) or its integer value; anything
    that fails to resolve to a known level makes the comparison False.
    """
    invalid = object()  # sentinel: distinguishes "unresolvable" from any real value

    def _canon(level):
        if isinstance(level, str):
            try:
                return ConsistencyLevel.Value(level)
            except ValueError:
                return invalid
        if isinstance(level, int) and level not in ConsistencyLevel.values():
            return invalid
        return level

    left, right = _canon(l1), _canon(l2)
    if left is invalid or right is invalid:
        return False
    return left == right
def get_consistency_level(consistency_level: Union[str, int]):
    """Normalize a consistency level given by name or value to its integer.

    Raises:
        InvalidConsistencyLevel: if the argument is neither a known level
            name nor a known level value.
    """
    if isinstance(consistency_level, int):
        if consistency_level not in ConsistencyLevel.values():
            raise InvalidConsistencyLevel(message=f"invalid consistency level: {consistency_level}")
        return consistency_level
    if isinstance(consistency_level, str):
        try:
            return ConsistencyLevel.Value(consistency_level)
        except ValueError as e:
            raise InvalidConsistencyLevel(
                message=f"invalid consistency level: {consistency_level}"
            ) from e
    raise InvalidConsistencyLevel(message="invalid consistency level")
class Shard:
    """One shard of a collection: a channel name, its node ids and the leader."""

    def __init__(self, channel_name: str, shard_nodes: list, shard_leader: int) -> None:
        self._channel = channel_name
        self._nodes = set(shard_nodes)  # de-duplicate the node id list
        self._leader = shard_leader

    def __repr__(self) -> str:
        return (
            f"Shard: <channel_name:{self.channel_name}>, "
            f"<shard_leader:{self.shard_leader}>, <shard_nodes:{self.shard_nodes}>"
        )

    @property
    def channel_name(self) -> str:
        """Name of the channel this shard is attached to."""
        return self._channel

    @property
    def shard_nodes(self):
        """Set of node ids serving this shard."""
        return self._nodes

    @property
    def shard_leader(self) -> int:
        """Node id of the shard leader."""
        return self._leader
class Group:
    """One replica group: its shards, member nodes and resource-group info."""

    def __init__(
        self,
        group_id: int,
        shards: List[str],
        group_nodes: List[tuple],
        resource_group: str,
        num_outbound_node: dict,
    ) -> None:
        self._group_id = group_id
        self._shard_list = shards
        self._nodes = tuple(group_nodes)  # freeze the node list
        self._rg_name = resource_group
        self._outbound = num_outbound_node

    def __repr__(self) -> str:
        return (
            f"Group: <group_id:{self.id}>, <group_nodes:{self.group_nodes}>, "
            f"<shards:{self.shards}>, <resource_group: {self.resource_group}>, "
            f"<num_outbound_node: {self.num_outbound_node}>"
        )

    @property
    def id(self):
        """Identifier of this replica group."""
        return self._group_id

    @property
    def group_nodes(self):
        """Tuple of nodes belonging to the group."""
        return self._nodes

    @property
    def shards(self):
        """Shards covered by this group."""
        return self._shard_list

    @property
    def resource_group(self):
        """Name of the resource group hosting this replica group."""
        return self._rg_name

    @property
    def num_outbound_node(self):
        """Mapping describing outbound node counts."""
        return self._outbound
class Replica:
    """A collection replica described as a list of replica groups.

    ``repr`` renders one ``- <group>`` line per group under a header.
    """

    def __init__(self, groups: list) -> None:
        self._groups = groups

    def __repr__(self) -> str:
        return "Replica groups:" + "".join(f"\n- {group}" for group in self._groups)

    @property
    def groups(self):
        """The replica groups making up this replica."""
        return self._groups
class BulkInsertState:
    """enum states of bulk insert task

    Snapshot of one bulk-insert (import) task: its id, state, row count,
    auto-generated id ranges, creation timestamp and a server-supplied
    info dict (files, collection, partition, failure reason, progress).
    """

    ImportPending = 0
    ImportFailed = 1
    ImportStarted = 2
    ImportPersisted = 5
    ImportCompleted = 6
    ImportFailedAndCleaned = 7
    ImportUnknownState = 100

    """pre-defined keys of bulk insert task info"""
    FAILED_REASON = "failed_reason"
    IMPORT_FILES = "files"
    IMPORT_COLLECTION = "collection"
    IMPORT_PARTITION = "partition"
    IMPORT_PROGRESS = "progress_percent"

    """
    Bulk insert state example:
        - taskID    : 44353845454358,
        - state     : "BulkLoadPersisted",
        - row_count : 1000,
        - infos     : {"files": "rows.json",
                       "collection": "c1",
                       "partition": "",
                       "failed_reason": ""},
        - id_list   : [44353845455401, 44353845456401]
        - create_ts : 1661398759,
    """
    # Map protobuf import-state values onto the class constants above.
    state_2_state: ClassVar[Dict] = {
        common_pb2.ImportPending: ImportPending,
        common_pb2.ImportFailed: ImportFailed,
        common_pb2.ImportStarted: ImportStarted,
        common_pb2.ImportPersisted: ImportPersisted,
        common_pb2.ImportCompleted: ImportCompleted,
        common_pb2.ImportFailedAndCleaned: ImportFailedAndCleaned,
    }
    # Human-readable names for each state constant.
    state_2_name: ClassVar[Dict] = {
        ImportPending: "Pending",
        ImportFailed: "Failed",
        ImportStarted: "Started",
        ImportPersisted: "Persisted",
        ImportCompleted: "Completed",
        ImportFailedAndCleaned: "Failed and cleaned",
        ImportUnknownState: "Unknown",
    }

    # NOTE(review): ``state`` is annotated ``State`` but is looked up through
    # ``state_2_state`` (protobuf enum values) — confirm the intended type.
    def __init__(
        self,
        task_id: int,
        state: State,
        row_count: int,
        id_ranges: list,
        infos: Dict,
        create_ts: int,
    ):
        self._task_id = task_id
        self._state = state
        self._row_count = row_count
        self._id_ranges = id_ranges
        self._create_ts = create_ts
        # ``infos`` arrives as key/value pair objects; flatten into a dict.
        self._infos = {kv.key: kv.value for kv in infos}

    def __repr__(self) -> str:
        fmt = """<Bulk insert state:
    - taskID          : {},
    - state           : {},
    - row_count       : {},
    - infos           : {},
    - id_ranges       : {},
    - create_ts       : {}
>"""
        return fmt.format(
            self._task_id,
            self.state_name,
            self.row_count,
            self.infos,
            self.id_ranges,
            self.create_time_str,
        )

    @property
    def task_id(self):
        """
        Return unique id of this task.
        """
        return self._task_id

    @property
    def row_count(self):
        """
        If the task is finished, this value is the number of rows imported.
        If the task is not finished, this value is the number of rows parsed.
        """
        return self._row_count

    @property
    def state(self):
        # Unknown protobuf values map onto ImportUnknownState.
        return self.state_2_state.get(self._state, BulkInsertState.ImportUnknownState)

    @property
    def state_name(self) -> str:
        # Human-readable name for the raw state value.
        return self.state_2_name.get(self._state, "unknown state")

    @property
    def id_ranges(self):
        """
        auto generated id ranges if the primary key is auto generated
        the id list of response is id ranges
        for example, if the response return [1, 100, 200, 250]
        the full id list should be [1, 2, 3 ... , 99, 100, 200, 201, 202 ... , 249, 250]

        NOTE(review): ``ids`` below expands each (begin, end) pair
        end-exclusively (``range(begin, end)``), which contradicts the
        inclusive example above — confirm the server's range semantics.
        """
        return self._id_ranges

    @property
    def ids(self):
        """
        auto generated ids if the primary key is auto generated
        the id list of response is id ranges
        for example, if the response return [1, 100, 200, 250], the id ranges: [1,100),[200,250)
        the full id list should be [1, 2, 3 ... , 99, 200, 201, 202 ... , 249]
        """
        # Ranges come as flat (begin, end) pairs, so the list length must be even.
        if len(self._id_ranges) % 2 != 0:
            raise AutoIDException(message=ExceptionsMessage.AutoIDIllegalRanges)
        ids = []
        for i in range(int(len(self._id_ranges) / 2)):
            begin = self._id_ranges[i * 2]
            end = self._id_ranges[i * 2 + 1]
            # end-exclusive expansion of the pair
            for j in range(begin, end):
                ids.append(j)
        return ids

    @property
    def infos(self):
        """more informations about the task, progress percentage, file path, failed reason, etc."""
        return self._infos

    @property
    def failed_reason(self):
        """failed reason of the bulk insert task."""
        return self._infos.get(BulkInsertState.FAILED_REASON, "")

    @property
    def files(self):
        """data files of the bulk insert task."""
        return self._infos.get(BulkInsertState.IMPORT_FILES, "")

    @property
    def collection_name(self):
        """target collection's name of the bulk insert task."""
        return self._infos.get(BulkInsertState.IMPORT_COLLECTION, "")

    @property
    def partition_name(self):
        """target partition's name of the bulk insert task."""
        return self._infos.get(BulkInsertState.IMPORT_PARTITION, "")

    @property
    def create_timestamp(self):
        """the integer timestamp when this task is created."""
        return self._create_ts

    @property
    def create_time_str(self):
        """A readable string converted from the timestamp when this task is created."""
        ts = time.localtime(self._create_ts)
        return time.strftime("%Y-%m-%d %H:%M:%S", ts)

    @property
    def progress(self):
        """working progress percent value."""
        percent = self._infos.get(BulkInsertState.IMPORT_PROGRESS, "0")
        return int(percent)
class GrantItem:
    """Flattened, read-only view of one privilege-grant entity."""

    def __init__(self, entity: Any) -> None:
        grantor = entity.grantor
        self._object = entity.object.name
        self._object_name = entity.object_name
        self._db_name = entity.db_name
        self._role_name = entity.role.name
        self._grantor_name = grantor.user.name
        self._privilege = grantor.privilege.name

    def __repr__(self) -> str:
        return (
            f"GrantItem: <object:{self.object}>, <object_name:{self.object_name}>, "
            f"<db_name:{self.db_name}>, "
            f"<role_name:{self.role_name}>, <grantor_name:{self.grantor_name}>, "
            f"<privilege:{self.privilege}>"
        )

    @property
    def object(self):
        """Kind of the granted object (e.g. Collection, Global)."""
        return self._object

    @property
    def object_name(self):
        """Name of the granted object."""
        return self._object_name

    @property
    def db_name(self):
        """Database the grant applies to."""
        return self._db_name

    @property
    def role_name(self):
        """Role receiving the privilege."""
        return self._role_name

    @property
    def grantor_name(self):
        """User who issued the grant."""
        return self._grantor_name

    @property
    def privilege(self):
        """Name of the granted privilege."""
        return self._privilege
class GrantInfo:
    """A list of grants, each wrapped as a :class:`GrantItem`.

    Non-GrantEntity inputs are silently skipped.
    """

    def __init__(self, entities: List[milvus_types.RoleEntity]) -> None:
        self._groups = [
            GrantItem(entity)
            for entity in entities
            if isinstance(entity, milvus_types.GrantEntity)
        ]

    def __repr__(self) -> str:
        return "GrantInfo groups:" + "".join(f"\n- {item}" for item in self._groups)

    @property
    def groups(self):
        """The wrapped grant items."""
        return self._groups
class UserItem:
    """A user name together with the names of the roles granted to it."""

    def __init__(self, username: str, entities: List[milvus_types.RoleEntity]) -> None:
        self._username = username
        # Keep only proper RoleEntity inputs; freeze the role names.
        self._roles = tuple(
            entity.name for entity in entities if isinstance(entity, milvus_types.RoleEntity)
        )

    def __repr__(self) -> str:
        return f"UserItem: <username:{self.username}>, <roles:{self.roles}>"

    @property
    def username(self):
        """The user's name."""
        return self._username

    @property
    def roles(self):
        """Tuple of role names granted to the user."""
        return self._roles
class UserInfo:
    """A list of users, each wrapped as a :class:`UserItem`.

    Non-UserResult inputs are silently skipped.
    """

    def __init__(self, results: List[milvus_types.UserResult]):
        self._groups = [
            UserItem(result.user.name, result.roles)
            for result in results
            if isinstance(result, milvus_types.UserResult)
        ]

    def __repr__(self) -> str:
        return "UserInfo groups:" + "".join(f"\n- {item}" for item in self._groups)

    @property
    def groups(self):
        """The wrapped user items."""
        return self._groups
class RoleItem:
    """A role name together with the names of the users holding it."""

    def __init__(self, role_name: str, entities: List[milvus_types.UserEntity]):
        self._role_name = role_name
        # Keep only proper UserEntity inputs; freeze the user names.
        self._users = tuple(
            entity.name for entity in entities if isinstance(entity, milvus_types.UserEntity)
        )

    def __repr__(self) -> str:
        return f"RoleItem: <role_name:{self.role_name}>, <users:{self.users}>"

    @property
    def role_name(self):
        """The role's name."""
        return self._role_name

    @property
    def users(self):
        """Tuple of user names holding the role."""
        return self._users
class RoleInfo:
    """A list of roles, each wrapped as a :class:`RoleItem`.

    Non-RoleResult inputs are silently skipped.
    """

    def __init__(self, results: List[milvus_types.RoleResult]) -> None:
        self._groups = [
            RoleItem(result.role.name, result.users)
            for result in results
            if isinstance(result, milvus_types.RoleResult)
        ]

    def __repr__(self) -> str:
        return "RoleInfo groups:" + "".join(f"\n- {item}" for item in self._groups)

    @property
    def groups(self):
        """The wrapped role items."""
        return self._groups
class ResourceGroupInfo:
    """Read-only snapshot of a resource group's capacity and node usage."""

    # Attribute names copied verbatim from the source object.
    _FIELDS = (
        "name",
        "capacity",
        "num_available_node",
        "num_loaded_replica",
        "num_outgoing_node",
        "num_incoming_node",
    )

    def __init__(self, resource_group: Any) -> None:
        for attr in self._FIELDS:
            setattr(self, f"_{attr}", getattr(resource_group, attr))

    def __repr__(self) -> str:
        return f"""ResourceGroupInfo:
<name:{self.name}>,
<capacity:{self.capacity}>,
<num_available_node:{self.num_available_node}>,
<num_loaded_replica:{self.num_loaded_replica}>,
<num_outgoing_node:{self.num_outgoing_node}>,
<num_incoming_node:{self.num_incoming_node}>"""

    @property
    def name(self):
        """Name of the resource group."""
        return self._name

    @property
    def capacity(self):
        """Configured node capacity."""
        return self._capacity

    @property
    def num_available_node(self):
        """Nodes currently available."""
        return self._num_available_node

    @property
    def num_loaded_replica(self):
        """Replicas loaded in this group."""
        return self._num_loaded_replica

    @property
    def num_outgoing_node(self):
        """Nodes lent out to other groups."""
        return self._num_outgoing_node

    @property
    def num_incoming_node(self):
        """Nodes borrowed from other groups."""
        return self._num_incoming_node
|
13,675 | 812719671ca2ce668a25674d7a82ac62913df920 | # python3
import sys
import math
import unittest
import io
import os
class SeparatingClustersDistanceChecker:
    """Computes the maximum spacing of a k-clustering of 2-D points.

    Input is read from stdin in the constructor: n, then n lines of integer
    "x y" coordinates, then k.  The answer is the largest possible minimum
    distance between points of different clusters when the points are split
    into k clusters (single-linkage / Kruskal argument).
    """

    def __init__(self):
        self.n = int(sys.stdin.readline())
        self.points = [tuple(map(int, sys.stdin.readline().split())) for _ in range(self.n)]
        self.k = int(sys.stdin.readline())
        # Min-heap over all O(n^2) pairwise distances, keyed by edge length.
        self.edge_priority_queue = Heap()
        for vertex_one in range(self.n):
            for vertex_two in range(vertex_one + 1, self.n):
                edge_len = self.euclidean_dist(vertex_one, vertex_two)
                self.edge_priority_queue.insert((vertex_one, vertex_two), edge_len)
        self.union_find_checker = UnionFind(self.n)

    def euclidean_dist(self, idx_1, idx_2):
        # Straight-line distance between the two stored points.
        return math.sqrt((self.points[idx_1][0] - self.points[idx_2][0])**2 +
                         (self.points[idx_1][1] - self.points[idx_2][1])**2)

    def find_max_separating(self):
        # a la Kruskal
        # Repeatedly merge the two closest clusters.  The loop condition keeps
        # merging while we still have >= k clusters, so the final merge (going
        # from k clusters to k-1) pops the shortest inter-cluster edge of the
        # k-clustering — that length is the maximum spacing.
        number_of_clusters = self.n
        while number_of_clusters >= self.k:
            (vertex_1, vertex_2), weight = self.edge_priority_queue.pop_min()
            # Skip edges whose endpoints were already merged into one cluster.
            while self.union_find_checker.belong_to_one_group(vertex_1, vertex_2):
                (vertex_1, vertex_2), weight = self.edge_priority_queue.pop_min()
            self.union_find_checker.union(vertex_1, vertex_2)
            number_of_clusters -= 1
        return weight

    def do_job(self):
        # Print the answer with 9 decimal places, as the judge expects.
        print('{:.9f}'.format(self.find_max_separating()))
class UnionFind:
    """Disjoint-set (union-find) over ``n`` elements with path compression
    and union by size (ties broken deterministically by the larger index)."""

    def __init__(self, n):
        self.size = n
        self.parent = list(range(self.size))
        # rank[i] holds the size of the group rooted at i.  Fixed: it was
        # initialised to 0, so every size stayed 0 forever and union-by-size
        # silently degenerated to union-by-index.
        self.rank = [1] * self.size

    def find_representer(self, idx):
        """Return the root of ``idx``'s group, compressing the whole path."""
        root = idx
        while root != self.parent[root]:
            root = self.parent[root]
        # Fixed: the original compression loop read self.parent[idx] *after*
        # overwriting it with the root, so only the first node on the path was
        # re-pointed.  Remember the next node before rewriting the link.
        while idx != root:
            next_idx = self.parent[idx]
            self.parent[idx] = root
            idx = next_idx
        return root

    def union(self, idx_1, idx_2):
        """Merge the groups containing ``idx_1`` and ``idx_2`` (no-op if equal)."""
        rep_1 = self.find_representer(idx_1)
        rep_2 = self.find_representer(idx_2)
        if rep_1 == rep_2:
            return
        # Attach the smaller group under the larger one; the (size, index)
        # key makes the choice deterministic when sizes are equal.
        smaller = min(rep_1, rep_2, key=lambda x: (self.rank[x], x))
        larger = max(rep_1, rep_2, key=lambda x: (self.rank[x], x))
        self.parent[smaller] = larger
        self.rank[larger] += self.rank[smaller]

    def belong_to_one_group(self, idx_1, idx_2):
        """True iff the two indices currently share a representative."""
        return self.find_representer(idx_1) == self.find_representer(idx_2)
class Heap:
    """Array-backed binary min-heap keyed by value, carrying an id payload."""

    def __init__(self):
        self.heap_values = []
        self.heap_identifiers = []

    def swap_at_positions(self, idx1, idx2):
        """Exchange the entries at the two positions in both parallel arrays."""
        vals, ids = self.heap_values, self.heap_identifiers
        vals[idx1], vals[idx2] = vals[idx2], vals[idx1]
        ids[idx1], ids[idx2] = ids[idx2], ids[idx1]

    def heapify(self, idx=0):
        """Sift the element at ``idx`` down until the heap property holds."""
        vals = self.heap_values
        count = len(vals)
        while True:
            smallest = idx
            left, right = 2 * idx + 1, 2 * idx + 2
            if left < count and vals[left] < vals[smallest]:
                smallest = left
            if right < count and vals[right] < vals[smallest]:
                smallest = right
            if smallest == idx:
                return
            self.swap_at_positions(idx, smallest)
            idx = smallest

    def move_up(self, idx):
        """Sift the element at ``idx`` up toward the root."""
        vals = self.heap_values
        parent = (idx - 1) // 2
        while parent >= 0 and vals[idx] < vals[parent]:
            self.swap_at_positions(parent, idx)
            idx, parent = parent, (parent - 1) // 2

    def insert(self, identifier, value):
        """Add a (identifier, value) entry and restore the heap property."""
        self.heap_identifiers.append(identifier)
        self.heap_values.append(value)
        self.move_up(len(self.heap_values) - 1)

    def pop_min(self):
        """Remove and return the (identifier, value) pair with the smallest value."""
        top = (self.heap_identifiers[0], self.heap_values[0])
        last = len(self.heap_values) - 1
        if last > 0:
            self.swap_at_positions(0, last)
        self.heap_identifiers.pop()
        self.heap_values.pop()
        if last > 0:
            self.heapify()
        return top

    def non_empty(self):
        """True while the heap still holds entries."""
        return bool(self.heap_values)
class Tester(unittest.TestCase):
    """File-driven regression tests.

    Every file under ./tests that does not end in ``.a`` is a stdin input;
    the matching ``<name>.a`` file holds the expected stdout.
    """

    def test_all_scenarios(self):
        path_to_test_cases = os.path.join(os.getcwd(), 'tests')
        # Inputs are every regular file that is not an answer (".a") file.
        input_file_names = [f for f in os.listdir(path_to_test_cases)
                            if os.path.isfile(os.path.join(path_to_test_cases, f)) and f[-2:] != '.a']
        # Swap the std streams so the checker reads the case file and we can
        # capture what it prints; restored unconditionally in the finally block.
        current_stdin = sys.stdin
        current_stdout = sys.stdout
        try:
            for file in input_file_names:
                file_path = os.path.join(path_to_test_cases, file)
                sys.stdout = io.StringIO()
                with open(file_path, 'r') as file_object:
                    sys.stdin = file_object
                    worker_instance = SeparatingClustersDistanceChecker()
                    worker_instance.do_job()
                instance_output = sys.stdout.getvalue()
                answer_file_path = os.path.join(path_to_test_cases, file + '.a')
                with open(answer_file_path, 'r') as answer_file_object:
                    correct_output = answer_file_object.read()
                # An output of a lone newline counts as empty output.
                self.assertEqual(instance_output if instance_output != '\n' else '',
                                 correct_output, 'test on file ' + file)
        finally:
            sys.stdin = current_stdin
            sys.stdout = current_stdout
if __name__ == '__main__':
    # unittest.main()  # flip to this line to run the file-based test suite
    # Default mode: solve a single instance read from stdin.
    worker = SeparatingClustersDistanceChecker()
    worker.do_job()
|
13,676 | 84549412fbf4fe8117b8b67667fc5fb8c94976b8 | """
챕터: day6
주제: 정규식
문제: 정규식 기호 연습
작성자: 주동석
작성일: 2018. 11. 22
"""
import re
"""
1. apple에 a가 들어있는지 확인
2. apple에 b가 들어있는지 확인
3. 정규식을 이용하여, 사용자가 입력한 영어 문장에서 a, e, i, o, u가 포함되어 있는지 찾아서 출력하시오. 만족하는 첫번째만 출력한다.
<입력> This is a test.
"""
s1 = "apple"
if re.search("a", s1):
print("a가 들어있습니다.")
else:
print("b가 들어있지 않습니다.")
if re.search("b", s1):
print("b가 들어있습니다.")
else:
print("b가 들어있지 않습니다.")
s2 = input("영어 문장을 입력해 주세요: ")
print(re.search('[aeiou]', s2))
"""
4. 입력한 단어가 a로 시작하는지 확인
5. 입력한 단어가 e로 끝나는지 검사
"""
s3 = input("단어 하나를 입력해 주세요: ")
print(re.search('^a', s3))
print(re.search('e$', s3))
"""
7. 입력된 문장에서 숫자분분을 모두 출력하라.
A. 입력 예: 2017년 3월 8일 5000원
B. 출력 예:
2017
3
8
5000
"""
s4 = input("문장을 입력해 주세요: ")
l = re.findall("\d+", s4)
for i in l:
print(i)
"""
10. 입력된 문장에서 <이후에 나오는 단어들을 출력하라.>
A. 입력 예: <2015> <김일수> <성공회대학교>
"""
s5 = input("문장을 입력해 주세요:")
l = re.findall("^<$\"", s5)
for i in l:
print(i) |
13,677 | bbea4d09e520d06a22c7e04eae3af5bd7abfb8bb | import io_helper
import logging
import sys
import subprocess
import os
IMAGEDIR = ''  # NOTE(review): placeholder — must point at the directory holding the .png images
SCPPATH = ''   # NOTE(review): placeholder — path of the uttid -> recording-path scp file
UTTID = sys.argv[1]  # utterance id to display; raises IndexError if no argument is given

def main():
    """Look up the recording for UTTID and open its rendered image with eog."""
    utt2recpath = io_helper.parse_dictfile(SCPPATH)
    recpath = utt2recpath[UTTID]
    imagename = io_helper.path2uttid(recpath)
    imagepath = os.path.join(IMAGEDIR, '%s.png' % imagename)
    # NOTE(review): shell=True interpolates the path into a shell string;
    # subprocess.run(['eog', imagepath]) would avoid quoting issues.
    subprocess.run('eog %s' % imagepath, shell=True)

if __name__ == '__main__':
    logging.basicConfig(format='[%(filename)s:%(lineno)d] %(message)s', level=logging.DEBUG)
    main()
13,678 | b9506a7ebc84ca2cd721e8cc7a293aa7c820d249 | import unittest
from appmap._implementation import testing_framework
# Keep a reference to the stock TestCase.run so the patched version below
# can delegate to it.
original_run = unittest.TestCase.run
session = testing_framework.session('unittest')

def get_test_location(cls, method_name):
    """Resolve the source location of ``cls.method_name`` via appmap helpers."""
    from appmap._implementation.utils import get_function_location
    fn = getattr(cls, method_name)
    return get_function_location(fn)

def run(self, result=None):
    """Replacement for TestCase.run that records each test in an appmap session."""
    # The test method name is the last dotted component of the test id.
    method_name = self.id().split('.')[-1]
    # Use the result's location if provided (e.g. by pytest),
    # otherwise cobble one together ourselves.
    if hasattr(result, 'location'):
        location = result.location
    else:
        location = get_test_location(self.__class__, method_name)
    with session.record(
        self.__class__,
        method_name,
        location=location):
        original_run(self, result)

# Monkey-patch: every TestCase executed after this import is recorded.
unittest.TestCase.run = run

if __name__ == '__main__':
    unittest.main(module=None)
13,679 | 2ade7817616eff6ea2bf212ebc3c46dc84536f34 | atomic_weights = {
'C': 12,
'O': 16,
'H': 1,
'N': 14,
'S': 32,
}
# One-letter -> three-letter (lowercase) amino acid codes.
amino_acids = {
    'A': 'ala',
    'R': 'arg',
    'N': 'asn',
    'D': 'asp',
    'C': 'cys',
    'E': 'glu',
    'Q': 'gln',
    'G': 'gly',
    'H': 'his',
    'I': 'ile',
    'L': 'leu',
    'K': 'lys',
    'M': 'met',
    'F': 'phe',
    'P': 'pro',
    'S': 'ser',
    'T': 'thr',
    'W': 'trp',
    'Y': 'tyr',
    'V': 'val',
}
# One-letter amino acid code -> feature/row index (0-19).
# NOTE(review): the ordering (Y at 4, V at 8) is unusual — confirm it matches
# whatever encoding consumes these indices.
amino_acids_index = {
    'A': 0,
    'R': 1,
    'N': 2,
    'D': 3,
    'Y': 4,
    'C': 5,
    'E': 6,
    'Q': 7,
    'V': 8,
    'G': 9,
    'H': 10,
    'I': 11,
    'L': 12,
    'K': 13,
    'M': 14,
    'F': 15,
    'P': 16,
    'S': 17,
    'T': 18,
    'W': 19,
}
# Element symbol -> small integer code (purpose of the coding is defined by
# the consumer of this table).
atom_codes = {
    'H': 1,
    'C': 2,
    'O': 3,
    'N': 4,
    'S': 5,
}
|
13,680 | 583f042acda1f09fdea5324cdb1e140bb2a1ccc9 | """
Utilities for the *dicom_parser* package.
"""
from dicom_parser.utils.parse_tag import parse_tag
from dicom_parser.utils.path_generator import generate_paths
from dicom_parser.utils.read_file import read_file
from dicom_parser.utils.requires_pandas import requires_pandas
|
13,681 | 4659611a0aaac9605237518d31250dff8d449a7a | # Author: Rishabh Sharma <rishabh.sharma.gunner@gmail.com>
# This module was developed under funding provided by
# Google Summer of Code 2014
import os
from datetime import datetime
from itertools import compress
from urllib.parse import urlsplit
import astropy.units as u
from astropy.time import Time, TimeDelta
from sunpy import config
from sunpy.net.dataretriever import GenericClient
from sunpy.time import TimeRange, parse_time
from sunpy.time.time import _variables_for_parse_time_docstring
from sunpy.util.decorators import add_common_docstring
from sunpy.util.scraper import Scraper
TIME_FORMAT = config.get("general", "time_format")
__all__ = ["XRSClient", "SUVIClient"]
class XRSClient(GenericClient):
    """
    Provides access to the GOES XRS fits files archive.
    Searches data hosted by the `Solar Data Analysis Center <https://umbra.nascom.nasa.gov/goes/fits/>`__.
    Examples
    --------
    >>> from sunpy.net import Fido, attrs as a
    >>> results = Fido.search(a.Time("2016/1/1", "2016/1/2"),
    ...                       a.Instrument.xrs)  #doctest: +REMOTE_DATA
    >>> results  #doctest: +REMOTE_DATA
    <sunpy.net.fido_factory.UnifiedResponse object at ...>
    Results from 1 Provider:
    <BLANKLINE>
    2 Results from the XRSClient:
         Start Time           End Time      Source Instrument Wavelength
    ------------------- ------------------- ------ ---------- ----------
    2016-01-01 00:00:00 2016-01-01 23:59:59   nasa       goes        nan
    2016-01-02 00:00:00 2016-01-02 23:59:59   nasa       goes        nan
    <BLANKLINE>
    <BLANKLINE>
    """

    def _get_goes_sat_num(self, date):
        """
        Determines the satellite number for a given date.

        Parameters
        ----------
        date : `astropy.time.Time`
            The date to determine which satellite is active.
        """
        # Approximate operational windows; several satellites can overlap,
        # in which case the newest one is preferred.
        goes_operational = {
            2: TimeRange("1981-01-01", "1983-04-30"),
            5: TimeRange("1983-05-02", "1984-07-31"),
            6: TimeRange("1983-06-01", "1994-08-18"),
            7: TimeRange("1994-01-01", "1996-08-13"),
            8: TimeRange("1996-03-21", "2003-06-18"),
            9: TimeRange("1997-01-01", "1998-09-08"),
            10: TimeRange("1998-07-10", "2009-12-01"),
            11: TimeRange("2006-06-20", "2008-02-15"),
            12: TimeRange("2002-12-13", "2007-05-08"),
            13: TimeRange("2006-08-01", "2006-08-01"),
            14: TimeRange("2009-12-02", "2010-10-04"),
            15: TimeRange("2010-09-01", parse_time("now")),
        }
        # Collect every satellite whose window covers the date.
        results = [sat_num for sat_num, window in goes_operational.items() if date in window]
        if results:
            # Return the newest satellite
            return max(results)
        # No satellite was operational on that date.
        raise ValueError(
            "No operational GOES satellites on {}".format(
                date.strftime(TIME_FORMAT)
            )
        )

    def _get_time_for_url(self, urls):
        """Derive a whole-day `~sunpy.time.TimeRange` from each file URL."""
        times = []
        for uri in urls:
            uripath = urlsplit(uri).path
            # Strip the extension and the 4-character "goNN" prefix to get the
            # yymmdd or yyyymmdd timestamp.
            datestamp = os.path.splitext(os.path.split(uripath)[1])[0][4:]
            # Filenames use 2-digit years up to 1999-01-15 (990115 as an integer).
            if int(datestamp) <= 990115:
                start = Time.strptime(datestamp, "%y%m%d")
            else:
                start = Time.strptime(datestamp, "%Y%m%d")
            # Each file covers one day, minus a millisecond to stay inclusive.
            almost_day = TimeDelta(1 * u.day - 1 * u.millisecond)
            times.append(TimeRange(start, start + almost_day))
        return times

    def _get_url_for_timerange(self, timerange, **kwargs):
        """
        Returns a URL to the GOES data for the specified date.

        Parameters
        ----------
        timerange : `~sunpy.time.TimeRange`
            The time range you want the files for.

        Returns
        -------
        `list`
            The URL(s) for the corresponding timerange.
        """
        # Truncate the start to midnight so whole-day files are matched.
        timerange = TimeRange(timerange.start.strftime('%Y-%m-%d'), timerange.end)
        if timerange.end < parse_time("1999/01/15"):
            # Old naming scheme: 2-digit year, zero-padded satellite number.
            goes_file = "%Y/go{satellitenumber:02d}%y%m%d.fits"
        elif timerange.start < parse_time("1999/01/15") and timerange.end >= parse_time("1999/01/15"):
            # The naming scheme changed on 1999-01-15; split the query in two.
            # Fixed: forward **kwargs so an explicit ``satellitenumber`` is not
            # silently dropped for straddling time ranges.
            return self._get_overlap_urls(timerange, **kwargs)
        else:
            goes_file = "%Y/go{satellitenumber}%Y%m%d.fits"
        goes_pattern = f"https://umbra.nascom.nasa.gov/goes/fits/{goes_file}"
        satellitenumber = kwargs.get("satellitenumber", self._get_goes_sat_num(timerange.start))
        goes_files = Scraper(goes_pattern, satellitenumber=satellitenumber)
        return goes_files.filelist(timerange)

    def _get_overlap_urls(self, timerange, **kwargs):
        """
        Return a list of URLs over timerange when the URL path changed format `%Y` to `%y`
        on the date 1999/01/15.

        Any keyword options (e.g. ``satellitenumber``) are forwarded to both halves.

        Parameters
        ----------
        timerange : `~sunpy.time.TimeRange`
            The time range you want the files for.

        Returns
        -------
        `list`
            The URL(s) for the corresponding timerange.
        """
        tr_before = TimeRange(timerange.start, parse_time("1999/01/14"))
        tr_after = TimeRange(parse_time("1999/01/15"), timerange.end)
        urls_before = self._get_url_for_timerange(tr_before, **kwargs)
        urls_after = self._get_url_for_timerange(tr_after, **kwargs)
        return urls_before + urls_after

    def _makeimap(self):
        """
        Helper function used to hold information about source.
        """
        self.map_["source"] = "nasa"
        self.map_["instrument"] = "goes"
        self.map_["physobs"] = "irradiance"
        self.map_["provider"] = "sdac"

    @classmethod
    def _can_handle_query(cls, *query):
        """
        Answers whether client can service the query.

        Parameters
        ----------
        query : list of query objects

        Returns
        -------
        boolean
            answer as to whether client can service the query
        """
        chkattr = ["Time", "Instrument", "SatelliteNumber"]
        chklist = [x.__class__.__name__ in chkattr for x in query]
        for x in query:
            # Accept both "xrs" and "goes" as instrument names.
            if x.__class__.__name__ == "Instrument" and x.value.lower() in (
                "xrs",
                "goes",
            ):
                return all(chklist)
        return False

    @classmethod
    def _attrs_module(cls):
        return 'goes', 'sunpy.net.dataretriever.attrs.goes'

    @classmethod
    def register_values(cls):
        from sunpy.net import attrs
        goes_number = [2, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15]
        adict = {attrs.Instrument: [
            ("GOES", "The Geostationary Operational Environmental Satellite Program."),
            ("XRS", "GOES X-ray Flux")],
            attrs.goes.SatelliteNumber: [(str(x), f"GOES Satellite Number {x}") for x in goes_number]}
        return adict
class SUVIClient(GenericClient):
"""
Provides access to data from the GOES Solar Ultraviolet Imager (SUVI).
SUVI data are provided by NOAA at the following url
https://data.ngdc.noaa.gov/platforms/solar-space-observing-satellites/
The SUVI instrument was first included on GOES-16. It produces level-1b as
well as level-2 data products. Level-2 data products are a weighted average
of level-1b product files and therefore provide higher imaging dynamic
range than individual images. The exposure time of level 1b images range
from 1 s to 0.005 s. SUVI supports the following wavelengths;
94, 131, 171, 195, 284, 304 angstrom. If no wavelength is specified, images
from all wavelengths are returned.
Note
----
GOES-16 began providing regular level-1b data on 2018-06-01. At the time
of writing, SUVI on GOES-17 is operational but currently does not provide
Level-2 data.
"""
    @add_common_docstring(**_variables_for_parse_time_docstring())
    def _get_goes_sat_num(self, date):
        """
        Determines the best satellite number for a given date.

        Parameters
        ----------
        date : {parse_time_types}
            The date to determine which satellite is active.

        Note
        ----
        At the time this function was written,
        GOES-17 is operational but currently does not provide Level 2 data,
        therefore it is never returned.
        The GOES-16 start date is based on the availability of regular level 1b data.
        """
        # GOES-17 is operational but currently does not provide Level 2 data
        # GOES-16 start date is based on the availability of regular level 1b data
        suvi_operational = {
            16: TimeRange("2018-06-01", parse_time("now")),
        }
        results = []
        for sat_num in suvi_operational:
            if date in suvi_operational[sat_num]:
                # if true then the satellite with sat_num is available
                results.append(sat_num)
        if results:
            # Return the newest satellite
            return max(results)
        else:
            # if no satellites were found then raise an exception
            raise ValueError(f"No operational SUVI instrument on {date.strftime(TIME_FORMAT)}")
    def _get_time_for_url(self, urls):
        """Parse the observation time range out of each SUVI file URL."""
        these_timeranges = []
        for this_url in urls:
            if this_url.count('/l2/') > 0:  # this is a level 2 data file
                # Level-2 names carry ISO-like timestamps after "_s" (start)
                # and "_e" (end), each terminated by "Z".
                start_time = parse_time(os.path.basename(this_url).split('_s')[2].split('Z')[0])
                end_time = parse_time(os.path.basename(this_url).split('_e')[1].split('Z')[0])
                these_timeranges.append(TimeRange(start_time, end_time))
            if this_url.count('/l1b/') > 0:  # this is a level 1b data file
                # Level-1b names use %Y%j (ordinal day) timestamps; the [:-1]
                # drops the trailing digit — presumably tenths of a second;
                # NOTE(review): confirm against the NOAA filename convention.
                start_time = datetime.strptime(os.path.basename(this_url).split('_s')[
                    1].split('_e')[0][:-1], '%Y%j%H%M%S')
                end_time = datetime.strptime(os.path.basename(this_url).split('_e')[
                    1].split('_c')[0][:-1], '%Y%j%H%M%S')
                these_timeranges.append(TimeRange(start_time, end_time))
        return these_timeranges
    def _get_url_for_timerange(self, timerange, **kwargs):
        """
        Returns urls to the SUVI data for the specified time range.

        Parameters
        ----------
        timerange: `sunpy.time.TimeRange`
            Time range for which data is to be downloaded.
        level : `str`, optional
            The level of the data. Possible values are 1b and 2 (default).
        wavelength : `astropy.units.Quantity` or `tuple`, optional
            Wavelength band. If not given, all wavelengths are returned.
        satellitenumber : `int`, optional
            GOES satellite number. Must be >= 16. Default is 16.

        Returns
        -------
        `list` of `str`
            URLs of archive files matching the query.
        """
        base_url = "https://data.ngdc.noaa.gov/platforms/solar-space-observing-satellites/goes/goes{goes_number}/"
        supported_waves = [94, 131, 171, 195, 284, 304]
        supported_levels = ("2", "1b")
        # these are optional requirements so if not provided assume defaults
        # if wavelength is not provided assuming all of them
        if "wavelength" in kwargs.keys():
            wavelength_input = kwargs.get("wavelength")
            if isinstance(wavelength_input, u.Quantity):  # a single wavelength, not a range
                if int(wavelength_input.to_value('Angstrom')) not in supported_waves:
                    raise ValueError(f"Wavelength {kwargs.get('wavelength')} not supported.")
                else:
                    wavelength = [kwargs.get("wavelength")]
            else:  # a wavelength range was provided: keep the supported waves inside it
                compress_index = [wavelength_input.wavemin <= this_wave <=
                                  wavelength_input.wavemax for this_wave in (supported_waves * u.Angstrom)]
                if not any(compress_index):
                    raise ValueError(
                        f"Wavelength {wavelength_input} not supported.")
                else:
                    wavelength = list(compress(supported_waves, compress_index)) * u.Angstrom
        else:  # no wavelength provided return all of them
            wavelength = supported_waves * u.Angstrom
        # check that the input wavelength can be converted to angstrom
        waves = [int(this_wave.to_value('angstrom', equivalencies=u.spectral()))
                 for this_wave in wavelength]
        # use the given satellite number or choose the best one
        satellitenumber = int(kwargs.get(
            "satellitenumber", self._get_goes_sat_num(timerange.start)))
        if satellitenumber < 16:
            raise ValueError(f"Satellite number {satellitenumber} not supported.")
        # default to the highest level of data
        level = str(kwargs.get("level", "2"))  # make string in case the input is a number
        if level not in supported_levels:
            raise ValueError(f"Level {level} is not supported.")
        results = []
        for this_wave in waves:
            if level == "2":
                search_pattern = base_url + \
                    r'l{level}/data/suvi-l{level}-ci{wave:03}/%Y/%m/%d/dr_suvi-l{level}-ci{wave:03}_g{goes_number}_s%Y%m%dT%H%M%SZ_.*\.fits'
            elif level == "1b":
                # Level 1b filenames for the 94 and 304 channels embed
                # (wavelength - 1) rather than the wavelength itself.
                if this_wave in [131, 171, 195, 284]:
                    search_pattern = base_url + \
                        r'l{level}/suvi-l{level}-fe{wave:03}/%Y/%m/%d/OR_SUVI-L{level}-Fe{wave:03}_G{goes_number}_s%Y%j%H%M%S.*\.fits.gz'
                elif this_wave == 304:
                    search_pattern = base_url + \
                        r'l{level}/suvi-l{level}-he{wave:03}/%Y/%m/%d/OR_SUVI-L{level}-He{wave_minus1:03}_G{goes_number}_s%Y%j%H%M%S.*\.fits.gz'
                elif this_wave == 94:
                    search_pattern = base_url + \
                        r'l{level}/suvi-l{level}-fe{wave:03}/%Y/%m/%d/OR_SUVI-L{level}-Fe{wave_minus1:03}_G{goes_number}_s%Y%j%H%M%S.*\.fits.gz'
            if search_pattern.count('wave_minus1'):
                scraper = Scraper(search_pattern, level=level, wave=this_wave,
                                  goes_number=satellitenumber, wave_minus1=this_wave-1)
            else:
                scraper = Scraper(search_pattern, level=level, wave=this_wave,
                                  goes_number=satellitenumber)
            results.extend(scraper.filelist(timerange))
        return results
def _makeimap(self):
"""
Helper Function used to hold information about source.
"""
self.map_['source'] = 'GOES'
self.map_['provider'] = 'NOAA'
self.map_['instrument'] = 'SUVI'
self.map_['physobs'] = 'flux'
@classmethod
def _can_handle_query(cls, *query):
"""
Answers whether client can service the query.
Parameters
----------
query : `tuple`
All specified query objects.
Returns
-------
`bool`
answer as to whether client can service the query.
"""
# Import here to prevent circular imports
from sunpy.net import attrs as a
required = {a.Time, a.Instrument}
optional = {a.Wavelength, a.Level, a.goes.SatelliteNumber}
all_attrs = {type(x) for x in query}
ops = all_attrs - required
# check to ensure that all optional requirements are in approved list
if ops and not all(elem in optional for elem in ops):
return False
# if we get this far we have either Instrument and Time
# or Instrument, Time and Wavelength
check_var_count = 0
for x in query:
if isinstance(x, a.Instrument) and x.value.lower() == 'suvi':
check_var_count += 1
if check_var_count == 1:
return True
else:
return False
    @classmethod
    def _attrs_module(cls):
        # (attr namespace name, import path) used by the attr registry to
        # locate the GOES-specific search attributes.
        return 'goes', 'sunpy.net.dataretriever.attrs.goes'
@classmethod
def register_values(cls):
from sunpy.net import attrs
goes_number = [16, 17]
adict = {attrs.Instrument: [
("SUVI", "The Geostationary Operational Environmental Satellite Program.")],
attrs.goes.SatelliteNumber: [(str(x), f"GOES Satellite Number {x}") for x in goes_number]}
return adict
|
13,682 | c2d475169dc4104347997861369f00111d56b6fb | '''
First graph model to be trained for this task.
This file defines the method required to spawn and return a tensorflow graph for the autoencoder model.
coded by: Animesh
'''
# Convolutional autoencoder graph for 32x32 RGB inputs.
# NOTE(review): uses the deprecated TF1 API (tf.placeholder / tf.layers /
# tf.train) — requires TensorFlow 1.x or tf.compat.v1.
import tensorflow as tf

graph = tf.Graph()  # create a new graph object
with graph.as_default():
    # placeholder for the input data batch (NHWC: batch, 32, 32, RGB)
    inputs = tf.placeholder(dtype=tf.float32, shape=(None, 32, 32, 3), name="inputs")

    # encoder layers (no explicit activation is passed, so each conv is linear):
    # 32 x 32 x 3 -> 16 x 16 x 8
    encoder_layer1 = tf.layers.conv2d(inputs, 8, [5, 5], strides=(2, 2), padding="SAME")
    # 16 x 16 x 8 -> 8 x 8 x 16
    encoder_layer2 = tf.layers.conv2d(encoder_layer1, 16, [5, 5], strides=(2, 2), padding="SAME")
    # 8 x 8 x 16 -> 2 x 2 x 32: the latent representation (128 dims,
    # i.e. compression from 3072 input dimensions down to 128)
    encoder_layer3 = tf.layers.conv2d(encoder_layer2, 32, [5, 5], strides=(4, 4), padding="SAME")

    # decoder layers (transposed convolutions mirror the encoder):
    # 2 x 2 x 32 -> 8 x 8 x 32
    decoder_layer1 = tf.layers.conv2d_transpose(encoder_layer3, 32, [5, 5], strides=(4, 4), padding="SAME")
    # 8 x 8 x 32 -> 16 x 16 x 16
    decoder_layer2 = tf.layers.conv2d_transpose(decoder_layer1, 16, [5, 5], strides=(2, 2), padding="SAME")
    # 16 x 16 x 16 -> 32 x 32 x 3 (channel count restored to RGB)
    decoder_layer3 = tf.layers.conv2d_transpose(decoder_layer2, 3, [5, 5], strides=(2, 2), padding="SAME")

    # named endpoints so the tensors can be retrieved by name after loading
    output = tf.identity(encoder_layer3, name="encoded_representation")  # latent code
    y_pred = tf.identity(decoder_layer3, name="prediction")  # decoder reconstruction
    y_true = inputs  # the reconstruction target is the input itself

    # RMSE reconstruction loss, optimized with Adam
    loss = tf.sqrt(tf.reduce_mean(tf.square(y_pred - y_true)), name="loss")
    train_op = tf.train.AdamOptimizer(learning_rate=1e-5).minimize(loss, name="train_op")
|
13,683 | 7262b49beadabf3dfc9536fe342f1df02be3aa10 | from __future__ import unicode_literals
from django.shortcuts import render, HttpResponse, redirect
from django.contrib import messages
import random
from models import *
def index(request):
    """Landing page: redirect logged-in users to their profile."""
    if 'id' in request.session:
        return redirect("/profile")
    return render(request, "DnD_app/index.html")
def register(request):
    # valid_registration appears to return a 2-tuple: (User, False) on
    # success or (error_list, True) on failure — TODO confirm in models.py.
    result = User.objects.valid_registration(request.POST)
    if result[1] == False:
        # Success: log the new user in by stashing identity in the session.
        request.session['id'] = result[0].id
        request.session['user_name'] = result[0].username
        return redirect("/profile")
    else:
        # Failure: surface each validation error and return to the login page.
        for error in result[0]:
            messages.error(request, error)
        return redirect("/")
def profile(request):
    """Render the profile page for the logged-in user.

    Bug fix: the fetched user was previously discarded; it is now passed to
    the template context so the page can display account details.
    """
    user = User.objects.get(id=request.session['id'])
    return render(request, 'DnD_app/profile.html', {'user': user})
def login(request):
    """Authenticate the posted credentials and start a session.

    On validation errors, flash each message and return to the login page;
    otherwise store the user's identity in the session and go to the profile.
    """
    errors = User.objects.valid_login(request.POST)
    if errors:
        for error in errors:
            messages.error(request, error)
        return redirect('/')
    # Fetch the user once instead of issuing two identical queries.
    user = User.objects.get(username=request.POST['username'])
    request.session['user_name'] = user.username
    request.session['id'] = user.id
    return redirect('/profile')
def logout(request):
    """End the session and return to the landing page.

    Bug fix: 'user_name' was previously left behind as stale session data;
    pop() is used so a missing key no longer raises KeyError.
    """
    request.session.pop('id', None)
    request.session.pop('user_name', None)
    return redirect('/')
def character(request):
    """List every playable character."""
    context = {'characters': Character.objects.all()}
    return render(request, "DnD_app/character.html", context)
def save(request):
    """Persist the current game state (hp/gold/level) for the logged-in user.

    Creates a new Game row when the user has no active game; otherwise
    updates the active one. Bug fix: the updated game is now saved —
    previously the field assignments were never written to the database.
    """
    # active_game appears to return (has_active: bool, games) — TODO confirm.
    active = Game.objects.active_game(request.session['id'])
    if active[0] == False:
        user = User.objects.get(id=request.session['id'])
        # create() both builds and saves the row.
        Game.objects.create(user=user, hp=request.session['hp'],
                            gold=request.session['gold'],
                            level=request.session['level'])
    else:
        for active_game in active[1]:
            game = Game.objects.get(id=active_game.id)
            game.hp = request.session['hp']
            game.gold = request.session['gold']
            game.level = request.session['level']
            game.save()  # was missing: changes were silently discarded
    return redirect('/profile')
def restart(request):
    """Drop the in-progress game state so a fresh game can start.

    Uses pop() with a default: the previous bare `del` raised KeyError when
    restart was hit without an active game in the session.
    """
    for key in ('gold', 'hp', 'level'):
        request.session.pop(key, None)
    return redirect('/new_Game')
def keep_playing(request):
    """Resume the game currently held in the session."""
    template = "DnD_app/game.html"
    return render(request, template)
# def new_game(request, id):
# character = Character.objects.get(id=id)
# request.session['hp'] = character.hp
# request.session['gold'] = character.gold
# request.session['level'] = 1
# return render(request, "DnD_app/game.html")
def game(request):
    # BUG(review): `id` here is the Python builtin function, not a game id —
    # this lookup raises an error at runtime. The intended id (from the URL
    # or the session) needs to be threaded in; the commented-out `new_game`
    # above suggests the original design took it as a view argument.
    # TODO confirm the intended source of the id before fixing.
    game = Game.objects.get(id=id)
    game.hp = request.session['hp']
    game.gold = request.session['gold']
    game.level = request.session['level']
    return render(request, "DnD_app/game.html")
|
13,684 | 04bd6f89e13b83d87fcf57da738d5eac40c008f6 | #!/usr/bin/env python
#
# Autogenerated by Thrift Compiler (0.13.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
import sys
import pprint
if sys.version_info[0] > 2:
from urllib.parse import urlparse
else:
from urlparse import urlparse
from thrift.transport import TTransport, TSocket, TSSLSocket, THttpClient
from thrift.protocol.TBinaryProtocol import TBinaryProtocol
from EACPLog import ncTEACPLog
from EACPLog.ttypes import *
if len(sys.argv) <= 1 or sys.argv[1] == '--help':
print('')
print('Usage: ' + sys.argv[0] + ' [-h host[:port]] [-u url] [-f[ramed]] [-s[sl]] [-novalidate] [-ca_certs certs] [-keyfile keyfile] [-certfile certfile] function [arg1 [arg2...]]')
print('')
print('Functions:')
print(' void Log(ncTLogItem item)')
print(' ncTLogCountInfo GetLogCount(ncTGetLogCountParam param)')
print(' GetPageLog(ncTGetPageLogParam param)')
print(' string ExportLog(ncTExportLogParam param)')
print(' void SetLogRetentionPeriod(i32 period)')
print(' i32 GetLogRetentionPeriod()')
print(' GetHistoryLogs(ncTLogType logType)')
print(' i64 GetHistoryLogCount(ncTGetHistoryLogCountParam param)')
print(' GetPageHistoryLog(ncTGetPageHistoryLogParam param)')
print(' ncTEVFSOSSRequest GetHistoryLogDownLoadInfo(string id, string reqHost, bool useHttps, i64 validSeconds)')
print(' string ReadHistoryLog(string fileId, i64 offset, i32 length)')
print(' ncTLogSpaceInfo GetLogSpaceInfo()')
print(' GetBufferedLogs()')
print(' void SetSyslogFirstPushTime(i64 time)')
print(' void SetLogPushPeriod(i32 period)')
print(' i32 GetLogPushPeriod()')
print('')
sys.exit(0)
pp = pprint.PrettyPrinter(indent=2)
host = 'localhost'
port = 9090
uri = ''
framed = False
ssl = False
validate = True
ca_certs = None
keyfile = None
certfile = None
http = False
argi = 1
if sys.argv[argi] == '-h':
parts = sys.argv[argi + 1].split(':')
host = parts[0]
if len(parts) > 1:
port = int(parts[1])
argi += 2
if sys.argv[argi] == '-u':
url = urlparse(sys.argv[argi + 1])
parts = url[1].split(':')
host = parts[0]
if len(parts) > 1:
port = int(parts[1])
else:
port = 80
uri = url[2]
if url[4]:
uri += '?%s' % url[4]
http = True
argi += 2
if sys.argv[argi] == '-f' or sys.argv[argi] == '-framed':
framed = True
argi += 1
if sys.argv[argi] == '-s' or sys.argv[argi] == '-ssl':
ssl = True
argi += 1
if sys.argv[argi] == '-novalidate':
validate = False
argi += 1
if sys.argv[argi] == '-ca_certs':
ca_certs = sys.argv[argi+1]
argi += 2
if sys.argv[argi] == '-keyfile':
keyfile = sys.argv[argi+1]
argi += 2
if sys.argv[argi] == '-certfile':
certfile = sys.argv[argi+1]
argi += 2
cmd = sys.argv[argi]
args = sys.argv[argi + 1:]
if http:
transport = THttpClient.THttpClient(host, port, uri)
else:
if ssl:
socket = TSSLSocket.TSSLSocket(host, port, validate=validate, ca_certs=ca_certs, keyfile=keyfile, certfile=certfile)
else:
socket = TSocket.TSocket(host, port)
if framed:
transport = TTransport.TFramedTransport(socket)
else:
transport = TTransport.TBufferedTransport(socket)
protocol = TBinaryProtocol(transport)
client = ncTEACPLog.Client(protocol)
transport.open()
if cmd == 'Log':
if len(args) != 1:
print('Log requires 1 args')
sys.exit(1)
pp.pprint(client.Log(eval(args[0]),))
elif cmd == 'GetLogCount':
if len(args) != 1:
print('GetLogCount requires 1 args')
sys.exit(1)
pp.pprint(client.GetLogCount(eval(args[0]),))
elif cmd == 'GetPageLog':
if len(args) != 1:
print('GetPageLog requires 1 args')
sys.exit(1)
pp.pprint(client.GetPageLog(eval(args[0]),))
elif cmd == 'ExportLog':
if len(args) != 1:
print('ExportLog requires 1 args')
sys.exit(1)
pp.pprint(client.ExportLog(eval(args[0]),))
elif cmd == 'SetLogRetentionPeriod':
if len(args) != 1:
print('SetLogRetentionPeriod requires 1 args')
sys.exit(1)
pp.pprint(client.SetLogRetentionPeriod(eval(args[0]),))
elif cmd == 'GetLogRetentionPeriod':
if len(args) != 0:
print('GetLogRetentionPeriod requires 0 args')
sys.exit(1)
pp.pprint(client.GetLogRetentionPeriod())
elif cmd == 'GetHistoryLogs':
if len(args) != 1:
print('GetHistoryLogs requires 1 args')
sys.exit(1)
pp.pprint(client.GetHistoryLogs(eval(args[0]),))
elif cmd == 'GetHistoryLogCount':
if len(args) != 1:
print('GetHistoryLogCount requires 1 args')
sys.exit(1)
pp.pprint(client.GetHistoryLogCount(eval(args[0]),))
elif cmd == 'GetPageHistoryLog':
if len(args) != 1:
print('GetPageHistoryLog requires 1 args')
sys.exit(1)
pp.pprint(client.GetPageHistoryLog(eval(args[0]),))
elif cmd == 'GetHistoryLogDownLoadInfo':
if len(args) != 4:
print('GetHistoryLogDownLoadInfo requires 4 args')
sys.exit(1)
pp.pprint(client.GetHistoryLogDownLoadInfo(args[0], args[1], eval(args[2]), eval(args[3]),))
elif cmd == 'ReadHistoryLog':
if len(args) != 3:
print('ReadHistoryLog requires 3 args')
sys.exit(1)
pp.pprint(client.ReadHistoryLog(args[0], eval(args[1]), eval(args[2]),))
elif cmd == 'GetLogSpaceInfo':
if len(args) != 0:
print('GetLogSpaceInfo requires 0 args')
sys.exit(1)
pp.pprint(client.GetLogSpaceInfo())
elif cmd == 'GetBufferedLogs':
if len(args) != 0:
print('GetBufferedLogs requires 0 args')
sys.exit(1)
pp.pprint(client.GetBufferedLogs())
elif cmd == 'SetSyslogFirstPushTime':
if len(args) != 1:
print('SetSyslogFirstPushTime requires 1 args')
sys.exit(1)
pp.pprint(client.SetSyslogFirstPushTime(eval(args[0]),))
elif cmd == 'SetLogPushPeriod':
if len(args) != 1:
print('SetLogPushPeriod requires 1 args')
sys.exit(1)
pp.pprint(client.SetLogPushPeriod(eval(args[0]),))
elif cmd == 'GetLogPushPeriod':
if len(args) != 0:
print('GetLogPushPeriod requires 0 args')
sys.exit(1)
pp.pprint(client.GetLogPushPeriod())
else:
print('Unrecognized method %s' % cmd)
sys.exit(1)
transport.close()
|
13,685 | ccfba5f50ba45914ae96d2e153299adc6ecfe54f | from django.db import models
from django.contrib.auth.models import User
class Order(models.Model):
    """A customer order; creation/update timestamps are managed automatically."""

    cliente = models.ForeignKey(
        User,
        on_delete=models.SET_NULL,
        null=True,
        blank=True,
        verbose_name="Cliente",
    )
    # Set once on INSERT / refreshed on every save, respectively.
    created_at = models.DateTimeField(auto_now_add=True)
    updated_at = models.DateTimeField(auto_now=True)

    class Meta:
        app_label = "order"
        verbose_name = "Pedido"
        verbose_name_plural = "Pedidos"

    def __str__(self):
        return "Pedido: #{}".format(self.id)
13,686 | 0d480c5e9d4776a0cee088f958648f92821a2e36 | from django.test import TestCase
import httplib
import json
import urllib
import wechat_utils
# Create your tests here.
def _weichat_msg():
    """Fetch a WeChat Work (qyapi) access token and validate a user.

    NOTE(review): Python 2 syntax (`print` statements, httplib). The corp
    secret is hard-coded below and should be moved to configuration.
    """
    c = httplib.HTTPSConnection("qyapi.weixin.qq.com")
    # Request an access token for the corp id / secret pair.
    c.request("GET", "/cgi-bin/gettoken?corpid=wx416865667552f10b&corpsecret=60gcQRI8S-1hbMSvqf5CzBnYKBk1O3qOTmPw9Lk37Rxm6bFYifoyu4Me-P5sd53G")
    response = c.getresponse()
    print response.status, response.reason
    data = response.read()
    result = json.loads(data)
    token = result.get('access_token')
    print token
    # send message
    # https://qyapi.weixin.qq.com/cgi-bin/message/send?access_token=ACCESS_TOKEN
    str_1 = '''{
    "touser": "fky",
    "msgtype": "text",
    "agentid": 0,
    "text": {
        "content": "Thank you for you follow DSA Account, you will get the attendance message at 8pm."
    },
    "safe":"0"
    }'''
    # url = "/cgi-bin/message/send?access_token="+token
    #
    # c.request("POST",url ,str_1)
    # response = c.getresponse()
    # data = response.read()
    # print data
    print wechat_utils.validate_weichat_user('lxj',token)

if __name__=='__main__':
    _weichat_msg()
|
13,687 | 9a019ba4a9a290b194c36bae40e24a0b93bda7ce | from pylab import scatter, xlabel, ylabel, xlim, ylim, show
from numpy import loadtxt

# Load two-column star data (temperature, magnitude) and draw a simple
# scatter plot.
data = loadtxt("stars.txt", float)
x = data[:, 0]
y = data[:, 1]

scatter(x, y)
xlabel("Temperature")  # typo fix: was "Temperture"
ylabel("Magnitude")    # typo fix: was "Magniude"
xlim(0, 13000)
ylim(-5, 20)
show()
|
13,688 | e1c0faab5138aaca4327b88791a89158c2c7d33e | #implementation of MAML for updating coefficients of Label Shift task.
import torch
from torch import optim
import torch.nn as nn
from torch.autograd import Variable
import numpy as np
# torch.manual_seed(0)
# Make all new tensors float64 (bug fix for float/double matmul mismatches).
torch.set_default_dtype(torch.double)
# Enable CUDA if a GPU is available, otherwise fall back to the CPU.
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
class MAML():
    """
    Implementation of the Model-Agnostic Meta Learning algorithm for
    performing meta-gradient updates of Label Shift weights.
    """

    def __init__(self, X, y, model, weights, alpha: float = 0.01, beta: float = 0.05):
        """
        Initialize params.
        @Params:
        - X : (torch.tensor) validation data
        - y : (torch.tensor) validation labels
        - model : (Network) DNN
        - weights : (array) label shift weights
        - alpha : (float) inner-loop (task adaptation) learning rate
        - beta : (float) outer-loop (meta update) learning rate
        """
        # Resolve the compute device locally instead of relying on a
        # module-level global, so the class is self-contained.
        self.device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
        # Store the model in float64 to avoid float/double matmul mismatches.
        self.f = model.double()
        # Number of distinct classes — bug fix: derive it from the *labels* y;
        # the old code counted unique values of the raw data X.
        self.cls = len(np.unique(y.numpy()))
        # Parameters theta (the label-shift weights): the quantity MAML adapts.
        self.theta = Variable(torch.DoubleTensor(weights), requires_grad=True).to(self.device)
        # Single-task setting; use batches of X for a multi-task setting.
        self.tasks = [X]
        self.y = y.double()
        # MAML hyperparameters.
        self.alpha = alpha
        self.beta = beta
        # Loss and meta-optimizer over theta.
        self.criteon = nn.MSELoss()
        self.meta_optim = optim.SGD([self.theta], lr=self.beta)

    def update(self, max_norm=1.0):
        """Run a single iteration of the MAML algorithm.

        @Params:
        - max_norm : (float) gradient-clipping threshold for theta
        """
        # Inner loop: one SGD step on a task-specific copy of theta per task.
        theta_prime = []
        for batch in self.tasks:
            y_hat = self.constraint(self.theta, self.f(batch))
            loss = self.criteon(y_hat, self.y)
            grad = torch.autograd.grad(loss, self.theta)
            theta_prime.append(self.theta - self.alpha * grad[0])
            del loss
        # Outer loop: meta-loss evaluated at the adapted parameters.
        m_loss = torch.tensor(0.0, requires_grad=True)
        for theta, batch in zip(theta_prime, self.tasks):
            y_hat = self.constraint(theta, self.f(batch))
            m_loss = m_loss + self.criteon(y_hat, self.y)
        self.meta_optim.zero_grad()
        m_loss.backward(retain_graph=True)
        # Clip gradients to keep the meta step stable, then descend.
        nn.utils.clip_grad_norm_([self.theta], max_norm)
        self.meta_optim.step()

    def constraint(self, theta, labels):
        """Compute the dot product of the network outputs and theta."""
        y = labels.to(self.device)      # K x N
        dot = torch.matmul(y, theta)    # (K x N) @ (N,) -> (K,)
        dot.requires_grad_()  # retain the computational graph through theta
        return dot.to(self.device)

    def get_label_weights(self):
        """Return the current label-shift weights as a numpy array."""
        # .cpu() makes this safe when theta lives on the GPU.
        return self.theta.detach().cpu().numpy()
13,689 | e57e1fd512812770e613ee7c5477250b777914da | """
Chloe Jane Coleman
"""
# Prompt until a non-empty name is given, then echo every other character.
name = input("Please enter your name")
while not name:
    name = input("Your name must have at least one character \nPlease enter your name")
print(name[::2])
|
13,690 | 67f6b828bbd826610680f346fb7123b26e52a986 | # словарь с информацией о пользователе
# Template of user-profile fields; each value starts out as the built-in
# `str` type object (a placeholder) and is replaced by user input below.
user_info = {
    'name': str,
    'surname': str,
    'year': str,
    'city': str,
    'email': str,
    'phone_num': str,
}
def output_user_info(name, surname, year, city, email, phone_num):
    """Accept the user fields as keyword arguments and print them on a
    single comma-separated line."""
    print(name, surname, year, city, email, phone_num, sep=', ')
# Fill each dictionary field from interactive input.
for key, value in user_info.items():
    user_info[key] = input(f'enter {key}: ')
# Pass the completed dictionary to the function as keyword arguments.
output_user_info(**user_info)
|
13,691 | 751d1748fe7dfa073d526a649c0bad5904951d2d | # Generated by Django 3.1.7 on 2021-04-10 13:49
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated: replace Package.owner with a nullable Package.user FK
    (SET_NULL on user deletion, reverse accessor `package_set`)."""

    dependencies = [
        ('ups', '0002_package'),
    ]

    operations = [
        # Drop the old owner relation...
        migrations.RemoveField(
            model_name='package',
            name='owner',
        ),
        # ...and replace it with the nullable `user` foreign key.
        migrations.AddField(
            model_name='package',
            name='user',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='package_set', to='ups.user'),
        ),
    ]
|
13,692 | 1b60854726c4f08abc1f235f1ea7a88e101c6f56 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Date : 2018-07-04 09:43:51
# @Author : Zhi Liu (zhiliu.mind@gmail.com)
# @Link : http://iridescent.ink
# @Version : $1.1$
import numpy as np
from improc.blkptcs import showblks
# 101 random 8x8 RGB blocks (uint8, values in [0, 255)); tile and display
# them against a black ('k') background.
blks = np.uint8(np.random.randint(0, 255, (8, 8, 3, 101)))
showblks(blks, bgcolor='k')
|
13,693 | 410fef0c1e9f30b0d95bc615078622c7190b5459 | #different methods to swap two numbers in python
# Method 1: simultaneous (tuple) assignment — the idiomatic Python swap.
print('first method')
a=int(input('enter 1st no.: '))
b=int(input('enter 2nd no.: '))
a,b=b,a
print('a=',a)
print('b=',b)
# Method 2: product/quotient trick.
# NOTE(review): true division makes the printed results floats (e.g. 2.0).
print('second method')
a=int(input('enter 1st no.: '))
b=int(input('enter 2nd no.: '))
b=a*b
a=b/a
b=b/a
print(a)
print(b)
# Method 3: sum/difference trick (no extra variable).
print('third method')
a=int(input('enter 1st no.: '))
b=int(input('enter 2nd no.: '))
a=a+b
b=a-b
a=a-b
print(a)
print(b)
# Method 4: classic temporary variable.
print('fourth method')
a=int(input('enter 1st no.: '))
b=int(input('enter 2nd no.: '))
temp=a
a=b
b=temp
print(a)
print(b)
# Method 5: XOR trick (works for integers only).
print('fifth method')
a=int(input('enter 1st no.: '))
b=int(input('enter 2nd no.: '))
a=a^b
b=a^b
a=a^b
print(a)
print(b)
# Method 6: read both numbers at once and reverse the token list.
print('sixth method')
n=(input('enter two numbers: ')).split()
print(n)
n.reverse()
a=int(n[0])
b=int(n[1])
print(a)
print(b)
# Method 7: store both values in a list and reverse it in place.
print('seventh method')
List=[int(input("enter a")),int(input("enter b"))]
List.reverse()
print(List)
|
13,694 | e2add2147a8868b695b55ac28cf0f88ddebf8fb5 |
# S&P 100 Data from https://en.wikipedia.org/wiki/S%26P_100
# Run the below script to get the data
#
"""
var data = [];
for(var x of $("#constituents tr")){
var tds = $(x).find('td')
data.push({'company':$(tds[1]).text().trim(), 'symbol': $(tds[0]).text().trim(), 'sector': $(tds[2]).text().trim()});
}
console.log(JSON.stringify(data,null,1))
"""
SAP100_DATASET = [
{
"company": "Apple Inc.",
"symbol": "AAPL",
"sector": "Information Technology"
},
{
"company": "AbbVie Inc.",
"symbol": "ABBV",
"sector": "Health Care"
},
{
"company": "Abbott Laboratories",
"symbol": "ABT",
"sector": "Health Care"
},
{
"company": "Accenture",
"symbol": "ACN",
"sector": "Information Technology"
},
{
"company": "Adobe Inc.",
"symbol": "ADBE",
"sector": "Information Technology"
},
{
"company": "American International Group",
"symbol": "AIG",
"sector": "Financials"
},
{
"company": "Amgen Inc.",
"symbol": "AMGN",
"sector": "Health Care"
},
{
"company": "American Tower",
"symbol": "AMT",
"sector": "Real Estate"
},
{
"company": "Amazon.com",
"symbol": "AMZN",
"sector": "Consumer Discretionary"
},
{
"company": "Broadcom Inc.",
"symbol": "AVGO",
"sector": "Information Technology"
},
{
"company": "American Express",
"symbol": "AXP",
"sector": "Financials"
},
{
"company": "Boeing Co.",
"symbol": "BA",
"sector": "Industrials"
},
{
"company": "Bank of America Corp",
"symbol": "BAC",
"sector": "Financials"
},
{
"company": "Biogen",
"symbol": "BIIB",
"sector": "Health Care"
},
{
"company": "The Bank of New York Mellon",
"symbol": "BK",
"sector": "Financials"
},
{
"company": "Booking Holdings",
"symbol": "BKNG",
"sector": "Consumer Discretionary"
},
{
"company": "BlackRock Inc",
"symbol": "BLK",
"sector": "Financials"
},
{
"company": "Bristol-Myers Squibb",
"symbol": "BMY",
"sector": "Health Care"
},
{
"company": "Berkshire Hathaway",
"symbol": "BRK.B",
"sector": "Financials"
},
{
"company": "Citigroup Inc",
"symbol": "C",
"sector": "Financials"
},
{
"company": "Caterpillar Inc.",
"symbol": "CAT",
"sector": "Industrials"
},
{
"company": "Charter Communications",
"symbol": "CHTR",
"sector": "Communication Services"
},
{
"company": "Colgate-Palmolive",
"symbol": "CL",
"sector": "Consumer Staples"
},
{
"company": "Comcast Corp.",
"symbol": "CMCSA",
"sector": "Communication Services"
},
{
"company": "Capital One Financial Corp.",
"symbol": "COF",
"sector": "Financials"
},
{
"company": "ConocoPhillips",
"symbol": "COP",
"sector": "Energy"
},
{
"company": "Costco Wholesale Corp.",
"symbol": "COST",
"sector": "Consumer Staples"
},
{
"company": "Salesforce",
"symbol": "CRM",
"sector": "Information Technology"
},
{
"company": "Cisco Systems",
"symbol": "CSCO",
"sector": "Information Technology"
},
{
"company": "CVS Health",
"symbol": "CVS",
"sector": "Health Care"
},
{
"company": "Chevron Corporation",
"symbol": "CVX",
"sector": "Energy"
},
{
"company": "DuPont de Nemours Inc",
"symbol": "DD",
"sector": "Materials"
},
{
"company": "Danaher Corporation",
"symbol": "DHR",
"sector": "Health Care"
},
{
"company": "The Walt Disney Company",
"symbol": "DIS",
"sector": "Communication Services"
},
{
"company": "Dow Inc.",
"symbol": "DOW",
"sector": "Materials"
},
{
"company": "Duke Energy",
"symbol": "DUK",
"sector": "Utilities"
},
{
"company": "Emerson Electric Co.",
"symbol": "EMR",
"sector": "Industrials"
},
{
"company": "Exelon",
"symbol": "EXC",
"sector": "Utilities"
},
{
"company": "Ford Motor Company",
"symbol": "F",
"sector": "Consumer Discretionary"
},
{
"company": "Facebook, Inc.",
"symbol": "FB",
"sector": "Communication Services"
},
{
"company": "FedEx",
"symbol": "FDX",
"sector": "Industrials"
},
{
"company": "General Dynamics",
"symbol": "GD",
"sector": "Industrials"
},
{
"company": "General Electric",
"symbol": "GE",
"sector": "Industrials"
},
{
"company": "Gilead Sciences",
"symbol": "GILD",
"sector": "Health Care"
},
{
"company": "General Motors",
"symbol": "GM",
"sector": "Consumer Discretionary"
},
{
"company": "Alphabet Inc. (Class C)",
"symbol": "GOOG",
"sector": "Communication Services"
},
{
"company": "Alphabet Inc. (Class A)",
"symbol": "GOOGL",
"sector": "Communication Services"
},
{
"company": "Goldman Sachs",
"symbol": "GS",
"sector": "Financials"
},
{
"company": "The Home Depot",
"symbol": "HD",
"sector": "Consumer Discretionary"
},
{
"company": "Honeywell",
"symbol": "HON",
"sector": "Industrials"
},
{
"company": "International Business Machines",
"symbol": "IBM",
"sector": "Information Technology"
},
{
"company": "Intel Corp.",
"symbol": "INTC",
"sector": "Information Technology"
},
{
"company": "Johnson & Johnson",
"symbol": "JNJ",
"sector": "Health Care"
},
{
"company": "JPMorgan Chase & Co.",
"symbol": "JPM",
"sector": "Financials"
},
{
"company": "Kraft Heinz",
"symbol": "KHC",
"sector": "Consumer Staples"
},
{
"company": "The Coca-Cola Company",
"symbol": "KO",
"sector": "Consumer Staples"
},
{
"company": "Linde plc",
"symbol": "LIN",
"sector": "Industrials"
},
{
"company": "Eli Lilly and Company",
"symbol": "LLY",
"sector": "Health Care"
},
{
"company": "Lockheed Martin",
"symbol": "LMT",
"sector": "Industrials"
},
{
"company": "Lowe's",
"symbol": "LOW",
"sector": "Consumer Discretionary"
},
{
"company": "Mastercard",
"symbol": "MA",
"sector": "Financials"
},
{
"company": "McDonald's Corp",
"symbol": "MCD",
"sector": "Consumer Discretionary"
},
{
"company": "Mondelz International",
"symbol": "MDLZ",
"sector": "Consumer Staples"
},
{
"company": "Medtronic plc",
"symbol": "MDT",
"sector": "Health Care"
},
{
"company": "MetLife Inc.",
"symbol": "MET",
"sector": "Financials"
},
{
"company": "3M Company",
"symbol": "MMM",
"sector": "Industrials"
},
{
"company": "Altria Group",
"symbol": "MO",
"sector": "Consumer Staples"
},
{
"company": "Merck & Co.",
"symbol": "MRK",
"sector": "Health Care"
},
{
"company": "Morgan Stanley",
"symbol": "MS",
"sector": "Financials"
},
{
"company": "Microsoft",
"symbol": "MSFT",
"sector": "Information Technology"
},
{
"company": "NextEra Energy",
"symbol": "NEE",
"sector": "Utilities"
},
{
"company": "Netflix",
"symbol": "NFLX",
"sector": "Communication Services"
},
{
"company": "Nike, Inc.",
"symbol": "NKE",
"sector": "Consumer Discretionary"
},
{
"company": "Nvidia Corporation",
"symbol": "NVDA",
"sector": "Information Technology"
},
{
"company": "Oracle Corporation",
"symbol": "ORCL",
"sector": "Information Technology"
},
{
"company": "PepsiCo",
"symbol": "PEP",
"sector": "Consumer Staples"
},
{
"company": "Pfizer Inc",
"symbol": "PFE",
"sector": "Health Care"
},
{
"company": "Procter & Gamble",
"symbol": "PG",
"sector": "Consumer Staples"
},
{
"company": "Philip Morris International",
"symbol": "PM",
"sector": "Consumer Staples"
},
{
"company": "PayPal",
"symbol": "PYPL",
"sector": "Financials"
},
{
"company": "Qualcomm",
"symbol": "QCOM",
"sector": "Information Technology"
},
{
"company": "Raytheon Technologies",
"symbol": "RTX",
"sector": "Industrials"
},
{
"company": "Starbucks Corp.",
"symbol": "SBUX",
"sector": "Consumer Discretionary"
},
{
"company": "Southern Company",
"symbol": "SO",
"sector": "Utilities"
},
{
"company": "Simon Property Group",
"symbol": "SPG",
"sector": "Real Estate"
},
{
"company": "AT&T Inc",
"symbol": "T",
"sector": "Communication Services"
},
{
"company": "Target Corporation",
"symbol": "TGT",
"sector": "Consumer Staples"
},
{
"company": "Thermo Fisher Scientific",
"symbol": "TMO",
"sector": "Health Care"
},
{
"company": "T-Mobile US",
"symbol": "TMUS",
"sector": "Communication Services"
},
{
"company": "Tesla, Inc.",
"symbol": "TSLA",
"sector": "Consumer Discretionary"
},
{
"company": "Texas Instruments",
"symbol": "TXN",
"sector": "Information Technology"
},
{
"company": "UnitedHealth Group",
"symbol": "UNH",
"sector": "Health Care"
},
{
"company": "Union Pacific Corporation",
"symbol": "UNP",
"sector": "Industrials"
},
{
"company": "United Parcel Service",
"symbol": "UPS",
"sector": "Industrials"
},
{
"company": "U.S. Bancorp",
"symbol": "USB",
"sector": "Financials"
},
{
"company": "Visa Inc.",
"symbol": "V",
"sector": "Financials"
},
{
"company": "Verizon Communications",
"symbol": "VZ",
"sector": "Communication Services"
},
{
"company": "Walgreens Boots Alliance",
"symbol": "WBA",
"sector": "Consumer Staples"
},
{
"company": "Wells Fargo",
"symbol": "WFC",
"sector": "Financials"
},
{
"company": "Walmart",
"symbol": "WMT",
"sector": "Consumer Staples"
},
{
"company": "Exxon Mobil Corp.",
"symbol": "XOM",
"sector": "Energy"
}
]
|
def sortIntegers(A):
    """Sort the list A in place in ascending order (bubble sort, bubbling
    the smallest remaining element leftward) and return it."""
    n = len(A)
    for i in range(n - 1):
        # Sweep from the right edge down to position i+1, swapping
        # out-of-order neighbours so A[i] ends up holding the minimum
        # of A[i:].
        for j in range(n - 1, i, -1):
            if A[j] < A[j - 1]:
                A[j], A[j - 1] = A[j - 1], A[j]
    return A
# Demo: sort a sample list and print the result.
A = [3, 1, 2, 5, 4]
print(sortIntegers(A))
13,696 | 65aba8e3f3e6617b6ade26c8ccb46db90117a9ac | """ Tests of the configuration
:Author: Jonathan Karr <jonrkarr@gmail.com>
:Date: 2018-08-20
:Copyright: 2018, Karr Lab
:License: MIT
"""
import os
import pathlib
import pkg_resources
import unittest
import wc_env_manager.config.core
class Test(unittest.TestCase):
    """Exercises wc_env_manager.config.core.get_config and its overrides."""

    def test_get_config(self):
        # The default config must expose the base image repo as a string.
        cfg = wc_env_manager.config.core.get_config()
        self.assertIn('base_image', cfg['wc_env_manager'])
        self.assertIsInstance(cfg['wc_env_manager']['base_image']['repo'], str)

    def test_get_config_extra(self):
        # Values passed through `extra` are merged into the returned config.
        extra = {
            'wc_env_manager': {
                'base_image': {
                    'build_args': {'timezone': 'America/Los_Angeles'},
                },
            },
        }
        cfg = wc_env_manager.config.core.get_config(extra=extra)
        self.assertEqual(
            cfg['wc_env_manager']['base_image']['build_args']['timezone'],
            'America/Los_Angeles')

    def test_get_config_context(self):
        # ${HOME}-style template variables are expanded from the environment.
        extra = {
            'wc_env_manager': {
                'base_image': {
                    'dockerfile_template_path': '${HOME}/Dockerfile',
                },
            },
        }
        cfg = wc_env_manager.config.core.get_config(extra=extra)
        self.assertEqual(
            cfg['wc_env_manager']['base_image']['dockerfile_template_path'],
            '{}/Dockerfile'.format(pathlib.Path.home()))
|
13,697 | fed571d26b6f03237cf629dc4e5176d31b15629c | #!/usr/bin/python
# -*- coding: utf-8 -*-
# context_proc/processor.py
""" Context processor """
from django.conf import settings
def cont_settings_(request):
    """Expose the Django settings object to every template context.

    Args:
        request: the current HttpRequest (unused, but required by the
            Django context-processor protocol).

    Returns:
        dict mapping the key ``"settings"`` to ``django.conf.settings``.
    """
    return dict(settings=settings)
|
13,698 | 60a1e162fee523ea753cbfa83ae1ca1f7f4a379c | import torch
import torch.nn as nn
from entmax import entmax15, sparsemax
from .sparsesoftmax import sparse_softmax
def entropy(p: torch.Tensor):
    """Numerically stable computation of Shannon's entropy
    for probability distributions with zero-valued elements.

    Zero entries are masked out (0 * log 0 is taken to be 0), and the
    remaining probabilities are clamped away from 0 and 1 before the
    log so no -inf/nan can appear.

    Arguments:
        p {torch.Tensor} -- tensor of probabilities.
            Size: [batch_size, n_categories]

    Returns:
        {torch.Tensor} -- the entropy of p.
            Size: [batch_size]
    """
    # The comparison result already lives on p's device; no .to() needed.
    nz = p > 0
    eps = torch.finfo(p.dtype).eps
    # clamp() is out-of-place, so no clone() is required first.
    p_stable = p.clamp(min=eps, max=1 - eps)
    # zeros_like keeps p's dtype/device (the old hard-coded torch.float
    # forced a promotion for float64 inputs).
    plogp = torch.where(nz, p_stable * torch.log(p_stable), torch.zeros_like(p))
    return -plogp.sum(-1)
class ExplicitWrapper(nn.Module):
    """
    Explicit Marginalization Wrapper for a network.

    The wrapped agent is expected to emit raw scores over the output
    categories during its forward pass; this wrapper turns them into a
    tuple of (greedy sample from the Categorical, probability
    distribution over categories, entropy of that distribution).
    """

    def __init__(self, agent, normalizer="entmax15"):
        super(ExplicitWrapper, self).__init__()
        self.agent = agent
        # Resolve the requested normalizer name to its callable; an
        # unknown name raises KeyError, same as before.
        self.normalizer = {
            "softmax": torch.softmax,
            "sparsemax": sparsemax,
            "entmax15": entmax15,
            "sparsesoftmax": sparse_softmax,
        }[normalizer]

    def forward(self, *args, **kwargs):
        raw_scores = self.agent(*args, **kwargs)
        probs = self.normalizer(raw_scores, dim=-1)
        greedy_sample = raw_scores.argmax(dim=-1)
        return greedy_sample, probs, entropy(probs)
class Marginalizer(torch.nn.Module):
    """
    The training loop for the marginalization method to train discrete latent variables.

    Instead of sampling the discrete latent z, the expected loss is computed
    exactly by iterating over every possible value of z and weighting each
    per-value loss by the encoder's probability for that value.

    Encoder needs to be ExplicitWrapper.
    Decoder needs to be utils.DeterministicWrapper.
    """
    def __init__(
        self,
        encoder,
        decoder,
        loss_fun,
        encoder_entropy_coeff=0.0,
        decoder_entropy_coeff=0.0,
    ):
        """Store the networks, the loss callable and the entropy coefficients.

        Arguments:
            encoder -- wrapper returning (sample, probs, entropy) for an input.
            decoder -- network taking (latent z batch, decoder_input).
            loss_fun -- callable returning (per-example loss, logs dict).
            encoder_entropy_coeff -- weight of the encoder-entropy regularizer.
            decoder_entropy_coeff -- stored but never read in forward().
        """
        super(Marginalizer, self).__init__()
        self.encoder = encoder
        self.decoder = decoder
        self.loss = loss_fun
        self.encoder_entropy_coeff = encoder_entropy_coeff
        # NOTE(review): decoder_entropy_coeff is unused below — confirm intended.
        self.decoder_entropy_coeff = decoder_entropy_coeff
    def forward(self, encoder_input, decoder_input, labels):
        """Compute the exact (marginalized) training loss.

        Returns:
            dict with "loss" (scalar to backprop through) and "log"
            (diagnostics: per-key expected metrics, entropy, support, distr).
        """
        discrete_latent_z, encoder_probs, encoder_entropy = self.encoder(encoder_input)
        # encoder_probs: [batch_size, latent_size]
        batch_size, latent_size = encoder_probs.shape
        # Entropy regularizer: maximizing entropy lowers the loss.
        entropy_loss = -(encoder_entropy.mean() * self.encoder_entropy_coeff)
        losses = torch.zeros_like(encoder_probs)
        logs_global = None
        # Enumerate every possible value of the discrete latent variable.
        for possible_discrete_latent_z in range(latent_size):
            if encoder_probs[:, possible_discrete_latent_z].sum().detach() != 0:
                # if it's zero, all batch examples
                # will be multiplied by zero anyway,
                # so skip computations
                # Broadcast the scalar latent value to a [batch_size] long tensor.
                possible_discrete_latent_z_ = possible_discrete_latent_z + torch.zeros(
                    batch_size, dtype=torch.long
                ).to(encoder_probs.device)
                decoder_output = self.decoder(
                    possible_discrete_latent_z_, decoder_input
                )
                loss_sum_term, logs = self.loss(
                    encoder_input,
                    discrete_latent_z,
                    decoder_input,
                    decoder_output,
                    labels,
                )
                losses[:, possible_discrete_latent_z] += loss_sum_term
                # Lazily initialize the accumulator the first time logs appear.
                if not logs_global:
                    logs_global = {k: 0.0 for k in logs.keys()}
                for k, v in logs.items():
                    if hasattr(v, "mean"):
                        # expectation of accuracy
                        # (weight each per-z metric by the encoder's probability)
                        logs_global[k] += (
                            encoder_probs[:, possible_discrete_latent_z] * v
                        ).mean()
        # NOTE(review): `logs` here is left over from the LAST loop iteration
        # and is unbound if every latent value had zero total mass — presumably
        # that cannot happen with a proper distribution; verify.
        for k, v in logs.items():
            if hasattr(v, "mean"):
                logs[k] = logs_global[k]
        # encoder_probs: [batch_size, latent_size]
        # losses: [batch_size, latent_size]
        # encoder_probs.unsqueeze(1): [batch_size, 1, latent_size]
        # losses.unsqueeze(-1): [batch_size, latent_size, 1]
        # entropy_loss: []
        # full_loss: []
        # Batched dot product: expected loss per example under encoder_probs.
        loss = encoder_probs.unsqueeze(1).bmm(losses.unsqueeze(-1)).squeeze()
        full_loss = loss.mean() + entropy_loss.mean()
        logs["loss"] = loss.mean()
        logs["encoder_entropy"] = encoder_entropy.mean()
        # Number of latent values with nonzero probability, averaged over batch.
        logs["support"] = (encoder_probs != 0).sum(-1).to(torch.float).mean()
        logs["distr"] = encoder_probs
        return {"loss": full_loss, "log": logs}
|
13,699 | e2f6b96784624751d75f673fd47ede1c22985977 | # Use partial function from functools module to rewrite the method for doubleNum and tripleNum
from functools import partial
def multiply(x, y):
    """Return the product of x and y."""
    product = x * y
    return product
def doubleNum(x):
    """Return x doubled, delegating to multiply."""
    return multiply(x, y=2)
def tripleNum(x):
    """Return x tripled, delegating to multiply."""
    return multiply(x, y=3)
# partial() pre-binds one argument of multiply, replacing the hand-written
# doubleNum/tripleNum wrappers above (keyword and positional binding shown).
newDoubleNum = partial(multiply, y=2)
newTripleNum = partial(multiply, 3)
print(newDoubleNum(69))
print(newTripleNum(69))
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.