blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 281 | content_id stringlengths 40 40 | detected_licenses listlengths 0 57 | license_type stringclasses 2
values | repo_name stringlengths 6 116 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 313
values | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 18.2k 668M ⌀ | star_events_count int64 0 102k | fork_events_count int64 0 38.2k | gha_license_id stringclasses 17
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 107
values | src_encoding stringclasses 20
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 4 6.02M | extension stringclasses 78
values | content stringlengths 2 6.02M | authors listlengths 1 1 | author stringlengths 0 175 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ef7317dea244070c4acc131772bde1098dc30efb | 09da41a4f4b8032f92ceeaae631f3274aa834921 | /hello.py | 4d3005d090ae27c92520d5086a1ddda463c7d05b | [
"BSD-2-Clause"
] | permissive | green10-syntra-ab-python-adv/cloudnative-ch01 | 04845118fb296caf90ec70fa2671ccc0969a893a | eb9342004f81f81358dbb6e4f78eed1e9e733e66 | refs/heads/master | 2022-04-30T13:51:36.455655 | 2022-03-08T18:16:20 | 2022-03-08T18:16:20 | 218,303,092 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 196 | py | from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
    # Single landing page for the course exercise.
    return "Hello World of Syntra-AB!"
if __name__ == '__main__':
    # NOTE(review): debug=True combined with host='0.0.0.0' exposes the
    # Werkzeug debugger to the whole network -- acceptable for a class
    # exercise, never for production.
    app.run(host='0.0.0.0', port=5000, debug=True)
| [
"hans.vandenbogaerde@gmail.com"
] | hans.vandenbogaerde@gmail.com |
9a8733f429c6c5da8b7bfb76da18290b5c0b8a08 | e5ee8e4f142d42f69473ba4191dd58817fc111f7 | /config.py | 0d5c60843aa9c895c69e173d88bb46a19711250f | [] | no_license | pxch/script_model | f23691ca20b2371624af5139319b8ed0a9d1eae1 | c8206b46a7dcdc3e0a10df8a65fc1b5375dbf540 | refs/heads/master | 2021-09-11T23:34:01.050638 | 2018-04-13T02:18:39 | 2018-04-13T02:18:39 | 104,591,997 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,485 | py | import platform
import socket
from os.path import dirname, join, realpath
class DefaultConfig(object):
    """Corpus and data paths for the local development machine.

    Subclasses override ``corpus_root`` for other execution environments
    (see CondorConfig / MaverickConfig); every corpus location is exposed
    as a read-only property derived from ``corpus_root``.
    """
    # absolute path to the root of this repository
    repo_root = dirname(realpath(__file__))
    # path to the data directory
    @property
    def data_path(self):
        return join(self.repo_root, 'dataset/data')
    # root directory for all corpora
    corpus_root = '/Users/pengxiang/corpora/'
    # path to Penn Treebank WSJ corpus (relative to corpus_root)
    wsj_path = 'penn-treebank-rel3/parsed/mrg/wsj'
    # file pattern to read PTB data from WSJ corpus
    # (raw string: \d and \. are regex escapes -- as a plain string literal
    # this triggered an invalid-escape DeprecationWarning on Python 3.6+)
    wsj_file_pattern = r'\d\d/wsj_.*\.mrg'
    @property
    def wsj_root(self):
        return join(self.corpus_root, self.wsj_path)
    # path to Propbank corpus (relative to corpus_root)
    propbank_path = 'propbank-LDC2004T14/data'
    # file name of propositions in Propbank corpus
    propbank_file = 'prop.txt'
    # file name of verb list in Propbank corpus
    propbank_verbs_file = 'verbs.txt'
    @property
    def propbank_root(self):
        return join(self.corpus_root, self.propbank_path)
    # path to Nombank corpus (relative to corpus_root)
    nombank_path = 'nombank.1.0'
    # file name of propositions in Nombank corpus
    nombank_file = 'nombank.1.0_sorted'
    # file name of noun list in Nombank corpus
    nombank_nouns_file = 'nombank.1.0.words'
    @property
    def nombank_root(self):
        return join(self.corpus_root, self.nombank_path)
    # file pattern to read frame data from Propbank/Nombank corpus
    frame_file_pattern = r'frames/.*\.xml'
    # path to Ontonotes corpus (relative to corpus_root)
    ontonotes_path = 'ontonotes-release-5.0/data/files/data/'
    @property
    def ontonotes_root(self):
        return join(self.corpus_root, self.ontonotes_path)
class CondorConfig(DefaultConfig):
    # Corpus location on the UTCS Condor cluster's scratch storage.
    corpus_root = '/scratch/cluster/pxcheng/corpora/'
class MaverickConfig(DefaultConfig):
    # Corpus location on the TACC Maverick cluster's work filesystem.
    corpus_root = '/work/03155/pxcheng/maverick/corpora/'
def get_config():
    """Pick the Config subclass matching the machine this code runs on.

    Raises RuntimeError on any platform/host that is not recognized.
    """
    system_name = platform.system()
    if system_name == 'Darwin':
        # local MacBook
        return DefaultConfig()
    if system_name == 'Linux':
        fqdn = socket.getfqdn()
        if 'cs.utexas.edu' in fqdn:
            # UTCS Condor cluster
            return CondorConfig()
        if 'maverick.tacc.utexas.edu' in fqdn:
            # TACC Maverick cluster
            return MaverickConfig()
    raise RuntimeError('Unrecognized platform')
cfg = get_config()
| [
"pxcheng@utexas.edu"
] | pxcheng@utexas.edu |
e9b797ee2dc708b4d5b228546e0c2efc78a60f0b | 2a7e989154abbff264b46f12990d7caf62876da0 | /wikiprereq_finder.py | 56e343b5b367bfe7519766973a2a6a0ada19b032 | [
"MIT"
] | permissive | arunbalachandran/DocumentPrereqAnalysis | 54bcfc345e94bf26aa670e0b0aaa6b120a433739 | 50df4c78b7bb2d59547b86d156fdd7917b19bb15 | refs/heads/master | 2021-01-20T09:55:11.968731 | 2017-06-14T20:28:04 | 2017-06-14T20:28:04 | 90,301,834 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,182 | py | import json
import subprocess, shlex
import sys
import os
# Global adjacency map loaded once at import time:
# article title -> collection of titles that the article links out to.
with open('title_links_noenglish.json') as title_link_dict:
    title_links = json.load(title_link_dict)
# Encoding of the attached stdout (used implicitly when printing titles).
system_encoding = sys.stdout.encoding
def weights(conceptA, conceptB):
    """Indicator weight: 1 if *conceptB*'s page links out to *conceptA*."""
    return 1 if conceptA in title_links[conceptB] else 0
def relation(conceptA, conceptB):
    """Indicator relation: 1 if *conceptA*'s page links out to *conceptB*."""
    return int(conceptB in title_links[conceptA])
def ref_distance(conceptA, conceptB):
    """Return ``(distance, conceptB)`` where distance is the RefD score.

    Positive distance suggests conceptB is a prerequisite of conceptA
    (conceptA's in-links reference conceptB more than the reverse).
    """
    total_weightsA, total_weightsB = 0, 0
    for article in title_links:
        total_weightsA += weights(article, conceptA)
        total_weightsB += weights(article, conceptB)
    # Bug fix: the original used `elif`, so when BOTH totals were zero only
    # total_weightsA was bumped and the B term below divided by zero.
    if total_weightsA == 0:
        total_weightsA = 1
    if total_weightsB == 0:
        total_weightsB = 1
    ref_dist = 0.0
    for article in title_links:
        ref_dist += (relation(article, conceptB) * weights(article, conceptA) / total_weightsA
                     - relation(article, conceptA) * weights(article, conceptB) / total_weightsB)
    if (ref_dist != 0):
        # Best-effort progress report; titles may not be encodable on stdout.
        try:
            print ("The distance between", conceptA, "and", conceptB, "is ", ref_dist)
        except Exception:
            print ("Cannot print concept")
    return (ref_dist, conceptB)
def recursive_search(dictionary, search_key):
    """Return True if *search_key* occurs inside any OTHER entry's subtree.

    Relies on the quoted key appearing in the textual repr of a nested
    dict, so a key found anywhere below another top-level key matches.
    """
    needle = "'" + search_key + "'"
    for key, subtree in dictionary.items():
        if key != search_key and needle in str(subtree):
            return True
    return False
def get_prereq_concept(concepts):
    """Build a nested prerequisite tree for the given concept titles.

    Each top-level concept is expanded via recursive_concept_fill; any
    top-level key that also appears nested under another concept is then
    pruned so it shows up only once.  Returns the (possibly empty) input
    list unchanged when *concepts* is empty.
    """
    concept_prereq = {}
    if (concepts != []):
        for concept in concepts: # find prerequisites for each concept
            concept_prereq[concept] = {}
        sys.stdout.write('\n'+str(concept_prereq)+'\n')
        # simplifying assumption that the links that you have in your page contain the thing that links to you
        # for concept in concept_prereq:
        recursive_concept_fill(concept_prereq, depth=0, all_concepts=concepts) # depth limited recursive concept fetch
        # Drop top-level concepts that already occur inside another subtree.
        deletion_list = []
        for search_key in concept_prereq:
            if recursive_search(concept_prereq, search_key):
                deletion_list.append(search_key)
        # print ('deletion list is', deletion_list)
        # input()
        for key in deletion_list:
            del(concept_prereq[key])
        return concept_prereq
    return concepts # if none exist
def get_concepts(filepath):
    """Extract Wikipedia concepts mentioned in a PDF and build their tree.

    Converts the PDF at *filepath* to text with Ghostscript's ps2ascii,
    scans it for titles from the global ``title_links`` map, keeps those
    mentioned more than twice, and returns a pruned prerequisite tree
    (same post-processing as get_prereq_concept).
    """
    # make this platform independent
    print ('filepath is ', filepath)
    filename = os.path.basename(filepath)
    # ps2ascii.ps must be resolvable from the PDF's own directory.
    original_path = os.getcwd()
    os.chdir(os.path.abspath(os.path.join(filepath, os.pardir)))
    print ('changed directory to', os.getcwd())
    if sys.platform == 'win32':
        cmd = 'gswin64c -q -dNODISPLAY -dSAFER -dDELAYBIND -dWRITESYSTEMDICT -dSIMPLE -c save -f ps2ascii.ps ' + filename + ' -c quit'
        proc = subprocess.Popen(shlex.split(cmd, posix=False), stdout=subprocess.PIPE) # don't need the posix option if the filesystem is not windows
    else:
        cmd = 'gs -q -dNODISPLAY -dSAFER -dDELAYBIND -dWRITESYSTEMDICT -dSIMPLE -c save -f ps2ascii.ps ' + filename + ' -c quit'
        proc = subprocess.Popen(shlex.split(cmd), stdout=subprocess.PIPE) # don't need the posix option if the filesystem is not windows
    pdftext, stderr = proc.communicate()
    pdftext = str(pdftext).lower()
    print ('command is', cmd)
    os.chdir(original_path)
    print ('changed back to', os.getcwd())
    # future scope -> check bigrams and trigrams and also improve simple word checking using search library
    # how do I reduce the number of concepts? or maybe implement a hide functionality for nodes
    concepts_with_count = [] # TODO: refine this whole-word matching heuristic
    for concept in title_links:
        if ' '+concept.lower()+' ' in pdftext:
            concepts_with_count.append((concept, pdftext.count(concept.lower())))
        elif ' '+concept.replace('_', ' ').lower()+' ' in pdftext:
            concepts_with_count.append((concept, pdftext.count(concept.replace('_', ' ').lower())))
    # concepts = [concept for concept in title_links if concept.lower() in pdftext or ' '.join(concept.split('_')).lower() in pdftext]
    concepts = [concept[0] for concept in concepts_with_count if concept[1] > 2] # keep this count variable for experiment
    # remove unnecessary concepts
    concepts = [concept for concept in concepts if concept not in ['(', ')', '{', '}', '[', ']']]
    concept_prereq = {}
    if (concepts != []):
        for concept in concepts: # find prerequisites for each concept
            concept_prereq[concept] = {}
        sys.stdout.write('\n'+str(concept_prereq)+'\n')
        # simplifying assumption that the links that you have in your page contain the thing that links to you
        recursive_concept_fill(concept_prereq, depth=0, all_concepts=concepts) # depth limited recursive concept fetch
        # Prune top-level concepts that already occur inside another subtree.
        deletion_list = []
        for search_key in concept_prereq:
            if recursive_search(concept_prereq, search_key):
                deletion_list.append(search_key)
        for key in deletion_list:
            del(concept_prereq[key])
        return concept_prereq
    return concepts # if none exist
def recursive_concept_fill(concept_dict, depth, all_concepts):
    """Depth-limited (max 7 levels) fill of each concept's prerequisite chain.

    For every concept, the out-linked article with the largest ref_distance
    becomes its single prerequisite child, which is then recursed into.
    Mutates *concept_dict* and appends each chosen article to *all_concepts*.
    """
    if (depth >= 7):
        return
    for concept in concept_dict:
        # max concept is the prerequisite for that concept
        print ('concept is ', concept, 'with dict', concept_dict)
        if (title_links.get(concept) != None):
            # super rare case
            list_articles = [ref_distance(concept, article) for article in title_links[concept] if title_links.get(article)]
            if list_articles != []:
                # list entries are (distance, article); pick by distance.
                max_concept = max(list_articles , key=lambda x: x[0])
                # need to verify the pruning logic
                if (max_concept):
                    all_concepts.append(max_concept[1]) # unique concept that doesn't exist anywhere in the dictionary
                    concept_dict[concept][max_concept[1]] = {}
                    recursive_concept_fill(concept_dict[concept], depth=depth+1, all_concepts=all_concepts)
| [
"balachandran.arun94@gmail.com"
] | balachandran.arun94@gmail.com |
81bd565ffe7d12d279ab0249323224c34927c333 | f5b936c57a82a479983a2adc71c2abe7d6b3ec9b | /HackerRank/Implementation/findDigits.py | f205a873b0afd9f50b9a46805062dd8e971b2db7 | [] | no_license | FranciscoThiesen/OldProblems | 002e2099fcb7f0c874f3d8927a60d1644521bbdf | 809747fceb5a75127aae832697d6f91b63d234f5 | refs/heads/master | 2021-08-31T16:20:28.969377 | 2017-12-22T02:07:05 | 2017-12-22T02:07:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 254 | py | from math import *
# HackerRank "Find Digits" (Python 2): for each test case, count how many
# digits of the number evenly divide the number itself; zeros are skipped
# to avoid division by zero.
i = int(raw_input())
j = 0
while(j < i):
    total = 0
    # NOTE(review): on Python 2 input() eval()s the line -- fine for numeric
    # judge input, but raw_input() would be the safer, consistent choice.
    num1 = int(input())
    num = str(num1)
    q = map(int,num)
    for x in q:
        if x != 0 and num1%x == 0:
            total = total + 1
    print(total)
    j = j + 1
| [
"francisco@MacBook-Pro-de-francisco.local"
] | francisco@MacBook-Pro-de-francisco.local |
c459f64e1807df8a30a789284087c6e1ec564b20 | 5f233df8e5d3ad962920c9891c6490dcf62bcbc0 | /Disaggregation-Ukdale/VRNN_theano_version/models/vrnn_gmm_aggVS1_mseFocused.py | c088fa794ce04e75172b3455954ce7de582f230c | [] | no_license | oneway3124/disaggregation-vrnn | 3113a79bf082b28ef671abeb3abdc4f5b566f05c | d4bc620278f3a4ce33521f23c6632ab512a38de9 | refs/heads/master | 2022-12-04T15:24:34.005913 | 2018-11-10T01:01:12 | 2018-11-10T01:01:12 | 289,849,290 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 18,072 | py | import ipdb
import numpy as np
import theano
import theano.tensor as T
import datetime
import shutil
import os
from cle.cle.cost import BiGMM, KLGaussianGaussian, GMM
from cle.cle.data import Iterator
from cle.cle.models import Model
from cle.cle.layers import InitCell
from cle.cle.layers.feedforward import FullyConnectedLayer
from cle.cle.layers.recurrent import LSTM
from cle.cle.train import Training
from cle.cle.train.ext import (
EpochCount,
GradientClipping,
Monitoring,
Picklize,
EarlyStopping,
WeightNorm
)
from cle.cle.train.opt import Adam
from cle.cle.utils import init_tparams, sharedX
from cle.cle.utils.compat import OrderedDict
from cle.cle.utils.op import Gaussian_sample, GMM_sample
from cle.cle.utils.gpu_op import concatenate
from preprocessing.ukdale import UKdale
from preprocessing.ukdale_utils import fetch_ukdale
# Target appliance channels, in the order used throughout training.
appliances = [ 'kettle','microwave', 'washing machine', 'dish washer' , 'fridge']#
# UKdale date window per building id (building -> (start, end)).
windows = {1:("2013-02-27", "2015-02-27")}#, 2:("2013-02-27", "2013-04-27")
# Timestamps whose containing sequences are tracked in the monitoring plots.
listDates = ['2013-08-26 07:57','2014-01-22 07:41']
def main(args):
    """Train a VRNN with a GMM output layer on UKdale disaggregation data.

    *args* is an OrderedDict of string-valued hyper-parameters parsed from
    the config file (see the __main__ block at the bottom of this module).
    Builds the recognition/prior/generation networks, unrolls them with
    theano.scan, optimizes the negative ELBO (optionally plus MSE) with
    Adam, and dumps per-monitoring-step metrics to output.csv.
    """
    #theano.optimizer='fast_compile'
    #theano.config.exception_verbosity='high'
    # ---- hyper-parameters from the config file -------------------------
    trial = int(args['trial'])
    pkl_name = 'vrnn_gmm_%d' % trial
    channel_name = 'valid_nll_upper_bound'
    data_path = args['data_path']
    save_path = args['save_path'] #+'/gmm/'+datetime.datetime.now().strftime("%y-%m-%d_%H-%M")
    flgMSE = int(args['flgMSE'])
    period = int(args['period'])
    n_steps = int(args['n_steps'])
    stride_train = int(args['stride_train'])
    stride_test = n_steps# int(args['stride_test'])
    monitoring_freq = int(args['monitoring_freq'])
    epoch = int(args['epoch'])
    batch_size = int(args['batch_size'])
    x_dim = int(args['x_dim'])
    y_dim = int(args['y_dim'])
    flgAgg = int(args['flgAgg'])
    z_dim = int(args['z_dim'])
    rnn_dim = int(args['rnn_dim'])
    k = int(args['num_k']) #a mixture of K Gaussian functions
    lr = float(args['lr'])
    debug = int(args['debug'])
    num_sequences_per_batch = int(args['numSequences']) #based on appliance
    loadParam = args['loadAsKelly']
    target_inclusion_prob = float(args['target_inclusion_prob'])
    loadAsKelly = True
    if(loadParam == 'N' or loadParam == 'n' or loadParam == 'no' or loadParam == 'NO' or loadParam == 'No'):
        loadAsKelly = False
    print "trial no. %d" % trial
    print "batch size %d" % batch_size
    print "learning rate %f" % lr
    print "saving pkl file '%s'" % pkl_name
    print "to the save path '%s'" % save_path
    # Hidden-layer sizes of the sub-networks (comments show earlier values).
    q_z_dim = 100#150
    p_z_dim = 60#150
    p_x_dim = 20#250
    x2s_dim = 40#250
    z2s_dim = 40#150
    target_dim = k#x_dim #(x_dim-1)*k
    model = Model()
    # ---- data loading and normalization --------------------------------
    Xtrain, ytrain, Xval, yval, reader = fetch_ukdale(target_inclusion_prob, data_path, windows, appliances,numApps=flgAgg, period=period,
                                              n_steps= n_steps, stride_train = stride_train, stride_test = stride_test,
                                              isKelly= loadAsKelly, seq_per_batch=num_sequences_per_batch, flgAggSumScaled = 1, flgFilterZeros = 1)
    instancesPlot = {0:[4,20], 2:[5,10]} #for now use hard coded instancesPlot for kelly sampling
    if(not loadAsKelly):
        instancesPlot = reader.build_dict_instances_plot(listDates, batch_size, Xval.shape[0])
    train_data = UKdale(name='train',
                        prep='normalize',
                        cond=True,# False
                        #path=data_path,
                        inputX=Xtrain,
                        labels=ytrain)
    X_mean = train_data.X_mean
    X_std = train_data.X_std
    valid_data = UKdale(name='valid',
                        prep='normalize',
                        cond=True,# False
                        #path=data_path,
                        X_mean=X_mean,
                        X_std=X_std,
                        inputX=Xval,
                        labels = yval)
    init_W = InitCell('rand')
    init_U = InitCell('ortho')
    init_b = InitCell('zeros')
    init_b_sig = InitCell('const', mean=0.6)
    x, mask, y , y_mask = train_data.theano_vars()
    x.name = 'x_original'
    if debug:
        x.tag.test_value = np.zeros((15, batch_size, x_dim), dtype=np.float32)
        temp = np.ones((15, batch_size), dtype=np.float32)
        temp[:, -2:] = 0.
        mask.tag.test_value = temp
    # ---- network layers -------------------------------------------------
    # x_1/z_1 embed the input and latent sample before feeding the LSTM.
    x_1 = FullyConnectedLayer(name='x_1',
                              parent=['x_t'],
                              parent_dim=[x_dim],
                              nout=x2s_dim,
                              unit='relu',
                              init_W=init_W,
                              init_b=init_b)
    z_1 = FullyConnectedLayer(name='z_1',
                              parent=['z_t'],
                              parent_dim=[z_dim],
                              nout=z2s_dim,
                              unit='relu',
                              init_W=init_W,
                              init_b=init_b)
    rnn = LSTM(name='rnn',
               parent=['x_1', 'z_1'],
               parent_dim=[x2s_dim, z2s_dim],
               nout=rnn_dim,
               unit='tanh',
               init_W=init_W,
               init_U=init_U,
               init_b=init_b)
    '''
    dissag_pred = FullyConnectedLayer(name='disag_1',
                              parent=['s_tm1'],
                              parent_dim=[rnn_dim],
                              nout=num_apps,
                              unit='relu',
                              init_W=init_W,
                              init_b=init_b)
    '''
    # phi_*: recognition (posterior) network q(z_t | x_t, s_{t-1}).
    phi_1 = FullyConnectedLayer(name='phi_1',
                                parent=['x_1', 's_tm1'],
                                parent_dim=[x2s_dim, rnn_dim],
                                nout=q_z_dim,
                                unit='relu',
                                init_W=init_W,
                                init_b=init_b)
    phi_mu = FullyConnectedLayer(name='phi_mu',
                                 parent=['phi_1'],
                                 parent_dim=[q_z_dim],
                                 nout=z_dim,
                                 unit='linear',
                                 init_W=init_W,
                                 init_b=init_b)
    phi_sig = FullyConnectedLayer(name='phi_sig',
                                  parent=['phi_1'],
                                  parent_dim=[q_z_dim],
                                  nout=z_dim,
                                  unit='softplus',
                                  cons=1e-4,
                                  init_W=init_W,
                                  init_b=init_b_sig)
    # prior_*: prior network p(z_t | s_{t-1}).
    prior_1 = FullyConnectedLayer(name='prior_1',
                                  parent=['s_tm1'],
                                  parent_dim=[rnn_dim],
                                  nout=p_z_dim,
                                  unit='relu',
                                  init_W=init_W,
                                  init_b=init_b)
    prior_mu = FullyConnectedLayer(name='prior_mu',
                                   parent=['prior_1'],
                                   parent_dim=[p_z_dim],
                                   nout=z_dim,
                                   unit='linear',
                                   init_W=init_W,
                                   init_b=init_b)
    prior_sig = FullyConnectedLayer(name='prior_sig',
                                    parent=['prior_1'],
                                    parent_dim=[p_z_dim],
                                    nout=z_dim,
                                    unit='softplus',
                                    cons=1e-4,
                                    init_W=init_W,
                                    init_b=init_b_sig)
    # theta_*: generation network p(x_t | z_t, s_{t-1}) as a K-mixture GMM.
    theta_1 = FullyConnectedLayer(name='theta_1',
                                  parent=['z_1', 's_tm1'],
                                  parent_dim=[z2s_dim, rnn_dim],
                                  nout=p_x_dim,
                                  unit='relu',
                                  init_W=init_W,
                                  init_b=init_b)
    theta_mu = FullyConnectedLayer(name='theta_mu',
                                   parent=['theta_1'],
                                   parent_dim=[p_x_dim],
                                   nout=target_dim,
                                   unit='linear',
                                   init_W=init_W,
                                   init_b=init_b)
    theta_sig = FullyConnectedLayer(name='theta_sig',
                                    parent=['theta_1'],
                                    parent_dim=[p_x_dim],
                                    nout=target_dim,
                                    unit='softplus',
                                    cons=1e-4,
                                    init_W=init_W,
                                    init_b=init_b_sig)
    coeff = FullyConnectedLayer(name='coeff',
                                parent=['theta_1'],
                                parent_dim=[p_x_dim],
                                nout=k,
                                unit='softmax',
                                init_W=init_W,
                                init_b=init_b)
    # corr/binary are only used by the bivariate-GMM variant (disabled).
    corr = FullyConnectedLayer(name='corr',
                               parent=['theta_1'],
                               parent_dim=[p_x_dim],
                               nout=k,
                               unit='tanh',
                               init_W=init_W,
                               init_b=init_b)
    binary = FullyConnectedLayer(name='binary',
                                 parent=['theta_1'],
                                 parent_dim=[p_x_dim],
                                 nout=1,
                                 unit='sigmoid',
                                 init_W=init_W,
                                 init_b=init_b)
    nodes = [rnn,
             x_1, z_1, #dissag_pred,
             phi_1, phi_mu, phi_sig,
             prior_1, prior_mu, prior_sig,
             theta_1, theta_mu, theta_sig, coeff]#, corr, binary
    params = OrderedDict()
    for node in nodes:
        if node.initialize() is not None:
            params.update(node.initialize())
    params = init_tparams(params)
    s_0 = rnn.get_init_state(batch_size)
    x_1_temp = x_1.fprop([x], params)
    def inner_fn(x_t, s_tm1):
        # One VRNN step: infer q(z|x,s), sample z, generate GMM params,
        # sample a prediction and advance the recurrent state.
        phi_1_t = phi_1.fprop([x_t, s_tm1], params)
        phi_mu_t = phi_mu.fprop([phi_1_t], params)
        phi_sig_t = phi_sig.fprop([phi_1_t], params)
        prior_1_t = prior_1.fprop([s_tm1], params)
        prior_mu_t = prior_mu.fprop([prior_1_t], params)
        prior_sig_t = prior_sig.fprop([prior_1_t], params)
        z_t = Gaussian_sample(phi_mu_t, phi_sig_t)
        z_1_t = z_1.fprop([z_t], params)
        theta_1_t = theta_1.fprop([z_1_t, s_tm1], params)
        theta_mu_t = theta_mu.fprop([theta_1_t], params)
        theta_sig_t = theta_sig.fprop([theta_1_t], params)
        coeff_t = coeff.fprop([theta_1_t], params)
        #corr_t = corr.fprop([theta_1_t], params)
        #binary_t = binary.fprop([theta_1_t], params)
        pred = GMM_sample(theta_mu_t, theta_sig_t, coeff_t) #Gaussian_sample(theta_mu_t, theta_sig_t)
        s_t = rnn.fprop([[x_t, z_1_t], [s_tm1]], params)
        #y_pred = dissag_pred.fprop([s_t], params)
        return s_t, phi_mu_t, phi_sig_t, prior_mu_t, prior_sig_t, z_t, z_1_t, theta_1_t, theta_mu_t, theta_sig_t, coeff_t, pred#, y_pred
    #corr_temp, binary_temp
    # Unroll the step function over the time axis of the input sequence.
    ((s_temp, phi_mu_temp, phi_sig_temp, prior_mu_temp, prior_sig_temp,z_t_temp, z_1_temp, theta_1_temp, theta_mu_temp, theta_sig_temp, coeff_temp, prediction), updates) =\
        theano.scan(fn=inner_fn,
                    sequences=[x_1_temp],
                    outputs_info=[s_0, None, None, None, None, None, None, None, None, None, None, None])
    # NOTE(review): this loop rebinds `k` (the mixture size); `k` is not
    # used again afterwards, so no harm, but the shadowing is fragile.
    for k, v in updates.iteritems():
        k.default_update = v
    s_temp = concatenate([s_0[None, :, :], s_temp[:-1]], axis=0)# seems like this is for creating an additional dimension to s_0
    s_temp.name = 'h_1'#gisse
    z_1_temp.name = 'z_1'#gisse
    z_t_temp.name = 'z'
    theta_mu_temp.name = 'theta_mu_temp'
    theta_sig_temp.name = 'theta_sig_temp'
    coeff_temp.name = 'coeff'
    #corr_temp.name = 'corr'
    #binary_temp.name = 'binary'
    # ---- reconstruction target: aggregate signal or one appliance ------
    if (flgAgg == -1 ):
        # NOTE(review): this branch looks stale -- T.abs should be T.abs_
        # and x_shape is used here but only assigned further below; it
        # would raise if flgAgg == -1 were ever exercised.
        prediction.name = 'x_reconstructed'
        mse = T.mean((prediction - x)**2) # CHECK RESHAPE with an assertion
        mae = T.mean( T.abs(prediction - x) )
        mse.name = 'mse'
        pred_in = x.reshape((x_shape[0]*x_shape[1], -1))
    else:
        prediction.name = 'pred_'+str(flgAgg)
        #[:,:,flgAgg].reshape((y.shape[0],y.shape[1],1)
        mse = T.mean((prediction - y)**2) # As axis = None is calculated for all
        mae = T.mean( T.abs_(prediction - y) )
        mse.name = 'mse'
        mae.name = 'mae'
        pred_in = y.reshape((y.shape[0]*y.shape[1],-1))
    # ---- ELBO terms ------------------------------------------------------
    kl_temp = KLGaussianGaussian(phi_mu_temp, phi_sig_temp, prior_mu_temp, prior_sig_temp)
    x_shape = x.shape
    theta_mu_in = theta_mu_temp.reshape((x_shape[0]*x_shape[1], -1))
    theta_sig_in = theta_sig_temp.reshape((x_shape[0]*x_shape[1], -1))
    coeff_in = coeff_temp.reshape((x_shape[0]*x_shape[1], -1))
    #corr_in = corr_temp.reshape((x_shape[0]*x_shape[1], -1))
    #binary_in = binary_temp.reshape((x_shape[0]*x_shape[1], -1))
    recon = GMM(pred_in, theta_mu_in, theta_sig_in, coeff_in)# BiGMM(x_in, theta_mu_in, theta_sig_in, coeff_in, corr_in, binary_in)
    recon = recon.reshape((x_shape[0], x_shape[1]))
    recon.name = 'gmm_out'
    #recon = recon * mask
    recon_term = recon.sum(axis=0).mean()
    recon_term.name = 'recon_term'
    #kl_temp = kl_temp * mask
    kl_term = kl_temp.sum(axis=0).mean()
    kl_term.name = 'kl_term'
    # Training cost: negative ELBO, optionally augmented with the MSE term.
    if (flgMSE):
        nll_upper_bound = recon_term + kl_term + mse
    else:
        nll_upper_bound = recon_term + kl_term #+ mse
    nll_upper_bound.name = 'nll_upper_bound'
    # ---- diagnostic statistics (named so Monitoring can log them) -------
    max_x = x.max()
    mean_x = x.mean()
    min_x = x.min()
    max_x.name = 'max_x'
    mean_x.name = 'mean_x'
    min_x.name = 'min_x'
    max_theta_mu = theta_mu_in.max()
    mean_theta_mu = theta_mu_in.mean()
    min_theta_mu = theta_mu_in.min()
    max_theta_mu.name = 'max_theta_mu'
    mean_theta_mu.name = 'mean_theta_mu'
    min_theta_mu.name = 'min_theta_mu'
    max_theta_sig = theta_sig_in.max()
    mean_theta_sig = theta_sig_in.mean()
    min_theta_sig = theta_sig_in.min()
    max_theta_sig.name = 'max_theta_sig'
    mean_theta_sig.name = 'mean_theta_sig'
    min_theta_sig.name = 'min_theta_sig'
    coeff_max = coeff_in.max()
    coeff_min = coeff_in.min()
    coeff_mean_max = coeff_in.mean(axis=0).max()
    coeff_mean_min = coeff_in.mean(axis=0).min()
    coeff_max.name = 'coeff_max'
    coeff_min.name = 'coeff_min'
    coeff_mean_max.name = 'coeff_mean_max'
    coeff_mean_min.name = 'coeff_mean_min'
    max_phi_sig = phi_sig_temp.max()
    mean_phi_sig = phi_sig_temp.mean()
    min_phi_sig = phi_sig_temp.min()
    max_phi_sig.name = 'max_phi_sig'
    mean_phi_sig.name = 'mean_phi_sig'
    min_phi_sig.name = 'min_phi_sig'
    max_prior_sig = prior_sig_temp.max()
    mean_prior_sig = prior_sig_temp.mean()
    min_prior_sig = prior_sig_temp.min()
    max_prior_sig.name = 'max_prior_sig'
    mean_prior_sig.name = 'mean_prior_sig'
    min_prior_sig.name = 'min_prior_sig'
    # ---- training assembly ----------------------------------------------
    model.inputs = [x, mask, y, y_mask]
    model.params = params
    model.nodes = nodes
    optimizer = Adam(
        lr=lr
    )
    extension = [
        GradientClipping(batch_size=batch_size),
        EpochCount(epoch),
        Monitoring(freq=monitoring_freq,
                   ddout=[nll_upper_bound, recon_term, kl_term, mse, mae,
                          theta_mu_temp, theta_sig_temp, z_t_temp, prediction,#corr_temp, binary_temp,
                          s_temp, z_1_temp],
                   indexSep=5,
                   indexDDoutPlot = [(0,theta_mu_temp), (2, z_t_temp), (3,prediction)],
                   instancesPlot = instancesPlot, #{0:[4,20],2:[5,10]},#, 80,150
                   data=[Iterator(valid_data, batch_size)],
                   savedFolder = save_path),
        Picklize(freq=monitoring_freq, path=save_path),
        EarlyStopping(freq=monitoring_freq, path=save_path, channel=channel_name),
        WeightNorm()
    ]
    lr_iterations = {0:lr}
    mainloop = Training(
        name=pkl_name,
        data=Iterator(train_data, batch_size),
        model=model,
        optimizer=optimizer,
        cost=nll_upper_bound,
        outputs=[nll_upper_bound],
        extension=extension,
        lr_iterations=lr_iterations
    )
    mainloop.run()
    # ---- dump per-monitoring-step metrics to CSV ------------------------
    fLog = open(save_path+'/output.csv', 'w')
    fLog.write("log,kl,nll_upper_bound,mse,mae\n")
    for i , item in enumerate(mainloop.trainlog.monitor['nll_upper_bound']):
        a = mainloop.trainlog.monitor['recon_term'][i]
        b = mainloop.trainlog.monitor['kl_term'][i]
        c = mainloop.trainlog.monitor['nll_upper_bound'][i]
        d = mainloop.trainlog.monitor['mse'][i]
        e = mainloop.trainlog.monitor['mae'][i]
        fLog.write("{},{},{},{},{}\n".format(a,b,c,d,e))
if __name__ == "__main__":
    import sys
    # Pick the config file from the command line, defaulting to config.txt.
    if len(sys.argv) > 1:
        config_file_name = sys.argv[-1]
    else:
        config_file_name = 'config.txt'
    # Parse "key value" pairs, one per line, into an ordered parameter map.
    params = OrderedDict()
    with open(config_file_name, 'r') as f:
        for line in f:
            line = line.split('\n')[0]
            if not line:
                continue  # tolerate blank lines (the original crashed on them)
            param_list = line.split(' ')
            params[param_list[0]] = param_list[1]
    params['save_path'] = params['save_path']+'/gmm_mse/'+datetime.datetime.now().strftime("%y-%m-%d_%H-%M")
    os.makedirs(params['save_path'])
    # Bug fix: archive the config file that was actually used, not a
    # hard-coded 'config.txt'.
    shutil.copy(config_file_name, params['save_path']+'/config.txt')
    main(params)
| [
"gissemari@gmail.com"
] | gissemari@gmail.com |
07e0a3f5263b42f5cfbf6a40d74adadb61a31aac | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03836/s870323568.py | ad0d20b04f7d66288bd881e8921f6962e16fadd5 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 516 | py | def main():
    # Read start (sx, sy) and target (tx, ty); build a walk that goes from
    # S to T and back twice without reusing any grid edge.
    # Assumes tx >= sx and ty >= sy -- presumably guaranteed by the
    # problem constraints; TODO confirm against the problem statement.
    sx, sy, tx, ty = map(int, input().split())
    x = tx - sx
    y = ty - sy
    s = ""
    for i in range(2):
        tmp = ""
        if i:
            # second round trip: detour one cell outside the bounding box
            # so its edges are disjoint from the first trip's.
            tmp += "D"
            tmp += "R"*(x+1)
            tmp += "U"*(y+1)
            tmp += "L"
        else:
            # first trip: straight right, then straight up.
            tmp += "R"*x
            tmp += "U"*y
        # Append the outbound walk plus its mirror image (U<->D, L<->R)
        # to return to the start.
        s += tmp + tmp.translate(str.maketrans({"U": "D",
                                                "D": "U", "L": "R", "R": "L"}))
    print(s)
if __name__ == "__main__":
    main()
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
2aa1ae4d150ed24a64366443fb9b2d8849508527 | daaee41b0fae1453a9a3716f61e4f129031aefd2 | /task5_2.py | f5f9647005ce634e3656f40a7d9d4095c163ec66 | [] | no_license | willnight/PythonHomeWork | 335ce7fab365f132d23d54162489ac027ee3a406 | bae08ee91b9a4417f9550f05808756c5f122246b | refs/heads/master | 2023-01-19T00:28:37.163476 | 2020-11-27T19:31:58 | 2020-11-27T19:31:58 | 309,486,164 | 0 | 0 | null | 2020-12-02T15:23:34 | 2020-11-02T20:24:28 | Python | UTF-8 | Python | false | false | 326 | py | from utils import f_path
# Report the number of lines in count_me.txt, then a per-line word count.
# (The printed labels are in Russian: "number of lines in file ...".)
with open(f"{f_path}count_me.txt", "r", encoding='utf-8') as f_obj:
    content = f_obj.readlines()
    print(f"Кол-во строк в файле {f_obj.name} - {len(content)}")
    print("-" * 30)
    for num, line in enumerate(content, start=1):
        # naive word count: splits on single spaces only
        print(f"{num}: {len(line.split(' '))}")
| [
"dmitrofanova@outlook.com"
] | dmitrofanova@outlook.com |
d7c480c1e7b1e541f1c460b215f39a1e4d16d84d | 3227d718331a096ce5f6b72babf2946644a2d845 | /Server/db_init.py | d5435a2b68303949f725cd56f90ff99caac70bef | [] | no_license | ziaoang/CustomCS | 0a403bcf6d149cfe60be9f427e0280623d27b227 | 6f6e8091c038cb7bafd31555d87d6d3171bf19e6 | refs/heads/master | 2020-12-30T18:02:23.247881 | 2017-06-05T09:35:57 | 2017-06-05T09:35:57 | 90,941,722 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 495 | py | # encoding=utf-8
import sys
# Python-2-only hack: force the default string encoding to UTF-8 so that
# non-ASCII usernames survive implicit str/unicode conversions.
reload(sys)
sys.setdefaultencoding("utf8")
from index import db, as_dict, md5, User
def init_user():
    """Seed the user table from data.txt (one "username<TAB>max_score" per line)."""
    userList = []
    # `with` closes the handle; the original iterated an unclosed open().
    with open('data.txt') as data_file:
        for line in data_file:
            fields = line.strip().split('\t')
            user = User()
            user.username = fields[0]
            # NOTE(review): an unsalted md5 of one shared default password is
            # not safe for real credentials -- confirm before deploying.
            user.password_hash = md5('123456')
            user.max_score = int(fields[1])
            user.is_save = False
            userList.append(user)
    db.session.add_all(userList)
    db.session.commit()
def main():
    # Entry point: populate the user table from data.txt.
    init_user()
if __name__ == '__main__':
    main()
| [
"ziaoang@gmail.com"
] | ziaoang@gmail.com |
63bfd9f43ed9970ab5788f329283691ae2cb726d | c4c2b7ab6760bdb5acbc21c6dda6b3351217dd98 | /junbo/LC_670.py | eaf78b5c97366d0c7024e389c29b806d29fd5912 | [] | no_license | JunboChen94/Leetcode | 2de3c5f499b6e25ea4799dfbab276ee728c64993 | 5aa06207856ac6d44629bed76709387b356d565f | refs/heads/master | 2023-01-06T13:31:55.456009 | 2020-11-02T03:03:30 | 2020-11-02T03:03:30 | 291,859,519 | 2 | 3 | null | 2020-09-02T03:09:15 | 2020-09-01T00:56:55 | Python | UTF-8 | Python | false | false | 601 | py | class Solution:
def maximumSwap(self, num: int) -> int:
# help random access digit and swap
i, D = 1, list(str(num))
# first first decreasing
while i < len(D) and D[i-1] >= D[i]:
i += 1
# swap A, B: A in [:i], B in [i:]
# B is the rightest duplicate of the largest element in [i:]
# A is the leftest element < B
if i < len(D):
p1, p2 = 0, str(num).rfind(max(D[i:]))
while p1 < i-1 and D[p1] >= D[p2]:
p1 += 1
D[p1], D[p2] = D[p2], D[p1]
return int(''.join(D))
| [
"jc4648@columbia.edu"
] | jc4648@columbia.edu |
5f61ac00c9bc6094a0e98a2a712db02aa7f68b95 | fe4ae5b66476425bf298049f8176512c9137dec7 | /color_average.py | fa4ecacd1b7dcaa24bc7117c98f29123b085f76e | [] | no_license | trisetiobr/detektif-tomat | 20685fc1298659f3a85566494502f081f70f9049 | b9503f0a261c42198c0b24c471558af4efefcb9a | refs/heads/master | 2021-05-01T22:30:59.107205 | 2017-01-11T10:46:47 | 2017-01-11T10:46:47 | 77,390,291 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 261 | py | import numpy as np
def getAverage(image):
    """Return the rounded per-channel mean colour of *image*.

    *image* is an H x W x C array with C >= 3; only the first three
    channels are reported, in the array's native channel order.
    """
    per_row_mean = np.average(image, axis=0)
    mean_color = np.round(np.average(per_row_mean, axis=0))
    return [mean_color[i] for i in range(3)]
| [
"trisetiobr@gmail.com"
] | trisetiobr@gmail.com |
9139c93e7b26ce8b64bd9f7b056725ae4ff4a3c5 | 80a9bc980d6b71bf991abe303f6f6f9e042e0fc3 | /trunk/src/nemoversion/repository/svncommand.py | b886d4c489feaabfbc53e517eb08b8ec15471859 | [] | no_license | BackupTheBerlios/nemoversion-svn | 218a080b25f0ae8f9e0ddbe3a1de08bc465d80c2 | ca153ae44d294b37dcf67e3bf7ad88cb2be4e83f | refs/heads/master | 2020-05-07T12:59:54.838710 | 2008-02-26T14:56:57 | 2008-02-26T14:56:57 | 40,823,571 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,230 | py | import commands
class Command(object):
    """Base class for Subversion shell commands bound to a single file.

    Subclasses implement preExecute() to invoke the ``svn`` binary and
    store its (status, output) pair in ``self.output``.
    """
    def __init__(self, file):
        self.output = None
        self.__file = file
    def outputFormatedAsDict(self):
        # self.output is the (exit_status, text) pair that
        # commands.getstatusoutput() produces.
        return {"status":self.output[0], "output":self.output[1]}
    def getFile(self):
        # NOTE(review): [7:] strips a leading 'file://' scheme from the URI;
        # confirm every caller supplies file:// URIs, otherwise this truncates.
        return self.__file.get_uri()[7:]
    # TODO: replace this getter/setter pair with a plain attribute or property
    def setFile(self, value):
        self.__file = value
    def preExecute(self):
        raise NotImplementedError
    def execute(self):
        # Template method: subclass fills self.output, then we format it.
        self.preExecute()
        return self.outputFormatedAsDict()
    def __eq__(self, other):
        # Equal when targeting the same file with the same command class
        # (classes compared by name).
        sameFile = other.getFile() == self.getFile()
        sameClass = other.__class__.__name__ == self.__class__.__name__
        if sameFile and sameClass:
            return True
        return False
    file = property(getFile, setFile)
class AddCommand(Command):
    # Schedule the file for addition via 'svn add'.
    # NOTE(review): the path is concatenated unquoted into a shell string;
    # paths containing spaces or shell metacharacters will break or be unsafe.
    def preExecute(self):
        self.output = commands.getstatusoutput("svn add " + self.file)
class RemoveCommand(Command):
    # Schedule the file for deletion via 'svn remove' (same unquoted-path
    # caveat as AddCommand).
    def preExecute(self):
        self.output = commands.getstatusoutput("svn remove " + self.file)
class StatusCommand(Command):
    # Query the file's working-copy state via 'svn status' (same unquoted-path
    # caveat as AddCommand).
    def preExecute(self):
        self.output = commands.getstatusoutput("svn status " + self.file)
"marcosmachado@e640259a-e338-0410-83ff-e42d3441542b"
] | marcosmachado@e640259a-e338-0410-83ff-e42d3441542b |
d66db28298be6a2199d7c88640fdf9e79b0785eb | d1a58bd58111db63c9d59cf70153a100a3c262b3 | /model/__init__.py | 5e4c28f1e4a88d5ccc0754a786abcad4f355d046 | [] | no_license | zhenfeng021/feng | 007f15401acaeb37084da6a46575f6baf6e7164c | 91c59e84238568f003e23fca55f2bb13f6f84363 | refs/heads/master | 2020-05-21T09:19:34.008768 | 2019-05-10T15:32:29 | 2019-05-10T15:32:29 | 185,995,732 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 973 | py | #!/usr/bin/env python
# encoding: utf-8
__author__ = 'ethan'
from resource.config import DB as conf
from peewee import MySQLDatabase
from playhouse.signals import Model, pre_save, post_save
# Single shared peewee MySQL connection, configured from resource/config.py.
db = MySQLDatabase(database=conf['database'],
                   host=conf["host"],
                   port=conf['port'],
                   user=conf['user'],
                   passwd=conf['password'],
                   charset=conf['charset'])
def table_function(model):
name = model.__name__
result = []
count = 0
for s in name:
if 65 <= ord(s) <= 90:
if count == 0 and s != name[0]:
count = count + 1
result.append('_')
result.append(s.lower())
elif 97 <= ord(s) <= 122:
result.append(s.lower())
else:
result.append("_")
return ''.join(result)
class BaseModel(Model):
    """Common base for all models: shared connection and table naming."""
    class Meta:
        # Every model uses the single MySQL connection defined above.
        database = db
        # Table names are derived from class names (see table_function).
        table_function = table_function
"zhenfeng@live.cn"
] | zhenfeng@live.cn |
4396a793bab21b4ed475acccc31b662595b289c2 | 75c769c12b9f4df8c7b373b60bbc7b8a8e2881d8 | /django/smartcam/cam/views.py | d03c9aa580f783dad74b38ad47bad01ceb4a8263 | [] | no_license | Eddytor/ba-smart-cam | c8bb55b7d79c133dfee45b23193b9dfb4c9a746a | e529eb55042accbe98a4e9cf3221828983b958dd | refs/heads/main | 2023-04-03T20:03:37.719466 | 2021-04-07T08:01:38 | 2021-04-07T08:01:38 | 355,450,412 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,865 | py | from django.shortcuts import render
from django.http import HttpResponse
from django.db import connection
from django.http import JsonResponse
from django.core.files.uploadedfile import SimpleUploadedFile
from django.shortcuts import render
from django import forms
from PIL import Image
from .forms import ImageForm
from django.core.files import File
from io import BytesIO
from .models import ImageFile
from .models import ImageView
from django.conf import settings
from django.contrib.auth.decorators import login_required
import os
def home(request):
    """Render a minimal plain-HTML landing page."""
    # Fixed the markup: the original never closed the tag ('<h1>...<h1>').
    return HttpResponse('<h1>Smart Cam</h1>')
@login_required
def db_request(request):
    """Return every row of the ``detections`` table as a JSON array."""
    # Use the cursor as a context manager so it is always closed; the
    # original leaked the cursor.
    with connection.cursor() as cursor:
        cursor.execute("SELECT * FROM detections")
        dicts = dictfetchall(cursor)
    # safe=False is required because the top-level JSON value is a list.
    return JsonResponse(dicts, safe=False)
@login_required
def db_request_cams(request):
    """Return every row of the ``cameras`` table as a JSON array."""
    # Use the cursor as a context manager so it is always closed; the
    # original leaked the cursor.
    with connection.cursor() as cursor:
        cursor.execute("SELECT * FROM cameras")
        dicts = dictfetchall(cursor)
    # safe=False is required because the top-level JSON value is a list.
    return JsonResponse(dicts, safe=False)
@login_required
def image_view(request):
    # Renders the image page with the first ImageView row as template
    # context. NOTE(review): ImageView.objects.first() returns None on an
    # empty table -- confirm the template tolerates a missing "view".
    return render(request, 'cam/image.html', context={"view": ImageView.objects.first()})
def img_request(request):
    """Re-save every detection image into MEDIA_ROOT as a JPEG.

    Reads all rows from ``detections``, opens the file referenced by each
    row's ``image_path`` and writes it under ``settings.MEDIA_ROOT`` with
    the same base name.
    """
    with connection.cursor() as cursor:  # close the cursor (was leaked)
        cursor.execute("SELECT * FROM detections")
        dicts = dictfetchall(cursor)
    for entry in dicts:
        img = Image.open(entry['image_path'])
        filename = os.path.basename(entry['image_path'])
        # os.path.join instead of string concatenation: the original
        # 'MEDIA_ROOT + filename' silently produced a wrong path whenever
        # MEDIA_ROOT lacked a trailing separator.
        new_path = os.path.join(settings.MEDIA_ROOT, filename)
        img.save(new_path, "JPEG")
    return HttpResponse('<h1>Updated Images</h1>')  # also closed the h1 tag
def dictfetchall(cursor):
    """Return every row from ``cursor`` as a dict keyed by column name."""
    # cursor.description is a sequence of per-column tuples whose first
    # element is the column name (DB-API 2.0).
    field_names = [desc[0] for desc in cursor.description]
    rows = []
    for values in cursor.fetchall():
        rows.append(dict(zip(field_names, values)))
    return rows
| [
"schroeder.eduard@gmail.com"
] | schroeder.eduard@gmail.com |
ebca5a69981b65a76fc4c5c52dc7719fe9aabee3 | 0529196c4d0f8ac25afa8d657413d4fc1e6dd241 | /runnie0427/13015/13015.py2.py | 93bae9ee407b969a75da385bdc1739a27aa9d784 | [] | no_license | riyuna/boj | af9e1054737816ec64cbef5df4927c749808d04e | 06420dd38d4ac8e7faa9e26172b30c9a3d4e7f91 | refs/heads/master | 2023-03-17T17:47:37.198570 | 2021-03-09T06:11:41 | 2021-03-09T06:11:41 | 345,656,935 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 17,370 | py | <!DOCTYPE html>
<html lang="ko">
<head>
<title>Baekjoon Online Judge</title><meta name="viewport" content="width=device-width, initial-scale=1.0"><meta charset="utf-8"><meta name="author" content="스타트링크 (Startlink)"><meta name="keywords" content="ACM-ICPC, ICPC, 프로그래밍, 온라인 저지, 정보올림피아드, 코딩, 알고리즘, 대회, 올림피아드, 자료구조"><meta http-equiv="X-UA-Compatible" content="IE=edge"><meta property="og:type" content="website"><meta property="og:image" content="http://onlinejudgeimages.s3-ap-northeast-1.amazonaws.com/images/boj-og-1200.png"><meta property="og:site_name" content="Baekjoon Online Judge"><meta name="format-detection" content = "telephone=no"><meta name="msapplication-config" content="none"><link rel="apple-touch-icon" sizes="180x180" href="/apple-touch-icon.png"><link rel="icon" type="image/png" sizes="32x32" href="/favicon-32x32.png"><link rel="icon" type="image/png" sizes="16x16" href="/favicon-16x16.png"><link rel="manifest" href="/site.webmanifest"><link rel="mask-icon" href="/safari-pinned-tab.svg" color="#0076c0"><meta name="msapplication-TileColor" content="#00aba9"><meta name="theme-color" content="#ffffff"><link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/twitter-bootstrap/3.2.0/css/bootstrap.min.css"><link rel="stylesheet" href="https://ddo7jzca0m2vt.cloudfront.net/unify/css/style.css?version=20210107"><link href="https://fonts.googleapis.com/css?family=Noto+Sans+KR:400,700|Open+Sans:400,400i,700,700i|Source+Code+Pro&subset=korean" rel="stylesheet"><link rel="stylesheet" href="https://ddo7jzca0m2vt.cloudfront.net/css/connect.css?version=20210107"><link rel="stylesheet" href="https://ddo7jzca0m2vt.cloudfront.net/css/result.css?version=20210107"><link rel="stylesheet" href="https://ddo7jzca0m2vt.cloudfront.net/unify/css/custom.css?version=20210107"><link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/4.6.3/css/font-awesome.css"><link rel="stylesheet" href="https://ddo7jzca0m2vt.cloudfront.net/unify/css/theme-colors/blue.css?version=20210107"><link 
rel="stylesheet" href="https://ddo7jzca0m2vt.cloudfront.net/css/pace.css">
<script async src="https://www.googletagmanager.com/gtag/js?id=UA-10874097-3"></script>
<script>
window.dataLayer = window.dataLayer || [];
function gtag(){dataLayer.push(arguments);}
gtag('js', new Date());
gtag('config', 'UA-10874097-3');
</script>
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/noty/3.1.4/noty.min.css" /><meta name="username" content="">
<link rel="stylesheet" href="https://ddo7jzca0m2vt.cloudfront.net/unify/css/pages/page_404_error.css">
</head>
<body>
<div class="wrapper">
<div class="header no-print"><div class="topbar"><div class="container"><ul class="loginbar pull-right"><li><a href = "/register">회원가입</a></li><li class="topbar-devider"></li><li><a href = "/login?next=%2Fsource%2Fdownload%2F9854798">로그인</a></li></ul></div></div><div class="navbar navbar-default mega-menu" role="navigation"><div class="container"><div class="navbar-header"><button type="button" class="navbar-toggle" data-toggle="collapse" data-target=".navbar-responsive-collapse"><span class="sr-only">Toggle navigation</span><span class="fa fa-bars"></span></button><a class="navbar-brand" href="/"><img id="logo-header" src="https://d2gd6pc034wcta.cloudfront.net/images/logo@2x.png" alt="Logo" data-retina></a></div><div class="collapse navbar-collapse navbar-responsive-collapse"><ul class="nav navbar-nav"><li class="dropdown mega-menu-fullwidth "><a href="javascript:void(0);" class="dropdown-toggle" data-toggle="dropdown">문제</a><ul class="dropdown-menu"><li><div class="mega-menu-content"><div class="container"><div class="row equal-height"><div class="col-md-3 equal-height-in"><ul class="list-unstyled equal-height-list"><li><h3>문제</h3></li><li><a href = "/problemset">전체 문제</a></li><li><a href = "/category">문제 출처</a></li><li><a href = "/step">단계별로 풀어보기</a></li><li><a href = "/problem/tags">알고리즘 분류</a></li><li><a href = "/problem/added">새로 추가된 문제</a></li><li><a href = "/problem/added/1">새로 추가된 영어 문제</a></li><li><a href = "/problem/ranking">문제 순위</a></li></ul></div><div class="col-md-3 equal-height-in"><ul class="list-unstyled equal-height-list"><li><h3>문제</h3></li><li><a href="/problem/only">푼 사람이 한 명인 문제</a></li><li><a href="/problem/nobody">아무도 못 푼 문제</a></li><li><a href="/problem/recent/submit">최근 제출된 문제</a></li><li><a href="/problem/recent/accepted">최근 풀린 문제</a></li><li><a href="/problem/random">랜덤</a></li></ul></div><div class="col-md-3 equal-height-in"><ul class="list-unstyled equal-height-list"><li><h3>출처</h3></li><li><a href = "/category/1">ICPC</a></li><li><a 
href = "/category/2">Olympiad</a></li><li><a href = "/category/55">한국정보올림피아드</a></li><li><a href = "/category/57">한국정보올림피아드시․도지역본선</a></li><li><a href = "/category/318">전국 대학생 프로그래밍 대회 동아리 연합</a></li><li><a href = "/category/5">대학교 대회</a></li><li><a href = "/category/428">카카오 코드 페스티벌</a></li><li><a href = "/category/215">Coder's High</a></li></ul></div><div class="col-md-3 equal-height-in"><ul class="list-unstyled equal-height-list"><li><h3>ICPC</h3></li><li><a href = "/category/7">Regionals</a></li><li><a href = "/category/4">World Finals</a></li><li><a href = "/category/211">Korea Regional</a></li><li><a href = "/category/34">Africa and the Middle East Regionals</a></li><li><a href = "/category/10">Europe Regionals</a></li><li><a href = "/category/103">Latin America Regionals</a></li><li><a href = "/category/8">North America Regionals</a></li><li><a href = "/category/92">South Pacific Regionals</a></li></ul></div></div></div></div></li></ul></li><li><a href = "/workbook/top">문제집</a></li><li><a href = "/contest/official/list">대회<span class='badge badge-red rounded-2x'>2</span></a></li><li><a href = "/status">채점 현황</a></li><li><a href = "/ranklist">랭킹</a></li><li><a href = "/board/list/all">게시판</a></li><li><a href = "/group/list/all">그룹</a></li><li><a href = "/blog/list">블로그</a></li><li><a href = "/lectures">강의</a></li><li><a href = "/search"><i class="fa fa-search search-btn"></i></a></li></ul></div></div></div></div><form action="/logout" method="post" id="logout_form"><input type='hidden' value='%2Fsource%2Fdownload%2F9854798' name="next"></form>
<div class="container content">
<div class="col-md-8 col-md-offset-2">
<div class="error-v1">
<span class="error-v1-title">404</span>
<span>Not found</span>
<div class="margin-bottom-20"></div>
</div>
<div class="text-center">
<span style="font-size:18px;">강의 슬라이드의 첨부 소스 코드가 404 에러가 뜨는 경우에는 링크를 복사/붙여넣기 해주세요.</span>
</div>
<div class="margin-bottom-40"></div>
</div>
</div>
<div class="footer-v3 no-print"><div class="footer"><div class="container"><div class="row"><div class="col-sm-3 md-margin-bottom-40"><div class="thumb-headline"><h2>Baekjoon Online Judge</h2></div><ul class="list-unstyled simple-list margin-bottom-10"><li><a href="/about">소개</a></li><li><a href="/news">뉴스</a></li><li><a href="/live">생중계</a></li><li><a href="/poll">설문조사</a></li><li><a href="/blog">블로그</a></li><li><a href="/calendar">캘린더</a></li><li><a href="/donate">기부하기</a></li><li><a href="https://github.com/Startlink/BOJ-Feature-Request">기능 추가 요청</a></li><li><a href="https://github.com/Startlink/BOJ-spj">스페셜 저지 제작</a></li><li><a href="/labs">실험실</a></li></ul><div class="thumb-headline"><h2>채점 현황</h2></div><ul class="list-unstyled simple-list"><li><a href="/status">채점 현황</a></li></ul></div><div class="col-sm-3 md-margin-bottom-40"><div class="thumb-headline"><h2>문제</h2></div><ul class="list-unstyled simple-list margin-bottom-10"><li><a href="/problemset">문제</a></li><li><a href="/step">단계별로 풀어보기</a></li><li><a href="/problem/tags">알고리즘 분류</a></li><li><a href="/problem/added">새로 추가된 문제</a></li><li><a href="/problem/added/1">새로 추가된 영어 문제</a></li><li><a href="/problem/ranking">문제 순위</a></li><li><a href="/problem/recent/submit">최근 제출된 문제</a></li><li><a href="/problem/recent/accepted">최근 풀린 문제</a></li><li><a href="/change">재채점 및 문제 수정</a></li></ul><div class="thumb-headline"><h2>유저 대회 / 고등학교 대회</h2></div><ul class="list-inline simple-list margin-bottom"><li><a href="/category/353">FunctionCup</a></li><li><a href="/category/319">kriiicon</a></li><li><a href="/category/420">구데기컵</a></li><li><a href="/category/358">꼬마컵</a></li><li><a href="/category/421">네블컵</a></li><li><a href="/category/413">소프트콘</a></li><li><a href="/category/416">웰노운컵</a></li><li><a href="/category/detail/1743">HYEA Cup</a></li><li><a href="/category/364">경기과학고등학교</a></li><li><a href="/category/417">대구과학고등학교</a></li><li><a href="/category/429">부산일과학고</a></li><li><a 
href="/category/435">서울과학고등학교</a></li><li><a href="/category/394">선린인터넷고등학교</a></li></ul></div><div class="col-sm-3 md-margin-bottom-40"><div class="thumb-headline"><h2>출처</h2></div><ul class="list-unstyled simple-list margin-bottom-10"><li><a href="/category/1">ICPC</a></li><li><a href="/category/211">ICPC Korea Regional</a></li><li><a href="/category/2">Olympiad</a></li><li><a href="/category/55">한국정보올림피아드</a></li><li><a href="/category/57">한국정보올림피아드시․도지역본선</a></li><li><a href="/category/318">전국 대학생 프로그래밍 대회 동아리 연합</a></li><li><a href="/category/5">대학교 대회</a></li><li><a href="/category/428">카카오 코드 페스티벌</a></li><li><a href="/category/215">Coder's High</a></li></ul><div class="thumb-headline"><h2>대학교 대회</h2></div><ul class="list-inline simple-list"><li><a href="/category/320">KAIST</a></li><li><a href="/category/426">POSTECH</a></li><li><a href="/category/341">고려대학교</a></li><li><a href="/category/434">광주과학기술원</a></li><li><a href="/category/361">국민대학교</a></li><li><a href="/category/83">서강대학교</a></li><li><a href="/category/354">서울대학교</a></li><li><a href="/category/352">숭실대학교</a></li><li><a href="/category/408">아주대학교</a></li><li><a href="/category/334">연세대학교</a></li><li><a href="/category/336">인하대학교</a></li><li><a href="/category/347">전북대학교</a></li><li><a href="/category/400">중앙대학교</a></li><li><a href="/category/402">충남대학교</a></li><li><a href="/category/418">한양대 ERICA</a></li><li><a href="/category/363">홍익대학교</a></li><li><a href="/category/409">경인지역 6개대학 연합 프로그래밍 경시대회</a></li></ul></div><div class="col-sm-3 md-margin-bottom-40"><div class="thumb-headline"><h2>도움말</h2></div><ul class="list-unstyled simple-list margin-bottom-10"><li><a href="/help/judge">채점 도움말 및 채점 환경</a></li><li><a href="/help/rejudge">재채점 안내</a></li><li><a href="/help/rte">런타임 에러 도움말</a></li><li><a href="/help/problem">문제 스타일 안내</a></li><li><a href="/help/language">컴파일 또는 실행 옵션, 컴파일러 버전, 언어 도움말</a></li><li><a href="/help/workbook">문제집 도움말</a></li><li><a href="/help/contest">대회 개최 안내</a></li><li><a 
href="/help/problem-add">문제 출제 안내</a></li><li><a href="/help/rule">이용 규칙</a></li><li><a href="/help/stat">통계 도움말</a></li><li><a href="/help/question">질문 도움말</a></li><li><a href="/help/faq">자주묻는 질문</a></li><li><a href="/help/lecture">강의 안내</a></li><li><a href="/help/short">짧은 주소 안내</a></li><li><a href="/help/ad">광고 안내</a></li></ul></div></div></div><div class="copyright"><div class="container"><div class="row"><div class="col-md-9 col-sm-12"><p>© 2021 All Rights Reserved. <a href="https://startlink.io">주식회사 스타트링크</a> | <a href="/terms">서비스 약관</a> | <a href="/privacy">개인정보 보호</a> | <a href="/terms/payment">결제 이용 약관</a> | <a href="https://boj.startlink.help/hc/ko">도움말</a> | <a href="http://startl.ink/2pmlJaY">광고 문의</a> | <a href="https://github.com/Startlink/update-note/blob/master/boj.md">업데이트 노트</a> | <a href="https://github.com/Startlink/update-note/blob/master/boj-issues.md">이슈</a> | <a href="https://github.com/Startlink/update-note/blob/master/boj-todo.md">TODO</a></p></div><div class="col-md-3 col-sm-12"><ul class="social-icons pull-right"><li><a href="https://www.facebook.com/onlinejudge" data-original-title="Facebook" class="rounded-x social_facebook"></a></li><li><a href="https://startlink.blog" data-original-title="Wordpress" class="rounded-x social_wordpress"></a></li></ul></div></div><div class="row"><div class="col-sm-12"><a href="https://startlink.io" class="hidden-xs"><img src="https://d2gd6pc034wcta.cloudfront.net/logo/startlink-logo-white-only.png" class="pull-right startlink-logo"></a><ul class="list-unstyled simple-list"><li>사업자 등록 번호: 541-88-00682</li><li>대표자명: 최백준</li><li>주소: 서울시 서초구 서초대로74길 29 서초파라곤 412호</li><li>전화번호: 02-521-0487 (이메일로 연락 주세요)</li><li>이메일: <a href="mailto:contacts@startlink.io">contacts@startlink.io</a></li><li>통신판매신고번호: 제 2017-서울서초-2193 호</li></ul></div><div class="col-xs-9"><p id="no-acm-icpc"></p></div><div class="col-xs-3"></div></div></div></div></div>
</div>
<div id="fb-root"></div><script>
window.fbAsyncInit = function() {
FB.init({
appId : '322026491226049',
cookie : true,
xfbml : true,
version : 'v2.8'
});
};
(function(d, s, id) {
var js, fjs = d.getElementsByTagName(s)[0];
if (d.getElementById(id)) return;
js = d.createElement(s); js.id = id;
js.src = "//connect.facebook.net/ko_KR/sdk.js";
fjs.parentNode.insertBefore(js, fjs);
}(document, 'script', 'facebook-jssdk'));
</script>
<script>
!function(f,b,e,v,n,t,s){ if(f.fbq)return;n=f.fbq=function(){ n.callMethod?
n.callMethod.apply(n,arguments):n.queue.push(arguments) };if(!f._fbq)f._fbq=n;
n.push=n;n.loaded=!0;n.version='2.0';n.queue=[];t=b.createElement(e);t.async=!0;
t.src=v;s=b.getElementsByTagName(e)[0];s.parentNode.insertBefore(t,s) }(window,
document,'script','//connect.facebook.net/en_US/fbevents.js');
fbq('init', '1670563073163149');
fbq('track', 'PageView');
</script>
<noscript><img height="1" width="1" style="display:none" src="https://www.facebook.com/tr?id=1670563073163149&ev=PageView&noscript=1"/></noscript><script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.2.1/jquery.min.js"></script><script src="https://cdnjs.cloudflare.com/ajax/libs/jquery-migrate/3.0.1/jquery-migrate.min.js"></script><script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/twitter-bootstrap/3.2.0/js/bootstrap.min.js"></script><script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/moment.js/2.21.0/moment.min.js"></script><script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/moment.js/2.21.0/locale/ko.js"></script><script type="text/javascript" src="https://ddo7jzca0m2vt.cloudfront.net/unify/js/app.min.js?version=20210107"></script><script type="text/javascript">jQuery(document).ready(function() {App.init(0);});</script><!--[if lt IE 9]><script src="https://ddo7jzca0m2vt.cloudfront.net/unify/plugins/respond.js"></script><script src="https://ddo7jzca0m2vt.cloudfront.net/unify/plugins/html5shiv.js"></script><script src="https://ddo7jzca0m2vt.cloudfront.net/unify/js/plugins/placeholder-IE-fixes.js"></script><![endif]--><script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/pace/1.0.2/pace.min.js"></script><script src="https://js.pusher.com/4.2/pusher.min.js"></script><script src="https://cdnjs.cloudflare.com/ajax/libs/noty/3.1.4/noty.min.js"></script>
<script>
window.MathJax = {
tex: {
inlineMath: [ ['$', '$'], ['\\(', '\\)'] ],
displayMath: [ ['$$','$$'], ["\\[","\\]"] ],
processEscapes: true,
tags: "ams",
autoload: {
color: [],
colorv2: ['color']
},
packages: { '[+]': ['noerrors'] }
},
options: {
ignoreHtmlClass: "no-mathjax|redactor-editor",
processHtmlClass: 'mathjax',
enableMenu: false
},
chtml: {
scale: 0.9
},
loader: {
load: ['input/tex', 'output/chtml', '[tex]/noerrors'],
}
};
</script><script src="https://polyfill.io/v3/polyfill.min.js?features=es6"></script><script id="MathJax-script" async src="https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-mml-chtml.js"></script>
</body>
</html> | [
"riyuna0427@gmail.com"
] | riyuna0427@gmail.com |
a6f5d2df9f60004210cd8da9b253e7f9e055e0ba | c92fe7e2898c10001928bf8c3dc07dc1587b6575 | /Programs/inheritance.py | 50716a2003559e36accb55b00806739897f7d3ff | [] | no_license | NithinNitz12/ProgrammingLab-Python | 1b2a59961d99a10ea3d4ac9f073b68ceff1bc5ce | 17c29e125821f20bc0e58986592e57237dbee657 | refs/heads/main | 2023-03-22T18:22:08.381848 | 2021-03-18T11:58:50 | 2021-03-18T11:58:50 | 315,343,187 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 579 | py | class Student:
#information about the class
def getdata(self,roll,name,course):
self.roll = roll
self.name = name
self.course = course
def display(self):
print("Roll no=",self.roll)
print("Name=",self.name)
print("Course Name=",self.course)
class Test(Student):  # Inherited: extends Student with a single exam mark
    """Student subclass that additionally tracks one mark."""

    def getmarks(self, mark):
        """Record the student's mark on the instance."""
        self.mark = mark

    def displaymarks(self):
        """Print the base student details followed by the mark."""
        self.display()
        print(f"Mark= {self.mark}")
# Demo: create a Test student, load its data and mark, then print everything.
# (Removed the dataset-separator residue that contaminated the last line.)
s1 = Test()
s1.getdata(501,"Tony Stark","MCA")
s1.getmarks(200)
s1.displaymarks()
"nithinraj10@hotmail.com"
] | nithinraj10@hotmail.com |
57ecb81883b3069b011a2cd07ec7f784e51400d9 | 0c3e6efcdc4c2d51facbd3a9641ef4dff8b23a2b | /Instanssi/arkisto/models.py | 3f7435ad71e9d998eefecd2a4cce61499e8a42b9 | [
"MIT"
] | permissive | maakuth/Instanssi.org | 62fefb83daa458ca6b37d1d8329c1dc3ad905161 | 0a4ff37c1afef24b65ec3f25bee348c38597b90d | refs/heads/master | 2020-12-24T17:54:06.560578 | 2012-02-22T20:05:50 | 2012-02-22T20:05:50 | 3,518,375 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,255 | py | # -*- coding: utf-8 -*-
from django.db import models
from django.contrib import admin
from imagekit.models import ImageSpec
from imagekit.processors import resize
class Tag(models.Model):
    # Free-form label that can be attached to entries (see Entry.tags).
    name = models.CharField('Tag', max_length=32)
    def __unicode__(self):
        # Python 2-era string representation.
        return self.name
    class Meta:
        verbose_name=u"tagi"
        verbose_name_plural=u"tagit"
class Event(models.Model):
    # One event edition; compos reference it via Compo.event.
    name = models.CharField('Nimi', max_length=32)
    date = models.DateField('Päivämäärä')
    def __unicode__(self):
        return self.name
    class Meta:
        verbose_name=u"tapahtuma"
        verbose_name_plural=u"tapahtumat"
class Compo(models.Model):
    # A competition category belonging to a single event.
    event = models.ForeignKey(Event)
    name = models.CharField('Nimi', max_length=32)
    description = models.TextField('Kuvaus')
    def __unicode__(self):
        # "<event name>: <compo name>"
        return self.event.name + ": " + self.name
    class Meta:
        verbose_name=u"kompo"
        verbose_name_plural=u"kompot"
class Entry(models.Model):
    # A single competition entry (production) submitted to a compo.
    compo = models.ForeignKey(Compo)
    name = models.CharField('Nimi', max_length=32)
    description = models.TextField('Kuvaus')
    creator = models.CharField('Tekijä', max_length=64)
    file = models.FileField('Tiedosto', upload_to='arkisto/entryfiles/')
    # Optional screenshot; the ImageSpec fields below are django-imagekit
    # renditions resized from it (fit-in-box JPEGs at quality 90).
    imagefile_original = models.ImageField(u'Kuva', upload_to='arkisto/entryimages/', blank=True, null=True)
    imagefile_small = ImageSpec([resize.Fit(160, 100)], image_field='imagefile_original', format='JPEG', options={'quality': 90})
    imagefile_medium = ImageSpec([resize.Fit(640, 420)], image_field='imagefile_original', format='JPEG', options={'quality': 90})
    youtube_url = models.URLField('Youtube URL', blank=True)
    tags = models.ManyToManyField(Tag)
    position = models.IntegerField('Sijoitus')  # final ranking within the compo
    def __unicode__(self):
        return self.compo.name + " " + self.name
    class Meta:
        verbose_name=u"tuotos"
        verbose_name_plural=u"tuotokset"
# Register models to admin panel. Registering the same model twice (this
# module can be imported more than once) raises AlreadyRegistered -- that
# is the only error the original bare ``except: pass`` blocks were meant
# to swallow, so catch exactly it and let real errors surface.
for _model in (Tag, Event, Compo, Entry):
    try:
        admin.site.register(_model)
    except admin.sites.AlreadyRegistered:
        pass
| [
"tuomas.virtanen@jyu.fi"
] | tuomas.virtanen@jyu.fi |
244725f21a56f4c6e7d4aa22e8ee7ec9b95c9b80 | 4e5274116a41570906319af4cdf0f2f40db4e869 | /src/cross-val/roc.py | 7b593a207bf47029e02e00d0c7438f3aa89e8384 | [] | no_license | sienaguerrero/Brain-Computer-Interfaces-Project | fb570aab4b26a4489d824901b249e6edabb813c6 | daa764eb289853a24169a649540883ee05644de9 | refs/heads/master | 2020-05-20T09:08:34.002904 | 2019-05-06T12:28:56 | 2019-05-06T12:28:56 | 185,493,390 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 230 | py | import numpy as np
import matplotlib.pyplot as plt
from sklearn.metrics import roc_curve, auc
# Test with function from randomized search
def cv_roc(n, fit_function, Ytest, Xtest):
    # Predicted class probabilities for Xtest from the fitted estimator.
    # NOTE(review): the result is discarded and ``n``/``Ytest`` are unused;
    # the roc_curve/auc imports at the top of this file are never reached.
    # The function looks unfinished (no return, no plotting).
    probas = fit_function.predict_proba(Xtest)
| [
"garpchoo@gmail.com"
] | garpchoo@gmail.com |
e5c14b65bc335fba0c7eb7ed0be656e3fb64f366 | 1031e4bfcf3f2b6a5576a7ad6910c2f6612cb75f | /src/plain_code/day8.py | 0e215e500028791c5084a8700c8acb4ad11f11c1 | [
"MIT"
] | permissive | czf0613/python-tutorial | c6b4aa5fc8aef7d076baafa953402c9612762c20 | 10680e7c3c1fce544af447e446a2fd98bc455fb3 | refs/heads/main | 2023-03-08T18:29:08.875700 | 2021-02-19T15:39:10 | 2021-02-19T15:39:10 | 333,831,091 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 266 | py | # set类型
# Python set basics: construction, add/remove, and set algebra.
a = set()
a.add('hello')
a.add('nope')
a.add('hello')  # duplicate add: sets silently ignore it
print(a)
a.remove('hello')
print(a)
a = set('hello') # builds a set of characters {'h','e','l','o'}; note: {} creates a dict, so set() is the only way to make an empty set
b = set('world')
print(a, b)
print(a & b)  # intersection
print(a | b)  # union
c=set('hello world')
print(c - a)  # difference: characters in c but not in a
| [
"1354016594@qq.com"
] | 1354016594@qq.com |
92764ee5fa04ada4b84d89a4c40d1645710675ca | afc8d5a9b1c2dd476ea59a7211b455732806fdfd | /Configurations/WW/FullRunII/Full2017_v9/njets/plot.py | b869826b875f6efbb25b48b65c4cd12b7f151232 | [] | no_license | latinos/PlotsConfigurations | 6d88a5ad828dde4a7f45c68765081ed182fcda21 | 02417839021e2112e740607b0fb78e09b58c930f | refs/heads/master | 2023-08-18T20:39:31.954943 | 2023-08-18T09:23:34 | 2023-08-18T09:23:34 | 39,819,875 | 10 | 63 | null | 2023-08-10T14:08:04 | 2015-07-28T07:36:50 | Python | UTF-8 | Python | false | false | 5,738 | py | # plot configuration
nbins = 4  # number of fiducial WW signal bins (WW_B0 .. WW_B<nbins-1>)

# NOTE: this configuration is executed by the plotting framework with the
# groupPlot/plot/legend dictionaries already defined in scope (hence the
# commented-out "plot = {}" below); 'xrange' means it runs under Python 2.

# groupPlot = {}
#
# Groups of samples to improve the plots.
# If not defined, normal plots is used
#
groupPlot['top'] = {
    'nameHR' : 'tW and t#bar{t}',
    'isSignal' : 0,
    'color': 400, # kYellow
    'samples' : ['top']
}
groupPlot['WW'] = {
    'nameHR' : 'WW',
    'isSignal' : 0,
    'color': 851, # kAzure -9
    'samples' : ['WW_B%d'%i for i in xrange(nbins)]+['ggWW_B%d'%i for i in xrange(nbins)]
}
groupPlot['WW_nonfid'] = {
    'nameHR' : 'WW nonfid',
    'isSignal' : 0,
    'color': 853, # kAzure -9
    'samples' : ['WW_nonfid', 'ggWW_nonfid']
}
groupPlot['WWewk'] = {
    'nameHR' : 'WWewk',
    'isSignal' : 0,
    'color': 852, # kAzure -9
    'samples' : ['WWewk']
}
groupPlot['Fake'] = {
    'nameHR' : 'nonprompt',
    'isSignal' : 0,
    'color': 921, # kGray + 1
    'samples' : ['Fake_me', 'Fake_em']
}
groupPlot['DY'] = {
    'nameHR' : "DY",
    'isSignal' : 0,
    'color': 418, # kGreen+2
    'samples' : ['DY']
}
groupPlot['VVV'] = {
    'nameHR' : 'VVV',
    'isSignal' : 0,
    'color': 857, # kAzure -3
    'samples' : ['VVV']
}
groupPlot['WZ'] = {
    'nameHR' : "WZ",
    'isSignal' : 0,
    'color' : 617, # kViolet + 1
    'samples' : ['WZ']
}
groupPlot['ZZ'] = {
    'nameHR' : "ZZ",
    'isSignal' : 0,
    'color' : 618, # kViolet + 1
    'samples' : ['ZZ']
}
groupPlot['Vg'] = {
    'nameHR' : "V#gamma",
    'isSignal' : 0,
    'color' : 811, # kOrange + 10
    'samples' : ['Vg']
}
groupPlot['Higgs'] = {
    'nameHR' : 'Higgs',
    'isSignal' : 0,
    'color': 632, # kRed
    'samples' : ['Higgs' ]
}
#plot = {}
# keys here must match keys in samples.py
#
plot['DY'] = {
    'color': 418, # kGreen+2
    'isSignal' : 0,
    'isData' : 0,
    'scale' : 1.0,
}
plot['Fake_me'] = {
    'color': 921, # kGray + 1
    'isSignal' : 0,
    'isData' : 0,
    'scale' : 1.0
}
plot['Fake_em'] = {
    'color': 921, # kGray + 1
    'isSignal' : 0,
    'isData' : 0,
    'scale' : 1.0
}
plot['top'] = {
    'nameHR' : 'tW and t#bar{t}',
    'color': 400, # kYellow
    'isSignal' : 0,
    'isData' : 0,
    'scale' : 1.0,
}
# One entry per fiducial WW signal bin, for both qq->WW and gg->WW.
for i in xrange(nbins):
    plot['WW_B%d'%i] = {
        'color': 851, # kAzure -9
        'isSignal' : 0,
        'isData' : 0,
        'scale' : 1.0 # ele/mu trigger efficiency datadriven
    }
    plot['ggWW_B%d'%i] = {
        'color': 851, # kAzure -9
        'isSignal' : 0,
        'isData' : 0,
        'scale' : 1.0 # ele/mu trigger efficiency datadriven
    }
plot['WW_nonfid'] = {
    'color': 853, # kAzure -9
    'isSignal' : 0,
    'isData' : 0,
    'scale' : 1.0 # ele/mu trigger efficiency datadriven
}
plot['ggWW_nonfid'] = {
    'color': 853, # kAzure -9
    'isSignal' : 0,
    'isData' : 0,
    'scale' : 1.0 # ele/mu trigger efficiency datadriven
}
plot['WWewk'] = {
    'color': 851, # kAzure -9
    'isSignal' : 0,
    'isData' : 0,
    'scale' : 1.0 # ele/mu trigger efficiency datadriven
}
plot['Vg'] = {
    'color': 859, # kAzure -1
    'isSignal' : 0,
    'isData' : 0,
    'scale' : 1.0
}
plot['WZ'] = {
    'color': 858, # kAzure -2
    'isSignal' : 0,
    'isData' : 0,
    'scale' : 1.0
}
plot['ZZ'] = {
    'color': 858, # kAzure -2
    'isSignal' : 0,
    'isData' : 0,
    'scale' : 1.0
}
plot['VVV'] = {
    'color': 857, # kAzure -3
    'isSignal' : 0,
    'isData' : 0,
    'scale' : 1.0
}
# Higgs
plot['Higgs'] = {
    'nameHR' : 'Higgs',
    'color': 632, # kRed
    'isSignal' : 0,
    'isData' : 0,
    'scale' : 1 #
}
# data
plot['DATA'] = {
    'nameHR' : 'Data',
    'color': 1 ,
    'isSignal' : 0,
    'isData' : 1 ,
    'isBlind' : 1
}
# additional options
legend['lumi'] = 'L = 41.5/fb'
legend['sqrt'] = '#sqrt{s} = 13 TeV'
| [
"saumya.phor4252@gmail.com"
] | saumya.phor4252@gmail.com |
018440f84f40bfcb4de4dbd4916932d807d54935 | 2c7f025568bceb560888d26828aef30e5ae23393 | /src/home-20170804/migrations/0015_auto_20170601_1925.py | aac40a81d41a1db348df5d1c203aa4f5437164e5 | [] | no_license | GustavoCruz12/educacao | 6271ebc71830ee1964f8311d3ef21ec8abf58e50 | d0faa633ed1d588d84c74a3e15ccf5fa4dd9839e | refs/heads/master | 2022-12-08T09:34:42.066372 | 2018-08-03T06:38:49 | 2018-08-03T06:38:49 | 143,387,426 | 0 | 0 | null | 2022-12-08T00:01:52 | 2018-08-03T06:31:03 | Python | UTF-8 | Python | false | false | 493 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-06-01 19:25
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: widen VideoYoutube.slug to 250 chars and make it unique."""
    dependencies = [
        ('home', '0014_auto_20170601_1923'),
    ]
    operations = [
        migrations.AlterField(
            model_name='videoyoutube',
            name='slug',
            field=models.SlugField(max_length=250, unique=True, verbose_name='URL (slug)'),
        ),
    ]
| [
"gustavocruz201419@gmail.com"
] | gustavocruz201419@gmail.com |
4c59848d671ab8c62cfb1602235eb339772320fb | 1f27c4697ffd787eb709dc6560823e8d42196342 | /motorhome/migrations/0003_auto_20200305_1050.py | ea566cb7010e86a1e1a441f6f7356fc50cab025e | [
"MIT"
] | permissive | Forestriver/Motorhome | 965f387b32919efc57ba28bba292e9f444e8bb97 | 14e2998e06b3063897e4ff8d01eef6a129bec017 | refs/heads/master | 2022-11-14T20:12:03.147399 | 2020-07-09T20:30:34 | 2020-07-09T20:30:34 | 83,025,859 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 372 | py | # Generated by Django 2.2.5 on 2020-03-05 08:50
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated: set the Choice model's plural verbose name (Ukrainian)."""
    dependencies = [
        ('motorhome', '0002_auto_20200305_0138'),
    ]
    operations = [
        migrations.AlterModelOptions(
            name='choice',
            options={'verbose_name_plural': 'Пропозиції'},
        ),
    ]
| [
"vitaly.romas1@gmail.com"
] | vitaly.romas1@gmail.com |
7c720b9d543c458945310c10348f41a006a16bdb | 72900b002fc2c27ea92564da10a87b4c9ab75f63 | /contracts/tests/test_schedule_metadata.py | 66fe05df5396217794d2f39d48bc50611e0b4899 | [
"CC0-1.0",
"LicenseRef-scancode-public-domain"
] | permissive | firefoxxy8/calc | f495443d28f7fd71da5e881a6d05d92aad2ba0fd | 7ceb2fcadd67bc9fbcb64cdaba50dbbdead0e803 | refs/heads/master | 2020-03-27T19:19:24.127137 | 2018-08-20T22:23:33 | 2018-08-20T22:23:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,230 | py | from django.core.management import call_command
from django.test import SimpleTestCase, TestCase
from django.utils.safestring import SafeText
from contracts.models import ScheduleMetadata
def populate_schedule_metadata():
    """Wipe and re-seed ScheduleMetadata via the contracts data migration."""
    # This is SUPER weird. Previous TransactionTestCase runs (usually
    # the result of a LiveServerTestCase) could have removed the
    # schedule metadata populated by our migration, so we'll forcibly
    # wipe our schedule metadata and re-run the migration just in case.
    ScheduleMetadata.objects.all().delete()
    # '--fake' rewinds the recorded migration state without touching the
    # schema, so re-running 0024 actually executes its data operations...
    call_command('migrate', 'contracts', '0023_schedulemetadata', '--fake')
    call_command('migrate', 'contracts', '0024_populate_schedulemetadata')
    # ...then fake-apply the rest so the recorded state is consistent again.
    call_command('migrate', '--fake')
class InitialScheduleMetadataTests(TestCase):
    """Verify the rows seeded by the 0024 data migration."""
    def test_populate_data_migration_works(self):
        populate_schedule_metadata()
        # Spot-check one row in full...
        env = ScheduleMetadata.objects.get(sin='899')
        self.assertEqual(env.schedule, 'Environmental')
        self.assertEqual(env.name, 'Legacy Environmental')
        self.assertIn('pollution', env.description)
        # ...then verify the complete set of seeded schedule names.
        self.assertEqual(list(
            sm.schedule
            for sm in ScheduleMetadata.objects.all().order_by('schedule')
        ), [
            'AIMS',
            'Consolidated',
            'Environmental',
            'FABS',
            'IT Schedule 70',
            'Language Services',
            'Logistics',
            'MOBIS',
            'PES',
        ])
class SimpleTests(SimpleTestCase):
    """Unit tests for ScheduleMetadata helpers that need no database."""
    def test_full_name_includes_sin_when_present(self):
        sm = ScheduleMetadata(sin='123', name='blarg')
        self.assertEqual(sm.full_name, '123 - blarg')
    def test_full_name_works_when_sin_is_absent(self):
        sm = ScheduleMetadata(name='blarg')
        self.assertEqual(sm.full_name, 'blarg')
    def test_description_html_works(self):
        # Markdown in the description is rendered to HTML...
        sm = ScheduleMetadata(description='hello *there*')
        self.assertEqual(
            sm.description_html,
            '<p>hello <em>there</em></p>'
        )
        # ...and the result is marked safe for direct template output.
        self.assertIsInstance(sm.description_html, SafeText)
    def test_str_works(self):
        sm = ScheduleMetadata(sin='123', name='blarg')
        self.assertEqual(str(sm), '123 - blarg')
| [
"varmaa@gmail.com"
] | varmaa@gmail.com |
975e0f1699dee999f8a1eb00d40e816bc38b0714 | 9ec849da0df382fe8a2ae25fc45e37b2d532a0d4 | /ubereats/ubereats/constants/.ipynb_checkpoints/trip-checkpoint.py | d8b68853191ef54b05545c4de3cc0ecf33e336de | [
"MIT"
] | permissive | macchino/delivery-analisys | b71cf06eb9c4f33064cdee192cffb69ecb6e6cf7 | 7d0507580532e87143c49d91693ee25aaf504a0b | refs/heads/master | 2023-02-15T03:00:00.591446 | 2021-01-10T22:55:23 | 2021-01-10T22:55:23 | 328,191,304 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 139 | py | BASE_DOMAIN = "partners.uber.com"
BASE_URL = "https://" + BASE_DOMAIN
WEEKLY_EARNINGS_BASE_URL = BASE_URL + "/p3/payments/weekly-earnings"
| [
"mac.kimitoshi@gmial.com"
] | mac.kimitoshi@gmial.com |
27427752ce84e5dac6ccb005952ec43a5fca4ec5 | 744b4cb836c7a11b0054e7d2e5b0efabd18ab5da | /app.py | f0e7a118c060c4831d3ec33c9a682e964ad40833 | [] | no_license | EpicRowan/Flask_Rest_API | fbf4b4684a45c7fb4c1ee1cc5fb6a8684ce70a19 | 218498fc41ab37e2dd4e807671c7bec033b0f119 | refs/heads/master | 2022-10-16T22:09:24.945878 | 2020-06-11T20:48:49 | 2020-06-11T20:48:49 | 271,582,042 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,405 | py | from flask import Flask, request, jsonify
from flask_sqlalchemy import SQLAlchemy
from flask_marshmallow import Marshmallow
import os
app = Flask(__name__)

# Resolve the database file relative to this module so the app works
# regardless of the current working directory.
basedir = os.path.abspath(os.path.dirname(__file__))

# 'sqlite:///' + an absolute path is SQLAlchemy's absolute-path sqlite URI.
# (The original used 'sqlite:////', producing a doubled leading slash.)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///' + os.path.join(basedir, 'db.sqlite')
# Bug fix: the key was misspelled 'SQLALCHEMY_TRACK_MODIFCAITONS', so the
# setting had no effect and Flask-SQLAlchemy kept emitting its warning.
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False

db = SQLAlchemy(app)
ma = Marshmallow(app)
class Product(db.Model):
    """SQLAlchemy model for a persisted product row."""
    # Surrogate primary key assigned by the database.
    id = db.Column(db.Integer, primary_key=True)
    # Product name; unique across the table.
    name = db.Column(db.String(100), unique=True)
    description = db.Column(db.String(200))
    price = db.Column(db.Float)
    qty = db.Column(db. Integer)

    def __init__(self, name, description, price, qty):
        """Create a product; ``id`` is filled in by the database on commit."""
        self.name = name
        self.description = description
        self.price = price
        self.qty = qty
class ProductSchema(ma.Schema):
    """Marshmallow schema controlling which Product fields are serialized."""
    class Meta:
        # fields that we want to expose in API responses
        fields = ('id', 'name', 'description', 'price', 'qty')
# Init schemas: one serializer for a single object, one (many=True) for lists.
product_schema = ProductSchema()
products_schema = ProductSchema(many=True)
# Create a product
@app.route('/product', methods=['POST'])
def add_product():
    """POST /product -- create a product from the JSON request body."""
    payload = request.json
    product = Product(
        payload['name'],
        payload['description'],
        payload['price'],
        payload['qty'],
    )
    db.session.add(product)
    db.session.commit()
    return product_schema.jsonify(product)
# Get all products
@app.route('/product', methods=['GET'])
def get_products():
    """GET /product -- return every product as a JSON array."""
    # many=True schema serializes the whole result list at once.
    serialized = products_schema.dump(Product.query.all())
    return jsonify(serialized)
# Get single product
@app.route('/product/<id>', methods=['GET'])
def get_product(id):
    """GET /product/<id> -- return one product by primary key."""
    found = Product.query.get(id)
    return product_schema.jsonify(found)
# Update a product
@app.route('/product/<id>', methods=['PUT'])
def update_product(id):
    """PUT /product/<id> -- overwrite every field of an existing product."""
    product = Product.query.get(id)
    # Read all fields before mutating, same order as the create endpoint.
    payload = request.json
    new_name = payload['name']
    new_description = payload['description']
    new_price = payload['price']
    new_qty = payload['qty']
    product.name = new_name
    product.description = new_description
    product.price = new_price
    product.qty = new_qty
    db.session.commit()
    return product_schema.jsonify(product)
# @app.route('/', methods=['GET'])
# def get():
#     return jsonify({'msg': "Hello!!!"})

# NOTE(review): hard-coded secret key is only acceptable for local
# development; load it from the environment before deploying.
app.secret_key = "supersecret"

if __name__ == '__main__':
    # Development server with auto-reload.
    app.run(debug=True)
"shepherd.rowan.k@gmail.com"
] | shepherd.rowan.k@gmail.com |
d97ba30c0f806dbaabd1c57dff019379eb9ecf95 | 03e88bfb88da079b1198ff236ab76b8722e4516f | /apps/belt_app/migrations/0003_user_friend.py | 786e62f5ed5180fb9b8bec4c98bc60e0858942e6 | [] | no_license | derberbaby/book_reviews | 72cf5dde0c27d971833dfb90d0a0a12c298e14a5 | 997191b6ae3cbca7511b04b952c7fac433dec6ba | refs/heads/master | 2021-06-18T03:07:17.079925 | 2017-06-27T01:27:27 | 2017-06-27T01:27:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 484 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2017-06-27 00:00
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated (Django 1.10, 2017-06-27): adds the self-referential
    many-to-many ``friend`` field to ``belt_app.User``."""

    # Must run after the review.comment migration.
    dependencies = [
        ('belt_app', '0002_review_comment'),
    ]

    operations = [
        migrations.AddField(
            model_name='user',
            name='friend',
            # related_name generated by Django for a self-referential M2M.
            field=models.ManyToManyField(related_name='_user_friend_+', to='belt_app.User'),
        ),
    ]
| [
"debbiewang@gmail.com"
] | debbiewang@gmail.com |
9ed3621af43e4f23d160e89f19f6d78356630961 | 3940b4a507789e1fbbaffeb200149aee215f655a | /lc/145.BinaryTreePostorderTraversal.py | cea3a772e720331356728cbbd39b0eca590101ba | [] | no_license | akimi-yano/algorithm-practice | 15f52022ec79542d218c6f901a54396a62080445 | 1abc28919abb55b93d3879860ac9c1297d493d09 | refs/heads/master | 2023-06-11T13:17:56.971791 | 2023-06-10T05:17:56 | 2023-06-10T05:17:56 | 239,395,822 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,915 | py | # 145. Binary Tree Postorder Traversal
# Medium
# 2416
# 112
# Add to List
# Share
# Given the root of a binary tree, return the postorder traversal of its nodes' values.
# Example 1:
# Input: root = [1,null,2,3]
# Output: [3,2,1]
# Example 2:
# Input: root = []
# Output: []
# Example 3:
# Input: root = [1]
# Output: [1]
# Example 4:
# Input: root = [1,2]
# Output: [2,1]
# Example 5:
# Input: root = [1,null,2]
# Output: [2,1]
# Constraints:
# The number of the nodes in the tree is in the range [0, 100].
# -100 <= Node.val <= 100
# Follow up:
# Recursive solution is trivial, could you do it iteratively?
# This solution works:
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
    def postorderTraversal(self, root: TreeNode) -> List[int]:
        """Recursive post-order traversal: left subtree, right subtree, node."""
        values = []

        def visit(node):
            # Empty subtree contributes nothing.
            if node is None:
                return
            visit(node.left)
            visit(node.right)
            values.append(node.val)

        visit(root)
        return values
# This solution works - iterative:
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
    def postorderTraversal(self, root: TreeNode) -> List[int]:
        """Iterative post-order using an explicit (node, expanded) stack.

        A node is emitted only the second time it is popped; on the first
        pop its children are pushed (right under left) so the left subtree
        is processed first.
        """
        result = []
        pending = [(root, False)]
        while pending:
            node, expanded = pending.pop()
            if node is None:
                continue
            if expanded:
                result.append(node.val)
            else:
                pending.append((node, True))
                pending.append((node.right, False))
                pending.append((node.left, False))
        return result
| [
"akimi.mimi.yano@gmail.com"
] | akimi.mimi.yano@gmail.com |
62481b0b9929d2fac20216833eac2a7e88fc36a4 | 776754df08b3f286e26b2cddb97730fe3eaff471 | /populate_domain.py | e8e40283b183c99cc5bca269806bc1cfde7dc254 | [] | no_license | kimjungsoo2/testhub2 | 8cffb9cb887092d9f353b7292404d45080e2aed9 | f1159b47c3d610fa002a25a6d972b602d6ace5b9 | refs/heads/master | 2016-09-02T00:59:27.326597 | 2015-08-18T21:03:33 | 2015-08-18T21:03:33 | 40,996,583 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,821 | py | #!/usr/bin/env python
import os
def populate():
p = add_primary('BAP Nuke')
p = add_primary('Big Red Button')
p = add_primary('Carrier')
add_secondary(primary=p, name='E2E')
add_secondary(primary=p, name='Network')
p = add_primary('CAS')
add_secondary(primary=p, name='CAS Server')
p = add_primary('Core Services')
add_secondary(primary=p, name='CCE-DeviceProvisioning')
add_secondary(primary=p, name='SSO')
add_secondary(primary=p, name='AccessIdentifyPortal')
p = add_primary('CSP')
add_secondary(primary=p, name='CSP Service')
add_secondary(primary=p, name='CSP Client SDK')
p = add_primary('Data Cloud')
add_secondary(primary=p, name='Checkin')
add_secondary(primary=p, name='Lotus')
add_secondary(primary=p, name='Luna')
p = add_primary('Droid Zap')
add_secondary(primary=p, name='GCSP Client')
add_secondary(primary=p, name='GCSP Backend')
p = add_primary('GdrivePromo')
add_secondary(primary=p, name='PromotionalOffer')
p = add_primary('Motocare')
add_secondary(primary=p, name='Client')
p = add_primary('Notification')
add_secondary(primary=p, name='Client')
p = add_primary('OTA')
add_secondary(primary=p, name='CDS Portal')
add_secondary(primary=p, name='SUP Portal')
add_secondary(primary=p, name='SSOTA Portal')
p = add_primary('OTA-Client')
add_secondary(primary=p, name='8226-Recovery')
add_secondary(primary=p, name='Dual-sim')
add_secondary(primary=p, name='10 sec timer')
add_secondary(primary=p, name='Finger prints')
add_secondary(primary=p, name='Autenticated download')
add_secondary(primary=p, name='Silent-APK')
add_secondary(primary=p, name='All')
add_secondary(primary=p, name='Verizon')
add_secondary(primary=p, name='Http error codes')
add_secondary(primary=p, name='Low_battery')
add_secondary(primary=p, name='cds-migration')
add_secondary(primary=p, name='Setup-intiated')
add_secondary(primary=p, name='Silent-OTA')
add_secondary(primary=p, name='ROW')
add_secondary(primary=p, name='ROW-UI Template')
add_secondary(primary=p, name='VZW-UI Template')
add_secondary(primary=p, name='VZW')
add_secondary(primary=p, name='Verification_Methods')
add_secondary(primary=p, name='UI-screens')
add_secondary(primary=p, name='UI-Non-VZW screens')
add_secondary(primary=p, name='UI-VZW screens')
add_secondary(primary=p, name='AT&T port')
add_secondary(primary=p, name='AT&T')
add_secondary(primary=p, name='Exploratory')
add_secondary(primary=p, name='Extra-space')
add_secondary(primary=p, name='AT&T FOTA')
add_secondary(primary=p, name='Middleware-Update')
add_secondary(primary=p, name='Multi-user profile')
add_secondary(primary=p, name='VZW-Zero-rated')
add_secondary(primary=p, name='WI-FI_Discovery')
add_secondary(primary=p, name='Download to data')
add_secondary(primary=p, name='AT&T 30 sec timer')
add_secondary(primary=p, name='Rich text')
add_secondary(primary=p, name='Recovery')
add_secondary(primary=p, name='Polling-Enhancements')
add_secondary(primary=p, name='Ota-Cancel')
add_secondary(primary=p, name='Motodrop')
add_secondary(primary=p, name='Package Creation')
add_secondary(primary=p, name='Rooted')
add_secondary(primary=p, name='Instrumentation')
add_secondary(primary=p, name='KPI')
add_secondary(primary=p, name='Sdcard')
add_secondary(primary=p, name='Network Info')
add_secondary(primary=p, name='Sync polling for Jbrel1 only')
add_secondary(primary=p, name='Sync polling for Jbrel2 only')
add_secondary(primary=p, name='Recovery-Stress')
p = add_primary('Tethered-Client')
add_secondary(primary=p, name='Win-MAC')
add_secondary(primary=p, name='All')
add_secondary(primary=p, name='Win-MAC-Repair')
add_secondary(primary=p, name='MDM')
# Print out what we have added to the db
for p in Primarydomain.objects.all():
for s in Secondarydomain.objects.filter(primary=p):
print "- {0} - {1}".format(str(p), str(s))
def add_primary(name):
    """Return the Primarydomain with *name*, creating it if necessary."""
    primary, _created = Primarydomain.objects.get_or_create(name=name)
    return primary
def add_secondary(primary, name):
    """Return the Secondarydomain (primary, name), creating it if necessary."""
    secondary, _created = Secondarydomain.objects.get_or_create(primary=primary, name=name)
    return secondary
def add_component(secondary, name):
    """Return the Component (secondary, name), creating it if necessary.

    Bug fix: previously returned the raw ``(object, created)`` tuple from
    ``get_or_create`` instead of the object, unlike add_primary and
    add_secondary which both index ``[0]``.
    """
    c = Component.objects.get_or_create(secondary=secondary, name=name)[0]
    return c
if __name__ == '__main__':
    print "Starting Domain population script..."
    # Django settings must be configured *before* any model import, which is
    # why these imports live inside the __main__ guard rather than at the top.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'testhub2.settings')
    from testbrowser.models import Primarydomain, Secondarydomain, Component
    from django.core.wsgi import get_wsgi_application
    # Instantiating the WSGI application forces Django to finish setup.
    application = get_wsgi_application()
    populate()
"kim.jungsoo2@gmail.com"
] | kim.jungsoo2@gmail.com |
3abf9558a0e5b8b9236e52a66eeb09cacc7a7629 | f492c69e6cd7e25f11fe72750a8ce2bda676d730 | /day_1/ex3_4.py | cff9e2bc1b380d7b4d6d194c3536ca3ecb3ba046 | [] | no_license | conrongvang/exercises-python | c0fab80061eba3013cb5c4cb5177d48ed2d80f5e | 090913c5d870831813cc585d69f4a6433c0fde8e | refs/heads/master | 2023-05-28T15:36:24.335916 | 2021-06-22T17:13:10 | 2021-06-22T17:13:10 | 378,943,052 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 999 | py | #!/usr/bin/env python3
"""
Viết chương trình loại bỏ phần mở rộng của một tên file bất kỳ.
Ví dụ::
input_data = '....slsslslsls...sls'
output = '....slsslslsls..'
input_data = 'maria.data.mp9'
output = 'maria.data'
Read: https://docs.python.org/3/library/stdtypes.html#str.rfind
"""
def solve(input_data):
    """Return *input_data* with its last extension removed.

    Uses ``str.rfind`` (the hint given in the module docstring) to locate
    the final ``'.'``; everything from that dot onward is dropped.

    Fix: a name containing no ``'.'`` is now returned unchanged.  The
    original returned the English message "This file is not exist extend."
    instead of a file name, contradicting the documented contract.

    :param input_data: any file name
    :rtype: str
    """
    dot_index = input_data.rfind('.')
    if dot_index == -1:
        # No extension to strip.
        return input_data
    return input_data[:dot_index]
def main():
    """Demo: strip the extension from a sample file name and print it."""
    sample = "maria.data.mp9"
    print(solve(sample))


if __name__ == "__main__":
    main()
| [
"42422284+conrongvang@users.noreply.github.com"
] | 42422284+conrongvang@users.noreply.github.com |
5c249391962088fad4694ffcfc8086656191c01d | 9038394d643b33b37cec8fe6c50332d9f669fd95 | /main.py | 62d3f3a12a1c40b61d2cd43dd71af63d975361b3 | [] | no_license | whimo/gitspector | 25a19be521cb57d93c5e2c64e8eea32889180750 | d57c4d03f343e0991521c68bcb0a2db8282c0ee5 | refs/heads/master | 2020-03-17T23:23:35.303364 | 2018-05-20T15:56:34 | 2018-05-20T15:56:34 | 134,042,861 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 89 | py | from app import app
app.run(app.config['HOST'], app.config['PORT'], app.config['DEBUG']) | [
"Qwertygid@gmail.com"
] | Qwertygid@gmail.com |
9fe0f29f1163433f9c350a27ea282131f40f2550 | 61b998663888131beaa84674f4819f0542d173ae | /userproject/app2/admin.py | f083077dd1a3ef115583c703de3879d1ea568742 | [] | no_license | getvishalprajapati/test | 0205a85fdf5dfc6a2994a5fe4fc66d2ddb6826a5 | c052d065dcf968e40f63a2c70a40dbc858c5908d | refs/heads/master | 2023-02-06T05:45:05.453184 | 2020-12-23T13:57:40 | 2020-12-23T13:57:40 | 323,915,283 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 122 | py | from django.contrib import admin
# Register your models here.
from .models import student
admin.site.register(student) | [
"getvishalprajapati@gmail.com"
] | getvishalprajapati@gmail.com |
6e646ddab273f1e1a7db8cda9f93e205ce0b8405 | eb4d858113cb6395fc1d8a7ee7a431ca09b3043d | /models/tripletext2seq.py | 0acb6fba6545f2d4aa82b9de988e7d5d45c795cc | [
"MIT"
] | permissive | Joyce-Ng/Zeroshot-QuestionGeneration | b2788bb0da72fb71f3201c72a3c89478c332d11a | e02eb38adb047f7b50bc861f3a3383af87ae7079 | refs/heads/master | 2021-09-11T19:33:12.281250 | 2018-04-11T15:02:57 | 2018-04-11T15:02:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 29,034 | py | from __future__ import print_function
import time
import math
import pickle
import tensorflow as tf
from tensorflow.python.layers.core import Dense
import numpy as np
class TripleText2SeqModel():
"""
This Model is called triples sequences to sequence model
model takes a single triple and multiple sequences as an input and outputs
a single sequence.
This model is equipped by two attention modules
- attention over the input triples
- attention over each encoded vector of each word in the
input sequences
- Triple Encoder:
- Entities Encoded through Entity Embeddings
- Predicates Encoded Through Predicate Embeddings
- Sequences Encoder:
- a separate RNN over Word Embeddings of each input sequence
Data preparation:
- Thise model doesn't handle Additional tokens `<unk> <rare> <pad>`
those are expected to be added beforehand to the vocabulary
- vocabulary is created offline
- The inputs to the decoder are preprocessed beforehand to start with `<s>` and `<\s>`
- targets are decoder inputs shifted by one (to ignore start symbol)
"""
    def __init__(self, config, mode='training'):
        """Build the whole graph for the given *mode*.

        Construction order matters: placeholders first, then the combined
        triple+text encoder, then the decoder (which also builds the loss
        and optimizer when mode == 'training').

        :param config: hyper-parameter/configuration object
        :param mode: one of 'training', 'evaluation', 'inference'
        """
        print('Initializing new seq 2 seq model')
        assert mode in ['training', 'evaluation', 'inference']
        self.mode = mode
        self.config = config
        self.__create_placeholders()
        self.__create_encoder()
        self.__create_decoder()
    def __create_placeholders(self):
        """
        Create all graph inputs: triple ids + predicate direction, padded
        text-evidence word ids with true lengths, decoder inputs/lengths,
        plus derived batch size, max decode length and the global step.
        :return:
        """
        # Encoder Inputs
        #################

        # Input Triple
        ###############
        # The input triple is given in the form of list of entities [sub,obj] and list of predicates [pred]
        # This design allows also inputting multiple triples at once since order matters [s1,s2,o1,o2] [p1,p2]
        self.encoder_entities_inputs = tf.placeholder(tf.int32, shape=[None, self.config.ENTITIESLENGTH], name="encoder_entities_inputs")
        self.encoder_predicates_inputs = tf.placeholder(tf.int32, shape=[None, self.config.PREDICATESLENGTH], name="encoder_predicates_inputs")
        # Per-example scalar multiplied into the predicate embedding
        # (see __create_triple_encoder); presumably +/-1 marking a reversed
        # triple -- TODO confirm with callers.
        self.encoder_predicates_direction = tf.placeholder(tf.float32, shape=[None], name="encoder_predicates_direction")

        # Input Sequences
        # textual evidences = input sequences
        ######################################
        # input sequences with padding
        # :size = NUMBER_OF_TEXTUAL_EVIDENCES x BATCHSIZE x input sequence max length
        self.encoder_text_inputs = tf.placeholder(dtype=tf.int32, shape=[self.config.NUMBER_OF_TEXTUAL_EVIDENCES, None, None], name='encoder_text_inputs')
        # actual lengths of each input sequence
        # :size = NUMBER_OF_TEXTUAL_EVIDENCES x 1
        # each batch has a fixed input sequence length
        self.encoder_text_inputs_length = tf.placeholder(dtype=tf.int32, shape=[self.config.NUMBER_OF_TEXTUAL_EVIDENCES, None], name='encoder_text_inputs_length')

        # Dynamic batch size inferred from the entities input.
        self.batch_size = tf.shape(self.encoder_entities_inputs)[0]

        # Decoder placeholders:
        # these are the raw inputs to the decoder same as input sequences
        # output sequence with padding
        # :size = BATCHSIZE x output sequence max length
        self.decoder_inputs = tf.placeholder(tf.int32, shape=[None, None], name="decoder_inputs")
        # number indicating actual lengths of the output sequence
        # :size = BATCHSIZE x 1
        self.decoder_inputs_length = tf.placeholder(dtype=tf.int32, shape=(None,), name='decoder_inputs_length')

        if self.mode == "training":
            self.decoder_inputs_train = self.decoder_inputs
            # for training our targets are decoder inputs shifted by one (to ignore start symbol)
            # as shown in figure https://www.tensorflow.org/images/basic_seq2seq.png
            self.decoder_targets_train = self.decoder_inputs[:, 1:]
            # decoder_inputs_length_train: [batch_size x 1]
            self.decoder_inputs_length_train = self.decoder_inputs_length
            # Target length excludes the start symbol.
            self.decoder_targets_length_train = self.decoder_inputs_length - 1
            # calculating max_decoder_length
            self.decoder_max_length = tf.reduce_max(self.decoder_targets_length_train)
        elif self.mode == "inference":
            # at inference time there's no decoder input so we set the Decode length to a maximum.
            self.decoder_max_length = self.config.MAX_DECODE_LENGTH

        # global step
        self.global_step = tf.Variable(0, trainable=False, name='global_step')
def __build_single_rnn_cell(self, hidden_size):
cell = tf.nn.rnn_cell.GRUCell(hidden_size)
# if self.use_dropout:
# cell = DropoutWrapper(cell, dtype=self.dtype,
# output_keep_prob=self.keep_prob_placeholder, )
return cell
def __create_triple_encoder(self):
print('building Triples encoder ...')
start = time.time()
with tf.variable_scope('encoder'):
# Create Embeddings Weights
if self.config.USE_PRETRAINED_KB_EMBEDDINGS:
ent_kb_emb = pickle.load(open(self.config.PRETRAINED_ENTITIES_EMBEDDINGS_PATH))
self.encoder_entities_embeddings = tf.Variable(ent_kb_emb, name="entities_embeddings", trainable=self.config.TRAIN_KB_EMBEDDINGS)
pred_kb_emb = pickle.load(open(self.config.PRETRAINED_PREDICATES_EMBEDDINGS_PATH))
self.encoder_predicates_embeddings = tf.Variable(pred_kb_emb, name="predicates_embeddings",
trainable=self.config.TRAIN_KB_EMBEDDINGS)
else:
self.encoder_entities_embeddings = tf.get_variable("entities_embeddings",
shape=[self.config.ENTITIES_VOCAB, self.config.ENTITIES_EMBEDDING_SIZE],
initializer=self.__helper__initializer(),
dtype=tf.float32
)
self.encoder_predicates_embeddings = tf.get_variable("predicates_embeddings",
shape=[self.config.PREDICATES_VOCAB,
self.config.PREDICATES_EMBEDDING_SIZE],
initializer=self.__helper__initializer(),
dtype=tf.float32
)
# embedding the encoder inputs
# encoder_inputs is of size [Batch size x 3]
# encoder_inputs_embedded is of size [Batch size x 3 x TRIPLES_EMBEDDING_SIZE]
self.encoder_entities_inputs_embedded = tf.nn.embedding_lookup(self.encoder_entities_embeddings, self.encoder_entities_inputs)
self.encoder_predicates_inputs_embedded = tf.nn.embedding_lookup(self.encoder_predicates_embeddings, self.encoder_predicates_inputs)
direction = tf.expand_dims(self.encoder_predicates_direction, axis=1)
direction = tf.expand_dims(direction, axis=2)
self.encoder_predicates_inputs_embedded = tf.multiply(self.encoder_predicates_inputs_embedded, direction)
self.encoder_triples_inputs_embedded = tf.concat((self.encoder_entities_inputs_embedded, self.encoder_predicates_inputs_embedded), axis=1)
# Encode input triple into a vector
# encoder_state: [batch_size, cell_output_size]
self.encoder_triples_last_state = tf.concat(tf.unstack(self.encoder_triples_inputs_embedded, axis=1), axis=1)
print('Building encoder in: ', time.time() - start, ' secs')
def __create_seq_encoder(self):
print('Building Input Sequence Encoder ...')
start = time.time()
with tf.variable_scope('encoder'):
###################
# Word Embeddings #
###################
# Create Word Embeddings Weights
if self.config.USE_PRETRAINED_WORD_EMBEDDINGS:
word_emb = pickle.load(open(self.config.PRETRAINED_WORD_EMBEDDINGS_PATH)).astype(np.float32)
self.encoder_word_embeddings = tf.Variable(word_emb, name="encoder_word_embeddings",
trainable=self.config.TRAIN_WORD_EMBEDDINGS)
else:
self.encoder_word_embeddings = tf.get_variable("encoder_word_embeddings",
shape=[self.config.DECODER_VOCAB_SIZE,
self.config.INPUT_SEQ_EMBEDDING_SIZE],
initializer=self.__helper__initializer(),
dtype=tf.float32
)
# Embedding the encoder inputs
# Encoder Input size = NUMBER_OF_TEXTUAL_EVIDENCES x BATCH x input_length
# Embedded Input size = NUMBER_OF_TEXTUAL_EVIDENCES x BATCH x input_length x word_embeddings_size
self.encoder_text_inputs_embedded = tf.nn.embedding_lookup(self.encoder_word_embeddings,
self.encoder_text_inputs)
#######
# RNN #
#######
# building a multilayer RNN for each Textual Evidence
# Encode input sequences into context vectors:
# encoder_outputs: [Num_text_evidence, batch_size, max_time_step, cell_output_size]
# encoder_state: [Num_text_evidence, batch_size, cell_output_size]
self.encoder_text_outputs = []
self.encoder_text_last_state = []
# If not bidirectional encoder
self.encoder_cell = []
rnn = self.__build_single_rnn_cell(self.config.INPUT_SEQ_RNN_HIDDEN_SIZE)
if "bi" not in self.config.ENCODER_RNN_CELL_TYPE:
for _ in range(self.config.NUMBER_OF_TEXTUAL_EVIDENCES):
#rnn = self.__build_single_rnn_cell(self.config.INPUT_SEQ_RNN_HIDDEN_SIZE)
self.encoder_cell.append(tf.nn.rnn_cell.MultiRNNCell([rnn] * self.config.NUM_LAYERS))
for i in range(self.config.NUMBER_OF_TEXTUAL_EVIDENCES):
out, state = tf.nn.dynamic_rnn(
cell=self.encoder_cell[i],
inputs=self.encoder_text_inputs_embedded[i],
sequence_length=self.encoder_text_inputs_length[i],
dtype=tf.float32
)
self.encoder_text_outputs.append(out)
self.encoder_text_last_state.append(tf.squeeze(state, axis=0))
# If bidirectional encoder
else:
self.fwd_encoder_cell = []
self.bw_encoder_cell = []
for _ in range(self.config.NUMBER_OF_TEXTUAL_EVIDENCES):
# two rnn decoders for each layer for each input sequence\
#fwrnn = self.__build_single_rnn_cell(self.config.INPUT_SEQ_RNN_HIDDEN_SIZE)
#bwrnn = self.__build_single_rnn_cell(self.config.INPUT_SEQ_RNN_HIDDEN_SIZE)
self.fwd_encoder_cell.append([rnn] * self.config.NUM_LAYERS)
self.bw_encoder_cell.append([rnn] * self.config.NUM_LAYERS)
for i in range(self.config.NUMBER_OF_TEXTUAL_EVIDENCES):
out, fwd_state, bk_state = tf.contrib.rnn.stack_bidirectional_dynamic_rnn(
cells_fw=self.fwd_encoder_cell[i],
cells_bw=self.bw_encoder_cell[i],
inputs=self.encoder_text_inputs_embedded[i],
sequence_length=self.encoder_text_inputs_length[i],
dtype=tf.float32
)
self.encoder_text_outputs.append(tf.concat(out, 2))
self.encoder_text_last_state.append(tf.squeeze(tf.concat([fwd_state, bk_state], 2), axis=0))
print('Building encoder in: ', time.time() - start, ' secs')
def __create_encoder(self):
self.__create_triple_encoder()
self.__create_seq_encoder()
# concatinating last state of the triple encoder with the last state of each text input being encoded
last_states = [self.encoder_triples_last_state] + self.encoder_text_last_state
self.encoder_last_state = tf.concat(last_states, axis=1)
def __create_decoder_cell(self):
self.decoder_cell = tf.nn.rnn_cell.GRUCell(self.config.DECODER_RNN_HIDDEN_SIZE)
# fully connected layer to change size of Encoder Last state to Decoder Hidden size
decoder_hidden_state_reshape = Dense(self.config.DECODER_RNN_HIDDEN_SIZE)
self.decoder_initial_state = (decoder_hidden_state_reshape(self.encoder_last_state), )
    def __create_decoder_attention_cell_old(self):
        """
        create decoder RNN with attention

        NOTE(review): legacy variant with a *single* Bahdanau attention over
        the concatenation of triple embeddings and all text-encoder outputs.
        __create_decoder calls __create_decoder_attention_cell instead, so
        this appears unused -- confirm before removing.
        :return:
        """
        memory = tf.concat([self.encoder_triples_inputs_embedded] + self.encoder_text_outputs, axis=1)

        self.attention_mechanism = tf.contrib.seq2seq.BahdanauAttention(
            num_units=self.config.TRIPLES_EMBEDDING_SIZE,  # the depth of the Attention layer
            memory=memory,
            name="Attention"
        )

        # create decoder cell:
        gru = self.__build_single_rnn_cell(self.config.DECODER_RNN_HIDDEN_SIZE)
        # NOTE(review): [gru] * NUM_LAYERS reuses one cell object, i.e. the
        # layers share weights -- confirm this is intended.
        self.decoder_cell_list = [gru] * self.config.NUM_LAYERS

        decoder_hidden_state_reshape = Dense(self.config.DECODER_RNN_HIDDEN_SIZE)

        self.decoder_cell_list[-1] = tf.contrib.seq2seq.AttentionWrapper(
            cell=self.decoder_cell_list[-1],
            attention_layer_size=self.config.DECODER_RNN_HIDDEN_SIZE,  # the output hidden size of the last decoder
            attention_mechanism=self.attention_mechanism,
            initial_cell_state=decoder_hidden_state_reshape(self.encoder_last_state),
            alignment_history=False,
            name="Attention_Wrapper"
        )

        self.decoder_cell = tf.nn.rnn_cell.MultiRNNCell(self.decoder_cell_list)

        # To be compatible with AttentionWrapper, the encoder last state
        # of the top layer should be converted into the AttentionWrapperState form
        # We can easily do this by calling AttentionWrapper.zero_state
        # self.decoder_initial_state = self.encoder_last_state
        init_state = self.decoder_cell_list[-1].zero_state(
            batch_size=self.batch_size,
            dtype=tf.float32
        )
        # a tuple because decode initial state has to take a tuple
        self.decoder_initial_state = (init_state,)
    def __create_decoder_attention_cell(self):
        """
        create decoder RNN with attention

        Two separate Bahdanau attention mechanisms are wrapped around the
        top decoder layer: one over the embedded triple, one over the
        per-timestep text-encoder outputs (doubled width if bidirectional).
        :return:
        """
        # Attention over the embedded triple (entities + predicate).
        triple_memory = self.encoder_triples_inputs_embedded
        self.triple_attention_mechanism = tf.contrib.seq2seq.BahdanauAttention(
            num_units=self.config.TRIPLES_EMBEDDING_SIZE,  # the depth of the Attention layer
            memory=triple_memory,
            name="TripleAttention"
        )

        # Attention over every timestep of every textual evidence.
        context_memory = tf.concat(self.encoder_text_outputs, axis=1)
        self.context_attention_mechanism = tf.contrib.seq2seq.BahdanauAttention(
            num_units=self.config.INPUT_SEQ_RNN_HIDDEN_SIZE if "bi" not in self.config.ENCODER_RNN_CELL_TYPE
            else self.config.INPUT_SEQ_RNN_HIDDEN_SIZE * 2,  # the depth of the Attention layer
            memory=context_memory,
            name="ContextAttention"
        )

        # create decoder cell:
        gru = self.__build_single_rnn_cell(self.config.DECODER_RNN_HIDDEN_SIZE)
        # NOTE(review): [gru] * NUM_LAYERS reuses one cell object, i.e. the
        # layers share weights -- confirm this is intended.
        self.decoder_cell_list = [gru] * self.config.NUM_LAYERS

        decoder_hidden_state_reshape = Dense(self.config.DECODER_RNN_HIDDEN_SIZE)

        self.decoder_cell_list[-1] = tf.contrib.seq2seq.AttentionWrapper(
            cell=self.decoder_cell_list[-1],
            # the output hidden size of the last decoder
            attention_layer_size=[self.config.TRIPLES_EMBEDDING_SIZE,
                                  self.config.INPUT_SEQ_RNN_HIDDEN_SIZE if "bi" not in self.config.ENCODER_RNN_CELL_TYPE
                                  else self.config.INPUT_SEQ_RNN_HIDDEN_SIZE * 2],
            attention_mechanism=[self.triple_attention_mechanism, self.context_attention_mechanism],
            initial_cell_state=decoder_hidden_state_reshape(self.encoder_last_state),
            alignment_history=False,
            name="Attention_Wrapper"
        )

        self.decoder_cell = tf.nn.rnn_cell.MultiRNNCell(self.decoder_cell_list)

        # To be compatible with AttentionWrapper, the encoder last state
        # of the top layer should be converted into the AttentionWrapperState form
        # We can easily do this by calling AttentionWrapper.zero_state
        # self.decoder_initial_state = self.encoder_last_state
        init_state = self.decoder_cell_list[-1].zero_state(
            batch_size=self.batch_size,
            dtype=tf.float32
        )
        # a tuple because decode initial state has to take a tuple
        self.decoder_initial_state = (init_state,)
def __create_decoder(self):
print("building decoder and attention ..")
start = time.time()
with tf.variable_scope('decoder'):
# input and output layers to the decoder
# decoder_input_layer = Dense(self.config.DECODER_RNN_HIDDEN_SIZE, dtype=tf.float32, name='decoder_input_projection')
decoder_output_layer = Dense(self.config.DECODER_VOCAB_SIZE, name="decoder_output_projection")
if self.config.COUPLE_ENCODER_DECODER_WORD_EMBEDDINGS:
# connect encoder and decoder word embeddings
self.decoder_embeddings = self.encoder_word_embeddings
elif self.config.USE_PRETRAINED_WORD_EMBEDDINGS:
word_emb = pickle.load(open(self.config.PRETRAINED_WORD_EMBEDDINGS_PATH)).astype(np.float32)
self.decoder_embeddings = tf.Variable(word_emb, name="decoder_embeddings",
trainable=self.config.TRAIN_WORD_EMBEDDINGS)
else:
self.decoder_embeddings = tf.get_variable("decoder_embeddings",
shape=[self.config.DECODER_VOCAB_SIZE, self.config.DECODER_EMBEDDING_SIZE],
initializer=self.__helper__initializer(),
dtype=tf.float32
)
if self.config.USE_ATTENTION:
self.__create_decoder_attention_cell()
else:
self.__create_decoder_cell()
######################################
# Build the decoder in training mode #
######################################
if self.mode == 'training':
# changing inputs to embeddings and then through the input projection
# decoder_inputs_embedded: [batch_size, max_time_step + 1, embedding_size]
self.decoder_inputs_embedded = tf.nn.embedding_lookup(params=self.decoder_embeddings,
ids=self.decoder_inputs_train)
# self.decoder_inputs_embedded = decoder_input_layer(self.decoder_inputs_embedded)
# Helper to feed inputs to the training:
self.training_helper = tf.contrib.seq2seq.TrainingHelper(
inputs=self.decoder_inputs_embedded,
sequence_length=self.decoder_inputs_length_train,
name='training_helper')
# Build the decoder
self.training_decoder = tf.contrib.seq2seq.BasicDecoder(
cell=self.decoder_cell,
helper=self.training_helper,
initial_state=self.decoder_initial_state,
output_layer=decoder_output_layer)
# decoder outputs are of type tf.contrib.seq2seq.BasicDecoderOutput
# has two fields `rnn_output` and `sample_id`
self.decoder_outputs_train, self.decoder_last_state_train, self.decoder_outputs_length_decode_train = tf.contrib.seq2seq.dynamic_decode(
decoder=self.training_decoder,
impute_finished=True,
maximum_iterations=self.decoder_max_length
)
# In the training mode only create LOSS and Optimizer
self.__create_loss()
self.__create_optimizer()
######################################
# Build the decoder in sampling mode #
######################################
elif self.mode == 'inference':
start_tokens = tf.ones([self.batch_size, ], dtype=tf.int32) * self.config.DECODER_START_TOKEN_ID
end_token = self.config.DECODER_END_TOKEN_ID
def decoder_inputs_embedder(inputs):
# return decoder_input_layer(tf.nn.embedding_lookup(self.decoder_embeddings, inputs))
return tf.nn.embedding_lookup(self.decoder_embeddings, inputs)
# end token is needed so the helper stop feeding new inputs again once the <end> mark is shown.
decoder_helper = tf.contrib.seq2seq.GreedyEmbeddingHelper(decoder_inputs_embedder, start_tokens, end_token)
# Basic decoder performs greedy decoding at each time step
print("Building Greedy Decoder ...")
inference_decoder = tf.contrib.seq2seq.BasicDecoder(cell=self.decoder_cell,
helper=decoder_helper,
initial_state=self.decoder_initial_state,
output_layer=decoder_output_layer)
self.decoder_outputs_inference, self.decoder_last_state_inference, self.decoder_outputs_length_inference = tf.contrib.seq2seq.dynamic_decode(
decoder=inference_decoder,
output_time_major=False,
maximum_iterations=self.decoder_max_length
)
self.decoder_pred_inference = tf.expand_dims(self.decoder_outputs_inference.sample_id, -1)
print('Building decoder in: ', time.time() - start, ' secs')
def __create_loss(self):
print('Creating loss...')
start = time.time()
self.decoder_logits = tf.identity(self.decoder_outputs_train.rnn_output, name="decoder_logits")
self.decoder_pred = tf.argmax(self.decoder_logits, axis=-1, name="decoder_pred")
# masking the sequence in order to calculate the error according to the calculated
mask = tf.sequence_mask(self.decoder_inputs_length_train, maxlen=self.decoder_max_length, dtype=tf.float32,
name="masks")
# Control loss dimensions with `average_across_timesteps` and `average_across_batch`
self.loss = tf.contrib.seq2seq.sequence_loss(logits=self.decoder_logits,
targets=self.decoder_targets_train,
average_across_timesteps=False,
average_across_batch=False,
weights=mask,
name="batch_loss")
print('Building loss in: ', time.time() - start, ' secs')
def __create_optimizer(self):
print('creating optimizer...')
start = time.time()
learning_rate = tf.train.exponential_decay(self.config.LR, self.global_step, 200, 0.97, staircase=True)
self.opt = tf.train.RMSPropOptimizer(learning_rate=learning_rate)
# self.opt = tf.train.GradientDescentOptimizer(learning_rate=learning_rate)
# normalize the gradients of a parameter vector when its L2 norm exceeds a certain threshold according to
trainable_params = tf.trainable_variables()
# calculate gradients of the loss given all the trainable parameters
gradients = tf.gradients(self.loss, trainable_params)
# Gradient clipping: new_gradients = gradients * threshold / l2_norm(gradients)
clip_gradients, _ = tf.clip_by_global_norm(gradients, self.config.MAX_GRAD_NORM)
self.updates = self.opt.apply_gradients(zip(clip_gradients, trainable_params), global_step=self.global_step)
print('Building optimizer in: ', time.time() - start, ' secs')
def __helper__initializer(self):
sqrt3 = math.sqrt(3) # Uniform(-sqrt(3), sqrt(3)) has variance=1.
initializer = tf.random_uniform_initializer(-sqrt3, sqrt3, dtype=tf.float32)
return initializer
def train(self, sess, encoder_triples_inputs, encoder_text_inputs, encoder_text_inputs_length, decoder_inputs, decoder_inputs_lengths, encoder_predicates_direction):
feed_dict = {
# self.encoder_triples_inputs: encoder_triples_inputs,
self.encoder_entities_inputs: encoder_triples_inputs[:, [0, 2]], # pick up subjects and objects
self.encoder_predicates_inputs: encoder_triples_inputs[:, [1]], # pick up predicates
self.encoder_text_inputs: encoder_text_inputs,
self.encoder_text_inputs_length: encoder_text_inputs_length,
self.decoder_inputs: decoder_inputs,
self.decoder_inputs_length: decoder_inputs_lengths,
self.encoder_predicates_direction: encoder_predicates_direction
}
_, loss = sess.run([self.updates, self.loss], feed_dict=feed_dict)
return loss
def eval(self, sess, encoder_triples_inputs, encoder_text_inputs, encoder_text_inputs_length, decoder_inputs, decoder_inputs_lengths, encoder_predicates_direction):
"""
Run a evaluation step of the model feeding the given inputs
:param sess:
:param encoder_inputs:
:param encoder_inputs_length:
:param decoder_inputs:
:param decoder_inputs_lengths:
:return:
"""
feed_dict = {
# self.encoder_triples_inputs: encoder_triples_inputs,
self.encoder_entities_inputs: encoder_triples_inputs[:, [0, 2]], # pick up subjects and objects
self.encoder_predicates_inputs: encoder_triples_inputs[:, [1]], # pick up predicates
self.encoder_text_inputs: encoder_text_inputs,
self.encoder_text_inputs_length: encoder_text_inputs_length,
self.decoder_inputs: decoder_inputs,
self.decoder_inputs_length: decoder_inputs_lengths,
self.encoder_predicates_direction: encoder_predicates_direction
}
_, loss = sess.run([self.updates, self.loss], feed_dict=feed_dict)
return loss
def predict(self, sess, encoder_triples_inputs, encoder_text_inputs, encoder_text_inputs_length, encoder_predicates_direction):
"""
predict the output given an input
"""
feed_dict = {
# self.encoder_triples_inputs: encoder_triples_inputs,
self.encoder_entities_inputs: encoder_triples_inputs[:, [0, 2]], # pick up subjects and objects
self.encoder_predicates_inputs: encoder_triples_inputs[:, [1]], # pick up predicates
self.encoder_text_inputs: encoder_text_inputs,
self.encoder_text_inputs_length: encoder_text_inputs_length,
self.encoder_predicates_direction: encoder_predicates_direction
}
output = sess.run([self.decoder_pred_inference], feed_dict=feed_dict)
return output[0]
def save(self, sess, path, var_list=None, global_step=None):
saver = tf.train.Saver(var_list)
path = saver.save(sess, save_path=path, global_step=global_step)
print("model saved in %s" % path)
return path
def restore(self, sess, path, var_list=None):
"""
restore trained model from a specific path
:param sess:
:param path:
:param var_list: if None restore all list
:return:
"""
saver = tf.train.Saver(var_list)
saver.restore(sess, path)
print("model restored from %s" % path)
| [
"noreply@github.com"
] | noreply@github.com |
3f0136ea75534bfb93dcc9eee9be0a1bae63cfb0 | 0e26bc132e251360c10e76715f4e7e802eefc823 | /indexacion.py | 48469369461e430a93b3e6a3a7d16b82597d7061 | [] | no_license | juanmendoza31/t08_mendoza.anacleto | e0726cfb04d5a61a47da34d0b6179af28335bcf1 | 80a65bfdcda9012b17f2b66083d6a57b306c0d13 | refs/heads/master | 2020-11-24T00:45:34.851707 | 2019-12-13T18:14:52 | 2019-12-13T18:14:52 | 227,889,121 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,164 | py | #EJERCICIO 01
# 0 10 20 30
# 01234567890123456789012345678901
msj="HOLA FELICINDA"
print(msj[8])
#EJERCICIO 02
# 0 10 20 30
# 01234567890123456789012345678901
msj="LA RISA ES LA MEJOR MEDICINA"
print(msj[23])
#EJERCICIO 03
# 0 10 20 30
# 01234567890123456789012345678901
msj="LA CUARTA RONDA"
print(msj[9])
#EJERCIO 04
# 0 10 20 30
# 0123456789012345678901234567890123
msj="EL MEJOR ESPEJO ES UN VIEJO AMIGO"
print(msj[29])
#EJERCICIO 05
# 0 10 20 30
# 01234567890123456789012345678901
msj="PREFIERO MORIR EN EL INTENTO"
print(msj[13])
#EJERCICIO 06
# 0 10 20 30
# 01234567890123456789012345678901
msj="TE QUIERO A MI LADO"
print(msj[6])
#EJERCICIO 07
# 0 10 20 30
# 01234567890123456789012345678901
msj="UNA GOLONDRINA NO HACE VERANO"
print(msj[14])
#EJERCICIO 08
# 0 10 20 30
# 01234567890123456789012345678901234567
msj="EL SUFRIMIENTO HACE LAS HORAS ETERNAS"
print(msj[27])
#EJRCICIO 09
# 0 10 20 30 40
# 012345678901234567890123456789012345678901
msj="ALGUNAS VECES SE GANA, OTRAS SE APRENDE"
print(msj[34])
#EJRCICIO 10
# 0 10 20 30
# 01234567890123456789012345678901234
msj="FUE COMPLICADO PERO LO CONSEGUI"
print(msj[27])
#EJRCICIO 11
# 0 10 20 30
# 01234567890123456789012345678901
msj="APRECIA LO QUE TIENES"
print(msj[5])
#EJRCICIO 12
# 0 10 20 30
# 01234567890123456789012345678901
msj="PIENSA MENOS Y HAZ MAS"
print(msj[14])
#EJRCICIO 13
# 0 10 20 30
# 0123456789012345678901234567890123456789
msj="HAZ DE TUS SUEÑOS UNA REALIDAD"
print(msj[22])
#EJRCICIO 14
# 0 10 20 30 40
# 0123456789012345678901234567890123456789012345678
msj="CAMARON QUE SE DUERME SE LO LLEVA LA CORRIENTE"
print(msj[40])
#EJRCICIO 15
# 0 10 20 30
# 01234567890123456789012345678901
msj="LA VIDA DA MUCHAS VUELTAS"
print(msj[17]) | [
"jmendozaan@unprg.edu.pe"
] | jmendozaan@unprg.edu.pe |
eac099b088137656f9ed7c3baaaaad7af9cd21e1 | 1c67b46208a8756e978ac2bbbcfe88c11c3f45d2 | /0x0C-python-almost_a_circle/tests/test_models/test_rectangle.py | 019d3da26f38bd0028ae6339e2f8398b181a4985 | [] | no_license | FatChicken277/holbertonschool-higher_level_programming | eb5de8cb61961461ae351549f140e4caccdc3898 | 520d6310a5e2a874f8c5f5185d0fb769b6412e7c | refs/heads/master | 2022-12-18T23:40:44.484883 | 2020-09-27T20:45:24 | 2020-09-27T20:45:24 | 259,402,379 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 14,264 | py | #!/usr/bin/python3
"""This module constains a class that test rectangle class.
"""
import io
import pep8
import unittest
from models.base import Base
from models.rectangle import Rectangle
from contextlib import redirect_stdout as out
class TestRectangle(unittest.TestCase):
"""Tester class.
"""
def setUp(self):
"""Sets nb_objects to 0 for each test.
"""
Base._Base__nb_objects = 0
def test_style_base(self):
"""test pep8
"""
style = pep8.StyleGuide()
m = style.check_files(["models/rectangle.py"])
self.assertEqual(m.total_errors, 0, "fix pep8")
def test_docstring(self):
"""Test doc strings.
"""
self.assertIsNotNone(Rectangle.__doc__)
self.assertIsNotNone(Rectangle.__init__.__doc__)
self.assertIsNotNone(Rectangle.width.__doc__)
self.assertIsNotNone(Rectangle.height.__doc__)
self.assertIsNotNone(Rectangle.x.__doc__)
self.assertIsNotNone(Rectangle.y.__doc__)
self.assertIsNotNone(Rectangle.area.__doc__)
self.assertIsNotNone(Rectangle.display.__doc__)
self.assertIsNotNone(Rectangle.update.__doc__)
self.assertIsNotNone(Rectangle.to_dictionary.__doc__)
self.assertIsNotNone(Rectangle.__str__.__doc__)
def test_attributes(self):
"""Test the rectangle basic attributes.
"""
rec = Rectangle(1, 1)
self.assertEqual(rec.width, 1)
self.assertEqual(rec.height, 1)
self.assertEqual(rec.x, 0)
self.assertEqual(rec.y, 0)
self.assertEqual(rec.id, 1)
rec = Rectangle(1, 8, 0, 3, 3)
self.assertEqual(rec.width, 1)
self.assertEqual(rec.height, 8)
self.assertEqual(rec.x, 0)
self.assertEqual(rec.y, 3)
self.assertEqual(rec.id, 3)
def test_id(self):
"""Test id attribute.
"""
Rectangle(1, 1)
rec2 = Rectangle(1, 1)
rec3 = Rectangle(1, 1, 1, 1, 4)
self.assertEqual(rec2.id, 2)
self.assertEqual(rec3.id, 4)
def test_width_int(self):
"""Test width with negative integer.
"""
with self.assertRaises(ValueError) as error:
Rectangle(-2, 1)
self.assertEqual("width must be > 0", str(error.exception))
def test_width_none(self):
"""Test width with none value.
"""
with self.assertRaises(TypeError) as error:
Rectangle(None, 1)
self.assertEqual("width must be an integer", str(error.exception))
def test_width_float(self):
"""Test width with float value.
"""
with self.assertRaises(TypeError) as error:
Rectangle(3.1416, 1)
self.assertEqual("width must be an integer", str(error.exception))
def test_width_boolean(self):
"""Test width with boolean value.
"""
with self.assertRaises(TypeError) as error:
Rectangle(True, 1)
self.assertEqual("width must be an integer", str(error.exception))
def test_width_str(self):
"""Test width with string value.
"""
with self.assertRaises(TypeError) as error:
Rectangle("i'm a error >:O", 1)
self.assertEqual("width must be an integer", str(error.exception))
def test_width_list(self):
"""Test width with list value.
"""
with self.assertRaises(TypeError) as error:
Rectangle(["i", "m", "a", "bad", "guy", "duh"], 1)
self.assertEqual("width must be an integer", str(error.exception))
def test_width_tuple(self):
"""Test width with tuple value.
"""
with self.assertRaises(TypeError) as error:
Rectangle((1, 0, 0, 1, 1), 1)
self.assertEqual("width must be an integer", str(error.exception))
def test_width_dict(self):
"""Test width with dictionary value.
"""
with self.assertRaises(TypeError) as error:
Rectangle({"Tyler": "The creator", "A$AP": "Rocky"}, 1)
self.assertEqual("width must be an integer", str(error.exception))
def test_width_set(self):
"""Test width with set value.
"""
with self.assertRaises(TypeError) as error:
Rectangle({"i'm", "a", "set"}, 1)
self.assertEqual("width must be an integer", str(error.exception))
def test_height_int(self):
"""Test height with negative integer.
"""
with self.assertRaises(ValueError) as error:
Rectangle(1, -2)
self.assertEqual("height must be > 0", str(error.exception))
def test_height_none(self):
"""Test height with none value.
"""
with self.assertRaises(TypeError) as error:
Rectangle(1, None)
self.assertEqual("height must be an integer", str(error.exception))
def test_height_float(self):
"""Test height with float value.
"""
with self.assertRaises(TypeError) as error:
Rectangle(1, 3.1416)
self.assertEqual("height must be an integer", str(error.exception))
def test_height_boolean(self):
"""Test height with boolean value.
"""
with self.assertRaises(TypeError) as error:
Rectangle(1, True)
self.assertEqual("height must be an integer", str(error.exception))
def test_height_str(self):
"""Test height with string value.
"""
with self.assertRaises(TypeError) as error:
Rectangle(1, "i'm a error >:O")
self.assertEqual("height must be an integer", str(error.exception))
def test_height_list(self):
"""Test height with list value.
"""
with self.assertRaises(TypeError) as error:
Rectangle(1, ["i", "m", "a", "bad", "guy", "duh"])
self.assertEqual("height must be an integer", str(error.exception))
def test_height_tuple(self):
"""Test height with tuple value.
"""
with self.assertRaises(TypeError) as error:
Rectangle(1, (1, 0, 0, 1, 1))
self.assertEqual("height must be an integer", str(error.exception))
def test_height_dict(self):
"""Test height with dictionary value.
"""
with self.assertRaises(TypeError) as error:
Rectangle(1, {"Tyler": "The creator", "A$AP": "Rocky"})
self.assertEqual("height must be an integer", str(error.exception))
def test_height_set(self):
"""Test height with set value.
"""
with self.assertRaises(TypeError) as error:
Rectangle(1, {"i'm", "a", "set"})
self.assertEqual("height must be an integer", str(error.exception))
def test_x_int(self):
"""Test x with negative integer.
"""
with self.assertRaises(ValueError) as error:
Rectangle(1, 1, -2)
self.assertEqual("x must be >= 0", str(error.exception))
def test_x_none(self):
"""Test x with none value.
"""
with self.assertRaises(TypeError) as error:
Rectangle(1, 1, None)
self.assertEqual("x must be an integer", str(error.exception))
def test_x_float(self):
"""Test x with float value.
"""
with self.assertRaises(TypeError) as error:
Rectangle(1, 1, 3.1416)
self.assertEqual("x must be an integer", str(error.exception))
def test_x_boolean(self):
"""Test x with boolean value.
"""
with self.assertRaises(TypeError) as error:
Rectangle(1, 1, True)
self.assertEqual("x must be an integer", str(error.exception))
def test_x_str(self):
"""Test x with string value.
"""
with self.assertRaises(TypeError) as error:
Rectangle(1, 1, "i'm a error >:O")
self.assertEqual("x must be an integer", str(error.exception))
def test_x_list(self):
"""Test x with list value.
"""
with self.assertRaises(TypeError) as error:
Rectangle(1, 1, ["i", "m", "a", "bad", "guy", "duh"])
self.assertEqual("x must be an integer", str(error.exception))
def test_x_tuple(self):
"""Test x with tuple value.
"""
with self.assertRaises(TypeError) as error:
Rectangle(1, 1, (1, 0, 0, 1, 1))
self.assertEqual("x must be an integer", str(error.exception))
def test_x_dict(self):
"""Test x with dictionay value.
"""
with self.assertRaises(TypeError) as error:
Rectangle(1, 1, {"Tyler": "The creator", "A$AP": "Rocky"})
self.assertEqual("x must be an integer", str(error.exception))
def test_x_set(self):
"""Test x with set value.
"""
with self.assertRaises(TypeError) as error:
Rectangle(1, 1, {"i'm", "a", "set"})
self.assertEqual("x must be an integer", str(error.exception))
def test_y_int(self):
"""Test y with negative integer.
"""
with self.assertRaises(ValueError) as error:
Rectangle(1, 1, 1, -2)
self.assertEqual("y must be >= 0", str(error.exception))
def test_y_none(self):
"""Test y with none value.
"""
with self.assertRaises(TypeError) as error:
Rectangle(1, 1, 1, None)
self.assertEqual("y must be an integer", str(error.exception))
def test_y_float(self):
"""Test y with float value.
"""
with self.assertRaises(TypeError) as error:
Rectangle(1, 1, 1, 3.1416)
self.assertEqual("y must be an integer", str(error.exception))
def test_y_boolean(self):
"""Test y with boolean value.
"""
with self.assertRaises(TypeError) as error:
Rectangle(1, 1, 1, True)
self.assertEqual("y must be an integer", str(error.exception))
def test_y_str(self):
"""Test y with string value.
"""
with self.assertRaises(TypeError) as error:
Rectangle(1, 1, 1, "i'm a error >:O")
self.assertEqual("y must be an integer", str(error.exception))
def test_y_list(self):
"""Test y with list value.
"""
with self.assertRaises(TypeError) as error:
Rectangle(1, 1, 1, ["i", "m", "a", "bad", "guy", "duh"])
self.assertEqual("y must be an integer", str(error.exception))
def test_y_tuple(self):
"""Test y with tuple value.
"""
with self.assertRaises(TypeError) as error:
Rectangle(1, 1, 1, (1, 0, 0, 1, 1))
self.assertEqual("y must be an integer", str(error.exception))
def test_y_dict(self):
"""Test y with dictionay value.
"""
with self.assertRaises(TypeError) as error:
Rectangle(1, 1, 1, {"Tyler": "The creator", "A$AP": "Rocky"})
self.assertEqual("y must be an integer", str(error.exception))
def test_y_set(self):
"""Test y with set value.
"""
with self.assertRaises(TypeError) as error:
Rectangle(1, 1, 1, {"i'm", "a", "set"})
self.assertEqual("y must be an integer", str(error.exception))
def test_area(self):
"""Test rectangle area.
"""
rect4 = Rectangle(34623, 12312)
self.assertEqual(rect4.area(), 426278376)
def test_display(self):
"""Test rectangle display.
"""
with io.StringIO() as buff, out(buff):
rect5 = Rectangle(4, 6)
rect5.display()
expected_display = "####\n" * 6
self.assertEqual(buff.getvalue(), expected_display)
with io.StringIO() as buff, out(buff):
rect5 = Rectangle(1, 1)
rect5.display()
expected_display = "#\n"
self.assertEqual(buff.getvalue(), expected_display)
def test_str(self):
"""Test rectangle str.
"""
rect6 = Rectangle(32, 2, 2, 1, 53)
self.assertEqual(str(rect6), "[Rectangle] (53) 2/1 - 32/2")
rect7 = Rectangle(1, 1)
self.assertEqual(str(rect7), "[Rectangle] (1) 0/0 - 1/1")
def test_display1(self):
"""Test rectangle display1.
"""
with io.StringIO() as buff, out(buff):
rect7 = Rectangle(2, 2, 1, 4)
rect7.display()
expected_display = "\n\n\n\n"+" ##\n" * 2
self.assertEqual(buff.getvalue(), expected_display)
with io.StringIO() as buff, out(buff):
rect8 = Rectangle(1, 1, 0, 0)
rect8.display()
expected_display = "#\n"
self.assertEqual(buff.getvalue(), expected_display)
def test_update_args(self):
"""Test rectangle update (args).
"""
rect9 = Rectangle(1, 2, 3, 4, 5)
rect9.update(100, 20, 23)
self.assertEqual(rect9.id, 100)
self.assertEqual(rect9.width, 20)
self.assertEqual(rect9.height, 23)
self.assertEqual(rect9.x, 3)
self.assertEqual(rect9.y, 4)
def test_update_kwargs(self):
"""Test rectangle update (kwargs).
"""
rect10 = Rectangle(1, 2, 3, 4, 5)
rect10.update(id=34, x=0, height=23)
self.assertEqual(rect10.id, 34)
self.assertEqual(rect10.width, 1)
self.assertEqual(rect10.height, 23)
self.assertEqual(rect10.x, 0)
self.assertEqual(rect10.y, 4)
def test_update_args_kwargs(self):
"""Test rectangle update (args/kwargs).
"""
rect11 = Rectangle(1, 2, 3, 4, 5)
rect11.update(100, 20, width=69)
self.assertEqual(rect11.id, 100)
self.assertEqual(rect11.width, 20)
self.assertEqual(rect11.height, 2)
self.assertEqual(rect11.x, 3)
self.assertEqual(rect11.y, 4)
def test_to_dictionay(self):
"""Test Rectangle to_dictionary method.
"""
rect12 = Rectangle(1, 1, 1, 1, 1)
dictionary = rect12.to_dictionary()
self.assertEqual(type(dictionary), type({}))
self.assertEqual("id" in dictionary, True)
self.assertEqual("width" in dictionary, True)
self.assertEqual("height" in dictionary, True)
self.assertEqual("x" in dictionary, True)
self.assertEqual("y" in dictionary, True)
if __name__ == "__main__":
unittest.main()
| [
"1637@holbertonschool.com"
] | 1637@holbertonschool.com |
35897f40e8b2c67f13adecfaf1673d09f16913b5 | 7b93650a756aa4d05f7894f5a978d76fb3c4384f | /frontend.py | 235e82f2c4f904b3ec3fdd2f61867ce3884d6c39 | [] | no_license | igor27g/Database_application | 8c72271ba59ef8591952f7e224463b30eb0cf494 | 56a9a5e90e16987dab6a438d18f71e026e065700 | refs/heads/master | 2020-04-02T09:11:30.250385 | 2018-10-23T07:58:36 | 2018-10-23T07:58:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,611 | py | from tkinter import *
import backend
def get_selected_row(event):
try:
global selected_tuple
index=list1.curselection()[0]
selected_tuple=list1.get(index)
e1.delete(0,END)
e1.insert(END,selected_tuple[1])
e2.delete(0,END)
e2.insert(END,selected_tuple[2])
e3.delete(0,END)
e3.insert(END,selected_tuple[3])
e4.delete(0,END)
e4.insert(END,selected_tuple[4])
except IndexError:
pass
def view_command():
list1.delete(0,END)
for row in backend.view():
list1.insert(END,row)
def search_command():
list1.delete(0,END)
for row in backend.search(player_text.get(),club_text.get(),age_text.get(),position_text.get()):
list1.insert(END,row)
def add_command():
backend.insert(player_text.get(),club_text.get(),age_text.get(),position_text.get())
list1.delete(0,END)
list1.insert(END,(player_text.get(),club_text.get(),age_text.get(),position_text.get()))
def delete_command():
backend.delete(selected_tuple[0])
def update_command():
backend.update(selected_tuple[0],player_text.get(),club_text.get(),age_text.get(),position_text.get())
window=Tk()
window.wm_title("players")
l1=Label(window,text="Player")
l1.grid(row=0,column=0)
l2=Label(window,text="Club")
l2.grid(row=0,column=2)
l3=Label(window,text="Age")
l3.grid(row=1,column=0)
l4=Label(window,text="Position")
l4.grid(row=1,column=2)
player_text=StringVar()
e1=Entry(window,textvariable=player_text)
e1.grid(row=0,column=1)
club_text=StringVar()
e2=Entry(window,textvariable=club_text)
e2.grid(row=0,column=3)
age_text=StringVar()
e3=Entry(window,textvariable=age_text)
e3.grid(row=1,column=1)
position_text=StringVar()
e4=Entry(window,textvariable=position_text)
e4.grid(row=1,column=3)
list1=Listbox(window, height=6,width=35)
list1.grid(row=2,column=0,rowspan=6,columnspan=2)
sb1=Scrollbar(window)
sb1.grid(row=2,column=2,rowspan=6)
list1.configure(yscrollcommand=sb1.set)
sb1.configure(command=list1.yview)
list1.bind('<<ListboxSelect>>',get_selected_row)
b1=Button(window,text="View all", width=12,command=view_command)
b1.grid(row=2,column=3)
b2=Button(window,text="Search entry", width=12,command=search_command)
b2.grid(row=3,column=3)
b3=Button(window,text="Add entry", width=12,command=add_command)
b3.grid(row=4,column=3)
b4=Button(window,text="Update selected", width=12,command=update_command)
b4.grid(row=5,column=3)
b5=Button(window,text="Delete selected", width=12,command=delete_command)
b5.grid(row=6,column=3)
b6=Button(window,text="Close", width=12,command=window.destroy)
b6.grid(row=7,column=3)
window.mainloop()
| [
"igor.piotr.grabowski@gmail.com"
] | igor.piotr.grabowski@gmail.com |
93d27555c7c2ec318b321b018d63a96077aec83b | a03033b483a90ab9705b15fb658e288ca9ad5329 | /venv/bin/pip | 3469214cb1058cc0fd4968bfd53ea7dda04b9348 | [] | no_license | DrekStyler/BI_Tool | bae31892238b66c900582adb05e0bd58ee513825 | 6de0ed229a1291d528d3271890e00e37e2030666 | refs/heads/master | 2020-07-25T20:22:17.207247 | 2016-11-14T16:17:17 | 2016-11-14T16:17:17 | 73,650,942 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 262 | #!/Users/derekstyer/galvanize/unit-3/week5/fishin_Analytics/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from pip import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"dmstyer@gmail.com"
] | dmstyer@gmail.com | |
b49bb19316f64f19646ae7823fee45bc6bf781df | 12904afb9fe3b58d7dde4b390a3b8c29373cc194 | /test/test_genre.py | da20f17ba268a98539072f1913d63b61dc7362ad | [] | no_license | adito0/AditiFlix | 9e46d2ed0ba0b19a944fc78012c0a8ded4a05020 | d43866d1d3287cf78760cab3a055167c2760e81d | refs/heads/master | 2022-12-28T00:46:38.773372 | 2020-09-03T19:43:37 | 2020-09-03T19:43:37 | 292,663,671 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,604 | py | from domainmodel.genre import Genre
import pytest
@pytest.fixture
def genre():
return Genre('Drama')
def test_init(genre):
assert genre.genre_name == 'Drama'
genre1 = Genre("Comedy")
assert repr(genre1) == "<Genre Comedy>"
genre2 = Genre("")
assert genre2.genre_name is None
genre3 = Genre(42)
assert genre3.genre_name is None
assert repr(genre2) == "<Genre None>"
assert repr(genre3) == "<Genre None>"
def test_compare():
genre1 = Genre("Comedy")
genre2 = Genre("Comedy")
assert genre1 == genre2
genre2 = 4
assert genre1 != genre2
genre1 = Genre("")
genre2 = Genre(45)
assert genre1 == genre2
genre1 = Genre("Comedy")
genre2 = Genre("comedy")
assert genre1 != genre2
def test_lt():
genre1 = Genre("Comedy")
genre2 = Genre("Drama")
assert genre1 < genre2
assert genre2 > genre1
def test_le():
genre1 = Genre("Comedy")
genre2 = Genre("Drama")
assert genre1 <= genre2
assert genre2 >= genre1
genre1 = Genre("Comedy")
genre2 = Genre("Comedy")
assert genre1 <= genre2
assert genre2 >= genre1
def test_hash():
genre1 = Genre("Comedy")
genre2 = Genre("Comedy")
assert hash(genre1) == hash(genre2)
genre2 = Genre("Comedz")
assert hash(genre1) != hash(genre2)
dict1 = dict()
dict1[genre1] = genre2
assert dict1[genre1] == genre2
assert repr(dict1[genre1]) == "<Genre Comedz>"
genre1 = Genre("")
genre2 = Genre(7)
dict1[genre1] = genre2
assert dict1[genre1] == genre2
assert repr(dict1[genre1]) == "<Genre None>" | [
"aditi.ramalingam@outlook.com"
] | aditi.ramalingam@outlook.com |
662e95d8ce8427a597398fbeb8540cae4e74d908 | d5ea81e376bc7043752672498862fd8c44d23aba | /Schema/schema_insert.py | fec91fdf0ab229c39da1051b8eb67536bde13941 | [] | no_license | NatnaelT/biograkn-covid | 3a0885d7ecf60ee16a73a4571219b98f8b09a000 | 1771c010f0b061145d5fba1fa0b47c448167905f | refs/heads/master | 2022-11-18T09:01:56.881435 | 2020-07-14T14:59:00 | 2020-07-14T14:59:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 527 | py | from grakn.client import GraknClient
import csv
import os
def insertSchema(uri, keyspace):
client = GraknClient(uri=uri)
session = client.session(keyspace=keyspace)
print('.....')
print('Inserting schema...')
print('.....')
with open("../biograkn-covid/Schema/biograkn-covid.gql", "r") as graql_file:
schema = graql_file.read()
with session.transaction().write() as write_transaction:
write_transaction.query(schema)
write_transaction.commit()
print('.....')
print('Success inserting schema!')
print('.....') | [
"tomas@grakn.ai"
] | tomas@grakn.ai |
dd7c07a09e6b06c03da104c70355e5d84aa90c57 | c2fb83340f389b9209cd7d16231eb97a7fc28fb8 | /JJEhr/event/form.py | be1bc4d58c51e0869e292cd7623760ea6d4f1a3a | [] | no_license | shalocfolmos/JJEhr | f642df216c3fd1a47ccccf227d336c69a02d9a6e | 71e8b1049366dd1be78b362a00d2e5370218b288 | refs/heads/master | 2020-12-30T10:36:25.796334 | 2012-12-05T12:50:58 | 2012-12-05T12:50:58 | 3,364,314 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 185 | py | #-*- coding: UTF-8 -*-
from django.forms.models import ModelForm
from JJEhr.event.models import EventType
class AddEventTypeForm(ModelForm):
class Meta:
model = EventType
| [
"shalocfolmos@gmail.com"
] | shalocfolmos@gmail.com |
a490e7f1e6b2c65087dfabc08bfafafeb3f0052c | 5e9576c368e98927e2965bd2fb23bd35d9993d69 | /featuretools/primitives/standard/aggregation/time_since_last_false.py | da04a8d96f2ab98be1971c71fbac3b5a5bab51cb | [
"BSD-3-Clause"
] | permissive | alteryx/featuretools | c6e319e063e8e84e7684bf232376f95dc5272160 | c284c2d27a95b81e0bae913ac90df2b02c8f3b37 | refs/heads/main | 2023-08-25T12:21:33.945418 | 2023-08-23T16:30:25 | 2023-08-23T16:30:25 | 102,908,804 | 1,783 | 201 | BSD-3-Clause | 2023-09-07T18:53:19 | 2017-09-08T22:15:17 | Python | UTF-8 | Python | false | false | 2,223 | py | import numpy as np
import pandas as pd
from woodwork.column_schema import ColumnSchema
from woodwork.logical_types import Boolean, BooleanNullable, Datetime, Double
from featuretools.primitives.base import AggregationPrimitive
class TimeSinceLastFalse(AggregationPrimitive):
"""Calculates the time since the last `False` value.
Description:
Using a series of Datetimes and a series of Booleans, find the last
record with a `False` value. Return the seconds elapsed between that record
and the instance's cutoff time. Return nan if no values are `False`.
Examples:
>>> from datetime import datetime
>>> time_since_last_false = TimeSinceLastFalse()
>>> cutoff_time = datetime(2010, 1, 1, 12, 0, 0)
>>> times = [datetime(2010, 1, 1, 11, 45, 0),
... datetime(2010, 1, 1, 11, 55, 15),
... datetime(2010, 1, 1, 11, 57, 30)]
>>> booleans = [True, False, True]
>>> time_since_last_false(times, booleans, time=cutoff_time)
285.0
"""
name = "time_since_last_false"
input_types = [
[
ColumnSchema(logical_type=Datetime, semantic_tags={"time_index"}),
ColumnSchema(logical_type=Boolean),
],
[
ColumnSchema(logical_type=Datetime, semantic_tags={"time_index"}),
ColumnSchema(logical_type=BooleanNullable),
],
]
return_type = ColumnSchema(logical_type=Double, semantic_tags={"numeric"})
uses_calc_time = True
stack_on_self = False
default_value = 0
def get_function(self):
def time_since_last_false(datetime_col, bool_col, time=None):
df = pd.DataFrame(
{
"datetime": datetime_col,
"bool": bool_col,
},
).dropna()
if df.empty:
return np.nan
false_indices = df[~df["bool"]]
if false_indices.empty:
return np.nan
last_false_index = false_indices.index[-1]
time_since = time - datetime_col.loc[last_false_index]
return time_since.total_seconds()
return time_since_last_false
| [
"noreply@github.com"
] | noreply@github.com |
4b70f2149f976820d7d9d778768e9d465458d548 | 1c697fe4acf4bf93214c1bec5e0a4c595ea255c8 | /build/lib.linux-x86_64-3.6/CRADLE/CorrectBias/vari.py | 750ee4a04fe8be88c5cc33c2b75b918ac014df17 | [
"MIT"
] | permissive | Young-Sook/CRADLE | b7ec17147a8af26bc12b020eb1427b2accb20aa1 | af0c807d2f894de211cb3d914abe9885cb43fef8 | refs/heads/master | 2021-06-27T21:11:09.271078 | 2020-12-01T18:14:22 | 2020-12-01T18:14:22 | 194,312,322 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 74,889 | py | import os
import math
import numpy as np
import sys
import multiprocessing
import pyBigWig
def setGlobalVariables(args):
### input bigwig files
setInputFiles(args.ctrlbw, args.expbw)
setOutputDirectory(args.o)
setBiasFiles(args)
setFragLen(args.l)
setAnlaysisRegion(args.r, args.bl)
setFilterCriteria(args.mi)
setBinSize(args.binSize)
setNumProcess(args.p)
setNormalization(args.norm, args.generateNormBW)
return
def setInputFiles(ctrlbwFiles, expbwFiles):
global CTRLBW_NAMES
global EXPBW_NAMES
global CTRLBW_NUM
global EXPBW_NUM
global SAMPLE_NUM
global COEFCTRL
global COEFEXP
global COEFCTRL_HIGHRC
global COEFEXP_HIGHRC
global HIGHRC
CTRLBW_NUM = len(ctrlbwFiles)
EXPBW_NUM = len(expbwFiles)
SAMPLE_NUM = int(CTRLBW_NUM + EXPBW_NUM)
CTRLBW_NAMES = [0] * CTRLBW_NUM
for i in range(CTRLBW_NUM):
CTRLBW_NAMES[i] = ctrlbwFiles[i]
EXPBW_NAMES = [0] * EXPBW_NUM
for i in range(EXPBW_NUM):
EXPBW_NAMES[i] = expbwFiles[i]
return
def setOutputDirectory(outputDir):
global OUTPUT_DIR
if(outputDir == None):
outputDir = os.getcwd() + "/CRADLE_correctionResult"
OUTPUT_DIR = outputDir
dirExist = os.path.isdir(OUTPUT_DIR)
if(dirExist == False):
os.makedirs(OUTPUT_DIR)
return
def setBiasFiles(args):
global SHEAR # indicator variable whether each type of bias will be corrected.
global PCR
global MAP
global GQUAD
global COVARI_NUM
global FA
global COVARI_ORDER
SHEAR = 0
PCR = 0
MAP = 0
GQUAD = 0
FA = args.faFile
COVARI_NUM = 0
COVARI_ORDER = ['Intercept']
biasType = [x.lower() for x in args.biasType]
for i in range(len(biasType)):
if( (biasType[i] != 'shear') and (biasType[i] != 'pcr') and (biasType[i] != 'map') and (biasType[i] != 'gquad')):
print("Error! Wrong value in -biasType. Only 'shear', 'pcr', 'map', 'gquad' are allowed")
sys.exit()
if('shear' in biasType):
SHEAR = 1
COVARI_NUM = COVARI_NUM + 2
COVARI_ORDER.extend(["MGW_shear", "ProT_shear"])
global MGW
global PROT
global N_MGW
global N_PROT
####### MGW
MGW = [ ['AAAAA', -1.76850260706379], ['AAAAC', -1.00063188030791], ['AAAAG', -1.351608803132], ['AAAAT', -1.41010500981361],
['AAACA', -0.608589792531882], ['AAACC', -0.992663710658729], ['AAACG', -0.735195416803445], ['AAACT', -1.17498526745268],
['AAAGA', -1.02492457287695], ['AAAGC', -1.01676126223779], ['AAAGG', -1.00063188030791], ['AAAGT', -1.82161243237774],
['AAATA', -1.23382576747562], ['AAATC', -0.946143695023836], ['AAATG', -1.18455471846883], ['AAATT', -4.2195077051761],
['AACAA', -0.449048264069748], ['AACAC', -0.412845215405787], ['AACAG', -0.458307589482544], ['AACAT', -0.818310323513951],
['AACCA', -0.798507696217772], ['AACCC', -1.01676126223779], ['AACCG', -0.77908961036067], ['AACCT', -1.39812881876689],
['AACGA', -0.530628251062171], ['AACGC', -0.614009860001222], ['AACGG', -0.624938930533412], ['AACGT', -0.880185727232039],
['AACTA', -0.859132318034206], ['AACTC', -1.08401348924696], ['AACTG', -0.699046902687133], ['AACTT', -1.83996157104593],
['AAGAA', -0.530628251062171], ['AAGAC', -0.608589792531882], ['AAGAG', -0.592503654780258], ['AAGAT', -1.12846525181779],
['AAGCA', -0.630448586344381], ['AAGCC', -0.908964691782082], ['AAGCG', -0.619459464768786], ['AAGCT', -0.931105817659296],
['AAGGA', -0.811665780795283], ['AAGGC', -0.687282061107547], ['AAGGG', -0.741349282377823], ['AAGGT', -1.27506872600967],
['AAGTA', -1.01676126223779], ['AAGTC', -1.2856508353402], ['AAGTG', -0.931105817659296], ['AAGTT', -1.83996157104593],
['AATAA', -0.219473822425247], ['AATAC', -0.307484699747961], ['AATAG', -0.608589792531882], ['AATAT', -0.530628251062171],
['AATCA', -0.716957829253664], ['AATCC', -0.894471684479515], ['AATCG', -0.658461622572055], ['AATCT', -1.27506872600967],
['AATGA', -0.716957829253664], ['AATGC', -0.486611365645396], ['AATGG', -0.399599988655766], ['AATGT', -0.838513030831471],
['AATTA', -0.77908961036067], ['AATTC', -1.27506872600967], ['AATTG', -0.859132318034206], ['AATTT', -4.2195077051761],
['ACAAA', -0.344148684119552], ['ACAAC', -0.20132450391957], ['ACAAG', -0.352482065678696], ['ACAAT', -0.421773846150088],
['ACACA', -0.230523658611832], ['ACACC', -0.303492678478423], ['ACACG', -0.241696959209958], ['ACACT', -0.390866308687012],
['ACAGA', -0.295556128882687], ['ACAGC', -0.303492678478423], ['ACAGG', -0.3114927211455], ['ACAGT', -0.348306694268216],
['ACATA', -0.373624502252506], ['ACATC', -0.520677920209002], ['ACATG', -0.439873887793706], ['ACATT', -0.838513030831471],
['ACCAA', -0.399599988655766], ['ACCAC', -0.27598603268859], ['ACCAG', -0.352482065678696], ['ACCAT', -0.449048264069748],
['ACCCA', -0.603198943897006], ['ACCCC', -0.647162067318122], ['ACCCG', -0.581921545449721], ['ACCCT', -0.824999311664748],
['ACCGA', -0.426268235737928], ['ACCGC', -0.399599988655766], ['ACCGG', -0.365113812584597], ['ACCGT', -0.566255428705321],
['ACCTA', -0.592503654780258], ['ACCTC', -0.772699812261899], ['ACCTG', -0.669890318395678], ['ACCTT', -1.27506872600967],
['ACGAA', -0.344148684119552], ['ACGAC', -0.291611350591671], ['ACGAG', -0.377907164044507], ['ACGAT', -0.530628251062171],
['ACGCA', -0.31955728098223], ['ACGCC', -0.399599988655766], ['ACGCG', -0.348306694268216], ['ACGCT', -0.501069448820626],
['ACGGA', -0.408410618337921], ['ACGGC', -0.399599988655766], ['ACGGG', -0.472359342938195], ['ACGGT', -0.566255428705321],
['ACGTA', -0.510825623765991], ['ACGTC', -0.581921545449721], ['ACGTG', -0.505935638471799], ['ACGTT', -0.880185727232039],
['ACTAA', -0.14877300859314], ['ACTAC', -0.187038546672093], ['ACTAG', -0.377907164044507], ['ACTAT', -0.501069448820626],
['ACTCA', -0.486611365645396], ['ACTCC', -0.566255428705321], ['ACTCG', -0.467653451900782], ['ACTCT', -0.501069448820626],
['ACTGA', -0.23795863709935], ['ACTGC', -0.268263986594679], ['ACTGG', -0.299516530098784], ['ACTGT', -0.348306694268216],
['ACTTA', -0.704981638206948], ['ACTTC', -0.811665780795283], ['ACTTG', -0.681451140796754], ['ACTTT', -1.82161243237774],
['AGAAA', -0.561087458546879], ['AGAAC', -0.505935638471799], ['AGAAG', -0.693147180559945], ['AGAAT', -0.735195416803445],
['AGACA', -0.439873887793706], ['AGACC', -0.566255428705321], ['AGACG', -0.51573963856842], ['AGACT', -0.652795885036378],
['AGAGA', -0.435318071257845], ['AGAGC', -0.417299565755167], ['AGAGG', -0.449048264069748], ['AGAGT', -0.501069448820626],
['AGATA', -0.571450245582426], ['AGATC', -0.77908961036067], ['AGATG', -0.603198943897006], ['AGATT', -1.27506872600967],
['AGCAA', -0.399599988655766], ['AGCAC', -0.256791585432443], ['AGCAG', -0.365113812584597], ['AGCAT', -0.335884174269658],
['AGCCA', -0.675654023112428], ['AGCCC', -0.630448586344381], ['AGCCG', -0.51573963856842], ['AGCCT', -0.798507696217772],
['AGCGA', -0.352482065678696], ['AGCGC', -0.356674943938733], ['AGCGG', -0.369360103466048], ['AGCGT', -0.501069448820626],
['AGCTA', -0.491407537908889], ['AGCTC', -0.619459464768786], ['AGCTG', -0.530628251062171], ['AGCTT', -0.931105817659296],
['AGGAA', -0.55083095837969], ['AGGAC', -0.525640709551132], ['AGGAG', -0.619459464768786], ['AGGAT', -0.687282061107547],
['AGGCA', -0.520677920209002], ['AGGCC', -0.571450245582426], ['AGGCG', -0.545741888872219], ['AGGCT', -0.798507696217772],
['AGGGA', -0.592503654780258], ['AGGGC', -0.566255428705321], ['AGGGG', -0.624938930533412], ['AGGGT', -0.824999311664748],
['AGGTA', -0.798507696217772], ['AGGTC', -0.901691932453002], ['AGGTG', -0.741349282377823], ['AGGTT', -1.39812881876689],
['AGTAA', -0.279869532714987], ['AGTAC', -0.215817511222137], ['AGTAG', -0.348306694268216], ['AGTAT', -0.279869532714987],
['AGTCA', -0.486611365645396], ['AGTCC', -0.687282061107547], ['AGTCG', -0.641559811769452], ['AGTCT', -0.652795885036378],
['AGTGA', -0.340007891453521], ['AGTGC', -0.307484699747961], ['AGTGG', -0.303492678478423], ['AGTGT', -0.390866308687012],
['AGTTA', -0.597837000755621], ['AGTTC', -0.852211875189633], ['AGTTG', -0.76635058458324], ['AGTTT', -1.17498526745268],
['ATAAA', -0.172953806790354], ['ATAAC', -0.187038546672093], ['ATAAG', -0.23795863709935], ['ATAAT', -0.315516871445225],
['ATACA', -0.118518600235337], ['ATACC', -0.245449308828508], ['ATACG', -0.219473822425247], ['ATACT', -0.279869532714987],
['ATAGA', -0.162518929497775], ['ATAGC', -0.279869532714987], ['ATAGG', -0.194156014440958], ['ATAGT', -0.501069448820626],
['ATATA', -0.138586163286147], ['ATATC', -0.268263986594679], ['ATATG', -0.299516530098784], ['ATATT', -0.530628251062171],
['ATCAA', -0.22314355131421], ['ATCAC', -0.287682072451781], ['ATCAG', -0.291611350591671], ['ATCAT', -0.535640792885715],
['ATCCA', -0.462969602588355], ['ATCCC', -0.525640709551132], ['ATCCG', -0.710951805193452], ['ATCCT', -0.687282061107547],
['ATCGA', -0.335884174269658], ['ATCGC', -0.315516871445225], ['ATCGG', -0.32768740706548], ['ATCGT', -0.530628251062171],
['ATCTA', -0.561087458546879], ['ATCTC', -0.561087458546879], ['ATCTG', -0.444450554821118], ['ATCTT', -1.12846525181779],
['ATGAA', -0.230523658611832], ['ATGAC', -0.249215791623985], ['ATGAG', -0.335884174269658], ['ATGAT', -0.535640792885715],
['ATGCA', -0.212174519943636], ['ATGCC', -0.295556128882687], ['ATGCG', -0.194156014440958], ['ATGCT', -0.335884174269658],
['ATGGA', -0.291611350591671], ['ATGGC', -0.335884174269658], ['ATGGG', -0.352482065678696], ['ATGGT', -0.449048264069748],
['ATGTA', -0.31955728098223], ['ATGTC', -0.426268235737928], ['ATGTG', -0.369360103466048], ['ATGTT', -0.818310323513951],
['ATTAA', -0.20132450391957], ['ATTAC', -0.252996514463891], ['ATTAG', -0.204928111422868], ['ATTAT', -0.315516871445225],
['ATTCA', -0.340007891453521], ['ATTCC', -0.587198602550564], ['ATTCG', -0.491407537908889], ['ATTCT', -0.735195416803445],
['ATTGA', -0.245449308828508], ['ATTGC', -0.369360103466048], ['ATTGG', -0.307484699747961], ['ATTGT', -0.421773846150088],
['ATTTA', -0.55594605904646], ['ATTTC', -0.838513030831471], ['ATTTG', -0.946143695023836], ['ATTTT', -1.41010500981361],
['CAAAA', -0.55083095837969], ['CAAAC', -0.458307589482544], ['CAAAG', -0.681451140796754], ['CAAAT', -0.946143695023836],
['CAACA', -0.331777392317005], ['CAACC', -0.520677920209002], ['CAACG', -0.444450554821118], ['CAACT', -0.76635058458324],
['CAAGA', -0.472359342938195], ['CAAGC', -0.462969602588355], ['CAAGG', -0.608589792531882], ['CAAGT', -0.681451140796754],
['CAATA', -0.426268235737928], ['CAATC', -0.525640709551132], ['CAATG', -0.587198602550564], ['CAATT', -0.859132318034206],
['CACAA', -0.348306694268216], ['CACAC', -0.14877300859314], ['CACAG', -0.299516530098784], ['CACAT', -0.369360103466048],
['CACCA', -0.510825623765991], ['CACCC', -0.545741888872219], ['CACCG', -0.472359342938195], ['CACCT', -0.741349282377823],
['CACGA', -0.3114927211455], ['CACGC', -0.32768740706548], ['CACGG', -0.390866308687012], ['CACGT', -0.505935638471799],
['CACTA', -0.386527907088414], ['CACTC', -0.540678586915672], ['CACTG', -0.444450554821118], ['CACTT', -0.931105817659296],
['CAGAA', -0.373624502252506], ['CAGAC', -0.340007891453521], ['CAGAG', -0.373624502252506], ['CAGAT', -0.444450554821118],
['CAGCA', -0.31955728098223], ['CAGCC', -0.458307589482544], ['CAGCG', -0.377907164044507], ['CAGCT', -0.530628251062171],
['CAGGA', -0.472359342938195], ['CAGGC', -0.462969602588355], ['CAGGG', -0.505935638471799], ['CAGGT', -0.669890318395678],
['CAGTA', -0.377907164044507], ['CAGTC', -0.435318071257845], ['CAGTG', -0.444450554821118], ['CAGTT', -0.699046902687133],
['CATAA', -0.0543940720657989], ['CATAC', -0.082742427070054], ['CATAG', -0.219473822425247], ['CATAT', -0.299516530098784],
['CATCA', -0.340007891453521], ['CATCC', -0.510825623765991], ['CATCG', -0.462969602588355], ['CATCT', -0.603198943897006],
['CATGA', -0.31955728098223], ['CATGC', -0.230523658611832], ['CATGG', -0.291611350591671], ['CATGT', -0.439873887793706],
['CATTA', -0.481838086892738], ['CATTC', -0.760041415389976], ['CATTG', -0.587198602550564], ['CATTT', -1.18455471846883],
['CCAAA', -0.26060111384911], ['CCAAC', -0.26060111384911], ['CCAAG', -0.303492678478423], ['CCAAT', -0.307484699747961],
['CCACA', -0.125163142954006], ['CCACC', -0.208544751893057], ['CCACG', -0.179971379449001], ['CCACT', -0.303492678478423],
['CCAGA', -0.208544751893057], ['CCAGC', -0.219473822425247], ['CCAGG', -0.268263986594679], ['CCAGT', -0.299516530098784],
['CCATA', -0.0988458346366327], ['CCATC', -0.219473822425247], ['CCATG', -0.291611350591671], ['CCATT', -0.399599988655766],
['CCCAA', -0.252996514463891], ['CCCAC', -0.264425210287514], ['CCCAG', -0.27211755591067], ['CCCAT', -0.352482065678696],
['CCCCA', -0.486611365645396], ['CCCCC', -0.55594605904646], ['CCCCG', -0.462969602588355], ['CCCCT', -0.624938930533412],
['CCCGA', -0.340007891453521], ['CCCGC', -0.307484699747961], ['CCCGG', -0.352482065678696], ['CCCGT', -0.472359342938195],
['CCCTA', -0.439873887793706], ['CCCTC', -0.540678586915672], ['CCCTG', -0.505935638471799], ['CCCTT', -0.741349282377823],
['CCGAA', -0.27598603268859], ['CCGAC', -0.215817511222137], ['CCGAG', -0.176456437341557], ['CCGAT', -0.32768740706548],
['CCGCA', -0.197733835788841], ['CCGCC', -0.3114927211455], ['CCGCG', -0.226826796730506], ['CCGCT', -0.369360103466048],
['CCGGA', -0.27211755591067], ['CCGGC', -0.283768173130644], ['CCGGG', -0.352482065678696], ['CCGGT', -0.365113812584597],
['CCGTA', -0.295556128882687], ['CCGTC', -0.390866308687012], ['CCGTG', -0.390866308687012], ['CCGTT', -0.624938930533412],
['CCTAA', -0.073203404023295], ['CCTAC', -0.138586163286147], ['CCTAG', -0.162518929497775], ['CCTAT', -0.194156014440958],
['CCTCA', -0.307484699747961], ['CCTCC', -0.467653451900782], ['CCTCG', -0.356674943938733], ['CCTCT', -0.449048264069748],
['CCTGA', -0.241696959209958], ['CCTGC', -0.22314355131421], ['CCTGG', -0.268263986594679], ['CCTGT', -0.3114927211455],
['CCTTA', -0.525640709551132], ['CCTTC', -0.619459464768786], ['CCTTG', -0.608589792531882], ['CCTTT', -1.00063188030791],
['CGAAA', -0.444450554821118], ['CGAAC', -0.390866308687012], ['CGAAG', -0.472359342938195], ['CGAAT', -0.491407537908889],
['CGACA', -0.315516871445225], ['CGACC', -0.520677920209002], ['CGACG', -0.352482065678696], ['CGACT', -0.641559811769452],
['CGAGA', -0.360885476475076], ['CGAGC', -0.348306694268216], ['CGAGG', -0.356674943938733], ['CGAGT', -0.467653451900782],
['CGATA', -0.299516530098784], ['CGATC', -0.453667209926042], ['CGATG', -0.462969602588355], ['CGATT', -0.658461622572055],
['CGCAA', -0.121835352861332], ['CGCAC', -0.0988458346366327], ['CGCAG', -0.169463401850586], ['CGCAT', -0.194156014440958],
['CGCCA', -0.408410618337921], ['CGCCC', -0.453667209926042], ['CGCCG', -0.395223614055968], ['CGCCT', -0.545741888872219],
['CGCGA', -0.26060111384911], ['CGCGC', -0.215817511222137], ['CGCGG', -0.226826796730506], ['CGCGT', -0.348306694268216],
['CGCTA', -0.340007891453521], ['CGCTC', -0.426268235737928], ['CGCTG', -0.377907164044507], ['CGCTT', -0.619459464768786],
['CGGAA', -0.395223614055968], ['CGGAC', -0.399599988655766], ['CGGAG', -0.430782916092454], ['CGGAT', -0.710951805193452],
['CGGCA', -0.360885476475076], ['CGGCC', -0.453667209926042], ['CGGCG', -0.395223614055968], ['CGGCT', -0.51573963856842],
['CGGGA', -0.458307589482544], ['CGGGC', -0.458307589482544], ['CGGGG', -0.462969602588355], ['CGGGT', -0.581921545449721],
['CGGTA', -0.453667209926042], ['CGGTC', -0.587198602550564], ['CGGTG', -0.472359342938195], ['CGGTT', -0.77908961036067],
['CGTAA', -0.0268272422331441], ['CGTAC', -0.082742427070054], ['CGTAG', -0.135213478807508], ['CGTAT', -0.219473822425247],
['CGTCA', -0.27211755591067], ['CGTCC', -0.426268235737928], ['CGTCG', -0.352482065678696], ['CGTCT', -0.51573963856842],
['CGTGA', -0.155622350438715], ['CGTGC', -0.145365850271525], ['CGTGG', -0.179971379449001], ['CGTGT', -0.241696959209958],
['CGTTA', -0.369360103466048], ['CGTTC', -0.561087458546879], ['CGTTG', -0.444450554821118], ['CGTTT', -0.735195416803445],
['CTAAA', -0.118518600235337], ['CTAAC', -0.125163142954006], ['CTAAG', -0.234234238008368], ['CTAAT', -0.204928111422868],
['CTACA', -0.145365850271525], ['CTACC', -0.212174519943636], ['CTACG', -0.135213478807508], ['CTACT', -0.348306694268216],
['CTAGA', -0.111917916203985], ['CTAGC', -0.169463401850586], ['CTAGG', -0.162518929497775], ['CTAGT', -0.377907164044507],
['CTATA', -0.128502044219521], ['CTATC', -0.215817511222137], ['CTATG', -0.219473822425247], ['CTATT', -0.608589792531882],
['CTCAA', -0.092373320131015], ['CTCAC', -0.125163142954006], ['CTCAG', -0.208544751893057], ['CTCAT', -0.335884174269658],
['CTCCA', -0.426268235737928], ['CTCCC', -0.505935638471799], ['CTCCG', -0.430782916092454], ['CTCCT', -0.619459464768786],
['CTCGA', -0.26060111384911], ['CTCGC', -0.245449308828508], ['CTCGG', -0.176456437341557], ['CTCGT', -0.377907164044507],
['CTCTA', -0.264425210287514], ['CTCTC', -0.467653451900782], ['CTCTG', -0.373624502252506], ['CTCTT', -0.592503654780258],
['CTGAA', -0.082742427070054], ['CTGAC', -0.169463401850586], ['CTGAG', -0.208544751893057], ['CTGAT', -0.291611350591671],
['CTGCA', -0.128502044219521], ['CTGCC', -0.264425210287514], ['CTGCG', -0.169463401850586], ['CTGCT', -0.365113812584597],
['CTGGA', -0.303492678478423], ['CTGGC', -0.256791585432443], ['CTGGG', -0.27211755591067], ['CTGGT', -0.352482065678696],
['CTGTA', -0.264425210287514], ['CTGTC', -0.323614081677844], ['CTGTG', -0.299516530098784], ['CTGTT', -0.458307589482544],
['CTTAA', -0.20132450391957], ['CTTAC', -0.187038546672093], ['CTTAG', -0.234234238008368], ['CTTAT', -0.23795863709935],
['CTTCA', -0.369360103466048], ['CTTCC', -0.619459464768786], ['CTTCG', -0.472359342938195], ['CTTCT', -0.693147180559945],
['CTTGA', -0.268263986594679], ['CTTGC', -0.249215791623985], ['CTTGG', -0.303492678478423], ['CTTGT', -0.352482065678696],
['CTTTA', -0.581921545449721], ['CTTTC', -0.77908961036067], ['CTTTG', -0.681451140796754], ['CTTTT', -1.351608803132],
['GAAAA', -0.785520500690961], ['GAAAC', -0.561087458546879], ['GAAAG', -0.77908961036067], ['GAAAT', -0.838513030831471],
['GAACA', -0.505935638471799], ['GAACC', -0.716957829253664], ['GAACG', -0.561087458546879], ['GAACT', -0.852211875189633],
['GAAGA', -0.681451140796754], ['GAAGC', -0.520677920209002], ['GAAGG', -0.619459464768786], ['GAAGT', -0.811665780795283],
['GAATA', -0.687282061107547], ['GAATC', -0.77908961036067], ['GAATG', -0.760041415389976], ['GAATT', -1.27506872600967],
['GACAA', -0.204928111422868], ['GACAC', -0.234234238008368], ['GACAG', -0.323614081677844], ['GACAT', -0.426268235737928],
['GACCA', -0.510825623765991], ['GACCC', -0.723000143709626], ['GACCG', -0.587198602550564], ['GACCT', -0.901691932453002],
['GACGA', -0.365113812584597], ['GACGC', -0.395223614055968], ['GACGG', -0.390866308687012], ['GACGT', -0.581921545449721],
['GACTA', -0.399599988655766], ['GACTC', -0.675654023112428], ['GACTG', -0.435318071257845], ['GACTT', -1.2856508353402],
['GAGAA', -0.597837000755621], ['GAGAC', -0.417299565755167], ['GAGAG', -0.467653451900782], ['GAGAT', -0.561087458546879],
['GAGCA', -0.390866308687012], ['GAGCC', -0.597837000755621], ['GAGCG', -0.426268235737928], ['GAGCT', -0.619459464768786],
['GAGGA', -0.530628251062171], ['GAGGC', -0.505935638471799], ['GAGGG', -0.540678586915672], ['GAGGT', -0.772699812261899],
['GAGTA', -0.587198602550564], ['GAGTC', -0.675654023112428], ['GAGTG', -0.540678586915672], ['GAGTT', -1.08401348924696],
['GATAA', -0.0956043407124617], ['GATAC', -0.176456437341557], ['GATAG', -0.215817511222137], ['GATAT', -0.268263986594679],
['GATCA', -0.545741888872219], ['GATCC', -0.619459464768786], ['GATCG', -0.453667209926042], ['GATCT', -0.77908961036067],
['GATGA', -0.399599988655766], ['GATGC', -0.27598603268859], ['GATGG', -0.219473822425247], ['GATGT', -0.520677920209002],
['GATTA', -0.525640709551132], ['GATTC', -0.77908961036067], ['GATTG', -0.525640709551132], ['GATTT', -0.946143695023836],
['GCAAA', -0.264425210287514], ['GCAAC', -0.159064694629687], ['GCAAG', -0.249215791623985], ['GCAAT', -0.369360103466048],
['GCACA', -0.0795526317019536], ['GCACC', -0.208544751893057], ['GCACG', -0.145365850271525], ['GCACT', -0.307484699747961],
['GCAGA', -0.141970261270387], ['GCAGC', -0.169463401850586], ['GCAGG', -0.22314355131421], ['GCAGT', -0.268263986594679],
['GCATA', -0.138586163286147], ['GCATC', -0.27598603268859], ['GCATG', -0.230523658611832], ['GCATT', -0.486611365645396],
['GCCAA', -0.208544751893057], ['GCCAC', -0.245449308828508], ['GCCAG', -0.256791585432443], ['GCCAT', -0.335884174269658],
['GCCCA', -0.453667209926042], ['GCCCC', -0.520677920209002], ['GCCCG', -0.458307589482544], ['GCCCT', -0.566255428705321],
['GCCGA', -0.256791585432443], ['GCCGC', -0.279869532714987], ['GCCGG', -0.283768173130644], ['GCCGT', -0.399599988655766],
['GCCTA', -0.449048264069748], ['GCCTC', -0.505935638471799], ['GCCTG', -0.462969602588355], ['GCCTT', -0.687282061107547],
['GCGAA', -0.219473822425247], ['GCGAC', -0.208544751893057], ['GCGAG', -0.245449308828508], ['GCGAT', -0.315516871445225],
['GCGCA', -0.155622350438715], ['GCGCC', -0.264425210287514], ['GCGCG', -0.215817511222137], ['GCGCT', -0.356674943938733],
['GCGGA', -0.31955728098223], ['GCGGC', -0.279869532714987], ['GCGGG', -0.307484699747961], ['GCGGT', -0.399599988655766],
['GCGTA', -0.299516530098784], ['GCGTC', -0.395223614055968], ['GCGTG', -0.32768740706548], ['GCGTT', -0.614009860001222],
['GCTAA', -0.118518600235337], ['GCTAC', -0.0988458346366327], ['GCTAG', -0.169463401850586], ['GCTAT', -0.279869532714987],
['GCTCA', -0.268263986594679], ['GCTCC', -0.412845215405787], ['GCTCG', -0.348306694268216], ['GCTCT', -0.417299565755167],
['GCTGA', -0.145365850271525], ['GCTGC', -0.169463401850586], ['GCTGG', -0.219473822425247], ['GCTGT', -0.303492678478423],
['GCTTA', -0.430782916092454], ['GCTTC', -0.520677920209002], ['GCTTG', -0.462969602588355], ['GCTTT', -1.01676126223779],
['GGAAA', -0.619459464768786], ['GGAAC', -0.510825623765991], ['GGAAG', -0.619459464768786], ['GGAAT', -0.587198602550564],
['GGACA', -0.426268235737928], ['GGACC', -0.530628251062171], ['GGACG', -0.426268235737928], ['GGACT', -0.687282061107547],
['GGAGA', -0.472359342938195], ['GGAGC', -0.412845215405787], ['GGAGG', -0.467653451900782], ['GGAGT', -0.566255428705321],
['GGATA', -0.444450554821118], ['GGATC', -0.619459464768786], ['GGATG', -0.510825623765991], ['GGATT', -0.894471684479515],
['GGCAA', -0.307484699747961], ['GGCAC', -0.172953806790354], ['GGCAG', -0.264425210287514], ['GGCAT', -0.295556128882687],
['GGCCA', -0.467653451900782], ['GGCCC', -0.545741888872219], ['GGCCG', -0.453667209926042], ['GGCCT', -0.571450245582426],
['GGCGA', -0.268263986594679], ['GGCGC', -0.264425210287514], ['GGCGG', -0.3114927211455], ['GGCGT', -0.399599988655766],
['GGCTA', -0.435318071257845], ['GGCTC', -0.597837000755621], ['GGCTG', -0.458307589482544], ['GGCTT', -0.908964691782082],
['GGGAA', -0.566255428705321], ['GGGAC', -0.481838086892738], ['GGGAG', -0.505935638471799], ['GGGAT', -0.525640709551132],
['GGGCA', -0.426268235737928], ['GGGCC', -0.545741888872219], ['GGGCG', -0.453667209926042], ['GGGCT', -0.630448586344381],
['GGGGA', -0.55083095837969], ['GGGGC', -0.520677920209002], ['GGGGG', -0.55594605904646], ['GGGGT', -0.647162067318122],
['GGGTA', -0.576672189563577], ['GGGTC', -0.723000143709626], ['GGGTG', -0.545741888872219], ['GGGTT', -1.01676126223779],
['GGTAA', -0.256791585432443], ['GGTAC', -0.111917916203985], ['GGTAG', -0.212174519943636], ['GGTAT', -0.245449308828508],
['GGTCA', -0.462969602588355], ['GGTCC', -0.530628251062171], ['GGTCG', -0.520677920209002], ['GGTCT', -0.566255428705321],
['GGTGA', -0.268263986594679], ['GGTGC', -0.208544751893057], ['GGTGG', -0.208544751893057], ['GGTGT', -0.303492678478423],
['GGTTA', -0.496226824344838], ['GGTTC', -0.716957829253664], ['GGTTG', -0.520677920209002], ['GGTTT', -0.992663710658729],
['GTAAA', -0.089152705430973], ['GTAAC', -0.105360515657826], ['GTAAG', -0.187038546672093], ['GTAAT', -0.252996514463891],
['GTACA', 0.0], ['GTACC', -0.111917916203985], ['GTACG', -0.082742427070054], ['GTACT', -0.215817511222137],
['GTAGA', -0.138586163286147], ['GTAGC', -0.0988458346366327], ['GTAGG', -0.138586163286147], ['GTAGT', -0.187038546672093],
['GTATA', -0.0575044944801913], ['GTATC', -0.176456437341557], ['GTATG', -0.082742427070054], ['GTATT', -0.307484699747961],
['GTCAA', -0.089152705430973], ['GTCAC', -0.0575044944801913], ['GTCAG', -0.169463401850586], ['GTCAT', -0.249215791623985],
['GTCCA', -0.352482065678696], ['GTCCC', -0.481838086892738], ['GTCCG', -0.399599988655766], ['GTCCT', -0.525640709551132],
['GTCGA', -0.179971379449001], ['GTCGC', -0.208544751893057], ['GTCGG', -0.215817511222137], ['GTCGT', -0.291611350591671],
['GTCTA', -0.299516530098784], ['GTCTC', -0.417299565755167], ['GTCTG', -0.340007891453521], ['GTCTT', -0.608589792531882],
['GTGAA', -0.0795526317019536], ['GTGAC', -0.0575044944801913], ['GTGAG', -0.125163142954006], ['GTGAT', -0.287682072451781],
['GTGCA', -0.0795526317019536], ['GTGCC', -0.172953806790354], ['GTGCG', -0.0988458346366327], ['GTGCT', -0.256791585432443],
['GTGGA', -0.20132450391957], ['GTGGC', -0.245449308828508], ['GTGGG', -0.264425210287514], ['GTGGT', -0.27598603268859],
['GTGTA', -0.145365850271525], ['GTGTC', -0.234234238008368], ['GTGTG', -0.14877300859314], ['GTGTT', -0.412845215405787],
['GTTAA', -0.108633841002796], ['GTTAC', -0.105360515657826], ['GTTAG', -0.125163142954006], ['GTTAT', -0.187038546672093],
['GTTCA', -0.287682072451781], ['GTTCC', -0.510825623765991], ['GTTCG', -0.390866308687012], ['GTTCT', -0.505935638471799],
['GTTGA', -0.176456437341557], ['GTTGC', -0.159064694629687], ['GTTGG', -0.26060111384911], ['GTTGT', -0.20132450391957],
['GTTTA', -0.390866308687012], ['GTTTC', -0.561087458546879], ['GTTTG', -0.458307589482544], ['GTTTT', -1.00063188030791],
['TAAAA', -0.486611365645396], ['TAAAC', -0.390866308687012], ['TAAAG', -0.581921545449721], ['TAAAT', -0.55594605904646],
['TAACA', -0.360885476475076], ['TAACC', -0.496226824344838], ['TAACG', -0.369360103466048], ['TAACT', -0.597837000755621],
['TAAGA', -0.467653451900782], ['TAAGC', -0.430782916092454], ['TAAGG', -0.525640709551132], ['TAAGT', -0.704981638206948],
['TAATA', -0.386527907088414], ['TAATC', -0.525640709551132], ['TAATG', -0.481838086892738], ['TAATT', -0.77908961036067],
['TACAA', -0.0956043407124617], ['TACAC', -0.145365850271525], ['TACAG', -0.264425210287514], ['TACAT', -0.31955728098223],
['TACCA', -0.377907164044507], ['TACCC', -0.576672189563577], ['TACCG', -0.453667209926042], ['TACCT', -0.798507696217772],
['TACGA', -0.190590948276461], ['TACGC', -0.299516530098784], ['TACGG', -0.295556128882687], ['TACGT', -0.510825623765991],
['TACTA', -0.295556128882687], ['TACTC', -0.587198602550564], ['TACTG', -0.377907164044507], ['TACTT', -1.01676126223779],
['TAGAA', -0.295556128882687], ['TAGAC', -0.299516530098784], ['TAGAG', -0.264425210287514], ['TAGAT', -0.561087458546879],
['TAGCA', -0.283768173130644], ['TAGCC', -0.435318071257845], ['TAGCG', -0.340007891453521], ['TAGCT', -0.491407537908889],
['TAGGA', -0.444450554821118], ['TAGGC', -0.449048264069748], ['TAGGG', -0.439873887793706], ['TAGGT', -0.592503654780258],
['TAGTA', -0.295556128882687], ['TAGTC', -0.399599988655766], ['TAGTG', -0.386527907088414], ['TAGTT', -0.859132318034206],
['TATAA', -0.0389854467129537], ['TATAC', -0.0575044944801913], ['TATAG', -0.128502044219521], ['TATAT', -0.138586163286147],
['TATCA', -0.27598603268859], ['TATCC', -0.444450554821118], ['TATCG', -0.299516530098784], ['TATCT', -0.571450245582426],
['TATGA', -0.111917916203985], ['TATGC', -0.138586163286147], ['TATGG', -0.0988458346366327], ['TATGT', -0.373624502252506],
['TATTA', -0.386527907088414], ['TATTC', -0.687282061107547], ['TATTG', -0.426268235737928], ['TATTT', -1.23382576747562],
['TCAAA', -0.194156014440958], ['TCAAC', -0.176456437341557], ['TCAAG', -0.268263986594679], ['TCAAT', -0.245449308828508],
['TCACA', -0.111917916203985], ['TCACC', -0.268263986594679], ['TCACG', -0.155622350438715], ['TCACT', -0.340007891453521],
['TCAGA', -0.241696959209958], ['TCAGC', -0.145365850271525], ['TCAGG', -0.241696959209958], ['TCAGT', -0.23795863709935],
['TCATA', -0.111917916203985], ['TCATC', -0.399599988655766], ['TCATG', -0.31955728098223], ['TCATT', -0.716957829253664],
['TCCAA', -0.26060111384911], ['TCCAC', -0.20132450391957], ['TCCAG', -0.303492678478423], ['TCCAT', -0.291611350591671],
['TCCCA', -0.467653451900782], ['TCCCC', -0.55083095837969], ['TCCCG', -0.458307589482544], ['TCCCT', -0.592503654780258],
['TCCGA', -0.283768173130644], ['TCCGC', -0.31955728098223], ['TCCGG', -0.27211755591067], ['TCCGT', -0.408410618337921],
['TCCTA', -0.444450554821118], ['TCCTC', -0.530628251062171], ['TCCTG', -0.472359342938195], ['TCCTT', -0.811665780795283],
['TCGAA', -0.141970261270387], ['TCGAC', -0.179971379449001], ['TCGAG', -0.26060111384911], ['TCGAT', -0.335884174269658],
['TCGCA', -0.165985137474261], ['TCGCC', -0.268263986594679], ['TCGCG', -0.26060111384911], ['TCGCT', -0.352482065678696],
['TCGGA', -0.283768173130644], ['TCGGC', -0.256791585432443], ['TCGGG', -0.340007891453521], ['TCGGT', -0.426268235737928],
['TCGTA', -0.190590948276461], ['TCGTC', -0.365113812584597], ['TCGTG', -0.3114927211455], ['TCGTT', -0.530628251062171],
['TCTAA', -0.0606246218164349], ['TCTAC', -0.138586163286147], ['TCTAG', -0.111917916203985], ['TCTAT', -0.162518929497775],
['TCTCA', -0.3114927211455], ['TCTCC', -0.472359342938195], ['TCTCG', -0.360885476475076], ['TCTCT', -0.435318071257845],
['TCTGA', -0.241696959209958], ['TCTGC', -0.141970261270387], ['TCTGG', -0.208544751893057], ['TCTGT', -0.295556128882687],
['TCTTA', -0.467653451900782], ['TCTTC', -0.681451140796754], ['TCTTG', -0.472359342938195], ['TCTTT', -1.02492457287695],
['TGAAA', -0.268263986594679], ['TGAAC', -0.287682072451781], ['TGAAG', -0.369360103466048], ['TGAAT', -0.340007891453521],
['TGACA', -0.256791585432443], ['TGACC', -0.462969602588355], ['TGACG', -0.27211755591067], ['TGACT', -0.486611365645396],
['TGAGA', -0.3114927211455], ['TGAGC', -0.268263986594679], ['TGAGG', -0.307484699747961], ['TGAGT', -0.486611365645396],
['TGATA', -0.27598603268859], ['TGATC', -0.545741888872219], ['TGATG', -0.340007891453521], ['TGATT', -0.716957829253664],
['TGCAA', -0.131852131104803], ['TGCAC', -0.0795526317019536], ['TGCAG', -0.128502044219521], ['TGCAT', -0.212174519943636],
['TGCCA', -0.331777392317005], ['TGCCC', -0.426268235737928], ['TGCCG', -0.360885476475076], ['TGCCT', -0.520677920209002],
['TGCGA', -0.165985137474261], ['TGCGC', -0.155622350438715], ['TGCGG', -0.197733835788841], ['TGCGT', -0.31955728098223],
['TGCTA', -0.283768173130644], ['TGCTC', -0.390866308687012], ['TGCTG', -0.31955728098223], ['TGCTT', -0.630448586344381],
['TGGAA', -0.449048264069748], ['TGGAC', -0.352482065678696], ['TGGAG', -0.426268235737928], ['TGGAT', -0.462969602588355],
['TGGCA', -0.331777392317005], ['TGGCC', -0.467653451900782], ['TGGCG', -0.408410618337921], ['TGGCT', -0.675654023112428],
['TGGGA', -0.467653451900782], ['TGGGC', -0.453667209926042], ['TGGGG', -0.486611365645396], ['TGGGT', -0.603198943897006],
['TGGTA', -0.377907164044507], ['TGGTC', -0.510825623765991], ['TGGTG', -0.510825623765991], ['TGGTT', -0.798507696217772],
['TGTAA', -0.0637545148253625], ['TGTAC', 0.0], ['TGTAG', -0.145365850271525], ['TGTAT', -0.118518600235337],
['TGTCA', -0.256791585432443], ['TGTCC', -0.426268235737928], ['TGTCG', -0.315516871445225], ['TGTCT', -0.439873887793706],
['TGTGA', -0.111917916203985], ['TGTGC', -0.0795526317019536], ['TGTGG', -0.125163142954006], ['TGTGT', -0.230523658611832],
['TGTTA', -0.360885476475076], ['TGTTC', -0.505935638471799], ['TGTTG', -0.331777392317005], ['TGTTT', -0.608589792531882],
['TTAAA', -0.14877300859314], ['TTAAC', -0.108633841002796], ['TTAAG', -0.20132450391957], ['TTAAT', -0.20132450391957],
['TTACA', -0.0637545148253625], ['TTACC', -0.256791585432443], ['TTACG', -0.0268272422331441], ['TTACT', -0.279869532714987],
['TTAGA', -0.0606246218164349], ['TTAGC', -0.118518600235337], ['TTAGG', -0.073203404023295], ['TTAGT', -0.14877300859314],
['TTATA', -0.0389854467129537], ['TTATC', -0.0956043407124617], ['TTATG', -0.0543940720657989], ['TTATT', -0.219473822425247],
['TTCAA', -0.0606246218164349], ['TTCAC', -0.0795526317019536], ['TTCAG', -0.082742427070054], ['TTCAT', -0.230523658611832],
['TTCCA', -0.449048264069748], ['TTCCC', -0.566255428705321], ['TTCCG', -0.395223614055968], ['TTCCT', -0.55083095837969],
['TTCGA', -0.141970261270387], ['TTCGC', -0.219473822425247], ['TTCGG', -0.27598603268859], ['TTCGT', -0.344148684119552],
['TTCTA', -0.295556128882687], ['TTCTC', -0.597837000755621], ['TTCTG', -0.373624502252506], ['TTCTT', -0.530628251062171],
['TTGAA', -0.0606246218164349], ['TTGAC', -0.089152705430973], ['TTGAG', -0.092373320131015], ['TTGAT', -0.22314355131421],
['TTGCA', -0.131852131104803], ['TTGCC', -0.307484699747961], ['TTGCG', -0.121835352861332], ['TTGCT', -0.399599988655766],
['TTGGA', -0.26060111384911], ['TTGGC', -0.208544751893057], ['TTGGG', -0.252996514463891], ['TTGGT', -0.399599988655766],
['TTGTA', -0.0956043407124617], ['TTGTC', -0.204928111422868], ['TTGTG', -0.348306694268216], ['TTGTT', -0.449048264069748],
['TTTAA', -0.14877300859314], ['TTTAC', -0.089152705430973], ['TTTAG', -0.118518600235337], ['TTTAT', -0.172953806790354],
['TTTCA', -0.268263986594679], ['TTTCC', -0.619459464768786], ['TTTCG', -0.444450554821118], ['TTTCT', -0.561087458546879],
['TTTGA', -0.194156014440958], ['TTTGC', -0.264425210287514], ['TTTGG', -0.26060111384911], ['TTTGT', -0.344148684119552],
['TTTTA', -0.486611365645396], ['TTTTC', -0.785520500690961], ['TTTTG', -0.55083095837969], ['TTTTT', -1.76850260706379]]
temp = np.array(MGW)
N_MGW = temp[:,1].astype(float).min()
####### ProT
PROT = [['AAAAA', -7.4079243225598], ['AAAAC', -2.08491434342119], ['AAAAG', -2.19298856495061], ['AAAAT', -2.31417412175284],
['AAACA', -1.55859954261274], ['AAACC', -1.38933110806336], ['AAACG', -1.58784139220724], ['AAACT', -1.48366852514507],
['AAAGA', -1.04317356570769], ['AAAGC', -1.02104500319695], ['AAAGG', -0.81351086280982], ['AAAGT', -1.22583941584297],
['AAATA', -1.44434497894115], ['AAATC', -1.35583515363518], ['AAATG', -1.24250646832818], ['AAATT', -2.35167851721129],
['AACAA', -0.954299323666907], ['AACAC', -0.839846411147623], ['AACAG', -0.820374307734803], ['AACAT', -0.962204503174021],
['AACCA', -0.766742152819007], ['AACCC', -0.623467259921955], ['AACCG', -0.656822853622839], ['AACCT', -0.678100252070124],
['AACGA', -0.871232724968294], ['AACGC', -0.914170482707913], ['AACGG', -0.934033626207324], ['AACGT', -0.989559386623388],
['AACTA', -0.456152158160688], ['AACTC', -0.378836758409937], ['AACTG', -0.395809028253219], ['AACTT', -0.474501296828884],
['AAGAA', -0.233200012723223], ['AAGAC', -0.261152143106962], ['AAGAG', -0.214990101343799], ['AAGAT', -0.240115138243155],
['AAGCA', -0.0913761453766228], ['AAGCC', -0.0967059381399706], ['AAGCG', -0.121047910808899], ['AAGCT', -0.0973741640251764],
['AAGGA', -0.082116819963826], ['AAGGC', -0.0933714902355187], ['AAGGG', -0.00669305814658414], ['AAGGT', -0.0920408180498138],
['AAGTA', -0.296412206063442], ['AAGTC', -0.281033513660791], ['AAGTG', -0.314519696690832], ['AAGTT', -0.474501296828884],
['AATAA', -0.875590030337249], ['AATAC', -0.999395531500101], ['AATAG', -1.21148019476508], ['AATAT', -0.996106054849702],
['AATCA', -0.914170482707913], ['AATCC', -0.878505484297374], ['AATCG', -0.855416435525009], ['AATCT', -0.97659324062612],
['AATGA', -0.787851116029243], ['AATGC', -0.866894323369696], ['AATGG', -0.782531954551643], ['AATGT', -1.062287961731],
['AATTA', -1.33949873431549], ['AATTC', -1.1275284835994], ['AATTG', -1.27887411249905], ['AATTT', -2.35167851721129],
['ACAAA', -1.30090143481734], ['ACAAC', -1.229980208509], ['ACAAG', -1.04489621901913], ['ACAAT', -1.28105513844541],
['ACACA', -0.830062964838552], ['ACACC', -0.729582207905267], ['ACACG', -0.844068796027471], ['ACACT', -0.718325053380633],
['ACAGA', -0.549359287768234], ['ACAGC', -0.585726931939108], ['ACAGG', -0.526513018917064], ['ACAGT', -0.674522430722239],
['ACATA', -0.723312594891672], ['ACATC', -0.739696074142196], ['ACATG', -0.951154666987436], ['ACATT', -1.062287961731],
['ACCAA', -0.332115458581212], ['ACCAC', -0.358669481303762], ['ACCAG', -0.311202944064839], ['ACCAT', -0.302138193078327],
['ACCCA', -0.227093123515043], ['ACCCC', -0.153039512482261], ['ACCCG', -0.238574305888999], ['ACCCT', -0.170865296434861],
['ACCGA', -0.298862186872426], ['ACCGC', -0.320350617001626], ['ACCGG', -0.361277044710843], ['ACCGT', -0.340604473906123],
['ACCTA', -0.096038158482434], ['ACCTC', -0.0529224014543424], ['ACCTG', -0.105427898832273], ['ACCTT', -0.0920408180498138],
['ACGAA', -0.514267967956964], ['ACGAC', -0.509209788229611], ['ACGAG', -0.498171040914789], ['ACGAT', -0.533725827106305],
['ACGCA', -0.295596877848687], ['ACGCC', -0.246302319620411], ['ACGCG', -0.335502422022228], ['ACGCT', -0.269851288515252],
['ACGGA', -0.225572210674336], ['ACGGC', -0.273033470993715], ['ACGGG', -0.184628642997284], ['ACGGT', -0.340604473906123],
['ACGTA', -0.650991933312045], ['ACGTC', -0.650991933312045], ['ACGTG', -0.756352448969872], ['ACGTT', -0.989559386623388],
['ACTAA', -0.672144308317272], ['ACTAC', -0.679295709474897], ['ACTAG', -0.596679943958306], ['ACTAT', -0.691329549038621],
['ACTCA', -0.603309802496975], ['ACTCC', -0.523437670516816], ['ACTCG', -0.634843946904063], ['ACTCT', -0.640581197294207],
['ACTGA', -0.559919047983236], ['ACTGC', -0.635988766719996], ['ACTGG', -0.574892589773398], ['ACTGT', -0.674522430722239],
['ACTTA', -0.92176353361551], ['ACTTC', -0.762833353053955], ['ACTTG', -0.960618460018386], ['ACTTT', -1.22583941584297],
['AGAAA', -1.016007209167], ['AGAAC', -1.02272992356187], ['AGAAG', -0.974984229820419], ['AGAAT', -0.951154666987436],
['AGACA', -0.719569608612837], ['AGACC', -0.665043686767696], ['AGACG', -0.760235948996269], ['AGACT', -0.638282345707096],
['AGAGA', -0.510219379430963], ['AGAGC', -0.498171040914789], ['AGAGG', -0.48626613840847], ['AGAGT', -0.640581197294207],
['AGATA', -0.687704167424304], ['AGATC', -0.687704167424304], ['AGATG', -0.642885345779057], ['AGATT', -0.97659324062612],
['AGCAA', -0.225572210674336], ['AGCAC', -0.258792724002192], ['AGCAG', -0.243977638217052], ['AGCAT', -0.250188838309692],
['AGCCA', -0.18098830406631], ['AGCCC', -0.160843737973842], ['AGCCG', -0.201547031087346], ['AGCCT', -0.150921615467526],
['AGCGA', -0.240886445647379], ['AGCGC', -0.265096921397978], ['AGCGG', -0.25800748642749], ['AGCGT', -0.269851288515252],
['AGCTA', -0.0801437841379677], ['AGCTC', -0.0497305698265662], ['AGCTG', -0.0854138885622047], ['AGCTT', -0.0973741640251764],
['AGGAA', -0.348306694268216], ['AGGAC', -0.393109971284054], ['AGGAG', -0.342310958961881], ['AGGAT', -0.384165367821156],
['AGGCA', -0.153746476103081], ['AGGCC', -0.141096975039008], ['AGGCG', -0.185358303737428], ['AGGCT', -0.150921615467526],
['AGGGA', -0.110833317399181], ['AGGGC', -0.118996628038342], ['AGGGG', -0.0814587087192768], ['AGGGT', -0.170865296434861],
['AGGTA', -0.396710335209232], ['AGGTC', -0.418589056585039], ['AGGTG', -0.512241624811731], ['AGGTT', -0.678100252070124],
['AGTAA', -0.608868460500803], ['AGTAC', -0.679295709474897], ['AGTAG', -0.679295709474897], ['AGTAT', -0.685294527704151],
['AGTCA', -0.673332662586651], ['AGTCC', -0.585726931939108], ['AGTCG', -0.678100252070124], ['AGTCT', -0.638282345707096],
['AGTGA', -0.559919047983236], ['AGTGC', -0.623467259921955], ['AGTGG', -0.592284332485267], ['AGTGT', -0.718325053380633],
['AGTTA', -1.04835045388722], ['AGTTC', -0.999395531500101], ['AGTTG', -1.14834285849468], ['AGTTT', -1.48366852514507],
['ATAAA', -1.3796458023289], ['ATAAC', -1.00932938802439], ['ATAAG', -1.01433356860897], ['ATAAT', -1.14452605996797],
['ATACA', -0.775922545163969], ['ATACC', -0.703509967595492], ['ATACG', -0.791859137426782], ['ATACT', -0.685294527704151],
['ATAGA', -0.552515523949671], ['ATAGC', -0.532692235283022], ['ATAGG', -0.535796221220613], ['ATAGT', -0.691329549038621],
['ATATA', -0.77856106912215], ['ATATC', -0.696183927503419], ['ATATG', -0.699840238706529], ['ATATT', -0.996106054849702],
['ATCAA', -0.562044447295549], ['ATCAC', -0.501169543911045], ['ATCAG', -0.503173552597761], ['ATCAT', -0.463837114330072],
['ATCCA', -0.421357863153172], ['ATCCC', -0.350887340861707], ['ATCCG', -0.31535060658492], ['ATCCT', -0.384165367821156],
['ATCGA', -0.557798156414098], ['ATCGC', -0.545166409508198], ['ATCGG', -0.565241040321177], ['ATCGT', -0.533725827106305],
['ATCTA', -0.213487471459264], ['ATCTC', -0.182442849777304], ['ATCTG', -0.176637318231984], ['ATCTT', -0.240115138243155],
['ATGAA', -0.456152158160688], ['ATGAC', -0.485280431083711], ['ATGAG', -0.454240111689062], ['ATGAT', -0.463837114330072],
['ATGCA', -0.254090520980756], ['ATGCC', -0.207499429614642], ['ATGCG', -0.277825812434021], ['ATGCT', -0.250188838309692],
['ATGGA', -0.226332377947734], ['ATGGC', -0.207499429614642], ['ATGGG', -0.153039512482261], ['ATGGT', -0.302138193078327],
['ATGTA', -0.592284332485267], ['ATGTC', -0.559919047983236], ['ATGTG', -0.762833353053955], ['ATGTT', -0.962204503174021],
['ATTAA', -1.15409551098413], ['ATTAC', -1.15794908030012], ['ATTAG', -1.20538880537168], ['ATTAT', -1.14452605996797],
['ATTCA', -1.06404388843327], ['ATTCC', -0.869784498791929], ['ATTCG', -1.01768365549425], ['ATTCT', -0.951154666987436],
['ATTGA', -1.0799875388304], ['ATTGC', -1.02441768767559], ['ATTGG', -1.04489621901913], ['ATTGT', -1.28105513844541],
['ATTTA', -1.82819449657338], ['ATTTC', -1.31435455251446], ['ATTTG', -1.65218210897269], ['ATTTT', -2.31417412175284],
['CAAAA', -1.79845252737464], ['CAAAC', -1.57311358549699], ['CAAAG', -1.50256247450503], ['CAAAT', -1.65218210897269],
['CAACA', -1.22790766890703], ['CAACC', -0.286671869315058], ['CAACG', -1.23205705245384], ['CAACT', -1.14834285849468],
['CAAGA', -0.801274136361383], ['CAAGC', -0.737158001713725], ['CAAGG', -0.806694203830723], ['CAAGT', -0.960618460018386],
['CAATA', -0.882894664716137], ['CAATC', -0.979819049875003], ['CAATG', -0.994465365392242], ['CAATT', -1.27887411249905],
['CACAA', -0.687704167424304], ['CACAC', -0.679295709474897], ['CACAG', -0.674522430722239], ['CACAT', -0.762833353053955],
['CACCA', -0.549359287768234], ['CACCC', -0.439073944217651], ['CACCG', -0.504177064975001], ['CACCT', -0.512241624811731],
['CACGA', -0.654486403961819], ['CACGC', -0.657993128771029], ['CACGG', -0.715840580052971], ['CACGT', -0.756352448969872],
['CACTA', -0.294782213852511], ['CACTC', -0.221780018037274], ['CACTG', -0.317847486783507], ['CACTT', -0.314519696690832],
['CAGAA', -0.237804779109971], ['CAGAC', -0.273033470993715], ['CAGAG', -0.203031812354926], ['CAGAT', -0.176637318231984],
['CAGCA', -0.108126955801438], ['CAGCC', -0.0735949722590621], ['CAGCG', -0.108126955801438], ['CAGCT', -0.0854138885622047],
['CAGGA', -0.0542019921599675], ['CAGGC', -0.0788305863130073], ['CAGGG', -0.00669305814658414], ['CAGGT', -0.105427898832273],
['CAGTA', -0.291530178466134], ['CAGTC', -0.272236975531455], ['CAGTG', -0.317847486783507], ['CAGTT', -0.395809028253219],
['CATAA', -0.667404962953376], ['CATAC', -0.710890074893114], ['CATAG', -0.68409188173839], ['CATAT', -0.699840238706529],
['CATCA', -0.675713616092393], ['CATCC', -0.620079340250019], ['CATCG', -0.6986199823013], ['CATCT', -0.642885345779057],
['CATGA', -0.820374307734803], ['CATGC', -0.844068796027471], ['CATGG', -0.786518670795464], ['CATGT', -0.951154666987436],
['CATTA', -1.01099466734345], ['CATTC', -0.844068796027471], ['CATTG', -0.994465365392242], ['CATTT', -1.24250646832818],
['CCAAA', -0.828673110549498], ['CCAAC', -0.897665982036449], ['CCAAG', -0.884362016410087], ['CCAAT', -1.04489621901913],
['CCACA', -0.584638200203912], ['CCACC', -0.560981182974219], ['CCACG', -0.64519481562772], ['CCACT', -0.592284332485267],
['CCAGA', -0.458067867558826], ['CCAGC', -0.437194244416074], ['CCAGG', -0.406678700490123], ['CCAGT', -0.574892589773398],
['CCATA', -0.517315202412432], ['CCATC', -0.601094962167422], ['CCATG', -0.786518670795464], ['CCATT', -0.782531954551643],
['CCCAA', -0.232434608935377], ['CCCAC', -0.194156014440957], ['CCCAG', -0.160131740791753], ['CCCAT', -0.153039512482261],
['CCCCA', -0.128605487144979], ['CCCCC', -0.0625594821427302], ['CCCCG', -0.123789516364394], ['CCCCT', -0.0814587087192768],
['CCCGA', -0.192684343829501], ['CCCGC', -0.180261823830944], ['CCCGG', -0.218002151813791], ['CCCGT', -0.184628642997284],
['CCCTA', -0.0326685445498448], ['CCCTC', 0.0], ['CCCTG', -0.00669305814658414], ['CCCTT', -0.00669305814658414],
['CCGAA', -0.457109554117015], ['CCGAC', -0.522414652524782], ['CCGAG', -0.460948330424181], ['CCGAT', -0.565241040321177],
['CCGCA', -0.285864440930456], ['CCGCC', -0.208245976868426], ['CCGCG', -0.295596877848687], ['CCGCT', -0.25800748642749],
['CCGGA', -0.21273700238089], ['CCGGC', -0.242430847498753], ['CCGGG', -0.218002151813791], ['CCGGT', -0.361277044710843],
['CCGTA', -0.518333014205133], ['CCGTC', -0.565241040321177], ['CCGTG', -0.715840580052971], ['CCGTT', -0.934033626207324],
['CCTAA', -0.427848381997836], ['CCTAC', -0.46770185343996], ['CCTAG', -0.499169543244378], ['CCTAT', -0.535796221220613],
['CCTCA', -0.484295694421172], ['CCTCC', -0.42598964540321], ['CCTCG', -0.480366416281282], ['CCTCT', -0.48626613840847],
['CCTGA', -0.385947899487439], ['CCTGC', -0.441900135453486], ['CCTGG', -0.406678700490123], ['CCTGT', -0.526513018917064],
['CCTTA', -0.675713616092393], ['CCTTC', -0.590093751105449], ['CCTTG', -0.806694203830723], ['CCTTT', -0.81351086280982],
['CGAAA', -1.02104500319695], ['CGAAC', -1.00435012462478], ['CGAAG', -0.897665982036449], ['CGAAT', -1.01768365549425],
['CGACA', -0.690119627535908], ['CGACC', -0.670957364557743], ['CGACG', -0.719569608612837], ['CGACT', -0.678100252070124],
['CGAGA', -0.52856851809916], ['CGAGC', -0.526513018917064], ['CGAGG', -0.480366416281282], ['CGAGT', -0.634843946904063],
['CGATA', -0.617827087045694], ['CGATC', -0.635988766719996], ['CGATG', -0.6986199823013], ['CGATT', -0.855416435525009],
['CGCAA', -0.286671869315058], ['CGCAC', -0.285864440930456], ['CGCAG', -0.26351714223846], ['CGCAT', -0.277825812434021],
['CGCCA', -0.197105869087379], ['CGCCC', -0.190480890863066], ['CGCCG', -0.211237751725249], ['CGCCT', -0.185358303737428],
['CGCGA', -0.297228199580772], ['CGCGC', -0.293154874193136], ['CGCGG', -0.295596877848687], ['CGCGT', -0.335502422022228],
['CGCTA', -0.10206428987559], ['CGCTC', -0.0814587087192768], ['CGCTG', -0.108126955801438], ['CGCTT', -0.121047910808899],
['CGGAA', -0.390418179618343], ['CGGAC', -0.456152158160688], ['CGGAG', -0.364764406571259], ['CGGAT', -0.31535060658492],
['CGGCA', -0.20675343927792], ['CGGCC', -0.178085544408349], ['CGGCG', -0.211237751725249], ['CGGCT', -0.201547031087346],
['CGGGA', -0.166558039237281], ['CGGGC', -0.178810444766297], ['CGGGG', -0.123789516364394], ['CGGGT', -0.238574305888999],
['CGGTA', -0.448525810425623], ['CGGTC', -0.448525810425623], ['CGGTG', -0.504177064975001], ['CGGTT', -0.656822853622839],
['CGTAA', -0.626866696623419], ['CGTAC', -0.764134589411927], ['CGTAG', -0.712125405501107], ['CGTAT', -0.791859137426782],
['CGTCA', -0.717082045141036], ['CGTCC', -0.673332662586651], ['CGTCG', -0.719569608612837], ['CGTCT', -0.760235948996269],
['CGTGA', -0.669771827963642], ['CGTGC', -0.708423982397921], ['CGTGG', -0.64519481562772], ['CGTGT', -0.844068796027471],
['CGTTA', -1.15217428080623], ['CGTTC', -1.10530534681469], ['CGTTG', -1.23205705245384], ['CGTTT', -1.58784139220724],
['CTAAA', -1.1275284835994], ['CTAAC', -1.0328995027315], ['CTAAG', -0.97659324062612], ['CTAAT', -1.20538880537168],
['CTACA', -0.733362930745173], ['CTACC', -0.668587695202425], ['CTACG', -0.712125405501107], ['CTACT', -0.679295709474897],
['CTAGA', -0.547260651111312], ['CTAGC', -0.502171046248135], ['CTAGG', -0.499169543244378], ['CTAGT', -0.596679943958306],
['CTATA', -0.624599121955639], ['CTATC', -0.769356533393079], ['CTATG', -0.68409188173839], ['CTATT', -1.21148019476508],
['CTCAA', -0.484295694421172], ['CTCAC', -0.498171040914789], ['CTCAG', -0.457109554117015], ['CTCAT', -0.454240111689062],
['CTCCA', -0.398515389850962], ['CTCCC', -0.324536474934304], ['CTCCG', -0.364764406571259], ['CTCCT', -0.342310958961881],
['CTCGA', -0.520371750894981], ['CTCGC', -0.479386504394933], ['CTCGG', -0.460948330424181], ['CTCGT', -0.498171040914789],
['CTCTA', -0.214990101343799], ['CTCTC', -0.165126399765843], ['CTCTG', -0.203031812354926], ['CTCTT', -0.214990101343799],
['CTGAA', -0.444734336689361], ['CTGAC', -0.49120930220599], ['CTGAG', -0.457109554117015], ['CTGAT', -0.503173552597761],
['CTGCA', -0.250188838309692], ['CTGCC', -0.213487471459264], ['CTGCG', -0.26351714223846], ['CTGCT', -0.243977638217052],
['CTGGA', -0.197105869087379], ['CTGGC', -0.224053607497146], ['CTGGG', -0.160131740791753], ['CTGGT', -0.311202944064839],
['CTGTA', -0.497173534597663], ['CTGTC', -0.536833027949053], ['CTGTG', -0.674522430722239], ['CTGTT', -0.820374307734803],
['CTTAA', -0.864012476994807], ['CTTAC', -0.97659324062612], ['CTTAG', -0.97659324062612], ['CTTAT', -1.01433356860897],
['CTTCA', -0.9653841560914], ['CTTCC', -0.845480228865879], ['CTTCG', -0.897665982036449], ['CTTCT', -0.974984229820419],
['CTTGA', -0.9248169711024], ['CTTGC', -0.912658766622591], ['CTTGG', -0.884362016410087], ['CTTGT', -1.04489621901913],
['CTTTA', -1.50256247450503], ['CTTTC', -1.27887411249905], ['CTTTG', -1.50256247450503], ['CTTTT', -2.19298856495061],
['GAAAA', -1.59081316259639], ['GAAAC', -1.4783351791697], ['GAAAG', -1.27887411249905], ['GAAAT', -1.31435455251446],
['GAACA', -1.0782034170369], ['GAACC', -0.940225596455245], ['GAACG', -1.10530534681469], ['GAACT', -0.999395531500101],
['GAAGA', -0.681690920200852], ['GAAGC', -0.661512193986225], ['GAAGG', -0.590093751105449], ['GAAGT', -0.762833353053955],
['GAATA', -0.92941468035103], ['GAATC', -0.848309085066357], ['GAATG', -0.844068796027471], ['GAATT', -1.1275284835994],
['GACAA', -0.601094962167422], ['GACAC', -0.575970756993744], ['GACAG', -0.536833027949053], ['GACAT', -0.559919047983236],
['GACCA', -0.465767616860129], ['GACCC', -0.387733614247674], ['GACCG', -0.448525810425623], ['GACCT', -0.418589056585039],
['GACGA', -0.560981182974219], ['GACGC', -0.590093751105449], ['GACGG', -0.565241040321177], ['GACGT', -0.650991933312045],
['GACTA', -0.26747127945844], ['GACTC', -0.20675343927792], ['GACTG', -0.272236975531455], ['GACTT', -0.281033513660791],
['GAGAA', -0.155161904506411], ['GAGAC', -0.233200012723223], ['GAGAG', -0.165126399765843], ['GAGAT', -0.182442849777304],
['GAGCA', -0.0567660961289052], ['GAGCC', -0.0307906097256446], ['GAGCG', -0.0814587087192768], ['GAGCT', -0.0497305698265662],
['GAGGA', -0.0251778728206872], ['GAGGC', -0.0414791742319996], ['GAGGG', 0.0], ['GAGGT', -0.0529224014543424],
['GAGTA', -0.202289146149235], ['GAGTC', -0.20675343927792], ['GAGTG', -0.221780018037274], ['GAGTT', -0.378836758409937],
['GATAA', -0.621207371954518], ['GATAC', -0.667404962953376], ['GATAG', -0.769356533393079], ['GATAT', -0.696183927503419],
['GATCA', -0.638282345707096], ['GATCC', -0.609983909584668], ['GATCG', -0.635988766719996], ['GATCT', -0.687704167424304],
['GATGA', -0.589000257284078], ['GATGC', -0.614458189979589], ['GATGG', -0.601094962167422], ['GATGT', -0.739696074142196],
['GATTA', -0.966977781926678], ['GATTC', -0.848309085066357], ['GATTG', -0.979819049875003], ['GATTT', -1.35583515363518],
['GCAAA', -1.07287007106154], ['GCAAC', -1.0500820560515], ['GCAAG', -0.912658766622591], ['GCAAT', -1.02441768767559],
['GCACA', -0.696183927503419], ['GCACC', -0.647509631476171], ['GCACG', -0.708423982397921], ['GCACT', -0.623467259921955],
['GCAGA', -0.517315202412432], ['GCAGC', -0.524461736146506], ['GCAGG', -0.441900135453486], ['GCAGT', -0.635988766719996],
['GCATA', -0.673332662586651], ['GCATC', -0.614458189979589], ['GCATG', -0.844068796027471], ['GCATT', -0.866894323369696],
['GCCAA', -0.26351714223846], ['GCCAC', -0.277825812434021], ['GCCAG', -0.224053607497146], ['GCCAT', -0.207499429614642],
['GCCCA', -0.181715312458927], ['GCCCC', -0.136220615672231], ['GCCCG', -0.178810444766297], ['GCCCT', -0.118996628038342],
['GCCGA', -0.228616353055564], ['GCCGC', -0.254090520980756], ['GCCGG', -0.242430847498753], ['GCCGT', -0.273033470993715],
['GCCTA', -0.0755551166305366], ['GCCTC', -0.0414791742319996], ['GCCTG', -0.0788305863130073], ['GCCTT', -0.0933714902355187],
['GCGAA', -0.457109554117015], ['GCGAC', -0.538909871893892], ['GCGAG', -0.479386504394933], ['GCGAT', -0.545166409508198],
['GCGCA', -0.270645885299213], ['GCGCC', -0.227854448256803], ['GCGCG', -0.293154874193136], ['GCGCT', -0.265096921397978],
['GCGGA', -0.209740745457656], ['GCGGC', -0.254090520980756], ['GCGGG', -0.180261823830944], ['GCGGT', -0.320350617001626],
['GCGTA', -0.572739736412297], ['GCGTC', -0.590093751105449], ['GCGTG', -0.657993128771029], ['GCGTT', -0.914170482707913],
['GCTAA', -0.460948330424181], ['GCTAC', -0.545166409508198], ['GCTAG', -0.502171046248135], ['GCTAT', -0.532692235283022],
['GCTCA', -0.523437670516816], ['GCTCC', -0.456152158160688], ['GCTCG', -0.526513018917064], ['GCTCT', -0.498171040914789],
['GCTGA', -0.484295694421172], ['GCTGC', -0.524461736146506], ['GCTGG', -0.437194244416074], ['GCTGT', -0.585726931939108],
['GCTTA', -0.774605889279222], ['GCTTC', -0.661512193986225], ['GCTTG', -0.737158001713725], ['GCTTT', -1.02104500319695],
['GGAAA', -0.94177959832198], ['GGAAC', -0.914170482707913], ['GGAAG', -0.845480228865879], ['GGAAT', -0.869784498791929],
['GGACA', -0.626866696623419], ['GGACC', -0.608868460500803], ['GGACG', -0.673332662586651], ['GGACT', -0.585726931939108],
['GGAGA', -0.459027100246287], ['GGAGC', -0.456152158160688], ['GGAGG', -0.42598964540321], ['GGAGT', -0.523437670516816],
['GGATA', -0.599989378859673], ['GGATC', -0.609983909584668], ['GGATG', -0.620079340250019], ['GGATT', -0.878505484297374],
['GGCAA', -0.208993081871425], ['GGCAC', -0.229378838795899], ['GGCAG', -0.213487471459264], ['GGCAT', -0.207499429614642],
['GGCCA', -0.165126399765843], ['GGCCC', -0.122417774036813], ['GGCCG', -0.178085544408349], ['GGCCT', -0.141096975039008],
['GGCGA', -0.197844694388811], ['GGCGC', -0.227854448256803], ['GGCGG', -0.208245976868426], ['GGCGT', -0.246302319620411],
['GGCTA', -0.0749013081731177], ['GGCTC', -0.0307906097256446], ['GGCTG', -0.0735949722590621], ['GGCTT', -0.0967059381399706],
['GGGAA', -0.307072413615549], ['GGGAC', -0.383275292105963], ['GGGAG', -0.324536474934304], ['GGGAT', -0.350887340861707],
['GGGCA', -0.163696806956249], ['GGGCC', -0.122417774036813], ['GGGCG', -0.190480890863066], ['GGGCT', -0.160843737973842],
['GGGGA', -0.0893847739916974], ['GGGGC', -0.136220615672231], ['GGGGG', -0.0625594821427302], ['GGGGT', -0.153039512482261],
['GGGTA', -0.359537913837716], ['GGGTC', -0.387733614247674], ['GGGTG', -0.439073944217651], ['GGGTT', -0.623467259921955],
['GGTAA', -0.571665045282532], ['GGTAC', -0.674522430722239], ['GGTAG', -0.668587695202425], ['GGTAT', -0.703509967595492],
['GGTCA', -0.655653946417857], ['GGTCC', -0.608868460500803], ['GGTCG', -0.670957364557743], ['GGTCT', -0.665043686767696],
['GGTGA', -0.553569820304577], ['GGTGC', -0.647509631476171], ['GGTGG', -0.560981182974219], ['GGTGT', -0.729582207905267],
['GGTTA', -1.06404388843327], ['GGTTC', -0.940225596455245], ['GGTTG', -0.286671869315058], ['GGTTT', -1.38933110806336],
['GTAAA', -1.27019726847337], ['GTAAC', -1.06404388843327], ['GTAAG', -0.97659324062612], ['GTAAT', -1.15794908030012],
['GTACA', -0.660337795730283], ['GTACC', -0.674522430722239], ['GTACG', -0.764134589411927], ['GTACT', -0.679295709474897],
['GTAGA', -0.48626613840847], ['GTAGC', -0.545166409508198], ['GTAGG', -0.46770185343996], ['GTAGT', -0.679295709474897],
['GTATA', -0.646351553755543], ['GTATC', -0.667404962953376], ['GTATG', -0.710890074893114], ['GTATT', -0.999395531500101],
['GTCAA', -0.556739395065856], ['GTCAC', -0.53062825106217], ['GTCAG', -0.49120930220599], ['GTCAT', -0.485280431083711],
['GTCCA', -0.428779047490789], ['GTCCC', -0.383275292105963], ['GTCCG', -0.456152158160688], ['GTCCT', -0.393109971284054],
['GTCGA', -0.516298425507345], ['GTCGC', -0.538909871893892], ['GTCGG', -0.522414652524782], ['GTCGT', -0.509209788229611],
['GTCTA', -0.245526825203881], ['GTCTC', -0.233200012723223], ['GTCTG', -0.273033470993715], ['GTCTT', -0.261152143106962],
['GTGAA', -0.469639838542639], ['GTGAC', -0.53062825106217], ['GTGAG', -0.498171040914789], ['GTGAT', -0.501169543911045],
['GTGCA', -0.254872687622119], ['GTGCC', -0.229378838795899], ['GTGCG', -0.285864440930456], ['GTGCT', -0.258792724002192],
['GTGGA', -0.234732580073001], ['GTGGC', -0.277825812434021], ['GTGGG', -0.194156014440957], ['GTGGT', -0.358669481303762],
['GTGTA', -0.506187115903025], ['GTGTC', -0.575970756993744], ['GTGTG', -0.679295709474897], ['GTGTT', -0.839846411147623],
['GTTAA', -0.997749440593432], ['GTTAC', -1.06404388843327], ['GTTAG', -1.0328995027315], ['GTTAT', -1.00932938802439],
['GTTCA', -0.987929394412457], ['GTTCC', -0.914170482707913], ['GTTCG', -1.00435012462478], ['GTTCT', -1.02272992356187],
['GTTGA', -0.962204503174021], ['GTTGC', -1.0500820560515], ['GTTGG', -0.897665982036449], ['GTTGT', -1.229980208509],
['GTTTA', -1.71083083605419], ['GTTTC', -1.4783351791697], ['GTTTG', -1.57311358549699], ['GTTTT', -2.08491434342119],
['TAAAA', -1.63027199933694], ['TAAAC', -1.71083083605419], ['TAAAG', -1.50256247450503], ['TAAAT', -1.82819449657338],
['TAACA', -1.15988144805117], ['TAACC', -1.06404388843327], ['TAACG', -1.15217428080623], ['TAACT', -1.04835045388722],
['TAAGA', -0.72206337549124], ['TAAGC', -0.774605889279222], ['TAAGG', -0.675713616092393], ['TAAGT', -0.92176353361551],
['TAATA', -0.9248169711024], ['TAATC', -0.966977781926678], ['TAATG', -1.01099466734345], ['TAATT', -1.33949873431549],
['TACAA', -0.567377793270912], ['TACAC', -0.506187115903025], ['TACAG', -0.497173534597663], ['TACAT', -0.592284332485267],
['TACCA', -0.445680858293392], ['TACCC', -0.359537913837716], ['TACCG', -0.448525810425623], ['TACCT', -0.396710335209232],
['TACGA', -0.496177022307924], ['TACGC', -0.572739736412297], ['TACGG', -0.518333014205133], ['TACGT', -0.650991933312045],
['TACTA', -0.273033470993715], ['TACTC', -0.202289146149235], ['TACTG', -0.291530178466134], ['TACTT', -0.296412206063442],
['TAGAA', -0.254090520980756], ['TAGAC', -0.245526825203881], ['TAGAG', -0.214990101343799], ['TAGAT', -0.213487471459264],
['TAGCA', -0.100051541795893], ['TAGCC', -0.0749013081731177], ['TAGCG', -0.10206428987559], ['TAGCT', -0.0801437841379677],
['TAGGA', -0.0860746087712429], ['TAGGC', -0.0755551166305366], ['TAGGG', -0.0326685445498448], ['TAGGT', -0.096038158482434],
['TAGTA', -0.273033470993715], ['TAGTC', -0.26747127945844], ['TAGTG', -0.294782213852511], ['TAGTT', -0.456152158160688],
['TATAA', -0.564174373553374], ['TATAC', -0.646351553755543], ['TATAG', -0.624599121955639], ['TATAT', -0.77856106912215],
['TATCA', -0.583550652516513], ['TATCC', -0.599989378859673], ['TATCG', -0.617827087045694], ['TATCT', -0.687704167424304],
['TATGA', -0.549359287768234], ['TATGC', -0.673332662586651], ['TATGG', -0.517315202412432], ['TATGT', -0.723312594891672],
['TATTA', -0.9248169711024], ['TATTC', -0.92941468035103], ['TATTG', -0.882894664716137], ['TATTT', -1.44434497894115],
['TCAAA', -0.971773954190171], ['TCAAC', -0.962204503174021], ['TCAAG', -0.9248169711024], ['TCAAT', -1.0799875388304],
['TCACA', -0.639431110910969], ['TCACC', -0.553569820304577], ['TCACG', -0.669771827963642], ['TCACT', -0.559919047983236],
['TCAGA', -0.480366416281282], ['TCAGC', -0.484295694421172], ['TCAGG', -0.385947899487439], ['TCAGT', -0.559919047983236],
['TCATA', -0.549359287768234], ['TCATC', -0.589000257284078], ['TCATG', -0.820374307734803], ['TCATT', -0.787851116029243],
['TCCAA', -0.179535870985995], ['TCCAC', -0.234732580073001], ['TCCAG', -0.197105869087379], ['TCCAT', -0.226332377947734],
['TCCCA', -0.156579339187385], ['TCCCC', -0.0893847739916974], ['TCCCG', -0.166558039237281], ['TCCCT', -0.110833317399181],
['TCCGA', -0.201547031087346], ['TCCGC', -0.209740745457656], ['TCCGG', -0.21273700238089], ['TCCGT', -0.225572210674336],
['TCCTA', -0.0860746087712429], ['TCCTC', -0.0251778728206872], ['TCCTG', -0.0542019921599675], ['TCCTT', -0.082116819963826],
['TCGAA', -0.448525810425623], ['TCGAC', -0.516298425507345], ['TCGAG', -0.520371750894981], ['TCGAT', -0.557798156414098],
['TCGCA', -0.260365051370145], ['TCGCC', -0.197844694388811], ['TCGCG', -0.297228199580772], ['TCGCT', -0.240886445647379],
['TCGGA', -0.201547031087346], ['TCGGC', -0.228616353055564], ['TCGGG', -0.192684343829501], ['TCGGT', -0.298862186872426],
['TCGTA', -0.496177022307924], ['TCGTC', -0.560981182974219], ['TCGTG', -0.654486403961819], ['TCGTT', -0.871232724968294],
['TCTAA', -0.509209788229611], ['TCTAC', -0.48626613840847], ['TCTAG', -0.547260651111312], ['TCTAT', -0.552515523949671],
['TCTCA', -0.510219379430963], ['TCTCC', -0.459027100246287], ['TCTCG', -0.52856851809916], ['TCTCT', -0.510219379430963],
['TCTGA', -0.480366416281282], ['TCTGC', -0.517315202412432], ['TCTGG', -0.458067867558826], ['TCTGT', -0.549359287768234],
['TCTTA', -0.72206337549124], ['TCTTC', -0.681690920200852], ['TCTTG', -0.801274136361383], ['TCTTT', -1.04317356570769],
['TGAAA', -1.06053511290359], ['TGAAC', -0.987929394412457], ['TGAAG', -0.9653841560914], ['TGAAT', -1.06404388843327],
['TGACA', -0.693753792650127], ['TGACC', -0.655653946417857], ['TGACG', -0.717082045141036], ['TGACT', -0.673332662586651],
['TGAGA', -0.510219379430963], ['TGAGC', -0.523437670516816], ['TGAGG', -0.484295694421172], ['TGAGT', -0.603309802496975],
['TGATA', -0.583550652516513], ['TGATC', -0.638282345707096], ['TGATG', -0.675713616092393], ['TGATT', -0.914170482707913],
['TGCAA', -0.259578578659531], ['TGCAC', -0.254872687622119], ['TGCAG', -0.250188838309692], ['TGCAT', -0.254090520980756],
['TGCCA', -0.197105869087379], ['TGCCC', -0.163696806956249], ['TGCCG', -0.20675343927792], ['TGCCT', -0.153746476103081],
['TGCGA', -0.260365051370145], ['TGCGC', -0.270645885299213], ['TGCGG', -0.285864440930456], ['TGCGT', -0.295596877848687],
['TGCTA', -0.100051541795893], ['TGCTC', -0.0567660961289052], ['TGCTG', -0.108126955801438], ['TGCTT', -0.0913761453766228],
['TGGAA', -0.37618306379647], ['TGGAC', -0.428779047490789], ['TGGAG', -0.398515389850962], ['TGGAT', -0.421357863153172],
['TGGCA', -0.197105869087379], ['TGGCC', -0.165126399765843], ['TGGCG', -0.197105869087379], ['TGGCT', -0.18098830406631],
['TGGGA', -0.156579339187385], ['TGGGC', -0.181715312458927], ['TGGGG', -0.128605487144979], ['TGGGT', -0.227093123515043],
['TGGTA', -0.445680858293392], ['TGGTC', -0.465767616860129], ['TGGTG', -0.549359287768234], ['TGGTT', -0.766742152819007],
['TGTAA', -0.608868460500803], ['TGTAC', -0.660337795730283], ['TGTAG', -0.733362930745173], ['TGTAT', -0.775922545163969],
['TGTCA', -0.693753792650127], ['TGTCC', -0.626866696623419], ['TGTCG', -0.690119627535908], ['TGTCT', -0.719569608612837],
['TGTGA', -0.639431110910969], ['TGTGC', -0.696183927503419], ['TGTGG', -0.584638200203912], ['TGTGT', -0.830062964838552],
['TGTTA', -1.15988144805117], ['TGTTC', -1.0782034170369], ['TGTTG', -1.22790766890703], ['TGTTT', -1.55859954261274],
['TTAAA', -1.04145387482816], ['TTAAC', -0.997749440593432], ['TTAAG', -0.864012476994807], ['TTAAT', -1.15409551098413],
['TTACA', -0.608868460500803], ['TTACC', -0.571665045282532], ['TTACG', -0.626866696623419], ['TTACT', -0.608868460500803],
['TTAGA', -0.509209788229611], ['TTAGC', -0.460948330424181], ['TTAGG', -0.427848381997836], ['TTAGT', -0.672144308317272],
['TTATA', -0.564174373553374], ['TTATC', -0.621207371954518], ['TTATG', -0.667404962953376], ['TTATT', -0.875590030337249],
['TTCAA', -0.447576593458291], ['TTCAC', -0.469639838542639], ['TTCAG', -0.444734336689361], ['TTCAT', -0.456152158160688],
['TTCCA', -0.37618306379647], ['TTCCC', -0.307072413615549], ['TTCCG', -0.390418179618343], ['TTCCT', -0.348306694268216],
['TTCGA', -0.448525810425623], ['TTCGC', -0.457109554117015], ['TTCGG', -0.457109554117015], ['TTCGT', -0.514267967956964],
['TTCTA', -0.254090520980756], ['TTCTC', -0.155161904506411], ['TTCTG', -0.237804779109971], ['TTCTT', -0.233200012723223],
['TTGAA', -0.447576593458291], ['TTGAC', -0.556739395065856], ['TTGAG', -0.484295694421172], ['TTGAT', -0.562044447295549],
['TTGCA', -0.259578578659531], ['TTGCC', -0.208993081871425], ['TTGCG', -0.286671869315058], ['TTGCT', -0.225572210674336],
['TTGGA', -0.179535870985995], ['TTGGC', -0.26351714223846], ['TTGGG', -0.232434608935377], ['TTGGT', -0.332115458581212],
['TTGTA', -0.567377793270912], ['TTGTC', -0.601094962167422], ['TTGTG', -0.687704167424304], ['TTGTT', -0.954299323666907],
['TTTAA', -1.04145387482816], ['TTTAC', -1.27019726847337], ['TTTAG', -1.1275284835994], ['TTTAT', -1.3796458023289],
['TTTCA', -1.06053511290359], ['TTTCC', -0.94177959832198], ['TTTCG', -1.02104500319695], ['TTTCT', -1.016007209167],
['TTTGA', -0.971773954190171], ['TTTGC', -1.07287007106154], ['TTTGG', -0.828673110549498], ['TTTGT', -1.30090143481734],
['TTTTA', -1.63027199933694], ['TTTTC', -1.59081316259639], ['TTTTG', -1.79845252737464], ['TTTTT', -7.4079243225598]]
temp = np.array(PROT)
N_PROT = temp[:,1].astype(float).min()
if('pcr' in biasType):
PCR = 1
COVARI_NUM = COVARI_NUM + 2 # ANNEAL & DENATURE
COVARI_ORDER.extend(["Anneal_pcr", "Denature_pcr"])
global GIBBS
global ENTROPY
global MIN_TM
global MAX_TM
global PARA1
global PARA2
global N_GIBBS
GIBBS = [['AA', -1.04], ['AC', -2.04], ['AG', -1.29], ['AT', -1.27],
['CA', -0.78], ['CC', -1.97], ['CG', -1.44], ['CT', -1.29],
['GA', -1.66], ['GC', -2.7], ['GG', -1.97], ['GT', -2.04],
['TA', -0.12], ['TC', -1.66], ['TG', -0.78], ['TT', -1.04]]
ENTROPY = -0.02485
temp = np.array(GIBBS)
MIN_TM = -0.12 / ENTROPY
MAX_TM = -2.7 / ENTROPY
PARA1 = (math.pow(10, 6) - math.exp(1)) / (math.pow(10, 6) - 1)
PARA2 = math.pow(10, -6) / (1-PARA1)
N_GIBBS = np.median(temp[:,1].astype(float))
if('map' in biasType):
MAP = 1
COVARI_NUM = COVARI_NUM + 1
COVARI_ORDER.extend(["Map_map"])
global MAPFILE
global KMER
if(args.mapFile == None):
print("ERROR: No map file was specified !")
sys.exit()
if(args.kmer == None):
print("ERROR: No kmer parameter was specified !")
sys.exit()
MAPFILE = args.mapFile
KMER = int(args.kmer)
if('gquad' in biasType):
GQUAD = 1
COVARI_NUM = COVARI_NUM + 1
COVARI_ORDER.extend(["Gquad_gquad"])
global GQAUDFILE
global GQAUD_MAX
guadFileNum = len(args.gquadFile)
if(guadFileNum == 0):
print("ERROR: No g-quadruplex file was specified !")
sys.exit()
GQAUDFILE = [0] * guadFileNum
for i in range(guadFileNum):
GQAUDFILE[i] = args.gquadFile[i]
GQAUD_MAX = args.gquadMax
return
def setFragLen(fragLen):
    """Record the sequencing fragment length (bp) in the module global FRAGLEN."""
    global FRAGLEN
    FRAGLEN = int(fragLen)
def setAnlaysisRegion(region, bl):
    """Load the analysis regions into the module-global REGION list.

    region : path to a whitespace-separated file of "chromo start end" lines.
    bl     : optional blacklist file path in the same format; blacklisted
             intervals are subtracted from the analysis regions.

    Pipeline: read regions -> sort by (chromo, start) and merge overlaps ->
    subtract merged blacklist intervals -> keep only regions on chromosomes
    known to the first control bigwig (CTRLBW_NAMES / pyBigWig are set up
    elsewhere in this module).
    """
    global REGION

    # --- 1) read the region file into [chromo, start, end] triples
    REGION = []
    input_filename = region
    input_stream = open(input_filename)
    input_file = input_stream.readlines()
    for i in range(len(input_file)):
        temp = input_file[i].split()
        temp[1] = int(temp[1])
        temp[2] = int(temp[2])
        REGION.append(temp)
    input_stream.close()

    # --- 2) sort by (chromo, start) and merge overlapping/adjacent regions
    if(len(REGION) > 1):
        REGION = np.array(REGION)
        REGION = REGION[np.lexsort(( REGION[:,1].astype(int), REGION[:,0]) ) ]
        REGION = REGION.tolist()

        region_merged = []
        pos = 0
        pastChromo = REGION[pos][0]
        pastStart = int(REGION[pos][1])
        pastEnd = int(REGION[pos][2])
        region_merged.append([ pastChromo, pastStart, pastEnd])
        resultIdx = 0
        pos = 1
        while( pos < len(REGION) ):
            currChromo = REGION[pos][0]
            currStart = int(REGION[pos][1])
            currEnd = int(REGION[pos][2])
            # current interval starts inside the previous one -> extend it,
            # otherwise start a new merged interval
            if( (currChromo == pastChromo) and (currStart >= pastStart) and (currStart <= pastEnd)):
                region_merged[resultIdx][2] = currEnd
                pos = pos + 1
                pastChromo = currChromo
                pastStart = currStart
                pastEnd = currEnd
            else:
                region_merged.append([currChromo, currStart, currEnd])
                resultIdx = resultIdx + 1
                pos = pos + 1
                pastChromo = currChromo
                pastStart = currStart
                pastEnd = currEnd
        REGION = region_merged

    if(bl != None): ### REMOVE BLACKLIST REGIONS FROM 'REGION'
        # read the blacklist file (same "chromo start end" format)
        # NOTE(review): this stream is never closed.
        bl_region_temp = []
        input_stream = open(bl)
        input_file = input_stream.readlines()
        for i in range(len(input_file)):
            temp = input_file[i].split()
            temp[1] = int(temp[1])
            temp[2] = int(temp[2])
            bl_region_temp.append(temp)

        ## merge overlapping blacklist regions
        if(len(bl_region_temp) == 1):
            bl_region = bl_region_temp
            bl_region = np.array(bl_region)
        else:
            bl_region_temp = np.array(bl_region_temp)
            bl_region_temp = bl_region_temp[np.lexsort( ( bl_region_temp[:,1].astype(int), bl_region_temp[:,0] ) )]
            bl_region_temp = bl_region_temp.tolist()
            bl_region = []
            pos = 0
            pastChromo = bl_region_temp[pos][0]
            pastStart = int(bl_region_temp[pos][1])
            pastEnd = int(bl_region_temp[pos][2])
            bl_region.append([pastChromo, pastStart, pastEnd])
            resultIdx = 0
            pos = 1
            while( pos < len(bl_region_temp) ):
                currChromo = bl_region_temp[pos][0]
                currStart = int(bl_region_temp[pos][1])
                currEnd = int(bl_region_temp[pos][2])
                if( (currChromo == pastChromo) and (currStart >= pastStart) and (currStart <= pastEnd)):
                    bl_region[resultIdx][2] = currEnd
                    pos = pos + 1
                    pastChromo = currChromo
                    pastStart = currStart
                    pastEnd = currEnd
                else:
                    bl_region.append([currChromo, currStart, currEnd])
                    resultIdx = resultIdx + 1
                    pos = pos + 1
                    pastChromo = currChromo
                    pastStart = currStart
                    pastEnd = currEnd
            bl_region = np.array(bl_region)

        # subtract the merged blacklist intervals from every region
        region_woBL = []
        for region in REGION:
            regionChromo = region[0]
            regionStart = int(region[1])
            regionEnd = int(region[2])
            overlapped_bl = []
            ## overlap Case 1 : A blacklist region completely covers the region.
            idx = np.where( (bl_region[:,0] == regionChromo) & (bl_region[:,1].astype(int) <= regionStart) & (bl_region[:,2].astype(int) >= regionEnd) )[0]
            if(len(idx) > 0):
                continue
            ## overlap Case 2
            # blacklist interval whose end falls inside the region
            idx = np.where( (bl_region[:,0] == regionChromo) & (bl_region[:,2].astype(int) > regionStart) & (bl_region[:,2].astype(int) <= regionEnd) )[0]
            if(len(idx) > 0):
                overlapped_bl.extend( bl_region[idx].tolist() )
            ## overlap Case 3
            # blacklist interval whose start falls inside the region
            idx = np.where( (bl_region[:,0] == regionChromo) & (bl_region[:,1].astype(int) >= regionStart) & (bl_region[:,1].astype(int) < regionEnd) )[0]
            if(len(idx) > 0):
                overlapped_bl.extend( bl_region[idx].tolist() )
            if(len(overlapped_bl) == 0):
                region_woBL.append(region)
                continue

            # sort the overlapping blacklist intervals by start and de-duplicate
            overlapped_bl = np.array(overlapped_bl)
            overlapped_bl = overlapped_bl[overlapped_bl[:,1].astype(int).argsort()]
            overlapped_bl = np.unique(overlapped_bl, axis=0)
            overlapped_bl = overlapped_bl[overlapped_bl[:,1].astype(int).argsort()]

            # walk the blacklist intervals left-to-right, emitting the gaps
            currStart = regionStart
            for pos in range(len(overlapped_bl)):
                blStart = int(overlapped_bl[pos][1])
                blEnd = int(overlapped_bl[pos][2])
                if( blStart <= regionStart ):
                    currStart = blEnd
                else:
                    if(currStart == blStart):
                        currStart = blEnd
                        continue
                    region_woBL.append([ regionChromo, currStart, blStart ])
                    currStart = blEnd
                # last blacklist interval: keep the tail of the region, if any
                # (the inner blEnd == regionEnd check is unreachable given blEnd < regionEnd)
                if( (pos == (len(overlapped_bl)-1)) and (blEnd < regionEnd) ):
                    if(blEnd == regionEnd):
                        break
                    region_woBL.append([ regionChromo, blEnd, regionEnd ])
        REGION = region_woBL

    # check if all chromosomes in the REGION in bigwig files
    bw = pyBigWig.open(CTRLBW_NAMES[0])
    region_final = []
    for regionIdx in range(len(REGION)):
        chromo = REGION[regionIdx][0]
        start = int(REGION[regionIdx][1])
        end = int(REGION[regionIdx][2])
        chromoLen = bw.chroms(chromo)
        # skip chromosomes the bigwig does not know about
        if(chromoLen == None):
            continue
        # NOTE(review): the clipped end (chromoLen) is written into REGION, but
        # region_final still receives the original 'end' — confirm whether the
        # clipping is meant to survive into the final list.
        if(end > chromoLen):
            REGION[regionIdx][2] = chromoLen
        if( chromoLen <= start ):
            continue
        region_final.append([chromo, start, end])
    bw.close()

    REGION = region_final
    return
def setFilterCriteria(minFrag):
    """Set the global FILTERVALUE, falling back to SAMPLE_NUM when no minimum is given."""
    global FILTERVALUE
    FILTERVALUE = int(SAMPLE_NUM) if minFrag is None else int(minFrag)
def setScaler(scalerResult):
    """Populate CTRLSCALER / EXPSCALER from the fitted scaling factors.

    The first control track is the reference and keeps a scale of 1; the
    remaining CTRLBW_NUM-1 control factors come first in scalerResult,
    followed by the EXPBW_NUM experimental factors.
    """
    global CTRLSCALER
    global EXPSCALER
    CTRLSCALER = [1] + [scalerResult[i - 1] for i in range(1, CTRLBW_NUM)]
    EXPSCALER = [scalerResult[i + CTRLBW_NUM - 1] for i in range(EXPBW_NUM)]
def setBinSize(binSize):
    """Record the genomic bin size (bp) in the module global BINSIZE."""
    global BINSIZE
    BINSIZE = int(binSize)
def setNumProcess(numProcess):
    """Choose the worker-process count (default: half the CPUs), capped at the machine's CPU count."""
    global NUMPROCESS
    system_cpus = int(multiprocessing.cpu_count())
    NUMPROCESS = int(system_cpus / 2.0) if numProcess is None else int(numProcess)
    if NUMPROCESS > system_cpus:
        print("ERROR: You specified too many cpus! (-p). Running with the maximum cpus in the system")
        NUMPROCESS = system_cpus
def setNormalization(norm, generateNormBW):
    """Parse the normalization command-line flags into module globals.

    norm           : 'True'/'False' string; anything other than 'false'
                     (case-insensitive) enables normalization (I_NORM).
    generateNormBW : 'True'/'False' string; controls I_GENERATE_NormBW.

    Exits with an error when normalized bigwig output is requested while
    normalization itself is disabled.
    """
    global I_NORM
    global I_GENERATE_NormBW
    I_NORM = norm.lower() != 'false'
    I_GENERATE_NormBW = generateNormBW.lower() != 'false'
    # Normalized bigwigs can only be produced when normalization runs.
    # (Fixed typo in the message: was "I_NOMR".)
    if (not I_NORM) and I_GENERATE_NormBW:
        print("ERROR: I_NORM should be 'True' if I_GENERATE_NormBW is 'True'")
        sys.exit()
| [
"kys91240@gmail.com"
] | kys91240@gmail.com |
a1ee59e5d9561ba069c0ab6680b4a2eece618c8f | b15ee3b26319177114e2f8d53d3d4c9ea9a86a96 | /IoT/Shelf 10/submitBook.py | 889eab400664706c4df368c975891c695d36d2b9 | [] | no_license | pmanaktala/Smart-Library | e54f932833e163d064a27c5128cce71b38a8ff01 | c01e0bf250f48c5f1763530b21f1a68b171bccf8 | refs/heads/master | 2023-01-10T15:29:44.807824 | 2020-09-30T18:46:43 | 2020-09-30T18:46:43 | 179,119,846 | 0 | 1 | null | 2023-01-04T13:01:31 | 2019-04-02T16:43:46 | Python | UTF-8 | Python | false | false | 1,753 | py | import requests
import json
#Specify here the shelf number of the RFID READER
shelf_number = 10  # used to build the "Shelf <n>" key in each book's Location map
def loadData():
    """Fetch the full book-records table from the AWS API and return it parsed."""
    url = "https://fv3md359db.execute-api.ap-south-1.amazonaws.com/final/readrecords"
    response = requests.get(url)
    return json.loads(response.text)
def findNameFromTag(tag):
    """Return the name of the book whose "Tags" list contains *tag*.

    Side effect: the matching row number is stored in the module global
    `index` so later updates can address the same record.  Returns None
    when no record matches.
    """
    global index
    for row in range(No_Of_Books):
        record = booksData[row]
        if tag in record["Tags"]:
            index = row
            return record["Name"]
def addBook(name, tag):
    """Append *tag* to this shelf's slot in the book's Location map and push it back."""
    lookup = requests.get("https://fv3md359db.execute-api.ap-south-1.amazonaws.com/final/readlocation?find=" + name)
    record = json.loads(lookup.text)
    location = record[0]["Location"]
    shelf_key = "Shelf " + str(shelf_number)
    if shelf_key in location:
        tags = location[shelf_key]
        tags.append(tag)
        location[shelf_key] = tags
    else:
        # first book tag registered on this shelf
        location[shelf_key] = [tag]
    payload = {
        "bookname" : name,
        "book" : location
    }
    print(payload)
    reply = requests.post("https://fv3md359db.execute-api.ap-south-1.amazonaws.com/final/updatebook", json=payload)
    print(reply)
def updateData(tag, regno):
    """Remove *tag* from the issued map of the current book (booksData[index]) and sync the record."""
    issued = booksData[index]['Issued']
    del issued[tag]
    payload = {
        "bookname" : book_to_add,
        "updated" : issued
    }
    print(payload)
    response = requests.post("https://fv3md359db.execute-api.ap-south-1.amazonaws.com/final/updatebookinrecord", json=payload)
    print(response)
# Script entry: pull the catalogue, resolve the scanned RFID tag to a book,
# register the book on this shelf and clear its issued record.
booksData = loadData()
No_Of_Books = len(booksData)
tag_from_rfid = "707eff2b"  # NOTE(review): hard-coded tag; presumably replaced by a live RFID read — confirm
book_to_add = findNameFromTag(tag_from_rfid)
addBook(book_to_add,tag_from_rfid)
updateData(tag_from_rfid,"RA1511008010137")
| [
"parthmanaktala@Parths-MacBook-Pro-7.local"
] | parthmanaktala@Parths-MacBook-Pro-7.local |
50b62a9b11b632703ea4b8d30ae6468d1be6f240 | 949ab04894f44f8c6c2c224cb92834518fa3069b | /mairie/mairie/wsgi.py | 7a9ea27c9f5a55058b8d4d6a149dd8ec0e258e66 | [] | no_license | yeroSambaKa/Mairie | 9575126bacb0e3d4a8419f0d20c948942ee70b09 | dc0d03f89e42c26917242f11685e7bb38219b7d4 | refs/heads/master | 2023-07-16T04:52:12.477249 | 2021-08-30T13:17:31 | 2021-08-30T13:17:31 | 400,856,198 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 389 | py | """
WSGI config for mairie project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at this project's settings module unless the environment
# already specifies one, then build the WSGI callable that servers invoke.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'mairie.settings')
application = get_wsgi_application()
| [
"yeroka1394@gmail.com"
] | yeroka1394@gmail.com |
7155f91558771fee29787a17d166ff3b58a5b822 | a4198750ef81e7afa3af26f5b9fb71947425954a | /src/python_smallfbx/package/setup.py | e92ffda94b9d2690f3e4ba4152b07ce2dafa774f | [
"BSD-2-Clause"
] | permissive | think-biq/verwandlung | 007be76c1936501d834d8031819a626011c33953 | d2403ea68e74624db93479e82595011588f0fc4f | refs/heads/master | 2023-03-26T19:35:32.301974 | 2021-03-28T23:34:14 | 2021-03-28T23:34:14 | 350,286,650 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,375 | py | """
Utility to inspect FBX files.
2021-∞ (c) blurryroots innovation qanat OÜ. All rights reserved.
See license.md for details.
https://think-biq.com
"""
import setuptools
import setuptools.dist
import os
from src import version
# Absolute directory containing this setup.py, so the readme is found
# regardless of the current working directory.
PROJECT_PATH = os.path.dirname(os.path.realpath(__file__))
with open(os.path.join(PROJECT_PATH, 'readme.md'), 'r') as fh:
    long_description = fh.read()  # used as the package's long description below
class BinaryDistribution(setuptools.dist.Distribution):
    """
    Distribution which always forces a binary package with platform name.
    Thanks to https://stackoverflow.com/a/36886459/949561
    """

    def has_ext_modules(self):
        # Fixed: the instance parameter was named 'foo' instead of 'self'.
        # Claiming extension modules makes the generated wheel platform-tagged
        # instead of "pure Python" (the package ships prebuilt .pyd/.so files).
        return True
# Package metadata; distclass=BinaryDistribution forces a platform-tagged
# (non-pure) wheel because the package ships native binaries (*.pyd / *.so).
setuptools.setup(
    python_requires='>=3.9',
    name="smallfbx",
    version=version(),  # version string provided by the local src package
    description="Python binding for the SmallFBX library.",
    long_description=long_description,
    long_description_content_type="text/markdown",
    package_dir = {'smallfbx': 'src'},
    packages=['smallfbx'],
    include_package_data=True,
    package_data={'smallfbx': ['*.pyd', '*.so']},  # bundle the prebuilt extension binaries
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: MIT License",
        "Operating System :: OS Independent",
    ],
    author="biq",
    author_email="sf@think-biq.com",
    url="https://gitlab.com/think-biq/python-smallfbx",
    distclass=BinaryDistribution
) | [
"blurryroots@posteo.de"
] | blurryroots@posteo.de |
5681e155f9ce07761b62f8ab53053d972cf701c9 | d102fef03c29acc3fda106c8f85312aa58348a2b | /scripts/cloudReceiver.py | 4b6020f8b3b4e992c608ac1a91ca86b5ec789e3e | [] | no_license | pyrosnowman24/sensor_dist_ros | f92d01779c638ed038baa352f96e7cf3ff709072 | 8673c3ba041a5b8734d54321e54d40b68c14bc9e | refs/heads/master | 2020-06-20T08:29:31.787875 | 2020-02-20T19:55:54 | 2020-02-20T19:55:54 | 197,059,322 | 0 | 0 | null | 2020-02-20T19:55:56 | 2019-07-15T19:26:19 | Python | UTF-8 | Python | false | false | 2,116 | py | #!/usr/bin/#!/usr/bin/env python
# Standard library
import json
import signal
import sys
import time

# Third-party
import pika
import rospy
import yaml

# ROS message types for this package
from sensor_dist_ros.msg import floatArray as floatArray2
from sensor_dist_ros.msg import sens as Data
# Module-level ROS setup: one pose publisher per robot, indexed by robot id.
global r0,r1,r2,r3
rospy.init_node('rosCloudReciever',anonymous=True)
r0 = rospy.Publisher("/robot0/poses",Data,queue_size=1)
r1 = rospy.Publisher("/robot1/poses",Data,queue_size=1)
r2 = rospy.Publisher("/robot2/poses",Data,queue_size=1)
# Fixed copy-paste bug: r3 previously published to "/robot2/poses".
r3 = rospy.Publisher("/robot3/poses",Data,queue_size=1)
rate = rospy.Rate(10)
def close_pika(signal, frame):
    # SIGTERM handler: close the RabbitMQ connection cleanly, then exit.
    # NOTE(review): the `signal` parameter shadows the signal module, and this
    # handler relies on `connection` being visible at module scope — as
    # originally written it is local to initialize(); confirm the scoping.
    print('Closing Pika Connection')
    connection.close()
    sys.exit(0)
def callback(ch, method, properties, body):
    """RabbitMQ consumer: decode a [robot_id, x, y, z] message and republish
    the pose on the matching robot's topic (ids outside 0-3 are ignored)."""
    global r0, r1, r2, r3
    info = json.loads(body.decode('utf-8'))
    publishers = {0: r0, 1: r1, 2: r2, 3: r3}
    target = publishers.get(info[0])
    if target is not None:
        target.publish(info[1], info[2], info[3])
def initialize():
    """Connect to RabbitMQ using credentials from config.yaml, bind a fresh
    exclusive queue to the 'robot_pos_info' exchange and consume forever,
    dispatching each message to callback()."""
    global r0, r1, r2, r3
    # Fixed: expose the connection at module scope so the SIGTERM handler
    # (close_pika) can actually reach and close it.
    global connection
    ### Read config parameters for RabbitMQ
    signal.signal(signal.SIGTERM, close_pika)
    with open('config.yaml') as f:
        config = yaml.safe_load(f)
    hostname = config['hostname']
    username = config['username']
    password = config['password']
    port = config['port']
    credentials = pika.PlainCredentials(username, password)
    connection = pika.BlockingConnection(pika.ConnectionParameters(host=hostname, credentials=credentials, port=port))
    channel = connection.channel()
    channel.exchange_declare(exchange='robot_pos_info', exchange_type='direct')
    results = channel.queue_declare(queue="",exclusive=True)
    queue_name = results.method.queue
    channel.queue_bind(exchange='robot_pos_info',queue=queue_name,routing_key='key_robot_pos_info')
    channel.basic_consume(on_message_callback=callback,queue=queue_name)
    # Blocks here, dispatching messages until the connection is closed.
    channel.start_consuming()
# Main Loop
# NOTE(review): this loop runs at import time, BEFORE the __main__ guard
# below, so initialize() is only reached once rospy reports shutdown —
# confirm this ordering is intended.
while not rospy.is_shutdown():
    rate.sleep()
# Entry point: start consuming RabbitMQ messages until interrupted.
if __name__ == '__main__':
    try:
        initialize()
    except rospy.ROSInterruptException:
        pass
| [
"s.ackels@yahoo.com"
] | s.ackels@yahoo.com |
91fabff6ba01b5eacf95c4e1a5b3bc139c470d85 | e4afcdf78f64b39cde9f5156eb205820a7aff978 | /test/perf_test/generate_perf_report.py | 3672ad725e9c137444b0b1e4996d62695e00e11e | [
"BSD-3-Clause",
"MIT",
"LGPL-2.1-or-later",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-x11-hanson",
"CC0-1.0",
"ISC",
"Apache-2.0",
"BSD-2-Clause",
"MPL-2.0",
"LicenseRef-scancode-generic-cla"
] | permissive | Amrinder-Singh29/azure-storage-fuse | e18b67ff535feedf413d29642383038dc37495db | 771f66b78f162bd5348a49f737cc6b780e8b82bf | refs/heads/main | 2023-04-12T20:31:21.542901 | 2023-03-22T05:26:02 | 2023-03-22T05:26:02 | 349,494,035 | 0 | 0 | MIT | 2021-03-19T16:52:22 | 2021-03-19T16:52:21 | null | UTF-8 | Python | false | false | 1,845 | py | # Python program to read
# json file
import json
import argparse
import sys
import os
import math
def compare_numbers(job_one, job_two, metrics_list, log_file):
    """Compare two benchmark jobs and record the diff in the log file.

    For each metric in metrics_list, computes the percentage difference of
    job_one relative to job_two (floor'ed), prints an improvement/regression
    message, and writes the results back into the JSON log under the
    'performance_diff' key.

    Raises ValueError for any regression worse than -3%.

    Fixes vs. the original: the file is managed with a context manager so it
    is closed even when the ValueError is raised, and the in-place rewrite is
    followed by truncate() so stale bytes cannot survive if the new JSON is
    shorter than the old content.
    """
    with open(log_file, mode='r+') as f:
        data = json.load(f)
        result = {'performance_diff': {}}
        for i in metrics_list:
            metric_value = math.floor(((data[job_one][i] / data[job_two][i]) * 100) - 100)
            result['performance_diff'][i] = metric_value
            if metric_value < 0:
                sys.stdout.write('{} has regressed - there is a perf regression of {}%\n'.format(i, metric_value))
                if metric_value < -3:
                    raise ValueError("large perf regression in {} detected of {}".format(i, metric_value))
            else:
                sys.stdout.write('{} has a perf improvement of {}%\n'.format(i, metric_value))
        data.update(result)
        f.seek(0)
        json.dump(data, f)
        f.truncate()  # drop any leftover bytes from the previous content
if __name__ == "__main__":
    # Build the CLI and hand the parsed options straight to compare_numbers.
    parser = argparse.ArgumentParser("compare performance")
    parser.add_argument('-j1', '--job1', default='main', help='name of the first job', required=False)
    parser.add_argument('-j2', '--job2', default='binary', help='name of the second job', required=False)
    parser.add_argument('-m','--metrics', nargs='+', help='metrics to compare from log file', required=True)
    parser.add_argument('-lf', '--log', default="./blobfuse2-perf.json", help='path of log file', required=False)
    options = vars(parser.parse_args())
    compare_numbers(options['job1'], options['job2'], options['metrics'], options['log'])
| [
"noreply@github.com"
] | noreply@github.com |
d69efc6966a7a0dd85157fa1130e54581402b0c7 | 2feb8dfebbf10ffd03f02bea643195c942ed6739 | /list_doublesplits.py | e6f81a0f131079bc59c5d382b3d93bc6415a1860 | [] | no_license | Prads16/PythonForEverybodyCourseraSpecialization | f961648eb1c92fe7cc41468c278589b151e59cab | 997056b3de23ed1d01f6bb1c94c6b850c114368e | refs/heads/master | 2021-04-12T05:02:42.912896 | 2018-03-27T18:53:26 | 2018-03-27T18:53:26 | 125,952,601 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 807 | py | #Open the file mbox-short.txt and read it line by line. When you find a line that
# Scan a mailbox file line by line.  Every line that begins with 'From'
# (but not the 'From:' header), e.g.
#   From stephen.marquard@uct.ac.za Sat Jan 5 09:14:16 2008
# names the sender in its second whitespace-separated field.  Print each
# sender address, then a count of how many such lines were seen.
fname = input("Enter file name: ")
#if len(fname) < 1 : fname = "mbox-short.txt"
fh = open(fname)
count = 0
for line in fh:
    stripped = line.rstrip()
    if not stripped.startswith('From') or stripped.startswith('From:'):
        continue
    count = count + 1
    print(stripped.split()[1])
print("There were", count, "lines in the file with From as the first word")
| [
"pradnya.ambre16@gmail.com"
] | pradnya.ambre16@gmail.com |
27eaa975a85aedbd978087f69ba9b732ee63ca0e | e58828e074174cbaed80938713ba19536269c871 | /wildcard.py | fde6f353715e472cb222722a1c1ca88087896bbe | [] | no_license | aberke/search_engine_2 | e9cb7dda44c8c9e5154fda3e5830e269c96637d1 | 951ac513482e93b58a82a4c5aed1ef83fab1f083 | refs/heads/master | 2016-09-11T05:51:57.949383 | 2013-05-04T00:59:27 | 2013-05-04T00:59:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,163 | py | # file that minipulates loaded dictionary into a permuterm index btree --- deals with the handling of WildCard queries
from BTrees.OOBTree import OOBTree
def unrotateTerm(term):
    """Recover the original word from a rotated permuterm entry.

    A permuterm entry is a rotation of word+'$', so the text after the '$'
    is the word's head and the text before it is the tail:
    e.g. 'llo$he' -> 'hello'.
    """
    pieces = term.split('$')
    return pieces[1] + pieces[0]
def permutermIndex_create(index):
    """Build a permuterm index (BTree) over every term in the dictionary.

    Each term gets a '$' end marker and every rotation of term+'$' is stored
    as a key ('hello' -> 'hello$', 'ello$h', 'llo$he', ...), which is what
    makes prefix range scans answer wildcard queries.
    """
    btree = OOBTree()
    for word in index:
        rotated = word + '$'
        for _ in rotated:
            btree[rotated] = True
            rotated = rotated[1:] + rotated[0]
    return btree
def wildcard(tree, term):
    """Resolve a wildcard query against a permuterm index.

    Queries contain at most two non-contiguous '*'s (e.g. 'm*n', 'lol*t*').
    The query is rotated so the '*' lands at the end ('m*n' -> seek 'n$m'),
    a range scan over the BTree finds all rotations with that prefix, any
    middle fragment is checked by substring, and each hit is rotated back
    into the real word.  Returns {word: True, ...}.
    """
    matches = {}
    pieces = term.split("*")  # 0, 1 or 2 stars expected (len(pieces)-1 stars)
    if len(pieces) == 1:
        # no wildcard: the literal term is the only candidate
        return {pieces[0]: True}
    elif len(pieces) == 2:
        front = pieces[0]
        middle = ''
        end = pieces[1] + '$'
    elif len(pieces) == 3:
        front = pieces[0]
        middle = pieces[1]
        end = pieces[2] + '$'
    else:
        print("ERROR IN WILDCARD -- TOO MANY **************** IN QUERY")
        return matches
    # range [seek, seekMax) covers every key starting with end+front
    seek = end + front
    seekMax = seek[:-1] + chr(ord(seek[-1]) + 1)
    for rotated in tree.keys(min=seek, max=seekMax, excludemin=False, excludemax=True):
        if middle in rotated:
            # un-rotate the permuterm entry back into the actual term
            parts = rotated.split('$')
            matches[parts[1] + parts[0]] = True
    return matches
| [
"Alexandra@Alexandras-MacBook-Pro.local"
] | Alexandra@Alexandras-MacBook-Pro.local |
0bdb300392b9da3373e9e581a46570bf5a315763 | 4b7dce428c7bd29f3446725a502377ed24e2af7d | /Source Code/Main_Window.py | 85e2f4c25a392cf37418c13a447c7d6d80cf74a0 | [] | no_license | umairtehami/Twitter-Social-Capital | 24b569a67cd71335ea60fabe98cd054023b1abc7 | a09007472f4a6a6e179c02f3d9d30167b94dcb28 | refs/heads/main | 2023-08-23T21:04:46.965665 | 2021-10-27T15:42:20 | 2021-10-27T15:42:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 64,465 | py | from PyQt5.QtGui import QIcon
from Feedback import Feedback
from New_Credentials import *
from New_Project import *
from PyQt5.QtCore import QThread, QObject, pyqtSignal, QTime
from List import *
from OAuth1 import *
from OAuth2 import *
from Project import *
from Unweighted import *
from Weighted import *
from Tweets import *
import os
# Communicate class allow us to send and receive signals between threads.
class Communicate(QObject):
    """Signal holder used to report integer progress values across threads."""
    sig = pyqtSignal(int)  # emitted progress value
# Worker class execute the extraction in background.
class Worker(QObject):
    finished = pyqtSignal()
    intCount = pyqtSignal(int)

    def __init__(self, extraction, communicate=None):
        """Wrap *extraction* so it can run off the UI thread.

        communicate: optional Communicate instance used to emit progress.
        Fixed: the original default `communicate = Communicate()` was
        evaluated once at class-definition time, so every Worker created
        without an explicit argument shared the SAME Communicate object;
        each Worker now gets its own fresh instance.
        """
        super(Worker, self).__init__()
        self.extraction = extraction
        self.com = communicate if communicate is not None else Communicate()

    # Ran the different extractions.
    def run(self):
        """Long-running task: dispatch on the extraction's type string."""
        if (self.extraction.type == "followers"):
            self.extraction.execute_followers(self.com)
        elif (self.extraction.type == "mentions"):
            self.extraction.execute_mentions(self.com)
        elif (self.extraction.type == "followers_weighted"):
            self.extraction.execute_followers_weighted(self.com)
        elif (self.extraction.type == "mentions_weighted"):
            self.extraction.execute_mentions_weighted(self.com)
        elif (self.extraction.type == "Tweets"):
            self.extraction.execute_tweets(self.com)
class OutLog:
    """File-like adapter that appends everything written to it into a
    QTextEdit (optionally in a given color) and echoes it to a wrapped
    stream, so log messages show up in the visual panel."""

    def __init__(self, edit, out=None, color=None):
        self.edit = edit    # text widget receiving the messages
        self.out = out      # optional underlying stream to echo to
        self.color = color  # optional color for the appended text

    def write(self, m):
        recolor = bool(self.color)
        if recolor:
            # stash the widget's current color so it can be restored afterwards
            self.tc = self.edit.textColor()
            self.edit.setTextColor(self.color)
        self.edit.moveCursor(QtGui.QTextCursor.End)
        self.edit.insertPlainText(m)
        if recolor:
            self.edit.setTextColor(self.tc)
        if self.out:
            self.out.write(m)
# Main Window class
class MainWindow(QtWidgets.QMainWindow):
    def __init__(self):
        """Create the main window; no project or OAuth credentials loaded yet."""
        super(MainWindow, self).__init__()
        self.project = None  # no active project selected yet
        self.oauth = None    # no OAuth handler configured yet
        self.setupUi(self)   # build all widgets (defined below in this class)
        self.Config()        # NOTE(review): Config() is not visible in this chunk — confirm it is defined on this class
# Progress bar updating
def updateProgress(self,pro):
self.feedback.update_progress(pro)
#if the progress bar is complete, finish the thread and allow another extraction.
if(pro == 9999):
self.thread.exit()
self.extract.setEnabled(True)
# Used to print the UI components
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(949, 897)
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.list_id = QtWidgets.QGroupBox(self.centralwidget)
self.list_id.setGeometry(QtCore.QRect(40, 630, 411, 51))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.list_id.setFont(font)
self.list_id.setObjectName("list_id")
self.edit_list_id = QtWidgets.QLineEdit(self.list_id)
self.edit_list_id.setGeometry(QtCore.QRect(12, 19, 391, 21))
self.edit_list_id.setObjectName("edit_list_id")
self.network = QtWidgets.QGroupBox(self.centralwidget)
self.network.setGeometry(QtCore.QRect(40, 440, 131, 181))
font = QtGui.QFont()
font.setFamily("Arial")
font.setBold(True)
font.setWeight(75)
self.network.setFont(font)
self.network.setObjectName("network")
self.verticalLayoutWidget = QtWidgets.QWidget(self.network)
self.verticalLayoutWidget.setGeometry(QtCore.QRect(10, 29, 111, 141))
self.verticalLayoutWidget.setObjectName("verticalLayoutWidget")
self.verticalLayout_3 = QtWidgets.QVBoxLayout(self.verticalLayoutWidget)
self.verticalLayout_3.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_3.setObjectName("verticalLayout_3")
self.followers = QtWidgets.QRadioButton(self.verticalLayoutWidget)
self.followers.setChecked(False)
self.followers.setAutoExclusive(False)
self.followers.setObjectName("followers")
self.verticalLayout_3.addWidget(self.followers)
self.mentions = QtWidgets.QRadioButton(self.verticalLayoutWidget)
self.mentions.setObjectName("mentions")
self.verticalLayout_3.addWidget(self.mentions)
self.type_relations = QtWidgets.QGroupBox(self.centralwidget)
self.type_relations.setGeometry(QtCore.QRect(180, 440, 131, 181))
font = QtGui.QFont()
font.setFamily("Arial")
font.setBold(True)
font.setWeight(75)
self.type_relations.setFont(font)
self.type_relations.setObjectName("type_relations")
self.verticalLayoutWidget_2 = QtWidgets.QWidget(self.type_relations)
self.verticalLayoutWidget_2.setGeometry(QtCore.QRect(10, 30, 111, 141))
self.verticalLayoutWidget_2.setObjectName("verticalLayoutWidget_2")
self.verticalLayout_4 = QtWidgets.QVBoxLayout(self.verticalLayoutWidget_2)
self.verticalLayout_4.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_4.setObjectName("verticalLayout_4")
self.simple = QtWidgets.QRadioButton(self.verticalLayoutWidget_2)
self.simple.setChecked(True)
self.simple.setObjectName("simple")
self.verticalLayout_4.addWidget(self.simple)
self.weigthed = QtWidgets.QRadioButton(self.verticalLayoutWidget_2)
self.weigthed.setCheckable(True)
self.weigthed.setChecked(False)
self.weigthed.setObjectName("weigthed")
self.verticalLayout_4.addWidget(self.weigthed)
self.type_weight = QtWidgets.QGroupBox(self.centralwidget)
self.type_weight.setEnabled(True)
self.type_weight.setGeometry(QtCore.QRect(320, 440, 131, 181))
font = QtGui.QFont()
font.setFamily("Arial")
font.setBold(True)
font.setWeight(75)
self.type_weight.setFont(font)
self.type_weight.setObjectName("type_weight")
self.verticalLayoutWidget_3 = QtWidgets.QWidget(self.type_weight)
self.verticalLayoutWidget_3.setGeometry(QtCore.QRect(10, 30, 111, 141))
self.verticalLayoutWidget_3.setObjectName("verticalLayoutWidget_3")
self.verticalLayout_5 = QtWidgets.QVBoxLayout(self.verticalLayoutWidget_3)
self.verticalLayout_5.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_5.setObjectName("verticalLayout_5")
self.weight_mentions = QtWidgets.QCheckBox(self.verticalLayoutWidget_3)
self.weight_mentions.setChecked(False)
self.weight_mentions.setObjectName("weight_mentions")
self.verticalLayout_5.addWidget(self.weight_mentions)
self.weight_retweets = QtWidgets.QCheckBox(self.verticalLayoutWidget_3)
self.weight_retweets.setObjectName("weight_retweets")
self.verticalLayout_5.addWidget(self.weight_retweets)
self.weight_replies = QtWidgets.QCheckBox(self.verticalLayoutWidget_3)
self.weight_replies.setObjectName("weight_replies")
self.verticalLayout_5.addWidget(self.weight_replies)
self.acces_configuration = QtWidgets.QGroupBox(self.centralwidget)
self.acces_configuration.setGeometry(QtCore.QRect(10, 160, 931, 241))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.acces_configuration.setFont(font)
self.acces_configuration.setObjectName("acces_configuration")
self.groupBox_3 = QtWidgets.QGroupBox(self.acces_configuration)
self.groupBox_3.setGeometry(QtCore.QRect(311, 101, 601, 101))
font = QtGui.QFont()
font.setPointSize(8)
self.groupBox_3.setFont(font)
self.groupBox_3.setObjectName("groupBox_3")
self.formLayoutWidget_2 = QtWidgets.QWidget(self.groupBox_3)
self.formLayoutWidget_2.setGeometry(QtCore.QRect(10, 20, 581, 80))
self.formLayoutWidget_2.setObjectName("formLayoutWidget_2")
self.formLayout_2 = QtWidgets.QFormLayout(self.formLayoutWidget_2)
self.formLayout_2.setContentsMargins(0, 0, 0, 0)
self.formLayout_2.setObjectName("formLayout_2")
self.label_3 = QtWidgets.QLabel(self.formLayoutWidget_2)
font = QtGui.QFont()
font.setFamily("Arial")
font.setBold(True)
font.setWeight(75)
self.label_3.setFont(font)
self.label_3.setObjectName("label_3")
self.formLayout_2.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.label_3)
self.label_5 = QtWidgets.QLabel(self.formLayoutWidget_2)
font = QtGui.QFont()
font.setFamily("Arial")
font.setBold(True)
font.setWeight(75)
self.label_5.setFont(font)
self.label_5.setObjectName("label_5")
self.formLayout_2.setWidget(3, QtWidgets.QFormLayout.LabelRole, self.label_5)
self.label_4 = QtWidgets.QLabel(self.formLayoutWidget_2)
font = QtGui.QFont()
font.setFamily("Arial")
font.setBold(True)
font.setWeight(75)
self.label_4.setFont(font)
self.label_4.setObjectName("label_4")
self.formLayout_2.setWidget(1, QtWidgets.QFormLayout.LabelRole, self.label_4)
self.access_token = QtWidgets.QLineEdit(self.formLayoutWidget_2)
self.access_token.setObjectName("access_token")
self.formLayout_2.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.access_token)
self.aceess_secret_token = QtWidgets.QLineEdit(self.formLayoutWidget_2)
self.aceess_secret_token.setObjectName("aceess_secret_token")
self.formLayout_2.setWidget(1, QtWidgets.QFormLayout.FieldRole, self.aceess_secret_token)
self.bearer_token = QtWidgets.QLineEdit(self.formLayoutWidget_2)
self.bearer_token.setObjectName("bearer_token")
self.formLayout_2.setWidget(3, QtWidgets.QFormLayout.FieldRole, self.bearer_token)
self.formLayoutWidget = QtWidgets.QWidget(self.acces_configuration)
self.formLayoutWidget.setGeometry(QtCore.QRect(30, 30, 871, 52))
self.formLayoutWidget.setObjectName("formLayoutWidget")
self.formLayout = QtWidgets.QFormLayout(self.formLayoutWidget)
self.formLayout.setContentsMargins(0, 0, 0, 0)
self.formLayout.setObjectName("formLayout")
self.label = QtWidgets.QLabel(self.formLayoutWidget)
font = QtGui.QFont()
font.setFamily("Arial")
font.setBold(True)
font.setWeight(75)
self.label.setFont(font)
self.label.setObjectName("label")
self.formLayout.setWidget(0, QtWidgets.QFormLayout.LabelRole, self.label)
self.consumer_key = QtWidgets.QLineEdit(self.formLayoutWidget)
self.consumer_key.setObjectName("consumer_key")
self.formLayout.setWidget(0, QtWidgets.QFormLayout.FieldRole, self.consumer_key)
self.label_2 = QtWidgets.QLabel(self.formLayoutWidget)
font = QtGui.QFont()
font.setFamily("Arial")
font.setBold(True)
font.setWeight(75)
self.label_2.setFont(font)
self.label_2.setObjectName("label_2")
self.formLayout.setWidget(1, QtWidgets.QFormLayout.LabelRole, self.label_2)
self.consumer_secret_key = QtWidgets.QLineEdit(self.formLayoutWidget)
self.consumer_secret_key.setObjectName("consumer_secret_key")
self.formLayout.setWidget(1, QtWidgets.QFormLayout.FieldRole, self.consumer_secret_key)
self.new_credentials = QtWidgets.QPushButton(self.acces_configuration)
self.new_credentials.setGeometry(QtCore.QRect(820, 210, 81, 21))
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(False)
font.setWeight(50)
self.new_credentials.setFont(font)
self.new_credentials.setObjectName("new_credentials")
self.type_access = QtWidgets.QGroupBox(self.acces_configuration)
self.type_access.setGeometry(QtCore.QRect(30, 100, 131, 121))
font = QtGui.QFont()
font.setFamily("Arial")
font.setBold(True)
font.setWeight(75)
self.type_access.setFont(font)
self.type_access.setObjectName("type_access")
self.horizontalLayoutWidget = QtWidgets.QWidget(self.type_access)
self.horizontalLayoutWidget.setGeometry(QtCore.QRect(10, 20, 111, 92))
self.horizontalLayoutWidget.setObjectName("horizontalLayoutWidget")
self.verticalLayout = QtWidgets.QVBoxLayout(self.horizontalLayoutWidget)
self.verticalLayout.setContentsMargins(0, 0, 0, 0)
self.verticalLayout.setSpacing(15)
self.verticalLayout.setObjectName("verticalLayout")
self.standard = QtWidgets.QRadioButton(self.horizontalLayoutWidget)
self.standard.setChecked(True)
self.standard.setObjectName("standard")
self.verticalLayout.addWidget(self.standard)
self.academic = QtWidgets.QRadioButton(self.horizontalLayoutWidget)
self.academic.setObjectName("academic")
self.verticalLayout.addWidget(self.academic)
self.premium = QtWidgets.QRadioButton(self.horizontalLayoutWidget)
self.premium.setObjectName("premium")
self.verticalLayout.addWidget(self.premium)
self.type_oauth = QtWidgets.QGroupBox(self.acces_configuration)
self.type_oauth.setGeometry(QtCore.QRect(171, 101, 131, 121))
font = QtGui.QFont()
font.setFamily("Arial")
font.setBold(True)
font.setWeight(75)
self.type_oauth.setFont(font)
self.type_oauth.setObjectName("type_oauth")
self.horizontalLayoutWidget_2 = QtWidgets.QWidget(self.type_oauth)
self.horizontalLayoutWidget_2.setGeometry(QtCore.QRect(10, 20, 111, 91))
self.horizontalLayoutWidget_2.setObjectName("horizontalLayoutWidget_2")
self.verticalLayout_2 = QtWidgets.QVBoxLayout(self.horizontalLayoutWidget_2)
self.verticalLayout_2.setContentsMargins(0, 0, 0, 0)
self.verticalLayout_2.setSpacing(15)
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.oauth1 = QtWidgets.QRadioButton(self.horizontalLayoutWidget_2)
self.oauth1.setChecked(True)
self.oauth1.setObjectName("oauth1")
self.verticalLayout_2.addWidget(self.oauth1)
self.oauth2 = QtWidgets.QRadioButton(self.horizontalLayoutWidget_2)
self.oauth2.setObjectName("oauth2")
self.verticalLayout_2.addWidget(self.oauth2)
self.save_credentials = QtWidgets.QPushButton(self.acces_configuration)
self.save_credentials.setGeometry(QtCore.QRect(730, 210, 81, 21))
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(False)
font.setWeight(50)
self.save_credentials.setFont(font)
self.save_credentials.setObjectName("save_credentials")
self.import_credentials = QtWidgets.QPushButton(self.acces_configuration)
self.import_credentials.setGeometry(QtCore.QRect(630, 210, 91, 21))
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(False)
font.setWeight(50)
self.import_credentials.setFont(font)
self.import_credentials.setObjectName("import_credentials")
self.execution = QtWidgets.QGroupBox(self.centralwidget)
self.execution.setGeometry(QtCore.QRect(480, 710, 461, 131))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.execution.setFont(font)
self.execution.setObjectName("execution")
self.extract = QtWidgets.QPushButton(self.execution)
self.extract.setGeometry(QtCore.QRect(130, 80, 201, 21))
self.extract.setObjectName("extract")
self.network_configuration = QtWidgets.QGroupBox(self.centralwidget)
self.network_configuration.setGeometry(QtCore.QRect(10, 410, 931, 291))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.network_configuration.setFont(font)
self.network_configuration.setObjectName("network_configuration")
self.groupBox = QtWidgets.QGroupBox(self.network_configuration)
self.groupBox.setGeometry(QtCore.QRect(470, 30, 441, 251))
self.groupBox.setTitle("")
self.groupBox.setObjectName("groupBox")
self.tweet_information = QtWidgets.QGroupBox(self.groupBox)
self.tweet_information.setEnabled(True)
self.tweet_information.setGeometry(QtCore.QRect(10, 50, 421, 191))
font = QtGui.QFont()
font.setFamily("Arial")
font.setBold(True)
font.setWeight(75)
self.tweet_information.setFont(font)
self.tweet_information.setObjectName("tweet_information")
self.gridLayoutWidget = QtWidgets.QWidget(self.tweet_information)
self.gridLayoutWidget.setGeometry(QtCore.QRect(10, 20, 401, 161))
self.gridLayoutWidget.setObjectName("gridLayoutWidget")
self.gridLayout = QtWidgets.QGridLayout(self.gridLayoutWidget)
self.gridLayout.setContentsMargins(0, 0, 0, 0)
self.gridLayout.setObjectName("gridLayout")
self.checkBox_2 = QtWidgets.QCheckBox(self.gridLayoutWidget)
self.checkBox_2.setObjectName("checkBox_2")
self.gridLayout.addWidget(self.checkBox_2, 2, 0, 1, 1)
self.checkBox = QtWidgets.QCheckBox(self.gridLayoutWidget)
self.checkBox.setEnabled(True)
self.checkBox.setCheckable(True)
self.checkBox.setObjectName("checkBox")
self.gridLayout.addWidget(self.checkBox, 0, 0, 1, 1)
self.checkBox_1 = QtWidgets.QCheckBox(self.gridLayoutWidget)
self.checkBox_1.setEnabled(True)
self.checkBox_1.setObjectName("checkBox_1")
self.gridLayout.addWidget(self.checkBox_1, 1, 0, 1, 1)
self.checkBox_5 = QtWidgets.QCheckBox(self.gridLayoutWidget)
self.checkBox_5.setObjectName("checkBox_5")
self.gridLayout.addWidget(self.checkBox_5, 5, 0, 1, 1)
self.checkBox_4 = QtWidgets.QCheckBox(self.gridLayoutWidget)
self.checkBox_4.setObjectName("checkBox_4")
self.gridLayout.addWidget(self.checkBox_4, 4, 0, 1, 1)
self.checkBox_3 = QtWidgets.QCheckBox(self.gridLayoutWidget)
self.checkBox_3.setObjectName("checkBox_3")
self.gridLayout.addWidget(self.checkBox_3, 3, 0, 1, 1)
self.checkBox_7 = QtWidgets.QCheckBox(self.gridLayoutWidget)
self.checkBox_7.setObjectName("checkBox_7")
self.gridLayout.addWidget(self.checkBox_7, 6, 0, 1, 1)
self.checkBox_12 = QtWidgets.QCheckBox(self.gridLayoutWidget)
self.checkBox_12.setObjectName("checkBox_12")
self.gridLayout.addWidget(self.checkBox_12, 4, 1, 1, 1)
self.checkBox_6 = QtWidgets.QCheckBox(self.gridLayoutWidget)
self.checkBox_6.setObjectName("checkBox_6")
self.gridLayout.addWidget(self.checkBox_6, 2, 1, 1, 1)
self.checkBox_11 = QtWidgets.QCheckBox(self.gridLayoutWidget)
self.checkBox_11.setObjectName("checkBox_11")
self.gridLayout.addWidget(self.checkBox_11, 3, 1, 1, 1)
self.checkBox_9 = QtWidgets.QCheckBox(self.gridLayoutWidget)
self.checkBox_9.setObjectName("checkBox_9")
self.gridLayout.addWidget(self.checkBox_9, 1, 1, 1, 1)
self.checkBox_8 = QtWidgets.QCheckBox(self.gridLayoutWidget)
self.checkBox_8.setObjectName("checkBox_8")
self.gridLayout.addWidget(self.checkBox_8, 0, 1, 1, 1)
self.checkBox_10 = QtWidgets.QCheckBox(self.gridLayoutWidget)
self.checkBox_10.setObjectName("checkBox_10")
self.gridLayout.addWidget(self.checkBox_10, 5, 1, 1, 1)
self.checkBox_13 = QtWidgets.QCheckBox(self.gridLayoutWidget)
self.checkBox_13.setObjectName("checkBox_13")
self.gridLayout.addWidget(self.checkBox_13, 6, 1, 1, 1)
self.groupBox_2 = QtWidgets.QGroupBox(self.groupBox)
self.groupBox_2.setGeometry(QtCore.QRect(10, 10, 421, 31))
self.groupBox_2.setTitle("")
self.groupBox_2.setObjectName("groupBox_2")
self.tweets = QtWidgets.QRadioButton(self.groupBox_2)
self.tweets.setGeometry(QtCore.QRect(10, 7, 109, 16))
self.tweets.setObjectName("tweets")
self.line = QtWidgets.QFrame(self.network_configuration)
self.line.setGeometry(QtCore.QRect(450, 20, 20, 261))
self.line.setFrameShape(QtWidgets.QFrame.VLine)
self.line.setFrameShadow(QtWidgets.QFrame.Sunken)
self.line.setObjectName("line")
self.project_managment = QtWidgets.QGroupBox(self.centralwidget)
self.project_managment.setGeometry(QtCore.QRect(10, 10, 931, 141))
font = QtGui.QFont()
font.setFamily("Arial")
font.setPointSize(10)
font.setBold(True)
font.setWeight(75)
self.project_managment.setFont(font)
self.project_managment.setObjectName("project_managment")
self.horizontalLayoutWidget_3 = QtWidgets.QWidget(self.project_managment)
self.horizontalLayoutWidget_3.setGeometry(QtCore.QRect(30, 30, 871, 31))
self.horizontalLayoutWidget_3.setObjectName("horizontalLayoutWidget_3")
self.horizontalLayout = QtWidgets.QHBoxLayout(self.horizontalLayoutWidget_3)
self.horizontalLayout.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout.setObjectName("horizontalLayout")
self.label_7 = QtWidgets.QLabel(self.horizontalLayoutWidget_3)
self.label_7.setObjectName("label_7")
self.horizontalLayout.addWidget(self.label_7)
self.name = QtWidgets.QLineEdit(self.horizontalLayoutWidget_3)
self.name.setObjectName("name")
self.horizontalLayout.addWidget(self.name)
self.label_6 = QtWidgets.QLabel(self.horizontalLayoutWidget_3)
self.label_6.setObjectName("label_6")
self.horizontalLayout.addWidget(self.label_6)
self.path = QtWidgets.QLineEdit(self.horizontalLayoutWidget_3)
self.path.setObjectName("path")
self.horizontalLayout.addWidget(self.path)
self.horizontalLayoutWidget_4 = QtWidgets.QWidget(self.project_managment)
self.horizontalLayoutWidget_4.setGeometry(QtCore.QRect(30, 70, 871, 31))
self.horizontalLayoutWidget_4.setObjectName("horizontalLayoutWidget_4")
self.horizontalLayout_2 = QtWidgets.QHBoxLayout(self.horizontalLayoutWidget_4)
self.horizontalLayout_2.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.label_8 = QtWidgets.QLabel(self.horizontalLayoutWidget_4)
self.label_8.setObjectName("label_8")
self.horizontalLayout_2.addWidget(self.label_8)
self.description = QtWidgets.QLineEdit(self.horizontalLayoutWidget_4)
self.description.setObjectName("description")
self.horizontalLayout_2.addWidget(self.description)
self.save_project = QtWidgets.QPushButton(self.project_managment)
self.save_project.setGeometry(QtCore.QRect(730, 110, 81, 21))
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(False)
font.setWeight(50)
self.save_project.setFont(font)
self.save_project.setObjectName("save_project")
self.import_project = QtWidgets.QPushButton(self.project_managment)
self.import_project.setGeometry(QtCore.QRect(630, 110, 91, 21))
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(False)
font.setWeight(50)
self.import_project.setFont(font)
self.import_project.setObjectName("import_project")
self.new_project = QtWidgets.QPushButton(self.project_managment)
self.new_project.setGeometry(QtCore.QRect(820, 110, 81, 21))
font = QtGui.QFont()
font.setPointSize(8)
font.setBold(False)
font.setWeight(50)
self.new_project.setFont(font)
self.new_project.setObjectName("new_project")
self.listView = QtWidgets.QListView(self.centralwidget)
self.listView.setGeometry(QtCore.QRect(0, -40, 1231, 951))
self.listView.setObjectName("listView")
self.groupBox_4 = QtWidgets.QGroupBox(self.centralwidget)
self.groupBox_4.setGeometry(QtCore.QRect(10, 710, 451, 131))
self.groupBox_4.setObjectName("groupBox_4")
self.horizontalLayoutWidget_5 = QtWidgets.QWidget(self.groupBox_4)
self.horizontalLayoutWidget_5.setGeometry(QtCore.QRect(50, 90, 371, 24))
self.horizontalLayoutWidget_5.setObjectName("horizontalLayoutWidget_5")
self.horizontalLayout_3 = QtWidgets.QHBoxLayout(self.horizontalLayoutWidget_5)
self.horizontalLayout_3.setContentsMargins(0, 0, 0, 0)
self.horizontalLayout_3.setObjectName("horizontalLayout_3")
self.startDate_show = QtWidgets.QLineEdit(self.horizontalLayoutWidget_5)
self.startDate_show.setEnabled(False)
self.startDate_show.setAlignment(QtCore.Qt.AlignCenter)
self.startDate_show.setObjectName("startDate_show")
self.horizontalLayout_3.addWidget(self.startDate_show)
self.endDate_show = QtWidgets.QLineEdit(self.horizontalLayoutWidget_5)
self.endDate_show.setEnabled(False)
self.endDate_show.setAlignment(QtCore.Qt.AlignCenter)
self.endDate_show.setObjectName("endDate_show")
self.horizontalLayout_3.addWidget(self.endDate_show)
self.label_9 = QtWidgets.QLabel(self.groupBox_4)
self.label_9.setGeometry(QtCore.QRect(110, 20, 61, 16))
self.label_9.setObjectName("label_9")
self.label_10 = QtWidgets.QLabel(self.groupBox_4)
self.label_10.setGeometry(QtCore.QRect(260, 20, 31, 16))
self.label_10.setObjectName("label_10")
self.dateEdit_end = QtWidgets.QDateEdit(self.groupBox_4)
self.dateEdit_end.setGeometry(QtCore.QRect(110, 40, 110, 22))
self.dateEdit_end.setCalendarPopup(True)
self.dateEdit_end.setObjectName("dateEdit_end")
self.label_11 = QtWidgets.QLabel(self.groupBox_4)
self.label_11.setGeometry(QtCore.QRect(240, 40, 21, 16))
self.label_11.setObjectName("label_11")
self.spinBox = QtWidgets.QSpinBox(self.groupBox_4)
self.spinBox.setGeometry(QtCore.QRect(260, 40, 81, 22))
self.spinBox.setProperty("showGroupSeparator", False)
self.spinBox.setObjectName("spinBox")
self.listView.raise_()
self.project_managment.raise_()
self.execution.raise_()
self.network_configuration.raise_()
self.acces_configuration.raise_()
self.list_id.raise_()
self.network.raise_()
self.type_relations.raise_()
self.type_weight.raise_()
self.groupBox_4.raise_()
MainWindow.setCentralWidget(self.centralwidget)
self.menubar = QtWidgets.QMenuBar(MainWindow)
self.menubar.setGeometry(QtCore.QRect(0, 0, 949, 21))
self.menubar.setObjectName("menubar")
self.menuFile = QtWidgets.QMenu(self.menubar)
self.menuFile.setObjectName("menuFile")
self.menuEdit = QtWidgets.QMenu(self.menubar)
self.menuEdit.setObjectName("menuEdit")
self.menuView = QtWidgets.QMenu(self.menubar)
self.menuView.setObjectName("menuView")
MainWindow.setMenuBar(self.menubar)
self.statusbar = QtWidgets.QStatusBar(MainWindow)
self.statusbar.setObjectName("statusbar")
MainWindow.setStatusBar(self.statusbar)
self.actionOpen_credentials = QtWidgets.QAction(MainWindow)
self.actionOpen_credentials.setObjectName("actionOpen_credentials")
self.actionSave_credentials = QtWidgets.QAction(MainWindow)
self.actionSave_credentials.setObjectName("actionSave_credentials")
self.actionImport_Credentials = QtWidgets.QAction(MainWindow)
self.actionImport_Credentials.setObjectName("actionImport_Credentials")
self.actionSave_credentials_2 = QtWidgets.QAction(MainWindow)
self.actionSave_credentials_2.setObjectName("actionSave_credentials_2")
self.actionClose_Project = QtWidgets.QAction(MainWindow)
self.actionClose_Project.setObjectName("actionClose_Project")
self.menuFile.addAction(self.actionOpen_credentials)
self.menuFile.addAction(self.actionSave_credentials)
self.menuFile.addAction(self.actionImport_Credentials)
self.menuFile.addAction(self.actionSave_credentials_2)
self.menuFile.addAction(self.actionClose_Project)
self.menubar.addAction(self.menuFile.menuAction())
self.menubar.addAction(self.menuEdit.menuAction())
self.menubar.addAction(self.menuView.menuAction())
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
    # Set the translated display text of the UI components.
def retranslateUi(self, MainWindow):
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate("MainWindow", "MainWindow"))
self.list_id.setTitle(_translate("MainWindow", "Twitter List ID"))
self.network.setTitle(_translate("MainWindow", "Network"))
self.followers.setText(_translate("MainWindow", "Followers"))
self.mentions.setText(_translate("MainWindow", "Mentions"))
self.type_relations.setTitle(_translate("MainWindow", "Type of network"))
self.simple.setText(_translate("MainWindow", "No Weighted"))
self.weigthed.setText(_translate("MainWindow", "Weighted"))
self.type_weight.setTitle(_translate("MainWindow", "Type of weight"))
self.weight_mentions.setText(_translate("MainWindow", "Mentions"))
self.weight_retweets.setText(_translate("MainWindow", "Retweets"))
self.weight_replies.setText(_translate("MainWindow", "Replies"))
self.acces_configuration.setTitle(_translate("MainWindow", "Access configuration"))
self.groupBox_3.setTitle(_translate("MainWindow", "Tokens"))
self.label_3.setText(_translate("MainWindow", "Access Token:"))
self.label_5.setText(_translate("MainWindow", "Bearer Token:"))
self.label_4.setText(_translate("MainWindow", "Access Secret Token:"))
self.label.setText(_translate("MainWindow", "Consumer Key:"))
self.label_2.setText(_translate("MainWindow", "Consumer Secret Key:"))
self.new_credentials.setText(_translate("MainWindow", "New"))
self.type_access.setTitle(_translate("MainWindow", "Type Access"))
self.standard.setText(_translate("MainWindow", "Standard"))
self.academic.setText(_translate("MainWindow", "Academic"))
self.premium.setText(_translate("MainWindow", "Premium"))
self.type_oauth.setTitle(_translate("MainWindow", "Type OAuth"))
self.oauth1.setText(_translate("MainWindow", "OAuth1"))
self.oauth2.setText(_translate("MainWindow", "OAuth2"))
self.save_credentials.setText(_translate("MainWindow", "Save"))
self.import_credentials.setText(_translate("MainWindow", "Import"))
self.execution.setTitle(_translate("MainWindow", "Execution"))
self.extract.setText(_translate("MainWindow", "Extract"))
self.network_configuration.setTitle(_translate("MainWindow", "Network configuration"))
self.tweet_information.setTitle(_translate("MainWindow", "Tweet information"))
self.checkBox_2.setText(_translate("MainWindow", "Favorites"))
self.checkBox.setText(_translate("MainWindow", "Author"))
self.checkBox_1.setText(_translate("MainWindow", "Date"))
self.checkBox_5.setText(_translate("MainWindow", "Users mentioned"))
self.checkBox_4.setText(_translate("MainWindow", "Number of mentions"))
self.checkBox_3.setText(_translate("MainWindow", "Retweets"))
self.checkBox_7.setText(_translate("MainWindow", "Text"))
self.checkBox_12.setText(_translate("MainWindow", "Tweet url"))
self.checkBox_6.setText(_translate("MainWindow", "Location"))
self.checkBox_11.setText(_translate("MainWindow", "Urls"))
self.checkBox_9.setText(_translate("MainWindow", "Hashtags"))
self.checkBox_8.setText(_translate("MainWindow", "Sensitive"))
self.checkBox_10.setText(_translate("MainWindow", "User description"))
self.checkBox_13.setText(_translate("MainWindow", "User followers/followees"))
self.tweets.setText(_translate("MainWindow", "Tweets"))
self.project_managment.setTitle(_translate("MainWindow", "Project Managment"))
self.label_7.setText(_translate("MainWindow", "Name:"))
self.label_6.setText(_translate("MainWindow", "Path:"))
self.label_8.setText(_translate("MainWindow", "Description:"))
self.save_project.setText(_translate("MainWindow", "Save"))
self.import_project.setText(_translate("MainWindow", "Import"))
self.new_project.setText(_translate("MainWindow", "New"))
self.groupBox_4.setTitle(_translate("MainWindow", "Date especification"))
self.label_9.setText(_translate("MainWindow", "End Date"))
self.label_10.setText(_translate("MainWindow", "Days"))
self.label_11.setText(_translate("MainWindow", "-"))
self.menuFile.setTitle(_translate("MainWindow", "File"))
self.menuEdit.setTitle(_translate("MainWindow", "Edit"))
self.menuView.setTitle(_translate("MainWindow", "View"))
self.actionOpen_credentials.setText(_translate("MainWindow", "New Project"))
self.actionSave_credentials.setText(_translate("MainWindow", "Open Poject"))
self.actionImport_Credentials.setText(_translate("MainWindow", "Import Credentials"))
self.actionSave_credentials_2.setText(_translate("MainWindow", "Save credentials"))
self.actionClose_Project.setText(_translate("MainWindow", "Close Project"))
# Visual and Logical configurations of the Main Window.
def Config(self):
self.path.setEnabled(False)
self.extract.clicked.connect(lambda: self.extract_data())
self.dateEdit_end.setDate(datetime.now())
self.dateEdit_end.dateChanged.connect(lambda: self.dateshow())
self.spinBox.valueChanged.connect(lambda: self.dateshow())
self.spinBox.setValue(7)
self.spinBox.setMaximum(1820)
self.spinBox.setMinimum(1)
self.setWindowTitle('Extraction Configuration')
self.setWindowIcon(QIcon('network.png'))
self.type_access.setEnabled(False)
self.type_oauth.setEnabled(False)
self.new_project.clicked.connect(lambda: self.new_proj())
self.import_project.clicked.connect(lambda: self.import_created_proj())
self.save_project.clicked.connect(lambda: self.save_edited_proj())
self.new_credentials.clicked.connect(lambda: self.new_cred())
self.import_credentials.clicked.connect(lambda: self.import_cred())
self.save_credentials.clicked.connect(lambda: self.save_edited_cred())
self.followers.clicked.connect(lambda: self.enable_followers())
self.mentions.clicked.connect(lambda: self.enable_mentions())
self.tweets.clicked.connect(lambda: self.enable_tweets())
self.followers.setAutoExclusive(False)
self.followers.setChecked(True)
self.mentions.setAutoExclusive(False)
self.type_weight.setEnabled(False)
self.tweet_information.setEnabled(False)
self.simple.clicked.connect(lambda: self.type_weight.setEnabled(False))
self.weigthed.clicked.connect(lambda: self.check_weigth())
self.extract.setStyleSheet("background-color : rgb(1, 130, 153); color : white; style : outset; border-radius : 6px")
self.import_project.setStyleSheet("background-color : rgb(1, 130, 153); color : white; style : outset; border-radius : 6px")
self.save_project.setStyleSheet("background-color : rgb(1, 130, 153); color : white; style : outset; border-radius : 6px")
self.new_project.setStyleSheet("background-color : rgb(1, 130, 153); color : white; style : outset; border-radius : 6px")
self.import_credentials.setStyleSheet("background-color : rgb(1, 130, 153); color : white; style : outset; border-radius : 6px")
self.save_credentials.setStyleSheet("background-color : rgb(1, 130, 153); color : white; style : outset; border-radius : 6px")
self.new_credentials.setStyleSheet("background-color : rgb(1, 130, 153); color : white; style : outset; border-radius : 6px")
self.communicate_proj = Communicate()
self.communicate = Communicate()
self.communicate_cred = Communicate()
self.communicate_proj.sig[int].connect(self.import_new_proj)
self.communicate.sig[int].connect(self.updateProgress)
self.communicate_cred.sig[int].connect(self.import_new_cred)
# Allow weighted checkbox if followers network is checked.
def check_weigth(self):
if(self.followers.isChecked()):
self.type_weight.setEnabled(True)
# Calculate and show the extraction interval date.
def dateshow(self):
end_date = self.dateEdit_end.dateTime().toPyDateTime().astimezone().isoformat()
start_date = (self.dateEdit_end.dateTime().toPyDateTime().astimezone() - timedelta(days=self.spinBox.value())).isoformat()
self.startDate_show.setText(end_date[:10])
self.endDate_show.setText(start_date[:10])
# Configurations if followers network is checked.
def enable_followers(self):
self.type_relations.setEnabled(True)
self.weigthed.setEnabled(True)
self.type_weight.setEnabled(False)
self.simple.setChecked(True)
self.tweets.setChecked(False)
self.mentions.setChecked(False)
self.tweet_information.setEnabled(False)
# Configurations if mentions network is checked.
def enable_mentions(self):
self.type_relations.setEnabled(True)
self.weigthed.setEnabled(True)
self.type_weight.setEnabled(False)
self.simple.setChecked(True)
self.tweets.setChecked(False)
self.followers.setChecked(False)
self.tweet_information.setEnabled(False)
# Configurations if tweets extraction is checked.
def enable_tweets(self):
self.type_relations.setEnabled(False)
self.followers.setChecked(False)
self.simple.setChecked(True)
self.type_weight.setEnabled(False)
self.mentions.setChecked(False)
self.tweet_information.setEnabled(True)
    # Detect the authentication type of an imported credentials file.
    def detect_aouth(self, filename, message = True):
        """Read a credentials JSON file and activate the matching OAuth mode.

        filename: path to a credentials file containing a "Type" key of
            "oauth1" or "oauth2" plus the matching key/token fields.
        message: when True, show a confirmation dialog after importing.

        Side effects: fills or clears the key/token line edits, rebuilds
        ``self.oauth`` with a project OAuth1/OAuth2 object, and toggles the
        widgets of the other OAuth mode.  Raises KeyError / JSONDecodeError
        on a malformed file (callers such as import_cred handle that).
        """
        with open(filename, 'r') as fp:
            data = json.load(fp)
        if (data["Type"] == "oauth1"):
            # OAuth1: four user-context keys/tokens.
            self.consumer_key.setText(data["consumer_key"])
            self.consumer_secret_key.setText(data["consumer_secret_key"])
            self.access_token.setText(data["access_token"])
            self.aceess_secret_token.setText(data["aceess_secret_token"])
            self.oauth = OAuth1(data["name"], filename, self.consumer_key.text(), self.consumer_secret_key.text(),self.access_token.text(), self.aceess_secret_token.text(), self.communicate)
            # Clear the OAuth2 field and lock its widgets.
            self.bearer_token.clear()
            self.disable_oauth2()
        elif (data["Type"] == "oauth2"):
            # OAuth2: single app-only bearer token.
            self.bearer_token.setText(data["bearer_token"])
            self.oauth = OAuth2(data["name"], filename, self.bearer_token.text())
            # Clear the OAuth1 fields and lock their widgets.
            self.consumer_key.clear()
            self.consumer_secret_key.clear()
            self.access_token.clear()
            self.aceess_secret_token.clear()
            self.disable_oauth1()
        if(message):
            msg = QtWidgets.QMessageBox()
            msg.setIcon(QtWidgets.QMessageBox.Information)
            msg.setText("Imported")
            msg.setInformativeText('Imported successfully')
            msg.setWindowTitle("successfully")
            msg.exec_()
# Configurations if authentication type is oauth1.
def disable_oauth2(self):
self.standard.setChecked(True)
self.oauth1.setChecked(True)
self.spinBox.setEnabled(False)
self.bearer_token.setEnabled(False)
self.consumer_key.setEnabled(True)
self.consumer_secret_key.setEnabled(True)
self.access_token.setEnabled(True)
self.aceess_secret_token.setEnabled(True)
# Configurations if authentication type is oauth2.
def disable_oauth1(self):
self.academic.setChecked(True)
self.oauth2.setChecked(True)
self.spinBox.setEnabled(True)
self.consumer_key.setEnabled(False)
self.consumer_secret_key.setEnabled(False)
self.access_token.setEnabled(False)
self.aceess_secret_token.setEnabled(False)
self.bearer_token.setEnabled(True)
# Configurations if the network extraction is not checked.
def disable_network(self):
self.followers.setChecked(False)
self.mentions.setChecked(False)
self.type_weight.setEnabled(False)
self.type_relations.setEnabled(False)
self.tweet_information.setEnabled(True)
# Configurations if the tweets extraction is not checked.
def disable_tweets(self):
self.tweets.setChecked(False)
self.type_relations.setEnabled(True)
self.tweet_information.setEnabled(False)
# Take the current Project JSON file information and set the last status of them.
    def set_data(self,data):
        """Restore the UI state from a project configuration dictionary.

        data: dict as produced by ``save_edited_proj`` (project identity,
        credentials, network mode and selected tweet attributes).  Missing
        keys are tolerated for the optional sections.  Credentials are only
        restored when none are currently loaded in the UI.
        """
        # Project identity fields.
        if(("name" in data) and ("description" in data) and ("path" in data)):
            self.name.setText(data["name"])
            self.description.setText(data["description"])
            self.path.setText(data["path"])
        # Credentials: restored only when both credential forms are empty.
        if("bearer_token" in data and not self.consumer_key.text() and not self.bearer_token.text()):
            if(data['bearer_token'] != ""):
                # OAuth2 (bearer token) credentials.
                self.bearer_token.setText(data['bearer_token'])
                self.oauth = OAuth2(data["credentials_name"], data["credentials_path"], self.bearer_token.text())
                self.disable_oauth1()
                if(data["type_access"] == "premium"):
                    self.premium.setChecked(True)
                elif(data["type_access"] == "academic"):
                    self.academic.setChecked(True)
            elif(data['consumer_key'] != ""):
                # OAuth1 (consumer key / access token) credentials.
                self.consumer_key.setText(data['consumer_key'])
                self.consumer_secret_key.setText(data['consumer_secret_key'])
                self.access_token.setText(data['access_token'])
                self.aceess_secret_token.setText(data['aceess_secret_token'])
                self.oauth = OAuth1(data["credentials_name"], data["credentials_path"], self.consumer_key.text(), self.consumer_secret_key.text(),self.access_token.text(), self.aceess_secret_token.text(), self.communicate)
                self.disable_oauth2()
        # Network mode and its options.
        if("network" in data):
            if(data['network'] == "followers"):
                self.followers.setChecked(True)
                if (data["type_network"] == "simple"):
                    self.simple.setChecked(True)
                    self.type_weight.setEnabled(False)
                else:
                    # Weighted followers network: restore the weight sources.
                    self.weigthed.setChecked(True)
                    self.type_weight.setEnabled(True)
                    for atributo in data["attributes"]:
                        if (atributo == "mentions"):
                            self.weight_mentions.setChecked(True)
                        if (atributo == "replies"):
                            self.weight_replies.setChecked(True)
                        if (atributo == "retweets"):
                            self.weight_retweets.setChecked(True)
            elif(data["network"] == "mentions"):
                self.followers.setChecked(False)
                self.mentions.setChecked(True)
                if (data["type_network"] == "simple"):
                    self.simple.setChecked(True)
                    self.type_weight.setEnabled(False)
                else:
                    self.weigthed.setChecked(True)
                    self.type_weight.setEnabled(False)
            elif(data["network"] == "tweets"):
                # Tweet extraction: lock network options, restore the
                # selected per-tweet attributes.
                self.followers.setChecked(False)
                self.mentions.setChecked(False)
                self.type_relations.setEnabled(False)
                self.type_weight.setEnabled(False)
                self.tweets.setChecked(True)
                self.tweet_information.setEnabled(True)
                for information in data["tweets_information"]:
                    if (information == "author"):
                        self.checkBox.setChecked(True)
                    if (information == "date"):
                        self.checkBox_1.setChecked(True)
                    if (information == "favorites"):
                        self.checkBox_2.setChecked(True)
                    if (information == "retweets"):
                        self.checkBox_3.setChecked(True)
                    if (information == "number_mentions"):
                        self.checkBox_4.setChecked(True)
                    if (information == "users_mentioned"):
                        self.checkBox_5.setChecked(True)
                    if (information == "location"):
                        self.checkBox_6.setChecked(True)
                    if (information == "text"):
                        self.checkBox_7.setChecked(True)
                    if (information == "sensitive"):
                        self.checkBox_8.setChecked(True)
                    if (information == "hashtag"):
                        self.checkBox_9.setChecked(True)
                    if (information == "urls"):
                        self.checkBox_11.setChecked(True)
                    if (information == "tweet_url"):
                        self.checkBox_12.setChecked(True)
                    if (information == "user_description"):
                        self.checkBox_10.setChecked(True)
                    if (information == "user_followers"):
                        self.checkBox_13.setChecked(True)
        # Twitter list id and the credentials file path for later saves.
        self.edit_list_id.setText(data["list"])
        self.credentials_path = data["credentials_path"]
# Save in a list all the attributes that the user wants to retrieve for each tweet.
def tweet_attributes(self):
lista = []
if (self.checkBox.isChecked()):
lista.append("author")
if (self.checkBox_1.isChecked()):
lista.append("date")
if (self.checkBox_10.isChecked()):
lista.append("user_description")
if (self.checkBox_13.isChecked()):
lista.append("user_followers")
lista.append("user_followees")
if (self.checkBox_2.isChecked()):
lista.append("favorites")
if (self.checkBox_3.isChecked()):
lista.append("retweets")
if (self.checkBox_4.isChecked()):
lista.append("number_mentions")
if (self.checkBox_5.isChecked()):
lista.append("users_mentioned")
if (self.checkBox_6.isChecked()):
lista.append("location")
if (self.checkBox_7.isChecked()):
lista.append("text")
if (self.checkBox_8.isChecked()):
lista.append("sensitive")
if (self.checkBox_9.isChecked()):
lista.append("hashtag")
if (self.checkBox_11.isChecked()):
lista.append("urls")
if (self.checkBox_12.isChecked()):
lista.append("tweet_url")
return lista
# Show the new project window.
def new_proj(self):
self.New_Proj = New_Project(self.communicate_proj)
self.New_Proj.show()
# Used to import a new project.
def import_new_proj(self):
self.project = self.New_Proj.return_project()
if (os.path.isfile(self.project.path + "/config.json")):
with open(self.project.path + "/config.json", 'r') as fp:
data = json.load(fp)
self.set_data(data)
# Show a browser window and allow the user to select an existing Project.
def import_created_proj(self, fileName = None):
if(fileName == None):
fileName, _ = QtWidgets.QFileDialog.getOpenFileName(self, 'Single File', QtCore.QDir.rootPath(), '*.json')
if(len(fileName) != 0):
with open(fileName, 'r') as fp:
data = json.load(fp)
self.project = Project(data["name"], data["description"], data["path"])
self.set_data(data)
msg = QtWidgets.QMessageBox()
msg.setIcon(QtWidgets.QMessageBox.Information)
msg.setText("Imported")
msg.setInformativeText('Imported successfully')
msg.setWindowTitle("successfully")
msg.exec_()
else:
with open(fileName, 'r') as fp:
data = json.load(fp)
self.project = Project(data["name"], data["description"], data["path"])
self.set_data(data)
# Save the current Project.
def save_edited_proj(self, message = True):
if(self.project != None):
dictionary = {'name': self.name.text(),
'description': self.description.text(),
'path': self.path.text(),
'consumer_key': self.consumer_key.text(),
'consumer_secret_key': self.consumer_secret_key.text(),
'access_token': self.access_token.text(),
'aceess_secret_token': self.aceess_secret_token.text(),
'bearer_token': self.bearer_token.text(),
'list': self.edit_list_id.text()}
if(self.oauth != None):
dictionary['credentials_name'] = self.oauth.name
dictionary['credentials_path'] = self.oauth.path
if(self.standard.isChecked()):
dictionary['type_access'] = "standard"
elif(self.academic.isChecked()):
dictionary['type_access'] = "academic"
elif(self.premium.isChecked()):
dictionary['type_access'] = "premium"
if(self.followers.isChecked()):
dictionary['network'] = "followers"
elif(self.mentions.isChecked()):
dictionary['network'] = "mentions"
elif (self.tweets.isChecked()):
dictionary['network'] = "tweets"
lista = self.tweet_attributes()
dictionary['tweets_information'] = lista
if(self.simple.isChecked()):
dictionary['type_network'] = "simple"
elif(self.weigthed.isChecked()):
dictionary['type_network'] = "weighted"
lista = []
if(self.weight_mentions.isChecked()):
lista.append("mentions")
if(self.weight_replies.isChecked()):
lista.append("replies")
if(self.weight_retweets.isChecked()):
lista.append("retweets")
dictionary['attributes'] = lista
aux = self.project.path + '/' + 'config.json'
try:
with open(aux, 'w') as fp:
json.dump(dictionary, fp)
if(message):
msg = QtWidgets.QMessageBox()
msg.setIcon(QtWidgets.QMessageBox.Information)
msg.setText("Save Correct")
msg.setInformativeText('Saved successfully')
msg.setWindowTitle("Saved")
msg.exec_()
except:
print("failed")
# Show the new credentials window.
def new_cred(self):
self.New_Cred = New_Credentials(self.communicate_cred)
self.New_Cred.show()
    # Import the credentials file created in the New Credentials dialog.
def import_new_cred(self):
filename = self.New_Cred.ret_filename()
self.detect_aouth(filename, False)
# Show a browser window and allow the user to select an existing credentials.
def import_cred(self):
if(self.project != None):
try:
fileName, _ = QtWidgets.QFileDialog.getOpenFileName(self, 'Single File', QtCore.QDir.rootPath() , '*.json')
if(len(fileName) != 0):
self.detect_aouth(fileName)
except:
pass
    # Save the current credentials.
    def save_edited_cred(self):
        """Persist the credential fields from the UI back to the loaded file.

        Serializes the widgets into a dict keyed by credential type
        (oauth1 vs oauth2), writes it as JSON over the original file, and
        replaces self.oauth with a freshly constructed OAuth object so the
        in-memory state matches what was saved.  Shows an information box
        on success and a warning box on any failure.
        """
        if (self.oauth != None):
            if(self.oauth.id == 1):
                # id == 1 corresponds to OAuth1 (consumer + access tokens).
                # NOTE: the 'aceess_secret_token' key is misspelled but is
                # the on-disk format already read elsewhere -- do not "fix"
                # it without migrating existing files.
                dictionary = {'Type': 'oauth1',
                              'name': self.oauth.name,
                              'path': self.oauth.path,
                              'consumer_key': self.consumer_key.text(),
                              'consumer_secret_key': self.consumer_secret_key.text(),
                              'access_token': self.access_token.text(),
                              'aceess_secret_token': self.aceess_secret_token.text()}
                aux = OAuth1(self.oauth.name, self.oauth.path, self.consumer_key.text(),self.consumer_secret_key.text(), self.access_token.text(), self.aceess_secret_token.text(), self.communicate)
            else:
                # Any other id is treated as OAuth2 (bearer token only).
                dictionary = {'Type': 'oauth2',
                              'name': self.oauth.name,
                              'path': self.oauth.path,
                              'bearer_token': self.bearer_token.text()}
                aux = OAuth2(self.oauth.name, self.oauth.path, self.bearer_token.text())
            try:
                # Overwrite the credentials file in place.
                with open(self.oauth.path, 'w') as fp:
                    json.dump(dictionary, fp)
                # Only swap the live credentials object after a successful write.
                self.oauth = aux
                msg = QtWidgets.QMessageBox()
                msg.setIcon(QtWidgets.QMessageBox.Information)
                msg.setText("Save Correct")
                msg.setInformativeText('Credentials saved successfully')
                msg.setWindowTitle("Saved")
                msg.exec_()
            except:
                # NOTE(review): bare except hides the actual error (I/O,
                # permissions, ...); consider narrowing to OSError and
                # logging the exception.
                msg = QtWidgets.QMessageBox()
                msg.setIcon(QtWidgets.QMessageBox.Warning)
                msg.setText("Error")
                msg.setInformativeText('Please try again')
                msg.setWindowTitle("Error")
                msg.exec_()
# Extraction data main function
def extract_data(self):
# Check if all the sections are completed and the credentials are correct, in other case, show an error message.
if(self.project != None and self.oauth != None and self.edit_list_id.text() and self.oauth.check_credentials()):
self.feedback = Feedback(self.communicate_proj)
self.feedback.show() # Show a feedback window.
list = List(self.edit_list_id.text())
self.save_edited_proj(False) # Save the las status of the project
self.extract.clicked.connect(lambda: self.extract.setEnabled(False)) # Disable extract button until the extraction finish.
if (self.followers.isChecked() and self.simple.isChecked()): # Execute followers no weighted network.
# Create a new extraction
if (self.dateEdit_end.date().toPyDate() == date.today()):
end_date = (datetime.combine((self.dateEdit_end.date().toPyDate()),
(time.fromisoformat('23:59:00'))).astimezone() - timedelta(
days=1)).isoformat()
else:
end_date = datetime.combine((self.dateEdit_end.date().toPyDate()),
(time.fromisoformat('23:59:00'))).astimezone().isoformat()
start_date = (datetime.combine((self.dateEdit_end.date().toPyDate()),
(time.fromisoformat('00:00:01'))).astimezone() - timedelta(
days=self.spinBox.value())).isoformat()
extraction = Unweighted("followers",end_date, start_date, list,self.oauth, self.project.path)
self.project.add_extraction(extraction)
# Create a new worker thread.
self.thread = QThread()
self.worker = Worker(extraction, self.communicate)
self.worker.moveToThread(self.thread)
self.thread.started.connect(self.worker.run)
self.thread.start()
elif(self.mentions.isChecked() and self.simple.isChecked()): # Execute mentions no weighted network.
# Create a new extraction
if (self.dateEdit_end.date().toPyDate() == date.today()):
end_date = (datetime.combine((self.dateEdit_end.date().toPyDate()),
(time.fromisoformat('23:59:00'))).astimezone() - timedelta(
days=1)).isoformat()
else:
end_date = datetime.combine((self.dateEdit_end.date().toPyDate()),
(time.fromisoformat('23:59:00'))).astimezone().isoformat()
start_date = (datetime.combine((self.dateEdit_end.date().toPyDate()),
(time.fromisoformat('00:00:01'))).astimezone() - timedelta(
days=self.spinBox.value())).isoformat()
extraction = Unweighted("mentions",end_date, start_date, list, self.oauth, self.project.path)
self.project.add_extraction(extraction)
# Create a new worker thread.
self.thread = QThread()
self.worker = Worker(extraction, self.communicate)
self.worker.moveToThread(self.thread)
self.thread.started.connect(self.worker.run)
self.thread.start()
elif(self.followers.isChecked() and self.weigthed.isChecked()): # Execute followers weighted network.
# Set type of weight
type_weight = []
if(self.weight_mentions.isChecked()):
type_weight.append("M")
if(self.weight_replies.isChecked()):
type_weight.append("RP")
if (self.weight_retweets.isChecked()):
type_weight.append("RT")
# Set the date interval
if (self.dateEdit_end.date().toPyDate() == date.today()):
end_date = (datetime.combine((self.dateEdit_end.date().toPyDate()),
(time.fromisoformat('23:59:00'))).astimezone() - timedelta(
days=1)).isoformat()
else:
end_date = datetime.combine((self.dateEdit_end.date().toPyDate()),
(time.fromisoformat('23:59:00'))).astimezone().isoformat()
start_date = (datetime.combine((self.dateEdit_end.date().toPyDate()),
(time.fromisoformat('00:00:01'))).astimezone() - timedelta(
days=self.spinBox.value())).isoformat()
# Create a new extraction
extraction = Weighted("followers_weighted", end_date, start_date, list, self.oauth, self.project.path, type_weight)
self.project.add_extraction(extraction)
# Create a new worker thread.
self.thread = QThread()
self.worker = Worker(extraction, self.communicate)
self.worker.moveToThread(self.thread)
self.thread.started.connect(self.worker.run)
self.thread.start()
elif (self.mentions.isChecked() and self.weigthed.isChecked()): # Execute mentions weighted network.
type_weight = []
# Set the date interval
if (self.dateEdit_end.date().toPyDate() == date.today()):
end_date = (datetime.combine((self.dateEdit_end.date().toPyDate()),
(time.fromisoformat('23:59:00'))).astimezone() - timedelta(
days=1)).isoformat()
else:
end_date = datetime.combine((self.dateEdit_end.date().toPyDate()),
(time.fromisoformat('23:59:00'))).astimezone().isoformat()
start_date = (datetime.combine((self.dateEdit_end.date().toPyDate()),
(time.fromisoformat('00:00:01'))).astimezone() - timedelta(
days=self.spinBox.value())).isoformat()
# Create a new extraction
extraction = Weighted("mentions_weighted", end_date, start_date, list, self.oauth, self.project.path, type_weight)
self.project.add_extraction(extraction)
# Create a new worker thread.
self.thread = QThread()
self.worker = Worker(extraction, self.communicate)
self.worker.moveToThread(self.thread)
self.thread.started.connect(self.worker.run)
self.thread.start()
elif (self.tweets.isChecked()): # Execute tweets extraction.
# Set tweet atributes to retrieve.
type_attributes = self.tweet_attributes()
# Set the date interval
#end_date = self.dateEdit_end.dateTime().toPyDateTime().astimezone().isoformat()
if(self.dateEdit_end.date().toPyDate() == date.today()):
end_date = (datetime.combine((self.dateEdit_end.date().toPyDate()),(time.fromisoformat('23:59:00'))).astimezone() - timedelta(days=1)).isoformat()
else:
end_date = datetime.combine((self.dateEdit_end.date().toPyDate()),(time.fromisoformat('23:59:00'))).astimezone().isoformat()
start_date = (datetime.combine((self.dateEdit_end.date().toPyDate()),(time.fromisoformat('00:00:01'))).astimezone() - timedelta(days=self.spinBox.value())).isoformat()
# Create a new extraction
extraction = Tweets("Tweets", end_date, start_date, list, self.oauth, self.project.path, type_attributes)
self.project.add_extraction(extraction)
# Create a new worker thread.
self.thread = QThread()
self.worker = Worker(extraction, self.communicate)
self.worker.moveToThread(self.thread)
self.thread.started.connect(self.worker.run)
self.thread.start()
else:
# Show an error message
msg = QtWidgets.QMessageBox()
msg.setIcon(QtWidgets.QMessageBox.Warning)
msg.setText("Error")
msg.setInformativeText('Please introduce all the values')
msg.setWindowTitle("Error")
msg.exec_() | [
"noreply@github.com"
] | noreply@github.com |
64511342517f2c7c03f1974b08febe0281fca0ef | cd673d9f3c2d0169381478f12661402f5d9b07b9 | /scripts/clamsmtp-action.py | ab110007d1ecd01d92ae2e76de3913abbc705436 | [] | no_license | jamespo/commandmisc | 86316740a63a70beaa6ee739e05a5f514d973e8f | d0f3f3b8562172c88eed484a0e60a8fa239ed059 | refs/heads/master | 2023-09-01T18:07:07.783622 | 2023-08-23T09:08:41 | 2023-08-23T09:08:41 | 9,041,431 | 6 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,483 | py | #!/usr/bin/env python
# clamsmtp-action.py - VirusAction script for clamsmtp
# based on smtpsend.py
import smtplib
from optparse import OptionParser
import sys
import os
from email.mime.text import MIMEText
def send_mail():
    """Mail a virus notification assembled from clamsmtp's environment.

    Command-line flags select recipient (-t), sender (-f), subject (-s)
    and SMTP server (-x); the message body is built from the SENDER,
    RECIPIENTS, VIRUS, CLIENT and EMAIL environment variables set by
    clamsmtp's VirusAction hook.
    """
    parser = OptionParser()
    parser.add_option("-t", help="to", action="store", dest="to")
    parser.add_option("-f", help="from", action="store", dest="fromad",
                      default="clamsmtp")
    parser.add_option("-s", help="subject", action="store", dest="subject",
                      default="VIRUS found")
    parser.add_option("-x", help="SMTP server", action="store", dest="server",
                      default="localhost")
    options, _ = parser.parse_args()
    # Each paragraph is followed by an empty string so the joined body
    # keeps a blank separator line, exactly as before.
    lines = ["An email virus has been blocked", ""]
    lines += ["The supposed sender was: %s" % os.environ.get('SENDER'), ""]
    lines += ["The recipient(s) were: %s" % os.environ.get('RECIPIENTS'), ""]
    lines += ["Virus name is: %s" % os.environ.get('VIRUS'), ""]
    lines += ["Remote Client IP is: %s" % os.environ.get('CLIENT', 'UNKNOWN'), ""]
    lines += ["Quarantine file saved in: %s" % os.environ.get('EMAIL', 'NOT SAVED'), ""]
    message = MIMEText("\n".join(lines))
    message['Subject'] = options.subject
    message['From'] = options.fromad
    message['To'] = options.to
    smtp = smtplib.SMTP(options.server)
    smtp.sendmail(options.fromad, [options.to], message.as_string())
    smtp.quit()
if __name__ == '__main__':
    send_mail()
| [
"jamespo@gmail.com"
] | jamespo@gmail.com |
6d80c5e6129e225209760595077355c9ae9cdaac | 38c10c01007624cd2056884f25e0d6ab85442194 | /chrome/chrome_dll_bundle.gypi | 2a7c412d6691d2476d794c627214953543ac1abe | [
"BSD-3-Clause"
] | permissive | zenoalbisser/chromium | 6ecf37b6c030c84f1b26282bc4ef95769c62a9b2 | e71f21b9b4b9b839f5093301974a45545dad2691 | refs/heads/master | 2022-12-25T14:23:18.568575 | 2016-07-14T21:49:52 | 2016-07-23T08:02:51 | 63,980,627 | 0 | 2 | BSD-3-Clause | 2022-12-12T12:43:41 | 2016-07-22T20:14:04 | null | UTF-8 | Python | false | false | 9,533 | gypi | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# This file contains resources for the main Mac chromium bundle.
{
# The main browser executable's name is <(mac_product_name).
# Certain things will get confused if two modules in the
# executable share the same name, so append " Framework" to the
# product_name used for the framework. This will result in
# a name like "Chromium Framework.framework".
'product_name': '<(mac_product_name) Framework',
'mac_bundle': 1,
'xcode_settings': {
'CHROMIUM_BUNDLE_ID': '<(mac_bundle_id)',
# The dylib versions are of the form a[.b[.c]], where a is a
# 16-bit unsigned integer, and b and c are 8-bit unsigned
# integers. Any missing component is taken to be 0. The
# best mapping from product version numbers into this scheme
# is to just use a=BUILD, b=(PATCH/256), c=(PATCH%256). There
# is no ambiguity in this scheme because the build and patch
# numbers are guaranteed unique even across distinct major
# and minor version numbers. These settings correspond to
# -compatibility_version and -current_version.
'DYLIB_COMPATIBILITY_VERSION': '<(version_mac_dylib)',
'DYLIB_CURRENT_VERSION': '<(version_mac_dylib)',
# The framework is placed within the .app's versioned
# directory. DYLIB_INSTALL_NAME_BASE and
# LD_DYLIB_INSTALL_NAME affect -install_name.
'DYLIB_INSTALL_NAME_BASE':
'@executable_path/../Versions/<(version_full)',
# See /build/mac/copy_framework_unversioned.sh for
# information on LD_DYLIB_INSTALL_NAME.
'LD_DYLIB_INSTALL_NAME':
'$(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(WRAPPER_NAME)/$(PRODUCT_NAME)',
'INFOPLIST_FILE': 'app/framework-Info.plist',
},
'includes': [
'chrome_nibs.gypi',
],
# TODO(mark): Come up with a fancier way to do this. It should
# only be necessary to list framework-Info.plist once, not the
# three times it is listed here.
'mac_bundle_resources': [
# This image is used to badge the lock icon in the
# authentication dialogs, such as those used for installation
# from disk image and Keystone promotion (if so enabled). It
# needs to exist as a file on disk and not just something in a
# resource bundle because that's the interface that
# Authorization Services uses. Also, Authorization Services
# can't deal with .icns files.
'app/theme/default_100_percent/<(theme_dir_name)/product_logo_32.png',
'app/framework-Info.plist',
'<@(mac_all_xibs)',
'app/theme/find_next_Template.pdf',
'app/theme/find_prev_Template.pdf',
'app/theme/menu_overflow_down.pdf',
'app/theme/menu_overflow_up.pdf',
'browser/mac/install.sh',
'<(SHARED_INTERMEDIATE_DIR)/repack/chrome_100_percent.pak',
'<(SHARED_INTERMEDIATE_DIR)/repack/resources.pak',
'<!@pymod_do_main(repack_locales -o -p <(OS) -g <(grit_out_dir) -s <(SHARED_INTERMEDIATE_DIR) -x <(SHARED_INTERMEDIATE_DIR) <(locales))',
# Note: pseudo_locales are generated via the packed_resources
# dependency but not copied to the final target. See
# common.gypi for more info.
],
'mac_bundle_resources!': [
'app/framework-Info.plist',
],
'dependencies': [
'app_mode_app',
# Bring in pdfsqueeze and run it on all pdfs
'../build/temp_gyp/pdfsqueeze.gyp:pdfsqueeze',
'../crypto/crypto.gyp:crypto',
# On Mac, Flash gets put into the framework, so we need this
# dependency here. flash_player.gyp will copy the Flash bundle
# into PRODUCT_DIR.
'../third_party/adobe/flash/flash_player.gyp:flapper_binaries',
'../third_party/crashpad/crashpad/handler/handler.gyp:crashpad_handler',
'../third_party/widevine/cdm/widevine_cdm.gyp:widevinecdmadapter',
'chrome_resources.gyp:packed_extra_resources',
'chrome_resources.gyp:packed_resources',
],
'rules': [
{
'rule_name': 'pdfsqueeze',
'extension': 'pdf',
'inputs': [
'<(PRODUCT_DIR)/pdfsqueeze',
],
'outputs': [
'<(INTERMEDIATE_DIR)/pdfsqueeze/<(RULE_INPUT_ROOT).pdf',
],
'action': ['<(PRODUCT_DIR)/pdfsqueeze',
'<(RULE_INPUT_PATH)', '<@(_outputs)'],
'message': 'Running pdfsqueeze on <(RULE_INPUT_PATH)',
},
],
'variables': {
'theme_dir_name': '<(branding_path_component)',
},
'postbuilds': [
{
# Modify the Info.plist as needed. The script explains why
# this is needed. This is also done in the chrome target.
# The framework needs the Breakpad keys if this feature is
# enabled. It does not need the Keystone keys; these always
# come from the outer application bundle. The framework
# doesn't currently use the SCM keys for anything,
# but this seems like a really good place to store them.
'postbuild_name': 'Tweak Info.plist',
'action': ['<(tweak_info_plist_path)',
'--breakpad=<(mac_breakpad_compiled_in)',
'--breakpad_uploads=<(mac_breakpad_uploads)',
'--keystone=0',
'--scm=1',
'--branding=<(branding)'],
},
{
'postbuild_name': 'Symlink Libraries',
'action': [
'ln',
'-fns',
'Versions/Current/Libraries',
'${BUILT_PRODUCTS_DIR}/${WRAPPER_NAME}/Libraries'
],
},
],
'copies': [
{
'destination': '<(PRODUCT_DIR)/$(CONTENTS_FOLDER_PATH)/Libraries',
'files': [
'<(PRODUCT_DIR)/exif.so',
],
},
{
'destination': '<(PRODUCT_DIR)/$(CONTENTS_FOLDER_PATH)/Helpers',
'files': [
'<(PRODUCT_DIR)/crashpad_handler',
],
},
{
'destination': '<(PRODUCT_DIR)/$(CONTENTS_FOLDER_PATH)/Internet Plug-Ins',
'files': [],
'conditions': [
['disable_nacl!=1', {
'files': [
'<(PRODUCT_DIR)/nacl_irt_x86_64.nexe',
],
}],
],
},
{
'destination': '<(PRODUCT_DIR)/$(CONTENTS_FOLDER_PATH)/Internet Plug-Ins/PepperFlash',
'files': [],
'conditions': [
['branding == "Chrome"', {
'files': [
'<(PRODUCT_DIR)/PepperFlash/PepperFlashPlayer.plugin',
],
}],
],
},
{
# This file is used by the component installer.
# It is not a complete plugin on its own.
'destination': '<(PRODUCT_DIR)/$(CONTENTS_FOLDER_PATH)/Internet Plug-Ins/',
'files': [],
'conditions': [
['branding == "Chrome"', {
'files': [
'<(PRODUCT_DIR)/widevinecdmadapter.plugin',
],
}],
],
},
{
# Copy of resources used by tests.
'destination': '<(PRODUCT_DIR)',
'files': [
'<(SHARED_INTERMEDIATE_DIR)/repack/resources.pak'
],
},
{
# Copy of resources used by tests.
'destination': '<(PRODUCT_DIR)/pseudo_locales',
'files': [
'<(SHARED_INTERMEDIATE_DIR)/<(pseudo_locales).pak'
],
},
{
'destination': '<(PRODUCT_DIR)/$(CONTENTS_FOLDER_PATH)/Resources',
'files': [
# Loader bundle for platform apps.
'<(PRODUCT_DIR)/app_mode_loader.app',
],
},
],
'conditions': [
['branding=="Chrome"', {
'copies': [
{
# This location is for the Mac build. Note that the
# copying of these files for Windows and Linux is handled
# in chrome.gyp, as Mac needs to be dropped inside the
# framework.
'destination':
'<(PRODUCT_DIR)/$(CONTENTS_FOLDER_PATH)/Default Apps',
'files': ['<@(default_apps_list)'],
},
],
}],
['mac_breakpad==1', {
'variables': {
# A real .dSYM is needed for dump_syms to operate on.
'mac_real_dsym': 1,
},
}],
['mac_keystone==1', {
'mac_bundle_resources': [
'browser/mac/keystone_promote_preflight.sh',
'browser/mac/keystone_promote_postflight.sh',
],
'postbuilds': [
{
'postbuild_name': 'Copy KeystoneRegistration.framework',
'action': [
'../build/mac/copy_framework_unversioned.sh',
'-I',
'../third_party/googlemac/Releases/Keystone/KeystoneRegistration.framework',
'${BUILT_PRODUCTS_DIR}/${CONTENTS_FOLDER_PATH}/Frameworks',
],
},
{
'postbuild_name': 'Symlink Frameworks',
'action': [
'ln',
'-fns',
'Versions/Current/Frameworks',
'${BUILT_PRODUCTS_DIR}/${WRAPPER_NAME}/Frameworks'
],
},
],
}], # mac_keystone
['debug_devtools==1', {
'postbuilds': [{
'postbuild_name': 'Copy inspector files',
'action': [
'ln',
'-fs',
'${BUILT_PRODUCTS_DIR}/resources/inspector',
'${BUILT_PRODUCTS_DIR}/${CONTENTS_FOLDER_PATH}/Resources',
],
}],
}],
['enable_hidpi==1', {
'mac_bundle_resources': [
'<(SHARED_INTERMEDIATE_DIR)/repack/chrome_200_percent.pak',
],
}],
['icu_use_data_file_flag==1', {
'mac_bundle_resources': [
'<(PRODUCT_DIR)/icudtl.dat',
],
}],
['v8_use_external_startup_data==1', {
'mac_bundle_resources': [
'<(PRODUCT_DIR)/natives_blob.bin',
'<(PRODUCT_DIR)/snapshot_blob.bin',
],
}],
], # conditions
}
| [
"zeno.albisser@hemispherian.com"
] | zeno.albisser@hemispherian.com |
3566933b344b6d9ec731005106a6c955325df31e | da7d57fc316b7d54663f59103159610d4d50c051 | /pangram.py | 56dad673ec77786017ba60a15caa8833ece19373 | [] | no_license | rmdes/pthw | b2c65e762d2b95d5da91af203ffef67c7d5768b7 | 000842ca08db0487850a4a4f8b60151aaf6fee61 | refs/heads/main | 2023-09-04T10:23:28.237087 | 2021-11-04T08:44:16 | 2021-11-04T09:05:46 | 424,525,697 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 516 | py | import string
def isPangram(str1, alphabet=string.ascii_lowercase):
    """Print "Pangram!" if *str1* contains every letter of *alphabet*,
    otherwise print "Not a pangram".

    The original built a per-letter dict and indexed it with every
    character of the input, so any character outside the alphabet other
    than a space (punctuation, digits) raised KeyError; characters not in
    the alphabet are now simply ignored.  It also shadowed the builtin
    ``dict``.
    """
    # Letters of the alphabet actually seen in the (lowercased) input.
    seen = {c for c in str1.lower() if c in alphabet}
    if seen >= set(alphabet):
        print("Pangram!")
    else:
        print("Not a pangram")
pangram = "The quick brown fox jumps over the lazy dog"
isPangram(pangram)
print(pangram)
"rick@armada.digital"
] | rick@armada.digital |
a2a6057ec3be0d2b9ce1d28725370142f3ac55c8 | 8276834724f8672b7fea13fb8c6d717763091506 | /fmget.py | 4ea9f29bce41ad3cc85be265010a734d48972a7e | [] | no_license | yijiemkii/Pyquant | 782876ac4e9ee314c6cfc971c30e4faf1d282b6b | ed6b5b2c73fd9b56730dca54be002c2ee3b44aa6 | refs/heads/main | 2023-07-04T03:41:41.408707 | 2021-08-14T01:27:01 | 2021-08-14T01:27:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,690 | py | import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import datetime as dt
from datetime import timedelta
from pandas.api.types import is_string_dtype
from pandas.api.types import is_numeric_dtype
import yfinance as yf
import time
import requests, pandas, lxml
from lxml import html
from fredapi import Fred
import pandas as pd
import quandl
"""
.. module:: fmget.py
:Python version 3.7 or greater
:synopsis: functions to get and manage financial and economic data from various APIs
.. moduleauthor:: Alberto Gutierrez <aljgutier@yahoo.com>
"""
# Yahoo price history scraper
# https://medium.com/c%C3%B3digo-ecuador/how-to-scrape-yahoo-stock-price-history-with-python-b3612a64bdc6
# see fmscraper ipynb notebook for example
# This is an example in case we need a scraper in the future ... still requires enhancement
# hardwired to 100 day download
def format_date(date_datetime):
    """Convert a datetime into the epoch-seconds string Yahoo's URLs expect.

    Uses the local timezone via time.mktime, matching the original behaviour.
    """
    epoch_seconds = int(time.mktime(date_datetime.timetuple()))
    return str(epoch_seconds)
def subdomain(symbol, start, end, filter='history'):
    """Build the Yahoo Finance history-page path for *symbol*.

    *start* and *end* are epoch-second strings (see format_date); the
    interval and frequency are fixed at daily.
    """
    return (f"/quote/{symbol}/history?period1={start}&period2={end}"
            f"&interval=1d&filter={filter}&frequency=1d")
def header_function(subdomain):
    """Build browser-like request headers for the given history-page path.

    Mimics a Chrome-on-Windows visit so Yahoo serves the full page rather
    than a bot response; *subdomain* becomes the ":path" pseudo-header.
    """
    return {"authority": "finance.yahoo.com",
            "method": "GET",
            "path": subdomain,
            "scheme": "https",
            "accept": "text/html",
            "accept-encoding": "gzip, deflate, br",
            "accept-language": "en-US,en;q=0.9",
            "cache-control": "no-cache",
            "cookie": "Cookie:identifier",
            "dnt": "1",
            "pragma": "no-cache",
            "sec-fetch-mode": "navigate",
            "sec-fetch-site": "same-origin",
            "sec-fetch-user": "?1",
            "upgrade-insecure-requests": "1",
            "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64)"}
def scrape_page(url, header, max_rows=100):
    """Scrape the leading rows of the Yahoo price-history table at *url*.

    :param url: full history-page URL (base URL + subdomain()).
    :param header: request headers, as built by header_function().
    :param max_rows: number of leading rows to keep.  The page renders 100
        rows per load, which was previously hard-coded; it is now a
        backward-compatible parameter (the file's own TODO noted the
        hard-wired 100-day limit).
    :return: DataFrame with Date/Open/High/Low/Close/Adj Close/Volume.
    """
    page = requests.get(url, headers=header)
    element_html = html.fromstring(page.content)
    table = element_html.xpath('//table')
    table_tree = lxml.etree.tostring(table[0], method='xml')
    frame = pandas.read_html(table_tree)[0]
    # .loc slicing is end-inclusive, so max_rows rows means 0 .. max_rows-1.
    frame = frame.loc[0:max_rows - 1]
    # BUGFIX: the original labelled the column 'Adj CLose'.
    frame.columns = ['Date', 'Open', 'High', 'Low', 'Close', 'Adj Close', 'Volume']
    return frame
#if __name__ == '__main__':
# symbol = 'BB'
# dt_start = dt.datetime.today() - timedelta(days=365)
# dt_end = dt.datetime.today()
# start = format_date(dt_start)
# end = format_date(dt_end)
# sub = subdomain(symbol, start, end)
# header = header_function(sub)
# base_url = 'https://finance.yahoo.com'
# url = base_url + sub
# price_history = scrape_page(url, header)
def fred_getappend(Series, start, end, df='', API_KEY_FRED='', save=False, savedir='.'):
    """
    Fetch a FRED series and optionally append it to an existing frame.

    Args:
        Series (str): name of the FRED series (e.g. 'T10Y3M').
        start (str): observation start, "YYYY-MM-DD".
        end (str): observation end, "YYYY-MM-DD".
        df (DataFrame): existing frame to append to; leave at the default
            to start a fresh frame.
        API_KEY_FRED (str): FRED API key.
        save (bool): if True, write the result to
            "<savedir>/<Series>_<first>_to_<last>.csv".
        savedir (str): directory for the saved CSV (default '.').

    Returns:
        DataFrame indexed by date with one column named after Series.

    Notes:
        Uses the fredapi package (https://pypi.org/project/fredapi/),
        pip install fredapi.
    """
    fred = Fred(api_key=API_KEY_FRED)
    dfseries = pd.DataFrame(fred.get_series(Series, observation_start=start,
                                            observation_end=end))
    dfseries.columns = [Series]
    # Append to the caller's frame when a non-empty DataFrame was supplied.
    # BUGFIX: the original tested "if ~df.empty:", which is bitwise NOT on
    # a bool (always truthy) and raised AttributeError for the '' default.
    if isinstance(df, pd.DataFrame) and not df.empty:
        dfseries = pd.concat([df, dfseries])
        # Keep the first occurrence of any date present in both frames.
        dfseries = dfseries[~dfseries.index.duplicated(keep='first')]
        dfseries.sort_index(inplace=True, ascending=True)
    if save:
        s = dfseries.index[0]
        e = dfseries.index[-1]
        filename = Series + '_' + str(s.year) + '-' + str(s.month) + '-' + str(s.day)
        filename = savedir + '/' + filename + '_to_' + str(e.year) + '-' + str(e.month) + '-' + str(e.day) + '.csv'
        print("df to csv, filename = ", filename)
        dfseries.reset_index().to_csv(filename, index=False)
    return dfseries
def _peapply(row):
if np.isnan(row['PE']):
return row['Close']/row['Earnings']
else:
return row['PE']
# PE and Earnings ... needs the Close price from the S&P 500 history frame.
def quandl_sppe_getappend(dfsppe, dfsp500, quandl_api_key, start_date, end_date, save=False, savedir='./'):
    """Build a monthly S&P 500 PE / Close / Earnings frame from Quandl data.

    Args:
        dfsppe: unused; kept for interface compatibility with existing callers.
        dfsp500 (DataFrame): S&P 500 price history (Yahoo-style columns).
        quandl_api_key (str): Quandl auth token.
        start_date, end_date (str): unused by the computation (the full PE
            series is fetched); kept for interface compatibility.
        save (bool): if True, write the result to
            "<savedir>/sppe_<first>_to_<last>.csv".
        savedir (str): directory for the saved CSV.

    Returns:
        DataFrame with PE, Close and derived Earnings (Close / PE) columns.

    BUGFIX: the original referenced the undefined globals df_sp500/df_sppe
    instead of the dfsp500 parameter (NameError outside the notebook it was
    written in) and issued a redundant date-bounded Quandl request whose
    result was immediately discarded.
    """
    pe_data = quandl.get("MULTPL/SP500_PE_RATIO_MONTH", authtoken=quandl_api_key)
    pe_data.columns = ["PE"]
    df_sppe = pe_data.join(dfsp500, how='left')
    df_sppe.drop(['High', 'Low', 'Open', 'Volume', 'Adj Close'], axis=1, inplace=True)
    df_sppe.dropna(how='any', inplace=True)
    df_sppe['Earnings'] = df_sppe['Close'] / df_sppe['PE']
    if save:
        s = df_sppe.index[0]
        e = df_sppe.index[-1]
        filename = 'sppe_' + str(s.year) + '-' + str(s.month) + '-' + str(s.day)
        filename = savedir + '/' + filename + '_to_' + str(e.year) + '-' + str(e.month) + '-' + str(e.day) + '.csv'
        df_sppe.reset_index().to_csv(filename, index=False)
        print("df to csv, filename = ", filename)
    return df_sppe
def yahoo_getappend(symbol, start, end, df='', save=False, savedir='./'):
    """
    Download price history from Yahoo Finance and optionally append it.

    **Parameters**:
        symbol (str): ticker symbol to download via yfinance.
        start (datetime): start date.
        end (datetime): end date.
        df (DataFrame): existing frame to append to; leave at the default
            to start a fresh frame.
        save (bool): if True, write the result to
            "<symbol>_<first>_to_<last>.csv" in *savedir*.
        savedir (str): directory for the saved CSV (default './').

    **Returns**:
        The (possibly appended) price-history DataFrame.

    **How it works**:
        Uses the yfinance package (https://aroussi.com/post/python-yahoo-finance);
        install with: conda install -c ranaroussi yfinance.
    """
    print(start, end)
    dfsymbol = yf.download(symbol, start, end)
    # Append to the caller's frame when a non-empty DataFrame was supplied.
    # BUGFIX: the original tested "if ~df.empty:", which is bitwise NOT on
    # a bool (always truthy) and raised AttributeError for the '' default.
    if isinstance(df, pd.DataFrame) and not df.empty:
        dfsymbol = pd.concat([df, dfsymbol])
        # Keep the first occurrence of any date present in both frames.
        dfsymbol = dfsymbol[~dfsymbol.index.duplicated(keep='first')]
    if save:
        # Strip non-alphanumeric characters (e.g. '^' in index symbols) so
        # the symbol is safe to use in a file name.
        clean_symbol = ''.join(c for c in symbol if c.isalnum())
        s = dfsymbol.index[0]
        e = dfsymbol.index[-1]
        filename = clean_symbol + '_' + str(s.year) + '-' + str(s.month) + '-' + str(s.day)
        filename = savedir + '/' + filename + '_to_' + str(e.year) + '-' + str(e.month) + '-' + str(e.day) + '.csv'
        print("df to csv, filename = ", filename)
        dfsymbol.reset_index().to_csv(filename, index=False)
    return dfsymbol
| [
"aljgutier@yahoo.com"
] | aljgutier@yahoo.com |
2cbe45d9beb5015b6e9b7bdfe21f6840efca9502 | 6ac10eb79b9b44bb6c8be78f9b09a13f584dcabc | /Chapter04/chapter_04_example_01.py | c16e787ec3c68e1213449a116dbd093de499d47b | [] | no_license | javonnii/magenta | 0231fec36fe814720b17e445bc8ffaafd24aed8b | 8eaf176dcfe0cf5fe04f6de816d52d4ea7a30e45 | refs/heads/main | 2023-02-01T03:40:52.613846 | 2020-12-19T10:08:08 | 2020-12-19T10:08:08 | 322,817,006 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,624 | py | """
This example shows how to sample, interpolate and humanize a drums sequence
using MusicVAE and various configurations.
"""
import os
from typing import List
import magenta.music as mm
import tensorflow as tf
from magenta.models.music_vae import TrainedModel, configs
from magenta.music import DEFAULT_STEPS_PER_BAR
from magenta.protobuf.music_pb2 import NoteSequence
from six.moves import urllib
from note_sequence_utils import save_midi, save_plot
def download_checkpoint(model_name: str,
checkpoint_name: str,
target_dir: str):
"""
Downloads a Magenta checkpoint to target directory.
Target directory target_dir will be created if it does not already exist.
:param model_name: magenta model name to download
:param checkpoint_name: magenta checkpoint name to download
:param target_dir: local directory in which to write the checkpoint
"""
tf.gfile.MakeDirs(target_dir)
checkpoint_target = os.path.join(target_dir, checkpoint_name)
if not os.path.exists(checkpoint_target):
response = urllib.request.urlopen(
f"https://storage.googleapis.com/magentadata/models/"
f"{model_name}/checkpoints/{checkpoint_name}")
data = response.read()
local_file = open(checkpoint_target, 'wb')
local_file.write(data)
local_file.close()
def get_model(name: str):
"""
Returns the model instance from its name.
:param name: the model name
"""
checkpoint = name + ".tar"
download_checkpoint("music_vae", checkpoint, "checkpoints")
return TrainedModel(
# Removes the .lohl in some training checkpoint which shares the same config
configs.CONFIG_MAP[name.split(".")[0] if "." in name else name],
# The batch size changes the number of sequences to be processed together,
# we'll be working with maximum 6 sequences (during groove)
batch_size=8,
checkpoint_dir_or_path=os.path.join("checkpoints", checkpoint))
def sample(model_name: str,
num_steps_per_sample: int) -> List[NoteSequence]:
"""
Samples 2 sequences using the given model.
"""
model = get_model(model_name)
# Uses the model to sample 2 sequences,
# with the number of steps and default temperature
sample_sequences = model.sample(n=2, length=num_steps_per_sample)
# Saves the midi and the plot in the sample folder
save_midi(sample_sequences, "sample", model_name)
save_plot(sample_sequences, "sample", model_name)
return sample_sequences
def interpolate(model_name: str,
sample_sequences: List[NoteSequence],
num_steps_per_sample: int,
num_output: int,
total_bars: int) -> NoteSequence:
"""
Interpolates between 2 sequences using the given model.
"""
if len(sample_sequences) != 2:
raise Exception(f"Wrong number of sequences, "
f"expected: 2, actual: {len(sample_sequences)}")
if not sample_sequences[0].notes or not sample_sequences[1].notes:
raise Exception(f"Empty note sequences, "
f"sequence 1 length: {len(sample_sequences[0].notes)}, "
f"sequence 2 length: {len(sample_sequences[1].notes)}")
model = get_model(model_name)
# Use the model to interpolate between the 2 input sequences,
# with the number of output (counting the start and end sequence),
# number of steps per sample and default temperature
#
# This might throw a NoExtractedExamplesError exception if the
# sequences are not properly formed (for example if the sequences
# are not quantized, a sequence is empty or not of the proper length).
interpolate_sequences = model.interpolate(
start_sequence=sample_sequences[0],
end_sequence=sample_sequences[1],
num_steps=num_output,
length=num_steps_per_sample)
# Saves the midi and the plot in the interpolate folder
save_midi(interpolate_sequences, "interpolate", model_name)
save_plot(interpolate_sequences, "interpolate", model_name)
# Concatenates the resulting sequences (of length num_output) into one
# single sequence.
# The second parameter is a list containing the number of seconds
# for each input sequence. This is useful if some of the input
# sequences do not have notes at the end (for example the last
# note ends at 3.5 seconds instead of 4)
interpolate_sequence = mm.sequences_lib.concatenate_sequences(
interpolate_sequences, [4] * num_output)
# Saves the midi and the plot in the merge folder,
# with the plot having total_bars size
save_midi(interpolate_sequence, "merge", model_name)
save_plot(interpolate_sequence, "merge", model_name,
plot_max_length_bar=total_bars,
bar_fill_alphas=[0.50, 0.50, 0.05, 0.05])
return interpolate_sequence
def groove(model_name: str,
interpolate_sequence: NoteSequence,
num_steps_per_sample: int,
num_output: int,
total_bars: int) -> NoteSequence:
"""
Adds groove to the given sequence by splitting it in manageable sequences
and using the given model to humanize it.
"""
model = get_model(model_name)
# Split the sequences in chunks of 4 seconds (which is 2 bars at 120 qpm),
# which is necessary since the model is trained for 2 bars
split_interpolate_sequences = mm.sequences_lib.split_note_sequence(
interpolate_sequence, 4)
if len(split_interpolate_sequences) != num_output:
raise Exception(f"Wrong number of interpolate size, "
f"expected: 10, actual: {len(split_interpolate_sequences)}")
# Uses the model to encode the list of sequences, returning the encoding
# (also called z or latent vector) which will the used in the decoding,
# The other values mu and sigma are not used, but kept in the code for
# clarity.
#
# The resulting array shape is (a, b), where a is the number of
# split sequences (should correspond to num_output), and b is the encoding
# size.
#
# This might throw a NoExtractedExamplesError exception if the
# sequences are not properly formed (for example if the sequences
# are not quantized, a sequence is empty or not of the proper length).
encoding, mu, sigma = model.encode(
note_sequences=split_interpolate_sequences)
# Uses the model to decode the encoding (also called z or latent vector),
# returning a list of humanized sequence with one element per encoded
# sequences (each of length num_steps_per_sample).
groove_sequences = model.decode(
z=encoding, length=num_steps_per_sample)
# Concatenates the resulting sequences (of length num_output) into one
# single sequence.
groove_sequence = mm.sequences_lib.concatenate_sequences(
groove_sequences, [4] * num_output)
# Saves the midi and the plot in the groove folder,
# with the plot having total_bars size
save_midi(groove_sequence, "groove", model_name)
save_plot(groove_sequence, "groove", model_name,
plot_max_length_bar=total_bars, show_velocity=True,
bar_fill_alphas=[0.50, 0.50, 0.05, 0.05])
return groove_sequence
def app(unused_argv):
    """Sample, interpolate and humanize a drum sequence end to end."""
    # Number of interpolated sequences, start and end sequences included.
    sequence_count = 6
    # Bars per sample, which is also the size of each interpolation split.
    bars_per_sample = 2
    # Steps per sample and per interpolation split.
    steps_per_sample = bars_per_sample * DEFAULT_STEPS_PER_BAR
    # Total number of bars across the whole output.
    bar_total = sequence_count * bars_per_sample

    # Sample 2 new sequences with the "lokl" model, optimized for sampling.
    sampled_sequences = sample("cat-drums_2bar_small.lokl",
                               steps_per_sample)
    # Interpolate between the 2 sequences into one sequence, using the
    # "hikl" model.
    interpolated_sequence = interpolate("cat-drums_2bar_small.hikl",
                                        sampled_sequences,
                                        steps_per_sample,
                                        sequence_count,
                                        bar_total)
    # Humanize ("groove") the whole interpolated sequence.
    grooved_sequence = groove("groovae_2bar_humanize",
                              interpolated_sequence,
                              steps_per_sample,
                              sequence_count,
                              bar_total)
    print(f"Generated groove sequence total time: "
          f"{grooved_sequence.total_time}")
    return 0


if __name__ == "__main__":
    tf.app.run(app)
| [
"noreply@github.com"
] | noreply@github.com |
1b3b643a9de2d0688ccc6fd43534603d811be2eb | 927b50cdaf1c384c8bbf6f13816d0ba465852fd8 | /machine_learning_models/ctscan_covid_prediction/cnn.py | 61c6c1a50ac8a0e672cbe76c993cee29544e1ef9 | [
"MIT"
] | permissive | jhabarsingh/DOCMED | f37d336483cffd874b0a7db43677c08a47bd639c | 8a831886d3dd415020699491687fb73893e674c5 | refs/heads/main | 2023-04-26T06:45:10.409633 | 2021-05-19T14:37:53 | 2021-05-19T14:37:53 | 316,683,855 | 3 | 5 | MIT | 2021-02-21T13:32:33 | 2020-11-28T07:51:22 | JavaScript | UTF-8 | Python | false | false | 2,926 | py | import tensorflow as tf
import keras
import pandas as pd
import numpy as np
import zipfile
import os
from random import randint
from keras.layers import Dense,Flatten,MaxPool2D,Conv2D,Dropout,GlobalAveragePooling2D
from keras.losses import binary_crossentropy,categorical_crossentropy
from keras.models import Model,Sequential
import matplotlib.pyplot as plt
from keras.preprocessing.image import ImageDataGenerator
from keras.preprocessing.image import load_img,img_to_array
# Class labels; one subdirectory per label is expected under
# ./mini_natural_images (folder name suggests a repurposed dataset —
# TODO confirm it actually contains CT-scan images).
classes = ['covid', 'normal', 'others']
# Tables of (image path, class label) pairs for training and validation.
train_df = pd.DataFrame(columns=['image','clas'])
val_df = pd.DataFrame(columns=['image','clas'])
for label in classes:
    images = f'./mini_natural_images/{label}'
    print(images)
    # All but the last 30 images of each class go to training ...
    for image in os.listdir(images)[:-30]:
        # NOTE(review): DataFrame.append is deprecated in recent pandas;
        # collecting rows in a list and building the frame once is faster.
        train_df = train_df.append({'image':'./mini_natural_images/'+label+'/'+image,'clas':label},ignore_index=True)
    # ... and the last 30 of each class are held out for validation.
    for image in os.listdir(images)[-30:]:
        val_df = val_df.append({'image':'./mini_natural_images/'+label+'/'+image,'clas':label},ignore_index=True)
print(train_df)
# The next two bare expressions have no effect outside a notebook.
val_df.head()
train_df.shape,val_df.shape
# Training-time augmentation; pixel values are rescaled to [0, 1].
train_datagen = ImageDataGenerator(
    rotation_range=40,
    width_shift_range=0.2,
    height_shift_range=0.2,
    shear_range=0.2,
    zoom_range=0.2,
    horizontal_flip=True,
    rescale=1/255,
)
# Validation images are only rescaled, never augmented.
val_datagen = ImageDataGenerator(
    rescale=1/255
)
train_generator = train_datagen.flow_from_dataframe(train_df, x_col='image',y_col='clas',classes=classes)
val_generator = val_datagen.flow_from_dataframe(val_df,x_col='image',y_col='clas',classes=classes)
# Transfer learning: frozen InceptionResNetV2 backbone plus a new
# GlobalAveragePooling + 3-way softmax classification head.
from keras.applications.inception_resnet_v2 import InceptionResNetV2
inceptionresnet = InceptionResNetV2(include_top=False, input_shape=(256,256,3),classes=3)
inceptionresnet.trainable = False
last_layer = inceptionresnet.layers[-1].output
x = GlobalAveragePooling2D()(last_layer)
x = Dense(3,activation='softmax')(x)
model = Model(inceptionresnet.inputs,x)
model.summary()
model.compile(loss='categorical_crossentropy',optimizer=keras.optimizers.RMSprop(learning_rate=0.0001),metrics=['acc'])
history = model.fit(train_generator,epochs=3,validation_data=val_generator)
model.save("pickle.h5")
# Training curves.
# NOTE(review): loss and accuracy curves are drawn into the same figure,
# three of the four share the colour 'r', and the plt.figure() just before
# plt.show() opens a blank window — presumably copy-paste leftovers; confirm.
acc = history.history['acc']
val_acc = history.history['val_acc']
loss = history.history['loss']
val_loss = history.history['val_loss']
epochs = range(len(acc))
plt.plot(epochs, loss, 'r', label='Training loss')
plt.plot(epochs, val_loss, 'r', label='Validation loss')
plt.plot(epochs, acc, 'r', label='Training accuracy')
plt.plot(epochs, val_acc, 'b', label='Validation accuracy')
plt.title('Training and validation accuracy')
plt.legend(loc=0)
plt.figure()
plt.show()
# Sanity check: predict one random validation image and map the softmax
# output back to a class label.
# NOTE(review): randint's upper bound is inclusive, so index can equal
# val_df.shape[0], which .iloc would reject — range should be 0..shape[0]-1.
index = randint(1,val_df.shape[0])
image = val_df.iloc[index]
img = load_img(image.image,target_size=(256,256))
plt.imshow(img)
img_tensor = img_to_array(img)
img_tensor = np.expand_dims(img_tensor, axis=0)
img_tensor /= 255.
prediction = model.predict(img_tensor)
# Bare expression: result is discarded outside a notebook.
classes[np.argmax(prediction)]
| [
"jhabarsinghbhati23@gmail.com"
] | jhabarsinghbhati23@gmail.com |
2220cb20fe57b707982d3591ab2714ae3b8e8482 | d869ade18e20dd793f0043ab8094a4106310f3b1 | /blog/migrations/0015_auto_20201019_1904.py | 07d3ee9110dcf65ccbedd14b74c4d7d14c5402d0 | [] | no_license | krishnajhapate/icoder | e474de880ae4792c64ccd640924c13a8976c4f4b | a966940e501eb94ded082371fcc04be094412e09 | refs/heads/main | 2023-06-04T01:55:05.428576 | 2021-06-28T18:42:25 | 2021-06-28T18:42:25 | 304,909,458 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 394 | py | # Generated by Django 3.1.1 on 2020-10-19 13:34
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: redefine Post.author as a CharField.

    Generated by Django; do not edit operations by hand — create a new
    migration instead.
    """

    dependencies = [
        ('blog', '0014_auto_20201019_1904'),
    ]

    operations = [
        # Alter the existing 'author' field to a non-null CharField that
        # defaults to the empty string.
        migrations.AlterField(
            model_name='post',
            name='author',
            field=models.CharField(default='', max_length=100),
        ),
    ]
| [
"ki54.jhapate@gmail.com"
] | ki54.jhapate@gmail.com |
a41ea17f1416f18dd662cef71f7c460afc86a41a | 7139333bea24651be4d7d65ffc7106edcc5f6b7b | /article/migrations/0005_likes.py | 6246bf0b67469c79635dde05167bdc80f16b9133 | [] | no_license | xiaoweiOvO/my_blog | 4ac2dc0aaa2393d0e4b8a284bf4476aabcc313ed | 0da9a2e9d8bd4615f2a5bf661ce75f8428456e05 | refs/heads/master | 2022-12-22T01:46:04.145390 | 2020-06-03T01:52:57 | 2020-06-03T01:52:57 | 247,701,499 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 875 | py | # Generated by Django 3.0.4 on 2020-03-29 08:05
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration: create the 'likes' model.

    Each row links one user to one article (presumably a "like" record,
    per the model name). Generated by Django; do not edit by hand —
    create a new migration instead.
    """

    dependencies = [
        # Rows reference the project's swappable user model, so it must exist.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('article', '0004_remove_articlepost_likes'),
    ]

    operations = [
        migrations.CreateModel(
            name='likes',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Deleting the article or the user cascades to these rows.
                ('article', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='article', to='article.ArticlePost')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='user', to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| [
"2283940851@qq.com"
] | 2283940851@qq.com |
1034bd77edf5af5ae781950bbc82b7b3ac584294 | f06557f1b2ee2add10e99be6dfe5870e703ee384 | /services/migrations/0003_businesshour.py | 927da1f0e6ffa151a345525aa670af82dc9ffd04 | [] | no_license | jonathan-odonnell/well-groomed-woofers | 2dbff2d4672944c2d5664008967680aad23c7a8a | c8a61c9e840e5b03b975615c947ee75dd188583b | refs/heads/master | 2023-07-19T00:58:29.539488 | 2021-09-08T15:44:58 | 2021-09-08T15:44:58 | 389,753,575 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 746 | py | # Generated by Django 3.1.3 on 2021-08-01 23:49
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: create the BusinessHour model.

    Generated by Django; do not edit by hand — create a new migration
    instead.
    """

    dependencies = [
        ('services', '0002_service_size'),
    ]

    operations = [
        migrations.CreateModel(
            name='BusinessHour',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Time-of-day range.
                ('start_time', models.TimeField()),
                ('end_time', models.TimeField()),
                # Date range.
                ('start_date', models.DateField()),
                ('end_date', models.DateField()),
            ],
            options={
                'verbose_name': 'Business Hours',
            },
        ),
    ]
| [
"jonathanodonnell1994@gmail.com"
] | jonathanodonnell1994@gmail.com |
e23b7df01fd1ad5179934574661d3b12abd07a70 | 6eaf41b83d66447fdae025f2d69a10a5c0be6f28 | /password.py | 3d4964598643c3f03b9a7f507c69cb57812a33ef | [] | no_license | DrazThan/PassGenerator | 083561695bc94b4a8af78396af65dfe71c6bd862 | e85c2a97aae2adc57853854202fccf85df8217bb | refs/heads/main | 2023-07-27T14:53:17.562048 | 2021-09-10T03:44:02 | 2021-09-10T03:44:02 | 403,714,231 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,414 | py | from tkinter import *
from random import randint
root = Tk()
root.title('Strong PassWord Generator')
root.geometry("500x300")


def new_rand():
    """Generate a random password of the requested length into pw_entry."""
    # Clear any previously generated password.
    pw_entry.delete(0, END)
    # Requested length comes from the user's entry box (may raise
    # ValueError on non-numeric input, matching the original behavior).
    pw_length = int(my_entry.get())
    my_password = ''
    # Printable ASCII range 33-126: letters, digits and punctuation.
    # NOTE(review): random.randint is not cryptographically secure; for
    # real password generation the stdlib `secrets` module should be used.
    for x in range(pw_length):
        my_password += chr(randint(33, 126))
    # Show the result on screen.
    pw_entry.insert(0, my_password)


# BUG FIX: the original file defined a dead `def clipper(): pass` that was
# immediately shadowed by the real implementation below; it has been removed.
def clipper():
    """Copy the generated password to the system clipboard."""
    root.clipboard_clear()
    root.clipboard_append(pw_entry.get())


# Label frame around the length entry box.
lf = LabelFrame(root, text="How Many Charecters ?")
lf.pack(pady=20)
# Entry box for the desired number of characters.
my_entry = Entry(lf, font=("Helvetic", 24))
my_entry.pack(pady=20, padx=20)
# Entry box that displays the generated password.
pw_entry = Entry(root, font=("Helvetica", 24), bd=0)
pw_entry.pack(pady=20)
pw_var = StringVar()
pw_entry.configure(textvariable=pw_var)
# Frame holding the action buttons.
my_frame = Frame(root)
my_frame.pack(pady=20)
my_button = Button(my_frame, text="Generate Strong Password", command=new_rand)
my_button.grid(row=0, column=0, padx=10)
clip_button = Button(my_frame, text="Copy To Clipboard", command=clipper)
clip_button.grid(row=0, column=1, padx=10)
root.mainloop()
| [
"oz.tal000@gmail.com"
] | oz.tal000@gmail.com |
19a630f6b4d8fa9361865e00c7183aa26c84182f | 5abb082407832749f418c803d875b1798a457be6 | /gui/demo_gui/eel/venv/Scripts/pip3-script.py | 5f9d917576d499f51a12898a2f02047bd1fe3eff | [] | no_license | darkflake/ASDAS | 63b4593d77d09132a7365518a1e10db3a40e1c75 | 9dbb63c7f0279270dcd321b7a4456ec55f70bf6f | refs/heads/master | 2022-05-26T11:26:26.497155 | 2020-04-27T15:58:35 | 2020-04-27T15:58:35 | 259,531,939 | 0 | 0 | null | 2020-04-28T04:34:55 | 2020-04-28T04:34:55 | null | UTF-8 | Python | false | false | 406 | py | #!C:\Users\HP\PycharmProjects\eel\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip3'
# Setuptools-generated console-script wrapper for the 'pip3' entry point;
# regenerated on (re)install, so do not edit by hand.
__requires__ = 'pip==19.0.3'
import re
import sys
from pkg_resources import load_entry_point

if __name__ == '__main__':
    # Strip any '-script.py(w)' / '.exe' wrapper suffix so pip sees the
    # plain program name, then dispatch to pip's console_scripts entry point
    # and exit with its return code.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('pip==19.0.3', 'console_scripts', 'pip3')()
    )
| [
"siddhiasonar@gmail.com"
] | siddhiasonar@gmail.com |
259eb83402332534b5d99c0a6e094279776f1915 | ac5e52a3fc52dde58d208746cddabef2e378119e | /exps-gsn-edf/gsn-edf_ut=3.5_rd=0.65_rw=0.04_rn=4_u=0.075-0.35_p=harmonic-2/sched=RUN_trial=73/params.py | 885ca21da69a7e1b42cd2a69b712f6a68b7f435e | [] | no_license | ricardobtxr/experiment-scripts | 1e2abfcd94fb0ef5a56c5d7dffddfe814752eef1 | 7bcebff7ac2f2822423f211f1162cd017a18babb | refs/heads/master | 2023-04-09T02:37:41.466794 | 2021-04-25T03:27:16 | 2021-04-25T03:27:16 | 358,926,457 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 255 | py | {'cpus': 4,
'duration': 30,
'final_util': '3.532857',
'max_util': '3.5',
'periods': 'harmonic-2',
'release_master': False,
'res_distr': '0.65',
'res_nmb': '4',
'res_weight': '0.04',
'scheduler': 'GSN-EDF',
'trial': 73,
'utils': 'uni-medium-3'}
| [
"ricardo.btxr@gmail.com"
] | ricardo.btxr@gmail.com |
878f4a818f04d4f9ab83b2f46874ac368fb98eca | a5c87282656e6b30bdb05a172cf62dba30753118 | /wan7.py | 274842d9e137ef4bb97a7ddb3cf861b9f6dd46aa | [] | no_license | NAMWANHEE/data_mining_in_school | 3cfb7ea79f06fe50c79b55aa871e7efb4d6ea6b4 | 14fb21851061835f0149b94f20c7e65405e5c523 | refs/heads/master | 2023-03-08T12:53:02.663958 | 2021-02-24T09:20:29 | 2021-02-24T09:20:29 | 341,083,093 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,236 | py | import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
from sklearn.datasets import load_iris
iris = load_iris()

# Assemble the iris feature matrix plus numeric and named target columns.
df = pd.DataFrame(iris.data, columns=iris.feature_names)
df['target'] = iris.target
df['targetName'] = np.where(df['target'] == 0, "setosa",
                            np.where(df['target'] == 1, "versicolor", "virginica"))
print(df.head())

# (target value, matplotlib style, legend label) for each species,
# in the same plotting order as before.
species_styles = [(0, 'or', 'setosa'),
                  (1, 'xb', 'versicolor'),
                  (2, '^g', 'virginica')]

# Scatter of sepal length vs sepal width, one style per species.
plt.figure()
for target_value, style, label in species_styles:
    subset = df.iloc[:, :2][df.target == target_value]
    plt.plot(subset['sepal length (cm)'], subset['sepal width (cm)'],
             style, label=label)
plt.legend()
plt.xlabel('sepal length')
plt.ylabel("sepal width")

# Scatter of petal length vs petal width, same styling.
plt.figure()
for target_value, style, label in species_styles:
    subset = df.iloc[:, 2:4][df.target == target_value]
    plt.plot(subset['petal length (cm)'], subset['petal width (cm)'],
             style, label=label)
plt.legend()
plt.xlabel('petal length')
plt.ylabel("petal width")
plt.show() | [
"xjffla12@naver.com"
] | xjffla12@naver.com |
cf28652acd3c0bf0723346e3d5df428c6ed15679 | 70b8d3b6c8530950a70e6550325d60dc981d2807 | /assets/code/python_code/dt_data/dt_gen_csv.py | 108585e606c1bdc78f6bbbb9d437acff0a16b080 | [
"MIT"
] | permissive | zanghu/gitbook_notebook | 85898713e2b74c7da908d73588691878f64b5b1c | 9bf90c2ed192c1037eadad9324f26db30c11c927 | refs/heads/master | 2023-06-27T20:17:55.395183 | 2023-06-08T07:52:21 | 2023-06-08T07:52:21 | 116,573,098 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,791 | py | #coding=utf-8
#!/usr/bin/env python
"""
1. The following actions are recorded in a unified table
   (not split per item type):
   comment
   follow
   collect
2. The following actions can only be obtained from the tables
   specific to each item type:
   like
   dislike
   reply
   author
"""
import os
import pickle
import numpy
import time
import pandas
# Element format of the "user-item" matrix:
# [like, dislike, follow, collect, comment, reply, seen, author, ]
g_element_labels = ['like', 'dislike', 'follow', 'collect', 'comment', 'reply', 'seen', 'author', 'participant']
def gen_single_col_csv(src_pth, dst_pth, src_key_col, dst_key_col, sort_type):
    """Extract one column from a pickled table and write it as a CSV.

    Reads a pickle holding ``{'cols': [...], 'data': [...]}``, collects the
    values of column *src_key_col* from every row, sorts them either
    lexicographically (``'str'``) or numerically (``'int'``), and writes one
    value per line to *dst_pth*.  *dst_key_col* is kept for interface
    compatibility but is unused by the plain-text writer.
    """
    assert sort_type in ('str', 'int')
    with open(src_pth, 'rb') as f:
        table = pickle.load(f)
    assert src_key_col in table['cols']
    col_idx = table['cols'].index(src_key_col)
    ids = [row[col_idx] for row in table['data']]
    if sort_type == 'int':
        # Sort numerically, then convert back to strings for output.
        ids = [str(v) for v in sorted(int(v) for v in ids)]
    else:
        ids = sorted(ids)
    # One id per line, with a trailing newline.
    with open(dst_pth, 'w') as f:
        f.write('\n'.join(ids))
        f.write('\n')
def gen_ids_csv():
    """Dump the id column of each entity table to its own CSV file."""
    # (source pickle, destination csv, source column, dest column, sort type)
    specs = [
        ('data_pkl/USER/3.1_DT_USERS.pkl', 'data_csv/user_ids.csv', 'ID_USER', 'USER_ID', 'str'),          # id_users
        ('data_pkl/QA/3.13_DT_QUESTIONS.pkl', 'data_csv/question_ids.csv', 'QUESTION_ID', 'QUESTION_ID', 'int'),  # id_questions
        ('data_pkl/BLOG/3.49_DT_BLOG_META.pkl', 'data_csv/blog_ids.csv', 'ID_BLOG_META', 'BLOG_ID', 'int'),       # id_blogs
        ('data_pkl/IDEA/3.84_DT_IDEA.pkl', 'data_csv/idea_ids.csv', 'ID_IDEA', 'IDEA_ID', 'int'),                 # id_ideas
    ]
    for spec in specs:
        gen_single_col_csv(*spec)
def gen_user_item_csv(users, items, u2i, csv_pth):
    """Write the user-item interaction matrix to a CSV file.

    :param users: ordered list of user id strings (one data row each)
    :param items: ordered list of item id strings (one column each)
    :param u2i: dict user_id -> {item_id: interaction-list}; each
        interaction list is serialized as 'a|b|c'
    :param csv_pth: destination CSV path (written as bytes, utf-8)
    """
    with open(csv_pth, 'wb') as f:
        # Header row: USER_ID followed by every item id.
        # BUG FIX: the original wrote a trailing comma after the last item,
        # producing one extra empty column relative to the data rows; the
        # header is now joined exactly like the data rows below.
        header = [b'USER_ID'] + [item_id.encode('utf-8') for item_id in items]
        f.write(b','.join(header))
        f.write(b'\n')
        # One row per user, one cell per item; empty cell = no interaction.
        for user_id in users:
            row = [user_id.encode('utf-8')]
            if not (user_id in u2i):
                # Unknown user: all cells empty.
                row.extend(b'' for _ in items)
            else:
                for item_id in items:
                    if item_id in u2i[user_id]:
                        # Join the n-dimensional interaction list with '|'.
                        elem = '|'.join(str(x) for x in u2i[user_id][item_id])
                        row.append(elem.encode('utf-8'))
                    else:
                        row.append(b'')
            f.write(b','.join(row))
            f.write(b'\n')
def read_ids_csv(csv_pth):
    """Read one id per line from *csv_pth*, dropping a trailing empty line."""
    assert os.path.isfile(csv_pth)
    with open(csv_pth, 'r') as f:
        ids = [line.strip('\n') for line in f]
    # Drop the empty entry produced by a file ending in a newline.
    # (As before, an entirely empty file raises IndexError here.)
    if ids[-1] == '':
        ids = ids[:-1]
    return ids
def gen_user_item_csv_all(id_csv_dir, u2i_pkl_dir, dst_csv_dir):
    """Build the user-question, user-blog and user-idea CSV matrices.

    Reads the id lists from *id_csv_dir* and the pickled user-item dicts
    from *u2i_pkl_dir*, then writes one CSV per item family into
    *dst_csv_dir* (created if missing).
    """
    assert os.path.isdir(id_csv_dir)
    assert os.path.isdir(u2i_pkl_dir)
    if not os.path.exists(dst_csv_dir):
        os.makedirs(dst_csv_dir)
    else:
        assert os.path.isdir(dst_csv_dir)

    # Load every id list up front, in the same order as before.
    user_ids = read_ids_csv(os.path.join(id_csv_dir, 'user_ids.csv'))
    item_ids = {
        name: read_ids_csv(os.path.join(id_csv_dir, name + '_ids.csv'))
        for name in ('question', 'blog', 'idea')
    }

    # (item family, pickled u2i matrix) pairs, processed in the same order.
    families = [('question', 'QA_u2i.pkl'),
                ('blog', 'BLOG_u2i.pkl'),
                ('idea', 'IDEA_u2i.pkl')]
    for name, pkl_name in families:
        with open(os.path.join(u2i_pkl_dir, pkl_name), 'rb') as f:
            u2i = pickle.load(f)
        gen_user_item_csv(user_ids, item_ids[name], u2i,
                          os.path.join(dst_csv_dir, 'user_' + name + '.csv'))
if __name__ == '__main__':
    # Regenerate the per-entity id CSVs before building the matrices below.
    gen_ids_csv()
gen_user_item_csv_all("data_csv", "data_mat", "u2i_csv") | [
"anonymous@gitbook.com"
] | anonymous@gitbook.com |
745bcadbece4be250f6c1d6a9990da5454da9590 | e79d0a9a8143e03705979d2f8ce78e278d2078c0 | /sdk/network/azure-mgmt-network/azure/mgmt/network/v2017_11_01/aio/operations/_virtual_network_gateways_operations.py | 867d69b692014f5561a8e8d194099242c85628cf | [
"LGPL-2.1-or-later",
"MIT",
"LicenseRef-scancode-generic-cla"
] | permissive | dannysijia/azure-sdk-for-python | f2c276600d1ba48620e8d1c73b57eb8e3c73e35a | 71bd202f5261e7ce8b2db38bb9b6802f1a55a5dc | refs/heads/main | 2023-08-22T04:15:24.519733 | 2021-11-01T08:25:59 | 2021-11-01T08:25:59 | 423,389,780 | 0 | 0 | MIT | 2021-11-01T08:25:28 | 2021-11-01T08:25:28 | null | UTF-8 | Python | false | false | 83,933 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class VirtualNetworkGatewaysOperations:
"""VirtualNetworkGatewaysOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2017_11_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
    def __init__(self, client, config, serializer, deserializer) -> None:
        """Store the shared pipeline client, config and (de)serializers."""
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config
    async def _create_or_update_initial(
        self,
        resource_group_name: str,
        virtual_network_gateway_name: str,
        parameters: "_models.VirtualNetworkGateway",
        **kwargs: Any
    ) -> "_models.VirtualNetworkGateway":
        """Single PUT request backing :meth:`begin_create_or_update`.

        AutoRest-generated; returns the (possibly still provisioning)
        gateway — long-running polling is handled by the caller.
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.VirtualNetworkGateway"]
        # Map auth / not-found / conflict statuses onto azure-core exceptions;
        # callers may extend the mapping via the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2017-11-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json, text/json"

        # Construct URL from the template stored on this method's metadata.
        url = self._create_or_update_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct query parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # Serialize the gateway model into the request body and send the PUT.
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'VirtualNetworkGateway')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200 = updated, 201 = created; anything else raises.
        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if response.status_code == 200:
            deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'}  # type: ignore
    async def begin_create_or_update(
        self,
        resource_group_name: str,
        virtual_network_gateway_name: str,
        parameters: "_models.VirtualNetworkGateway",
        **kwargs: Any
    ) -> AsyncLROPoller["_models.VirtualNetworkGateway"]:
        """Creates or updates a virtual network gateway in the specified resource group.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param virtual_network_gateway_name: The name of the virtual network gateway.
        :type virtual_network_gateway_name: str
        :param parameters: Parameters supplied to create or update virtual network gateway operation.
        :type parameters: ~azure.mgmt.network.v2017_11_01.models.VirtualNetworkGateway
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either VirtualNetworkGateway or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2017_11_01.models.VirtualNetworkGateway]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.VirtualNetworkGateway"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Only issue the initial PUT when not resuming from a saved poller state.
        if cont_token is None:
            raw_result = await self._create_or_update_initial(
                resource_group_name=resource_group_name,
                virtual_network_gateway_name=virtual_network_gateway_name,
                parameters=parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Final deserialization once the LRO reaches a terminal state.
            deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }

        if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            # Rehydrate an existing poller instead of starting a new operation.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'}  # type: ignore
    async def get(
        self,
        resource_group_name: str,
        virtual_network_gateway_name: str,
        **kwargs: Any
    ) -> "_models.VirtualNetworkGateway":
        """Gets the specified virtual network gateway by resource group.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param virtual_network_gateway_name: The name of the virtual network gateway.
        :type virtual_network_gateway_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: VirtualNetworkGateway, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2017_11_01.models.VirtualNetworkGateway
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.VirtualNetworkGateway"]
        # Map auth / not-found / conflict statuses onto azure-core exceptions;
        # callers may extend the mapping via the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2017-11-01"
        accept = "application/json, text/json"

        # Construct URL from the template stored on this method's metadata.
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct query parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        # Send the GET and deserialize the 200 response.
        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'}  # type: ignore
    async def _delete_initial(
        self,
        resource_group_name: str,
        virtual_network_gateway_name: str,
        **kwargs: Any
    ) -> None:
        """Single DELETE request backing :meth:`begin_delete`.

        AutoRest-generated; long-running polling is handled by the caller.
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        # Map auth / not-found / conflict statuses onto azure-core exceptions;
        # callers may extend the mapping via the 'error_map' kwarg.
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2017-11-01"

        # Construct URL from the template stored on this method's metadata.
        url = self._delete_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct query parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200/202 = accepted, 204 = already gone; anything else raises.
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'}  # type: ignore
    async def begin_delete(
        self,
        resource_group_name: str,
        virtual_network_gateway_name: str,
        **kwargs: Any
    ) -> AsyncLROPoller[None]:
        """Deletes the specified virtual network gateway.

        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param virtual_network_gateway_name: The name of the virtual network gateway.
        :type virtual_network_gateway_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Only issue the initial DELETE when not resuming from a saved state.
        if cont_token is None:
            raw_result = await self._delete_initial(
                resource_group_name=resource_group_name,
                virtual_network_gateway_name=virtual_network_gateway_name,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            # Delete returns no body; only honor a custom response hook.
            if cls:
                return cls(pipeline_response, None, {})

        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }

        # This LRO resolves its final state via the 'location' header.
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            # Rehydrate an existing poller instead of starting a new operation.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'}  # type: ignore
async def _update_tags_initial(
    self,
    resource_group_name: str,
    virtual_network_gateway_name: str,
    parameters: "_models.TagsObject",
    **kwargs: Any
) -> "_models.VirtualNetworkGateway":
    """Send the initial PATCH that updates the tags on a virtual network gateway.

    Returns the deserialized ``VirtualNetworkGateway`` (or the result of a
    custom ``cls`` callback supplied through kwargs).
    """
    custom_cls = kwargs.pop('cls', None)  # type: ClsType["_models.VirtualNetworkGateway"]
    # Map well-known failure codes to azure-core exceptions; callers may
    # extend or override the mapping via the 'error_map' kwarg.
    errors = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    errors.update(kwargs.pop('error_map', {}))
    api_version = "2017-11-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json, text/json"

    # Fill the URL template from the operation metadata.
    path_args = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    request_url = self._client.format_url(self._update_tags_initial.metadata['url'], **path_args)  # type: ignore

    query = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    headers = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    # Serialize the body and run the PATCH through the client pipeline.
    body = self._serialize.body(parameters, 'TagsObject')
    request = self._client.patch(request_url, query, headers, content=body)
    pipeline_result = await self._client._pipeline.run(request, stream=False, **kwargs)
    http_response = pipeline_result.http_response

    if http_response.status_code != 200:
        map_error(status_code=http_response.status_code, response=http_response, error_map=errors)
        raise HttpResponseError(response=http_response, error_format=ARMErrorFormat)

    result = self._deserialize('VirtualNetworkGateway', pipeline_result)
    if custom_cls:
        return custom_cls(pipeline_result, result, {})
    return result
_update_tags_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'}  # type: ignore
async def begin_update_tags(
    self,
    resource_group_name: str,
    virtual_network_gateway_name: str,
    parameters: "_models.TagsObject",
    **kwargs: Any
) -> AsyncLROPoller["_models.VirtualNetworkGateway"]:
    """Updates a virtual network gateway tags.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param virtual_network_gateway_name: The name of the virtual network gateway.
    :type virtual_network_gateway_name: str
    :param parameters: Parameters supplied to update virtual network gateway tags.
    :type parameters: ~azure.mgmt.network.v2017_11_01.models.TagsObject
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be AsyncARMPolling.
     Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
    :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of AsyncLROPoller that returns either VirtualNetworkGateway or the result of cls(response)
    :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2017_11_01.models.VirtualNetworkGateway]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.VirtualNetworkGateway"]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    if cont_token is None:
        # Fresh start: send the initial PATCH. cls=lambda x,y,z: x makes the
        # helper hand back the raw pipeline response required by the poller.
        raw_result = await self._update_tags_initial(
            resource_group_name=resource_group_name,
            virtual_network_gateway_name=virtual_network_gateway_name,
            parameters=parameters,
            cls=lambda x,y,z: x,
            **kwargs
        )
    # These request-scoped kwargs were consumed by the initial call; remove
    # them so they are not forwarded to the polling method below.
    kwargs.pop('error_map', None)
    kwargs.pop('content_type', None)

    def get_long_running_output(pipeline_response):
        # Deserialize the final response body, honoring a custom cls hook.
        deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    # Arguments the poller uses to format the URLs it polls.
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    if polling is True: polling_method = AsyncARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
    elif polling is False: polling_method = AsyncNoPolling()
    else: polling_method = polling
    if cont_token:
        # Resume from previously saved poller state; no new initial call made.
        return AsyncLROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'}  # type: ignore
def list(
    self,
    resource_group_name: str,
    **kwargs: Any
) -> AsyncIterable["_models.VirtualNetworkGatewayListResult"]:
    """Gets all virtual network gateways by resource group.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either VirtualNetworkGatewayListResult or the result of cls(response)
    :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2017_11_01.models.VirtualNetworkGatewayListResult]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.VirtualNetworkGatewayListResult"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2017-11-01"
    accept = "application/json, text/json"

    def prepare_request(next_link=None):
        # Build the GET for one page: the first page fills the operation's URL
        # template; later pages reuse the server-supplied next_link verbatim
        # (no api-version query parameter added in that case).
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        if not next_link:
            # Construct URL
            url = self.list.metadata['url']  # type: ignore
            path_format_arguments = {
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    async def extract_data(pipeline_response):
        # Deserialize one page and return (continuation link, items).
        deserialized = self._deserialize('VirtualNetworkGatewayListResult', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        # Fetch a single page, surfacing HTTP errors as ARM exceptions.
        request = prepare_request(next_link)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        return pipeline_response

    return AsyncItemPaged(
        get_next, extract_data
    )
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways'}  # type: ignore
def list_connections(
    self,
    resource_group_name: str,
    virtual_network_gateway_name: str,
    **kwargs: Any
) -> AsyncIterable["_models.VirtualNetworkGatewayListConnectionsResult"]:
    """Gets all the connections in a virtual network gateway.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param virtual_network_gateway_name: The name of the virtual network gateway.
    :type virtual_network_gateway_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: An iterator like instance of either VirtualNetworkGatewayListConnectionsResult or the result of cls(response)
    :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2017_11_01.models.VirtualNetworkGatewayListConnectionsResult]
    :raises: ~azure.core.exceptions.HttpResponseError
    """
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.VirtualNetworkGatewayListConnectionsResult"]
    error_map = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    error_map.update(kwargs.pop('error_map', {}))
    api_version = "2017-11-01"
    accept = "application/json, text/json"

    def prepare_request(next_link=None):
        # Build the GET for one page: the first page fills the operation's URL
        # template; later pages reuse the server-supplied next_link verbatim.
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        if not next_link:
            # Construct URL
            url = self.list_connections.metadata['url']  # type: ignore
            path_format_arguments = {
                'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
                'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            }
            url = self._client.format_url(url, **path_format_arguments)
            # Construct parameters
            query_parameters = {}  # type: Dict[str, Any]
            query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
            request = self._client.get(url, query_parameters, header_parameters)
        else:
            url = next_link
            query_parameters = {}  # type: Dict[str, Any]
            request = self._client.get(url, query_parameters, header_parameters)
        return request

    async def extract_data(pipeline_response):
        # Deserialize one page and return (continuation link, items).
        deserialized = self._deserialize('VirtualNetworkGatewayListConnectionsResult', pipeline_response)
        list_of_elem = deserialized.value
        if cls:
            list_of_elem = cls(list_of_elem)
        return deserialized.next_link or None, AsyncList(list_of_elem)

    async def get_next(next_link=None):
        # Fetch a single page, surfacing HTTP errors as ARM exceptions.
        request = prepare_request(next_link)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        return pipeline_response

    return AsyncItemPaged(
        get_next, extract_data
    )
list_connections.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/connections'}  # type: ignore
async def _reset_initial(
    self,
    resource_group_name: str,
    virtual_network_gateway_name: str,
    gateway_vip: Optional[str] = None,
    **kwargs: Any
) -> Optional["_models.VirtualNetworkGateway"]:
    """Send the initial POST that resets the gateway's primary instance.

    Returns the deserialized gateway on a 200 response, or ``None`` when the
    service merely accepts the operation (202).
    """
    custom_cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.VirtualNetworkGateway"]]
    errors = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    errors.update(kwargs.pop('error_map', {}))
    api_version = "2017-11-01"
    accept = "application/json, text/json"

    # Fill the URL template from the operation metadata.
    path_args = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    request_url = self._client.format_url(self._reset_initial.metadata['url'], **path_args)  # type: ignore

    query = {}  # type: Dict[str, Any]
    if gateway_vip is not None:
        # Optional VIP selector (used with active-active gateways per caller docs).
        query['gatewayVip'] = self._serialize.query("gateway_vip", gateway_vip, 'str')
    query['api-version'] = self._serialize.query("api_version", api_version, 'str')
    headers = {'Accept': self._serialize.header("accept", accept, 'str')}  # type: Dict[str, Any]

    request = self._client.post(request_url, query, headers)
    pipeline_result = await self._client._pipeline.run(request, stream=False, **kwargs)
    http_response = pipeline_result.http_response

    if http_response.status_code not in (200, 202):
        map_error(status_code=http_response.status_code, response=http_response, error_map=errors)
        raise HttpResponseError(response=http_response, error_format=ARMErrorFormat)

    result = None
    if http_response.status_code == 200:
        result = self._deserialize('VirtualNetworkGateway', pipeline_result)
    if custom_cls:
        return custom_cls(pipeline_result, result, {})
    return result
_reset_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/reset'}  # type: ignore
async def begin_reset(
    self,
    resource_group_name: str,
    virtual_network_gateway_name: str,
    gateway_vip: Optional[str] = None,
    **kwargs: Any
) -> AsyncLROPoller["_models.VirtualNetworkGateway"]:
    """Resets the primary of the virtual network gateway in the specified resource group.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param virtual_network_gateway_name: The name of the virtual network gateway.
    :type virtual_network_gateway_name: str
    :param gateway_vip: Virtual network gateway vip address supplied to the begin reset of the
     active-active feature enabled gateway.
    :type gateway_vip: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be AsyncARMPolling.
     Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
    :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of AsyncLROPoller that returns either VirtualNetworkGateway or the result of cls(response)
    :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2017_11_01.models.VirtualNetworkGateway]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType["_models.VirtualNetworkGateway"]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    if cont_token is None:
        # Fresh start: issue the initial reset POST. cls=lambda x,y,z: x makes
        # the helper return the raw pipeline response the poller needs.
        raw_result = await self._reset_initial(
            resource_group_name=resource_group_name,
            virtual_network_gateway_name=virtual_network_gateway_name,
            gateway_vip=gateway_vip,
            cls=lambda x,y,z: x,
            **kwargs
        )
    # These request-scoped kwargs were consumed by the initial call; remove
    # them so they are not forwarded to the polling method below.
    kwargs.pop('error_map', None)
    kwargs.pop('content_type', None)

    def get_long_running_output(pipeline_response):
        # Deserialize the final response body, honoring a custom cls hook.
        deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    # Arguments the poller uses to format the URLs it polls.
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    # Final LRO state is read via the Location header ('final-state-via': 'location').
    if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
    elif polling is False: polling_method = AsyncNoPolling()
    else: polling_method = polling
    if cont_token:
        # Resume from previously saved poller state; no new initial call made.
        return AsyncLROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_reset.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/reset'}  # type: ignore
async def _generatevpnclientpackage_initial(
    self,
    resource_group_name: str,
    virtual_network_gateway_name: str,
    parameters: "_models.VpnClientParameters",
    **kwargs: Any
) -> Optional[str]:
    """Send the initial POST that requests generation of a VPN client package.

    Returns the service's string payload on a 200 response, or ``None`` when
    the operation is merely accepted (202).
    """
    custom_cls = kwargs.pop('cls', None)  # type: ClsType[Optional[str]]
    errors = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    errors.update(kwargs.pop('error_map', {}))
    api_version = "2017-11-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json, text/json"

    # Fill the URL template from the operation metadata.
    path_args = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    request_url = self._client.format_url(self._generatevpnclientpackage_initial.metadata['url'], **path_args)  # type: ignore

    query = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    headers = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    # Serialize the body and run the POST through the client pipeline.
    body = self._serialize.body(parameters, 'VpnClientParameters')
    request = self._client.post(request_url, query, headers, content=body)
    pipeline_result = await self._client._pipeline.run(request, stream=False, **kwargs)
    http_response = pipeline_result.http_response

    if http_response.status_code not in (200, 202):
        map_error(status_code=http_response.status_code, response=http_response, error_map=errors)
        raise HttpResponseError(response=http_response, error_format=ARMErrorFormat)

    result = None
    if http_response.status_code == 200:
        result = self._deserialize('str', pipeline_result)
    if custom_cls:
        return custom_cls(pipeline_result, result, {})
    return result
_generatevpnclientpackage_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/generatevpnclientpackage'}  # type: ignore
async def begin_generatevpnclientpackage(
    self,
    resource_group_name: str,
    virtual_network_gateway_name: str,
    parameters: "_models.VpnClientParameters",
    **kwargs: Any
) -> AsyncLROPoller[str]:
    """Generates VPN client package for P2S client of the virtual network gateway in the specified
    resource group.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param virtual_network_gateway_name: The name of the virtual network gateway.
    :type virtual_network_gateway_name: str
    :param parameters: Parameters supplied to the generate virtual network gateway VPN client
     package operation.
    :type parameters: ~azure.mgmt.network.v2017_11_01.models.VpnClientParameters
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be AsyncARMPolling.
     Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
    :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of AsyncLROPoller that returns either str or the result of cls(response)
    :rtype: ~azure.core.polling.AsyncLROPoller[str]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType[str]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    if cont_token is None:
        # Fresh start: send the initial POST. cls=lambda x,y,z: x makes the
        # helper hand back the raw pipeline response required by the poller.
        raw_result = await self._generatevpnclientpackage_initial(
            resource_group_name=resource_group_name,
            virtual_network_gateway_name=virtual_network_gateway_name,
            parameters=parameters,
            cls=lambda x,y,z: x,
            **kwargs
        )
    # These request-scoped kwargs were consumed by the initial call; remove
    # them so they are not forwarded to the polling method below.
    kwargs.pop('error_map', None)
    kwargs.pop('content_type', None)

    def get_long_running_output(pipeline_response):
        # The final response body is a plain string; honor a custom cls hook.
        deserialized = self._deserialize('str', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    # Arguments the poller uses to format the URLs it polls.
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    # Final LRO state is read via the Location header ('final-state-via': 'location').
    if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
    elif polling is False: polling_method = AsyncNoPolling()
    else: polling_method = polling
    if cont_token:
        # Resume from previously saved poller state; no new initial call made.
        return AsyncLROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_generatevpnclientpackage.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/generatevpnclientpackage'}  # type: ignore
async def _generate_vpn_profile_initial(
    self,
    resource_group_name: str,
    virtual_network_gateway_name: str,
    parameters: "_models.VpnClientParameters",
    **kwargs: Any
) -> Optional[str]:
    """Send the initial POST that requests generation of a VPN profile.

    Returns the service's string payload on a 200 response, or ``None`` when
    the operation is merely accepted (202).
    """
    custom_cls = kwargs.pop('cls', None)  # type: ClsType[Optional[str]]
    errors = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    errors.update(kwargs.pop('error_map', {}))
    api_version = "2017-11-01"
    content_type = kwargs.pop("content_type", "application/json")
    accept = "application/json, text/json"

    # Fill the URL template from the operation metadata.
    path_args = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    request_url = self._client.format_url(self._generate_vpn_profile_initial.metadata['url'], **path_args)  # type: ignore

    query = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    headers = {
        'Content-Type': self._serialize.header("content_type", content_type, 'str'),
        'Accept': self._serialize.header("accept", accept, 'str'),
    }  # type: Dict[str, Any]

    # Serialize the body and run the POST through the client pipeline.
    body = self._serialize.body(parameters, 'VpnClientParameters')
    request = self._client.post(request_url, query, headers, content=body)
    pipeline_result = await self._client._pipeline.run(request, stream=False, **kwargs)
    http_response = pipeline_result.http_response

    if http_response.status_code not in (200, 202):
        map_error(status_code=http_response.status_code, response=http_response, error_map=errors)
        raise HttpResponseError(response=http_response, error_format=ARMErrorFormat)

    result = None
    if http_response.status_code == 200:
        result = self._deserialize('str', pipeline_result)
    if custom_cls:
        return custom_cls(pipeline_result, result, {})
    return result
_generate_vpn_profile_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/generatevpnprofile'}  # type: ignore
async def begin_generate_vpn_profile(
    self,
    resource_group_name: str,
    virtual_network_gateway_name: str,
    parameters: "_models.VpnClientParameters",
    **kwargs: Any
) -> AsyncLROPoller[str]:
    """Generates VPN profile for P2S client of the virtual network gateway in the specified resource
    group. Used for IKEV2 and radius based authentication.

    :param resource_group_name: The name of the resource group.
    :type resource_group_name: str
    :param virtual_network_gateway_name: The name of the virtual network gateway.
    :type virtual_network_gateway_name: str
    :param parameters: Parameters supplied to the generate virtual network gateway VPN client
     package operation.
    :type parameters: ~azure.mgmt.network.v2017_11_01.models.VpnClientParameters
    :keyword callable cls: A custom type or function that will be passed the direct response
    :keyword str continuation_token: A continuation token to restart a poller from a saved state.
    :keyword polling: By default, your polling method will be AsyncARMPolling.
     Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
    :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
    :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
    :return: An instance of AsyncLROPoller that returns either str or the result of cls(response)
    :rtype: ~azure.core.polling.AsyncLROPoller[str]
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
    cls = kwargs.pop('cls', None)  # type: ClsType[str]
    lro_delay = kwargs.pop(
        'polling_interval',
        self._config.polling_interval
    )
    cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
    if cont_token is None:
        # Fresh start: send the initial POST. cls=lambda x,y,z: x makes the
        # helper hand back the raw pipeline response required by the poller.
        raw_result = await self._generate_vpn_profile_initial(
            resource_group_name=resource_group_name,
            virtual_network_gateway_name=virtual_network_gateway_name,
            parameters=parameters,
            cls=lambda x,y,z: x,
            **kwargs
        )
    # These request-scoped kwargs were consumed by the initial call; remove
    # them so they are not forwarded to the polling method below.
    kwargs.pop('error_map', None)
    kwargs.pop('content_type', None)

    def get_long_running_output(pipeline_response):
        # The final response body is a plain string; honor a custom cls hook.
        deserialized = self._deserialize('str', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    # Arguments the poller uses to format the URLs it polls.
    path_format_arguments = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    # Final LRO state is read via the Location header ('final-state-via': 'location').
    if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
    elif polling is False: polling_method = AsyncNoPolling()
    else: polling_method = polling
    if cont_token:
        # Resume from previously saved poller state; no new initial call made.
        return AsyncLROPoller.from_continuation_token(
            polling_method=polling_method,
            continuation_token=cont_token,
            client=self._client,
            deserialization_callback=get_long_running_output
        )
    else:
        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_generate_vpn_profile.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/generatevpnprofile'}  # type: ignore
async def _get_vpn_profile_package_url_initial(
    self,
    resource_group_name: str,
    virtual_network_gateway_name: str,
    **kwargs: Any
) -> Optional[str]:
    """Send the initial POST that fetches the pre-generated VPN profile package URL.

    Returns the service's string payload on a 200 response, or ``None`` when
    the operation is merely accepted (202).
    """
    custom_cls = kwargs.pop('cls', None)  # type: ClsType[Optional[str]]
    errors = {
        401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
    }
    errors.update(kwargs.pop('error_map', {}))
    api_version = "2017-11-01"
    accept = "application/json, text/json"

    # Fill the URL template from the operation metadata.
    path_args = {
        'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
        'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
        'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
    }
    request_url = self._client.format_url(self._get_vpn_profile_package_url_initial.metadata['url'], **path_args)  # type: ignore

    query = {'api-version': self._serialize.query("api_version", api_version, 'str')}  # type: Dict[str, Any]
    headers = {'Accept': self._serialize.header("accept", accept, 'str')}  # type: Dict[str, Any]

    request = self._client.post(request_url, query, headers)
    pipeline_result = await self._client._pipeline.run(request, stream=False, **kwargs)
    http_response = pipeline_result.http_response

    if http_response.status_code not in (200, 202):
        map_error(status_code=http_response.status_code, response=http_response, error_map=errors)
        raise HttpResponseError(response=http_response, error_format=ARMErrorFormat)

    result = None
    if http_response.status_code == 200:
        result = self._deserialize('str', pipeline_result)
    if custom_cls:
        return custom_cls(pipeline_result, result, {})
    return result
_get_vpn_profile_package_url_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getvpnprofilepackageurl'}  # type: ignore
    async def begin_get_vpn_profile_package_url(
        self,
        resource_group_name: str,
        virtual_network_gateway_name: str,
        **kwargs: Any
    ) -> AsyncLROPoller[str]:
        """Gets pre-generated VPN profile for P2S client of the virtual network gateway in the specified
        resource group. The profile needs to be generated first using generateVpnProfile.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param virtual_network_gateway_name: The name of the virtual network gateway.
        :type virtual_network_gateway_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling.
        Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either str or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[str]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Pop LRO-control kwargs before forwarding the rest to the pipeline.
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[str]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # No saved state: fire the initial POST. cls=lambda keeps the raw
            # pipeline response so the poller can drive the LRO itself.
            raw_result = await self._get_vpn_profile_package_url_initial(
                resource_group_name=resource_group_name,
                virtual_network_gateway_name=virtual_network_gateway_name,
                cls=lambda x,y,z: x,
                **kwargs
            )
        # These were consumed by the initial call; drop them before polling.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            # Deserialize the final LRO response body (a plain string URL).
            deserialized = self._deserialize('str', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        # Needed by the poller to re-format polling URLs returned by the service.
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        # final-state-via location: the result is fetched from the Location header URL.
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume a previously started operation from its saved token.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_get_vpn_profile_package_url.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getvpnprofilepackageurl'}  # type: ignore
    async def _get_bgp_peer_status_initial(
        self,
        resource_group_name: str,
        virtual_network_gateway_name: str,
        peer: Optional[str] = None,
        **kwargs: Any
    ) -> Optional["_models.BgpPeerStatusListResult"]:
        """Issue the initial POST of the get-BGP-peer-status long-running operation.

        Returns the deserialized ``BgpPeerStatusListResult`` when the service
        answers 200 immediately, or ``None`` on 202 (accepted; the poller in
        ``begin_get_bgp_peer_status`` retrieves the final result).
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.BgpPeerStatusListResult"]]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2017-11-01"
        accept = "application/json, text/json"
        # Construct URL
        url = self._get_bgp_peer_status_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        # 'peer' is optional; omit the query parameter entirely when not given.
        if peer is not None:
            query_parameters['peer'] = self._serialize.query("peer", peer, 'str')
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # 200 = completed synchronously, 202 = accepted (async); anything else is an error.
        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('BgpPeerStatusListResult', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _get_bgp_peer_status_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getBgpPeerStatus'}  # type: ignore
    async def begin_get_bgp_peer_status(
        self,
        resource_group_name: str,
        virtual_network_gateway_name: str,
        peer: Optional[str] = None,
        **kwargs: Any
    ) -> AsyncLROPoller["_models.BgpPeerStatusListResult"]:
        """The GetBgpPeerStatus operation retrieves the status of all BGP peers.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param virtual_network_gateway_name: The name of the virtual network gateway.
        :type virtual_network_gateway_name: str
        :param peer: The IP address of the peer to retrieve the status of.
        :type peer: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling.
        Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either BgpPeerStatusListResult or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2017_11_01.models.BgpPeerStatusListResult]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Pop LRO-control kwargs before forwarding the rest to the pipeline.
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.BgpPeerStatusListResult"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # No saved state: fire the initial POST. cls=lambda keeps the raw
            # pipeline response so the poller can drive the LRO itself.
            raw_result = await self._get_bgp_peer_status_initial(
                resource_group_name=resource_group_name,
                virtual_network_gateway_name=virtual_network_gateway_name,
                peer=peer,
                cls=lambda x,y,z: x,
                **kwargs
            )
        # These were consumed by the initial call; drop them before polling.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            # Deserialize the final LRO response body.
            deserialized = self._deserialize('BgpPeerStatusListResult', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        # Needed by the poller to re-format polling URLs returned by the service.
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        # final-state-via location: the result is fetched from the Location header URL.
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume a previously started operation from its saved token.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_get_bgp_peer_status.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getBgpPeerStatus'}  # type: ignore
    async def supported_vpn_devices(
        self,
        resource_group_name: str,
        virtual_network_gateway_name: str,
        **kwargs: Any
    ) -> str:
        """Gets a xml format representation for supported vpn devices.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param virtual_network_gateway_name: The name of the virtual network gateway.
        :type virtual_network_gateway_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: str, or the result of cls(response)
        :rtype: str
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[str]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2017-11-01"
        accept = "application/json, text/json"
        # Construct URL
        url = self.supported_vpn_devices.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        # Synchronous (non-LRO) operation: only 200 is a success status.
        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        # Response body is an XML document returned as a JSON string.
        deserialized = self._deserialize('str', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    supported_vpn_devices.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/supportedvpndevices'}  # type: ignore
    async def _get_learned_routes_initial(
        self,
        resource_group_name: str,
        virtual_network_gateway_name: str,
        **kwargs: Any
    ) -> Optional["_models.GatewayRouteListResult"]:
        """Issue the initial POST of the get-learned-routes long-running operation.

        Returns the deserialized ``GatewayRouteListResult`` when the service
        answers 200 immediately, or ``None`` on 202 (accepted; the poller in
        ``begin_get_learned_routes`` retrieves the final result).
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.GatewayRouteListResult"]]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2017-11-01"
        accept = "application/json, text/json"
        # Construct URL
        url = self._get_learned_routes_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # 200 = completed synchronously, 202 = accepted (async); anything else is an error.
        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('GatewayRouteListResult', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _get_learned_routes_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getLearnedRoutes'}  # type: ignore
    async def begin_get_learned_routes(
        self,
        resource_group_name: str,
        virtual_network_gateway_name: str,
        **kwargs: Any
    ) -> AsyncLROPoller["_models.GatewayRouteListResult"]:
        """This operation retrieves a list of routes the virtual network gateway has learned, including
        routes learned from BGP peers.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param virtual_network_gateway_name: The name of the virtual network gateway.
        :type virtual_network_gateway_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling.
        Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either GatewayRouteListResult or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2017_11_01.models.GatewayRouteListResult]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Pop LRO-control kwargs before forwarding the rest to the pipeline.
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.GatewayRouteListResult"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # No saved state: fire the initial POST. cls=lambda keeps the raw
            # pipeline response so the poller can drive the LRO itself.
            raw_result = await self._get_learned_routes_initial(
                resource_group_name=resource_group_name,
                virtual_network_gateway_name=virtual_network_gateway_name,
                cls=lambda x,y,z: x,
                **kwargs
            )
        # These were consumed by the initial call; drop them before polling.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            # Deserialize the final LRO response body.
            deserialized = self._deserialize('GatewayRouteListResult', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        # Needed by the poller to re-format polling URLs returned by the service.
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        # final-state-via location: the result is fetched from the Location header URL.
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume a previously started operation from its saved token.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_get_learned_routes.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getLearnedRoutes'}  # type: ignore
    async def _get_advertised_routes_initial(
        self,
        resource_group_name: str,
        virtual_network_gateway_name: str,
        peer: str,
        **kwargs: Any
    ) -> Optional["_models.GatewayRouteListResult"]:
        """Issue the initial POST of the get-advertised-routes long-running operation.

        Unlike ``_get_bgp_peer_status_initial``, the ``peer`` argument is
        required here and always sent as a query parameter. Returns the
        deserialized ``GatewayRouteListResult`` on 200, or ``None`` on 202
        (accepted; the poller in ``begin_get_advertised_routes`` retrieves
        the final result).
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[Optional["_models.GatewayRouteListResult"]]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2017-11-01"
        accept = "application/json, text/json"
        # Construct URL
        url = self._get_advertised_routes_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['peer'] = self._serialize.query("peer", peer, 'str')
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        request = self._client.post(url, query_parameters, header_parameters)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        # 200 = completed synchronously, 202 = accepted (async); anything else is an error.
        if response.status_code not in [200, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = None
        if response.status_code == 200:
            deserialized = self._deserialize('GatewayRouteListResult', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    _get_advertised_routes_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getAdvertisedRoutes'}  # type: ignore
    async def begin_get_advertised_routes(
        self,
        resource_group_name: str,
        virtual_network_gateway_name: str,
        peer: str,
        **kwargs: Any
    ) -> AsyncLROPoller["_models.GatewayRouteListResult"]:
        """This operation retrieves a list of routes the virtual network gateway is advertising to the
        specified peer.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param virtual_network_gateway_name: The name of the virtual network gateway.
        :type virtual_network_gateway_name: str
        :param peer: The IP address of the peer.
        :type peer: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling.
        Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either GatewayRouteListResult or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2017_11_01.models.GatewayRouteListResult]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Pop LRO-control kwargs before forwarding the rest to the pipeline.
        polling = kwargs.pop('polling', True)  # type: Union[bool, AsyncPollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.GatewayRouteListResult"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        if cont_token is None:
            # No saved state: fire the initial POST. cls=lambda keeps the raw
            # pipeline response so the poller can drive the LRO itself.
            raw_result = await self._get_advertised_routes_initial(
                resource_group_name=resource_group_name,
                virtual_network_gateway_name=virtual_network_gateway_name,
                peer=peer,
                cls=lambda x,y,z: x,
                **kwargs
            )
        # These were consumed by the initial call; drop them before polling.
        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)
        def get_long_running_output(pipeline_response):
            # Deserialize the final LRO response body.
            deserialized = self._deserialize('GatewayRouteListResult', pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized
        # Needed by the poller to re-format polling URLs returned by the service.
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        # final-state-via location: the result is fetched from the Location header URL.
        if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = AsyncNoPolling()
        else: polling_method = polling
        if cont_token:
            # Resume a previously started operation from its saved token.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_get_advertised_routes.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/getAdvertisedRoutes'}  # type: ignore
    async def vpn_device_configuration_script(
        self,
        resource_group_name: str,
        virtual_network_gateway_connection_name: str,
        parameters: "_models.VpnDeviceScriptParameters",
        **kwargs: Any
    ) -> str:
        """Gets a xml format representation for vpn device configuration script.
        :param resource_group_name: The name of the resource group.
        :type resource_group_name: str
        :param virtual_network_gateway_connection_name: The name of the virtual network gateway
        connection for which the configuration script is generated.
        :type virtual_network_gateway_connection_name: str
        :param parameters: Parameters supplied to the generate vpn device script operation.
        :type parameters: ~azure.mgmt.network.v2017_11_01.models.VpnDeviceScriptParameters
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: str, or the result of cls(response)
        :rtype: str
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType[str]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2017-11-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json, text/json"
        # Construct URL
        # Note: this operation is addressed by connection name, not gateway name.
        url = self.vpn_device_configuration_script.metadata['url']  # type: ignore
        path_format_arguments = {
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualNetworkGatewayConnectionName': self._serialize.url("virtual_network_gateway_connection_name", virtual_network_gateway_connection_name, 'str'),
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)
        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
        # Serialize the request body from the model object.
        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(parameters, 'VpnDeviceScriptParameters')
        body_content_kwargs['content'] = body_content
        # Synchronous (non-LRO) operation: only 200 is a success status.
        request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        # Response body is the device configuration script returned as a string.
        deserialized = self._deserialize('str', pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    vpn_device_configuration_script.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/connections/{virtualNetworkGatewayConnectionName}/vpndeviceconfigurationscript'}  # type: ignore
| [
"noreply@github.com"
] | noreply@github.com |
7d7cada6debb9f178ad60a991eb6e7cb110ccb8e | dcda5ba16474dd8ff650e04e7f4a9bf700f6a9ff | /shop/admin.py | 35ea34c67b82499abed52b603a899222fbc1937b | [] | no_license | 007vict/shopbyexample | 2084d6e53faafb5c7e856cc8b3a5ff43bc3a82e2 | bc7dcfe5818499731c3cbf956c9c0b95cf3791da | refs/heads/master | 2022-12-21T13:05:08.425653 | 2019-04-10T10:30:41 | 2019-04-10T10:30:41 | 177,291,341 | 0 | 0 | null | 2022-12-08T04:58:00 | 2019-03-23T13:18:59 | JavaScript | UTF-8 | Python | false | false | 667 | py | from django.contrib import admin
from .models import Category, Product
from parler.admin import TranslatableAdmin
@admin.register(Category)
class CategoryAdmin(TranslatableAdmin):
list_display = ['name', 'slug']
def get_prepopulated_fields(self, request, obj=None):
return {'slug': ('name',)}
@admin.register(Product)
class ProductAdmin(TranslatableAdmin):
list_display = ['name', 'slug', 'price',
'available', 'created', 'updated']
list_filter = ['available', 'created', 'updated']
list_editable = ['price', 'available']
def get_prepopulated_fields(self, request, obj=None):
return {'slug': ('name',)}
| [
"super_vg@bk.ru"
] | super_vg@bk.ru |
c7bf6347a9483578c151ba6fdf82003fc374d7ff | f3b233e5053e28fa95c549017bd75a30456eb50c | /ptp1b_input/L82/82-80_MD_NVT_rerun/set_2.py | 418a71c6ac0ba7775d409fa90b0c3d5147873a7b | [] | no_license | AnguseZhang/Input_TI | ddf2ed40ff1c0aa24eea3275b83d4d405b50b820 | 50ada0833890be9e261c967d00948f998313cb60 | refs/heads/master | 2021-05-25T15:02:38.858785 | 2020-02-18T16:57:04 | 2020-02-18T16:57:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 741 | py | import os
dir = '/mnt/scratch/songlin3/run/ptp1b/L82/MD_NVT_rerun/ti_one-step/82_80/'
filesdir = dir + 'files/'
temp_prodin = filesdir + 'temp_prod_2.in'
temp_pbs = filesdir + 'temp_2.pbs'
lambd = [ 0.00922, 0.04794, 0.11505, 0.20634, 0.31608, 0.43738, 0.56262, 0.68392, 0.79366, 0.88495, 0.95206, 0.99078]
for j in lambd:
os.chdir("%6.5f" %(j))
workdir = dir + "%6.5f" %(j) + '/'
#prodin
prodin = workdir + "%6.5f_prod_2.in" %(j)
os.system("cp %s %s" %(temp_prodin, prodin))
os.system("sed -i 's/XXX/%6.5f/g' %s" %(j, prodin))
#PBS
pbs = workdir + "%6.5f_2.pbs" %(j)
os.system("cp %s %s" %(temp_pbs, pbs))
os.system("sed -i 's/XXX/%6.5f/g' %s" %(j, pbs))
#submit pbs
#os.system("qsub %s" %(pbs))
os.chdir(dir)
| [
"songlin3@msu.edu"
] | songlin3@msu.edu |
616df77085bb95807dd89413c1aa21dd7e7250a7 | 442ccaa620eb22d51378a45941d021b44d7cde98 | /src/courses/admin.py | daed5cd7fc1160232f05616b0ed9cc628995f6df | [] | no_license | achiengcindy/supreme-school | 43884b3c34fcac51d8f6ab4df38ee923f473fec9 | c3777cb5f63ec41a167f87f0c7ec6a575e88ff0b | refs/heads/master | 2021-01-16T01:57:01.590225 | 2020-03-26T01:40:14 | 2020-03-26T01:40:14 | 242,935,025 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 587 | py | from django.contrib import admin
from .models import Subject, Course, Module
# Register your models here.
@admin.register(Subject)
class SubjectAdmin(admin.ModelAdmin):
list_display = ['title', 'slug']
prepopulated_fields = {'slug': ('title',)}
class ModuleInline(admin.StackedInline):
model = Module
@admin.register(Course)
class CourseAdmin(admin.ModelAdmin):
list_display = ['title', 'subject', 'created']
list_filter = ['created', 'subject']
search_fields = ['title', 'overview']
prepopulated_fields = {'slug': ('title',)}
inlines = [ModuleInline]
| [
"achiengcindy36@gmail.com"
] | achiengcindy36@gmail.com |
9f31dcd636246865c64704e13cfc9ea76c727b6e | ccd21ace0d4b33a26e15e28292b67a9d01e3775f | /venv/Lib/site-packages/verify_email/confirm.py | d56ba7688c51a62ddff41cd5704fb8acc05283d2 | [] | no_license | Sofia7311/djangoProjectcopy | 68237e4181f1e7e3883552863dc4bef9f5a6971e | f061ac6423a9a280d1995f1d2818e03b71695c40 | refs/heads/master | 2023-03-28T16:25:51.021092 | 2021-03-24T12:28:51 | 2021-03-24T12:28:51 | 351,071,329 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,413 | py | from base64 import urlsafe_b64decode
from binascii import Error as BASE64ERROR
from django.contrib.auth import get_user_model
from django.utils import timezone
from django.contrib.auth.tokens import default_token_generator
class _UserActivationProcess:
"""
This class is pretty self.explanatory...
"""
def __init__(self):
pass
def __activate_user(self, user):
user.is_active = True
user.last_login = timezone.now()
user.save()
def verify_token(self, useremail, usertoken):
try:
email = urlsafe_b64decode(useremail).decode('utf-8')
token = urlsafe_b64decode(usertoken).decode('utf-8')
except BASE64ERROR:
return False
inactive_users = get_user_model().objects.filter(email=email)
try:
if inactive_users:
for unique_user in inactive_users:
valid = default_token_generator.check_token(unique_user, token)
print('inside confirm.py')
print(unique_user, token, valid)
if valid:
self.__activate_user(unique_user)
return valid
return False
return False
except:
return False
def _verify_user(useremail, usertoken):
return _UserActivationProcess().verify_token(useremail, usertoken)
| [
"Sofiadevi.Vajravel@redeemertech.co.uk"
] | Sofiadevi.Vajravel@redeemertech.co.uk |
ae70d5109c5dcd25b68d33b38404401d72960843 | ad972f9baebfa69eafa7e4cea7aae5e76c3ab285 | /venue/views.py | 3cbea475579ca6689c96f9b74dfc4018c9207846 | [] | no_license | KevinZhengNCU/software_enigeering | cce3380d0f5964f9cc301113d70392b76d608fe1 | 270627dec5c7539d16050dfd19a325270978cb7b | refs/heads/master | 2022-04-16T08:24:14.370222 | 2020-03-31T04:31:02 | 2020-03-31T04:31:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,005 | py | from django.shortcuts import render
import urllib.request
from .models import Venue
from django.db.models import Q
import urllib.request
import ssl
ssl._create_default_https_context = ssl._create_unverified_context
response = urllib.request.urlopen('https://www.python.org')
# Create your views here.
#gmaps.configure(api_key='AIzaSyDLJ0y_piefTd33M6ySRtEgztYSdmjDzLc')
#新版
def venuesearch(request):
venues = Venue.objects.all()
return render(request, 'venuesearch.html',locals())
#
def venueresult(request):
    """Filter venues by the POSTed city and/or venue type and render results."""
    venues = Venue.objects.all()
    query1 = request.POST.get("city")
    query2 = request.POST.get("type")
    # Both branches of the original if/elif ran the exact same filter, so a
    # single guard is equivalent: narrow the queryset whenever either field
    # was submitted.
    if query1 or query2:
        venues = venues.filter(
            Q(locationName__icontains=query2) &
            Q(location__icontains=query1)).distinct()
    return render(request, 'venueresult.html', locals())
| [
"v123830153@gmail.com"
] | v123830153@gmail.com |
5510844e24aaa869228afa3b3dcb1837115eef51 | 1cd160b33e71611d7d9e6cb961fc94fac6c3b9ce | /cadastro_de_valore.py | 7ee561383a06184f4af9b05a81e092f02d78edbf | [] | no_license | lhaislla/Introducao-a-programacao | ea5dd6c6e1bb507e60434ccb9386688a7f4cb668 | 92ed641ff69f306832ffb80abc7ca89a81b2a0e5 | refs/heads/master | 2022-04-12T20:24:45.155800 | 2020-03-31T19:57:59 | 2020-03-31T19:57:59 | 207,156,155 | 0 | 0 | null | 2020-03-31T19:58:00 | 2019-09-08T18:34:26 | Python | UTF-8 | Python | false | false | 380 | py | lista = list()
# Collect unique integers from the user until they answer "N", then print
# them in ascending order.
lista = []
keep_going = True
while keep_going:
    valor = int(input('Digite um valor: '))
    if valor in lista:
        print('Valor duplicado! não vou adicionar...')
    else:
        lista.append(valor)
        print('Adicionado com sucesso...')
    resposta = str(input('Quer continuar? [S/N] ')).upper()
    # Stop when the (uppercased) answer is a substring of 'Nn' — this also
    # stops on an empty answer, exactly like the original test.
    keep_going = resposta not in 'Nn'
lista.sort()
print(f'Você digitou os valores: {lista}')
| [
"noreply@github.com"
] | noreply@github.com |
c51faf4d59a67910bf2fc594e9563bf8055c494a | bf8ade7a8ece3c4a68bdc66dc7ef8988fcd9f631 | /sem_seg/organise_apple_tree.py | 1aedd41117f4661479c961f46696a1b58f9fd00c | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | artzet-s/pointnet | b28e1c3a1a323e3b25bb92cf71703a87daaa959a | 5b49b61d630e791406d8132cebb0e616aaa0ec07 | refs/heads/master | 2020-09-26T01:50:46.078262 | 2020-02-28T13:10:18 | 2020-02-28T13:10:18 | 226,134,784 | 0 | 0 | NOASSERTION | 2019-12-05T15:39:38 | 2019-12-05T15:39:38 | null | UTF-8 | Python | false | false | 8,603 | py | import os
import numpy
import sys
import glob
# ==============================================================================
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
ROOT_DIR = os.path.dirname(BASE_DIR)
sys.path.append(BASE_DIR)
sys.path.append(os.path.join(ROOT_DIR, 'utils'))
import shutil
import random
import multiprocessing
import indoor3d_util
def multiprocess_function(function, elements, nb_process=2):
    """Map ``function`` over ``elements`` in a process pool, logging progress.

    Results are consumed (and discarded) as they complete; a failure on one
    element is reported but does not stop the remaining work.
    """
    pool = multiprocessing.Pool(nb_process)
    nb_elements = len(elements)
    it = pool.imap_unordered(function, elements)
    for i in range(nb_elements):
        try:
            # ``it.next()`` was the Python 2 iterator protocol; the builtin
            # ``next()`` is required on Python 3.
            next(it)
            print("{} : {} / {} ".format(function, i, nb_elements))
            sys.stdout.flush()
        except StopIteration:
            # Iterator exhausted early — nothing left to report as an error.
            break
        except Exception as e:
            print("{} : {} / {} - ERROR {}".format(
                function, i, nb_elements, e))
            sys.stdout.flush()
    pool.close()
    pool.join()
    print("%s : %d / %d" % (function, nb_elements, nb_elements))
    sys.stdout.flush()
def _get_indice_3d_windows(xyz, x0, xn, y0, yn, z0, zn):
indx = numpy.bitwise_and(x0 <= xyz[:, 0], xyz[:, 0] < xn)
indy = numpy.bitwise_and(y0 <= xyz[:, 1], xyz[:, 1] < yn)
indz = numpy.bitwise_and(z0 <= xyz[:, 2], xyz[:, 2] < zn)
return numpy.bitwise_and(numpy.bitwise_and(indx, indy), indz)
def compute_min_max(input_folder="/home/artzet_s/code/dataset/afef_apple_tree_filtred",
                    output_filename="mean_data.txt"):
    """Scan every ``pc_2018*.txt`` cloud in ``input_folder`` and write the
    column-wise minima (row 0) and maxima (row 1) to ``output_filename``.

    The folder and output path were hard-coded; they are now defaulted
    parameters, so existing call sites are unaffected.
    """
    per_file_max, per_file_min = [], []
    for filename in glob.glob("{}/pc_2018*.txt".format(input_folder)):
        data_label = numpy.loadtxt(filename)
        per_file_max.append(numpy.amax(data_label, axis=0))
        per_file_min.append(numpy.amin(data_label, axis=0))
    vmax = numpy.amax(numpy.array(per_file_max), axis=0)
    # BUG FIX: the global minimum must aggregate with ``amin``; the original
    # took ``amax`` of the per-file minima, overstating the lower bound.
    vmin = numpy.amin(numpy.array(per_file_min), axis=0)
    numpy.savetxt(output_filename, numpy.stack([vmin, vmax], axis=0))
def build_blocks(data,
                 label,
                 num_point,
                 test_mode=False,
                 K=6):
    """Cut a point cloud into fixed-size 3D windows and sample each window.

    The cloud is tiled with 0.25 m cubes; windows holding fewer than 500
    points are dropped.  In ``test_mode`` every sample produced by
    ``indoor3d_util.room2samples`` is kept; otherwise a window is kept only
    when it has > 100 points of each class, and a balanced half/half draw of
    ``num_point`` points is taken (with replacement via ``random.choice``).

    Returns ``(blocks, labels)`` stacked along axis 0, or ``(None, None)``
    when no window qualifies.
    """
    window_size=(0.25, 0.25, 0.25)
    # Collect blocks
    block_data_list = []
    block_label_list = []
    xyz = data[:, :3]
    ws = numpy.array(window_size)
    xyz_max = numpy.max(xyz, axis=0)
    xyz_min = numpy.min(xyz, axis=0)
    # Number of windows needed along each axis to cover the bounding box.
    pc_nb = numpy.ceil((xyz_max - xyz_min) / ws).astype(int)
    for i, j, k in numpy.ndindex((pc_nb[0], pc_nb[1], pc_nb[2])):
        x0, y0, z0 = xyz_min + ws * numpy.array([i, j, k])
        xn, yn, zn = xyz_min + ws * numpy.array([i + 1, j + 1, k + 1])
        cond = _get_indice_3d_windows(xyz, x0, xn, y0, yn, z0, zn)
        if numpy.count_nonzero(cond) < 500:
            continue
        block_data, block_label = data[cond], label[cond]
        block_data_sampled, block_label_sampled = indoor3d_util.room2samples(
            block_data, block_label, num_point, K=K)
        if test_mode:
            # NOTE(review): this inner ``i`` shadows the window index ``i``
            # from the ndindex loop — harmless here because ``i`` is
            # reassigned on the next iteration, but confusing to readers.
            for i in range(block_data_sampled.shape[0]):
                block_data_list.append(
                    numpy.expand_dims(block_data_sampled[i, :, ], 0))
                block_label_list.append(
                    numpy.expand_dims(block_label_sampled[i, :, 0], 0))
        else:
            if numpy.count_nonzero(block_label) > 100 and numpy.count_nonzero(block_label == 0) > 100:
                # Balanced resampling: half background (label 0), half
                # apple (label 1), drawn with replacement.
                indice_noise = numpy.random.choice(numpy.where(block_label == 0)[0], num_point // 2)
                indice_apple = numpy.random.choice(numpy.where(block_label == 1)[0], num_point // 2)
                block_data_sampled = numpy.concatenate([block_data[indice_noise, ...],
                                                        block_data[indice_apple, ...]])
                block_label_sampled = numpy.concatenate([block_label[indice_noise, ...],
                                                         block_label[indice_apple, ...]])
                block_data_list.append(numpy.expand_dims(block_data_sampled, 0))
                block_label_list.append(numpy.expand_dims(block_label_sampled, 0))
    if block_data_list:
        return numpy.concatenate(block_data_list, 0), numpy.concatenate(block_label_list, 0)
    else:
        return None, None
def block_xyzrad(data_label,
                 num_point,
                 min_max,
                 test_mode=False):
    """Block an Nx7 cloud using XYZ plus three attribute channels (K=6)."""
    features = data_label[:, :6]
    labels = data_label[:, -1].astype(numpy.uint8)
    # CENTRALIZE HERE: shift coordinates so the minimum corner is the origin.
    features[:, :3] = features[:, :3] - numpy.amin(features, 0)[0:3]
    # Min-max scale the attribute channels with the precomputed bounds.
    features[:, 3:6] = (features[:, 3:6] - min_max[0, 3:6]) / min_max[1, 3:6]
    return build_blocks(features,
                        labels,
                        num_point,
                        test_mode,
                        K=6)
def block_xyz(data_label,
              num_point,
              test_mode=False):
    """Block a labelled cloud using only the XYZ channels (K=3)."""
    points = data_label[:, :3]
    labels = data_label[:, -1].astype(numpy.uint8)
    # CENTRALIZE HERE: shift coordinates so the minimum corner is the origin.
    points[:, :3] = points[:, :3] - numpy.amin(points, 0)[0:3]
    return build_blocks(points,
                        labels,
                        num_point,
                        test_mode,
                        K=3)
def compute_block(input_filename, output_filename, min_max):
    """Load a labelled cloud (.npy), cut it into XYZRAD blocks and save them.

    Nothing is written when the cloud yields no qualifying block.
    """
    print(input_filename)
    data_label = numpy.load(input_filename)
    data, label = block_xyzrad(
        data_label,
        4096,
        min_max)
    if data is not None:
        # Append the labels as a trailing channel:
        # (blocks, points, 6) + (blocks, points, 1) -> (blocks, points, 7).
        # The original ran this reshape twice; the duplicate was a no-op
        # and has been removed.
        label = numpy.array([label]).reshape((label.shape[0], label.shape[1], 1))
        data_label = numpy.concatenate([data, label], axis=2)
        numpy.save(output_filename, data_label)
def compute_xyz_block(input_filename, output_filename):
    """Load a labelled cloud (.txt), cut it into XYZ-only blocks and save them.

    Nothing is written when the cloud yields no qualifying block.
    """
    data_label = numpy.loadtxt(input_filename)
    data, label = block_xyz(
        data_label,
        4096)
    if data is not None:
        # Append the labels as a trailing channel.  The original ran this
        # reshape twice; the duplicate was a no-op and has been removed.
        label = numpy.array([label]).reshape((label.shape[0], label.shape[1], 1))
        data_label = numpy.concatenate([data, label], axis=2)
        numpy.save(output_filename, data_label)
def organize_data():
    """Convert every labelled cloud in the train/test folders into block
    files, skipping outputs that already exist, then run the conversion in
    a 4-process pool."""
    # Alternative dataset roots kept for reference; only one pair of
    # input/output folders is active at a time.
    # input_folders = [
    # 	"/home/artzet_s/code/dataset/afef_apple_tree_filtred_labeled/train",
    # 	"/home/artzet_s/code/dataset/afef_apple_tree_filtred_labeled/test"]
    # output_folders = [
    # 	"/home/artzet_s/code/dataset/pn_train_xyzadr_25cm3_balanced",
    # 	"/home/artzet_s/code/dataset/pn_test_xyzadr_25cm3_balanced"]
    input_folders = [
        "/home/artzet_s/code/dataset/afef_apple_tree_filtred_labeled/aug_train",
        "/home/artzet_s/code/dataset/afef_apple_tree_filtred_labeled/aug_test"]
    output_folders = [
        "/home/artzet_s/code/dataset/pn_train_aug_xyzadr_25cm3_balanced",
        "/home/artzet_s/code/dataset/pn_test_aug_xyzadr_25cm3_balanced"]
    # input_folders = [
    # 	"/home/artzet_s/code/dataset/synthetic_data/train",
    # 	"/home/artzet_s/code/dataset/synthetic_data/test"]
    # output_folders = [
    # 	"/home/artzet_s/code/dataset/synthetic_data/train_block_synthetic_data",
    # 	"/home/artzet_s/code/dataset/synthetic_data/test_block_synthetic_data"]
    min_max = numpy.loadtxt("mean_data.txt")
    elements = list()
    # NOTE(review): the loop variable shadows the ``input_folders`` list —
    # safe because ``zip`` is evaluated first, but worth renaming.
    for input_folders, output_folder in zip(input_folders, output_folders):
        if not os.path.exists(output_folder):
            os.mkdir(output_folder)
        filenames = glob.glob(os.path.join(input_folders, "*.npy"))
        # filenames = glob.glob(os.path.join(input_folders, "*.txt"))
        for i, filename in enumerate(filenames):
            basename = os.path.basename(filename)[:-4]
            output_filename = os.path.join(output_folder,
                                           "{}.npy".format(basename))
            # Skip work that a previous run already produced.
            if not os.path.exists(output_filename):
                elements.append((filename, output_filename, min_max.copy()))
                # elements.append((filename, output_filename))
    print(len(elements))
    # NOTE(review): this prints the full work list, including numpy array
    # copies — very noisy for large datasets; likely debug output.
    print(elements)
    nb_process = 4
    pool = multiprocessing.Pool(nb_process)
    pool.starmap(compute_block, elements)
    # pool.starmap(compute_xyz_block, elements)
if __name__ == "__main__":
    organize_data()
| [
"simon.artzet@gmail.com"
] | simon.artzet@gmail.com |
0a8945f414d4b72d4de7965d496a29bfd79bf9cd | f47991b692697f29a468c9a8d3a3d80ca8bda055 | /HG/GAN_mnist/cycleGAN.py | 62f3e17598c735767fd47032a97d560aa94d6dbe | [] | no_license | hogunkee/CanWeDeep | 10dd4cd2ef5d61ba82772373caf50790229735e6 | 24dbc3cbd1fc7121a873066b3e6eb39ca4ce7094 | refs/heads/master | 2021-04-18T19:31:09.664515 | 2020-12-23T04:05:24 | 2020-12-23T04:05:24 | 126,709,875 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,299 | py | import tensorflow as tf
import numpy as np
import matplotlib.pyplot as plt
plt.switch_backend('agg')
from tensorflow.examples.tutorials.mnist import input_data
# data loader
mnist = input_data.read_data_sets("./sample/MNIST_data/")
train_x = mnist.train.images
train_y = mnist.train.labels
print(train_x.shape, train_y.shape)
# hyperparameters
total_epochs = 100
batch_size = 100
learning_rate = 2e-4
# batch normalization
def batch_norm(input_layer, scope, reuse):
    """Batch-normalize a NHWC tensor with learnable scale/offset.

    Creates (or reuses, when ``reuse`` is True) per-channel ``beta`` and
    ``gamma`` variables under ``scope``.  NOTE(review): statistics are the
    *batch* moments on every call — there is no moving average, so inference
    also normalizes with batch statistics; confirm this is intended.
    """
    BN_EPSILON = 1e-5
    # Channel count of the NHWC input; beta/gamma are one value per channel.
    dimension = int(input_layer.shape[3])
    with tf.variable_scope(scope):
        if reuse:
            tf.get_variable_scope().reuse_variables()
        # Moments over batch, height and width (axes 0-2).
        mean, variance = tf.nn.moments(input_layer, axes=[0, 1, 2])
        beta = tf.get_variable('beta', dimension, tf.float32,
                               initializer=tf.constant_initializer(0.0, tf.float32))
        gamma = tf.get_variable('gamma', dimension, tf.float32,
                                initializer=tf.constant_initializer(1.0, tf.float32))
        bn_layer = tf.nn.batch_normalization(input_layer, mean, variance, \
                                             beta, gamma, BN_EPSILON)
    return bn_layer
# generator
def generator_A2B(x, reuse = False):
    """Encoder/decoder generator for the A->B direction (CycleGAN-style).

    NOTE(review): this block has several inconsistencies that will fail at
    graph-construction or run time — flagged rather than silently changed:
      * the decoder reuses variable names 'w3'/'b3' already taken by the
        encoder in the same 'GenA2B' scope, so ``tf.get_variable`` raises
        unless ``reuse`` is set;
      * the first conv filter expects 3 input channels but ``x`` is
        reshaped to [-1, 28, 28, 1];
      * the deconvolution output shapes hard-code 128/256 spatial sizes,
        inconsistent with a 28x28 input;
      * the final reshape targets 3*256*256 values while the last layer
        emits a single channel.
    """
    with tf.variable_scope('GenA2B', reuse=reuse) as scope:
        # Convolution layers (encoder)
        dw1 = tf.get_variable('w1', [7,7,3,64], initializer = \
            tf.random_normal_initializer(mean=0.0, stddev=0.01))
        db1 = tf.get_variable('b1', [64], initializer = \
            tf.random_normal_initializer(mean=0.0, stddev=0.01))
        dw2 = tf.get_variable('w2', [3,3,64,128], initializer = \
            tf.random_normal_initializer(mean=0.0, stddev=0.01))
        db2 = tf.get_variable('b2', [128], initializer = \
            tf.random_normal_initializer(mean=0.0, stddev=0.01))
        dw3 = tf.get_variable('w3', [3,3,128,256], initializer = \
            tf.random_normal_initializer(mean=0.0, stddev=0.01))
        db3 = tf.get_variable('b3', [256], initializer = \
            tf.random_normal_initializer(mean=0.0, stddev=0.01))
        # Residual Blocks
        # Deconvolution layers (decoder) — see the name-collision note above.
        gw3 = tf.get_variable('w3', [3,3,128,256], initializer = \
            tf.random_normal_initializer(mean=0.0, stddev=0.01))
        gb3 = tf.get_variable('b3', [128], initializer = \
            tf.random_normal_initializer(mean=0.0, stddev=0.01))
        gw4 = tf.get_variable('w4', [3,3,64,128], initializer = \
            tf.random_normal_initializer(mean=0.0, stddev=0.01))
        gb4 = tf.get_variable('b4', [64], initializer = \
            tf.random_normal_initializer(mean=0.0, stddev=0.01))
        gw5 = tf.get_variable('w5', [7,7,3,64], initializer = \
            tf.random_normal_initializer(mean=0.0, stddev=0.01))
        gb5 = tf.get_variable('b5', [1], initializer = \
            tf.random_normal_initializer(mean=0.0, stddev=0.01))
        x_reshape = tf.reshape(x, [-1, 28, 28, 1])
        h = tf.nn.conv2d(x_reshape, dw1, strides=[1,1,1,1],padding='SAME') + db1
        h = batch_norm(h, 'D-bn1', reuse)
        h = tf.nn.relu(h)
        h = tf.nn.conv2d(h, dw2, strides=[1,2,2,1],padding='SAME') + db2
        h = batch_norm(h, 'D-bn2', reuse)
        h = tf.nn.relu(h)
        h = tf.nn.conv2d(h, dw3, strides=[1,2,2,1],padding='SAME') + db3
        h = batch_norm(h, 'D-bn3', reuse)
        h = tf.nn.relu(h)
        # residual blocks
        h = tf.nn.conv2d_transpose(h, gw3, [tf.shape(h)[0], 128, 128, 128], \
            strides=[1,2,2,1], padding='SAME') + gb3
        h = batch_norm(h, 'G-bn3', reuse)
        h = tf.nn.relu(h)
        h = tf.nn.conv2d_transpose(h, gw4, [tf.shape(h)[0], 256, 256, 64], \
            strides=[1,2,2,1], padding='SAME') + gb4
        h = batch_norm(h, 'G-bn4', reuse)
        h = tf.nn.relu(h)
        h = tf.nn.conv2d_transpose(h, gw5, [tf.shape(h)[0], 256, 256, 1], \
            strides=[1,1,1,1], padding='SAME') + gb5
        output = tf.reshape(h, [-1, 3*256*256])
        return output
# discriminator
def discriminator(x, reuse=False):
    """PatchGAN-style discriminator: five stride-2/stride-1 convolutions.

    NOTE(review): the first filter expects 3 input channels ([4,4,3,64]) but
    ``x`` is reshaped to [-1, 28, 28, 1] — this will fail at graph build;
    confirm whether the model was meant for RGB inputs.
    """
    with tf.variable_scope('Dis', reuse=reuse) as scope:
        dw1 = tf.get_variable('w1', [4,4,3,64], initializer = \
            tf.random_normal_initializer(mean=0.0, stddev=0.01))
        db1 = tf.get_variable('b1', [64], initializer = \
            tf.random_normal_initializer(mean=0.0, stddev=0.01))
        dw2 = tf.get_variable('w2', [4,4,64,128], initializer = \
            tf.random_normal_initializer(mean=0.0, stddev=0.01))
        db2 = tf.get_variable('b2', [128], initializer = \
            tf.random_normal_initializer(mean=0.0, stddev=0.01))
        dw3 = tf.get_variable('w3', [4,4,128,256], initializer = \
            tf.random_normal_initializer(mean=0.0, stddev=0.01))
        db3 = tf.get_variable('b3', [256], initializer = \
            tf.random_normal_initializer(mean=0.0, stddev=0.01))
        dw4 = tf.get_variable('w4', [4,4,256,512], initializer = \
            tf.random_normal_initializer(mean=0.0, stddev=0.01))
        db4 = tf.get_variable('b4', [512], initializer = \
            tf.random_normal_initializer(mean=0.0, stddev=0.01))
        dw5 = tf.get_variable('w5', [4,4,512,1], initializer = \
            tf.random_normal_initializer(mean=0.0, stddev=0.01))
        db5 = tf.get_variable('b5', [1], initializer = \
            tf.random_normal_initializer(mean=0.0, stddev=0.01))
        x_reshape = tf.reshape(x, [-1, 28, 28, 1])
        h = tf.nn.conv2d(x_reshape, dw1, strides=[1,2,2,1],padding='SAME') + db1
        h = batch_norm(h, 'D-bn1', reuse)
        h = tf.nn.relu(h)
        h = tf.nn.conv2d(h, dw2, strides=[1,2,2,1],padding='SAME') + db2
        h = batch_norm(h, 'D-bn2', reuse)
        h = tf.nn.relu(h)
        h = tf.nn.conv2d(h, dw3, strides=[1,2,2,1],padding='SAME') + db3
        h = batch_norm(h, 'D-bn3', reuse)
        h = tf.nn.relu(h)
        h = tf.nn.conv2d(h, dw4, strides=[1,2,2,1],padding='SAME') + db4
        h = batch_norm(h, 'D-bn4', reuse)
        h = tf.nn.relu(h)
        # No sigmoid here: raw logits per spatial patch.
        output = tf.nn.conv2d(h, dw5, strides=[1,1,1,1],padding='SAME') + db5
        # shape: [-1, 32, 32, 1]
        return output
# Graph
# Graph construction: vanilla-GAN losses over the generator/discriminator.
# NOTE(review): ``generator`` is not defined anywhere in this file (only
# ``generator_A2B`` is) — this line raises NameError as written; confirm the
# intended function.
g = tf.Graph()
with g.as_default():
    X = tf.placeholder(tf.float32, [None, 784])
    Z = tf.placeholder(tf.float32, [None, 128])
    fake_x = generator(Z)
    result_fake = discriminator(fake_x)
    result_real = discriminator(X, True)
    # Mean discriminator outputs, logged during training as D(G(z)) / D(x).
    D_G_Z = tf.reduce_mean(result_fake)
    D_X = tf.reduce_mean(result_real)
    # NOTE(review): ``discriminator`` returns raw logits (no sigmoid), so
    # taking log() of them directly is numerically invalid — verify whether
    # sigmoid_cross_entropy_with_logits was intended.
    g_loss = -tf.reduce_mean(tf.log(result_fake))
    d_loss = -tf.reduce_mean(tf.log(result_real) + tf.log(1-result_fake))
    t_vars = tf.trainable_variables()
    g_vars = [var for var in t_vars if 'Gen' in var.name]
    d_vars = [var for var in t_vars if 'Dis' in var.name]
    optimizer = tf.train.AdamOptimizer(learning_rate)
    g_train = optimizer.minimize(g_loss, var_list = g_vars)
    d_train = optimizer.minimize(d_loss, var_list = d_vars)
# Train
# Train loop: alternate one generator and one discriminator step per batch,
# log losses each epoch, and dump 10 sample digits every 5 epochs.
# NOTE(review): ``random_noise`` is never defined in this file — these calls
# raise NameError; confirm the helper that was meant to sample Z.
with tf.Session(graph = g, config=tf.ConfigProto(gpu_options=tf.GPUOptions(allow_growth=True))) as sess:
    sess.run(tf.global_variables_initializer())
    total_batch = int(train_x.shape[0] / batch_size)
    for epoch in range(total_epochs):
        for batch in range(total_batch):
            batch_x = train_x[batch * batch_size: (batch+1) * batch_size]
            batch_y = train_y[batch * batch_size: (batch+1) * batch_size]
            noise = random_noise(batch_size)
            sess.run(g_train, feed_dict = {Z: noise})
            sess.run(d_train, feed_dict = {X: batch_x, Z: noise})
            D_gz, D_x, gl, dl = sess.run([D_G_Z, D_X, g_loss, d_loss], \
                feed_dict={X: batch_x, Z: noise})
        #if (epoch+1)%20==0 or epoch==1:
        print('\nEpoch: %d/%d' %(epoch, total_epochs))
        print('Generator:', gl)
        print('Discriminator:', dl)
        print('Fake D:', D_gz, '/ Real D:', D_x)
        sample_noise = random_noise(10)
        if epoch==0 or (epoch+1)%5 == 0:
            generated = sess.run(fake_x, feed_dict = {Z: sample_noise})
            fig, ax = plt.subplots(1, 10, figsize=(10,1))
            for i in range(10):
                ax[i].set_axis_off()
                ax[i].imshow(np.reshape(generated[i], (28,28)))
            # Write samples to disk (backend is 'agg', so no display needed).
            plt.savefig('result/largeconv-%s.png' %str(epoch).zfill(3), bbox_inches='tight')
            plt.close(fig)
    print('Finished!!')
| [
"hogunhogun@naver.com"
] | hogunhogun@naver.com |
68b3cec1b5512d4fcda2395d608d5ebd33c2902e | 3284ba582d3f43a2e4ec7ae8e024bea77cc28a1f | /venv/Scripts/easy_install-script.py | 3fb5bc1ddb5edb3ae60e2577ce2967b7a1e385f0 | [] | no_license | Rubensrvsc/Search-keyword | f00b9687a607cde169abef1bb73c4d6bf6ba07d6 | b7f0c31d0157d9784dfbe5be9140072b8dcbabb1 | refs/heads/master | 2020-03-29T20:52:45.826118 | 2018-10-02T18:26:32 | 2018-10-02T18:26:32 | 150,336,409 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 457 | py | #!C:\Users\ruben\PycharmProjects\Search-keyword\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==39.1.0','console_scripts','easy_install'
__requires__ = 'setuptools==39.1.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('setuptools==39.1.0', 'console_scripts', 'easy_install')()
)
| [
"Rubensspfc100@gmail.com"
] | Rubensspfc100@gmail.com |
54abea1cc77082c2d7a2b55605468c8af6661b87 | ba435f72e6ccad30cb5577455f2b913479a6b9c1 | /week4.py | 42f33b3cd1978c9aa12cc1e917f803d7778d22da | [] | no_license | MuxBee/alds | bc16e58d98dab70a3052eb9679f84decd204e256 | d3bf85ca1167b920c019bcd7b1b7b079b57c74ac | refs/heads/master | 2021-01-19T12:46:18.125846 | 2017-01-12T00:45:09 | 2017-01-12T00:45:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,328 | py | import sys
class Hash:
    """Chained hash table over truthy numbers with doubling growth.

    Buckets are sets; an element's home bucket is ``int(e % self.len)``.
    Falsy values (0, None, ...) are silently ignored by every operation,
    matching the original behaviour.
    """
    def __init__(self):
        self.len = 1          # number of buckets
        self.table = [set()]  # chained buckets
        self.used = 0         # number of stored elements
    def __repr__(self):
        # One "index:{bucket}" line per bucket, same layout as before.
        message = ""
        for count, bucket in enumerate(self.table):
            message += str(count) + ":" + str(bucket) + "\n"
        return message
    def search(self, e):
        """Return True if ``e`` is stored, False otherwise (None for falsy e)."""
        if e:
            for bucket in self.table:
                if e in bucket:
                    return True
            return False
    def insert(self, e):
        """Store ``e``, doubling the table first when load exceeds 0.75."""
        if e:
            if self.used > 0.75 * self.len:
                self.rehash(self.len * 2)
            # BUG FIX: compute the bucket *after* a possible rehash; the
            # original derived ``key`` from the old table size and then
            # filed the element into a stale bucket of the resized table.
            key = int(e % self.len)
            self.table[key].add(e)
            self.used += 1
    def delete(self, e):
        """Remove ``e`` if present; return True on removal, False if absent."""
        if e:
            for bucket in self.table:
                if e in bucket:
                    bucket.remove(e)
                    self.used -= 1
                    return True
            return False
    def rehash(self, new_len):
        # Grow the table and re-file every element under the new modulus.
        print("Rehashing")
        print(self)
        old_buckets = self.table
        self.len = new_len
        self.table = [set() for _ in range(new_len)]
        for bucket in old_buckets:
            for element in bucket:
                self.table[int(element % self.len)].add(element)
import random
# NOTE(review): an extreme recursion limit — nothing in this file recurses,
# so this looks like leftover experimentation; verify before keeping.
sys.setrecursionlimit(100000000)
# Demo: insert 200 random floats, then delete a random half and verify
# membership before and after.
t = Hash()
print("*" * 10, "CHAIN HASH", "*" * 10)
print("*" * 10, "INSERTING", "*" * 10)
temp = []
for i in range(200):
    temp.append(random.uniform(0, 1000))
    t.insert(temp[i])
print("*" * 10, "DONE INSERTING", "*" * 10)
ready_to_delete = []
while len(ready_to_delete) <= 100:
    element = random.choice(temp)
    ready_to_delete.append(element)
    temp.remove(element)
print("*" * 10, "READY TO DELETE", "*" * 10)
for i in ready_to_delete:
    print(i, "FOUND ", t.search(i))
print("DELETING")
for i in ready_to_delete:
    t.delete(i)
for i in ready_to_delete:
    print(i, "FOUND ", t.search(i))
print("*" * 10, "DONE DELETING", "*" * 10)
print(t)
print("*" * 10, "END CHAIN HASH", "*" * 10)
# Birthday-paradox search: draw random floats until two distinct values
# collide under hash(...) mod 2**32 (expected ~2**16 draws), then print the
# colliding pair and the shared 32-bit hash.
hashdict = dict()
while (True):
    r = random.random()
    if r not in hashdict.values():
        hr = hash(r) % (2 ** 32)
        if hr in hashdict.keys():
            print(repr(r) + ", " + repr(hashdict[hr]) + ": " + repr(hr))
            break
        hashdict[hr] = r
def B(n, k):
    """Return the binomial coefficient C(n, k) via the additive recurrence.

    Uses a single rolling row of Pascal's triangle: O(n*k) time, O(k)
    space.  Returns 0 when k > n.  The original printed every intermediate
    cell, which drowned the result in debug output — removed.
    """
    C = [0] * (k + 1)
    C[0] = 1  # C(i, 0) == 1 for every i
    for i in range(1, n + 1):
        # Update right-to-left so C[j-1] still holds the previous row's
        # value when C[j] is recomputed.
        for j in range(min(i, k), 0, -1):
            C[j] = C[j] + C[j - 1]
    return C[k]
print(B(50, 100))  # 0 — k > n here; the original '# 184756' comment is C(20, 10)
# https://en.wikipedia.org/wiki/Change-making_problem
def f(n):
    """Minimum number of coins needed to make ``n`` cents.

    Denominations are euro coin/note values in cents up to 100 EUR.
    ``n`` must be an int <= 1000 (enforced with assert, as before).
    Classic unbounded change-making DP; the original's 2-D table and the
    dead local ``actual_amount`` are replaced by a single rolling row.
    """
    assert type(n) == int
    assert n <= 1000
    coins = [1, 2, 5, 10, 20, 50, 100, 200, 500, 1000, 2000, 5000, 10000]
    # best[r] = fewest coins summing to r; seeding with r itself is the
    # all-1-cent solution, a valid upper bound for every amount.
    best = list(range(n + 1))
    for coin in coins[1:]:
        for r in range(coin, n + 1):
            candidate = best[r - coin] + 1
            if candidate < best[r]:
                best[r] = candidate
    return best[n]
print(f(25))
| [
"smartcat007@hotmail.com"
] | smartcat007@hotmail.com |
05648e98861f7242f3e9a0fefa5d5293dc55f54e | 4aada682def251d936d304d6c0cca956c64c121e | /registration/models.py | 528eb8e3705e27e7da7fe7a4127a4be48a50cb22 | [] | no_license | kayethano/howmuch | dfb5f45b25b5f98ffc06e55ca764e485f2affed6 | fac90c01e60ff9bf2d271de1e7a723de129cc864 | refs/heads/master | 2020-05-30T18:44:59.684356 | 2013-06-03T20:55:26 | 2013-06-03T20:55:26 | 8,377,273 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 10,166 | py | import datetime
import random
import re
from django.conf import settings
from django.contrib.auth.models import User
from django.db import models
from django.db import transaction
from django.template.loader import render_to_string
from django.utils.hashcompat import sha_constructor
from django.utils.translation import ugettext_lazy as _
SHA1_RE = re.compile('^[a-f0-9]{40}$')
class RegistrationManager(models.Manager):
    """
    Custom manager for the ``RegistrationProfile`` model.
    The methods defined here provide shortcuts for account creation
    and activation (including generation and emailing of activation
    keys), and for cleaning out expired inactive accounts.
    """
    def activate_user(self, activation_key):
        """
        Validate an activation key and activate the corresponding
        ``User`` if valid.
        If the key is valid and has not expired, return the ``User``
        after activating.
        If the key is not valid or has expired, return ``False``.
        If the key is valid but the ``User`` is already active,
        return ``False``.
        To prevent reactivation of an account which has been
        deactivated by site administrators, the activation key is
        reset to the string constant ``RegistrationProfile.ACTIVATED``
        after successful activation.
        """
        # Make sure the key we're trying conforms to the pattern of a
        # SHA1 hash; if it doesn't, no point trying to look it up in
        # the database.
        if SHA1_RE.search(activation_key):
            try:
                profile = self.get(activation_key=activation_key)
            except self.model.DoesNotExist:
                return False
            if not profile.activation_key_expired():
                user = profile.user
                user.is_active = True
                user.save()
                # Burn the key so a deactivated account cannot be
                # re-activated by replaying the same link.
                profile.activation_key = self.model.ACTIVATED
                profile.save()
                return user
        return False
    def create_inactive_user(self, username, first_name, last_name, email, password,
                             site, send_email=True):
        """
        Create a new, inactive ``User``, generate a
        ``RegistrationProfile`` and email its activation key to the
        ``User``, returning the new ``User``.
        By default, an activation email will be sent to the new
        user. To disable this, pass ``send_email=False``.
        """
        new_user = User.objects.create_user(username, email, password)
        new_user.first_name = first_name
        new_user.last_name = last_name
        new_user.is_active = False
        new_user.save()
        registration_profile = self.create_profile(new_user)
        if send_email:
            registration_profile.send_activation_email(site)
        return new_user
    # Wrap the whole creation (user + profile + email) in one transaction.
    # NOTE(review): ``transaction.commit_on_success`` was removed in
    # Django 1.8 (``transaction.atomic`` is the replacement) — confirm the
    # targeted Django version.
    create_inactive_user = transaction.commit_on_success(create_inactive_user)
    def create_profile(self, user):
        """
        Create a ``RegistrationProfile`` for a given
        ``User``, and return the ``RegistrationProfile``.
        The activation key for the ``RegistrationProfile`` will be a
        SHA1 hash, generated from a combination of the ``User``'s
        username and a random salt.
        """
        # NOTE(review): ``sha_constructor`` comes from
        # ``django.utils.hashcompat``, removed in Django 1.5;
        # ``hashlib.sha1`` is the drop-in replacement.
        salt = sha_constructor(str(random.random())).hexdigest()[:5]
        activation_key = sha_constructor(salt+user.username).hexdigest()
        return self.create(user=user,
                           activation_key=activation_key)
    def delete_expired_users(self):
        """
        Remove expired instances of ``RegistrationProfile`` and their
        associated ``User``s.
        Accounts to be deleted are identified by searching for
        instances of ``RegistrationProfile`` with expired activation
        keys, and then checking to see if their associated ``User``
        instances have the field ``is_active`` set to ``False``; any
        ``User`` who is both inactive and has an expired activation
        key will be deleted.
        It is recommended that this method be executed regularly as
        part of your routine site maintenance; this application
        provides a custom management command which will call this
        method, accessible as ``manage.py cleanupregistration``.
        Regularly clearing out accounts which have never been
        activated serves two useful purposes:
        1. It alleviates the ocasional need to reset a
           ``RegistrationProfile`` and/or re-send an activation email
           when a user does not receive or does not act upon the
           initial activation email; since the account will be
           deleted, the user will be able to simply re-register and
           receive a new activation key.
        2. It prevents the possibility of a malicious user registering
           one or more accounts and never activating them (thus
           denying the use of those usernames to anyone else); since
           those accounts will be deleted, the usernames will become
           available for use again.
        If you have a troublesome ``User`` and wish to disable their
        account while keeping it in the database, simply delete the
        associated ``RegistrationProfile``; an inactive ``User`` which
        does not have an associated ``RegistrationProfile`` will not
        be deleted.
        """
        for profile in self.all():
            if profile.activation_key_expired():
                user = profile.user
                # Deleting the User cascades to its RegistrationProfile.
                if not user.is_active:
                    user.delete()
class RegistrationProfile(models.Model):
    """
    A simple profile which stores an activation key for use during
    user account registration.
    Generally, you will not want to interact directly with instances
    of this model; the provided manager includes methods
    for creating and activating new accounts, as well as for cleaning
    out accounts which have never been activated.
    While it is possible to use this model as the value of the
    ``AUTH_PROFILE_MODULE`` setting, it's not recommended that you do
    so. This model's sole purpose is to store data temporarily during
    account registration and activation.
    """
    # Sentinel stored in activation_key once an account has activated,
    # preventing the key from ever validating again.
    ACTIVATED = u"ALREADY_ACTIVATED"
    user = models.ForeignKey(User, unique=True, verbose_name=_('user'))
    activation_key = models.CharField(_('activation key'), max_length=40)
    objects = RegistrationManager()
    class Meta:
        verbose_name = _('registration profile')
        verbose_name_plural = _('registration profiles')
    def __unicode__(self):
        return u"Registration information for %s" % self.user
    def activation_key_expired(self):
        """
        Determine whether this ``RegistrationProfile``'s activation
        key has expired, returning a boolean -- ``True`` if the key
        has expired.
        Key expiration is determined by a two-step process:
        1. If the user has already activated, the key will have been
           reset to the string constant ``ACTIVATED``. Re-activating
           is not permitted, and so this method returns ``True`` in
           this case.
        2. Otherwise, the date the user signed up is incremented by
           the number of days specified in the setting
           ``ACCOUNT_ACTIVATION_DAYS`` (which should be the number of
           days after signup during which a user is allowed to
           activate their account); if the result is less than or
           equal to the current date, the key has expired and this
           method returns ``True``.
        """
        expiration_date = datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)
        return self.activation_key == self.ACTIVATED or \
               (self.user.date_joined + expiration_date <= datetime.datetime.now())
    # Render as a boolean column in the Django admin.
    activation_key_expired.boolean = True
    def send_activation_email(self, site):
        """
        Send an activation email to the user associated with this
        ``RegistrationProfile``.
        The activation email will make use of two templates:
        ``registration/activation_email_subject.txt``
            This template will be used for the subject line of the
            email. Because it is used as the subject line of an email,
            this template's output **must** be only a single line of
            text; output longer than one line will be forcibly joined
            into only a single line.
        ``registration/activation_email.txt``
            This template will be used for the body of the email.
        These templates will each receive the following context
        variables:
        ``activation_key``
            The activation key for the new account.
        ``expiration_days``
            The number of days remaining during which the account may
            be activated.
        ``site``
            An object representing the site on which the user
            registered; depending on whether ``django.contrib.sites``
            is installed, this may be an instance of either
            ``django.contrib.sites.models.Site`` (if the sites
            application is installed) or
            ``django.contrib.sites.models.RequestSite`` (if
            not). Consult the documentation for the Django sites
            framework for details regarding these objects' interfaces.
        """
        ctx_dict = { 'activation_key': self.activation_key,
                     'expiration_days': settings.ACCOUNT_ACTIVATION_DAYS,
                     'site': site }
        subject = render_to_string('registration/activation_email_subject.txt',
                                   ctx_dict)
        # Email subject *must not* contain newlines
        subject = ''.join(subject.splitlines())
        message = render_to_string('registration/activation_email.txt',
                                   ctx_dict)
        self.user.email_user(subject, message, settings.DEFAULT_FROM_EMAIL)
| [
"kayethano@gmail.com"
] | kayethano@gmail.com |
8c63790e46698adacad84c7b80f0afe8e85aec72 | c1d88dcb0a7efa8249a20aadb2e4afdeb9c26fcb | /discrete_mathematics/projects/final_project_method2.py | ecfb756556edbf7e316230608c5b6e24b3b13fe3 | [] | no_license | gadr1on/computer_science_major | 6de8396989d1d4fc314d341c1c7254a7768148a7 | b5bcf34bbcb1228126f9dc6d7cdd6305f8c0ca20 | refs/heads/master | 2021-05-22T22:19:46.913315 | 2020-04-06T08:45:43 | 2020-04-06T08:45:43 | 253,122,130 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,020 | py | import scipy.spatial.distance as ssd
import scipy.cluster.hierarchy as sch
import matplotlib.pyplot as plt
# Symmetric 9x9 pairwise-distance matrix (zero diagonal) used as the
# clustering input below.
distMatrix = [[ 0  , 31 , 58 , 27 , 12 , 10 , 34 , 64 , 44 ],
              [ 31 , 0  , 83 , 21 , 36 , 45 , 35 , 92 , 102],
              [ 58 , 83 , 0  , 66 , 11 , 78 , 28 , 39 , 94 ],
              [ 27 , 21 , 66 , 0  , 88 , 96 , 67 , 49 , 50 ],
              [ 12 , 36 , 11 , 88 , 0  , 43 , 12 , 33 , 70 ],
              [ 10 , 45 , 78 , 96 , 43 , 0  , 23 , 46 , 83 ],
              [ 34 , 35 , 28 , 67 , 12 , 23 , 0  , 30 , 79 ],
              [ 64 , 92 , 39 , 49 , 33 , 46 , 30 , 0  , 92 ],
              [ 44 , 102, 94 , 50 , 70 , 83 , 79 , 92 , 0  ]]
def formClusters(dists, distance):
    """Single-linkage hierarchical clustering of a square distance matrix,
    displayed as a dendrogram.

    NOTE(review): the ``distance`` parameter is accepted but never used —
    presumably it was meant as a dendrogram colour/cut threshold; confirm
    and wire it in, or drop it at the call site.
    """
    # Condense the square matrix into the vector form linkage() expects
    # (the original comment said the opposite direction).
    dists = ssd.squareform(dists)
    # Compute linkage
    links = sch.linkage(dists)
    plt.figure(figsize=(15,5))
    p = sch.dendrogram(links)
    plt.show()
formClusters(distMatrix, 38)
| [
"gadrionpr@gmail.com"
] | gadrionpr@gmail.com |
ec2eb0aa2df734cf02d1c1f59fc5ac432f487b0f | e4427b40760354593a9c1b923825640bc7b2dd4e | /reports/general/confirmedusers.py | cd91b597f209dddd6aad5b9369caeb133f451168 | [
"LicenseRef-scancode-public-domain"
] | permissive | legoktm/database-reports | a04a4d4b00fcf658b1777c5413a3586cecf90891 | f1ead4d4e8d256e5bfd4988914171f31cc845193 | refs/heads/master | 2021-01-16T20:35:15.793256 | 2013-05-05T01:30:42 | 2013-05-05T01:30:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,551 | py | # Copyright 2009, 2013 bjweeks, MZMcBride, Tim Landscheidt
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Report class for autoconfirmed users in the confirmed user group
"""
import datetime
import reports
class report(reports.report):
    """Wiki database report: users holding the 'confirmed' group although
    they already meet the autoconfirmed threshold (>= 10 edits and a first
    edit more than 4 days old), making the explicit group redundant."""
    def get_title(self):
        return 'Autoconfirmed users in the confirmed user group'
    def get_preamble(self, conn):
        cursor = conn.cursor()
        # Replication lag: age of the newest entry in recentchanges.
        cursor.execute('SELECT UNIX_TIMESTAMP() - UNIX_TIMESTAMP(rc_timestamp) FROM recentchanges ORDER BY rc_timestamp DESC LIMIT 1;')
        rep_lag = cursor.fetchone()[0]
        # "Data as of" = now minus replication lag, wiki-formatted.
        current_of = (datetime.datetime.utcnow() - datetime.timedelta(seconds=rep_lag)).strftime('%H:%M, %d %B %Y (UTC)')
        return '''Users in the "confirmed" user group who have passed the autoconfirmed threshold; \
data as of <onlyinclude>%s</onlyinclude>.''' % current_of
    def get_table_columns(self):
        return ['User', 'Edit count', 'First edit']
    def get_table_rows(self, conn):
        """Yield one [user link, edit count, first-edit timestamp] row per
        qualifying user; the subquery pins rev_timestamp to the user's
        first revision."""
        cursor = conn.cursor()
        cursor.execute('''
        /* confirmedusers.py SLOW_OK */
        SELECT
          CONVERT(user_name USING utf8),
          user_editcount,
          rev_timestamp
        FROM user
        JOIN user_groups
        ON ug_user = user_id
        JOIN revision
        ON rev_user = user_id
        AND ug_group = 'confirmed'
        AND user_editcount > 9
        AND (SELECT
               MIN(rev_timestamp)
             FROM revision
             WHERE rev_user = user_id) < DATE_FORMAT(DATE_SUB(NOW(),INTERVAL 4 DAY),'%Y%m%d%H%i%s')
        AND rev_timestamp = (SELECT
                               MIN(rev_timestamp)
                             FROM revision
                             WHERE rev_user = user_id);
        ''')
        for user_name, user_editcount, rev_timestamp in cursor:
            # Wrap the name in the {{dbr link}} template used by the report pages.
            user_name = u'{{dbr link|1=%s}}' % user_name
            yield [user_name, str(user_editcount), rev_timestamp]
        cursor.close()
| [
"tim@tim-landscheidt.de"
] | tim@tim-landscheidt.de |
4b22873ba2b86ee6f8f353268dab2f9cda56c13c | d77c8e7d5ec57940a2e1ee0e9836fb6181b0e051 | /user.py | ff88009a1f188531003aede35af3ba5cdd19b583 | [] | no_license | ultralegendary/Git-RPG | 6111ea5bb25ecbd5d86d4a577935e8aa0fc40f15 | db3caeea635a56303a971a4ee6488de7963a5aa2 | refs/heads/master | 2023-03-21T14:15:18.612195 | 2021-03-14T14:06:57 | 2021-03-14T14:06:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,458 | py | import sqlite3
import os
"""Db structure:
{
user_name = ...
file_path = ...
level = {
lvl_no: ["init","commit",....],
}
"""
basepath = os.path.join(os.path.dirname(os.path.realpath(__file__)), "user.sqlite3")
class User:
    """Thin persistence wrapper around a single-row ``Users`` SQLite table.

    The table always holds exactly one row (id=1) tracking the player's
    progress: ``path`` (repo path), ``level`` and ``sublevel``.
    """

    def __init__(self, path=None):
        """Open (and, on first run, bootstrap) the user database.

        Args:
            path: SQLite database file path; defaults to ``basepath``
                (``user.sqlite3`` next to this module).
        """
        if path is None:
            path = basepath
        self.db = sqlite3.connect(path)
        self.cursor = self.db.cursor()
        # First run: create the table and seed the single progress row.
        # (Use ``is None`` for the None check, not ``== None``.)
        table_row = self.db.execute(
            "SELECT name FROM sqlite_master WHERE type='table' AND name='Users';"
        ).fetchone()
        if table_row is None:
            self.cursor.execute(
                "create table if not exists Users (id integer primary key autoincrement, path text, level int, sublevel int)"
            )
            self.cursor.execute("insert into Users values (1,NULL,1,1)")
            self.db.commit()

    def update(self, item: dict):
        """Update columns of the single progress row and commit.

        Args:
            item: mapping of column name -> new value.  NOTE: column names
                are interpolated into the SQL text, so keys must come from
                trusted code, never from user input (values are bound safely
                via ``?`` placeholders).

        Returns:
            The sqlite3 cursor returned by ``execute``.
        """
        # A single comma-join covers both the one-column and many-column
        # cases (no separator is emitted for a single element), so the
        # original's duplicate branch is unnecessary.
        assignments = ", ".join(f"{col} = ?" for col in item)
        res = self.cursor.execute(
            f"update Users set {assignments} where id=1", tuple(item.values())
        )
        self.db.commit()
        return res

    def get(self, item):
        """Return the value(s) of ``item`` from the single progress row.

        Args:
            item: a column name, or a comma-separated list of column names
                (trusted code only -- interpolated into the SQL text).

        Returns:
            A scalar for one column, a tuple for several.
        """
        res = self.cursor.execute(f"select {item} from Users where id=1").fetchone()
        # fetchone() yields a 1-tuple for a single column; unwrap it.
        return res if len(res) > 1 else res[0]
| [
"npc203@users.noreply.github.com"
] | npc203@users.noreply.github.com |
60f83e7f42ce5330dc0e5d4b308975dca448fe8a | 2330c7c518bd42eddcad9d46a6c8c5747b72fe3a | /intent/apps/query/migrations/0005_auto__add_dailystat.py | a8c0d7be15e0dbc4d49ca59f2eb77ecba3d9ebc3 | [] | no_license | ktundwal/intent | f2873b172a9e8439378c668539780277026c7655 | 605c7dfa4a8257f87fb93b5f7d984e0c95434b22 | refs/heads/master | 2023-06-08T09:06:14.965357 | 2013-12-29T05:32:17 | 2013-12-29T05:32:17 | 380,635,059 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,329 | py | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
    def forwards(self, orm):
        """Apply the migration: create the 'query_dailystat' table
        (a DailyStat row = one day's per-category counts, FK'd to a Query).
        """
        # Adding model 'DailyStat'
        db.create_table('query_dailystat', (
            ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
            ('stat_of', self.gf('django.db.models.fields.related.ForeignKey')(related_name='dailystats', blank=True, to=orm['query.Query'])),
            ('stat_for', self.gf('django.db.models.fields.DateField')(auto_now_add=True, blank=True)),
            ('document_count', self.gf('django.db.models.fields.IntegerField')()),
            ('buy_count', self.gf('django.db.models.fields.IntegerField')()),
            ('recommendation_count', self.gf('django.db.models.fields.IntegerField')()),
            ('question_count', self.gf('django.db.models.fields.IntegerField')()),
            ('commitment_count', self.gf('django.db.models.fields.IntegerField')()),
            ('like_count', self.gf('django.db.models.fields.IntegerField')()),
            ('dislike_count', self.gf('django.db.models.fields.IntegerField')()),
            ('try_count', self.gf('django.db.models.fields.IntegerField')()),
        ))
        db.send_create_signal('query', ['DailyStat'])
    def backwards(self, orm):
        """Reverse the migration: drop the 'query_dailystat' table."""
        # Deleting model 'DailyStat'
        db.delete_table('query_dailystat')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'query.author': {
'Meta': {'object_name': 'Author'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'twitter_handle': ('django.db.models.fields.CharField', [], {'max_length': '40'})
},
'query.dailystat': {
'Meta': {'object_name': 'DailyStat'},
'buy_count': ('django.db.models.fields.IntegerField', [], {}),
'commitment_count': ('django.db.models.fields.IntegerField', [], {}),
'dislike_count': ('django.db.models.fields.IntegerField', [], {}),
'document_count': ('django.db.models.fields.IntegerField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'like_count': ('django.db.models.fields.IntegerField', [], {}),
'question_count': ('django.db.models.fields.IntegerField', [], {}),
'recommendation_count': ('django.db.models.fields.IntegerField', [], {}),
'stat_for': ('django.db.models.fields.DateField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'stat_of': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dailystats'", 'blank': 'True', 'to': "orm['query.Query']"}),
'try_count': ('django.db.models.fields.IntegerField', [], {})
},
'query.document': {
'Meta': {'ordering': "['-date']", 'object_name': 'Document'},
'analyzed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'author': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'documents'", 'to': "orm['query.Author']"}),
'buy_rule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'buys'", 'null': 'True', 'to': "orm['query.Rule']"}),
'commitment_rule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'commitments'", 'null': 'True', 'to': "orm['query.Rule']"}),
'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'dislike_rule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'dislikes'", 'null': 'True', 'to': "orm['query.Rule']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'like_rule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'likes'", 'null': 'True', 'to': "orm['query.Rule']"}),
'question_rule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'questions'", 'null': 'True', 'to': "orm['query.Rule']"}),
'recommendation_rule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'recommendations'", 'null': 'True', 'to': "orm['query.Rule']"}),
'result_of': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'results'", 'blank': 'True', 'to': "orm['query.Query']"}),
'source': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'source_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '40'}),
'text': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '140', 'blank': 'True'}),
'try_rule': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'tries'", 'null': 'True', 'to': "orm['query.Rule']"})
},
'query.query': {
'Meta': {'ordering': "['-created_on']", 'object_name': 'Query'},
'count': ('django.db.models.fields.IntegerField', [], {'default': '100'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"}),
'created_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'auto_now_add': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'interval': ('django.db.models.fields.IntegerField', [], {'default': '1800'}),
'last_run': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'latitude': ('django.db.models.fields.CharField', [], {'max_length': '40', 'blank': 'True'}),
'longitude': ('django.db.models.fields.CharField', [], {'max_length': '40', 'blank': 'True'}),
'num_times_run': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'query': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'query_exception': ('django.db.models.fields.CharField', [], {'max_length': '200', 'blank': 'True'}),
'radius': ('django.db.models.fields.CharField', [], {'max_length': '40', 'blank': 'True'}),
'status': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'throttle': ('django.db.models.fields.FloatField', [], {'default': '0.5'})
},
'query.rule': {
'Meta': {'object_name': 'Rule'},
'confidence': ('django.db.models.fields.DecimalField', [], {'default': '1.0', 'max_digits': '2', 'decimal_places': '1'}),
'grammar': ('django.db.models.fields.IntegerField', [], {}),
'grammar_version': ('django.db.models.fields.DecimalField', [], {'max_digits': '2', 'decimal_places': '1'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'rule': ('django.db.models.fields.CharField', [], {'max_length': '100', 'blank': 'True'})
}
}
complete_apps = ['query'] | [
"kapil@cruxly.com"
] | kapil@cruxly.com |
422b89a625e8ed71bceb6edd8df9b18591547f09 | e23a4f57ce5474d468258e5e63b9e23fb6011188 | /125_algorithms/_exercises/templates/_algorithms_challenges/codeabbey/_CodeAbbeyPythonSolutions-master/matching_brackets.py | 6e1fa02b64da38699b6c5780bcf0007efcd986dd | [] | no_license | syurskyi/Python_Topics | 52851ecce000cb751a3b986408efe32f0b4c0835 | be331826b490b73f0a176e6abed86ef68ff2dd2b | refs/heads/master | 2023-06-08T19:29:16.214395 | 2023-05-29T17:09:11 | 2023-05-29T17:09:11 | 220,583,118 | 3 | 2 | null | 2023-02-16T03:08:10 | 2019-11-09T02:58:47 | Python | UTF-8 | Python | false | false | 745 | py | _______ __
amount_values i..(input
results # list
___ is_matching(word
word __.sub _ [^()[\]{}<>]","",word)
open_brackets ["[","(","{","<"]
close_brackets ["]",")","}",">"]
open_brackets_in_word = # list
___ i __ word:
__(i __ open_brackets
open_brackets_in_word.a..(i)
____(i __ close_brackets
__(l..(open_brackets_in_word) __ 0
r.. 0
__(open_brackets.i.. open_brackets_in_word[-1]) !_ close_brackets.i.. i:
r.. 0
____
open_brackets_in_word.p.. )
__(l..(open_brackets_in_word) > 0
r.. 0
r.. 1
___ i __ r..(amount_values
word i.. )
results.a..(is_matching(word
print(*results)
| [
"sergejyurskyj@yahoo.com"
] | sergejyurskyj@yahoo.com |
d381508d7d996ad00cf41f20960b069d5bad79c3 | 9a4dc899928a2fbce0bc7501adc4590f17f855fb | /demeter/demeter/settings.py | 770bfb71ab8d5506c6843fe1b3fd7660507019f0 | [] | no_license | kstroevsky/parf_test | 697fe705b9dc3ade3377eaf076839378233acd21 | 1c756bab32a6212fd916238780ee61eb12e5b1ff | refs/heads/master | 2020-09-17T04:12:17.511723 | 2019-12-10T20:07:08 | 2019-12-10T20:07:08 | 223,984,498 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,102 | py | """
Django settings for demeter project.
Generated by 'django-admin startproject' using Django 2.1.5.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'cxb7cf1=s&p0!2w!)rqqk2xykgud31l8($3gsloqzeva3+b76u'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'core'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'demeter.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'demeter.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
| [
"38957112+kstroevsky@users.noreply.github.com"
] | 38957112+kstroevsky@users.noreply.github.com |
6f16a1c2b4dac3fc4a91743e308c6b7a3bc0d011 | c16ea32a4cddb6b63ad3bacce3c6db0259d2bacd | /google/cloud/bigquery/storage/v1/bigquery-storage-v1-py/setup.py | f20749fb5fe943b8d9cce3d3b649c3f8f26a9e70 | [
"Apache-2.0"
] | permissive | dizcology/googleapis-gen | 74a72b655fba2565233e5a289cfaea6dc7b91e1a | 478f36572d7bcf1dc66038d0e76b9b3fa2abae63 | refs/heads/master | 2023-06-04T15:51:18.380826 | 2021-06-16T20:42:38 | 2021-06-16T20:42:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,802 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import io
import os
import setuptools # type: ignore
version = '0.1.0'
package_root = os.path.abspath(os.path.dirname(__file__))
readme_filename = os.path.join(package_root, 'README.rst')
with io.open(readme_filename, encoding='utf-8') as readme_file:
readme = readme_file.read()
setuptools.setup(
name='google-cloud-bigquery-storage',
version=version,
long_description=readme,
packages=setuptools.PEP420PackageFinder.find(),
namespace_packages=('google', 'google.cloud'),
platforms='Posix; MacOS X; Windows',
include_package_data=True,
install_requires=(
'google-api-core[grpc] >= 1.22.2, < 2.0.0dev',
'libcst >= 0.2.5',
'proto-plus >= 1.15.0',
'packaging >= 14.3', ),
python_requires='>=3.6',
classifiers=[
'Development Status :: 3 - Alpha',
'Intended Audience :: Developers',
'Operating System :: OS Independent',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Topic :: Internet',
'Topic :: Software Development :: Libraries :: Python Modules',
],
zip_safe=False,
)
| [
"bazel-bot-development[bot]@users.noreply.github.com"
] | bazel-bot-development[bot]@users.noreply.github.com |
90c78d722c19c585af06e26253b4db4d10aea893 | e1e128b3a0aa886010d964b0176ea9fb86d6304b | /progetto1/Grafici/Errore-w-vs-u.py | d99b3e79511b1a7c4dab1edcdeab304112d44909 | [] | no_license | CostantiniMatteo/progetto-mcs | cfbc97959588737c13e77548f84e7f623fac10df | dea973b3cce9fd3a8c9f75ec232c2493356ba661 | refs/heads/master | 2020-03-12T14:29:59.197222 | 2018-06-07T13:23:52 | 2018-06-07T13:23:52 | 130,668,716 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,846 | py | import matplotlib.pyplot as plt
import pandas as pd
dt = pd.read_csv(
"../logs/res2.csv",
sep=',',
encoding="utf-8-sig",
)
# tableau20[2] arancione
# tableau20[4] verde
# tableau20[6] rosso
# tableau20[8] viola
# tableau20[10] marrone
# tableau20[12] rosa
# tableau20[14] grigio
# tableau20[16] verdino
# tableau20[18] azzurro
tableau20 = [(31, 119, 180), (174, 199, 232), (255, 127, 14), (255, 187, 120),
(44, 160, 44), (152, 223, 138), (214, 39, 40), (255, 152, 150),
(148, 103, 189), (197, 176, 213), (140, 86, 75), (196, 156, 148),
(227, 119, 194), (247, 182, 210), (127, 127, 127), (199, 199, 199),
(188, 189, 34), (219, 219, 141), (23, 190, 207), (158, 218, 229)]
# Scale the RGB values to the [0, 1] range, which is the format matplotlib accepts.
for i in range(len(tableau20)):
r, g, b = tableau20[i]
tableau20[i] = (r / 255., g / 255., b / 255.)
plt.figure(1, figsize=(16, 7))
# Settaggio griglia asse y
ax = plt.axes()
ax.yaxis.grid(linestyle='--')
# Remove the plot frame lines. They are unnecessary chartjunk.
ax = plt.subplot(111)
ax.spines["top"].set_visible(False)
ax.spines["right"].set_visible(False)
#plt.xlabel('n° di valori non zeri')
plt.xlabel('Dimensione')
plt.ylabel('Errore (%)')
dt = dt.sort_values('nnz')
dt = dt.reset_index(drop=True)
x = list()
for el in dt['name'].unique():
x.append(dt.loc[dt['name'] == el, 'nnz'].unique().mean())
# x = list(range(1, len(dt['name'].unique()) + 1))
# plt.xticks(x, dt['dim'].unique(), rotation=45)
# Per dimensione
dt = dt.sort_values('dim')
dt = dt.reset_index(drop=True)
x = dt['dim'].unique()
#ERRORE
y_py_w = dt.loc[(dt['os'] == 'windows') & (dt['lang'] == 'python'),'re']
y_mat_w = dt.loc[(dt['os'] == 'windows') & (dt['lang'] == 'matlab'),'re']
y_py_u = dt.loc[(dt['os'] == 'ubuntu') & (dt['lang'] == 'python'),'re']
y_mat_u = dt.loc[(dt['os'] == 'ubuntu') & (dt['lang'] == 'matlab'),'re']
# y_u = [sum(y) / 2 for y in zip(y_py_u, y_mat_u)]
# plt.plot(x, y_u, color=tableau20[2], dashes=[2, 2], marker='o', label='Media Ubuntu')
# y_w = [sum(y) / 2 for y in zip(y_mat_w, y_py_w)]
# plt.plot(x, y_w, color=tableau20[18], dashes=[2, 2], marker='o', label='Media Windows')
# plt.plot(x, y_py_w, color=tableau20[4], dashes=[2, 2], marker='o', label='Windows/Python')
# plt.plot(x, y_py_u, color=tableau20[6], dashes=[2, 2], marker='o', label='Ubuntu/Python')
plt.plot(x, y_mat_w, color=tableau20[4], dashes=[2, 2], marker='o', label='Windows/Matlab')
plt.plot(x, y_mat_u, color=tableau20[6], dashes=[2, 2], marker='o', label='Ubuntu/Matlab')
plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
plt.yscale('log')
plt.xscale('log')
plt.subplots_adjust(bottom=0.15, right=0.8)
#plt.gca().add_artist(legend1)
#plt.savefig('immagini/Errore-w-vs-u_dim.png')
plt.show()
| [
"d.gerosa6@campus.unimib.it"
] | d.gerosa6@campus.unimib.it |
3f52dd08607f49f62ba2a9fcf0763299051fc089 | 4c7fc810eb442b386969bf345b4dc6ef3152c783 | /src/transformers/models/pegasus/configuration_pegasus.py | ae5f8f007573b77a2c22f667698e25ef5bd39b3e | [
"Apache-2.0"
] | permissive | newcodevelop/transformers | fbcef5d703b12febf6e76e84e3f0493769fb9d37 | e8d1bd7427021d2114ec159b2c90c6b1fcddeae7 | refs/heads/main | 2023-03-15T11:45:09.906184 | 2022-08-30T07:26:17 | 2022-08-30T07:26:17 | 254,360,734 | 0 | 1 | Apache-2.0 | 2020-04-09T12:07:09 | 2020-04-09T12:07:08 | null | UTF-8 | Python | false | false | 7,868 | py | # coding=utf-8
# Copyright 2021, Google and The HuggingFace Inc. team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" PEGASUS model configuration"""
from ...configuration_utils import PretrainedConfig
from ...utils import logging
logger = logging.get_logger(__name__)
PEGASUS_PRETRAINED_CONFIG_ARCHIVE_MAP = {
"google/pegasus-large": "https://huggingface.co/google/pegasus-large/resolve/main/config.json",
# See all PEGASUS models at https://huggingface.co/models?filter=pegasus
}
class PegasusConfig(PretrainedConfig):
r"""
This is the configuration class to store the configuration of a [`PegasusModel`]. It is used to instantiate an
PEGASUS model according to the specified arguments, defining the model architecture. Instantiating a configuration
with the defaults will yield a similar configuration to that of the PEGASUS
[google/pegasus-large](https://huggingface.co/google/pegasus-large) architecture.
Configuration objects inherit from [`PretrainedConfig`] and can be used to control the model outputs. Read the
documentation from [`PretrainedConfig`] for more information.
Args:
vocab_size (`int`, *optional*, defaults to 50265):
Vocabulary size of the PEGASUS model. Defines the number of different tokens that can be represented by the
`inputs_ids` passed when calling [`PegasusModel`] or [`TFPegasusModel`].
d_model (`int`, *optional*, defaults to 1024):
Dimensionality of the layers and the pooler layer.
encoder_layers (`int`, *optional*, defaults to 12):
Number of encoder layers.
decoder_layers (`int`, *optional*, defaults to 12):
Number of decoder layers.
encoder_attention_heads (`int`, *optional*, defaults to 16):
Number of attention heads for each attention layer in the Transformer encoder.
decoder_attention_heads (`int`, *optional*, defaults to 16):
Number of attention heads for each attention layer in the Transformer decoder.
decoder_ffn_dim (`int`, *optional*, defaults to 4096):
Dimensionality of the "intermediate" (often named feed-forward) layer in decoder.
encoder_ffn_dim (`int`, *optional*, defaults to 4096):
Dimensionality of the "intermediate" (often named feed-forward) layer in decoder.
activation_function (`str` or `function`, *optional*, defaults to `"gelu"`):
The non-linear activation function (function or string) in the encoder and pooler. If string, `"gelu"`,
`"relu"`, `"silu"` and `"gelu_new"` are supported.
dropout (`float`, *optional*, defaults to 0.1):
The dropout probability for all fully connected layers in the embeddings, encoder, and pooler.
attention_dropout (`float`, *optional*, defaults to 0.0):
The dropout ratio for the attention probabilities.
activation_dropout (`float`, *optional*, defaults to 0.0):
The dropout ratio for activations inside the fully connected layer.
classifier_dropout (`float`, *optional*, defaults to 0.0):
The dropout ratio for classifier.
max_position_embeddings (`int`, *optional*, defaults to 1024):
The maximum sequence length that this model might ever be used with. Typically set this to something large
just in case (e.g., 512 or 1024 or 2048).
init_std (`float`, *optional*, defaults to 0.02):
The standard deviation of the truncated_normal_initializer for initializing all weight matrices.
encoder_layerdrop (`float`, *optional*, defaults to 0.0):
The LayerDrop probability for the encoder. See the [LayerDrop paper](see https://arxiv.org/abs/1909.11556)
for more details.
decoder_layerdrop (`float`, *optional*, defaults to 0.0):
The LayerDrop probability for the decoder. See the [LayerDrop paper](see https://arxiv.org/abs/1909.11556)
for more details.
scale_embedding (`bool`, *optional*, defaults to `False`):
Scale embeddings by diving by sqrt(d_model).
use_cache (`bool`, *optional*, defaults to `True`):
Whether or not the model should return the last key/values attentions (not used by all models)
forced_eos_token_id (`int`, *optional*, defaults to 1):
The id of the token to force as the last generated token when `max_length` is reached. Usually set to
`eos_token_id`.
Example:
```python
>>> from transformers import PegasusModel, PegasusConfig
>>> # Initializing a PEGASUS google/pegasus-large style configuration
>>> configuration = PegasusConfig()
>>> # Initializing a model from the google/pegasus-large style configuration
>>> model = PegasusModel(configuration)
>>> # Accessing the model configuration
>>> configuration = model.config
```"""
model_type = "pegasus"
keys_to_ignore_at_inference = ["past_key_values"]
attribute_map = {"num_attention_heads": "encoder_attention_heads", "hidden_size": "d_model"}
def __init__(
self,
vocab_size=50265,
max_position_embeddings=1024,
encoder_layers=12,
encoder_ffn_dim=4096,
encoder_attention_heads=16,
decoder_layers=12,
decoder_ffn_dim=4096,
decoder_attention_heads=16,
encoder_layerdrop=0.0,
decoder_layerdrop=0.0,
use_cache=True,
is_encoder_decoder=True,
activation_function="gelu",
d_model=1024,
dropout=0.1,
attention_dropout=0.0,
activation_dropout=0.0,
init_std=0.02,
decoder_start_token_id=0,
classifier_dropout=0.0,
scale_embedding=False,
pad_token_id=0,
eos_token_id=1,
forced_eos_token_id=1,
**kwargs
):
self.vocab_size = vocab_size
self.max_position_embeddings = max_position_embeddings
self.d_model = d_model
self.encoder_ffn_dim = encoder_ffn_dim
self.encoder_layers = encoder_layers
self.encoder_attention_heads = encoder_attention_heads
self.decoder_ffn_dim = decoder_ffn_dim
self.decoder_layers = decoder_layers
self.decoder_attention_heads = decoder_attention_heads
self.dropout = dropout
self.attention_dropout = attention_dropout
self.activation_dropout = activation_dropout
self.activation_function = activation_function
self.init_std = init_std
self.encoder_layerdrop = encoder_layerdrop
self.decoder_layerdrop = decoder_layerdrop
self.classifier_dropout = classifier_dropout
self.use_cache = use_cache
self.num_hidden_layers = encoder_layers
self.scale_embedding = scale_embedding # scale factor will be sqrt(d_model) if True
super().__init__(
pad_token_id=pad_token_id,
eos_token_id=eos_token_id,
is_encoder_decoder=is_encoder_decoder,
decoder_start_token_id=decoder_start_token_id,
forced_eos_token_id=forced_eos_token_id,
**kwargs,
)
    @property
    def num_attention_heads(self) -> int:
        """Alias for ``encoder_attention_heads`` (mirrors ``attribute_map``)."""
        return self.encoder_attention_heads
    @property
    def hidden_size(self) -> int:
        """Alias for ``d_model`` (mirrors ``attribute_map``)."""
        return self.d_model
| [
"noreply@github.com"
] | noreply@github.com |
3e4de2fc6d9d8b76fb50b968c3ddd51ec32bf16e | 5aa910b8eb3c5ec0ffedfef7e4956c04da5e803a | /python/tensorflow/basic/basic_usage.py | 75eb4694b575d4a9b5a5c011f74ca9d7ca548ab0 | [
"MIT"
] | permissive | gdsglgf/tutorials | 67b01a0594310c867bea5d6f6e32c26238c70b3d | bacb2b901cca778e8d4439a363bca25981074fed | refs/heads/master | 2020-06-21T05:45:34.970214 | 2017-10-31T11:39:40 | 2017-10-31T11:39:40 | 74,804,952 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,361 | py | import tensorflow as tf
# Create a Constant op that produces a 1x2 matrix. The op is
# added as a node to the default graph.
#
# The value returned by the constructor represents the output
# of the Constant op.
matrix1 = tf.constant([[3., 3.]])
# Create another Constant that produces a 2x1 matrix.
matrix2 = tf.constant([[2.],[2.]])
# Create a Matmul op that takes 'matrix1' and 'matrix2' as inputs.
# The returned value, 'product', represents the result of the matrix
# multiplication.
product = tf.matmul(matrix1, matrix2)
# Launch the default graph.
sess = tf.Session()
# To run the matmul op we call the session 'run()' method, passing 'product'
# which represents the output of the matmul op. This indicates to the call
# that we want to get the output of the matmul op back.
#
# All inputs needed by the op are run automatically by the session. They
# typically are run in parallel.
#
# The call 'run(product)' thus causes the execution of three ops in the
# graph: the two constants and matmul.
#
# The output of the op is returned in 'result' as a numpy `ndarray` object.
result = sess.run(product)
print(result)
# ==> [[ 12.]]
# Close the Session when we're done.
sess.close()
# Using with block, The Session closes automatically at the end of the with block.
# with tf.Session() as sess:
# result = sess.run([product])
# print(result) | [
"wzguofulgz@126.com"
] | wzguofulgz@126.com |
a75b582f20ec9f0efc3cc68fe0333cda99140118 | a7a5207842249c3e957632351068bf15f76b2cac | /jobhound/wsgi.py | 398e62e29c7d3ced1003fc0aa9ddf67680a1a241 | [] | no_license | smileservices/jobhound | 4baaf19e45bb7a72f3d08069863be66077158a31 | d4b2e3905e0be4de9db061894d152332cc3387ed | refs/heads/master | 2022-12-11T11:32:27.004271 | 2020-04-16T05:05:16 | 2020-04-16T05:05:16 | 187,748,962 | 0 | 0 | null | 2022-12-08T05:28:02 | 2019-05-21T02:41:44 | CSS | UTF-8 | Python | false | false | 393 | py | """
WSGI config for jobhound project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os

from django.core.wsgi import get_wsgi_application

# Point Django at the project settings module (only if not already set in
# the environment, e.g. by the server's service file).
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'jobhound.settings')

# Module-level WSGI callable that servers (gunicorn, uWSGI, mod_wsgi) import.
application = get_wsgi_application()
| [
"vladimir.gorea@gmail.com"
] | vladimir.gorea@gmail.com |
77166de9586f0a39eb537a623054fb5db15a323e | bc8ba8ac83c5e585b67ae56c589bbdde94f9905e | /courseraTrain/courseraTrain/apps/page/views.py | 97dc4827d7cb5728659804dfcdbf9f50bbb87e00 | [] | no_license | Arilonchik/Learning-python | ec7138d9a80f7e569a2097cb4b2e168e19e91694 | bf92f0cfa7b18bb5c413cf39b92cab72cd726cc9 | refs/heads/master | 2021-09-25T11:40:11.818306 | 2020-08-04T15:38:49 | 2020-08-04T15:38:49 | 246,258,987 | 0 | 0 | null | 2021-09-22T19:02:52 | 2020-03-10T09:29:50 | JavaScript | UTF-8 | Python | false | false | 341 | py | from django.shortcuts import render
def main_page(request):
    """Render the site landing page."""
    return render(request, 'page/main.html')
def logreg(request):
    """Render the combined login/registration page."""
    return render(request, 'page/logreg.html')
def userdetail(request, id):
    """Render a user's detail page.

    NOTE(review): ``id`` comes from the URLconf but is unused here — the
    template presumably resolves the user itself; confirm.
    """
    return render(request, 'page/user_details.html')
def tag_news(request, tag):
    """Render the list of news items for a tag.

    NOTE(review): ``tag`` is captured from the URL but unused by the view.
    """
    return render(request, 'page/tag_list.html')
| [
"egorreal2011@gmail.com"
] | egorreal2011@gmail.com |
79800ea4bb344a36419d35687696ac0d9d4ee791 | 96f5c4513a2b41f0ddb8ef9df64d82359d4df257 | /ex15.py | cbd935a956e954fa77d1c162b7d1f69a4a52a7a7 | [] | no_license | whatrocks/lpthw | d6fa8d0542d9fcd302986aaefd7e83cc7f28736f | bf32fbde2a142215bb18c66d2aa2dc834e35d6bc | refs/heads/master | 2021-07-20T04:13:31.518575 | 2017-10-24T03:37:54 | 2017-10-24T03:37:54 | 107,930,274 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 243 | py | from sys import argv
script, filename = argv

# Print the file named on the command line.  A context manager guarantees
# the handle is closed even if reading fails (the original leaked both
# handles).  The f-string below had lost its placeholder; restore it.
with open(filename) as txt:
    print(f"Here's your file {filename}:")
    print(txt.read())

print("Type the file name again:")
file_again = input("> ")

with open(file_again) as txt_again:
    print(txt_again.read())
| [
"charlie@whatrocks.org"
] | charlie@whatrocks.org |
9dfd5420cc45c7fcf91fcb36723e5cb455c5338e | bb33e75a3c448c5acd9872b4f8644e3fc39377c4 | /WeTube/Audio/views.py | 544fa6cac2ec98f4ab56e463b3b631814da12de6 | [] | no_license | Andre-Oliveira/WeTube | bb051cdc6d5bd8b760f2d25734384fdbfa120d06 | 17458e23504d0ba3d8c8dc1e5931b75295c5b286 | refs/heads/main | 2023-01-11T05:53:44.653386 | 2020-11-06T17:48:15 | 2020-11-06T17:48:15 | 310,369,097 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 191 | py | from django.shortcuts import render
from django.http import HttpResponse
# Create your views here.
def index(request):
return HttpResponse("Hello, there. You're at the Main Audio page.") | [
"andre.a.c.oliveira@outlook.com"
] | andre.a.c.oliveira@outlook.com |
793ed4dfcd35b1914cd21aae2a1c9b91367c20b2 | 84f96ab7cfa47b35f6b41bb673cea4d7d87f510e | /polls/migrations/0001_initial.py | 24f5f5fb7c63a84e723b74017c8e619b78274dfe | [] | no_license | duhq123/testDjango | bd2f644565ac8594f3ee54bc55235b6b06c41011 | d64321ad2d6ae9a5652a39eb82d389bd8f7c4c31 | refs/heads/master | 2021-08-17T16:45:20.755460 | 2020-04-03T09:00:19 | 2020-04-03T09:00:19 | 150,550,328 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,165 | py | # Generated by Django 2.0.7 on 2018-07-23 10:48
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the polls app: Question, Choice and their FK link.

    Auto-generated by Django — applied migrations must not be rewritten.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        # Choice: answer text plus a vote counter.
        migrations.CreateModel(
            name='Choice',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('choice_text', models.CharField(max_length=200)),
                ('votes', models.IntegerField(default=0)),
            ],
        ),
        # Question: prompt text and publication timestamp.
        migrations.CreateModel(
            name='Question',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('question_text', models.CharField(max_length=200)),
                ('pub_date', models.DateTimeField(verbose_name='date published')),
            ],
        ),
        # Link each Choice to its Question; deleting a Question cascades.
        migrations.AddField(
            model_name='choice',
            name='question',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='polls.Question'),
        ),
    ]
| [
"m15104611003@163.com"
] | m15104611003@163.com |
b73f68257023c1fa8bc857f401cec846ef532ede | cc04c3894d5e7f45bcbef0754bf51a63afa8d427 | /python/advanced_python_regex.py | 8e369633db03291f3a2a2ba3e46cb3bd2a6ee080 | [] | no_license | ejm714/dsp | 7c45bcf03896371c0ca917d44613e181252bc4b7 | 77415232a32b6eea3ec8cc0a630b2f83e94749b9 | refs/heads/master | 2021-01-22T19:42:11.299548 | 2017-04-09T01:48:25 | 2017-04-09T01:48:25 | 85,230,132 | 0 | 0 | null | 2017-03-16T18:41:11 | 2017-03-16T18:41:11 | null | UTF-8 | Python | false | false | 741 | py | ## Q1
import pandas as pd
from collections import Counter

## Q1 — count the distinct (normalised) degrees held by faculty.
df = pd.read_csv('https://raw.githubusercontent.com/ejm714/dsp/master/python/faculty.csv')
print(df.columns.tolist())
# Column names in the CSV carry stray whitespace; normalise them first.
df.columns = df.columns.str.strip()
degrees = df['degree'].str.strip().str.replace('.', '').str.split()
degree_counts = Counter(degrees.sum())
# Original mixed Python-2 `print x` statements with Python-3 calls, which
# is a SyntaxError under Python 3; all prints are now function calls.
print(degree_counts)
len(degree_counts)  # notebook-style echo of the distinct-degree count

## Q2 — normalise titles ("... is Associate" -> "... of Associate") and count.
title = df['title'].str.replace(' is ', ' of ')
title_counts = title.value_counts()
print(title_counts)
len(title_counts)

## Q3 — dump the e-mail list to a file.
emails = df['email'].tolist()
print(emails)
# Context manager closes the file even if the write raises.
with open('q3.txt', 'w') as txt_file:
    txt_file.write('%s' % emails)

## Q4 — unique e-mail domains.
unique_domains = df['email'].str.split('@').str[1].drop_duplicates().tolist()
print(unique_domains)
len(unique_domains)
| [
"noreply@github.com"
] | noreply@github.com |
e24e7df4f317e6c628e16a871d55ad7159b7cdb3 | f96637ce5bfd50c3db0943d347dc42cea0db2dee | /account/urls.py | a053d252966a6815332734bf42f7821a208f45d7 | [] | no_license | Kusbek/django2-tutorial2 | 9cb2ec0fe5dacaa50829d33560ee4ec11ec3c07f | ab300d74434f08ef9fa42a9c3a2580f36e73a7fd | refs/heads/master | 2020-08-16T05:34:31.831674 | 2019-10-16T08:43:04 | 2019-10-16T08:43:04 | 215,461,606 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 267 | py | from django.urls import path
from . import views
urlpatterns = [
path('', views.account, name = 'account'),
path('signup/', views.signup, name = 'signup'),
path('login/', views.login, name = 'login'),
path('logout/', views.logout, name = 'logout'),
] | [
"kusbek1994@gmail.com"
] | kusbek1994@gmail.com |
44410ba2601a87b586811b7cbc3f750b9218370e | 121d91d5a32fcf1425cb0f84e0f529e24e210f1e | /books/books_app/models.py | 40940667c98e41ebb018de88e1d3d5c9dc423bef | [] | no_license | Nouran-yehia/DjangoLab4_DB_routes | e613e26161ab9dc1b883b252c661500cade3c1f8 | 1b15d2f3cf4627535eb749374fa44988292587e7 | refs/heads/master | 2022-04-13T00:56:16.641785 | 2020-04-11T13:22:37 | 2020-04-11T13:22:37 | 254,870,933 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 451 | py | from django.db import models
class Author(models.Model):
    """A book author, identified by name."""

    name = models.CharField(max_length=50)

    def __str__(self):
        # Shown wherever the object is rendered as text (admin, templates).
        return self.name
class Books(models.Model):
    """A book with a cover image and a single author."""

    name = models.CharField(max_length=50)
    details = models.TextField(max_length=100)
    # NOTE(review): default 'img.jpg' is presumably a placeholder under
    # MEDIA_ROOT — confirm it exists.
    image = models.FileField(max_length=50, default='img.jpg')
    # Deleting an Author removes their books as well.
    author = models.ForeignKey(Author, on_delete=models.CASCADE)

    def __str__(self):
        return self.name
| [
"nourayehia1994@gmail.com"
] | nourayehia1994@gmail.com |
b4b943042458d5e9106737ab01e2e43af95406b8 | 5ee994967ada4d1154bad6569f736313f50dc17c | /common/models_db.py | a9e8b3391844bd3b69622069254036a0b6712f66 | [] | no_license | justefg/chat | faa4a6e9f3473a1021946f4e93e1697fb569788b | 5713105211f64ebf30bca5d6354b8ff00311bc9d | refs/heads/master | 2021-05-31T07:30:14.460566 | 2016-04-26T14:23:48 | 2016-04-26T14:24:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 833 | py | from sqlalchemy import Column, ForeignKey, Integer, DateTime, String, PickleType, \
Text, BigInteger, UniqueConstraint, Boolean
from sqlalchemy.ext.declarative import declarative_base, declared_attr
Base = declarative_base()
class Message(Base):
    """A chat message row exchanged between two users."""

    __tablename__ = "messages"

    id = Column(BigInteger, primary_key=True)
    sender = Column(BigInteger)    # user id of the sender
    receiver = Column(BigInteger)  # user id of the recipient
    text = Column(String(1000))
    timestamp = Column(DateTime)
    # NOTE(review): 'rev' looks like a received/read flag — confirm its
    # meaning against the code that sets it.
    rev = Column(Boolean)

    def __init__(self, sender, receiver, text, timestamp, rev):
        self.sender = sender
        self.receiver = receiver
        self.text = text
        self.timestamp = timestamp
        self.rev = rev

    def __str__(self):
        # Compact debug form: ids only, the message body is not included.
        return "{id: %s, sender: %s, receiver: %s}" % (self.id, self.sender, self.receiver)
| [
"theluckyemil@gmail.com"
] | theluckyemil@gmail.com |
d942c24bdc09d730a660133aa3a56186f2630840 | cf024f84c3fac53fb2ce5e6304509ec869509b52 | /NumPy/code.py | 2a9b7b7477a51750682369c65c814f98f744b6dc | [
"MIT"
] | permissive | pratik0917/greyatom-python-for-data-science | 20bedb37842355e692ddda945796a8fe7ead7a5a | 929cfd6d408885beae66a556c11f4b8eee9b2114 | refs/heads/master | 2020-06-16T17:43:28.756239 | 2019-07-07T13:30:16 | 2019-07-07T13:30:16 | 195,653,942 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,180 | py | # --------------
# Importing header files
import numpy as np

# Path of the file has been stored in variable called 'path'
# NOTE(review): 'path' is injected by the grading harness, not defined here.
data=np.genfromtxt(path,delimiter=",",skip_header=1)
print(data)
print(type(data))

#New record
new_record=np.array([[50, 9, 4, 1, 0, 0, 40, 0]])
print(new_record)

# Append the new record as an extra row of the census data.
census=np.concatenate((data,new_record),axis=0)
print(census)

#Code starts here

# --------------
#Code starts here
# Basic descriptive statistics for the age column (column 0).
age=census[:,0]
print(age)
max_age=np.max(age)
min_age=np.min(age)
age_mean=np.mean(age)
age_std=np.std(age)
print(max_age)
print(min_age)
print(age_mean)
print(age_std)
# --------------
# Split the census by race code (column 2), report each group's size, and
# find the least-represented (minority) race.
race=census[:,2]
print("length of complete data setup cencus is : "+ str(len(race)))

race_0=census[census[:,2]==0]
race_1=census[census[:,2]==1]
race_2=census[census[:,2]==2]
race_3=census[census[:,2]==3]
race_4=census[census[:,2]==4]

print("value of race_0 column is:")
print(race_0)
len_0=len(race_0)
print(len_0)
print("value of race_1 column is:")
print(race_1)
len_1=len(race_1)
print(len_1)
print("value of race_2 column is:")
print(race_2)
len_2=len(race_2)
print(len_2)
print("value of race_3 column is:")
print(race_3)
len_3=len(race_3)
print(len_3)
print("value of race_4 column is:")
print(race_4)
len_4=len(race_4)
print(len_4)

minimun_count=np.min([len_0,len_1,len_2,len_3,len_4])
# np.argmin returns the FIRST index attaining the minimum, which matches
# the tie-breaking of the original ten-line if/elif chain it replaces
# (lowest race code wins).  int() keeps the printed form identical.
minority_race=int(np.argmin([len_0,len_1,len_2,len_3,len_4]))
print(minority_race)
# --------------
#Code starts here
# Total and average weekly working hours (column 6) for senior citizens
# (age > 60).
senior_citizens=census[census[:,0]>60]
#print(senior_citizens)
working_hours_sum=np.sum(senior_citizens[:,6])
print(working_hours_sum)
senior_citizens_len=len(senior_citizens[:,0])
avg_working_hours=working_hours_sum/senior_citizens_len
print(avg_working_hours)
# --------------
#Code starts here
# Compare average pay (column 7) for people with more than 10 vs at most
# 10 years of education (column 1).
high=census[census[:,1]>10];
low=census[census[:,1]<=10];
avg_pay_high=np.mean(high[:,7])
avg_pay_low=np.mean(low[:,7])
print(avg_pay_high);
print(avg_pay_low);
| [
"pratik0917@gmail.com"
] | pratik0917@gmail.com |
d1090c80d748a3d4b9afb55fd64452dbaaf49f43 | be44e062f00a728f9c4add0873ce9dc925c67630 | /js/google/closure_linter/2.3.9/setup.py | 5169f53bfc589894f375edaacc6bdac5d78bb72d | [] | no_license | aaren-cordova/un_squadron | c2e5e1a7f91d38df14cb39de7995a7e7a6350566 | 0172976086dc456ddc17a5d7d41ec9c7191d9f00 | refs/heads/master | 2020-04-25T05:44:40.090788 | 2014-12-18T20:03:40 | 2014-12-18T20:03:40 | 20,908,533 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,323 | py | #!/usr/bin/env python
#
# Copyright 2010 The Closure Linter Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Prefer setuptools (needed for entry_points/console scripts); fall back to
# distutils on minimal installs.
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

setup(name='closure_linter',
      version='2.3.9',
      description='Closure Linter',
      license='Apache',
      author='The Closure Linter Authors',
      author_email='opensource@google.com',
      url='http://code.google.com/p/closure-linter',
      install_requires=['python-gflags'],
      package_dir={'closure_linter': 'closure_linter'},
      packages=['closure_linter', 'closure_linter.common'],
      # Command-line tools installed onto PATH by pip/setuptools.
      entry_points = {
          'console_scripts': [
              'gjslint = closure_linter.gjslint:main',
              'fixjsstyle = closure_linter.fixjsstyle:main'
          ]
      }
      )
| [
"aaren.cordova@demandmedia.com"
] | aaren.cordova@demandmedia.com |
530fe8d77ce4622a1d97b4e820ba312ea94d142f | ca25fa25d7b936d46975a3463bf81ea4c07daa7b | /myvenv/lib/python3.7/site-packages/chartjs/views/pie.py | 65bce86be48e5aca7d89f158a781be9a82ba7c11 | [
"MIT"
] | permissive | lasher85/AirCheck | 25b870a280d6681642350e3872a24a8f135069d2 | 52b2f78c7d797999df4952b1bcac9f7d2c12b42c | refs/heads/master | 2022-05-03T15:52:23.574362 | 2019-07-12T16:53:31 | 2019-07-12T16:53:31 | 196,597,675 | 0 | 0 | MIT | 2022-04-22T21:52:24 | 2019-07-12T14:56:05 | Python | UTF-8 | Python | false | false | 924 | py | from . import HighChartsView
class HighChartPieView(HighChartsView):
    """Highcharts view that renders every series as a pie chart."""

    def get_context_data(self, **kwargs):
        # Inject the series payload into the template context.
        data = super(HighChartPieView, self).get_context_data(**kwargs)
        data['series'] = self.get_series()
        return data

    def get_series(self):
        # Tag each series dict so Highcharts draws it as a pie.
        series = super(HighChartPieView, self).get_series()
        for serie in series:
            serie.update({'type': 'pie'})
        return series

    def get_providers(self):
        # Pie views expose no providers.
        return []
class HighChartDonutView(HighChartPieView):
    """Pie view with a hollow centre (donut)."""

    # Size of the hole, passed through as Highcharts' innerSize option.
    inner_size = '50%'

    def get_series(self):
        series = super(HighChartDonutView, self).get_series()
        for serie in series:
            serie.update({"innerSize": self.inner_size})
        return series

    def get_plot_options(self):
        # Donuts show their slices in the legend.
        options = super(HighChartDonutView, self).get_plot_options()
        options.update({'pie': {"showInLegend": True}})
        return options
| [
"lasher85@gmail.com"
] | lasher85@gmail.com |
6d2611fe31d3a15dbde4884f4dcd16f59a19f9db | 2664d89b460c53d1e56e2ac4123bf149c51bf1fb | /accounts/migrations/0022_auto_20200419_0744.py | 8d090500f789ea7246a84359c878ac8e586a0fa0 | [] | no_license | jmvillegasjr/Tuffy-Library-Management-System | 7ec1bc5a778e8a31e5405456a40316a03cfe780f | ab6f3a676e647aa90317885465bc21e71b87bdcd | refs/heads/master | 2023-01-06T11:24:45.573841 | 2020-10-14T05:39:24 | 2020-10-14T05:39:24 | 303,908,746 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,670 | py | # Generated by Django 3.0 on 2020-04-19 07:44
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Replace the single Order model with IssueOrder / ReturnOrder.

    Auto-generated by Django — applied migrations must not be rewritten.
    """

    dependencies = [
        ('accounts', '0021_auto_20200414_0006'),
    ]

    operations = [
        # Order row created when a product goes out (status choice 'Checked In').
        migrations.CreateModel(
            name='IssueOrder',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date_out', models.DateTimeField(auto_now_add=True)),
                ('status', models.CharField(choices=[('Checked In', 'Checked In')], max_length=255, null=True)),
                ('customer', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='accounts.Customer')),
                ('product', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='accounts.Product')),
            ],
        ),
        # Order row created when a product comes back (status choice 'Checked Out').
        migrations.CreateModel(
            name='ReturnOrder',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('date_in', models.DateTimeField(auto_now_add=True)),
                ('status', models.CharField(choices=[('Checked Out', 'Checked Out')], max_length=255, null=True)),
                ('customer', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='accounts.Customer')),
                ('product', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='accounts.Product')),
            ],
        ),
        migrations.DeleteModel(
            name='Order',
        ),
    ]
| [
"58552487+jmvillegasjr@users.noreply.github.com"
] | 58552487+jmvillegasjr@users.noreply.github.com |
194577f9c7fa24cccb61ff28186440bd3dbb1c36 | e2de8e9d819d1dda25da7083eb6c26ec6364d796 | /me/urls.py | a5ad87014a4f1ebbf6f02cd6cb1aceaccac246af | [] | no_license | losh1/django_blog | e677e999b06c6c93254549cd075aaedf9790480d | a7ee44af7436e076a9dc0fd6ab87ee7007097935 | refs/heads/master | 2021-01-20T20:21:07.659252 | 2016-08-05T02:40:25 | 2016-08-05T02:40:25 | 64,943,783 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 814 | py | """me URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Import the include() function: from django.conf.urls import url, include
3. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import url
from django.contrib import admin
urlpatterns = [
    # Django admin site.
    url(r'^admin/', admin.site.urls),
]
| [
"patokalosh@gmail.com"
] | patokalosh@gmail.com |
b7620b9604af7465bdcd1f9a1d2439bb39b84b3f | a431804e091bd9b110b90e542af5ee433f4a7a64 | /calc-2/arithmetic.py | 64f3e3e4c97b7b69cd1ca42637382f61789a3a86 | [] | no_license | ErikaAzabache/hackbright-lessons | 77cf3869ccefc4eed15717fcb8cdc8324ea3e712 | 1f0ce38fa46c30012f5add141f173e058ab0c438 | refs/heads/master | 2021-01-23T03:34:11.025843 | 2017-03-24T17:41:50 | 2017-03-24T17:41:50 | 86,094,325 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,142 | py | def add(list_o_nums):
answer = 0
for num in list_o_nums:
answer += num
return answer
def subtract(list_o_nums):
    """Fold the list with subtraction: first - second - third - ...

    Raises IndexError on an empty list.
    """
    rest = list_o_nums[1:]
    difference = list_o_nums[0]
    for number in rest:
        difference = difference - number
    return difference
def multiply(list_o_nums):
    """Return the product of all the numbers (1 for an empty list)."""
    product = 1
    for factor in list_o_nums:
        product = product * factor
    return product
def divide(list_o_nums):
    """Divide the first number by each of the rest, left to right.

    Operands are coerced to float so the result is true division even
    under Python 2 semantics.
    """
    quotient = float(list_o_nums[0])
    for divisor in list_o_nums[1:]:
        quotient = quotient / float(divisor)
    return quotient
def square(list_o_nums):
    """Return a new list with each number squared."""
    return [value ** 2 for value in list_o_nums]
def cube(list_o_nums):
    """Return a new list with each number cubed."""
    return [value ** 3 for value in list_o_nums]
def power(list_o_nums):
    """Raise the first number to each following number, left to right.

    Raises IndexError on an empty list.
    """
    base = list_o_nums[0]
    for exponent in list_o_nums[1:]:
        base = base ** exponent
    return base
def mod(list_o_nums):
    """Fold the list with the modulo operator, left to right.

    Raises IndexError on an empty list.
    """
    remainder = list_o_nums[0]
    for divisor in list_o_nums[1:]:
        remainder = remainder % divisor
    return remainder
| [
"erika@doravel.me"
] | erika@doravel.me |
34fd3a4d47ceb40a08144bd8450f24f59c31cc6f | d2bc43b1c029f18226cd77440de1dad574c19499 | /mvn/models/v2v_net.py | a8adb2cd5488f58cc88104f5733eea2b51940777 | [] | no_license | TrendingTechnology/ContextPose-PyTorch-release | 5552bfcf1866200fbb416786f3a1837c096466bd | 3057e71e49860e701373e788d9cbecde82240052 | refs/heads/master | 2023-04-18T12:40:57.003850 | 2021-04-30T15:30:06 | 2021-04-30T15:30:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,013 | py | # Reference: https://github.com/dragonbook/V2V-PoseNet-pytorch
import torch
import torch.nn as nn
import torch.nn.functional as F
from .attention_conv_layer import AttentionConv3D
import argparse
from torch.nn.parallel import DistributedDataParallel
import os
class Basic3DBlock(nn.Module):
    """Conv3d -> BatchNorm3d -> ReLU, padded so spatial size is preserved
    for odd kernel sizes."""

    def __init__(self, in_planes, out_planes, kernel_size):
        super(Basic3DBlock, self).__init__()
        pad = (kernel_size - 1) // 2
        layers = [
            nn.Conv3d(in_planes, out_planes, kernel_size=kernel_size,
                      stride=1, padding=pad),
            nn.BatchNorm3d(out_planes),
            nn.ReLU(True),
        ]
        self.block = nn.Sequential(*layers)

    def forward(self, x):
        out = self.block(x)
        return out
class AttenRes3DBlock(nn.Module):
    """Residual block whose first stage is an AttentionConv3D.

    ``forward`` returns both the block output and the attention tensor
    produced by the attention conv, so callers can propagate/inspect it.
    """

    def __init__(self, in_planes, out_planes, config):
        super(AttenRes3DBlock, self).__init__()
        self.atten_conv = AttentionConv3D(in_planes, out_planes, config)
        # BN/ReLU then a second 3x3x3 conv applied to the attention output.
        self.res_branch = nn.Sequential(
            nn.BatchNorm3d(out_planes),
            nn.ReLU(True),
            nn.Conv3d(out_planes, out_planes, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm3d(out_planes)
        )
        # 1x1x1 projection on the skip path only when channels change.
        if in_planes == out_planes:
            self.skip_con = nn.Sequential()
        else:
            self.skip_con = nn.Sequential(
                nn.Conv3d(in_planes, out_planes, kernel_size=1, stride=1, padding=0),
                nn.BatchNorm3d(out_planes)
            )

    def forward(self, x, args):
        res, atten_global = self.atten_conv(x, args)
        res = self.res_branch(res)
        skip = self.skip_con(x)
        return F.relu(res + skip, True), atten_global
class Res3DBlock(nn.Module):
    """3D residual block: two conv+BN stages plus a (possibly projected)
    skip connection, fused by a final ReLU."""

    def __init__(self, in_planes, out_planes):
        super(Res3DBlock, self).__init__()
        self.res_branch = nn.Sequential(
            nn.Conv3d(in_planes, out_planes, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm3d(out_planes),
            nn.ReLU(True),
            nn.Conv3d(out_planes, out_planes, kernel_size=3, stride=1, padding=1),
            nn.BatchNorm3d(out_planes),
        )
        if in_planes == out_planes:
            # Channel counts match: identity skip.
            skip = nn.Sequential()
        else:
            # Project the input with a 1x1x1 conv so the shapes line up.
            skip = nn.Sequential(
                nn.Conv3d(in_planes, out_planes, kernel_size=1, stride=1, padding=0),
                nn.BatchNorm3d(out_planes),
            )
        self.skip_con = skip

    def forward(self, x):
        return F.relu(self.res_branch(x) + self.skip_con(x), True)
class Pool3DBlock(nn.Module):
    """Non-overlapping 3D max pooling (kernel == stride == pool_size)."""

    def __init__(self, pool_size):
        super(Pool3DBlock, self).__init__()
        self.pool_size = pool_size

    def forward(self, x):
        k = self.pool_size
        return F.max_pool3d(x, kernel_size=k, stride=k)
class Upsample3DBlock(nn.Module):
    """ConvTranspose3d doubling each spatial dimension, then BN + ReLU.

    Only kernel_size == stride == 2 (exact 2x upsampling) is supported.
    """

    def __init__(self, in_planes, out_planes, kernel_size, stride):
        super(Upsample3DBlock, self).__init__()
        assert kernel_size == 2
        assert stride == 2
        self.block = nn.Sequential(
            nn.ConvTranspose3d(in_planes, out_planes, kernel_size=kernel_size,
                               stride=stride, padding=0, output_padding=0),
            nn.BatchNorm3d(out_planes),
            nn.ReLU(True),
        )

    def forward(self, x):
        return self.block(x)
class EncoderDecorder(nn.Module):
    """Hourglass encoder-decoder: two 2x pooling stages down, an
    attention-augmented bottleneck, two 2x upsampling stages back, with
    residual skip branches at each resolution.
    """

    def __init__(self, config):
        super(EncoderDecorder, self).__init__()
        self.att_channels = config.model.volume_net.att_channels

        # Encoder: downsample and grow channels, twice.
        self.encoder_pool1 = Pool3DBlock(2)
        self.encoder_res1 = Res3DBlock(34, 51)
        self.encoder_pool2 = Pool3DBlock(2)
        self.encoder_res2 = Res3DBlock(51, self.att_channels)

        # Attention bottleneck.
        self.mid_res = AttenRes3DBlock(self.att_channels, self.att_channels, config)

        # Decoder mirrors the encoder.
        self.decoder_res2 = Res3DBlock(self.att_channels, self.att_channels)
        self.decoder_upsample2 = Upsample3DBlock(self.att_channels, 51, 2, 2)
        self.decoder_res1 = Res3DBlock(51, 51)
        self.decoder_upsample1 = Upsample3DBlock(51, 34, 2, 2)

        # Skip branches at full and half resolution.
        self.skip_res1 = Res3DBlock(34, 34)
        self.skip_res2 = Res3DBlock(51, 51)

    def forward(self, x, args):
        skip_x1 = self.skip_res1(x)
        x = self.encoder_pool1(x)
        x = self.encoder_res1(x)
        skip_x2 = self.skip_res2(x)
        x = self.encoder_pool2(x)
        x = self.encoder_res2(x)
        # The bottleneck's attention tensor is returned to the caller.
        x, atten_global = self.mid_res(x, args)
        x = self.decoder_res2(x)
        x = self.decoder_upsample2(x)
        x = x + skip_x2
        x = self.decoder_res1(x)
        x = self.decoder_upsample1(x)
        x = x + skip_x1
        return x, atten_global
class V2VNet(nn.Module):
    """Volume-to-volume network with an attention-augmented hourglass core.

    forward maps an ``input_channels``-channel voxel volume to an
    ``output_channels``-channel volume of the same spatial size, and also
    returns the bottleneck attention tensor.
    """

    def __init__(self, input_channels, output_channels, config):
        super(V2VNet, self).__init__()
        # Stem: lift the input to 34 channels before the hourglass.
        self.front_layers = nn.Sequential(
            Basic3DBlock(input_channels, 17, 3),
            Res3DBlock(17, 34),
        )
        self.encoder_decoder = EncoderDecorder(config)
        # 1x1x1 conv projecting to the requested output channels.
        self.output_layer = nn.Conv3d(34, output_channels, kernel_size=1, stride=1, padding=0)
        self._initialize_weights()

    def forward(self, x, args):
        x = self.front_layers(x)
        x, atten_global = self.encoder_decoder(x, args)
        x = self.output_layer(x)
        return x, atten_global

    def _initialize_weights(self):
        # Small-std Gaussian init for all (transposed) conv weights, zero
        # biases; BatchNorm layers keep their PyTorch defaults.
        for m in self.modules():
            if isinstance(m, nn.Conv3d):
                # nn.init.xavier_normal_(m.weight)
                nn.init.normal_(m.weight, 0, 0.001)
                if m.bias is not None:
                    nn.init.constant_(m.bias, 0)
            elif isinstance(m, nn.ConvTranspose3d):
                # nn.init.xavier_normal_(m.weight)
                nn.init.normal_(m.weight, 0, 0.001)
                if m.bias is not None:
                    nn.init.constant_(m.bias, 0)
| [
"maxiaoxuan@pku.edu.cn"
] | maxiaoxuan@pku.edu.cn |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.