hexsha: string
size: int64
ext: string
lang: string
max_stars_repo_path: string
max_stars_repo_name: string
max_stars_repo_head_hexsha: string
max_stars_repo_licenses: list
max_stars_count: int64
max_stars_repo_stars_event_min_datetime: string
max_stars_repo_stars_event_max_datetime: string
max_issues_repo_path: string
max_issues_repo_name: string
max_issues_repo_head_hexsha: string
max_issues_repo_licenses: list
max_issues_count: int64
max_issues_repo_issues_event_min_datetime: string
max_issues_repo_issues_event_max_datetime: string
max_forks_repo_path: string
max_forks_repo_name: string
max_forks_repo_head_hexsha: string
max_forks_repo_licenses: list
max_forks_count: int64
max_forks_repo_forks_event_min_datetime: string
max_forks_repo_forks_event_max_datetime: string
content: string
avg_line_length: float64
max_line_length: int64
alphanum_fraction: float64
qsc_code_num_words_quality_signal: int64
qsc_code_num_chars_quality_signal: float64
qsc_code_mean_word_length_quality_signal: float64
qsc_code_frac_words_unique_quality_signal: float64
qsc_code_frac_chars_top_2grams_quality_signal: float64
qsc_code_frac_chars_top_3grams_quality_signal: float64
qsc_code_frac_chars_top_4grams_quality_signal: float64
qsc_code_frac_chars_dupe_5grams_quality_signal: float64
qsc_code_frac_chars_dupe_6grams_quality_signal: float64
qsc_code_frac_chars_dupe_7grams_quality_signal: float64
qsc_code_frac_chars_dupe_8grams_quality_signal: float64
qsc_code_frac_chars_dupe_9grams_quality_signal: float64
qsc_code_frac_chars_dupe_10grams_quality_signal: float64
qsc_code_frac_chars_replacement_symbols_quality_signal: float64
qsc_code_frac_chars_digital_quality_signal: float64
qsc_code_frac_chars_whitespace_quality_signal: float64
qsc_code_size_file_byte_quality_signal: float64
qsc_code_num_lines_quality_signal: float64
qsc_code_num_chars_line_max_quality_signal: float64
qsc_code_num_chars_line_mean_quality_signal: float64
qsc_code_frac_chars_alphabet_quality_signal: float64
qsc_code_frac_chars_comments_quality_signal: float64
qsc_code_cate_xml_start_quality_signal: float64
qsc_code_frac_lines_dupe_lines_quality_signal: float64
qsc_code_cate_autogen_quality_signal: float64
qsc_code_frac_lines_long_string_quality_signal: float64
qsc_code_frac_chars_string_length_quality_signal: float64
qsc_code_frac_chars_long_word_length_quality_signal: float64
qsc_code_frac_lines_string_concat_quality_signal: float64
qsc_code_cate_encoded_data_quality_signal: float64
qsc_code_frac_chars_hex_words_quality_signal: float64
qsc_code_frac_lines_prompt_comments_quality_signal: float64
qsc_code_frac_lines_assert_quality_signal: float64
qsc_codepython_cate_ast_quality_signal: float64
qsc_codepython_frac_lines_func_ratio_quality_signal: float64
qsc_codepython_cate_var_zero_quality_signal: bool
qsc_codepython_frac_lines_pass_quality_signal: float64
qsc_codepython_frac_lines_import_quality_signal: float64
qsc_codepython_frac_lines_simplefunc_quality_signal: float64
qsc_codepython_score_lines_no_logic_quality_signal: float64
qsc_codepython_frac_lines_print_quality_signal: float64
qsc_code_num_words: int64
qsc_code_num_chars: int64
qsc_code_mean_word_length: int64
qsc_code_frac_words_unique: null
qsc_code_frac_chars_top_2grams: int64
qsc_code_frac_chars_top_3grams: int64
qsc_code_frac_chars_top_4grams: int64
qsc_code_frac_chars_dupe_5grams: int64
qsc_code_frac_chars_dupe_6grams: int64
qsc_code_frac_chars_dupe_7grams: int64
qsc_code_frac_chars_dupe_8grams: int64
qsc_code_frac_chars_dupe_9grams: int64
qsc_code_frac_chars_dupe_10grams: int64
qsc_code_frac_chars_replacement_symbols: int64
qsc_code_frac_chars_digital: int64
qsc_code_frac_chars_whitespace: int64
qsc_code_size_file_byte: int64
qsc_code_num_lines: int64
qsc_code_num_chars_line_max: int64
qsc_code_num_chars_line_mean: int64
qsc_code_frac_chars_alphabet: int64
qsc_code_frac_chars_comments: int64
qsc_code_cate_xml_start: int64
qsc_code_frac_lines_dupe_lines: int64
qsc_code_cate_autogen: int64
qsc_code_frac_lines_long_string: int64
qsc_code_frac_chars_string_length: int64
qsc_code_frac_chars_long_word_length: int64
qsc_code_frac_lines_string_concat: null
qsc_code_cate_encoded_data: int64
qsc_code_frac_chars_hex_words: int64
qsc_code_frac_lines_prompt_comments: int64
qsc_code_frac_lines_assert: int64
qsc_codepython_cate_ast: int64
qsc_codepython_frac_lines_func_ratio: int64
qsc_codepython_cate_var_zero: int64
qsc_codepython_frac_lines_pass: int64
qsc_codepython_frac_lines_import: int64
qsc_codepython_frac_lines_simplefunc: int64
qsc_codepython_score_lines_no_logic: int64
qsc_codepython_frac_lines_print: int64
effective: string
hits: int64
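The listing above is the column schema (name: dtype); each record that follows gives one value per line in the same column order. A minimal sketch of filtering such a shard on a few of its quality-signal columns, assuming the rows are available as a Parquet file; the file name "shard.parquet" and the thresholds are illustrative assumptions, not part of this dump:

import pandas as pd

# Hypothetical local shard of this dataset stored as Parquet.
df = pd.read_parquet("shard.parquet")

keep = df[
    (df["lang"] == "Python")
    & (df["max_line_length"] <= 1000)                     # drop minified/one-line files
    & (df["alphanum_fraction"] >= 0.25)                   # drop mostly-symbolic content
    & (df["qsc_code_cate_autogen_quality_signal"] == 0)   # drop likely auto-generated code
]
print(f"kept {len(keep)} of {len(df)} rows")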
d84bc5b6f7292dc9f40fd92ef12317fa084962da
2,731
py
Python
mosquitto-1.5.4/test/broker/08-ssl-bridge.py
RainaWLK/mqtt-test
cb4175c8bd1e35deed45941ca61c88fdcc6ddeba
[ "MIT" ]
null
null
null
mosquitto-1.5.4/test/broker/08-ssl-bridge.py
RainaWLK/mqtt-test
cb4175c8bd1e35deed45941ca61c88fdcc6ddeba
[ "MIT" ]
null
null
null
mosquitto-1.5.4/test/broker/08-ssl-bridge.py
RainaWLK/mqtt-test
cb4175c8bd1e35deed45941ca61c88fdcc6ddeba
[ "MIT" ]
1
2021-06-19T17:17:41.000Z
2021-06-19T17:17:41.000Z
#!/usr/bin/env python

import subprocess
import socket
import ssl
import inspect, os, sys

# From http://stackoverflow.com/questions/279237/python-import-a-module-from-a-folder
cmd_subfolder = os.path.realpath(os.path.abspath(os.path.join(os.path.split(inspect.getfile(inspect.currentframe()))[0], "..")))
if cmd_subfolder not in sys.path:
    sys.path.insert(0, cmd_subfolder)

import mosq_test

def write_config(filename, port1, port2):
    with open(filename, 'w') as f:
        f.write("port %d\n" % (port2))
        f.write("\n")
        f.write("connection bridge_test\n")
        f.write("address 127.0.0.1:%d\n" % (port1))
        f.write("topic bridge/# both 0\n")
        f.write("notifications false\n")
        f.write("restart_timeout 2\n")
        f.write("\n")
        f.write("bridge_cafile ../ssl/all-ca.crt\n")
        f.write("bridge_insecure true\n")

(port1, port2) = mosq_test.get_port(2)
conf_file = os.path.basename(__file__).replace('.py', '.conf')
write_config(conf_file, port1, port2)

rc = 1
keepalive = 60
client_id = socket.gethostname()+".bridge_test"
connect_packet = mosq_test.gen_connect(client_id, keepalive=keepalive, clean_session=False, proto_ver=128+4)
connack_packet = mosq_test.gen_connack(rc=0)

mid = 1
subscribe_packet = mosq_test.gen_subscribe(mid, "bridge/#", 0)
suback_packet = mosq_test.gen_suback(mid, 0)

publish_packet = mosq_test.gen_publish("bridge/ssl/test", qos=0, payload="message")

sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
ssock = ssl.wrap_socket(sock, ca_certs="../ssl/all-ca.crt", keyfile="../ssl/server.key", certfile="../ssl/server.crt", server_side=True, ssl_version=ssl.PROTOCOL_TLSv1)
ssock.settimeout(20)
ssock.bind(('', port1))
ssock.listen(5)

broker = mosq_test.start_broker(filename=os.path.basename(__file__), port=port2, use_conf=True)

try:
    (bridge, address) = ssock.accept()
    bridge.settimeout(20)

    if mosq_test.expect_packet(bridge, "connect", connect_packet):
        bridge.send(connack_packet)

        if mosq_test.expect_packet(bridge, "subscribe", subscribe_packet):
            bridge.send(suback_packet)

            pub = subprocess.Popen(['./08-ssl-bridge-helper.py', str(port2)], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            pub.wait()
            (stdo, stde) = pub.communicate()

            if mosq_test.expect_packet(bridge, "publish", publish_packet):
                rc = 0

    bridge.close()
finally:
    os.remove(conf_file)
    try:
        bridge.close()
    except NameError:
        pass
    broker.terminate()
    broker.wait()
    (stdo, stde) = broker.communicate()
    if rc:
        print(stde)
    ssock.close()

exit(rc)
31.390805
168
0.680337
389
2,731
4.606684
0.383033
0.049107
0.027344
0.047433
0.061384
0.046875
0
0
0
0
0
0.021978
0.166972
2,731
86
169
31.755814
0.765714
0.038081
0
0.092308
0
0
0.125381
0.009527
0
0
0
0
0
1
0.015385
false
0.015385
0.076923
0
0.092308
0.015385
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
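Each record above is the schema's 113 fields flattened one value per line, in schema order. A sketch of reassembling such a record into a dict by zipping the schema names with the row values; the two lists here are truncated stand-ins for the full 113-column schema and row, using values taken from the first record:

import json

# Truncated stand-ins: in the real dump both lists have 113 entries each.
schema = ["hexsha", "size", "ext", "lang"]
row = ["d84bc5b6f7292dc9f40fd92ef12317fa084962da", 2731, "py", "Python"]

record = dict(zip(schema, row))
print(json.dumps(record, indent=2))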
d84c3bb5b6974c0f95b489673269ce950a277333
8,658
py
Python
models/architecture/vaegan/trainer.py
EmmaNguyen/feature_adversarial_with_topology_signatures
efa7db6d0fdf5b2505d67d4341dcdb2ab05a97a7
[ "MIT" ]
1
2018-10-08T09:29:51.000Z
2018-10-08T09:29:51.000Z
models/architecture/vaegan/trainer.py
EmmaNguyen/feature_adversarial_with_topology_signatures
efa7db6d0fdf5b2505d67d4341dcdb2ab05a97a7
[ "MIT" ]
4
2018-06-30T18:06:47.000Z
2018-08-16T02:01:59.000Z
models/architecture/vaegan/trainer.py
EmmaNguyen/feature_adversarial_with_topology_signatures
efa7db6d0fdf5b2505d67d4341dcdb2ab05a97a7
[ "MIT" ]
null
null
null
import numpy as np
import torch
import torch.nn.functional as F
from torch.autograd import Variable

from .distributions import rand_circle2d
from ot import gromov_wasserstein2, unif


def rand_projections(embedding_dim, num_samples=50):
    """This fn generates `L` random samples from the latent space's unit sphere.

        Args:
            embedding_dim (int): embedding dimension size
            num_samples (int): number of random projection samples

        Return:
            torch.Tensor
    """
    theta = [w / np.sqrt((w**2).sum())
             for w in np.random.normal(size=(num_samples, embedding_dim))]
    theta = np.asarray(theta)
    return torch.from_numpy(theta).type(torch.FloatTensor)


def _sliced_wasserstein_distance(encoded_samples, distribution_samples, num_projections=50, p=2):
    """Sliced Wasserstein Distance between encoded samples and drawn distribution samples.

        Args:
            encoded_samples (torch.Tensor): embedded training tensor samples
            distribution_samples (torch.Tensor): distribution training tensor samples
            num_projections (int): number of projections to approximate sliced wasserstein distance
            p (int): power of distance metric

        Return:
            torch.Tensor
    """
    # derive latent space dimension size from random samples drawn from a distribution in it
    embedding_dim = distribution_samples.size(1)
    # generate random projections in latent space
    projections = rand_projections(embedding_dim, num_projections)
    # calculate projection of the encoded samples
    encoded_projections = encoded_samples.matmul(projections.transpose(0, 1))
    # calculate projection of the random distribution samples
    distribution_projections = distribution_samples.matmul(projections.transpose(0, 1))
    # calculate the sliced wasserstein distance by
    # sorting the samples per projection and
    # calculating the difference between the
    # encoded samples and drawn samples per projection
    wasserstein_distance = (torch.sort(encoded_projections.transpose(0, 1), dim=1)[0] -
                            torch.sort(distribution_projections.transpose(0, 1), dim=1)[0])
    # distance between them (L2 by default for Wasserstein-2)
    wasserstein_distance_p = torch.pow(wasserstein_distance, p)
    # approximate wasserstein_distance for each projection
    return wasserstein_distance_p.mean()


def sliced_wasserstein_distance(encoded_samples, distribution_fn=rand_circle2d, num_projections=50, p=2):
    """Sliced Wasserstein Distance between encoded samples and drawn distribution samples.

        Args:
            encoded_samples (torch.Tensor): embedded training tensor samples
            distribution_fn (callable): callable to draw random samples
            num_projections (int): number of projections to approximate sliced wasserstein distance
            p (int): power of distance metric

        Return:
            torch.Tensor
    """
    # derive batch size from encoded samples
    batch_size = encoded_samples.size(0)
    # draw samples from latent space prior distribution
    z = distribution_fn(batch_size)
    # approximate wasserstein_distance between encoded and prior distributions
    # for average over each projection
    swd = _sliced_wasserstein_distance(encoded_samples, z, num_projections, p)
    return swd


def _topology_persistence(encoded_samples, distribution_samples, num_projections=50, p=2):
    prior_subcripted_views = distribution_samples
    posterior_subscripted_views = encoded_samples
    # NOTE: AdversariallearnerBatchTrainer is not defined or imported anywhere in this file
    adversarial_learner = AdversariallearnerBatchTrainer()
    adversarial_learner.train_on_batch(prior_subcripted_views)
    posterior_pred = adversarial_learner.eval_on_batch(posterior_subscripted_views)
    bce = F.binary_cross_entropy(posterior_pred)  # NOTE: computed but never used
    # derive latent space dimension size from random samples drawn from a distribution in it
    embedding_dim = distribution_samples.size(1)
    # generate random projections in latent space
    projections = rand_projections(embedding_dim, num_projections)
    # calculate projection of the encoded samples
    #import pdb; pdb.set_trace()
    Tensor = torch.cuda.FloatTensor if torch.cuda.is_available() else torch.FloatTensor
    encoded_projections = encoded_samples.matmul(projections.transpose(0, 1).cuda())
    # calculate projection of the random distribution samples
    distribution_projections = distribution_samples.matmul(projections.transpose(0, 1))
    # calculate the sliced wasserstein distance by
    # sorting the samples per projection and
    # calculating the difference between the
    # encoded samples and drawn samples per projection
    wasserstein_distance = (torch.sort(encoded_projections.transpose(0, 1).cuda(), dim=1)[0] -
                            torch.sort(distribution_projections.transpose(0, 1).cuda(), dim=1)[0])
    # distance between them (L2 by default for Wasserstein-2)
    wasserstein_distance_p = torch.pow(wasserstein_distance, p)
    # approximate wasserstein_distance for each projection
    return wasserstein_distance_p.mean()


def topology_persistence(encoded_samples, distribution_fn=rand_circle2d, num_projections=50, p=2):
    # NOTE: the original default was the misspelled `rand_cirlce2d`, and the call below passed
    # `self._distribution_fn`, `self.num_projections_` and `self.p_` from inside a free function;
    # both corrected here to use the local arguments.
    batch_size = encoded_samples.size(0)
    z = distribution_fn(batch_size)
    return _topology_persistence(encoded_samples, z, num_projections, p)


def gromov_wasserstein_distance(X, Y, device):
    import concurrent.futures
    import scipy as sp  # NOTE: missing in the original, but `sp.spatial` is used below
    import scipy.spatial
    # import pdb; pdb.set_trace()
    mb_size = X.size(0)
    gw_dist = np.zeros(mb_size)
    Tensor = torch.FloatTensor
    with concurrent.futures.ProcessPoolExecutor() as executor:
        # NOTE: as written, executor.map() is missing its function argument
        for i in executor.map(range(mb_size)):
            # Convert data back to an image from one hot encoding with size 28x28
            C1 = sp.spatial.distance.cdist(X[i, :].reshape(28, 28).data.cpu().numpy(),
                                           X[i, :].reshape(28, 28).data.cpu().numpy())
            C2 = sp.spatial.distance.cdist(Y[i, :].reshape(28, 28).data.cpu().numpy(),
                                           Y[i, :].reshape(28, 28).data.cpu().numpy())
            C1 /= C1.max()
            C2 /= C2.max()
            p = unif(28)
            q = unif(28)
            gw_dist[i] = gromov_wasserstein2(C1, C2, p, q, loss_fun='square_loss', epsilon=5e-4)
    print("*" * 100)
    return Variable(Tensor(gw_dist), requires_grad=True).sum()


class SWAEBatchTrainer:
    """Sliced Wasserstein Autoencoder Batch Trainer.

        Args:
            autoencoder (torch.nn.Module): module which implements autoencoder framework
            optimizer (torch.optim.Optimizer): torch optimizer
            distribution_fn (callable): callable to draw random samples
            num_projections (int): number of projections to approximate sliced wasserstein distance
            p (int): power of distance metric
            weight_swd (float): weight of divergence metric compared to reconstruction in loss
            device (torch.Device): torch device
    """
    def __init__(self, autoencoder, optimizer, distribution_fn, num_projections=50, p=2,
                 weight_swd=10.0, device=None):
        self.model_ = autoencoder
        self.optimizer = optimizer
        self._distribution_fn = distribution_fn
        self.embedding_dim_ = self.model_.encoder.embedding_dim_
        self.num_projections_ = num_projections
        self.p_ = p
        self.weight_swd = weight_swd
        self._device = device if device else torch.device('cpu')

    def __call__(self, x):
        return self.eval_on_batch(x)

    def train_on_batch(self, x):
        # reset gradients
        self.optimizer.zero_grad()
        # autoencoder forward pass and loss
        evals = self.eval_on_batch(x)
        # backpropagate loss
        evals['loss'].backward()
        # update encoder and decoder parameters
        self.optimizer.step()
        return evals

    def test_on_batch(self, x):
        # reset gradients
        self.optimizer.zero_grad()
        # autoencoder forward pass and loss
        evals = self.eval_on_batch(x)
        return evals

    def eval_on_batch(self, x):
        x = x.to(self._device)
        recon_x, z = self.model_(x)
        # Equation 4 - this works for 1D
        # import pdb; pdb.set_trace()
        gw = gromov_wasserstein_distance(recon_x, x, self._device)
        # Equation 15; this only works for 2D
        entropy = float(self.weight_swd) * topology_persistence(z, self._distribution_fn,
                                                                self.num_projections_, self.p_)
        # Equation 16: but why is there a bce? Following the original Keras implementation,
        # it is said that (bce and l1) is the first term for equation 16, and w2 for the second term.
        loss = gw + entropy
        return {'loss': loss, 'gw': gw, 'entropy': entropy, 'encode': z, 'decode': recon_x}
46.299465
195
0.71125
1,096
8,658
5.443431
0.211679
0.073248
0.041904
0.029501
0.563024
0.521958
0.501341
0.488099
0.456252
0.422058
0
0.015604
0.208016
8,658
186
196
46.548387
0.854455
0.377916
0
0.227273
0
0
0.008641
0
0
0
0
0
0
1
0.125
false
0
0.079545
0.011364
0.329545
0.011364
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d84cdf2cbca845f67fc205a391078d2af1f1badc
475
py
Python
image_action.py
abhishekchetani/ML_18june
4a6465259c7d0de0cbdc12c1c9f10dd6f925883d
[ "Apache-2.0" ]
null
null
null
image_action.py
abhishekchetani/ML_18june
4a6465259c7d0de0cbdc12c1c9f10dd6f925883d
[ "Apache-2.0" ]
null
null
null
image_action.py
abhishekchetani/ML_18june
4a6465259c7d0de0cbdc12c1c9f10dd6f925883d
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/python

import cv2

img = cv2.imread("/home/abhishek/Desktop/tracks.jpeg")

cv2.line(img, (0, 0), (236, 236), (100, 54, 255), 3)
cv2.rectangle(img, (199, 112), (325, 238), (0, 0, 255), 2)
cv2.circle(img, (262, 175), 60, (255, 200, 0), 3)

font = cv2.FONT_HERSHEY_SIMPLEX
cv2.putText(img, 'TRAIN', (210, 270), font, 1, (90, 200, 140), cv2.LINE_4)

cv2.imshow("actions", img)
cv2.imwrite("/home/abhishek/Desktop/lines.jpeg", img)

cv2.waitKey(0)
cv2.destroyAllWindows()
23.75
65
0.661053
81
475
3.839506
0.580247
0.057878
0.122187
0
0
0
0
0
0
0
0
0.189573
0.111579
475
19
66
25
0.547393
0.033684
0
0
0
0
0.172489
0.146288
0
0
0
0
0
1
0
false
0
0.090909
0
0.090909
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d84e2e63426049e55a4ce07d524f85ba7b495330
14,662
py
Python
GMM_nDim3.py
Sharut/My-Hybrid-GMM-SVM-Model
68f0ab9b86dbb0ca3d1e63f2df0dcc4c7066e424
[ "MIT" ]
1
2019-06-07T13:22:57.000Z
2019-06-07T13:22:57.000Z
GMM_nDim3.py
Sharut/My-Hybrid-GMM-SVM-Model
68f0ab9b86dbb0ca3d1e63f2df0dcc4c7066e424
[ "MIT" ]
null
null
null
GMM_nDim3.py
Sharut/My-Hybrid-GMM-SVM-Model
68f0ab9b86dbb0ca3d1e63f2df0dcc4c7066e424
[ "MIT" ]
1
2020-08-30T06:49:25.000Z
2020-08-30T06:49:25.000Z
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri May 24 09:08:48 2019

@author: uiet_mac1
"""

import numpy as np
import random as rd
import matplotlib.pyplot as plt
from matplotlib.patches import Ellipse
#import hungarian as hg


def random_parameters(data, K):
    """ K is the number of gaussians"""
    """if dimension is d, then mean is dX1"""
    """ init the means, covariances and mixing coefs"""
    cols = (data.shape)[1]
    #print(len(data))
    mu = np.zeros((K, cols))  #mean of k clusters KXD
    for k in range(K):
        idx = np.floor(rd.random()*len(data))
        for col in range(cols):
            mu[k][col] += (data[int(idx)][col])
    sigma = []
    for k in range(K):
        sigma.append(np.cov(data.T))
    pi = np.ones(K)*1.0/K
    print(mu)
    print(sigma)
    return mu, sigma, pi


def e_step(data, K, mu, sigma, pi):
    idvs = (data.shape)[0]
    #cols = (data.shape)[1]
    #print("idvs is " +str(idvs))
    resp = np.zeros((idvs, K))
    for i in range(idvs):
        for k in range(K):
            resp[i][k] = pi[k]*gaussian(data[i], mu[k], sigma[k])/likelihood(data[i], K, mu, sigma, pi)
    #print("responsibitlies is ")
    #print(resp)
    return resp


def log_likelihood(data, K, mu, sigma, pi):
    """ marginal over X """
    log_likelihood = 0.0
    for n in range(len(data)):
        log_likelihood += np.log(likelihood(data[n], K, mu, sigma, pi))
    return log_likelihood


def likelihood(x, K, mu, sigma, pi):
    rs = 0.0
    for k in range(K):
        rs += pi[k]*gaussian(x, mu[k], sigma[k])
    return rs


def m_step(data, K, resp):
    """ find the parameters that maximize the log-likelihood given the current resp."""
    idvs = (data.shape)[0]
    cols = (data.shape)[1]
    mu = np.zeros((K, cols))
    sigma = np.zeros((K, cols, cols))
    pi = np.zeros(K)
    marg_resp = np.zeros(K)
    for k in range(K):
        for i in range(idvs):
            marg_resp[k] += resp[i][k]
            mu[k] += (resp[i][k])*data[i]
        mu[k] /= marg_resp[k]
        for i in range(idvs):
            #x_i = (np.zeros((1,cols))+data[k])
            x_mu = np.zeros((1,cols))+data[i]-mu[k]
            sigma[k] += (resp[i][k]/marg_resp[k])*x_mu*x_mu.T
        pi[k] = marg_resp[k]/idvs
    return mu, sigma, pi


def gaussian(x, mu, sigma):
    """ compute the pdf of the multi-var gaussian """
    idvs = len(x)
    norm_factor = (2*np.pi)**idvs
    norm_factor *= np.linalg.det(sigma)
    norm_factor = 1.0/np.sqrt(norm_factor)
    x_mu = np.matrix(x-mu)
    rs = norm_factor*np.exp(-0.5*x_mu*np.linalg.inv(sigma)*x_mu.T)
    return rs


def EM(data, rst, K, threshold):
    converged = False
    mu, sigma, pi = random_parameters(data, K)
    likelihood_list = []
    current_log_likelihood = log_likelihood(data, K, mu, sigma, pi)
    max_iter = 100
    for it in range(max_iter):
        likelihood_list.append(float(current_log_likelihood[0][0]))
        print(rst, " | ", it, " | ", current_log_likelihood[0][0])
        #print("Mixing proportion is ", pi )
        resp = e_step(data, K, mu, sigma, pi)
        mu, sigma, pi = m_step(data, K, resp)
        new_log_likelihood = log_likelihood(data, K, mu, sigma, pi)
        if (abs(new_log_likelihood-current_log_likelihood) < threshold):
            converged = True
            break
        current_log_likelihood = new_log_likelihood
    print(converged)
    plt.plot(likelihood_list)
    plt.ylabel('log likelihood')
    plt.show()
    return current_log_likelihood, mu, sigma, pi, resp

#######################################################################

def assign_clusters(K, resp):
    idvs = len(resp)
    clusters = np.zeros(idvs, dtype=int)
    for i in range(idvs):
        #clusters[i][k] = 0
        clss = 0
        for k in range(K):
            if resp[i][k] > resp[i][clss]:
                clss = k
                resp[i][clss] = resp[i][k]
        clusters[i] = clss
    return clusters

'''
def compute_statistics(clusters, ref_clusters, K):
    mat = make_ce_matrix(clusters, ref_clusters, K)
    #hung_solver = hg.Hungarian()
    rs = hung_solver.compute(mat, False)
    tmp_clusters = np.array(clusters)
    for old, new in rs:
        clusters[np.where(tmp_clusters == old)] = new
        #print old, new
    #print clusters, ref_clusters
    nbrIts = 0
    for k in range(K):
        ref = np.where(ref_clusters == k)[0]
        clust = np.where(clusters == k)[0]
        nbrIts += len(np.intersect1d(ref, clust))
        print(len(np.intersect1d(ref, clust)))
    return nbrIts

def make_ce_matrix(clusters, ref_clusters, K):
    mat = np.zeros((K, K), dtype=int)
    for i in range(K):
        for j in xrange(K):
            ref_i = np.where(ref_clusters == i)[0]
            clust_j = np.where(clusters == j)[0]
            its = np.intersect1d(ref_i, clust_j)
            mat[i,j] = len(ref_i) + len(clust_j) - 2*len(its)
    return mat
'''

########################################################################

def read_data(file_name):
    """ read the data from filename as numpy array """
    with open(file_name) as f:
        data = np.loadtxt(f, delimiter=",", dtype="float", skiprows=0, usecols=(0,1,2,3))
    with open(file_name) as f:
        ref_classes = np.loadtxt(f, delimiter=",", dtype="str", skiprows=0, usecols=[4])
    unique_ref_classes = np.unique(ref_classes)
    ref_clusters = np.argmax(ref_classes[np.newaxis,:]==unique_ref_classes[:,np.newaxis], axis=0)
    return data, ref_clusters


def f(t):
    return t


def plot_ellipse(ax, mu, sigma, color="k"):
    """ Based on
    http://stackoverflow.com/questions/17952171/not-sure-how-to-fit-data-with-a-gaussian-python.
    """
    # Compute eigenX_embeddedues and associated eigenvectors
    X_embeddeds, vecs = np.linalg.eigh(sigma)
    # Compute "tilt" of ellipse using first eigenvector
    x, y = vecs[:, 0]
    theta = np.degrees(np.arctan2(y, x))
    # EigenX_embeddedues give length of ellipse along each eigenvector
    w, h = 2 * np.sqrt(X_embeddeds)
    ax.tick_params(axis='both', which='major', labelsize=20)
    ellipse = Ellipse(mu, w, h, theta, color=color)  # color="k")
    ellipse.set_clip_box(ax.bbox)
    ellipse.set_alpha(0.2)
    ax.add_artist(ellipse)


def error_ellipse(mu, cov, ax=None, factor=1.0, **kwargs):
    """ Plot the error ellipse at a point given its covariance matrix. """
    # some sane defaults
    facecolor = kwargs.pop('facecolor', 'none')
    edgecolor = kwargs.pop('edgecolor', 'k')
    x, y = mu
    U, S, V = np.linalg.svd(cov)
    theta = np.degrees(np.arctan2(U[1, 0], U[0, 0]))
    ellipsePlot = Ellipse(xy=[x, y], width=2 * np.sqrt(S[0]) * factor,
                          height=2 * np.sqrt(S[1]) * factor,
                          angle=theta, facecolor=facecolor,
                          edgecolor=edgecolor, **kwargs)
    if ax is None:
        ax = plt.gca()
    ax.add_patch(ellipsePlot)
    return ellipsePlot


def _plot_gaussian(mean, covariance, color, zorder=0):
    """Plots the mean and 2-std ellipse of a given Gaussian"""
    plt.plot(mean[0], mean[1], color[0] + ".", zorder=zorder)
    if covariance.ndim == 1:
        covariance = np.diag(covariance)
    radius = np.sqrt(5.991)
    eigX_embeddeds, eigvecs = np.linalg.eig(covariance)
    axis = np.sqrt(eigX_embeddeds) * radius
    slope = eigvecs[1][0] / eigvecs[1][1]
    angle = 180.0 * np.arctan(slope) / np.pi
    plt.axes().add_artist(Ellipse(
        mean, 2 * axis[0], 2 * axis[1], angle=angle,
        fill=False, color=color, linewidth=1, zorder=zorder
    ))
    plt.show()


def _plot_cov_ellipse(cov, pos, nstd=2, ax=None, **kwargs):
    """
    Plots an `nstd` sigma error ellipse based on the specified covariance
    matrix (`cov`). Additional keyword arguments are passed on to the
    ellipse patch artist.

    Parameters
    ----------
        cov : The 2x2 covariance matrix to base the ellipse on
        pos : The location of the center of the ellipse. Expects a 2-element
            sequence of [x0, y0].
        nstd : The radius of the ellipse in numbers of standard deviations.
            Defaults to 2 standard deviations.
        ax : The axis that the ellipse will be plotted on. Defaults to the
            current axis.
        Additional keyword arguments are pass on to the ellipse patch.

    Returns
    -------
        A matplotlib ellipse artist
    """
    from matplotlib import pyplot as plt
    from matplotlib.patches import Ellipse

    def eigsorted(cov):
        X_embeddeds, vecs = np.linalg.eigh(cov)
        order = X_embeddeds.argsort()[::-1]
        return X_embeddeds[order], vecs[:, order]

    if ax is None:
        ax = plt.gca()

    X_embeddeds, vecs = eigsorted(cov)
    theta = np.degrees(np.arctan2(*vecs[:, 0][::-1]))

    # Width and height are "full" widths, not radius
    width, height = 2 * nstd * np.sqrt(X_embeddeds)
    ellip = Ellipse(xy=pos, width=width, height=height, angle=theta, **kwargs)

    ax.add_artist(ellip)
    plt.show()
    return ellip


def main():
    print("begining...")
    file_name = "iris.data"
    nbr_restarts = 5
    threshold = 0.001
    K = 3
    data, ref_clusters = read_data(file_name)
    print("#restart | EM iteration | log likelihood")
    print("----------------------------------------")
    max_likelihood_score = float("-inf")
    for rst in range(nbr_restarts):
        log_likelihood, mu, sigma, pi, resp = EM(data, rst, K, threshold)
        if log_likelihood > max_likelihood_score:
            max_likelihood_score = log_likelihood
            max_mu, max_sigma, max_pi, max_resp = mu, sigma, pi, resp
        #print("Iteration is"+ str(rst))
        #print("mixing is ")
        #print(max_pi)
        #print("mean is ")
        #print(max_mu)
        #print("sigma is ")
        #print(max_sigma)
    #print(max_mu, max_sigma, max_pi)
    print("mean matrix is ")
    print(max_mu)
    clusters = assign_clusters(K, max_resp)
    #cost = compute_statistics(clusters, ref_clusters, K)
    print(clusters)
    print(ref_clusters)
    #print(cost*1.0/len(data))

    from mpl_toolkits.mplot3d import Axes3D
    #with first three variables are on the axis and the fourth being color:
    import matplotlib.pyplot as plt
    fig = plt.figure(figsize=(15, 12))
    ax = fig.add_subplot(111, projection='3d')
    sp = ax.scatter(data[:,0], data[:,1], data[:,2], s=20, c=data[:,3])
    fig.colorbar(sp)
    plt.show()

    from sklearn.manifold import TSNE
    data = np.concatenate((data, mu), axis=0)
    print(data)
    X = np.array(data)
    #means = np.array(mu)
    '''
    X_embedded = TSNE(n_components=1).fit_transform(X)
    print("!!!!")
    figs = plt.figure(figsize=(15, 12))
    plt.plot(X_embedded,'ro')
    plt.plot( X_embedded[150:153],'g^')
    t1 = np.linspace(0, 140, 100)
    plt.plot(t1,[X_embedded[150]]*100 , 'g^')
    plt.plot(t1,[X_embedded[151]]*100 , 'g^')
    plt.plot(t1,[X_embedded[152]]*100 , 'g^')
    plt.ylabel('some numbers')
    plt.show()
    '''
    X_embedded = TSNE(n_components=2).fit_transform(X)
    print(X_embedded)
    print("!!!!")
    figs = plt.figure(figsize=(15, 12))
    plt.plot(X_embedded[0:150,0], X_embedded[0:150,1], 'ro')
    plt.plot(X_embedded[150:153,0], X_embedded[150:153,1], 'g^')
    plt.ylabel('some numbers')

    A = np.matrix(max_sigma[0])
    N, M = A.shape
    assert N % 2 == 0
    assert M % 2 == 0
    A0 = np.empty((N//2, M//2))
    for i in range(N//2):
        for j in range(M//2):
            A0[i,j] = A[2*i:2*i+2, 2*j:2*j+2].sum()

    A = np.matrix(max_sigma[1])
    N, M = A.shape
    assert N % 2 == 0
    assert M % 2 == 0
    A1 = np.empty((N//2, M//2))
    for i in range(N//2):
        for j in range(M//2):
            A1[i,j] = A[2*i:2*i+2, 2*j:2*j+2].sum()

    A = np.matrix(max_sigma[2])
    N, M = A.shape
    assert N % 2 == 0
    assert M % 2 == 0
    A2 = np.empty((N//2, M//2))
    for i in range(N//2):
        for j in range(M//2):
            A2[i,j] = A[2*i:2*i+2, 2*j:2*j+2].sum()

    print(A0)
    print(A1)
    print(A2)
    print(X_embedded[150,:])
    #_plot_cov_ellipse(A0,X_embedded[150,:] )

    mean = X_embedded[150,:]
    covariance = A0
    plt.plot(mean[0], mean[1], 'g' + ".", zorder=0)
    if covariance.ndim == 1:
        covariance = np.diag(covariance)
    radius = np.sqrt(5.991)
    eigX_embeddeds, eigvecs = np.linalg.eig(covariance)
    axis = np.sqrt(eigX_embeddeds) * radius
    slope = eigvecs[1][0] / eigvecs[1][1]
    angle = 180.0 * np.arctan(slope) / np.pi
    plt.axes().add_artist(Ellipse(
        mean, 2 * axis[0], 2 * axis[1], angle=angle,
        fill=False, color='g', linewidth=1, zorder=0
    ))

    mean = X_embedded[151,:]
    covariance = A1
    plt.plot(mean[0], mean[1], 'g' + ".", zorder=0)
    if covariance.ndim == 1:
        covariance = np.diag(covariance)
    radius = np.sqrt(5.991)
    eigX_embeddeds, eigvecs = np.linalg.eig(covariance)
    axis = np.sqrt(eigX_embeddeds) * radius
    slope = eigvecs[1][0] / eigvecs[1][1]
    angle = 180.0 * np.arctan(slope) / np.pi
    plt.axes().add_artist(Ellipse(
        mean, 2 * axis[0], 2 * axis[1], angle=angle,
        fill=False, color='g', linewidth=1, zorder=0
    ))

    mean = X_embedded[152,:]
    covariance = A2
    plt.plot(mean[0], mean[1], 'g' + ".", zorder=0)
    if covariance.ndim == 1:
        covariance = np.diag(covariance)
    radius = np.sqrt(5.991)
    eigX_embeddeds, eigvecs = np.linalg.eig(covariance)
    axis = np.sqrt(eigX_embeddeds) * radius
    slope = eigvecs[1][0] / eigvecs[1][1]
    angle = 180.0 * np.arctan(slope) / np.pi
    plt.axes().add_artist(Ellipse(
        mean, 2 * axis[0], 2 * axis[1], angle=angle,
        fill=False, color='g', linewidth=1, zorder=0
    ))

    plt.show()
    #_plot_gaussian(X_embedded[150,:], A0,'r')
    #error_ellipse(X_embedded[150,:], A0)
    #plot_ellipse(plt, X_embedded[150,:], A0 )
    #np.savetxt("mu.txt",max_mu)
    return max_mu


if __name__ == '__main__':
    main()
29.033663
103
0.559132
2,096
14,662
3.81584
0.166985
0.019255
0.016879
0.010003
0.384471
0.311453
0.245436
0.213303
0.195049
0.185046
0
0.034831
0.283317
14,662
505
104
29.033663
0.726304
0.149366
0
0.352941
0
0
0.022873
0.003781
0
0
0
0
0.022059
1
0.058824
false
0
0.033088
0.003676
0.143382
0.066176
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d84e7b0326da78457b27f3f5b7fda50734903f66
775
py
Python
Data-Structures/Stacks/stack.py
hussamEL-Hwary/DS-Algo-Handbook
86a97d586a4ca8b17168c0a9f5a9f43f856eba58
[ "MIT" ]
18
2016-11-01T04:00:36.000Z
2021-09-13T14:26:35.000Z
Data-Structures/Stacks/stack.py
JEERU/DS-Algo-Handbook
86a97d586a4ca8b17168c0a9f5a9f43f856eba58
[ "MIT" ]
60
2016-10-11T14:50:47.000Z
2016-10-31T11:05:01.000Z
Data-Structures/Stacks/stack.py
JEERU/DS-Algo-Handbook
86a97d586a4ca8b17168c0a9f5a9f43f856eba58
[ "MIT" ]
87
2016-09-08T05:04:50.000Z
2016-10-30T19:19:53.000Z
"""Implementation of a stack in python.""" class Stack: def __init__(self): self.items = [] def push(self, item): """Add an item to the stack.""" self.items.append(item) def pop(self): """Remove the most recent item from the stack.""" if len(self.items) > 0: last = self.items[-1] del(self.items[-1]) return last else: raise IndexError def peek(self): """Return the most recent item to be pushed to the stack.""" return self.items[-1] def isEmpty(self): """Returns True if stack is empty .""" return not len(self.items) >= 1 def size(self): """Return the size of the stack.""" return len(self.items)
24.21875
68
0.536774
102
775
4.039216
0.421569
0.174757
0.097087
0.082524
0
0
0
0
0
0
0
0.009709
0.335484
775
31
69
25
0.790291
0.289032
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0
0
0.611111
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
1
d854e1572c3ce2b3c51dea839fbb388e61fd565b
535
py
Python
li_hang/test/test_knn.py
LucienShui/HelloMachineLearning
b00a4b3791808ace3b1e45112350c2b3c539995e
[ "Apache-2.0" ]
2
2019-07-28T08:25:40.000Z
2019-07-29T05:29:10.000Z
li_hang/test/test_knn.py
LucienShui/HelloMachineLearning
b00a4b3791808ace3b1e45112350c2b3c539995e
[ "Apache-2.0" ]
null
null
null
li_hang/test/test_knn.py
LucienShui/HelloMachineLearning
b00a4b3791808ace3b1e45112350c2b3c539995e
[ "Apache-2.0" ]
null
null
null
import unittest
import logging

import numpy

from knn import KNN


class MyTestCase(unittest.TestCase):
    def test_something(self):
        logging.basicConfig()
        dataset = numpy.array([
            [[5, 4], 1],
            [[9, 6], 1],
            [[4, 7], 1],
            [[2, 3], -1],
            [[8, 1], -1],
            [[7, 2], -1]
        ])
        knn = KNN(dataset, 1)
        test_point = numpy.array([5, 3])
        self.assertEqual(knn.predict(test_point), 1)


if __name__ == '__main__':
    unittest.main()
18.448276
52
0.48785
63
535
3.968254
0.492063
0.08
0.088
0
0
0
0
0
0
0
0
0.063401
0.351402
535
28
53
19.107143
0.657061
0
0
0
0
0
0.014953
0
0
0
0
0
0.05
1
0.05
false
0
0.2
0
0.3
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d8560e6218ec99112b9cb038f1f87fe00535d31f
2,130
py
Python
src/taming.py
dwaybright/g729a_python
a9c78d9a6b2934c9742f63e3ade225fe4aee245e
[ "Unlicense" ]
null
null
null
src/taming.py
dwaybright/g729a_python
a9c78d9a6b2934c9742f63e3ade225fe4aee245e
[ "Unlicense" ]
null
null
null
src/taming.py
dwaybright/g729a_python
a9c78d9a6b2934c9742f63e3ade225fe4aee245e
[ "Unlicense" ]
null
null
null
from basic_op import *
from ld8a import *
from tab_ld8a import *

L_exc_err = [0] * 4


def Init_exc_err() -> None:
    global L_exc_err
    for i in range(0, 4):
        L_exc_err[i] = MAX_INT_14  # Q14


def test_err(T0: int, T0_frac: int) -> int:
    """
    # (o) flag set to 1 if taming is necessary
    # (i) T0 - integer part of pitch delay
    # (i) T0_frac - fractional part of pitch delay
    """
    if T0_frac > 0:
        t1 = add(T0, 1)
    else:
        t1 = T0

    i = sub(t1, (L_SUBFR + L_INTER10))
    if i < 0:
        i = 0
    zone1 = tab_tab_zone[i]

    i = add(t1, (L_INTER10 - 2))
    zone2 = tab_tab_zone[i]

    L_maxloc = -1
    flag = 0
    # NOTE: `zone` is undefined as written; `zone2` appears to be intended here
    for i in range(zone, zone1 + 1, -1):
        L_acc = L_sub(L_exc_err[i], L_maxloc)
        if L_acc > 0:
            L_maxloc = L_exc_err[i]
    L_acc = L_sub(L_maxloc, L_THRESH_ERR)
    if L_acc > 0:
        flag = 1

    return flag


def update_exc_err(gain_pit: int, T0: int) -> None:
    """
    # (i) pitch gain
    # (i) integer part of pitch delay
    """
    L_worst = -1
    n = sub(T0, L_SUBFR)

    if n < 0:
        hi, lo = L_Extract(L_exc_err[0])
        L_temp = Mpy_32_16(hi, lo, gain_pit)
        L_temp = L_shl(L_temp, 1)
        L_temp = L_add(MAX_INT_14, L_temp)
        L_acc = L_sub(L_temp, L_worst)
        if L_acc > 0:
            L_worst = L_temp

        hi, lo = L_Extract(L_temp)
        L_temp = Mpy_32_16(hi, lo, gain_pit)
        L_temp = L_shl(L_temp, 1)
        L_temp = L_add(MAX_INT_14, L_temp)
        L_acc = L_sub(L_temp, L_worst)
        if L_acc > 0:
            L_worst = L_temp
    else:
        zone1 = tab_tab_zone[n]
        i = sub(T0, 1)
        zone2 = tab_tab_zone[i]
        for i in range(zone1, zone2 + 1):
            hi, lo = L_Extract(L_exc_err[i])
            L_temp = Mpy_32_16(hi, lo, gain_pit)
            L_temp = L_shl(L_temp, 1)
            L_temp = L_add(MAX_INT_14, L_temp)
            L_acc = L_sub(L_temp, L_worst)
            if L_acc > 0:
                L_worst = L_temp

    for i in range(3, 0, -1):
        L_exc_err[i] = L_exc_err[i-1]
    L_exc_err[0] = L_worst
21.958763
51
0.530047
380
2,130
2.655263
0.181579
0.109019
0.077304
0.047572
0.477701
0.326065
0.326065
0.288404
0.288404
0.288404
0
0.059942
0.357746
2,130
96
52
22.1875
0.677632
0.088732
0
0.387097
0
0
0
0
0
0
0
0
0
1
0.048387
false
0
0.048387
0
0.112903
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d85630288b52620b9339fd834ada73fb4075abbe
558
py
Python
PyObjCTest/test_nspagelayout.py
linuxfood/pyobjc-framework-Cocoa-test
3475890f165ab26a740f13d5afe4c62b4423a140
[ "MIT" ]
null
null
null
PyObjCTest/test_nspagelayout.py
linuxfood/pyobjc-framework-Cocoa-test
3475890f165ab26a740f13d5afe4c62b4423a140
[ "MIT" ]
null
null
null
PyObjCTest/test_nspagelayout.py
linuxfood/pyobjc-framework-Cocoa-test
3475890f165ab26a740f13d5afe4c62b4423a140
[ "MIT" ]
null
null
null
import AppKit
from PyObjCTools.TestSupport import TestCase
import objc


class TestNSPageLayout(TestCase):
    def testMethods(self):
        self.assertArgIsSEL(
            AppKit.NSPageLayout.beginSheetWithPrintInfo_modalForWindow_delegate_didEndSelector_contextInfo_,  # noqa: B950
            3,
            b"v@:@" + objc._C_NSInteger + b"^v",
        )
        self.assertArgHasType(
            AppKit.NSPageLayout.beginSheetWithPrintInfo_modalForWindow_delegate_didEndSelector_contextInfo_,  # noqa: B950
            4,
            b"^v",
        )
31
122
0.668459
48
558
7.520833
0.583333
0.016621
0.227147
0.304709
0.531856
0.531856
0.531856
0.531856
0.531856
0
0
0.019324
0.258065
558
17
123
32.823529
0.852657
0.037634
0
0.133333
0
0
0.014981
0
0
0
0
0
0.133333
1
0.066667
false
0
0.2
0
0.333333
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
d85641b37bef389ead174fd99408f2c24628f116
4,458
py
Python
base_site/mainapp/models.py
ricardochaves/financeiro-bot
2c48be4355e3c8630c36aa846c16042f22b88271
[ "MIT" ]
4
2020-01-21T00:21:44.000Z
2021-06-15T19:38:36.000Z
base_site/mainapp/models.py
ricardochaves/financeiro-bot
2c48be4355e3c8630c36aa846c16042f22b88271
[ "MIT" ]
173
2019-11-18T08:19:44.000Z
2021-09-08T01:37:19.000Z
base_site/mainapp/models.py
ricardochaves/financeiro-bot
2c48be4355e3c8630c36aa846c16042f22b88271
[ "MIT" ]
3
2020-01-28T19:19:35.000Z
2021-05-01T02:33:36.000Z
from datetime import datetime

from django.db import models


class TypeEntry(models.Model):
    name = models.CharField(max_length=70, verbose_name="Nome", default="", blank=False, null=False)

    def __str__(self):
        return self.name


class FamilyMember(models.Model):
    name = models.CharField(max_length=70, verbose_name="Nome", default="", blank=False, null=False)

    def __str__(self):
        return self.name


class Category(models.Model):
    name = models.CharField(max_length=70, verbose_name="Nome", default="", blank=False, null=False)
    enable = models.BooleanField(default=True, verbose_name="Enable?")

    def __str__(self):
        return self.name


class FullCommand(models.Model):
    PAYMENT_DATE_OPTIONS = (
        (1, "Data do Dia"),
        (2, "Data do Cartão (15)"),
        (3, "Perguntar"),
        (4, "Dia Seguinte"),
        (5, "Mês Seguinte"),
        (6, "Dia 5 mês vigente"),
        (7, "Dia 5 mês que vem"),
        (8, "Crédito Parcelado"),
    )

    command = models.CharField(max_length=70, verbose_name="Comando", default="", blank=False, null=False)
    entry_date = models.BooleanField(verbose_name="Data de Lançamento: Usa data do dia?")
    payment_date = models.IntegerField(choices=PAYMENT_DATE_OPTIONS, verbose_name="Data de Pagamento", default=1)
    debit = models.DecimalField(max_digits=6, verbose_name="Débito", decimal_places=2, blank=True, null=True)
    credit = models.DecimalField(max_digits=6, verbose_name="Crédito", decimal_places=2, blank=True, null=True)
    category = models.ForeignKey(Category, on_delete=models.CASCADE, verbose_name="Categoria", blank=True, null=True)
    name = models.ForeignKey(FamilyMember, on_delete=models.CASCADE, blank=True, verbose_name="Nome", null=True)
    description = models.CharField(max_length=400, verbose_name="Descrição", blank=True, null=True)
    type_entry = models.ForeignKey(TypeEntry, on_delete=models.CASCADE, verbose_name="Tipo", blank=True, null=True)

    def __str__(self):
        return self.command


class Records(models.Model):
    db_included_date_time = models.DateTimeField(auto_now=True, null=False, verbose_name="Inclusão no Bando de Dados")
    create_date_time = models.DateTimeField(
        default=datetime.now, null=False, blank=False, verbose_name="Data do Lançamento"
    )
    payment_date_time = models.DateTimeField(
        default=datetime.now, null=True, blank=False, verbose_name="Data da Execução"
    )
    debit = models.DecimalField(max_digits=6, verbose_name="Débito", decimal_places=2, blank=True, null=True)
    credit = models.DecimalField(max_digits=6, verbose_name="Crédito", decimal_places=2, blank=True, null=True)
    category = models.ForeignKey(Category, on_delete=models.CASCADE, verbose_name="Categoria", blank=True, null=True)
    name = models.ForeignKey(FamilyMember, on_delete=models.CASCADE, blank=True, verbose_name="Nome", null=True)
    type_entry = models.ForeignKey(TypeEntry, on_delete=models.CASCADE, verbose_name="Tipo", blank=True, null=True)
    description = models.CharField(max_length=400, verbose_name="Descrição", default="", blank=True, null=True)

    class Meta:
        indexes = [
            models.Index(fields=["db_included_date_time"], name="db_included_date_time_idx"),
            models.Index(fields=["create_date_time"], name="create_date_time_idx"),
            models.Index(fields=["payment_date_time"], name="payment_date_time_idx"),
            models.Index(fields=["category"], name="category_idx"),
            models.Index(fields=["name"], name="name_idx"),
            models.Index(fields=["type_entry"], name="type_entry_idx"),
        ]


class Goal(models.Model):
    PERIOD_CHOICES = ((1, "This Week"),)

    category = models.ForeignKey(Category, on_delete=models.CASCADE, verbose_name="Categoria", blank=True, null=True)
    name_family = models.ForeignKey(FamilyMember, on_delete=models.CASCADE, blank=True, verbose_name="Nome", null=True)
    type_entry = models.ForeignKey(TypeEntry, on_delete=models.CASCADE, verbose_name="Tipo", blank=True, null=True)
    name = models.CharField(max_length=40, verbose_name="Name")
    value = models.DecimalField(max_digits=6, verbose_name="Value", decimal_places=2)
    period = models.IntegerField(choices=PERIOD_CHOICES, verbose_name="Data de Pagamento", default=1)
    enable = models.BooleanField(default=True, verbose_name="Enable")

    def __str__(self):
        return self.name
44.138614
119
0.711306
579
4,458
5.274611
0.181347
0.104453
0.051081
0.066798
0.703667
0.666339
0.63556
0.586771
0.554682
0.554682
0
0.010898
0.156124
4,458
100
120
44.58
0.800904
0
0
0.338028
0
0
0.126738
0.015029
0
0
0
0
0
1
0.070423
false
0
0.028169
0.070423
0.704225
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
d8587509c1aa42b6092003d2a561d39843b7a11a
1,306
bzl
Python
third_party/com_google_boringssl/workspace.bzl
stuarteberg/tensorstore
2c22a3c9f798b0fbf023031633c58cc7c644235d
[ "Apache-2.0" ]
null
null
null
third_party/com_google_boringssl/workspace.bzl
stuarteberg/tensorstore
2c22a3c9f798b0fbf023031633c58cc7c644235d
[ "Apache-2.0" ]
null
null
null
third_party/com_google_boringssl/workspace.bzl
stuarteberg/tensorstore
2c22a3c9f798b0fbf023031633c58cc7c644235d
[ "Apache-2.0" ]
null
null
null
# Copyright 2020 The TensorStore Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

load(
    "//third_party:repo.bzl",
    "third_party_http_archive",
)
load("@bazel_tools//tools/build_defs/repo:utils.bzl", "maybe")

def repo():
    maybe(
        third_party_http_archive,
        name = "com_google_boringssl",
        urls = [
            # When updating, always use commit from master-with-bazel branch.
            "https://github.com/google/boringssl/archive/34693f02f6cf9ac7982778b761c16a27f32433c1.tar.gz",  # 2019-09-25
        ],
        sha256 = "633e2e806d01a07a20725d1e68fff0be96db18344ed4389c00de042dcd874cac",
        strip_prefix = "boringssl-34693f02f6cf9ac7982778b761c16a27f32433c1",
        system_build_file = Label("//third_party/com_google_boringssl:system.BUILD.bazel"),
    )
39.575758
120
0.725881
162
1,306
5.746914
0.62963
0.064447
0.058002
0.034372
0
0
0
0
0
0
0
0.108716
0.183002
1,306
32
121
40.8125
0.763824
0.486983
0
0
0
0
0.571865
0.394495
0
0
0
0
0
1
0.0625
true
0
0
0
0.0625
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
1
d85908aea84d7f7f730620a14ba03abbda1756ba
45
py
Python
common/__init__.py
weipeng/pyepi
1af5eab78e20f55fadccef8cb7b7aad6d503b31b
[ "MIT" ]
1
2020-03-28T06:29:28.000Z
2020-03-28T06:29:28.000Z
common/__init__.py
weipeng/pyepi
1af5eab78e20f55fadccef8cb7b7aad6d503b31b
[ "MIT" ]
null
null
null
common/__init__.py
weipeng/pyepi
1af5eab78e20f55fadccef8cb7b7aad6d503b31b
[ "MIT" ]
2
2020-03-29T16:12:52.000Z
2020-04-28T22:53:33.000Z
__all__ = [
    'stats',
    'utils',
    'linalg'
]
11.25
30
0.511111
4
45
4.75
1
0
0
0
0
0
0
0
0
0
0
0
0.244444
45
3
31
15
0.558824
0
0
0
0
0
0.355556
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
d8593d6692d7d033994d2758b7537e9bd3034f63
45
py
Python
skforecast/__init__.py
JavierEscobarOrtiz/skforecast
a3af4a1dd4201c582f159d4e3a1734ed6d29b6c5
[ "MIT" ]
1
2021-12-01T09:21:21.000Z
2021-12-01T09:21:21.000Z
skforecast/__init__.py
JavierEscobarOrtiz/skforecast
a3af4a1dd4201c582f159d4e3a1734ed6d29b6c5
[ "MIT" ]
null
null
null
skforecast/__init__.py
JavierEscobarOrtiz/skforecast
a3af4a1dd4201c582f159d4e3a1734ed6d29b6c5
[ "MIT" ]
null
null
null
name = "skforecast"
__version__ = "0.5.dev1"
15
24
0.688889
6
45
4.5
1
0
0
0
0
0
0
0
0
0
0
0.076923
0.133333
45
2
25
22.5
0.615385
0
0
0
0
0
0.4
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
d859f7d3fb379be25a76d21ab67ead4d433c1d6c
777
py
Python
src/weathair_backend/weathair_backend/api/migrations/0004_auto_20201004_0412.py
willson556/weathair
e2b988f216f81a55d32d432ed41a09bf19d909b2
[ "MIT" ]
null
null
null
src/weathair_backend/weathair_backend/api/migrations/0004_auto_20201004_0412.py
willson556/weathair
e2b988f216f81a55d32d432ed41a09bf19d909b2
[ "MIT" ]
null
null
null
src/weathair_backend/weathair_backend/api/migrations/0004_auto_20201004_0412.py
willson556/weathair
e2b988f216f81a55d32d432ed41a09bf19d909b2
[ "MIT" ]
null
null
null
# Generated by Django 3.1.2 on 2020-10-04 04:12

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('api', '0003_auto_20201004_0401'),
    ]

    operations = [
        migrations.AlterField(
            model_name='airnowreportingarea',
            name='name',
            field=models.CharField(db_index=True, max_length=45),
        ),
        migrations.AlterField(
            model_name='airnowreportingarea',
            name='state_code',
            field=models.CharField(db_index=True, max_length=2),
        ),
        migrations.AddIndex(
            model_name='airnowreportingarea',
            index=models.Index(fields=['name', 'state_code'], name='api_airnowr_name_3a07e3_idx'),
        ),
    ]
27.75
98
0.604891
80
777
5.675
0.5375
0.059471
0.185022
0.127753
0.405286
0.405286
0.176211
0.176211
0
0
0
0.067736
0.277992
777
27
99
28.777778
0.741533
0.057915
0
0.380952
1
0
0.189041
0.068493
0
0
0
0
0
1
0
false
0
0.047619
0
0.190476
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
d85ca52402346be7dfaf6277ede793e7a996a2e4
1,176
py
Python
db_create.py
abmorton/stockhawk
b5f4d188a8f9420898f2390b01741c87a17ebbbd
[ "MIT" ]
7
2015-11-11T22:55:49.000Z
2021-06-03T17:23:59.000Z
db_create.py
abmorton/stockhawk
b5f4d188a8f9420898f2390b01741c87a17ebbbd
[ "MIT" ]
null
null
null
db_create.py
abmorton/stockhawk
b5f4d188a8f9420898f2390b01741c87a17ebbbd
[ "MIT" ]
3
2016-01-19T02:23:14.000Z
2018-08-03T12:20:07.000Z
from app import db
from models import *
import datetime

# create the db and tables
db.create_all()

# prepare data to insert
year = 1982
month = 4
day = 3
birthday = datetime.date(year, month, day)
now = datetime.datetime.now()
today = datetime.date(now.year, now.month, now.day)
yesterday = datetime.date(now.year, now.month, 13)

# insert data
adam = User("adam", "abmorton@gmail.com", "testpw", yesterday)
# db.session.add(User("admin", "admin@admin.com", "adminpw", today))
db.session.add(User(adam))
db.session.commit()

# make a Portfolio
port = Portfolio(adam.id)
db.session.add(port)
db.session.commit()

# add a stock
db.session.add(Stock("XOMA", "XOMA Corporation", "NGM", "0.9929", None, None, None, "117.74M", 1))
db.session.commit()

# get a stock instance for later use creating other records
stock = Stock.query.get(1)

# make some trades
db.session.add(Trade(stock.symbol, 1, 10, yesterday, None, None, None))
db.session.add(Trade(stock.symbol, 1.20, -5, today, None, None, None))

# make a Position
# pos = Position(port.id, )
# position = Position(1)

# insert the data requiring ForeignKeys & relationship()

# commit changes
db.session.commit()
21.381818
98
0.706633
182
1,176
4.56044
0.412088
0.108434
0.086747
0.045783
0.13494
0.13494
0.06988
0
0
0
0
0.027723
0.141156
1,176
55
99
21.381818
0.794059
0.311224
0
0.173913
0
0
0.080301
0
0
0
0
0
0
1
0
false
0
0.130435
0
0.130435
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d85d07c94c953c6ddb47057dd695b08ff1db7471
2,581
py
Python
csv_uploader/views.py
treebohotels/treebo-csv-uploader
6fad3c33e34627bf3be1365c4526ab2f7d385741
[ "MIT" ]
null
null
null
csv_uploader/views.py
treebohotels/treebo-csv-uploader
6fad3c33e34627bf3be1365c4526ab2f7d385741
[ "MIT" ]
null
null
null
csv_uploader/views.py
treebohotels/treebo-csv-uploader
6fad3c33e34627bf3be1365c4526ab2f7d385741
[ "MIT" ]
null
null
null
import logging

from django.http import HttpResponse
from django.utils.datastructures import MultiValueDictKeyError
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.renderers import TemplateHTMLRenderer

from .handler import CsvHandler
from .validator import CsvValidator
from .models import CsvJob

logger = logging.getLogger(__name__)


class CsvUploader(APIView):
    renderer_classes = (TemplateHTMLRenderer,)

    def get(self, request, *args, **kwargs):
        if not request.user.is_authenticated():
            return HttpResponse("Access Denied..", content_type='text/plain')
        return Response(data=dict(action_names=CsvValidator.available_actions(),
                                  action_json=CsvValidator.available_actions_json()),
                        template_name='csv_uploader.html')

    def post(self, request, *args, **kwargs):
        template_name = 'csv_uploader.html'
        if not request.user.is_authenticated():
            return HttpResponse("Access Denied..", content_type='text/plain')
        try:
            handler = CsvHandler(str(request.POST['action_name']), request.FILES['csv_file'])
            handler.process(request.user)
            return Response(data=dict(status_message=handler.display_message(),
                                      action_names=CsvValidator.available_actions(),
                                      action_json=CsvValidator.available_actions_json()),
                            template_name='csv_uploader.html')
        except MultiValueDictKeyError as e:
            logger.exception("Error in upload file: %s", str(e))
            return Response(data={'status_message': 'csv file missing',
                                  'action_names': CsvValidator.available_actions(),
                                  'action_json': CsvValidator.available_actions_json()},
                            template_name=template_name)
        except Exception as e:
            logger.exception("Error in upload file: %s", str(e))
            return Response(data={'status_message': str(e),
                                  'action_names': CsvValidator.available_actions(),
                                  'action_json': CsvValidator.available_actions_json()},
                            template_name=template_name)


class CsvUploaderCallback(APIView):
    def post(self, request, *args, **kwargs):
        CsvHandler.callback(request.data['job_item_id'], request.data['status'], request.data['message'][0:199])
        return Response('OK')


class CsvUploaderCleanup(APIView):
    def post(self, request, *args, **kwargs):
        CsvJob.purge()
        return Response('OK')
44.5
120
0.665633
271
2,581
6.154982
0.317343
0.100719
0.134293
0.05036
0.51259
0.496403
0.479616
0.43765
0.43765
0.43765
0
0.002012
0.229756
2,581
58
121
44.5
0.837022
0
0
0.288889
0
0
0.111541
0
0
0
0
0
0
1
0.088889
false
0
0.2
0
0.555556
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
d85d2d19f951d9db76457ffec3892cff2c16064c
2,979
py
Python
Voltron/Voltron/Entrevue/LeetCode/Array-SingleNumber.py
ernestyalumni/HrdwCCppCUDA
17ed937dea06431a4d5ca103f993ea69a6918734
[ "MIT" ]
1
2018-02-09T19:44:51.000Z
2018-02-09T19:44:51.000Z
Voltron/Voltron/Entrevue/LeetCode/Array-SingleNumber.py
ernestyalumni/HrdwCCppCUDA
17ed937dea06431a4d5ca103f993ea69a6918734
[ "MIT" ]
null
null
null
Voltron/Voltron/Entrevue/LeetCode/Array-SingleNumber.py
ernestyalumni/HrdwCCppCUDA
17ed937dea06431a4d5ca103f993ea69a6918734
[ "MIT" ]
null
null
null
""" @file Array-SingleNumber.py Log: 2020/10/13 8:31 9:07 working implementation """ example_input_1 = [2, 2, 1] example_input_2 = [4, 1, 2, 1, 2] example_input_3 = [1] example_output_1 = 1 example_output_2 = 4 example_output_3 = 1 """ Given a non-empty array of integers nums, every element appears twice except for one. Let N be total size of the array. For 0, 1, ... N - 1 (finite set), there exists I in 0, 1, ... N -1 such that a_I != a_i for all i != I. If i != I, there exists unique j != I, j != i such that a_i = a_j and a_k != a_i for all k != i, j For all i in 0, 1, ... N - 1, either a. exists unique j != i such that a_i = a_j and for all k != j, a_k != a_j, or b. a_i != a_j for all j != i. For any i, j in 0, 1, ... N - 1, i != j, either a. a_i != a_j so either A. a_i or a_j is unique or B. a_i and a_j "have other matching pairs" b. a_i == a_j and for all k != i, k != j, a_k != a_i """ # This is the base case. def find_single_number_from_3(nums3): """ Inputs nums3 - assumed to have length 3. """ top_element = nums3.pop() if top_element in nums3: if top_element == nums3[0]: return nums3[1] else: return nums3[0] else: return top_element def check_pair_from_3_nums(nums, traversed_numbers): """ Suppose unique 1 is in nums + traversed_numberes. For nums, either a. all 3 have pairs in traversed_numbers, so traversed_numbers is of size 4 b. nums contain unique number. Either A. traversed_numbers of size 2 so 2 numbers in nums has pairs in there B. traversed_numbers of size 0 and so a pair is in nums. """ if len(traversed_numbers) == 4: for num in nums: traversed_numbers.remove(num) return traversed_numbers[0] if len(traversed_numbers) == 2: for num in nums: if num not in traversed_numbers: return num if len(traversed_numbers) == 0: return find_single_number_from_3(nums) def check_pair(nums, traversed_numbers): if len(nums) == 3: return check_pair_from_3_nums(nums, traversed_numbers) pair = [] pair.append(nums.pop()) pair.append(nums.pop()) if (pair[0] == pair[1]): return check_pair(nums, traversed_numbers) if pair[0] in traversed_numbers: traversed_numbers.remove(pair[0]) else: traversed_numbers.append(pair[0]) if pair[1] in traversed_numbers: traversed_numbers.remove(pair[1]) else: traversed_numbers.append(pair[1]) return check_pair(nums, traversed_numbers) def find_single_number(nums): if len(nums) == 1: return nums[0] if len(nums) == 3: return find_single_number_from_3(nums) traversed_numbers = [] return check_pair(nums, traversed_numbers) class Solution: def singleNumber(self, nums) -> int: return 0 if __name__ == "__main__": print("\nArray-SingleNumber\n")
24.418033
79
0.631756
496
2,979
3.600806
0.209677
0.206047
0.089586
0.011198
0.322508
0.24804
0.202128
0.118141
0.019037
0
0
0.037796
0.26284
2,979
122
80
24.418033
0.775501
0.154414
0
0.272727
0
0
0.016911
0.012401
0
0
0
0
0
1
0.090909
false
0
0
0.018182
0.345455
0.018182
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
d85e74f89e6ba559f49b28f11f9fbb57d950463e
2,910
py
Python
new_h1st/my_ml_modeler.py
TgithubJ/h1st
18c8ab2ca5e3a047aea255c636d27fd66bb80ec5
[ "Apache-2.0" ]
null
null
null
new_h1st/my_ml_modeler.py
TgithubJ/h1st
18c8ab2ca5e3a047aea255c636d27fd66bb80ec5
[ "Apache-2.0" ]
null
null
null
new_h1st/my_ml_modeler.py
TgithubJ/h1st
18c8ab2ca5e3a047aea255c636d27fd66bb80ec5
[ "Apache-2.0" ]
null
null
null
from typing import Any, Dict

from h1st_ml import MLModeler
import pandas as pd
from sklearn.linear_model import LogisticRegression
from sklearn.preprocessing import StandardScaler
from sklearn.metrics import r2_score

from my_ml_model import MyMLModel


class MyMLModeler(MLModeler):
    def __init__(self):
        self.example_test_data_ratio = 0.2

    def load_data(self) -> Dict:
        df_raw = pd.read_csv('iris.csv')
        return {'df_raw': df_raw}

    def preprocess(self, data):
        self.scaler = StandardScaler()
        return self.scaler.fit_transform(data)

    def generate_training_data(self, data: Dict[str, Any]) -> Dict[str, Any]:
        df_raw = data['df_raw']
        self.targets = sorted(df_raw['species'].unique())
        self.targets_dict = {k: v for v, k in enumerate(self.targets)}
        df_raw['species'] = df_raw['species'].apply(lambda x: self.targets_dict[x])

        # Shuffle all the df_raw
        df_raw = df_raw.sample(frac=1, random_state=5).reset_index(drop=True)

        # Preprocess data
        df_raw.loc[:, 'sepal_length':'petal_width'] = self.preprocess(
            df_raw.loc[:, 'sepal_length':'petal_width'])

        # Split to training and testing data
        n = df_raw.shape[0]
        n_test = int(n * self.example_test_data_ratio)
        training_data = df_raw.iloc[n_test:, :].reset_index(drop=True)
        test_data = df_raw.iloc[:n_test, :].reset_index(drop=True)

        # Split the data to features and labels
        train_data_x = training_data.loc[:, 'sepal_length':'petal_width']
        train_data_y = training_data['species']
        test_data_x = test_data.loc[:, 'sepal_length':'petal_width']
        test_data_y = test_data['species']

        # When returning many variables, it is a good practice to give them names:
        return {
            'train_x': train_data_x,
            'train_y': train_data_y,
            'test_x': test_data_x,
            'test_y': test_data_y,
        }

    def train(self, data: Dict[str, Any]) -> Any:
        X, y = data['train_x'], data['train_y']
        model = LogisticRegression(random_state=0)
        model.fit(X, y)
        return model

    # TODO: need to check model instance type
    def evaluate(self, data: Dict, model: Any) -> Dict:
        X, y_true = data['test_x'], data['test_y']
        y_pred = model.predict(X)
        return {'r2_score': r2_score(y_true, y_pred)}

    def build(self) -> MyMLModel:
        data = self.load_data()
        training_data = self.generate_training_data(data)
        base_model = self.train(training_data)
        ml_model = MyMLModel(base_model)
        # pass all property of modeler to model
        for k, v in self.__dict__.copy().items():
            ml_model.__dict__[k] = v
        ml_model.metrics = self.evaluate(training_data, ml_model.base_model)
        return ml_model
36.835443
83
0.631271
406
2,910
4.248768
0.288177
0.046377
0.02087
0.044058
0.156522
0.107826
0.075362
0.041739
0.041739
0.041739
0
0.00463
0.257732
2,910
79
84
36.835443
0.793981
0.090034
0
0
0
0
0.078618
0
0
0
0
0.012658
0
1
0.125
false
0
0.125
0
0.375
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
d85f295a43e1700c84c02249db04af993786f746
26
py
Python
homeassistant/components/imap/__init__.py
domwillcode/home-assistant
f170c80bea70c939c098b5c88320a1c789858958
[ "Apache-2.0" ]
30,023
2016-04-13T10:17:53.000Z
2020-03-02T12:56:31.000Z
homeassistant/components/imap/__init__.py
jagadeeshvenkatesh/core
1bd982668449815fee2105478569f8e4b5670add
[ "Apache-2.0" ]
31,101
2020-03-02T13:00:16.000Z
2022-03-31T23:57:36.000Z
homeassistant/components/imap/__init__.py
jagadeeshvenkatesh/core
1bd982668449815fee2105478569f8e4b5670add
[ "Apache-2.0" ]
11,956
2016-04-13T18:42:31.000Z
2020-03-02T09:32:12.000Z
"""The imap component."""
13
25
0.615385
3
26
5.333333
1
0
0
0
0
0
0
0
0
0
0
0
0.115385
26
1
26
26
0.695652
0.730769
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
d85fa73be967336630b8bccd9bd0353e0af7dd9d
879
py
Python
test/libraryData_BulkUpdates.py
masqu3rad3/tik_manager
59821670e87a2af753a59cc70924c5f0aad8ad51
[ "BSD-3-Clause" ]
26
2019-05-05T04:52:38.000Z
2022-01-27T19:25:27.000Z
test/libraryData_BulkUpdates.py
masqu3rad3/tik_manager
59821670e87a2af753a59cc70924c5f0aad8ad51
[ "BSD-3-Clause" ]
null
null
null
test/libraryData_BulkUpdates.py
masqu3rad3/tik_manager
59821670e87a2af753a59cc70924c5f0aad8ad51
[ "BSD-3-Clause" ]
5
2020-02-14T06:43:07.000Z
2021-08-13T09:58:44.000Z
from tik_manager import assetLibrary
reload(assetLibrary)  # Python 2 builtin; under Python 3 use importlib.reload
import pprint
import time

pathList = ["E:\\backup\\_CharactersLibrary",
            "E:\\backup\\_BalikKrakerAssetLibrary",
            "E:\\backup\\_AssetLibrary",
            "M:\\Projects\\_CharactersLibrary",
            "M:\\Projects\\_BalikKrakerAssetLibrary",
            "M:\\Projects\\_AssetLibrary"]

for path in pathList:
    lib = assetLibrary.AssetLibrary(path)
    lib.scanAssets()
    for item in lib.assetsList:
        data = lib._getData(item)
        # data["sourceProject"] = "Maya(ma)"
        # data["notes"] = "N/A"
        # data["version"] = "N/A"
        # if data["Faces/Triangles"] == "Nothing counted : no polygonal object is selected./Nothing counted : no polygonal object is selected.":
        #     data["Faces/Triangles"] = "N/A"
        data["notes"] = ""
        # data["Faces/Triangles"] = data["Faces/Trianges"]
        lib._setData(item, data)
43.95
223
0.651877
96
879
5.875
0.447917
0.06383
0.095745
0.088652
0.14539
0.14539
0.14539
0
0
0
0
0
0.185438
879
19
224
46.263158
0.78771
0.341297
0
0
0
0
0.337413
0.328671
0
0
0
0
0
1
0
false
0
0.25
0
0.25
0.083333
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d860dd72da3f6f3d31741e148127e543434eba7d
4,552
py
Python
tests/pv_generation.py
Aloso/pv-simulator
daae4a00c4d7c759eedeffe39b6d60395e9a3ebf
[ "MIT" ]
null
null
null
tests/pv_generation.py
Aloso/pv-simulator
daae4a00c4d7c759eedeffe39b6d60395e9a3ebf
[ "MIT" ]
null
null
null
tests/pv_generation.py
Aloso/pv-simulator
daae4a00c4d7c759eedeffe39b6d60395e9a3ebf
[ "MIT" ]
null
null
null
#!/usr/bin/env python
import sys, os, random, unittest, itertools

sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

# pylint: disable=import-error
from libpv.pv_generation import PvGenerator, weather
from libpv.time_of_day import TimeOfDay


def generate_360_times():
    t = TimeOfDay(0)
    while True:
        yield t
        t += 240
        if t.seconds() < 240:
            break


class TestPvGeneration(unittest.TestCase):
    def testEquality(self):
        gen = PvGenerator(TimeOfDay.from_hms(8), TimeOfDay.from_hms(20), 3500)
        self.assertEqual(
            [round(gen.get_value(x)) for x in generate_360_times()],
            [
                0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                0, 0, 0,
                11, 22, 32, 43, 54, 65, 76, 86, 97, 108, 119, 130, 140, 151,
                162, 173, 184, 194, 205, 216, 229,
                304, 378, 451, 523, 595, 665, 735, 803, 871, 938, 1004, 1069,
                1134, 1197, 1260, 1322, 1383, 1443, 1502, 1560, 1618, 1674,
                1730, 1785, 1839, 1892, 1944, 1996, 2046, 2096, 2145, 2193,
                2240, 2286, 2332, 2376, 2420, 2463, 2504, 2545, 2586, 2625,
                2663, 2701, 2738, 2774, 2809, 2843, 2876, 2908, 2940, 2971,
                3000, 3029, 3058, 3085, 3111, 3137, 3161, 3185, 3208, 3230,
                3251, 3271, 3291, 3309, 3327, 3344, 3360, 3375, 3389, 3403,
                3415, 3427, 3438, 3448, 3457, 3465, 3472, 3479, 3484, 3489,
                3493, 3496, 3498, 3500, 3500, 3500, 3498, 3496, 3493, 3489,
                3484, 3479, 3472, 3465, 3457, 3448, 3438, 3427, 3415, 3403,
                3389, 3375, 3360, 3344, 3327, 3309, 3291, 3271, 3251, 3230,
                3208, 3185, 3161, 3137, 3111, 3085, 3058, 3029, 3000, 2971,
                2940, 2908, 2876, 2843, 2809, 2774, 2738, 2701, 2663, 2625,
                2586, 2545, 2504, 2463, 2420, 2376, 2332, 2286, 2240, 2193,
                2145, 2096, 2046, 1996, 1944, 1892, 1839, 1785, 1730, 1674,
                1618, 1560, 1502, 1443, 1383, 1322, 1260, 1197, 1134, 1069,
                1004, 938, 871, 803, 735, 665, 595, 523, 451, 378, 304, 229,
                216, 205, 194, 184, 173, 162, 151, 140, 130, 119, 108, 97,
                86, 76, 65, 54, 43, 32, 22, 11,
                0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
                0, 0,
            ])

    def testContinuityAndBounds(self):
        gen = PvGenerator(TimeOfDay.from_hms(8), TimeOfDay.from_hms(20), 3500)
        last = gen.get_value(TimeOfDay(0))
        for time in generate_360_times():
            power = gen.get_value(time)
            self.assertGreaterEqual(power, 0)
            self.assertLessEqual(power, 3500)
            self.assertLess(abs(power - last), 80)
            last = power


class TestWeatherGeneration(unittest.TestCase):
    def testEquality(self):
        w = weather(0.6, random.Random(4))
        self.assertEqual(
            [round(x * 10, 3) for x in itertools.islice(w, 100)],
            [
                9.201, 9.197, 9.193, 9.19, 9.186, 9.183, 9.179, 9.175, 9.172, 9.168,
                9.165, 9.161, 9.158, 9.154, 9.15, 9.147, 9.143, 9.14, 9.137, 9.133,
                9.13, 9.126, 9.123, 9.119, 9.116, 9.113, 9.109, 9.106, 9.102, 9.099,
                9.096, 9.102, 9.107, 9.113, 9.119, 9.124, 9.13, 9.136, 9.141, 9.147,
                9.152, 9.158, 9.164, 9.169, 9.175, 9.18, 9.186, 9.191, 9.197, 9.202,
                9.208, 9.213, 9.219, 9.224, 9.23, 9.235, 9.24, 9.246, 9.251, 9.257,
                9.253, 9.25, 9.247, 9.243, 9.24, 9.237, 9.234, 9.23, 9.227, 9.224,
                9.221, 9.218, 9.215, 9.211, 9.208, 9.205, 9.202, 9.199, 9.196, 9.193,
                9.19, 9.187, 9.183, 9.18, 9.177, 9.174, 9.171, 9.168, 9.165, 9.162,
                9.159, 9.157, 9.154, 9.151, 9.148, 9.145, 9.142, 9.139, 9.136, 9.133,
            ])

    def testNoiseFactor(self):
        w = weather(0.4, random.Random(3))
        for n in itertools.islice(w, 200_000):
            self.assertLessEqual(0.6, n)
            self.assertLessEqual(n, 1)


if __name__ == '__main__':
    unittest.main()
47.915789
85
0.504174
756
4,552
2.997355
0.345238
0.126214
0.186673
0.245366
0.172551
0.114298
0.114298
0.114298
0.114298
0.114298
0
0.450839
0.331942
4,552
94
86
48.425532
0.294311
0.010765
0
0.175
0
0
0.001777
0
0
0
0
0
0.0875
1
0.0625
false
0
0.0375
0
0.125
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
d8617eb30998d8220d39ad8ca6c7311751fdbf18
16,601
py
Python
tests/tests.py
ipashchenko/uvmod
5f81f9f621ccd2f83e99f22eb0c302ae8d8a218d
[ "MIT" ]
null
null
null
tests/tests.py
ipashchenko/uvmod
5f81f9f621ccd2f83e99f22eb0c302ae8d8a218d
[ "MIT" ]
5
2015-01-28T07:53:30.000Z
2015-04-16T11:21:58.000Z
tests/tests.py
ipashchenko/uvmod
5f81f9f621ccd2f83e99f22eb0c302ae8d8a218d
[ "MIT" ]
null
null
null
#!/usr/bin/env python
# -*- coding: utf-8 -*-

from __future__ import print_function
from unittest import (TestCase, skip, skipIf)

from uvmod.stats import LnLike, LS_estimates, LnPrior, LnPost, hdi_of_mcmc
from uvmod.models import Model_1d, Model_2d_isotropic, Model_2d_anisotropic

# TODO: Use ``np.random.uniform`` instead
try:
    from scipy.stats import uniform
    is_scipy = True
except ImportError:
    is_scipy = False
try:
    import emcee
    is_emcee = True
except ImportError:
    is_emcee = False
import numpy as np
import math

# TODO: Add tests for data wo uncertainties
# TODO: Add tests for not installed packages
# TODO: Fix random state to guarantee passing


class Test_1D(TestCase):
    def setUp(self):
        self.p = [2, 0.3]
        self.x = np.array([0., 0.1, 0.2, 0.4, 0.6])
        self.model_1d = Model_1d
        self.model_1d_detections = Model_1d(self.x)
        self.y = self.model_1d_detections(self.p) + np.random.normal(0, 0.1, size=5)
        self.sy = np.random.normal(0.15, 0.025, size=5)
        self.xl = np.array([0.5, 0.7])
        self.yl = np.array([0.6, 0.2])
        self.syl = np.random.normal(0.1, 0.03, size=2)
        self.p1 = np.asarray(self.p) + np.array([1., 0.])
        self.p2 = np.asarray(self.p) + np.array([-1., 0.])
        self.p3 = np.asarray(self.p) + np.array([0., 0.2])
        self.p4 = np.asarray(self.p) + np.array([0., -0.2])
        self.p0_range = [0., 10.]
        self.p1_range = [0., 2.]

    @skipIf(not is_scipy, "``scipy`` is not installed")
    def test_LnLike(self):
        lnlike = LnLike(self.x, self.y, self.model_1d, sy=self.sy,
                        x_limits=self.xl, y_limits=self.yl, sy_limits=self.syl,
                        jitter=False, outliers=False)
        lnlik0 = lnlike._lnprob[0].__call__(self.p)
        lnlik1 = lnlike._lnprob[1].__call__(self.p)
        self.assertEqual(lnlike(self.p), lnlik0 + lnlik1)
        self.assertGreater(lnlike(self.p), lnlike(self.p1))
        self.assertGreater(lnlike(self.p), lnlike(self.p2))
        self.assertGreater(lnlike(self.p), lnlike(self.p3))
        self.assertGreater(lnlike(self.p), lnlike(self.p4))

    @skipIf(not is_scipy, "``scipy`` is not installed")
    def test_LS_estimates(self):
        lsq = LS_estimates(self.x, self.y, self.model_1d, sy=self.sy)
        p, pcov = lsq.fit([1., 1.])
        delta0 = 3. * np.sqrt(pcov[0, 0])
        delta1 = 5. * np.sqrt(pcov[1, 1])
        self.assertAlmostEqual(self.p[0], p[0], delta=delta0)
        self.assertAlmostEqual(self.p[1], abs(p[1]), delta=delta1)

    @skipIf(not is_scipy, "``scipy`` is not installed")
    def test_LnPrior(self):
        lnprs = ((uniform.logpdf, self.p0_range, dict(),),
                 (uniform.logpdf, self.p1_range, dict(),),)
        lnpr = LnPrior(lnprs)
        self.assertTrue(np.isinf(lnpr([-1., 1.])))
        self.assertTrue(np.isinf(lnpr([1., -1.])))
        self.assertTrue(np.isinf(lnpr([15., 1.])))
        self.assertTrue(np.isinf(lnpr([1., 5.])))

    @skipIf(not is_scipy, "``scipy`` is not installed")
    def test_LnPost(self):
        lnprs = ((uniform.logpdf, self.p0_range, dict(),),
                 (uniform.logpdf, self.p1_range, dict(),),)
        lnpr = LnPrior(lnprs)
        lnlike = LnLike(self.x, self.y, self.model_1d, sy=self.sy,
                        x_limits=self.xl, y_limits=self.yl, sy_limits=self.syl,
                        jitter=False, outliers=False)
        lnpost = LnPost(self.x, self.y, self.model_1d, sy=self.sy,
                        x_limits=self.xl, y_limits=self.yl, sy_limits=self.syl,
                        lnpr=lnpr, jitter=False, outliers=False)
        self.assertEqual(lnpost._lnpr(self.p), lnpr(self.p))
        self.assertEqual(lnpost._lnlike(self.p), lnlike(self.p))
        self.assertGreater(lnpost(self.p), lnpost(self.p1))
        self.assertGreater(lnpost(self.p), lnpost(self.p2))
        self.assertGreater(lnpost(self.p), lnpost(self.p3))
        self.assertGreater(lnpost(self.p), lnpost(self.p4))

    @skipIf((not is_emcee) or (not is_scipy),
            "``emcee`` and/or ``scipy`` not installed")
    def test_MCMC(self):
        nwalkers = 250
        ndim = 2
        p0 = np.random.uniform(low=self.p1_range[0], high=self.p1_range[1],
                               size=(nwalkers, ndim))
        lnprs = ((uniform.logpdf, self.p0_range, dict(),),
                 (uniform.logpdf, self.p1_range, dict(),),)
        lnpr = LnPrior(lnprs)
        lnpost = LnPost(self.x, self.y, self.model_1d, sy=self.sy,
                        x_limits=self.xl, y_limits=self.yl, sy_limits=self.syl,
                        lnpr=lnpr, jitter=False, outliers=False)
        sampler = emcee.EnsembleSampler(nwalkers, ndim, lnpost)
        pos, prob, state = sampler.run_mcmc(p0, 250)
        sampler.reset()
        sampler.run_mcmc(pos, 500)
        sample_vec0 = sampler.flatchain[::10, 0]
        sample_vec1 = sampler.flatchain[::10, 1]
        p0_hdi_min, p0_hdi_max = hdi_of_mcmc(sample_vec0)
        p1_hdi_min, p1_hdi_max = hdi_of_mcmc(sample_vec1)
        self.assertTrue((p0_hdi_min < self.p[0] < p0_hdi_max))
        self.assertTrue((p1_hdi_min < self.p[1] < p1_hdi_max))


class Test_2D_isoptopic(TestCase):
    def setUp(self):
        np.random.seed(1)
        self.p = [2, 0.3]
        self.x1 = np.random.uniform(low=-1, high=1, size=10)
        self.x2 = np.random.uniform(low=-1, high=1, size=10)
        self.xx = np.column_stack((self.x1, self.x2))
        self.model_2d = Model_2d_isotropic
        self.model_2d_detections = Model_2d_isotropic(self.xx)
        self.y = self.model_2d_detections(self.p) + np.random.normal(0, 0.1, size=10)
        self.sy = np.random.normal(0.15, 0.025, size=10)
        self.x1l = np.hstack((np.random.uniform(low=-1, high=-0.5, size=2),
                              np.random.uniform(low=0.5, high=1, size=2),))
        self.x2l = np.hstack((np.random.uniform(low=-1, high=-0.5, size=2),
                              np.random.uniform(low=0.5, high=1, size=2),))
        self.xxl = np.column_stack((self.x1l, self.x2l))
        self.model_2d_limits = Model_2d_isotropic(self.xxl)
        self.yl = self.model_2d_limits(self.p) + abs(np.random.normal(0, 0.1, size=4))
        self.syl = np.random.normal(0.1, 0.03, size=4)
        self.p1 = np.asarray(self.p) + np.array([1., 0.])
        self.p2 = np.asarray(self.p) + np.array([-1., 0.])
        self.p3 = np.asarray(self.p) + np.array([0., 0.2])
        self.p4 = np.asarray(self.p) + np.array([0., -0.2])
        self.p0_range = [0., 10.]
        self.p1_range = [0., 2.]

    @skipIf(not is_scipy, "``scipy`` is not installed")
    def test_LnLike(self):
        lnlike = LnLike(self.xx, self.y, self.model_2d, sy=self.sy,
                        x_limits=self.xxl, y_limits=self.yl, sy_limits=self.syl,
                        jitter=False, outliers=False)
        lnlik0 = lnlike._lnprob[0].__call__(self.p)
        lnlik1 = lnlike._lnprob[1].__call__(self.p)
        self.assertEqual(lnlike(self.p), lnlik0 + lnlik1)
        self.assertGreater(lnlike(self.p), lnlike(self.p1))
        self.assertGreater(lnlike(self.p), lnlike(self.p2))
        self.assertGreater(lnlike(self.p), lnlike(self.p3))
        self.assertGreater(lnlike(self.p), lnlike(self.p4))

    @skipIf(not is_scipy, "``scipy`` is not installed")
    def test_LS_estimates(self):
        lsq = LS_estimates(self.xx, self.y, self.model_2d, sy=self.sy)
        p, pcov = lsq.fit([1., 1.])
        delta0 = 3. * np.sqrt(pcov[0, 0])
        delta1 = 5. * np.sqrt(pcov[1, 1])
        self.assertAlmostEqual(self.p[0], p[0], delta=delta0)
        # FIXME: use variance as parameter so p[1] > 0
        self.assertAlmostEqual(self.p[1], abs(p[1]), delta=delta1)

    @skipIf(not is_scipy, "``scipy`` is not installed")
    def test_LnPost(self):
        lnprs = ((uniform.logpdf, self.p0_range, dict(),),
                 (uniform.logpdf, self.p1_range, dict(),),)
        lnpr = LnPrior(lnprs)
        lnlike = LnLike(self.xx, self.y, self.model_2d, sy=self.sy,
                        x_limits=self.xxl, y_limits=self.yl, sy_limits=self.syl,
                        jitter=False, outliers=False)
        lnpost = LnPost(self.xx, self.y, self.model_2d, sy=self.sy,
                        x_limits=self.xxl, y_limits=self.yl, sy_limits=self.syl,
                        lnpr=lnpr, jitter=False, outliers=False)
        self.assertEqual(lnpost._lnpr(self.p), lnpr(self.p))
        self.assertEqual(lnpost._lnlike(self.p), lnlike(self.p))
        self.assertGreater(lnpost(self.p), lnpost(self.p1))
        self.assertGreater(lnpost(self.p), lnpost(self.p2))
        self.assertGreater(lnpost(self.p), lnpost(self.p3))
        self.assertGreater(lnpost(self.p), lnpost(self.p4))

    @skipIf((not is_emcee) or (not is_scipy),
            "``emcee`` and/or ``scipy`` not installed")
    def test_MCMC(self):
        nwalkers = 250
        ndim = 2
        p0 = np.random.uniform(low=self.p1_range[0], high=self.p1_range[1],
                               size=(nwalkers, ndim))
        lnprs = ((uniform.logpdf, self.p0_range, dict(),),
                 (uniform.logpdf, self.p1_range, dict(),),)
        lnpr = LnPrior(lnprs)
        lnpost = LnPost(self.xx, self.y, self.model_2d, sy=self.sy,
                        x_limits=self.xxl, y_limits=self.yl, sy_limits=self.syl,
                        lnpr=lnpr, jitter=False, outliers=False)
        sampler = emcee.EnsembleSampler(nwalkers, ndim, lnpost)
        pos, prob, state = sampler.run_mcmc(p0, 250)
        sampler.reset()
        sampler.run_mcmc(pos, 500)
        sample_vec0 = sampler.flatchain[::10, 0]
        sample_vec1 = sampler.flatchain[::10, 1]
        p0_hdi_min, p0_hdi_max = hdi_of_mcmc(sample_vec0)
        p1_hdi_min, p1_hdi_max = hdi_of_mcmc(sample_vec1)
        self.assertTrue((p0_hdi_min < self.p[0] < p0_hdi_max))
        self.assertTrue((p1_hdi_min < self.p[1] < p1_hdi_max))


class Test_2D_anisoptopic(TestCase):
    def setUp(self):
        self.p = [2, 0.7, 0.3, 1.]
        self.x1 = np.random.uniform(low=-1, high=1, size=10)
        self.x2 = np.random.uniform(low=-1, high=1, size=10)
        self.xx = np.column_stack((self.x1, self.x2))
        self.model_2d_anisotropic = Model_2d_anisotropic
        self.model_2d_detections = Model_2d_anisotropic(self.xx)
        self.y = self.model_2d_detections(self.p) + np.random.normal(0, 0.05, size=10)
        self.sy = np.random.normal(0.15, 0.025, size=10)
        self.x1l = np.hstack((np.random.uniform(low=-1, high=-0.5, size=2),
                              np.random.uniform(low=0.5, high=1, size=2),))
        self.x2l = np.hstack((np.random.uniform(low=-1, high=-0.5, size=2),
                              np.random.uniform(low=0.5, high=1, size=2),))
        self.xxl = np.column_stack((self.x1l, self.x2l))
        self.model_2d_limits = Model_2d_anisotropic(self.xxl)
        self.yl = self.model_2d_limits(self.p) + abs(np.random.normal(0, 0.05, size=4))
        self.syl = np.random.normal(0.1, 0.03, size=4)
        self.p1 = np.asarray(self.p) + np.array([1., 0., 0., 0.])
        self.p2 = np.asarray(self.p) + np.array([-1., 0., 0., 0.])
        self.p3 = np.asarray(self.p) + np.array([0., 0.2, 0., 0.])
        self.p4 = np.asarray(self.p) + np.array([0., -0.2, 0., 0.])
        self.p5 = np.asarray(self.p) + np.array([0., 0., 0.4, 0.])
        self.p6 = np.asarray(self.p) + np.array([0., 0., -0.4, 0.])
        self.p7 = np.asarray(self.p) + np.array([0., 0., 0., math.pi / 2.])
        self.p8 = np.asarray(self.p) + np.array([0., 0., 0., -math.pi / 2.])
        self.p0_range = [0., 10.]
        self.p1_range = [0., 2.]
        self.p2_range = [0., 1.]
        self.p3_range = [0., math.pi]

    @skipIf(not is_scipy, "``scipy`` is not installed")
    def test_LS_estimates(self):
        lsq = LS_estimates(self.xx, self.y, self.model_2d_anisotropic, sy=self.sy)
        p, pcov = lsq.fit([1., 0.5, 0.5, 1.])
        delta0 = 3. * np.sqrt(pcov[0, 0])
        delta1 = 5. * np.sqrt(pcov[1, 1])
        delta2 = 5. * np.sqrt(pcov[2, 2])
        delta3 = 5. * np.sqrt(pcov[3, 3])
        self.assertAlmostEqual(self.p[0], p[0], delta=delta0)
        # FIXME: use variance as parameter so p[1] > 0
        self.assertAlmostEqual(self.p[1], abs(p[1]), delta=delta1)
        self.assertAlmostEqual(self.p[2], p[2], delta=delta2)
        self.assertAlmostEqual(self.p[3], p[3], delta=delta3)

    def test_LnLike(self):
        lnlike = LnLike(self.xx, self.y, self.model_2d_anisotropic, sy=self.sy,
                        x_limits=self.xxl, y_limits=self.yl, sy_limits=self.syl,
                        jitter=False, outliers=False)
        lnlik0 = lnlike._lnprob[0].__call__(self.p)
        lnlik1 = lnlike._lnprob[1].__call__(self.p)
        self.assertEqual(lnlike(self.p), lnlik0 + lnlik1)
        self.assertGreater(lnlike(self.p), lnlike(self.p1))
        self.assertGreater(lnlike(self.p), lnlike(self.p2))
        self.assertGreater(lnlike(self.p), lnlike(self.p3))
        self.assertGreater(lnlike(self.p), lnlike(self.p4))
        self.assertGreater(lnlike(self.p), lnlike(self.p5))
        self.assertGreater(lnlike(self.p), lnlike(self.p6))
        self.assertGreater(lnlike(self.p), lnlike(self.p7))
        self.assertGreater(lnlike(self.p), lnlike(self.p8))

    @skipIf(not is_scipy, "``scipy`` is not installed")
    def test_LnPost(self):
        lnprs = ((uniform.logpdf, self.p0_range, dict(),),
                 (uniform.logpdf, self.p1_range, dict(),),
                 (uniform.logpdf, self.p2_range, dict(),),
                 (uniform.logpdf, self.p3_range, dict(),),)
        lnpr = LnPrior(lnprs)
        lnlike = LnLike(self.xx, self.y, self.model_2d_anisotropic, sy=self.sy,
                        x_limits=self.xxl, y_limits=self.yl, sy_limits=self.syl,
                        jitter=False, outliers=False)
        lnpost = LnPost(self.xx, self.y, self.model_2d_anisotropic, sy=self.sy,
                        x_limits=self.xxl, y_limits=self.yl, sy_limits=self.syl,
                        lnpr=lnpr, jitter=False, outliers=False)
        self.assertEqual(lnpost._lnpr(self.p), lnpr(self.p))
        self.assertEqual(lnpost._lnlike(self.p), lnlike(self.p))
        self.assertGreater(lnpost(self.p), lnpost(self.p1))
        self.assertGreater(lnpost(self.p), lnpost(self.p2))
        self.assertGreater(lnpost(self.p), lnpost(self.p3))
        self.assertGreater(lnpost(self.p), lnpost(self.p4))

    @skipIf((not is_emcee) or (not is_scipy),
            "``emcee`` and/or ``scipy`` not installed")
    def test_MCMC(self):
        nwalkers = 250
        ndim = 4
        p0 = np.random.uniform(low=self.p1_range[0], high=self.p1_range[1],
                               size=(nwalkers, ndim))
        lnprs = ((uniform.logpdf, self.p0_range, dict(),),
                 (uniform.logpdf, self.p1_range, dict(),),
                 (uniform.logpdf, self.p2_range, dict(),),
                 (uniform.logpdf, self.p3_range, dict(),),)
        lnpr = LnPrior(lnprs)
        lnpost = LnPost(self.xx, self.y, self.model_2d_anisotropic, sy=self.sy,
                        x_limits=self.xxl, y_limits=self.yl, sy_limits=self.syl,
                        lnpr=lnpr, jitter=False, outliers=False)
        sampler = emcee.EnsembleSampler(nwalkers, ndim, lnpost)
        pos, prob, state = sampler.run_mcmc(p0, 250)
        sampler.reset()
        sampler.run_mcmc(pos, 500)
        sample_vec0 = sampler.flatchain[::10, 0]
        sample_vec1 = sampler.flatchain[::10, 1]
        sample_vec2 = sampler.flatchain[::10, 2]
        sample_vec3 = sampler.flatchain[::10, 3]
        p0_hdi_min, p0_hdi_max = hdi_of_mcmc(sample_vec0)
        p1_hdi_min, p1_hdi_max = hdi_of_mcmc(sample_vec1)
        p2_hdi_min, p2_hdi_max = hdi_of_mcmc(sample_vec2)
        p3_hdi_min, p3_hdi_max = hdi_of_mcmc(sample_vec3)
        self.assertTrue((p0_hdi_min < self.p[0] < p0_hdi_max))
        self.assertTrue((p1_hdi_min < self.p[1] < p1_hdi_max))
        self.assertTrue((p2_hdi_min < self.p[2] < p2_hdi_max))
        self.assertTrue((p3_hdi_min < self.p[3] < p3_hdi_max))
49.555224
80
0.575267
2,375
16,601
3.884632
0.070316
0.048233
0.029807
0.03501
0.882723
0.882723
0.870692
0.848472
0.841318
0.841318
0
0.047336
0.269562
16,601
334
81
49.703593
0.713508
0.01789
0
0.763333
0
0
0.021907
0
0
0
0
0.002994
0.19
1
0.053333
false
0
0.033333
0
0.096667
0.003333
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
d862604d092bad15f0b361b2e53da5ab2a37c8ae
942
py
Python
eegpy/formats/tests/TestLoc3dMarker.py
thorstenkranz/eegpy
0f9461456999874abbb774896ca832eb27740a9d
[ "BSD-2-Clause-FreeBSD" ]
10
2015-05-12T10:42:51.000Z
2021-07-20T02:08:03.000Z
eegpy/formats/tests/TestLoc3dMarker.py
thorstenkranz/eegpy
0f9461456999874abbb774896ca832eb27740a9d
[ "BSD-2-Clause-FreeBSD" ]
2
2015-11-19T11:36:30.000Z
2018-03-21T05:00:09.000Z
eegpy/formats/tests/TestLoc3dMarker.py
thorstenkranz/eegpy
0f9461456999874abbb774896ca832eb27740a9d
[ "BSD-2-Clause-FreeBSD" ]
2
2016-09-21T22:41:34.000Z
2019-01-28T13:55:19.000Z
import os
import numpy as np
import time
from numpy.testing import (assert_array_almost_equal, assert_array_equal)
from nose.tools import assert_true, assert_equal, assert_raises, raises
from tempfile import mktemp

from eegpy.formats.loc3dmarker import Loc3dMarkers

test_data = """A,-23.58,-18.00,-20.74,5.0,0.0,0.0,1.0
B,-25.50,-18.00,25.94,5.0,0.0,0.0,1.0
C,-57.27,-18.00,-1.49,5.0,0.0,0.0,1.0
D,-9.14,-18.00,59.63,5.0,0.0,0.0,1.0"""

TMP_FN = None


def setup():
    global TMP_FN
    TMP_FN = mktemp()
    with open(TMP_FN, "w") as fh:
        fh.write(test_data)


def teardown():
    if TMP_FN is not None and os.path.exists(TMP_FN):
        os.unlink(TMP_FN)


def test_load_marker():
    markers = Loc3dMarkers(TMP_FN)
    assert_equal(4, markers.count)
    assert_equal("A", markers.labels[0])
    assert_equal(5.0, markers.sizes[0])

#@raises(ValueError)
#def test_nextpow2_negative_x():
#    nextpow2(-1)
24.789474
71
0.673036
177
942
3.435028
0.435028
0.052632
0.059211
0.052632
0.052632
0.052632
0.052632
0.052632
0
0
0
0.113956
0.170913
942
37
72
25.459459
0.664533
0.070064
0
0
0
0.153846
0.175459
0.169725
0
0
0
0
0.230769
0
null
null
0
0.269231
null
null
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
d862e5191af1e26ac32d9cdf7c011969df1241d6
997
py
Python
video_reader.py
evgenevolkov/Automated-car-tracker-and-plates-reader
5cee11b654bb8cfd20d081198af43b56811d2107
[ "MIT" ]
3
2020-10-15T14:32:36.000Z
2022-03-08T20:56:58.000Z
video_reader.py
evgenevolkov/Automated-car-tracker-and-plates-reader
5cee11b654bb8cfd20d081198af43b56811d2107
[ "MIT" ]
2
2022-02-09T23:51:20.000Z
2022-02-10T02:25:10.000Z
video_reader.py
evgenevolkov/Automated-car-tracker-and-plates-reader
5cee11b654bb8cfd20d081198af43b56811d2107
[ "MIT" ]
2
2021-04-07T11:56:20.000Z
2022-01-28T22:25:36.000Z
# import necessary packages
import cv2
import config

DEBUG = config.DEBUG


class Reader:
    def __init__(self, source):
        if DEBUG:
            print('[INFO, reader]: reader module loaded')
        # if source:
        self.vs = None
        self.set_source(source)
        # else:
        #     print('[INFO, reader]: video source not defined, using camera')
        #     self.vs = cv2.VideoCapture(0)
        self.start_frame_number = 0

    def set_start_frame_no(self, frame_no):
        self.vs.set(cv2.CAP_PROP_POS_FRAMES, frame_no)

    def set_source(self, source):
        # use the source file if given, otherwise fall back to the camera
        if source:
            self.vs = cv2.VideoCapture(source)
            if DEBUG:
                print('[INFO, reader]: video file ' + source + ' successfully opened')
        else:
            print('[ERR, reader]: no source file provided, using camera as source')
            # the message above promises a camera fallback; the original never opened it,
            # leaving self.vs as None and crashing in read()
            self.vs = cv2.VideoCapture(0)

    def read(self):
        ret, frame = self.vs.read()
        return ret, frame
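A hypothetical read loop over the Reader above ('video.mp4' is an example path; requires opencv-python and a config module that defines DEBUG):

reader = Reader('video.mp4')
reader.set_start_frame_no(100)   # skip ahead before reading
while True:
    ret, frame = reader.read()
    if not ret:                  # end of stream or decode failure
        break
    # ... process frame here ...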
29.323529
84
0.594784
123
997
4.682927
0.398374
0.052083
0.078125
0.0625
0.097222
0.097222
0
0
0
0
0
0.008683
0.306921
997
34
85
29.323529
0.824891
0.194584
0
0.090909
0
0
0.179423
0
0
0
0
0
0
1
0.181818
false
0
0.090909
0
0.363636
0.136364
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d863b2417d20fc0b71005243f57ab636233f418d
2,605
py
Python
Harry_Poter_Cloak/harry_potter_cloak.py
SusovanGithub/SusovanGithub-OpenCV_projects
bff292a976e0e48c8b4094607878133e70395029
[ "MIT" ]
1
2021-05-18T15:49:54.000Z
2021-05-18T15:49:54.000Z
Harry_Poter_Cloak/harry_potter_cloak.py
SusovanGithub/SusovanGithub-OpenCV_projects
bff292a976e0e48c8b4094607878133e70395029
[ "MIT" ]
null
null
null
Harry_Poter_Cloak/harry_potter_cloak.py
SusovanGithub/SusovanGithub-OpenCV_projects
bff292a976e0e48c8b4094607878133e70395029
[ "MIT" ]
null
null
null
import cv2
import numpy as np


# no-op callback for the trackbars
def empty(a):
    pass


# * creating the Window
windowName = 'Color Detection in HSV Space'  # Window Name
cv2.namedWindow(windowName)                  # Window Creation

# * Adding the Track pad
cv2.createTrackbar('HUE min', windowName, 0, 179, empty)
cv2.createTrackbar('HUE max', windowName, 179, 179, empty)
cv2.createTrackbar('SAT min', windowName, 0, 255, empty)
cv2.createTrackbar('SAT max', windowName, 255, 255, empty)
cv2.createTrackbar('Value min', windowName, 0, 255, empty)
cv2.createTrackbar('Value max', windowName, 255, 255, empty)

# * Creating the Webcam Instance
cam = cv2.VideoCapture(0)
while True:
    cv2.waitKey(1000)
    isTrue, initial_frame = cam.read()
    if isTrue:
        break

# * Start Video Rolling
while True:
    isTrue, frame = cam.read()  # Reading the Frames

    # * Converting the frame to the HSV color space
    framehsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)

    # * Getting the track bar Values
    h_min = cv2.getTrackbarPos('HUE min', windowName)
    h_max = cv2.getTrackbarPos('HUE max', windowName)
    s_min = cv2.getTrackbarPos('SAT min', windowName)
    s_max = cv2.getTrackbarPos('SAT max', windowName)
    v_min = cv2.getTrackbarPos('Value min', windowName)
    v_max = cv2.getTrackbarPos('Value max', windowName)

    # creating the lower and upper range
    lower = np.array([h_min, s_min, v_min])
    upper = np.array([h_max, s_max, v_max])

    # creating the mask
    mask = cv2.inRange(framehsv, lower, upper)
    mask = cv2.medianBlur(mask, 3)
    # the original `mask_inv = 255 + mask` overflowed uint8 (255 + 255 wraps to 254);
    # bitwise_not gives the intended clean 0/255 inverse
    mask_inv = cv2.bitwise_not(mask)
    kernel = np.ones((3, 3), np.uint8)
    mask = cv2.dilate(mask, kernel, iterations=5)  # `iterations` must be passed by keyword

    # creating blanket area color black: zero out the cloak pixels in the live frame
    b = frame[:, :, 0]
    g = frame[:, :, 1]
    r = frame[:, :, 2]
    b = cv2.bitwise_and(b, mask_inv)
    g = cv2.bitwise_and(g, mask_inv)
    r = cv2.bitwise_and(r, mask_inv)
    black_blanket_frame = cv2.merge([b, g, r])

    # cutting blanket area from initial frame
    b = initial_frame[:, :, 0]
    g = initial_frame[:, :, 1]
    r = initial_frame[:, :, 2]
    b = cv2.bitwise_and(b, mask)
    g = cv2.bitwise_and(g, mask)
    r = cv2.bitwise_and(r, mask)
    initial_blanket_frame = cv2.merge([b, g, r])

    # result output
    result = cv2.bitwise_or(black_blanket_frame, initial_blanket_frame)

    # stacking the output
    stackimgs = np.hstack([frame, result])

    # * Display
    cv2.imshow(windowName, stackimgs)

    # * Creating the Exit Pole (Esc quits)
    if cv2.waitKey(1) & 0xFF == 27:
        break

cam.release()            # Releasing the instance
cv2.destroyAllWindows()  # Destroying the windows
28.010753
70
0.664491
365
2,605
4.643836
0.30411
0.041298
0.046018
0.044248
0.19174
0.147493
0.102655
0.029499
0
0
0
0.042418
0.212668
2,605
93
71
28.010753
0.784008
0.187716
0
0.072727
0
0
0.057252
0
0
0
0.001908
0
0
1
0.018182
false
0.018182
0.036364
0
0.054545
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d863c07896bcb5f1642fe8d4a4e1720169611bc3
759
py
Python
qset_core/configuration/environment.py
adragolov/qset-core
ca2beb9d1a530b75f8f93194649c9d9c3e8d6ac1
[ "MIT" ]
null
null
null
qset_core/configuration/environment.py
adragolov/qset-core
ca2beb9d1a530b75f8f93194649c9d9c3e8d6ac1
[ "MIT" ]
null
null
null
qset_core/configuration/environment.py
adragolov/qset-core
ca2beb9d1a530b75f8f93194649c9d9c3e8d6ac1
[ "MIT" ]
null
null
null
import os
from threading import RLock

ENVIRONMENT_VAR_NAME: str = "QSET_ENVIRONMENT"
ENVIRONMENT_NAME_DEFAULT: str = 'Development'
ENVIRONMENT_NAME_PRODUCTION: str = 'Production'


class Environment:
    __Default = None
    __Lock = RLock()

    def __init__(self, environment_name: str = None):
        self._environment_name: str = (
            environment_name
            or os.getenv(ENVIRONMENT_VAR_NAME, ENVIRONMENT_NAME_DEFAULT)
        ).strip()

    @property
    def environment_name(self) -> str:
        return self._environment_name

    @classmethod
    def get_default(cls):
        # double-checked locking: the RLock is only taken on first initialization
        if cls.__Default is None:
            with cls.__Lock:
                if cls.__Default is None:
                    cls.__Default = Environment()
        return cls.__Default
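A minimal usage sketch of the lazily created singleton above (the environment value is an example; it defaults to 'Development' when the variable is unset):

import os

os.environ['QSET_ENVIRONMENT'] = 'Production'

env = Environment.get_default()
print(env.environment_name)              # -> 'Production'
assert Environment.get_default() is env  # every caller shares one instance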
26.172414
118
0.670619
85
759
5.552941
0.352941
0.254237
0.120763
0.09322
0.076271
0
0
0
0
0
0
0
0.252964
759
28
119
27.107143
0.832452
0
0
0.095238
0
0
0.048748
0
0
0
0
0
0
1
0.142857
false
0
0.095238
0.047619
0.47619
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
d8644fc985adc50f63489cffe3bfe8417550597e
5,575
py
Python
autosrc.py
pwnwikiorg/AutoSRC
4cee92b2ae0e4f024059840a0b84d49f5e125e94
[ "MIT" ]
44
2021-07-12T05:45:47.000Z
2021-09-24T13:49:39.000Z
autosrc.py
mama2100/AutoSRC
4cee92b2ae0e4f024059840a0b84d49f5e125e94
[ "MIT" ]
null
null
null
autosrc.py
mama2100/AutoSRC
4cee92b2ae0e4f024059840a0b84d49f5e125e94
[ "MIT" ]
15
2021-07-12T05:48:25.000Z
2021-09-10T07:56:55.000Z
#!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import subprocess
import requests
import argparse
import base64
import sys
import json
import codecs


def dec_data(byte_data: bytes):
    try:
        return byte_data.decode('UTF-8')
    except UnicodeDecodeError:
        return byte_data.decode('GB18030')


def get_files(path):
    all_files = []
    for root, dirs, files in os.walk(path):
        all_files = files
    return all_files


def automation():
    get_payload_dir = get_files("./payload/")
    get_result_dir = get_files("./fofa_file/")
    for i in get_payload_dir:
        print("\033[1;32m ================================================================\033[0m")
        print("\033[1;32m Starting %s vulnerability check\033[0m" % (i))
        print("\033[1;32m Checking, please wait......\033[0m")
        print("\033[1;32m ================================================================\033[0m")
        for j in get_result_dir:
            if j == i + ".txt":
                p = subprocess.Popen('python3 "./payload/%s" -f "./fofa_file/%s"' % (i, j),
                                     shell=True, stdout=subprocess.PIPE,
                                     stderr=subprocess.PIPE, close_fds=True)
                while p.poll() is None:
                    line = p.stdout.readline().strip()
                    if line:
                        line = dec_data(line)
                        # search the payload output for the Chinese negative marker '不' ("not");
                        # -1 (absent) means the check reported success
                        x = line.find('不', 0, len(line))
                        if x == -1:
                            # misplaced parenthesis below is in the original; the inner replaces
                            # chain on the literal " " and are no-ops, so this strips color codes
                            result = line.replace(
                                "\033[1;36m", "").replace("\033[0m", " ").replace("\033[1;32m", " ").replace(
                                "\033[0m", " ".replace("\033[36m[o] ", " ").replace("\033[0m", " "))
                            print(result)
                            f = open("./results/" + i + "_OK.txt", 'a', encoding='utf-8')
                            f.write(result + "\n")


def banner():
    print("""
\033[1;36m ___ \033[0m
\033[1;36m ,--.'|_ \033[0m
\033[1;36m ,--, | | :,' ,---. __ ,-. \033[0m
\033[1;36m ,'_ /| : : ' : ' ,'\ .--.--. ,' ,'/ /| \033[0m
\033[1;36m ,--.--. .--. | | :.;__,' / / / | / / ' ' | |' | ,---. \033[0m
\033[1;36m / \ ,'_ /| : . || | | . ; ,. :| : /`./ | | ,'/ \ \033[0m
\033[1;36m .--. .-. | | ' | | . .:__,'| : ' | |: :| : ;_ ' : / / / ' \033[0m
\033[1;36m \__\/: . . | | ' | | | ' : |__' | .; : \ \ `. | | ' . ' / \033[0m
\033[1;36m ," .--.; | : | : ; ; | | | '.'| : | `----. \; : | ' ; :__ \033[0m
\033[1;36m / / ,. | ' : `--' \ ; : ;\ \ / / /`--' /| , ; ' | '.'| \033[0m
\033[1;36m; : .' \: , .-./ | , / `----' '--'. / ---' | : : \033[0m
\033[1;36m| , .-./ `--`----' ---`-' `--'---' \ \ / \033[0m
\033[1;36m `--`---' `----' \033[0m
""")
    print('\033[1;36m Tool usage\033[0m')
    print('\033[1;36m python3 autosrc.py -e/--email email -k/--key key\033[0m')
    print('\033[1;36m python3 autosrc.py -h/--help\033[0m')


if len(sys.argv) == 1:
    banner()
    sys.exit()

parser = argparse.ArgumentParser(description='autosrcfofaapi help')
parser.add_argument('-e', '--email', help='Please Input a email!', default='')
parser.add_argument('-k', '--key', help='Please Input a key!', default='')
args = parser.parse_args()
email = args.email
key = args.key

url = "https://fofa.so/api/v1/info/my?email=" + email + "&key=" + key
header = {
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.93 Safari/537.36",
    "Content-Type": "application/x-www-form-urlencoded"
}
response = requests.get(url, headers=header)
if 'errmsg' not in response.text:
    print("\033[1;32memail and key are both valid\033[0m")
    get_payload_dir = get_files("./payload/")
    print(get_payload_dir)
    for i in get_payload_dir:
        f = codecs.open("./payload/" + i, mode='r', encoding='utf-8')
        line = f.readline()
        sentence = line.strip("#")
        print(sentence)
        print("\033[1;36mfofa query >>>\033[0m" + sentence)
        sentence = base64.b64encode(sentence.encode('utf-8')).decode("utf-8")
        url = "https://fofa.so/api/v1/search/all?email=" + email + "&key=" + key + "&qbase64=" + sentence
        response = requests.get(url, headers=header)
        if 'errmsg' not in response.text:
            print("\033[1;36mSaved to \033[0m\033[1;32mthe fofa_file directory\033[0m")
            r1 = json.loads(response.text)
            for k in r1['results']:
                s = k[0]
                print(s)
                f = open("./fofa_file/" + i + ".txt", 'a', encoding='utf-8')
                f.write(s + "\n")
        else:
            print("\033[1;31minvalid fofa query\033[0m")
else:
    print("\033[1;31memail or key is invalid\033[0m")

print("\033[1;34m[INFO]\033[0m Success")
print("\033[1;32m ================================================================\033[0m")
print("\033[1;32m FOFA collection finished; starting vulnerability checks\033[0m")
print("\033[1;32m ================================================================\033[0m")
automation()
45.696721
136
0.411121
581
5,575
3.851979
0.292599
0.073727
0.068365
0.058088
0.316354
0.290438
0.226095
0.206434
0.186774
0.157283
0
0.106799
0.348341
5,575
121
137
46.07438
0.509221
0.006816
0
0.132075
0
0.09434
0.49372
0.087551
0
0
0
0
0
1
0.037736
false
0
0.075472
0
0.141509
0.198113
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d86510c82b66f95dc50fd62936d72f29aeaa9999
360
py
Python
pirates/creature/DistributedCreatureAI.py
Willy5s/Pirates-Online-Rewritten
7434cf98d9b7c837d57c181e5dabd02ddf98acb7
[ "BSD-3-Clause" ]
81
2018-04-08T18:14:24.000Z
2022-01-11T07:22:15.000Z
pirates/creature/DistributedCreatureAI.py
Willy5s/Pirates-Online-Rewritten
7434cf98d9b7c837d57c181e5dabd02ddf98acb7
[ "BSD-3-Clause" ]
4
2018-09-13T20:41:22.000Z
2022-01-08T06:57:00.000Z
pirates/creature/DistributedCreatureAI.py
Willy5s/Pirates-Online-Rewritten
7434cf98d9b7c837d57c181e5dabd02ddf98acb7
[ "BSD-3-Clause" ]
26
2018-05-26T12:49:27.000Z
2021-09-11T09:11:59.000Z
from direct.directnotify import DirectNotifyGlobal
from pirates.battle.DistributedBattleNPCAI import *
from pirates.pirate import AvatarTypes


class DistributedCreatureAI(DistributedBattleNPCAI):
    notify = DirectNotifyGlobal.directNotify.newCategory('DistributedCreatureAI')

    def __init__(self, air):
        DistributedBattleNPCAI.__init__(self, air)
32.727273
81
0.822222
31
360
9.290323
0.580645
0.076389
0.076389
0
0
0
0
0
0
0
0
0
0.113889
360
10
82
36
0.902821
0
0
0
0
0
0.058496
0.058496
0
0
0
0
0
1
0.142857
false
0
0.428571
0
0.857143
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
2
d8681174b4934ada560118e7c8363f5ba24fcfa0
4,263
py
Python
gym-kinova-gripper/Old Code/stuff.py
OSUrobotics/KinovaGrasping
f22af60d3683fdc4ffecf49ccff179fbc6750748
[ "Linux-OpenIB" ]
16
2020-05-16T00:40:31.000Z
2022-02-22T11:59:03.000Z
gym-kinova-gripper/Old Code/stuff.py
OSUrobotics/KinovaGrasping
f22af60d3683fdc4ffecf49ccff179fbc6750748
[ "Linux-OpenIB" ]
9
2020-08-10T08:33:55.000Z
2021-08-17T02:10:50.000Z
gym-kinova-gripper/Old Code/stuff.py
OSUrobotics/KinovaGrasping
f22af60d3683fdc4ffecf49ccff179fbc6750748
[ "Linux-OpenIB" ]
7
2020-07-27T09:45:05.000Z
2021-06-21T21:42:50.000Z
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Dec 13 09:59:13 2019

@author: orochi
"""
import numpy as np
import csv
from classifier_network import LinearNetwork
from classifier_network import ReducedLinearNetwork
import torch
import torch.nn as nn
import torch.nn.functional as F
import matplotlib.pyplot as plt


def calc_velocity(start, end):
    delta_t = 0.05
    #print(type(start),type(end))
    velocity = (end - start) / delta_t
    return velocity


def normalize_vector(vector):
    #print(vector-np.min(vector))
    #print(np.max(vector)-np.min(vector))
    if (np.max(vector) - np.min(vector)) == 0:
        n_vector = np.ones(np.shape(vector)) * 0.5
    else:
        n_vector = (vector - np.min(vector)) / (np.max(vector) - np.min(vector))
    return n_vector


filenames = ['Classifier_Data_Big_Cube.csv', 'Classifier_Data_Med_Cube.csv', 'Classifier_Data_Small_Cube.csv',
             'Classifier_Data_Big_Cylinder.csv', 'Classifier_Data_Med_Cylinder.csv', 'Classifier_Data_Small_Cylinder.csv']
a = []
column_names = []

# load in the data to one massive matrix called a
for k in range(6):
    with open('Classifier_Data/' + filenames[k]) as csv_file:
        csv_reader = csv.reader(csv_file, delimiter=',')
        line_count = 0
        for row in csv_reader:
            if line_count == 0:
                column_names.append(row)
                #print(f'Column names are {", ".join(row)}')
                #print(row[6],row[48])
                line_count += 1
            else:
                a.append(row)
                line_count += 1
        #print('here')
        #print(f'Processed {line_count} lines.')

#print(np.shape(a))
network = ReducedLinearNetwork()
network.zero_grad()
network.double()
b = np.shape(a)
print(b)
a = np.array(a, dtype='f')

# create a list of numbers that correspond to the columns to be removed.
# This arrangement removes the roll, pitch and yaw from the matrix a
c = np.arange(9, 42, 6)
d = np.arange(10, 42, 6)
e = np.arange(11, 42, 6)
f = np.arange(51, 87, 6)
g = np.arange(52, 87, 6)
h = np.arange(53, 87, 6)
#obj_pose=np.array([84,85,86])
c = np.concatenate((c, d, e, f, g, h))

# calculate the velocity of the fingers
for i in range(36):
    velocity = calc_velocity(a[:, i + 6], a[:, i + 48])
    a[:, i + 6] = velocity

# normalize the entire table so that all the inputs and outputs lie on a spectrum from 0-1
for i in range(b[1]):
    a[:, i] = normalize_vector(a[:, i])

# remove the columns that are unwanted, described by the array c
new_a = np.zeros([b[0], 69])
for i in range(b[0]):
    new_a[i, :] = np.delete(a[i, :], c)

# check to make sure the right columns got deleted
column_names = np.delete(column_names, c)
print(column_names[0])
a = new_a
#print(a[:,-1])

running_loss = 0
learning_rate = 0.1
total_loss = []
total_time = []
num_epocs = 100
network = network.float()
for j in range(num_epocs):
    print(j)
    learning_rate = 0.1 - j / num_epocs * 0.09
    np.random.shuffle(a)
    running_loss = 0
    for i in range(b[0]):
        #network=network.float()
        #state = ego.convert_world_state_to_front()
        #ctrl_delta, ctrl_vel, err, interr, differr = controller.calc_steer_control(t[i],state,x_true,y_true, vel, network)
        input1 = a[i, :-1]
        #print(input1)
        network_input = torch.tensor(input1)
        #print(network_input)
        #print(a[i,-1])
        network_target = torch.tensor(a[i, -1])
        #network_target.reshape(1)
        network_input = network_input.float()
        #print(network_input)
        out = network(network_input)
        out.reshape(1)
        network.zero_grad()
        criterion = nn.MSELoss()
        loss = criterion(out, network_target)
        loss.backward()
        running_loss += loss.item()
        #print(out.data,network_target.data, out.data-network_target.data)
        #print(loss.item())
        for f in network.parameters():
            f.data.sub_(f.grad.data * learning_rate)
        if i % 1000 == 999:
            # keep a tally of the loss and time so that the training can be plotted
            print(running_loss)
            #print(loss.item(),out[0])
            total_loss.append(running_loss)
            total_time.append((i + 1) / 1000 + j * b[0] / 1000)
            running_loss = 0

plt.plot(total_time, total_loss)
plt.show()
torch.save(network.state_dict(), './full_trained_classifier_no_rpw_obj_pose.pth')
31.577778
138
0.655407
676
4,263
3.989645
0.323965
0.007416
0.020393
0.031517
0.068224
0.034112
0
0
0
0
0
0.034808
0.204785
4,263
135
139
31.577778
0.760767
0.289937
0
0.122222
0
0
0.082581
0.076563
0
0
0
0
0
1
0.022222
false
0
0.088889
0
0.133333
0.044444
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d868e0721a8fe2dce25eaeb0e1b2ad04eea694ba
5,144
py
Python
src/main/nspawn/wrapper/sudo.py
Andrei-Pozolotin/nspawn
9dd3926f1d1a3a0648f6ec14199cbf4069af1c98
[ "Apache-2.0" ]
15
2019-10-10T17:35:48.000Z
2022-01-29T10:41:01.000Z
src/main/nspawn/wrapper/sudo.py
Andrei-Pozolotin/nspawn
9dd3926f1d1a3a0648f6ec14199cbf4069af1c98
[ "Apache-2.0" ]
null
null
null
src/main/nspawn/wrapper/sudo.py
Andrei-Pozolotin/nspawn
9dd3926f1d1a3a0648f6ec14199cbf4069af1c98
[ "Apache-2.0" ]
2
2019-10-10T17:36:43.000Z
2020-06-20T15:28:33.000Z
""" Wrapper for sudo https://linux.die.net/man/8/sudo """ import os import shlex from typing import List, Mapping from nspawn import CONFIG from nspawn.wrapper.base import Base from nspawn.support.parser import parse_text2dict class Sudo(Base): """ Provide basic file system operations """ def __init__(self): super().__init__('wrapper/sudo') def script(self, script:str) -> None: self.execute_unit_sert(script.split()) def folder_check(self, path:str) -> bool: return self.has_success(['test', '-d', path]) def folder_assert(self, path:str) -> None: assert self.folder_check(path), f"missing path '{path}'" def folder_ensure(self, path:str) -> None: self.execute_unit_sert(['mkdir', '--parents', path]) def parent_ensure(self, path:str) -> None: folder = os.path.dirname(path) self.folder_ensure(folder) def file_check(self, path:str) -> bool: return self.has_success(['test', '-f', path]) def file_assert(self, path:str): assert self.file_check(path), f"missing path '{path}'" def file_load(self, path) -> str: return self.execute_unit_sert(['cat', path]).stdout def file_save(self, path:str, text:str) -> None: self.parent_ensure(path) self.execute_unit_sert(['dd', f"of={path}"] , stdin=text) def files_copy(self, source:str, target:str) -> None: self.parent_ensure(target) self.execute_unit_sert(['cp', '--force', source, target]) def files_move(self, source:str, target:str) -> None: self.files_delete(target) self.parent_ensure(target) self.execute_unit_sert(['mv', '--force', source, target]) def files_delete(self, path:str) -> None: self.execute_unit_sert(['rm', '--force', '--recursive', path]) # # # def files_sync_any(self, source:str, target:str, opts_line:str) -> None: "invoke rsync" if self.folder_check(source): source = os.path.join(source, '') # ensure traling slash self.folder_ensure(target) else: self.parent_ensure(target) opts_list = shlex.split(opts_line) command = ['rsync'] + opts_list + [source, target ] self.execute_unit_sert(command) def files_sync_base(self, source:str, target:str) -> None: "options for DSL.COPY, DSL.CAST" rsync_base = CONFIG['wrapper/sudo']['rsync_base'] self.files_sync_any(source, target, rsync_base) def files_sync_full(self, source:str, target:str) -> None: "options for DSL.PULL, DSL.PUSH" rsync_full = CONFIG['wrapper/sudo']['rsync_full'] self.files_sync_any(source, target, rsync_full) def files_sync_time(self, source:str, target:str): "transfer file time only" self.execute_unit_sert(['touch', '-r', source, target]) # # store file meta data in xattr # def xattr_space(self) -> str: "attribute name space used by this package" return CONFIG['wrapper/sudo']['xattr_space'] def xattr_regex(self) -> str: "regular expression used to match package attributes" return CONFIG['wrapper/sudo']['xattr_regex'] def xattr_name(self, key:str) -> str: "produce package-specific attribute name" return f"{self.xattr_space()}{key}" def xattr_get(self, path:str, key:str) -> str: "load single extendend path attribute" # -n name, --name=name Dump the value of the named extended attribute # --only-values Dump out the extended attribute value(s) only name = self.xattr_name(key) result = self.execute_unit(['getfattr', '-n', name, '--only-values', path]) if result.rc == 0: return result.stdout else: return None def xattr_set(self, path:str, key:str, value:str) -> None: "save single extendend file attribute" # -n name, --name=name Specifies the name of the extended attribute to set # -v value, --value=value Specifies the new value of the extended attribute name = self.xattr_name(key) 
self.execute_unit_sert(['setfattr', '-n', name, '-v', value, path]) def xattr_load(self, path:str) -> Mapping[str, str]: "retrieve extended file attributes as dictionary" # -d, --dump Dump the values of all extended attributes # -m pattern, --match=pattern Only include attributes with names matching the regular expression result = self.execute_unit(['getfattr', '-d', '-m', self.xattr_regex(), path]) temp_dict = parse_text2dict(result.stdout) data_dict = dict() for name, data in temp_dict.items(): # deserialize key = name.replace(self.xattr_space(), '') value = data[1:-1] # remove quotes data_dict[key] = value return data_dict def xattr_save(self, path:str, data_dict:Mapping[str, str]) -> None: "persist dictionary as extended file attributes" for key, value in data_dict.items(): self.xattr_set(path, key, value) SUDO = Sudo()
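A hypothetical usage sketch of the SUDO singleton above (the paths are examples; every call shells out through sudo, so this assumes appropriate sudoers access):

SUDO.folder_ensure('/var/lib/example')                # mkdir --parents via sudo
SUDO.file_save('/var/lib/example/note.txt', 'hi\n')   # dd of=... fed from stdin
SUDO.xattr_save('/var/lib/example/note.txt', {'origin': 'demo'})
print(SUDO.xattr_load('/var/lib/example/note.txt'))   # -> {'origin': 'demo'}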
35.232877
107
0.628888
681
5,144
4.604993
0.234949
0.029018
0.045599
0.060587
0.293686
0.165816
0.157526
0.100765
0.052934
0.028061
0
0.001537
0.241058
5,144
145
108
35.475862
0.801742
0.195956
0
0.073684
0
0
0.154854
0.005554
0
0
0
0
0.042105
1
0.252632
false
0
0.063158
0.031579
0.421053
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
1
d8693362b05650b4c1b31dbc4438c95cc27c7e7b
3,052
py
Python
src/app.py
davidkowalk/Kalaha
2b00fce97f5559c0527ec1c8addf3c488c46fccf
[ "MIT" ]
1
2021-06-19T16:08:52.000Z
2021-06-19T16:08:52.000Z
src/app.py
davidkowalk/Kalaha
2b00fce97f5559c0527ec1c8addf3c488c46fccf
[ "MIT" ]
null
null
null
src/app.py
davidkowalk/Kalaha
2b00fce97f5559c0527ec1c8addf3c488c46fccf
[ "MIT" ]
null
null
null
from Board import Board, code_to_list
from sys import argv


def print_layout():
    print("╔══╦══╦══╦══╦══╦══╦══╦══╗")
    print("║  ║ 6║ 5║ 4║ 3║ 2║ 1║  ║ <- Player 2")
    print("║  ╠══╬══╬══╬══╬══╬══╣  ║")
    print("║  ║ 1║ 2║ 3║ 4║ 5║ 6║  ║ <- Player 1")
    print("╚══╩══╩══╩══╩══╩══╩══╩══╝")


def lpad(str, length=2):
    num = len(str)
    return " " * (length - num) + str


def render(field):
    print("""
╔══╦══╦══╦══╦══╦══╦══╦══╗
║  ║{N}║{M}║{L}║{K}║{J}║{I}║  ║
║{A}╠══╬══╬══╬══╬══╬══╣{H}║
║  ║{B}║{C}║{D}║{E}║{F}║{G}║  ║
╚══╩══╩══╩══╩══╩══╩══╩══╝
""".format(
        A=lpad(str(field[0])),
        B=lpad(str(field[1])),
        C=lpad(str(field[2])),
        D=lpad(str(field[3])),
        E=lpad(str(field[4])),
        F=lpad(str(field[5])),
        G=lpad(str(field[6])),
        H=lpad(str(field[7])),
        I=lpad(str(field[8])),
        J=lpad(str(field[9])),
        K=lpad(str(field[10])),
        L=lpad(str(field[11])),
        M=lpad(str(field[12])),
        N=lpad(str(field[13]))
    ))


def get_index(board):
    if board.game_ended():
        return 0

    while True:
        i = input(f"Player {board.current_player+1}:")

        if i == "exit":
            # Print board representation
            print(f"Continue with code \"{board.get_code()}\"")
            print("> python3 ./app.py <code>")
            exit()
        elif 0 < int(i) < 7:
            return int(i) + board.current_player * 7
        else:
            #print("Please select number from 1 to 6 or exit via \"exit\"\r\033[A\033[A")
            print("Please select number from 1 to 6 or exit via \"exit\"")


def game_loop(b):
    while not b.ended:
        render(b.state)
        i = get_index(b)
        code = b.play(i)

        if code == 1:
            print("You can only play your own side.", end="")
        elif code == 2:
            print("You cannot play your Mancala.", end="")
        elif code == 3:
            print("The position you want to play must have a stone count higher than 0!", end="")
        elif code == 4:
            print("You ended in your Mancala. You may play again.", end="")
        elif code == 5:
            print(f"Player {(1-b.current_player)+1} took.", end="")
        elif code == -1:
            print(f"ERROR: Index {i} not on board....", end="")
        elif code == 6:
            print("Game Ended\n\n")
            winner = b.finalize()
            print(f"Player {winner+1} won!")
            render(b.state)
            break
        else:
            print(" " * 90, end="")

        print(" " * 30)
        #print("\r\033[A\033[A\033[A\033[A\033[A\033[A\033[A\033[A\033[A\033[A")  # Return to start


def main():
    #import colorama
    #colorama.init()

    if len(argv) > 1:
        state = code_to_list(argv[1])
        b = Board(state)
    else:
        b = Board()

    print("Layout")
    print_layout()
    print("\nGAME")
    game_loop(b)


if __name__ == '__main__':
    main()
28.259259
97
0.449541
450
3,052
3.393333
0.264444
0.068762
0.11002
0.05239
0.090373
0.083824
0.083824
0.083824
0.083824
0.083824
0
0.047059
0.331586
3,052
107
98
28.523364
0.614216
0.071756
0
0.05814
0
0
0.265039
0.079972
0
0
0
0
0
1
0.069767
false
0
0.023256
0
0.127907
0.267442
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d8693bfd84c89857d5a17510a59ef22ad4a7f847
4,346
py
Python
collisionSat.py
vzayakov/satelliteprogram
24c699215c44caff4ca793be3bb71890c4b41416
[ "MIT" ]
null
null
null
collisionSat.py
vzayakov/satelliteprogram
24c699215c44caff4ca793be3bb71890c4b41416
[ "MIT" ]
null
null
null
collisionSat.py
vzayakov/satelliteprogram
24c699215c44caff4ca793be3bb71890c4b41416
[ "MIT" ]
null
null
null
from skyfield.api import Topos, load
from astropy import units as u
from astropy import time
import matplotlib.pyplot as plt
import numpy as np
from datetime import datetime
import sys

ts = load.timescale(builtin=True)

# This program is able to calculate the closest pass between two given satellites over the next 5 days.
# While it cannot predict collisions (yet), it can be used to verify already predicted ones.

# I used the Skyfield library's documentation to write this first part of my code.
# See here: https://rhodesmill.org/skyfield/earth-satellites.html

# Load all satellite Two-Line Element sets from the given file(s).
# I used the satellite data provided by the celestrak.com website, since it is vast, accurate and easily accessible.
sat_url = 'https://celestrak.com/NORAD/elements/active.txt'
tle_satellites = load.tle_file(sat_url, reload=True)
print("Loaded", len(tle_satellites), "satellites")


def calculate():
    # Search the file(s) and display names and epochs of the desired satellites
    NORAD_ID1 = input("\nPlease enter the NORAD ID of the first desired object: ")
    NORAD_ID2 = input("\nPlease enter the NORAD ID of the second desired object: ")
    by_number = {sat.model.satnum: sat for sat in tle_satellites}
    satellite1 = by_number[int(NORAD_ID1)]
    satellite2 = by_number[int(NORAD_ID2)]
    print("\n", satellite1)
    print("\n", satellite2)
    print("\nCurrent epoch of sat1: ", satellite1.epoch.utc_jpl())
    print("\nCurrent epoch of sat2: ", satellite2.epoch.utc_jpl())

    # Compute the positions of both satellites every 0.864 seconds (0.00001 days)
    # for the next 5 days using the SGP4 perturbation model.
    tcompute = ts.tt_jd(np.arange(satellite1.epoch.tt, satellite1.epoch.tt + 5.0, 0.00001))

    # From hereon, I did not reference the documentation and everything is my original code.
    # Initialize the necessary arrays for the calculations.
    distancearray = []
    timearray = []
    closepassdistance = []
    closepasstime = []
    print("setup complete")

    # Compute the distance between the satellites for each element of the tcompute array,
    # by subtracting the vectors of their current positions.
    # Enter the distance into distancearray and the time into timearray.
    for x in tcompute:
        y = (satellite2.at(x) - satellite1.at(x)).distance().km
        distancearray.append(y)
        timearray.append(x)
        print(x)
    print("done x in tcompute")

    # Initialize two more arrays, containing the first and second derivatives of the
    # distance between the satellites. This is necessary to determine the minimum
    # distance between them during the next 5 days.
    derivativearray = np.gradient(distancearray)
    secondderivativearray = np.gradient(derivativearray)
    print("done derivative")

    # This for-loop finds all relative minima using single-variable calculus, appending them to another array.
    # The loop first finds all instances where the derivative crosses 0.
    # Note that I use the intermediate value theorem to do this, since the tcompute array only has datapoints every 0.864s.
    # Then, the for-loop finds which of these points have a second derivative greater than 0, i.e. they are relative minima.
    i = 0
    for z in derivativearray:
        if z < 1.5 and z > -1.5:
            if secondderivativearray[i] > 0:
                j = distancearray[i]
                k = timearray[i]
                closepassdistance.append(j)
                closepasstime.append(k)
        i += 1
    print("\n -----------------------------")
    print("\n")
    for e in closepasstime:
        print(e)

    # Find the distance and time of the closest pass between the two satellites.
    cpd = closepassdistance.index(min(closepassdistance))
    cpt = closepasstime[cpd]

    # Print out the distance and time of the closest pass.
    print("\n", cpt.utc_datetime())
    print("\n", closepassdistance[cpd], "kilometers")

    # Graph out a distance vs. time plot of the closest pass
    tgraph = ts.tt_jd(np.arange(cpt.tt - 0.001, cpt.tt + 0.001, 0.00001))
    g1 = satellite1.at(tgraph)
    g2 = satellite2.at(tgraph)
    fig, ax = plt.subplots()
    a = tgraph.utc_datetime()
    b = (g2 - g1).distance().km
    ax.plot(a, b)
    ax.grid(which='both')
    ax.set(title='Closest pass/collision between the satellites', xlabel='UTC')
    fig.savefig('sat-separation.png', bbox_inches='tight')
    fig.show()


# Simple loop to restart the program if desired
# (the flattened original ran the if/elif once; a while-loop matches the comment's intent)
quit = 'r'
while True:
    if quit == 'q':
        sys.exit()
    elif quit == 'r':
        calculate()
    quit = input("Type 'q' to quit, or 'r' to restart: ")
35.048387
142
0.736539
662
4,346
4.805136
0.39426
0.011003
0.017605
0.011317
0.069789
0.041496
0.041496
0.041496
0
0
0
0.019436
0.159457
4,346
123
143
35.333333
0.851355
0.439254
0
0
0
0
0.183023
0.012008
0
0
0
0
0
1
0.014286
false
0.128571
0.1
0
0.114286
0.2
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
d869a760e43d690b730eb44ae5328a3cd8766067
3,738
py
Python
webdev/fornecedores/tests/test_fornecedores_post.py
h-zanetti/jewelry-manager
74166b89f492303b8ebf5ff8af058f394eb2a28b
[ "MIT" ]
null
null
null
webdev/fornecedores/tests/test_fornecedores_post.py
h-zanetti/jewelry-manager
74166b89f492303b8ebf5ff8af058f394eb2a28b
[ "MIT" ]
103
2021-04-25T21:28:11.000Z
2022-03-15T01:36:31.000Z
webdev/fornecedores/tests/test_fornecedores_post.py
h-zanetti/jewelry-manager
74166b89f492303b8ebf5ff8af058f394eb2a28b
[ "MIT" ]
null
null
null
import pytest
from pytest_django.asserts import assertRedirects
from django.urls import reverse
from django.contrib.auth.models import User
from webdev.fornecedores.models import Fornecedor, Fornecimento, Email, Telefone, Local, DadosBancarios

# New supplier (Novo Fornecedor)
@pytest.fixture
def fornecimento(db):
    return Fornecimento.objects.create(
        nome="Programador",
    )

@pytest.fixture
def resposta_autenticada(client, fornecimento):
    User.objects.create_user(username='TestUser', password='MinhaSenha123')
    client.login(username='TestUser', password='MinhaSenha123')
    resp = client.post(reverse('fornecedores:novo_fornecedor'), data={
        'nome': 'Isaac Newton',
        'fornecimento': [fornecimento.id]
    })
    return resp

def test_redireciona_editar_fornecedor(resposta_autenticada):
    assertRedirects(resposta_autenticada, reverse(
        'fornecedores:editar_fornecedor', kwargs={'fornecedor_id': 1}))

def test_fornecedor_existe_no_bd(resposta_autenticada):
    assert Fornecedor.objects.exists()

# New supply (Novo Fornecimento)
@pytest.fixture
def criar_fornecedor(db):
    return Fornecedor.objects.create(nome='Zé Comédia')

# New email (Novo Email)
@pytest.fixture
def resposta_novo_email(client, criar_fornecedor):
    User.objects.create_user(username='TestUser', password='MinhaSenha123')
    client.login(username='TestUser', password='MinhaSenha123')
    resp = client.post(
        reverse('fornecedores:novo_email', kwargs={'fornecedor_id': criar_fornecedor.id}),
        data={
            'fornecedor': criar_fornecedor.id,
            'email': 'testEmail@gmail.com'
        }
    )
    return resp

def test_email_existe_no_bd(resposta_novo_email):
    assert Email.objects.exists()

# New phone (Novo Telefone)
@pytest.fixture
def resposta_novo_telefone(client, criar_fornecedor):
    usr = User.objects.create_user(username='TestUser', password='MinhaSenha123')
    client.login(username='TestUser', password='MinhaSenha123')
    resp = client.post(
        reverse(
            'fornecedores:novo_telefone',
            kwargs={'fornecedor_id': criar_fornecedor.id}
        ),
        data={
            'fornecedor': criar_fornecedor.id,
            'telefone': 11944647420
        }
    )
    return resp

def test_telefone_existe_no_bd(resposta_novo_telefone):
    assert Telefone.objects.exists()

# New location (Nova Localização)
@pytest.fixture
def resposta_novo_local(client, criar_fornecedor):
    usr = User.objects.create_user(username='TestUser', password='MinhaSenha123')
    client.login(username='TestUser', password='MinhaSenha123')
    resp = client.post(
        reverse(
            'fornecedores:novo_local',
            kwargs={'fornecedor_id': criar_fornecedor.id}
        ),
        data={
            'fornecedor': criar_fornecedor.id,
            'pais': 'Brasil',
            'estado': 'SP',
            'cidade': 'São Paulo',
            'bairro': 'Campo Belo',
            'endereco': 'Av Barão de Vali, 240',
            'cep': '04613-030',
        }
    )
    return resp

def test_local_existe_no_bd(resposta_novo_local):
    assert Local.objects.exists()

# New bank details (Novos Dados Bancários)
@pytest.fixture
def resposta_novos_dados_bancarios(client, criar_fornecedor):
    usr = User.objects.create_user(username='TestUser', password='MinhaSenha123')
    client.login(username='TestUser', password='MinhaSenha123')
    resp = client.post(
        reverse(
            'fornecedores:novos_dados_bancarios',
            kwargs={'fornecedor_id': criar_fornecedor.id}
        ),
        data={
            'fornecedor': criar_fornecedor.id,
            'tipo_de_transacao': 'px',
            'numero': '0000030',
        }
    )
    return resp

def test_dados_bancarios_existe_no_bd(resposta_novos_dados_bancarios):
    assert DadosBancarios.objects.exists()
32.224138
103
0.692616
397
3,738
6.322418
0.229219
0.062151
0.095618
0.14741
0.476892
0.417131
0.417131
0.417131
0.417131
0.417131
0
0.01992
0.194222
3,738
115
104
32.504348
0.813413
0.02595
0
0.410526
0
0
0.187672
0.045129
0
0
0
0
0.073684
1
0.136842
false
0.105263
0.052632
0.021053
0.263158
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
d86dfa846b06600cb6626bf01cc13c4b4ba732dd
1,656
py
Python
humimp/urls.py
zain-Z/humimp
fd7e4e211dce62639e2fce2dd9f9506240a7a3d9
[ "MIT" ]
null
null
null
humimp/urls.py
zain-Z/humimp
fd7e4e211dce62639e2fce2dd9f9506240a7a3d9
[ "MIT" ]
null
null
null
humimp/urls.py
zain-Z/humimp
fd7e4e211dce62639e2fce2dd9f9506240a7a3d9
[ "MIT" ]
null
null
null
"""humimp URL Configuration

The `urlpatterns` list routes URLs to views. For more information please see:
    https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
    1. Add an import:  from my_app import views
    2. Add a URL to urlpatterns:  path('', views.home, name='home')
Class-based views
    1. Add an import:  from other_app.views import Home
    2. Add a URL to urlpatterns:  path('', Home.as_view(), name='home')
Including another URLconf
    1. Import the include() function: from django.urls import include, path
    2. Add a URL to urlpatterns:  path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from django.conf.urls.static import static
from jobs.views import ApplicationList, ApplicationRetrieveDestroy
from django.conf import settings
from django.conf.urls.i18n import i18n_patterns
from django.utils.translation import gettext_lazy as _

urlpatterns = [
    # django rest api
    path('api/applications', ApplicationList.as_view()),
    path('api/applications/<int:pk>', ApplicationRetrieveDestroy.as_view()),
    path('api-auth/', include('rest_framework.urls')),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)

urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)

urlpatterns += i18n_patterns(
    path('admin/', admin.site.urls),
    path('', include('jobs.urls', namespace='jobs')),
    path('blogs/', include('blogs.urls', namespace='blogs')),
    path('careers/', include('careers.urls', namespace='careers')),
)

admin.site.site_url = None
admin.site.site_header = 'HuminImp Administration'
37.636364
78
0.736715
226
1,656
5.318584
0.353982
0.058236
0.012479
0.019967
0.097338
0.097338
0.062396
0
0
0
0
0.009777
0.135266
1,656
43
79
38.511628
0.829609
0.38587
0
0
0
0
0.157426
0.024752
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
1
d8709d89acee40ccaac332ee9c01a0773827a0af
499
py
Python
python/arrays/0048-rotate-image.py
karolinyoliveira/leetcode-ebbinghaus-practice
5149e06f1c187b87e280fd58541c11d8ab8626d3
[ "MIT" ]
2
2021-05-28T03:41:39.000Z
2021-10-19T16:53:16.000Z
python/arrays/0048-rotate-image.py
karolinyoliveira/leetcode-ebbinghaus-practice
5149e06f1c187b87e280fd58541c11d8ab8626d3
[ "MIT" ]
null
null
null
python/arrays/0048-rotate-image.py
karolinyoliveira/leetcode-ebbinghaus-practice
5149e06f1c187b87e280fd58541c11d8ab8626d3
[ "MIT" ]
null
null
null
from typing import List


def rotate(matrix: List[List[int]]) -> None:
    # Rotate the square matrix 90 degrees clockwise, in place, one
    # concentric "ring" (layer) at a time.
    for layer in range(len(matrix) // 2):
        first = layer
        last = len(matrix) - layer - 1
        for i in range(first, last):
            offset = i - first
            top = matrix[first][i]
            # left -> top
            matrix[first][i] = matrix[last - offset][first]
            # bottom -> left
            matrix[last - offset][first] = matrix[last][last - offset]
            # right -> bottom
            matrix[last][last - offset] = matrix[i][last]
            # top -> right
            matrix[i][last] = top
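A quick sanity check of the four-way swap above; the matrix value is a made-up example, not part of the original file:

m = [[1, 2], [3, 4]]
rotate(m)
assert m == [[3, 1], [4, 2]]  # rotated 90 degrees clockwise, in place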
35.642857
70
0.541082
64
499
4.21875
0.328125
0.185185
0.088889
0.133333
0.325926
0.192593
0
0
0
0
0
0.005882
0.318637
499
13
71
38.384615
0.788235
0
0
0
0
0
0
0
0
0
0
0
0
1
0.083333
false
0
0.083333
0
0.166667
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d870d81902f0c3d976bb8c7ff8e74c4445f9877c
6,176
py
Python
siiptool/common/configparser.py
kdbarnes-intel/iotg-fbu
bf6c38240ef13c64d2776e48ec277b3c8bebce6f
[ "BSD-2-Clause" ]
15
2020-03-21T03:19:46.000Z
2022-03-02T07:12:57.000Z
siiptool/common/configparser.py
kdbarnes-intel/iotg-fbu
bf6c38240ef13c64d2776e48ec277b3c8bebce6f
[ "BSD-2-Clause" ]
7
2020-03-10T03:17:24.000Z
2021-06-08T17:47:34.000Z
siiptool/common/configparser.py
kdbarnes-intel/iotg-fbu
bf6c38240ef13c64d2776e48ec277b3c8bebce6f
[ "BSD-2-Clause" ]
11
2020-02-28T19:52:22.000Z
2022-03-02T07:15:51.000Z
from collections.abc import MutableMapping
import warnings
import json
from configparser import *
from configparser import __all__

__all__.append("JSONConfigParser")

_ConfigParser = ConfigParser


class ConfigParser:
    """Accepts extra keyword config_type and returns the instance based on it"""

    def __new__(cls, *args, **kwds):
        config_type = kwds.pop("config_type", "ini")
        if config_type == "json":
            return JSONConfigParser()
        return _ConfigParser(*args, **kwds)


class JSONConfigParser(MutableMapping):
    """A ConfigParser that works with a json file."""

    def __init__(self):
        self._dict = {}

    def defaults(self):
        raise NotImplementedError

    def sections(self):
        """Return a list of section names"""
        return list(self._dict)

    def add_section(self, section):
        """Create a new section in the configuration.

        Raise DuplicateSectionError if a section by the specified name
        already exists.
        """
        if self.has_section(section):
            raise DuplicateSectionError(section)
        self._dict[section] = {}

    def has_section(self, section):
        """Indicate whether the named section is present in the configuration."""
        return section in self._dict

    def options(self, section):
        """Return a list of option names for the given section name."""
        try:
            return list(self._dict[section])
        except KeyError as e:
            raise NoSectionError(str(e)) from None

    def read(self, filenames, encoding=None):
        """Read and parse a filename or a list of filenames.

        Files that cannot be opened are silently ignored; this is designed so
        that you can specify a list of potential configuration file locations
        (e.g. current directory, user's home directory, systemwide directory),
        and all existing configuration files in the list will be read.
        A single filename may also be given.

        Return list of successfully read files.
        """
        if isinstance(filenames, str):
            filenames = [filenames]
        read_ok = []
        for filename in filenames:
            try:
                with open(filename, encoding=encoding) as f:
                    self.read_file(f)
            except OSError:
                continue
            read_ok.append(filename)
        return read_ok

    def read_file(self, f, **kwds):
        """Like read() but the argument must be a file-like object.

        The 'f' argument must be a json document.
        """
        dictionary = json.load(f)
        self.read_dict(dictionary)

    def read_string(self, string, **kwds):
        """Read configuration from a given string that contains a json document."""
        self._dict.update(json.loads(string))

    def read_dict(self, dictionary, **kwds):
        """Read configuration from a dictionary."""
        self._dict.update(dictionary)

    def readfp(self, fp, **kwds):
        """Deprecated, use read_file instead."""
        warnings.warn(
            "This method will be removed in future versions. "
            "Use 'parser.read_file()' instead.",
            DeprecationWarning, stacklevel=2
        )
        self.read_file(fp, **kwds)

    def has_option(self, section, option):
        """Check for the existence of a given option in a given section."""
        try:
            return option in self._dict[section]
        except KeyError as e:
            raise NoSectionError(str(e)) from None

    def set(self, section, option, value=None):
        """Set an option."""
        self._dict[section][option] = value

    def write(self, fp, **kwds):
        """Write a .json-format representation of the configuration state."""
        json.dump(self._dict, fp)

    def remove_option(self, section, option):
        """Remove an option."""
        try:
            del self._dict[section][option]
        except KeyError as e:
            if str(e) == section:
                raise NoSectionError(section) from None
            else:
                # Bug fix: NoOptionError requires both option and section;
                # the original passed only the option, which raises TypeError.
                raise NoOptionError(option, section) from None

    def remove_section(self, section):
        """Remove a file section."""
        try:
            del self._dict[section]
        except KeyError:
            raise NoSectionError(section) from None

    def __getitem__(self, key):
        return self._dict[key]

    def __setitem__(self, key, value):
        self._dict[key] = value

    def __delitem__(self, key):
        del self._dict[key]

    def __iter__(self):
        return iter(self._dict)

    def __len__(self):
        # Bug fix: the original returned len(self._dict.keys), calling len()
        # on the unbound method object rather than the key view.
        return len(self._dict)

    # These methods are provided directly for compatibility with the original
    # ConfigParser.
    def getint(self, section, option, **kwds):
        try:
            return int(self._dict[section][option])
        except KeyError as e:
            error_key = str(e)
            if error_key == section:
                raise NoSectionError(error_key) from None
            else:
                raise NoOptionError(error_key, section) from None

    def getfloat(self, section, option, **kwds):
        try:
            return float(self._dict[section][option])
        except KeyError as e:
            error_key = str(e)
            if error_key == section:
                raise NoSectionError(error_key) from None
            else:
                raise NoOptionError(error_key, section) from None

    def getboolean(self, section, option, **kwds):
        try:
            val = self._dict[section][option]
            if isinstance(val, bool):
                return val
            raise ValueError("value is not boolean")
        except KeyError as e:
            error_key = str(e)
            if error_key == section:
                raise NoSectionError(error_key) from None
            else:
                raise NoOptionError(error_key, section) from None

    # TODO: implement for this class
    def optionxform(self, optionstr):
        raise NotImplementedError

    @property
    def converters(self):
        raise NotImplementedError
31.510204
81
0.599093
710
6,176
5.083099
0.269014
0.044334
0.037406
0.028263
0.229149
0.167636
0.151011
0.151011
0.140482
0.140482
0
0.000236
0.312986
6,176
195
82
31.671795
0.850342
0.214864
0
0.317829
0
0
0.029185
0
0
0
0
0
0
1
0.209302
false
0.007752
0.03876
0.023256
0.379845
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
1
d871d406a36eb042f0249d4cd661dbf2e8cf3a59
5,859
py
Python
tests/test_validation.py
danielgis/invest
b9687d249361556b874750368e856ef049447b5a
[ "BSD-3-Clause" ]
null
null
null
tests/test_validation.py
danielgis/invest
b9687d249361556b874750368e856ef049447b5a
[ "BSD-3-Clause" ]
null
null
null
tests/test_validation.py
danielgis/invest
b9687d249361556b874750368e856ef049447b5a
[ "BSD-3-Clause" ]
1
2021-04-21T10:46:17.000Z
2021-04-21T10:46:17.000Z
import unittest


class ValidatorTest(unittest.TestCase):
    def test_args_wrong_type(self):
        """Validation: check for error when args is the wrong type."""
        from natcap.invest import validation

        @validation.invest_validator
        def validate(args, limit_to=None):
            pass

        with self.assertRaises(AssertionError):
            validate(args=123)

    def test_limit_to_wrong_type(self):
        """Validation: check for error when limit_to is the wrong type."""
        from natcap.invest import validation

        @validation.invest_validator
        def validate(args, limit_to=None):
            pass

        with self.assertRaises(AssertionError):
            validate(args={}, limit_to=1234)

    def test_limit_to_not_in_args(self):
        """Validation: check for error when limit_to is not a key in args."""
        from natcap.invest import validation

        @validation.invest_validator
        def validate(args, limit_to=None):
            pass

        with self.assertRaises(AssertionError):
            validate(args={}, limit_to='bar')

    def test_args_keys_must_be_strings(self):
        """Validation: check for error when args keys are not all strings."""
        from natcap.invest import validation

        @validation.invest_validator
        def validate(args, limit_to=None):
            pass

        with self.assertRaises(AssertionError):
            validate(args={1: 'foo'})

    def test_invalid_return_value(self):
        """Validation: check for error when the return value type is wrong."""
        from natcap.invest import validation

        for invalid_value in (1, True, None):
            @validation.invest_validator
            def validate(args, limit_to=None):
                return invalid_value

            with self.assertRaises(AssertionError):
                validate({})

    def test_invalid_keys_iterable(self):
        """Validation: check for error when return keys not an iterable."""
        from natcap.invest import validation

        @validation.invest_validator
        def validate(args, limit_to=None):
            return [('a', 'error 1')]

        with self.assertRaises(AssertionError):
            validate({'a': 'foo'})

    def test_return_keys_in_args(self):
        """Validation: check for error when return keys not all in args."""
        from natcap.invest import validation

        @validation.invest_validator
        def validate(args, limit_to=None):
            return [(('a',), 'error 1')]

        with self.assertRaises(AssertionError):
            validate({})

    def test_error_string_wrong_type(self):
        """Validation: check for error when error message not a string."""
        from natcap.invest import validation

        @validation.invest_validator
        def validate(args, limit_to=None):
            return [(('a',), 1234)]

        with self.assertRaises(AssertionError):
            validate({'a': 'foo'})

    def test_wrong_parameter_names(self):
        """Validation: check for error when wrong function signature used."""
        from natcap.invest import validation

        @validation.invest_validator
        def validate(foo):
            pass

        with self.assertRaises(AssertionError):
            validate({})

    def test_return_value(self):
        """Validation: validation errors should be returned from decorator."""
        from natcap.invest import validation

        errors = [(('a', 'b'), 'Error!')]

        @validation.invest_validator
        def validate(args, limit_to=None):
            return errors

        validation_errors = validate({'a': 'foo', 'b': 'bar'})
        self.assertEqual(validation_errors, errors)

    def test_n_workers(self):
        """Validation: validation error returned on invalid n_workers."""
        from natcap.invest import validation

        @validation.invest_validator
        def validate(args, limit_to=None):
            return []

        validation_errors = validate({'n_workers': 1.5})
        self.assertEqual(len(validation_errors), 1)
        self.assertTrue(validation_errors[0][0] == ['n_workers'])
        self.assertTrue('must be an integer' in validation_errors[0][1])


class ValidationContextTests(unittest.TestCase):
    def test_is_arg_complete_require(self):
        """Validation: context returns a warning for incomplete args."""
        from natcap.invest import validation

        context = validation.ValidationContext(args={}, limit_to=None)
        is_complete = context.is_arg_complete('some_key', require=True)
        self.assertEqual(is_complete, False)
        self.assertEqual(len(context.warnings), 1)

    def test_is_arg_complete_require_and_present(self):
        """Validation: context ok when arg complete."""
        from natcap.invest import validation

        context = validation.ValidationContext(
            args={'some_key': 'foo'}, limit_to=None)
        is_complete = context.is_arg_complete('some_key', require=True)
        self.assertEqual(is_complete, True)
        self.assertEqual(context.warnings, [])

    def test_warn_single_key(self):
        """Validation: check warnings when single key is given."""
        from natcap.invest import validation

        context = validation.ValidationContext(
            args={'some_key': 'foo'}, limit_to=None)
        context.warn('some error', 'some_key')
        self.assertEqual(context.warnings, [(('some_key',), 'some error')])

    def test_warn_iterable_keys(self):
        """Validation: check warnings when keys are iterable."""
        from natcap.invest import validation

        context = validation.ValidationContext(
            args={'some_key': 'foo'}, limit_to=None)
        context.warn('some error', keys=['some_key'])
        self.assertEqual(context.warnings, [(('some_key',), 'some error')])
34.263158
78
0.640894
666
5,859
5.471471
0.135135
0.038419
0.065862
0.09056
0.716246
0.681669
0.643249
0.603732
0.576015
0.493688
0
0.00527
0.255163
5,859
170
79
34.464706
0.829743
0.151732
0
0.6
0
0
0.041684
0
0
0
0
0
0.172727
1
0.236364
false
0.045455
0.145455
0.054545
0.454545
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
1
d8720456fd135a7561fb3d2146537a8ebeb01212
77
py
Python
vmaig_blog/uwsgi-2.0.14/plugins/zergpool/uwsgiplugin.py
StanYaha/Blog
3cb38918e14ebe6ce2e2952ef272de116849910d
[ "BSD-3-Clause" ]
1
2018-11-24T16:10:49.000Z
2018-11-24T16:10:49.000Z
vmaig_blog/uwsgi-2.0.14/plugins/zergpool/uwsgiplugin.py
StanYaha/Blog
3cb38918e14ebe6ce2e2952ef272de116849910d
[ "BSD-3-Clause" ]
null
null
null
vmaig_blog/uwsgi-2.0.14/plugins/zergpool/uwsgiplugin.py
StanYaha/Blog
3cb38918e14ebe6ce2e2952ef272de116849910d
[ "BSD-3-Clause" ]
null
null
null
NAME = 'zergpool'

CFLAGS = []
LDFLAGS = []
LIBS = []

GCC_LIST = ['zergpool']
9.625
23
0.584416
8
77
5.5
0.875
0
0
0
0
0
0
0
0
0
0
0
0.194805
77
7
24
11
0.709677
0
0
0
0
0
0.210526
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
d87268d377955c1f6efb88d6ef67f9df1b77d9d4
6,279
py
Python
py-src/helper/img_transform.py
gabeoh/CarND-P01-LaneLines
5a35a7698f5a2efeff70d5537fedae366c1e51a0
[ "MIT" ]
null
null
null
py-src/helper/img_transform.py
gabeoh/CarND-P01-LaneLines
5a35a7698f5a2efeff70d5537fedae366c1e51a0
[ "MIT" ]
null
null
null
py-src/helper/img_transform.py
gabeoh/CarND-P01-LaneLines
5a35a7698f5a2efeff70d5537fedae366c1e51a0
[ "MIT" ]
null
null
null
import numpy as np
import matplotlib.pyplot as plt
import cv2
import math


def grayscale(img):
    """Applies the Grayscale transform

    This will return an image with only one color channel
    but NOTE: to see the returned image as grayscale
    (assuming your grayscaled image is called 'gray')
    you should call plt.imshow(gray, cmap='gray')"""
    return cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
    # Or use BGR2GRAY if you read an image with cv2.imread()
    # return cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)


def canny(img, low_threshold, high_threshold):
    """Applies the Canny transform"""
    return cv2.Canny(img, low_threshold, high_threshold)


def gaussian_blur(img, kernel_size):
    """Applies a Gaussian Noise kernel"""
    return cv2.GaussianBlur(img, (kernel_size, kernel_size), 0)


def region_of_interest(img, vertices):
    """
    Applies an image mask.

    Only keeps the region of the image defined by the polygon
    formed from `vertices`. The rest of the image is set to black.
    """
    # defining a blank mask to start with
    mask = np.zeros_like(img)

    # defining a 3 channel or 1 channel color to fill the mask with depending on the input image
    if len(img.shape) > 2:
        channel_count = img.shape[2]  # i.e. 3 or 4 depending on your image
        ignore_mask_color = (255,) * channel_count
    else:
        ignore_mask_color = 255

    # filling pixels inside the polygon defined by "vertices" with the fill color
    cv2.fillPoly(mask, vertices, ignore_mask_color)

    # returning the image only where mask pixels are nonzero
    masked_image = cv2.bitwise_and(img, mask)
    return masked_image


def find_aggregated_line(lines_x, lines_y, y_bottom, y_top):
    """
    Find two end-points (bottom and top) of aggregated line for given line collection.
    The endpoints are determined by given y coordinate range
    :param lines_x: x coordinates of lines
    :param lines_y: y coordinates of lines
    :param y_bottom: bottom end y coordinate of aggregated line segment
    :param y_top: top end y coordinate of aggregated line segment
    :return: (x, y) coordinates of two end-points of aggregated line segment
    """
    # First, make sure that lines_x and lines_y are non-empty same size arrays
    assert(len(lines_x) > 0 and len(lines_x) == len(lines_y))

    # Compute straight lines that fit line endpoints for left and right line segments
    line_fit = np.polyfit(lines_x, lines_y, 1)

    # Find start and end points for aggregated lines
    x_bottom = int(round((y_bottom - line_fit[1]) / line_fit[0]))
    x_top = int(round((y_top - line_fit[1]) / line_fit[0]))
    return [(x_bottom, y_bottom), (x_top, y_top)]


def draw_lines(img, lines, color=[255, 0, 0], thickness=10):
    """
    NOTE: this is the function you might want to use as a starting point once you want to
    average/extrapolate the line segments you detect to map out the full
    extent of the lane (going from the result shown in raw-lines-example.mp4
    to that shown in P1_example.mp4).

    Think about things like separating line segments by their
    slope ((y2-y1)/(x2-x1)) to decide which segments are part of the left
    line vs. the right line. Then, you can average the position of each of
    the lines and extrapolate to the top and bottom of the lane.

    This function draws `lines` with `color` and `thickness`.
    Lines are drawn on the image inplace (mutates the image).
    If you want to make the lines semi-transparent, think about combining
    this function with the weighted_img() function below
    """
    # Identify line end points of each line (separate into left and right lines)
    lines_left_x = []
    lines_left_y = []
    lines_right_x = []
    lines_right_y = []
    xsize = img.shape[1]
    x_middle = int(round(xsize / 2))
    for line in lines:
        for x1, y1, x2, y2 in line:
            slope = (y2 - y1) / (x2 - x1)
            if (slope > -0.9 and slope < -0.5) and (x1 < x_middle and x2 < x_middle):
                lines_left_x.extend([x1, x2])
                lines_left_y.extend([y1, y2])
            elif (slope > 0.4 and slope < 0.8) and (x1 > x_middle and x2 > x_middle):
                lines_right_x.extend([x1, x2])
                lines_right_y.extend([y1, y2])
            else:
                # print('Ignore outlier lines - slope: %f, (%d, %d), (%d, %d)' % (slope, x1, y1, x2, y2))
                pass

    # Determine Y range for aggregated lines
    ysize = img.shape[0]
    y_bottom, y_top = ysize - 1, min(lines_left_y + lines_right_y)

    # Find and draw aggregated lines for left and right line collections respectively
    if (len(lines_left_x) > 0):
        point_bottom, point_top = find_aggregated_line(lines_left_x, lines_left_y, y_bottom, y_top)
        cv2.line(img, point_bottom, point_top, color, thickness)
    if (len(lines_right_x) > 0):
        point_bottom, point_top = find_aggregated_line(lines_right_x, lines_right_y, y_bottom, y_top)
        cv2.line(img, point_bottom, point_top, color, thickness)


def draw_raw_lines(img, lines, color=[255, 0, 0], thickness=2):
    """
    The original draw_lines function provided in the project
    """
    for line in lines:
        for x1, y1, x2, y2 in line:
            cv2.line(img, (x1, y1), (x2, y2), color, thickness)


def hough_lines(img, rho, theta, threshold, min_line_len, max_line_gap):
    """
    `img` should be the output of a Canny transform.

    Returns an image with hough lines drawn.
    """
    lines = cv2.HoughLinesP(img, rho, theta, threshold, np.array([]),
                            minLineLength=min_line_len, maxLineGap=max_line_gap)
    line_img = np.zeros((img.shape[0], img.shape[1], 3), dtype=np.uint8)
    draw_lines(line_img, lines)
    return line_img


# Python 3 has support for cool math symbols.
def weighted_img(img, initial_img, α=0.8, β=1., γ=0.):
    """
    `img` is the output of the hough_lines(), An image with lines drawn on it.
    Should be a blank image (all black) with lines drawn on it.

    `initial_img` should be the image before any processing.

    The result image is computed as follows:

    initial_img * α + img * β + γ
    NOTE: initial_img and img must be the same shape!
    """
    return cv2.addWeighted(initial_img, α, img, β, γ)
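A minimal end-to-end sketch of how these helpers chain together; the image path and tuning values are hypothetical, not taken from the original project:

import matplotlib.image as mpimg

img = mpimg.imread('test_images/solidWhiteRight.jpg')  # assumed test image
gray = grayscale(img)
blurred = gaussian_blur(gray, kernel_size=5)
edges = canny(blurred, low_threshold=50, high_threshold=150)
ysize, xsize = edges.shape
# Triangular region of interest covering the lower half of the frame
vertices = np.array([[(0, ysize), (xsize // 2, ysize // 2), (xsize, ysize)]],
                    dtype=np.int32)
masked = region_of_interest(edges, vertices)
lines_img = hough_lines(masked, rho=2, theta=np.pi / 180, threshold=15,
                        min_line_len=40, max_line_gap=20)
result = weighted_img(lines_img, img)  # overlay lane lines on the original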
38.521472
104
0.672241
992
6,279
4.118952
0.25504
0.017621
0.010768
0.010768
0.21488
0.174743
0.111601
0.09349
0.077827
0.063632
0
0.023372
0.236821
6,279
162
105
38.759259
0.829299
0.47842
0
0.125
0
0
0
0
0
0
0
0
0.015625
1
0.140625
false
0.015625
0.0625
0
0.3125
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d8733f6f6015bbf69ccc3df24284440196f1074d
1,314
py
Python
src/zope/app/publication/publicationtraverse.py
zopefoundation/zope.app.publication
fd5cde576c4679aac1f7dc091d2fd6fb5ae51db4
[ "ZPL-2.1" ]
1
2021-02-01T06:13:49.000Z
2021-02-01T06:13:49.000Z
src/zope/app/publication/publicationtraverse.py
zopefoundation/zope.app.publication
fd5cde576c4679aac1f7dc091d2fd6fb5ae51db4
[ "ZPL-2.1" ]
11
2015-08-03T07:40:13.000Z
2020-12-17T06:54:55.000Z
src/zope/app/publication/publicationtraverse.py
zopefoundation/zope.app.publication
fd5cde576c4679aac1f7dc091d2fd6fb5ae51db4
[ "ZPL-2.1" ]
3
2015-04-03T07:58:39.000Z
2016-11-04T06:00:03.000Z
##############################################################################
#
# Copyright (c) 2001, 2002 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
import warnings

from zope.traversing.publicationtraverse import PublicationTraverse  # noqa: F401 E501 (BBB and long line)
from zope.traversing.publicationtraverse import PublicationTraverser  # noqa: F401 E501 (BBB and long line)

warnings.warn("""%s is deprecated

If you want PublicationTraverser, it's now in
zope.traversing.publicationtraverse. Anything else that was here is
deprecated.
""" % __name__, DeprecationWarning, stacklevel=1)


# BBB: do not use
class DuplicateNamespaces(Exception):
    """More than one namespace was specified in a request"""


# BBB: do not use
class UnknownNamespace(Exception):
    """A parameter specified an unknown namespace"""
37.542857
107
0.679604
155
1,314
5.735484
0.612903
0.047244
0.111361
0.08324
0.191226
0.058493
0.058493
0
0
0
0
0.020282
0.136986
1,314
34
108
38.647059
0.763668
0.502283
0
0
0
0
0.306554
0.120507
0
0
0
0
0
1
0
true
0
0.3
0
0.5
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
1
d873c9d5b73ad6048ae3ed992ae54074f4373aad
44
py
Python
snowav/database/__init__.py
robertson-mark/SNOWAV
ef7a470dd45a342ee454d74b6476da5807f14301
[ "CC0-1.0" ]
1
2018-09-11T17:14:01.000Z
2018-09-11T17:14:01.000Z
snowav/database/__init__.py
robertson-mark/SNOWAV
ef7a470dd45a342ee454d74b6476da5807f14301
[ "CC0-1.0" ]
15
2018-10-24T21:59:57.000Z
2021-07-01T20:37:05.000Z
snowav/database/__init__.py
USDA-ARS-NWRC/snowav
ef7a470dd45a342ee454d74b6476da5807f14301
[ "CC0-1.0" ]
null
null
null
from . import tables
from . import database
14.666667
22
0.772727
6
44
5.666667
0.666667
0.588235
0
0
0
0
0
0
0
0
0
0
0.181818
44
2
23
22
0.944444
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
d87785dc3dfe82dc39a25b2fc439096204f29f6f
158
py
Python
distributions/admin.py
lueho/BRIT
1eae630c4da6f072aa4e2139bc406db4f4756391
[ "MIT" ]
null
null
null
distributions/admin.py
lueho/BRIT
1eae630c4da6f072aa4e2139bc406db4f4756391
[ "MIT" ]
4
2022-03-29T20:52:31.000Z
2022-03-29T20:52:31.000Z
distributions/admin.py
lueho/BRIT
1eae630c4da6f072aa4e2139bc406db4f4756391
[ "MIT" ]
null
null
null
from django.contrib import admin

from .models import Timestep, TemporalDistribution

admin.site.register(TemporalDistribution)
admin.site.register(Timestep)
22.571429
50
0.848101
18
158
7.444444
0.555556
0.373134
0.432836
0.552239
0
0
0
0
0
0
0
0
0.082278
158
6
51
26.333333
0.924138
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
7
d8794c7745220a34124f93774d760cb2a2e49b5f
1,581
py
Python
src/prepare_train_valid.py
partham16/demo_classification
d756ab150a1913c220f1048eda552483e88c01c1
[ "MIT" ]
null
null
null
src/prepare_train_valid.py
partham16/demo_classification
d756ab150a1913c220f1048eda552483e88c01c1
[ "MIT" ]
null
null
null
src/prepare_train_valid.py
partham16/demo_classification
d756ab150a1913c220f1048eda552483e88c01c1
[ "MIT" ]
null
null
null
from typing import List, Tuple

import h2o
import pandas as pd
from sklearn.model_selection import train_test_split

from .config import Config


def get_train_valid(df: pd.DataFrame) -> Tuple[pd.DataFrame]:
    """Get train - valid - test"""
    full_train_df, test_df = train_test_split(
        df,
        test_size=Config.test_percent,
        random_state=Config.test_seed,
        stratify=df[Config.stratify_col].values,
    )
    train_df, valid_df = train_test_split(
        full_train_df,
        test_size=Config.valid_percent,
        random_state=Config.valid_seed,
        stratify=full_train_df[Config.stratify_col].values,
    )
    return full_train_df, train_df, valid_df, test_df


def get_h2o_train_valid(dfs: Tuple[pd.DataFrame]) -> Tuple[h2o.H2OFrame]:
    """Convert DataFrames to H2OFrames"""
    full_train_df, train_df, valid_df, test_df = dfs
    if not Config.use_full_train:
        train = h2o.H2OFrame(train_df)
    else:
        train = h2o.H2OFrame(full_train_df)
    valid = h2o.H2OFrame(valid_df)
    test = h2o.H2OFrame(test_df)
    return train, valid, test


def treat_categorical_cols(
    dfs: Tuple[h2o.H2OFrame], cat_cols: List[str]
) -> Tuple[h2o.H2OFrame, List[str], str]:
    """Set categorical columns as factor"""
    train, valid, test = dfs
    x = train.columns
    y = Config.target_col
    x.remove(y)
    train[y] = train[y].asfactor()
    for col in cat_cols:
        train[col] = train[col].asfactor()
        valid[col] = valid[col].asfactor()
        test[col] = test[col].asfactor()
    return train, valid, test, x, y
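For orientation, a hypothetical Config that would drive the split logic above; the real one lives in src/config.py, which is not part of this record, so these values are assumptions for illustration only:

class Config:  # assumed values, not the project's actual configuration
    test_percent = 0.2      # 20% of rows held out as the test set
    test_seed = 42
    valid_percent = 0.25    # 25% of the remaining training rows for validation
    valid_seed = 42
    stratify_col = "label"  # column used for stratified splitting
    use_full_train = False
    target_col = "label"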
28.745455
73
0.674889
226
1,581
4.486726
0.247788
0.069034
0.065089
0.04142
0.110454
0.061144
0.061144
0.061144
0.061144
0
0
0.013699
0.215054
1,581
54
74
29.277778
0.803384
0.056926
0
0
0
0
0
0
0
0
0
0
0
1
0.073171
false
0
0.121951
0
0.268293
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d87c366bf70803b5e5a62ba14bdd8953959d7029
419
py
Python
generate-text-replacements.py
clrcrl/tech-name-fixer
4d5ab36aa28a1e2912e02c5ea33a3f8af8d0e77b
[ "Apache-2.0" ]
null
null
null
generate-text-replacements.py
clrcrl/tech-name-fixer
4d5ab36aa28a1e2912e02c5ea33a3f8af8d0e77b
[ "Apache-2.0" ]
null
null
null
generate-text-replacements.py
clrcrl/tech-name-fixer
4d5ab36aa28a1e2912e02c5ea33a3f8af8d0e77b
[ "Apache-2.0" ]
null
null
null
import csv
import plistlib as plist

SOURCE_FILE = "tech-names.csv"

snippets_array = []

with open(SOURCE_FILE, "rt") as csvfile:
    reader = csv.DictReader(csvfile)
    # DictReader already consumes the header row, so no first-line flag is
    # needed (the original set an unused firstline variable).
    for row in reader:
        snippets_array.append(
            {"phrase": row["correct_spelling"], "shortcut": row["common_misspelling"]}
        )

with open("tech-names.plist", "wb") as fp:
    plist.dump(snippets_array, fp)
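A hypothetical sample of the expected tech-names.csv input; the header names come from the code above, the data rows are invented:

correct_spelling,common_misspelling
PostgreSQL,PostgresSQL
macOS,MacOS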
23.277778
86
0.661098
54
419
5
0.592593
0.144444
0
0
0
0
0
0
0
0
0
0
0.210024
419
17
87
24.647059
0.81571
0
0
0
0
0
0.195704
0
0
0
0
0
0
1
0
false
0
0.153846
0
0.153846
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d87dbe03e3d17cde827cd192191f97d4763ebc9a
4,524
py
Python
cjax/continuation/_perturbed_arc_len_continuation.py
harsh306/continuation-jax
c1452604558764df9cd4770130b60035eea5c5b3
[ "MIT" ]
2
2022-01-26T18:02:51.000Z
2022-02-15T01:36:39.000Z
cjax/continuation/_perturbed_arc_len_continuation.py
harsh306/continuation-jax
c1452604558764df9cd4770130b60035eea5c5b3
[ "MIT" ]
null
null
null
cjax/continuation/_perturbed_arc_len_continuation.py
harsh306/continuation-jax
c1452604558764df9cd4770130b60035eea5c5b3
[ "MIT" ]
1
2022-02-15T01:37:50.000Z
2022-02-15T01:37:50.000Z
from cjax.continuation._arc_len_continuation import PseudoArcLenContinuation
from cjax.continuation.states.state_variables import StateWriter
from cjax.continuation.methods.predictor.secant_predictor import SecantPredictor
from jax.experimental.optimizers import l2_norm
from cjax.continuation.methods.corrector.perturbed_constrained_corrector import (
    PerturbedCorrecter,
)
import copy
from cjax.utils.profiler import profile
import gc
from cjax.utils.math_trees import pytree_relative_error

# TODO: make **kwargs available


class PerturbedPseudoArcLenContinuation(PseudoArcLenContinuation):
    """Noisy Pseudo Arc-length Continuation strategy.

    Composed of secant predictor and noisy constrained corrector.
    """

    def __init__(
        self,
        state,
        bparam,
        state_0,
        bparam_0,
        counter,
        objective,
        dual_objective,
        hparams,
        key_state,
    ):
        super().__init__(
            state,
            bparam,
            state_0,
            bparam_0,
            counter,
            objective,
            dual_objective,
            hparams,
        )
        self.key_state = key_state

    @profile(sort_by="cumulative", lines_to_print=10, strip_dirs=True)
    def run(self):
        """Runs the continuation strategy.

        A continuation strategy that defines how predictor and corrector
        components of the algorithm interact with the states of the
        mathematical system.
        """
        self.sw = StateWriter(f"{self.output_file}/version_{self.key_state}.json")

        for i in range(self.continuation_steps):
            print(self._value_wrap.get_record(), self._bparam_wrap.get_record())
            self._state_wrap.counter = i
            self._bparam_wrap.counter = i
            self._value_wrap.counter = i
            self.sw.write(
                [
                    self._state_wrap.get_record(),
                    self._bparam_wrap.get_record(),
                    self._value_wrap.get_record(),
                ]
            )

            concat_states = [
                (self._state_wrap.state, self._bparam_wrap.state),
                (self._prev_state, self._prev_bparam),
                self.prev_secant_direction,
            ]

            predictor = SecantPredictor(
                concat_states=concat_states,
                delta_s=self._delta_s,
                omega=self._omega,
                net_spacing_param=self.hparams["net_spacing_param"],
                net_spacing_bparam=self.hparams["net_spacing_bparam"],
                hparams=self.hparams,
            )
            predictor.prediction_step()

            self.prev_secant_direction = predictor.secant_direction

            self.hparams["sphere_radius"] = (
                0.005 * self.hparams["omega"] * l2_norm(predictor.secant_direction)
            )
            concat_states = [
                predictor.state,
                predictor.bparam,
                predictor.secant_direction,
                predictor.get_secant_concat(),
            ]
            del predictor
            gc.collect()

            corrector = PerturbedCorrecter(
                optimizer=self.opt,
                objective=self.objective,
                dual_objective=self.dual_objective,
                lagrange_multiplier=self._lagrange_multiplier,
                concat_states=concat_states,
                delta_s=self._delta_s,
                ascent_opt=self.ascent_opt,
                key_state=self.key_state,
                compute_min_grad_fn=self.compute_min_grad_fn,
                compute_max_grad_fn=self.compute_max_grad_fn,
                compute_grad_fn=self.compute_grad_fn,
                hparams=self.hparams,
                pred_state=[self._state_wrap.state, self._bparam_wrap.state],
                pred_prev_state=[self._state_wrap.state, self._bparam_wrap.state],
                counter=self.continuation_steps,
            )
            self._prev_state = copy.deepcopy(self._state_wrap.state)
            self._prev_bparam = copy.deepcopy(self._bparam_wrap.state)

            state, bparam, quality = corrector.correction_step()
            value = self.value_func(state, bparam)
            print(
                "How far ....", pytree_relative_error(self._bparam_wrap.state, bparam)
            )
            self._state_wrap.state = state
            self._bparam_wrap.state = bparam
            self._value_wrap.state = value
            del corrector
            gc.collect()
36.192
100
0.600133
462
4,524
5.541126
0.279221
0.042188
0.049219
0.044531
0.225391
0.179297
0.156641
0.156641
0.142188
0.046875
0
0.003924
0.32405
4,524
124
101
36.483871
0.833224
0.070292
0
0.247619
0
0
0.029525
0.011522
0
0
0
0.008065
0
1
0.019048
false
0
0.085714
0
0.114286
0.028571
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d87e5d6a6c3a210e859a21073e4fe4f95aee7c09
1,345
py
Python
dependencytrack/bom.py
dmuse89/dependency-track-python
462d4a2b7ba5b1b1b0d0ea9066057872f5bd74bb
[ "CNRI-Python" ]
null
null
null
dependencytrack/bom.py
dmuse89/dependency-track-python
462d4a2b7ba5b1b1b0d0ea9066057872f5bd74bb
[ "CNRI-Python" ]
null
null
null
dependencytrack/bom.py
dmuse89/dependency-track-python
462d4a2b7ba5b1b1b0d0ea9066057872f5bd74bb
[ "CNRI-Python" ]
null
null
null
# SPDX-License-Identifier: GPL-2.0+

from .exceptions import DependencyTrackApiError


class Bom:
    """Class dedicated to all "bom" related endpoints"""

    def upload_bom(
        self,
        file_name,
        project_id=None,
        project_name=None,
        project_version=None,
        auto_create=False,
    ):
        """Upload a supported bill of material format document

        API Endpoint: POST /bom

        :return: UUID-Token
        :rtype: string
        :raises DependencyTrackApiError: if the REST call failed
        """
        multipart_form_data = {}
        multipart_form_data["bom"] = ("bom", open(file_name, "r"))
        if project_id:
            multipart_form_data["project"] = project_id
        if project_name:
            multipart_form_data["projectName"] = project_name
        if project_version:
            multipart_form_data["projectVersion"] = project_version
        multipart_form_data["autoCreate"] = auto_create

        response = self.session.post(
            self.api + "/bom",
            params=self.paginated_param_payload,
            files=multipart_form_data,
        )
        if response.status_code == 200:
            return response.json()
        else:
            # Plain string: the original used an f-string with no placeholders.
            description = "Unable to upload BOM file"
            raise DependencyTrackApiError(description, response)
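upload_bom relies on attributes supplied by the enclosing client (self.session, self.api, self.paginated_param_payload). A hedged sketch of how such a client might be wired; the class name and base-URL layout are assumptions inferred from the method body, not code from this package:

import requests

class DependencyTrackClient(Bom):  # hypothetical wiring, for illustration
    def __init__(self, base_url, api_key):
        self.api = base_url + "/api/v1"
        self.paginated_param_payload = {}
        self.session = requests.Session()
        # Dependency-Track authenticates REST calls with an X-Api-Key header.
        self.session.headers.update({"X-Api-Key": api_key})

# client = DependencyTrackClient("http://localhost:8081", "odt_...")
# token = client.upload_bom("bom.xml", project_name="demo", auto_create=True)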
29.888889
67
0.613383
142
1,345
5.591549
0.521127
0.11461
0.149874
0.06801
0.078086
0
0
0
0
0
0
0.005314
0.300372
1,345
44
68
30.568182
0.83847
0.186617
0
0
0
0
0.074856
0
0
0
0
0
0
1
0.034483
false
0
0.034483
0
0.137931
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d87f974916d2df6ce93e6643e73f56fff02c54aa
1,895
py
Python
Contributors/IanDavis/ValidSentence.py
FergusDevelopmentLLC/Coders-Workshop
3513bd5f79eaa85b4d2a648c5f343a224842325d
[ "MIT" ]
33
2019-12-02T23:29:47.000Z
2022-03-24T02:40:36.000Z
Contributors/IanDavis/ValidSentence.py
FergusDevelopmentLLC/Coders-Workshop
3513bd5f79eaa85b4d2a648c5f343a224842325d
[ "MIT" ]
39
2020-01-15T19:28:12.000Z
2021-11-26T05:13:29.000Z
Contributors/IanDavis/ValidSentence.py
FergusDevelopmentLLC/Coders-Workshop
3513bd5f79eaa85b4d2a648c5f343a224842325d
[ "MIT" ]
49
2019-12-02T23:29:53.000Z
2022-03-03T01:11:37.000Z
"""By Ian Davis for Bootcampers Collective Coders Workshop on 2/19/20"""
"""
This program evaluates a string and determines if it is a real sentence
"""

validString = 'This is a valid sentence.'
twoSpaces = "This  isn't valid"
firstCharacterNotCapitalized = 'not capitalized'
containsProperNoun = 'Only the first character can be capitalized Colorado'
lastCharNotTerminator = 'last not terminator'

validCharacters = [',', ';', ':', '.', '?', '!', "'", ' ']


def loopSentence(sentence):
    for i, char in enumerate(sentence[1:]):
        if (char.isalpha() or char in validCharacters):
            print(f'char {char} is valid')
            if sentence[i] == ' ':
                if sentence[i + 1] == ' ':
                    print(f'two spaces in a row')
                    return False
            if char.isupper():
                print('no proper nouns!')
                return False
        else:
            print(f'char {char} is not valid')
            return False
    return True


def checkLastLetterTerminator(sentence):
    if sentence[-1] not in ['.', '!', '?']:
        print(f'last character is not a sentence terminator')
        return False
    else:
        return True


def checkFirstLetterUppercase(sentence):
    if sentence[0].isupper():
        print('First letter of the sentence is Uppercase')
        return True
    else:
        print('First letter of the sentence is NOT Uppercase')
        return False


def combineTests(sentence):
    if (not checkFirstLetterUppercase(sentence) or not loopSentence(sentence)):
        print(f'TESTS FAILED on {sentence}\n')
    else:
        print(f'TESTS PASSED on {sentence}\n')


def main():
    combineTests(validString)
    combineTests(twoSpaces)
    combineTests(firstCharacterNotCapitalized)
    combineTests(containsProperNoun)
    combineTests(lastCharNotTerminator)


if __name__ == "__main__":
    main()
31.065574
80
0.624802
206
1,895
5.708738
0.393204
0.030612
0.017007
0.02381
0.079932
0.052721
0.052721
0
0
0
0
0.006461
0.264908
1,895
60
81
31.583333
0.83776
0.034829
0
0.255319
0
0
0.236948
0
0
0
0
0
0
1
0.106383
false
0.021277
0
0
0.276596
0.191489
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d87fb2cf3d6c3c5eaead08973e95a1d7f892f80b
1,269
py
Python
MirrorMirror/theme/TextBox.py
RubanSeven/MirrorMirror
47c7a1f458f87c536d068fcf249625f426920cc3
[ "Apache-2.0" ]
2
2021-07-07T13:21:11.000Z
2021-09-24T06:57:16.000Z
MirrorMirror/theme/TextBox.py
RubanSeven/MirrorMirror
47c7a1f458f87c536d068fcf249625f426920cc3
[ "Apache-2.0" ]
null
null
null
MirrorMirror/theme/TextBox.py
RubanSeven/MirrorMirror
47c7a1f458f87c536d068fcf249625f426920cc3
[ "Apache-2.0" ]
null
null
null
# -*- coding:utf-8 -*-
"""
@author: RubanSeven
@project: MirrorMirror
"""
from PyQt5.QtWidgets import *


class CodeTextEdit(QTextEdit):
    def __init__(self, *__args):
        super().__init__(*__args)
        self.setStyleSheet(
            """
            QTextEdit {
                background-color: rgb(83, 83, 83);
                border: 0px;
                font-size: 15px;
                color: rgb(214, 214, 214);
            }
            """
        )


class ParamLineEdit(QLineEdit):
    def __init__(self, *__args):
        super().__init__(*__args)
        self.setStyleSheet(
            """
            QLineEdit {
                background-color: rgb(46, 46, 46);
                border: 1px rgb(62, 62, 62);
                font-size: 15px;
                color: rgb(205, 205, 205);
                height: 30px;
            }
            """
        )


class LabelText(QLabel):
    def __init__(self, *__args):
        super().__init__(*__args)
        self.setStyleSheet(
            """
            QLabel {
                border: none;
                font-size: 13px;
                color: rgb(153, 153, 153);
            }
            """
        )
24.403846
51
0.408983
98
1,269
4.928571
0.438776
0.082816
0.068323
0.093168
0.362319
0.279503
0.279503
0.279503
0.279503
0
0
0.085329
0.473601
1,269
51
52
24.882353
0.637725
0.050433
0
0.5625
0
0
0
0
0
0
0
0
0
1
0.1875
false
0
0.0625
0
0.4375
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d8819ce394b5003e6d8376f1810f650835d534ec
1,429
py
Python
012getLncRNA_PMID.py
qiufengdiewu/LPInsider
92fcc2ad9e05cb634c4e3f1accd1220b984a027d
[ "Apache-2.0" ]
null
null
null
012getLncRNA_PMID.py
qiufengdiewu/LPInsider
92fcc2ad9e05cb634c4e3f1accd1220b984a027d
[ "Apache-2.0" ]
null
null
null
012getLncRNA_PMID.py
qiufengdiewu/LPInsider
92fcc2ad9e05cb634c4e3f1accd1220b984a027d
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/python
# -*- coding: UTF-8 -*-
from Bio import Entrez
import MySQLdb as mySQLDB

Entrez.email = "A.N.Other@example.com"


def savePID():
    returnCount = 100000  # Each request can return at most 100,000 records.
    handle = Entrez.esearch(db="pubmed", term="lncRNA", RetMax=returnCount)
    # These parameter values are sufficient for now, but that is not
    # guaranteed in the future. If the run fails, adjust the function
    # parameters according to the official documentation.
    record = Entrez.read(handle)
    print(record)
    idList = record["IdList"]
    count = record["Count"]
    print("Count: " + count)
    # Open the database connection
    db = mySQLDB.connect(host='127.0.0.1', user='root', passwd='11223366', db='ncrna', charset='utf8')
    print()
    # Use the cursor() method to obtain an operation cursor
    cursor = db.cursor()
    for i in range(0, int(count)):
        sql = "insert into lncrna_pmid (pmid) values(" + idList[i] + ")"
        try:
            # Execute the SQL statement
            cursor.execute(sql)
            # Commit to the database
            db.commit()
            # print(sql)
            # These inserts do not guarantee that the PMID is unique; the
            # workaround is to declare the pid column in the database as
            # UNIQUE to avoid the problem. The SQL statement is as follows:
            # CREATE TABLE `lncrna_pid` (
            #     `Id` int(11) NOT NULL AUTO_INCREMENT,
            #     `pid` int(11) NOT NULL DEFAULT '0',
            #     PRIMARY KEY (`Id`),
            #     UNIQUE KEY `pid` (`pid`)
            # ) ENGINE=InnoDB AUTO_INCREMENT=15374 DEFAULT CHARSET=utf8;
        except:
            # Rollback in case there is any error
            db.rollback()
            print("Error, can't insert data. " + str(sql))
    db.close()


if __name__ == "__main__":
    savePID()
34.02381
222
0.590623
163
1,429
5.104294
0.638037
0.028846
0.019231
0.028846
0
0
0
0
0
0
0
0.032474
0.26732
1,429
42
223
34.02381
0.762178
0.13366
0
0
0
0
0.171364
0.023675
0
0
0
0
0
1
0.04
false
0.04
0.08
0
0.12
0.16
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d8836803dce845ee19f76d28736903e2e7b8d35b
11,867
py
Python
scripts/generate_schema/worldbank/generate_worldbank_schema.py
liangmuxin/datamart
495a21588db39c9ad239409208bec701dca07f30
[ "MIT" ]
7
2018-10-02T01:32:23.000Z
2020-10-08T00:42:35.000Z
scripts/generate_schema/worldbank/generate_worldbank_schema.py
liangmuxin/datamart
495a21588db39c9ad239409208bec701dca07f30
[ "MIT" ]
47
2018-10-02T05:41:13.000Z
2021-02-02T21:50:31.000Z
scripts/generate_schema/worldbank/generate_worldbank_schema.py
liangmuxin/datamart
495a21588db39c9ad239409208bec701dca07f30
[ "MIT" ]
19
2018-10-01T22:27:20.000Z
2019-02-28T18:59:53.000Z
import os
from argparse import ArgumentParser
import requests
import json
import traceback

LOCATIONS = [
    "Aruba", "Afghanistan", "Africa", "Angola", "Albania", "Andorra",
    "Andean Region", "Arab World", "United Arab Emirates", "Argentina",
    "Armenia", "American Samoa", "Antigua and Barbuda", "Australia",
    "Austria", "Azerbaijan", "Burundi",
    "East Asia & Pacific (IBRD-only countries)",
    "Europe & Central Asia (IBRD-only countries)",
    "Belgium", "Benin", "Burkina Faso", "Bangladesh", "Bulgaria",
    "IBRD countries classified as high income", "Bahrain", "Bahamas, The",
    "Bosnia and Herzegovina",
    "Latin America & the Caribbean (IBRD-only countries)",
    "Belarus", "Belize",
    "Middle East & North Africa (IBRD-only countries)",
    "Bermuda", "Bolivia", "Brazil", "Barbados", "Brunei Darussalam",
    "Sub-Saharan Africa (IBRD-only countries)", "Bhutan", "Botswana",
    "Sub-Saharan Africa (IFC classification)", "Central African Republic",
    "Canada", "East Asia and the Pacific (IFC classification)",
    "Central Europe and the Baltics",
    "Europe and Central Asia (IFC classification)",
    "Switzerland", "Channel Islands", "Chile", "China", "Cote d'Ivoire",
    "Latin America and the Caribbean (IFC classification)",
    "Middle East and North Africa (IFC classification)",
    "Cameroon", "Congo, Dem. Rep.", "Congo, Rep.", "Colombia", "Comoros",
    "Cabo Verde", "Costa Rica", "South Asia (IFC classification)",
    "Caribbean small states", "Cuba", "Curacao", "Cayman Islands", "Cyprus",
    "Czech Republic", "East Asia & Pacific (IDA-eligible countries)",
    "Europe & Central Asia (IDA-eligible countries)", "Germany",
    "IDA countries classified as Fragile Situations", "Djibouti",
    "Latin America & the Caribbean (IDA-eligible countries)", "Dominica",
    "Middle East & North Africa (IDA-eligible countries)",
    "IDA countries not classified as Fragile Situations", "Denmark",
    "IDA countries in Sub-Saharan Africa not classified as fragile situations ",
    "Dominican Republic", "South Asia (IDA-eligible countries)",
    "IDA countries in Sub-Saharan Africa classified as fragile situations ",
    "Sub-Saharan Africa (IDA-eligible countries)",
    "IDA total, excluding Sub-Saharan Africa", "Algeria",
    "East Asia & Pacific (excluding high income)",
    "Early-demographic dividend", "East Asia & Pacific",
    "Europe & Central Asia (excluding high income)", "Europe & Central Asia",
    "Ecuador", "Egypt, Arab Rep.", "Euro area", "Eritrea", "Spain",
    "Estonia", "Ethiopia", "European Union",
    "Fragile and conflict affected situations", "Finland", "Fiji", "France",
    "Faroe Islands", "Micronesia, Fed. Sts.",
    "IDA countries classified as fragile situations, excluding Sub-Saharan Africa",
    "Gabon", "United Kingdom", "Georgia", "Ghana", "Gibraltar", "Guinea",
    "Gambia, The", "Guinea-Bissau", "Equatorial Guinea", "Greece", "Grenada",
    "Greenland", "Guatemala", "Guam", "Guyana", "High income",
    "Hong Kong SAR, China", "Honduras",
    "Heavily indebted poor countries (HIPC)", "Croatia", "Haiti", "Hungary",
    "IBRD, including blend", "IBRD only", "IDA & IBRD total", "IDA total",
    "IDA blend", "Indonesia", "IDA only", "Isle of Man", "India",
    "Not classified", "Ireland", "Iran, Islamic Rep.", "Iraq", "Iceland",
    "Israel", "Italy", "Jamaica", "Jordan", "Japan", "Kazakhstan", "Kenya",
    "Kyrgyz Republic", "Cambodia", "Kiribati", "St. Kitts and Nevis",
    "Korea, Rep.", "Kuwait",
    "Latin America & Caribbean (excluding high income)", "Lao PDR",
    "Lebanon", "Liberia", "Libya", "St. Lucia", "Latin America & Caribbean ",
    "Latin America and the Caribbean",
    "Least developed countries: UN classification",
    "Low income", "Liechtenstein", "Sri Lanka", "Lower middle income",
    "Low & middle income", "Lesotho", "Late-demographic dividend",
    "Lithuania", "Luxembourg", "Latvia", "Macao SAR, China",
    "St. Martin (French part)", "Morocco", "Central America", "Monaco",
    "Moldova", "Middle East (developing only)", "Madagascar", "Maldives",
    "Middle East & North Africa", "Mexico", "Marshall Islands",
    "Middle income", "Macedonia, FYR", "Mali", "Malta", "Myanmar",
    "Middle East & North Africa (excluding high income)", "Montenegro",
    "Mongolia", "Northern Mariana Islands", "Mozambique", "Mauritania",
    "Mauritius", "Malawi", "Malaysia", "North America", "North Africa",
    "Namibia", "New Caledonia", "Niger", "Nigeria", "Nicaragua",
    "Netherlands",
    "Non-resource rich Sub-Saharan Africa countries, of which landlocked",
    "Norway", "Nepal", "Non-resource rich Sub-Saharan Africa countries",
    "Nauru",
    "IDA countries not classified as fragile situations, excluding Sub-Saharan Africa",
    "New Zealand", "OECD members", "Oman", "Other small states", "Pakistan",
    "Panama", "Peru", "Philippines", "Palau", "Papua New Guinea", "Poland",
    "Pre-demographic dividend", "Puerto Rico", "Korea, Dem. People’s Rep.",
    "Portugal", "Paraguay", "West Bank and Gaza",
    "Pacific island small states", "Post-demographic dividend",
    "French Polynesia", "Qatar", "Romania",
    "Resource rich Sub-Saharan Africa countries",
    "Resource rich Sub-Saharan Africa countries, of which oil exporters",
    "Russian Federation", "Rwanda", "South Asia", "Saudi Arabia",
    "Southern Cone", "Sudan", "Senegal", "Singapore", "Solomon Islands",
    "Sierra Leone", "El Salvador", "San Marino", "Somalia", "Serbia",
    "Sub-Saharan Africa (excluding high income)", "South Sudan",
    "Sub-Saharan Africa ", "Small states", "Sao Tome and Principe",
    "Suriname", "Slovak Republic", "Slovenia", "Sweden", "Eswatini",
    "Sint Maarten (Dutch part)",
    "Sub-Saharan Africa excluding South Africa", "Seychelles",
    "Syrian Arab Republic", "Turks and Caicos Islands", "Chad",
    "East Asia & Pacific (IDA & IBRD countries)",
    "Europe & Central Asia (IDA & IBRD countries)", "Togo", "Thailand",
    "Tajikistan", "Turkmenistan",
    "Latin America & the Caribbean (IDA & IBRD countries)", "Timor-Leste",
    "Middle East & North Africa (IDA & IBRD countries)", "Tonga",
    "South Asia (IDA & IBRD)", "Sub-Saharan Africa (IDA & IBRD countries)",
    "Trinidad and Tobago", "Tunisia", "Turkey", "Tuvalu", "Taiwan, China",
    "Tanzania", "Uganda", "Ukraine", "Upper middle income", "Uruguay",
    "United States", "Uzbekistan", "St. Vincent and the Grenadines",
    "Venezuela, RB", "British Virgin Islands", "Virgin Islands (U.S.)",
    "Vietnam", "Vanuatu", "World", "Samoa", "Kosovo",
    "Sub-Saharan Africa excluding South Africa and Nigeria", "Yemen, Rep.",
    "South Africa", "Zambia", "Zimbabwe"
]


def getAllIndicatorList():
    url = "https://api.worldbank.org/v2/indicators?format=json&page=1"
    res = requests.get(url)
    data = res.json()
    total = data[0]['total']
    url2 = "https://api.worldbank.org/v2/indicators?format=json&page=1&per_page=" + str(total)
    res2 = requests.get(url2)
    data2 = res2.json()
    return data2[1]


def generate_json_schema(dst_path):
    unique_urls_str = getAllIndicatorList()
    for commondata in unique_urls_str:
        try:
            urldata = "https://api.worldbank.org/v2/countries/indicators/" + commondata['id'] + "?format=json"
            resdata = requests.get(urldata)
            data_ind = resdata.json()
            print("Generating schema for World Bank indicator", commondata['name'])
            schema = {}
            schema["title"] = commondata['name']
            schema["description"] = commondata['sourceNote']
            schema["url"] = "https://api.worldbank.org/v2/indicators/" + commondata['id'] + "?format=json"
            schema["keywords"] = [i for i in commondata['name'].split()]
            schema["date_updated"] = data_ind[0]["lastupdated"] if data_ind else None
            schema["license"] = None
            schema["provenance"] = {"source": "http://worldbank.org"}
            schema["original_identifier"] = commondata['id']
            schema["materialization"] = {
                "python_path": "worldbank_materializer",
                "arguments": {
                    "url": "https://api.worldbank.org/v2/indicators/" + commondata['id'] + "?format=json"
                }
            }
            schema['variables'] = []
            first_col = {
                "name": "indicator_id",
                "description": "id is identifier of an indicator in worldbank datasets",
                "semantic_type": ["https://metadata.datadrivendiscovery.org/types/CategoricalData"]
            }
            second_col = {
                "name": "indicator_value",
                "description": "name of an indicator in worldbank datasets",
                "semantic_type": ["http://schema.org/Text"]
            }
            third_col = {
                "name": "unit",
                "description": "unit of value returned by this indicator for a particular country",
                "semantic_type": ["https://metadata.datadrivendiscovery.org/types/CategoricalData"]
            }
            fourth_col = {
                "name": "sourceNote",
                "description": "Long description of the indicator",
                "semantic_type": ["http://schema.org/Text"]
            }
            fifth_col = {
                "name": "sourceOrganization",
                "description": "Source organization from where Worldbank acquired this data",
                "semantic_type": ["http://schema.org/Text"]
            }
            sixth_col = {
                "name": "country_value",
                "description": "Country for which indicator value is returned",
                "semantic_type": ["https://metadata.datadrivendiscovery.org/types/Location"],
                "named_entity": LOCATIONS
            }
            seventh_col = {
                "name": "countryiso3code",
                "description": "Country iso code for which indicator value is returned",
                "semantic_type": ["https://metadata.datadrivendiscovery.org/types/Location"]
            }
            eighth_col = {
                "name": "date",
                "description": "date for which indicator value is returned for a particular country",
                "semantic_type": ["https://metadata.datadrivendiscovery.org/types/Time"],
                "temporal_coverage": {"start": "1950", "end": "2100"}
            }
            schema['variables'].append(first_col)
            schema['variables'].append(second_col)
            schema['variables'].append(third_col)
            schema['variables'].append(fourth_col)
            schema['variables'].append(fifth_col)
            schema['variables'].append(sixth_col)
            schema['variables'].append(seventh_col)
            schema['variables'].append(eighth_col)
            if dst_path:
                os.makedirs(dst_path + '/worldbank_schema', exist_ok=True)
                file = os.path.join(dst_path, 'worldbank_schema', "{}_description.json".format(commondata['id']))
            else:
                # Use a consistent directory name: the original created
                # 'WorldBank_schema' but then wrote into 'worldbank_schema'.
                os.makedirs('worldbank_schema', exist_ok=True)
                file = os.path.join('worldbank_schema', "{}_description.json".format(commondata['id']))
            with open(file, "w") as fp:
                json.dump(schema, fp, indent=2)
        except:
            traceback.print_exc()


if __name__ == '__main__':
    parser = ArgumentParser()
    parser.add_argument("-o", "--dst", action="store", type=str, dest="dst_path")
    args, _ = parser.parse_known_args()
    generate_json_schema(args.dst_path)
27.469907
110
0.605292
1,219
11,867
5.83347
0.400328
0.023907
0.038251
0.023625
0.259598
0.208128
0.166362
0.138096
0.088314
0.077064
0
0.002908
0.24665
11,867
431
111
27.533643
0.792506
0
0
0.01699
0
0.002427
0.574703
0.003623
0
0
0
0
0
1
0.004854
false
0.002427
0.012136
0
0.019417
0.004854
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d88413a8c6025245623ff27f30f5b74590dab51a
1,546
py
Python
examples/lineplots/devol.py
aengelke/z-plot
63e4e6656355b608487a3e4df5da13b7fad9b108
[ "BSD-3-Clause" ]
22
2016-10-19T15:02:22.000Z
2021-12-23T12:40:37.000Z
examples/lineplots/devol.py
aengelke/z-plot
63e4e6656355b608487a3e4df5da13b7fad9b108
[ "BSD-3-Clause" ]
4
2017-04-16T03:15:48.000Z
2020-10-28T11:36:35.000Z
examples/lineplots/devol.py
aengelke/z-plot
63e4e6656355b608487a3e4df5da13b7fad9b108
[ "BSD-3-Clause" ]
11
2017-01-18T02:41:57.000Z
2021-12-28T02:21:30.000Z
#! /usr/bin/env python

from zplot import *
import sys

ctype = 'eps' if len(sys.argv) < 2 else sys.argv[1]
c = canvas(ctype, title='devol', dimensions=['400', '340'])

t = table(file='devol.data')
t.addcolumns(['month', 'year'])
t.update(set='month = substr(date, 1, 2)')
t.update(set='year = substr(date, 4, 2)')

d = drawable(canvas=c, xrange=[-1, t.getmax(column='rownumber') + 1],
             yrange=[0, 2000], coord=[40, 40], dimensions=[350, 270])

grid(drawable=d, ystep=200, xstep=1, linecolor='lightgrey')

axis(drawable=d, style='y', yauto=['', '', 200])
axis(drawable=d, style='x', xmanual=t.getaxislabels(column='month'),
     xlabelrotate=90, xlabelanchor='r,c', xlabelfontsize=7,
     title='Number of Inquiries Per Month', titlesize=8,
     titlefont='Courier-Bold', xtitle='Year and Month', xtitleshift=[0, -15])

# Just pick out the unique years that show up and use them to label the axis
years, xlabels = [], []
for label in t.getaxislabels(column='year'):
    if label[0] not in years:
        years.append(label[0])
        xlabels.append(label)

axis(drawable=d, style='x', xmanual=xlabels, linewidth=0,
     xlabelshift=[5, -15], xlabelrotate=0, xlabelanchor='r,c',
     xlabelfontsize=7, xlabelformat='\'%s')

p = plotter()
p.line(drawable=d, table=t, xfield='rownumber', yfield='value',
       stairstep=True, linecolor='purple', labelfield='value', labelsize=7,
       labelcolor='purple', labelshift=[6, 0], labelrotate=90,
       labelanchor='l,c')

c.circle(coord=d.map([10.5, 463]), radius=20, linecolor='red')

c.render()
35.136364
80
0.666882
229
1,546
4.502183
0.558952
0.043647
0.037827
0.052376
0.106693
0.050436
0
0
0
0
0
0.048084
0.139069
1,546
43
81
35.953488
0.726521
0.062096
0
0.064516
0
0.032258
0.147099
0
0
0
0
0
0
1
0
false
0
0.096774
0
0.096774
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d88462214366d2d59a4aa76c3e990d20a7d331bd
4,208
py
Python
DLplatform/aggregating/geometric_median.py
chelseajohn/dlplatform
429e42c598039d1e9fd1df3da4247f391915a31b
[ "Apache-2.0" ]
5
2020-05-05T08:54:26.000Z
2021-02-20T07:36:28.000Z
DLplatform/aggregating/geometric_median.py
zagazao/dlplatform
ab32af8f89cfec4b478203bd5d13ce2d30e89ba7
[ "Apache-2.0" ]
1
2020-11-16T14:15:53.000Z
2020-11-16T14:15:53.000Z
DLplatform/aggregating/geometric_median.py
zagazao/dlplatform
ab32af8f89cfec4b478203bd5d13ce2d30e89ba7
[ "Apache-2.0" ]
4
2020-05-05T08:56:57.000Z
2020-07-22T11:28:52.000Z
from DLplatform.aggregating import Aggregator
from DLplatform.parameters import Parameters
from typing import List
import numpy as np
from scipy.spatial.distance import cdist, euclidean


class GeometricMedian(Aggregator):
    '''
    Provides a method to calculate an aggregated model from n individual
    models, using the geometric median
    '''

    def __init__(self, name="Geometric median"):
        '''
        Returns
        -------
        None
        '''
        Aggregator.__init__(self, name=name)

    def calculateDivergence(self, param1, param2):
        if type(param1) is np.ndarray:
            return np.linalg.norm(param1 - param2)
        else:
            return param1.distance(param2)

    def __call__(self, params: List[Parameters]) -> Parameters:
        '''
        This aggregator takes n lists of model parameters and returns the
        parameter vector that is their geometric median.

        Parameters
        ----------
        params
            A list of Parameters objects. These objects support addition
            and scalar multiplication.

        Returns
        -------
        A new parameter object that is the geometric median of params.
        '''
        Z = []
        for param in params:
            Z_i = param.toVector()
            Z.append(Z_i)
        Z = np.array(Z)
        # TODO: check that the shape is correct (that is, that no transpose is required)
        gm = self.calcGeometricMedian(Z)  # computes the GM for a numpy array
        # By copying the parameters object, we ensure that the shape
        # information is preserved.
        newParam = params[0].getCopy()
        newParam.fromVector(gm)
        return newParam

    def calcGeometricMedian(self, X, eps=1e-5, mat_iter=10e6):
        # Weiszfeld's algorithm: an iteratively re-weighted mean that
        # converges to the point minimizing the sum of Euclidean distances.
        y = np.mean(X, 0)
        iterCount = 0
        while iterCount <= mat_iter:
            D = cdist(X, [y])
            nonzeros = (D != 0)[:, 0]
            Dinv = 1 / D[nonzeros]
            Dinvs = np.sum(Dinv)
            W = Dinv / Dinvs
            T = np.sum(W * X[nonzeros], 0)
            num_zeros = len(X) - np.sum(nonzeros)
            if num_zeros == 0:
                y1 = T
            elif num_zeros == len(X):
                return y
            else:
                R = (T - y) * Dinvs
                r = np.linalg.norm(R)
                rinv = 0 if r == 0 else num_zeros / r
                y1 = max(0, 1 - rinv) * T + min(1, rinv) * y
            if euclidean(y, y1) < eps:
                return y1
            y = y1
            iterCount += 1

    def __str__(self):
        return "Geometric median"


# def setToGeometricMedian(self, params: List):
#     models = params
#
#     shapes = []
#     b = []
#     once = True
#     newWeightsList = []
#     try:
#         for i, model in enumerate(models):
#             w2 = model.get()
#             c = []
#             c = np.array(c)
#             for i in range(len(w2)):
#                 z = np.array(w2[i])
#                 # if len(shapes) < 8:
#                 shapes.append(z.shape)
#                 d = np.array(w2[i].flatten()).squeeze()
#                 c = np.concatenate([c, d])
#             if (once):
#                 b = np.zeros_like(c)
#                 b[:] = c[:]
#                 once = False
#             else:
#                 once = False
#                 b = np.concatenate([b.reshape((-1, 1)), c.reshape((-1, 1))], axis=1)
#         median_val = np.array(b[0])  # hd.geomedian(b))
#         sizes = []
#         for j in shapes:
#             size = 1
#             for k in j:
#                 size *= k
#             sizes.append(size)
#         newWeightsList = []
#
#         chunks = []
#         count = 0
#         for size in sizes:
#             chunks.append([median_val[i + count] for i in range(size)])
#             count += size
#         for chunk, i in zip(chunks, range(len(shapes))):
#             newWeightsList.append(np.array(chunk).reshape(shapes[i]))
#
#     except Exception as e:
#         print("Error happened! Message is ", e)
#     self.set(newWeightsList)
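A quick, hedged check of the Weiszfeld iteration above. calcGeometricMedian never touches self, so it can be exercised without constructing the full DLplatform aggregator; the point set is a made-up example:

import numpy as np

X = np.array([[0.0, 0.0], [1.0, 0.0], [0.0, 1.0], [10.0, 10.0]])
gm = GeometricMedian.calcGeometricMedian(None, X)  # self is unused here
# Unlike the mean, the geometric median stays near the cluster of three
# points rather than being dragged toward the (10, 10) outlier.
print(gm)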
31.402985
122
0.481939
466
4,208
4.293991
0.362661
0.02099
0.011994
0.011994
0
0
0
0
0
0
0
0.017642
0.407319
4,208
133
123
31.639098
0.784683
0.512595
0
0.040816
0
0
0.017094
0
0
0
0
0.007519
0
1
0.102041
false
0
0.102041
0.020408
0.346939
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d88610b31c7b5f25ebda49d4c2f961d36945c83b
4,670
py
Python
stock_price_predictor/app.py
abdullahtk/Stock-Market-Predictor
1e97d5d2c647912447b9db8eb548e52c0ad1fe8a
[ "MIT" ]
3
2019-07-25T22:41:38.000Z
2021-04-06T04:37:05.000Z
stock_price_predictor/app.py
abdullahtk/Stock-Market-Predictor
1e97d5d2c647912447b9db8eb548e52c0ad1fe8a
[ "MIT" ]
2
2019-07-13T15:36:06.000Z
2021-06-01T23:56:50.000Z
stock_price_predictor/app.py
abdullahtk/Stock-Market-Predictor
1e97d5d2c647912447b9db8eb548e52c0ad1fe8a
[ "MIT" ]
1
2019-07-25T22:42:03.000Z
2019-07-25T22:42:03.000Z
from flask import Flask from flask import render_template, request, jsonify from source import StockPredictor as sp from source import ModelsParametersTunning as mpt from datetime import datetime import json from plotly.graph_objs import Scatter from pandas.tseries.offsets import BDay app = Flask(__name__) def install_and_import(package): import importlib try: importlib.import_module(package) except ImportError: import pip pip.main(['install', package]) finally: globals()[package] = importlib.import_module(package) install_and_import('plotly') @app.route('/') def index(): print('index') return render_template('master.html') @app.route('/go', methods=['GET', 'POST']) def go(): # save user input in query query = request.values print('go') tickers = [] ticker_of_interest = request.values.get('ticker') tickers.append(ticker_of_interest) tickers.append('SPY') start_date_str = request.values.get('start_date') start_date = datetime.strptime(start_date_str, '%Y-%m-%d').date() end_date_str = request.values.get('end_date') end_date = datetime.strptime(end_date_str, '%Y-%m-%d').date() prediction_date_str = request.values.get('prediction_date') prediction_date = datetime.strptime(prediction_date_str, '%Y-%m-%d').date() number_of_days = request.values.get('number_of_days') if (number_of_days == ""): number_of_days = 5 df = sp.getData(tickers , start_date.strftime("%Y-%m-%d"), '2019-07-08') more_features = sp.introduce_features(df, ticker_of_interest,tickers,number_of_days) data_dict = sp.split_data(more_features, ticker_of_interest, end_date) all_features_normed = data_dict['all_features_normed'] all_target = data_dict['all_target'] training_features_normed = data_dict["training_features_normed"] training_target = data_dict["training_target"] small_features_normed = data_dict["small_features_normed"] small_target = data_dict["small_target"] features_validation_normed = data_dict["features_validation_normed"] future_price_validation = data_dict["future_price_validation"] price_validation = data_dict["price_validation"] highest_model, highest_score = sp.pick_best_regressor(small_features_normed, small_target, features_validation_normed, future_price_validation) tunned_model = mpt.tune_parameters(highest_model.__class__.__name__, small_features_normed, small_target, features_validation_normed, future_price_validation) model = tunned_model.fit(all_features_normed,all_target) predictions = sp.predict_n_days(model, all_features_normed, prediction_date, number_of_days) real_data = df[predictions['Date'][0]:predictions['Date'][0]+BDay(int(number_of_days)-1)][ticker_of_interest] pct = [abs(float(r)-float(p))/float(r)*100 for r,p in zip(real_data,predictions['Predicted Price'])] # Plot closing prices graphs = [ { 'data': [ Scatter( x=df[ticker_of_interest].index, y=df[ticker_of_interest], ) ], 'layout': { 'title': 'Adjusted Close Price' , 'yaxis': { 'title': "Price" }, 'xaxis': { 'title': "Date" } } }, { 'data': [ Scatter( x=predictions['Date'], y=predictions['Predicted Price'], name= 'Predicted Price', ), Scatter( x=predictions['Date'], y=real_data, name= 'Actual Price', ), Scatter( x=predictions['Date'], y=pct, name= 'PCT', yaxis= 'y2', line = dict( width = 1, dash = 'dash') ) ], 'layout': { 'title': 'Predicted Adjusted Close Price' , 'xaxis': { 'title': "Date" }, 'yaxis': { 'title': "Price" }, 'yaxis2': { 'title': 'Actual vs. Predicted', 'overlaying': 'y', 'side': 'right' } } }] ids = ["graph-{}".format(i) for i, _ in enumerate(graphs)] graphJSON = json.dumps(graphs, cls=plotly.utils.PlotlyJSONEncoder) return render_template('go.html', query=query , df=data_dict, ids=ids, graphJSON=graphJSON) if __name__ == '__main__': app.run(debug=True)
33.357143
162
0.601927
520
4,670
5.107692
0.294231
0.033133
0.042169
0.02259
0.190136
0.111069
0.056476
0.056476
0.056476
0.056476
0
0.005341
0.278373
4,670
139
163
33.597122
0.782789
0.009422
0
0.194915
0
0
0.130651
0.020333
0
0
0
0
0
1
0.025424
false
0
0.127119
0
0.169492
0.016949
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d8861a99b9bc42b6ddf7bfddb993259aa9336913
720
py
Python
generator/migrations/0001_initial.py
danielcoker/random-uuid
0cf459eb2a838923e9ee5f17e20dbd9ece27a4ce
[ "MIT" ]
null
null
null
generator/migrations/0001_initial.py
danielcoker/random-uuid
0cf459eb2a838923e9ee5f17e20dbd9ece27a4ce
[ "MIT" ]
null
null
null
generator/migrations/0001_initial.py
danielcoker/random-uuid
0cf459eb2a838923e9ee5f17e20dbd9ece27a4ce
[ "MIT" ]
null
null
null
# Generated by Django 3.2.11 on 2022-01-18 12:43 from django.db import migrations, models import generator.models class Migration(migrations.Migration): initial = True dependencies = [ ] operations = [ migrations.CreateModel( name='Vault', fields=[ ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')), ('uuid', models.CharField(default=generator.models.generate_uuid, max_length=40, unique=True)), ('created_at', models.DateTimeField(auto_now_add=True)), ], options={ 'ordering': ['-created_at'], }, ), ]
26.666667
117
0.579167
73
720
5.589041
0.69863
0.073529
0
0
0
0
0
0
0
0
0
0.035573
0.297222
720
26
118
27.692308
0.770751
0.063889
0
0
1
0
0.0625
0
0
0
0
0
0
1
0
false
0
0.105263
0
0.315789
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
d886d8c696a1ebb65d655949115462c4ca60e2a4
274
py
Python
vmraid/patches/v6_0/document_type_rename.py
sowrisurya/vmraid
f833e00978019dad87af80b41279c0146c063ed5
[ "MIT" ]
null
null
null
vmraid/patches/v6_0/document_type_rename.py
sowrisurya/vmraid
f833e00978019dad87af80b41279c0146c063ed5
[ "MIT" ]
null
null
null
vmraid/patches/v6_0/document_type_rename.py
sowrisurya/vmraid
f833e00978019dad87af80b41279c0146c063ed5
[ "MIT" ]
null
null
null
from __future__ import unicode_literals import vmraid def execute(): vmraid.db.sql("""update tabDocType set document_type='Document' where document_type='Transaction'""") vmraid.db.sql("""update tabDocType set document_type='Setup' where document_type='Master'""")
30.444444
64
0.766423
35
274
5.742857
0.542857
0.238806
0.109453
0.169154
0.41791
0.41791
0.41791
0.41791
0
0
0
0
0.105839
274
8
65
34.25
0.820408
0
0
0
0
0
0.569343
0.343066
0
0
0
0
0
1
0.142857
true
0
0.285714
0
0.428571
0
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
d88805a961c2bd2e72ffebe23a8679a83ce2e0f0
1,529
py
Python
biostar/recipes/migrations/0008_label.py
Oribyne/biostar-central-fork
89a459bc66d438518cf968c3f2e3ce1f50573b9e
[ "MIT" ]
477
2015-01-01T00:18:54.000Z
2022-03-21T10:29:29.000Z
biostar/recipes/migrations/0008_label.py
coreydipsy/biostar-central
2b2f09199a6332877885ef54d9ac588ed0765770
[ "MIT" ]
247
2015-01-02T08:12:03.000Z
2022-02-24T15:20:58.000Z
biostar/recipes/migrations/0008_label.py
coreydipsy/biostar-central
2b2f09199a6332877885ef54d9ac588ed0765770
[ "MIT" ]
251
2015-01-01T16:05:57.000Z
2022-03-25T21:32:44.000Z
# Generated by Django 3.0.2 on 2020-02-17 22:42 from django.db import migrations, models import hjson import toml def init_toml(apps, schema_editor): Recipes = apps.get_model('recipes', 'Analysis') for recipe in Recipes.objects.all(): # Load the json data json_data = hjson.loads(recipe.json_text) toml_text = toml.dumps(json_data) # Bypass modifying the date on each recipe Recipes.objects.filter(id=recipe.id).update(json_text=toml_text) def init_label(apps, schema_editor): Project = apps.get_model('recipes', 'Project') for project in Project.objects.all(): # Bypass modifying the date on each project Project.objects.filter(id=project.id).update(label=project.uid) class Migration(migrations.Migration): dependencies = [ ('recipes', '0007_counts'), ] operations = [ migrations.RemoveField( model_name='analysis', name='diff_author', ), migrations.RemoveField( model_name='analysis', name='diff_date', ), migrations.AddField( model_name='project', name='label', field=models.CharField(max_length=32, null=True, unique=True), ), migrations.AlterField( model_name='analysis', name='json_text', field=models.TextField(default='', max_length=10000), ), migrations.RunPython(init_toml), migrations.RunPython(init_label), ]
27.8
74
0.620013
175
1,529
5.28
0.422857
0.038961
0.055195
0.068182
0.160173
0.160173
0.099567
0
0
0
0
0.023235
0.268149
1,529
54
75
28.314815
0.802502
0.096141
0
0.230769
1
0
0.081336
0
0
0
0
0
0
1
0.051282
false
0
0.076923
0
0.205128
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
d88917a3681f54f58e6f0236e05d538383e5fe13
1,100
py
Python
src/unittest/python/modules/processing/filter_lt_tests.py
FHNW-CyberCaptain/CyberCaptain
07c989190e997353fbf57eb7a386947d6ab8ffd5
[ "MIT" ]
1
2018-10-01T10:59:55.000Z
2018-10-01T10:59:55.000Z
src/unittest/python/modules/processing/filter_lt_tests.py
FHNW-CyberCaptain/CyberCaptain
07c989190e997353fbf57eb7a386947d6ab8ffd5
[ "MIT" ]
null
null
null
src/unittest/python/modules/processing/filter_lt_tests.py
FHNW-CyberCaptain/CyberCaptain
07c989190e997353fbf57eb7a386947d6ab8ffd5
[ "MIT" ]
1
2021-11-01T00:09:00.000Z
2021-11-01T00:09:00.000Z
import unittest from cybercaptain.processing.filter import processing_filter class ProcessingFilterLTTest(unittest.TestCase): """ Test the filters for LT """ def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) arguments = {'src': '.', 'filterby': 'LT', 'rule': 'LT 500', 'target': '.'} self.processing = processing_filter(**arguments) def test_LT_positive(self): """ Test if the filter passes LT correctly. """ # border line test self.assertTrue(self.processing.filter({"LT":499}), 'should not be filtered') # deep test self.assertTrue(self.processing.filter({"LT":400}), 'should not be filtered') def test_LT_negative(self): """ Test if the filter fails LT correctly. """ # border line test self.assertFalse(self.processing.filter({"LT":500}), 'should be filtered') # deep test self.assertFalse(self.processing.filter({"LT":600}), 'should be filtered')
32.352941
85
0.579091
116
1,100
5.37069
0.37931
0.179775
0.128411
0.141252
0.433387
0.327448
0.260032
0
0
0
0
0.019133
0.287273
1,100
33
86
33.333333
0.77551
0.142727
0
0
0
0
0.135535
0
0
0
0
0
0.25
1
0.1875
false
0
0.125
0
0.375
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d889b29c8ab7230fa4821ccc373f7fe3a359f78f
6,129
py
Python
src/data/clean.py
samatix/ml-asset-managers
27c9c0b3f67fd0350e80c5fb2729e64a13dccbb8
[ "Apache-2.0" ]
2
2022-01-01T11:06:22.000Z
2022-02-19T03:19:18.000Z
src/data/clean.py
samatix/ml-asset-managers
27c9c0b3f67fd0350e80c5fb2729e64a13dccbb8
[ "Apache-2.0" ]
null
null
null
src/data/clean.py
samatix/ml-asset-managers
27c9c0b3f67fd0350e80c5fb2729e64a13dccbb8
[ "Apache-2.0" ]
2
2020-08-15T05:38:49.000Z
2022-03-05T07:31:11.000Z
import logging import numpy as np import pandas as pd from sklearn.neighbors.kde import KernelDensity from scipy.optimize import minimize from src.utils import cov2corr class MarcenkoPastur: def __init__(self, points=1000): """ Marcenko-Pastur :param points: :type points: int :return: The Marcenko-Pastur probability density function :rtype: pd.Series """ self.points = points self.eigen_max = None def pdf(self, var, q): """ :param var: The variance :type var: float :param q: N/T, the ratio of the number of observations to the number of dates :type q: float :return: :rtype: """ if isinstance(var, np.ndarray): var = var.item() eigen_min = var * (1 - (1. / q) ** .5) ** 2 eigen_max = var * (1 + (1. / q) ** .5) ** 2 eigen_values = np.linspace(eigen_min, eigen_max, self.points) pdf = q / (2 * np.pi * var * eigen_values) * ( (eigen_max - eigen_values) * ( eigen_values - eigen_min)) ** .5 pdf = pd.Series(pdf, index=eigen_values) return pdf def err_pdfs(self, var, eigenvalues, q, bandwidth): pdf0 = self.pdf(var, q) pdf1 = fit_kde( eigenvalues, bandwidth, x=pdf0.index.values.reshape(-1, 1) ) sse = np.sum((pdf1 - pdf0) ** 2) return sse def fit(self, eigenvalues, q, bandwidth): func = lambda *x: self.err_pdfs(*x) x0 = 0.5 out = minimize(func, x0, args=(eigenvalues, q, bandwidth), bounds=((1E-5, 1 - 1E-5),)) if out['success']: var = out['x'][0] else: var = 1 eigen_max = var * (1 + (1. / q) ** 0.5) ** 2 self.eigen_max = eigen_max return eigen_max, var def facts_number(self, eigenvalues): if self.eigen_max is not None: return eigenvalues.shape[0] - \ np.diag(eigenvalues)[::-1].searchsorted(self.eigen_max) else: raise ValueError(f"Eigen max is not calculated. Please " f"run the fit method before calculating the " f"facts number") def _denoise_constant_residual(self, eigenvalues, eigenvectors): facts_number = self.facts_number(eigenvalues) eigenvalues_ = eigenvalues.diagonal().copy() # Denoising by making constant the eigen values past facts_number eigenvalues_[facts_number:] = eigenvalues_[ facts_number:].sum() / float( eigenvalues_.shape[0] - facts_number) eigenvalues_ = np.diag(eigenvalues_) cov = eigenvectors @ eigenvalues_ @ eigenvectors.T # Rescaling return cov2corr(cov) def _denoise_shrink(self, eigenvalues, eigenvectors, alpha=0): # Eigenvalues and eigenvectors corresponding # to the eigenvalues less than the max value facts_number = self.facts_number(eigenvalues) eigenvalues_l = eigenvalues[:facts_number, :facts_number] eigenvectors_l = eigenvectors[:, :facts_number] # Eigenvalues and eigenvectors corresponding # to the eigenvalues more than the max value eigenvalues_r = eigenvalues[facts_number:, facts_number:] eigenvectors_r = eigenvectors[:, facts_number:] corr_l = eigenvectors_l @ eigenvalues_l @ eigenvectors_l.T corr_r = eigenvectors_r @ eigenvalues_r @ eigenvectors_r.T return corr_l + alpha * corr_r + (1 - alpha) * np.diag( corr_r.diagonal()) def denoise(self, eigenvalues, eigenvectors, method="constant", alpha=0): """ Remove noise from corr by fixing random eigenvalues :param eigenvalues: :type eigenvalues: :param eigenvectors: :type eigenvectors: :param method: :type method: str :param alpha: :type alpha: :return: :rtype: """ if method == "constant": return self._denoise_constant_residual(eigenvalues, eigenvectors) elif method == "shrink": return self._denoise_shrink(eigenvalues, eigenvectors, alpha=alpha) else: raise ValueError(f"The only available denoising methods are " f"'constant' or 'shrink'. The method provided is " f"{method}") def detone(self, eigenvalues, eigenvectors): # Test if the correlation matrix has a market component eigenvalues_m = eigenvalues[0, 0] eigenvectors_m = eigenvectors[:, 0] cov = (eigenvectors @ eigenvalues @ eigenvectors.T) - \ (eigenvectors_m @ eigenvalues_m @ eigenvectors_m.T) return cov2corr(cov) def get_pca(matrix): """ Function to retrieve the eigenvalues and eigenvector from a Hermitian matrix :param matrix: Hermitian matrix :type matrix: np.matrix or np.ndarray :return: :rtype: """ eigenvalues, eigenvectors = np.linalg.eigh(matrix) indices = eigenvalues.argsort()[::-1] eigenvalues, eigenvectors = eigenvalues[indices], eigenvectors[:, indices] eigenvalues = np.diagflat(eigenvalues) return eigenvalues, eigenvectors def fit_kde(obs, bandwidth=0.25, kernel='gaussian', x=None): """ Fit kernel to a series of observations and derive the probability of obs :param obs: :type obs: :param bandwidth: :type bandwidth: :param kernel: :type kernel: :param x: The array of values on which the fit KDE will be evaluated :type x: array like :return: :rtype: """ if len(obs.shape) == 1: obs = obs.reshape(-1, 1) kde = KernelDensity(kernel=kernel, bandwidth=bandwidth).fit(obs) if x is None: x = np.unique(obs).reshape(-1, 1) log_prob = kde.score_samples(x) pdf = pd.Series(np.exp(log_prob), index=x.flatten()) return pdf
33.309783
79
0.586229
697
6,129
5.037303
0.249641
0.050128
0.037596
0.005127
0.134435
0.096554
0.066078
0
0
0
0
0.014068
0.315712
6,129
183
80
33.491803
0.823081
0.195791
0
0.089109
0
0
0.048579
0
0
0
0
0
0
1
0.108911
false
0
0.059406
0
0.287129
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d88c5d3fd6e480648f2585b49ccf5181a7bd2748
803
py
Python
flows/builtin/tests/unit/webserver/factory/test_simple_http_server_flow_factory.py
vdloo/jobrunner
a3a7b8f7fd4d879d398f1390db0611f9c626f6db
[ "Apache-2.0" ]
1
2020-08-31T19:06:16.000Z
2020-08-31T19:06:16.000Z
flows/builtin/tests/unit/webserver/factory/test_simple_http_server_flow_factory.py
vdloo/jobrunner
a3a7b8f7fd4d879d398f1390db0611f9c626f6db
[ "Apache-2.0" ]
null
null
null
flows/builtin/tests/unit/webserver/factory/test_simple_http_server_flow_factory.py
vdloo/jobrunner
a3a7b8f7fd4d879d398f1390db0611f9c626f6db
[ "Apache-2.0" ]
null
null
null
from flows.builtin.webserver.factory import simple_http_server_flow_factory from jobrunner.utils import list_tasks_in_flow from tests.testcase import TestCase class TestSimpleHTTPServerFlowFactory(TestCase): def test_simple_http_server_flow_factory_creates_flow_with_name(self): flow = simple_http_server_flow_factory() self.assertEqual(flow.name, 'simple_http_server_flow') def test_simple_http_server_has_correct_tasks(self): flow = simple_http_server_flow_factory() expected_tasks = ( 'run_simple_webserver', ) self.assertCountEqual(list_tasks_in_flow(flow), expected_tasks) def test_simple_http_server_is_retried(self): flow = simple_http_server_flow_factory() self.assertEqual(flow.retry._attempts, 10)
33.458333
75
0.768369
102
803
5.558824
0.352941
0.141093
0.22575
0.21164
0.440917
0.252205
0.252205
0.190476
0.190476
0.190476
0
0.002994
0.16812
803
23
76
34.913043
0.845808
0
0
0.1875
0
0
0.053549
0.028643
0
0
0
0
0.1875
1
0.1875
false
0
0.1875
0
0.4375
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
d88d295ec1717480689aae0ec07ffd4cad0afd39
530
py
Python
saleor/core/emails.py
cleobuck/krolocosmetics
4ae97601a18461323606d6e22673bb38cbaa6272
[ "CC-BY-4.0" ]
2
2019-12-04T19:43:51.000Z
2020-07-06T09:56:04.000Z
saleor/core/emails.py
cleobuck/krolocosmetics
4ae97601a18461323606d6e22673bb38cbaa6272
[ "CC-BY-4.0" ]
11
2021-02-02T22:34:37.000Z
2022-02-10T20:20:50.000Z
saleor/core/emails.py
cleobuck/krolocosmetics
4ae97601a18461323606d6e22673bb38cbaa6272
[ "CC-BY-4.0" ]
null
null
null
from django.contrib.sites.models import Site from django.templatetags.static import static from ..core.utils import build_absolute_uri def get_email_context(): site: Site = Site.objects.get_current() logo_url = build_absolute_uri(static("images/logo-light.jpg")) send_email_kwargs = {"from_email": site.settings.default_from_email} email_template_context = { "domain": site.domain, "logo_url": logo_url, "site_name": site.name, } return send_email_kwargs, email_template_context
31.176471
72
0.732075
71
530
5.15493
0.464789
0.057377
0.087432
0
0
0
0
0
0
0
0
0
0.167925
530
16
73
33.125
0.829932
0
0
0
0
0
0.101887
0.039623
0
0
0
0
0
1
0.076923
false
0
0.230769
0
0.384615
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d88d675c24541241d384af1e5883cd0ed00a17c1
5,045
py
Python
tests/test_views.py
acutexyz/monkeyideas
062ffa16f3bdf1fa57338837b50bfddfebdecdac
[ "MIT" ]
null
null
null
tests/test_views.py
acutexyz/monkeyideas
062ffa16f3bdf1fa57338837b50bfddfebdecdac
[ "MIT" ]
null
null
null
tests/test_views.py
acutexyz/monkeyideas
062ffa16f3bdf1fa57338837b50bfddfebdecdac
[ "MIT" ]
null
null
null
from app.models import * import pytest from app.forms import * from werkzeug.datastructures import MultiDict from flask import url_for import json from flask.ext.login import current_user, login_user, logout_user @pytest.fixture def profession(session): p = Profession(name='Software Engineer in Test') session.add(p) session.commit() return p @pytest.fixture def password(): return '123qwe' @pytest.fixture def monkey(session, profession, password): m = Monkey( email='crazy@jungles.com', fullname='Jack London', about='Struggling hard in jungles', profession_id=profession.id ) m.set_password(password) session.add(m) session.commit() return m @pytest.fixture def idea(session, monkey): i = Idea( title='This is test idea', body='Body of the test idea', author_id=monkey.id ) session.add(i) session.commit() return i @pytest.fixture def field(session): f = Field(name='Communications') session.add(f) session.commit() return f @pytest.fixture def idea_status(session): i = IdeaStatus(name='Demonstration ready') session.add(i) session.commit() return i @pytest.fixture def monkey2(session, monkey, password): """Returns a monkey independent from idea fixture """ m = Monkey( email='fast@jungles.com', fullname='Tom Sawyer', about='Jungles sharpen skills', profession_id=monkey.profession_id ) m.set_password(password) session.add(m) session.commit() return m def post_login(client, email, password): data = { 'email': email, 'password': password } client.post(url_for('auth.login'), data=data) class TestAuthViews: def test_login(self, client, monkey, password): with client: post_login(client, monkey.email, password) assert current_user == monkey def test_logout(self, client, request_ctx, monkey): with client: post_login(client, monkey.email, password) assert current_user == monkey client.post(url_for('auth.logout')) assert not current_user.is_authenticated() def test_register(self, client, profession): data = { 'email': 'test@siroca.com', 'fullname': 'Testing Registration', 'password': '123qwe', 'about': 'a' * 21, 'profession_id': profession.id } assert Monkey.query.count() == 0 r = client.post(url_for('auth.register'), data=data) assert Monkey.query.count() == 1 class TestJoinRequestViews: def test_request_to_join(self, client, idea, monkey2, password): with client: post_login(client, monkey2.email, password) assert current_user.is_authenticated() assert JoinRequest.query.count() == 0 data = { 'message': 'Please, accept this test join request' } r = client.post( url_for('join_requests.request_to_join', idea_id=idea.id), data=data ) assert JoinRequest.query.count() == 1 class TestIdeaViews: def test_add_new_idea(self, client, monkey, password, field, idea_status): with client: post_login(client, monkey.email, password) data = { 'title': 'This is test idea', 'body': 'This is test body', 'is_public': True, 'fields': [field.id], 'status_id': idea_status.id } r = client.post(url_for('ideas.add_idea'), data=data) print r print r.data assert Idea.query.count() == 1 def test_accept_request(self, client, session, idea, monkey2, password): jr = JoinRequest(monkey2, idea) session.add(jr) session.commit() with client: post_login(client, idea.author.email, password) r = client.post( url_for( 'join_requests.accept_decline_request', id=jr.id, action='accept' ) ) assert r.status_code == 200 class TestSuggestionViews: def test_suggest_to_user(self, client, idea, monkey2, password): with client: post_login(client, idea.author.email, password) data = { 'idea_id': idea.id } r = client.post( url_for( 'suggestions.suggest_to_user', monkey_id=monkey2.id ), data=data, follow_redirects=True ) assert r.status_code == 200 assert Suggestion.query.count() == 1
27.71978
76
0.5556
540
5,045
5.068519
0.224074
0.047497
0.040921
0.040921
0.313482
0.265619
0.232006
0.210815
0.194739
0.168798
0
0.008172
0.345094
5,045
181
77
27.872928
0.820218
0
0
0.337748
0
0
0.11267
0.018444
0
0
0
0
0.07947
0
null
null
0.125828
0.046358
null
null
0.013245
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
1
0
0
0
0
0
2
d88d8616334079599791ffad043f9440390cda86
482
py
Python
src/views.py
DarFig/bLg0
e8e570af13e977fe867f858c7247dfe16cc22488
[ "MIT" ]
null
null
null
src/views.py
DarFig/bLg0
e8e570af13e977fe867f858c7247dfe16cc22488
[ "MIT" ]
null
null
null
src/views.py
DarFig/bLg0
e8e570af13e977fe867f858c7247dfe16cc22488
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- from src import app from flask import render_template from .controladores.blog import * from .controladores.about import * from .controladores.post import * from .interpreter import get_all_proy, get_all_posts @app.route('/') @app.route('/', methods=['GET']) def index(): title = "BLG0" proys = get_all_proy(limit=3) posts = get_all_posts(limit=3) return render_template('_views/index.html',webTitle=title, proys=proys, posts=posts)
20.083333
88
0.711618
67
482
4.955224
0.477612
0.072289
0.138554
0
0
0
0
0
0
0
0
0.00978
0.151452
482
23
89
20.956522
0.801956
0.043568
0
0
0
0
0.057143
0
0
0
0
0
0
1
0.076923
false
0
0.461538
0
0.615385
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
1
d88e2fadb48418563200bc00bbc87d76d7edb36c
447
py
Python
src/L00_Entry_tasks/entry_task3.py
VeraVol42/EduNet
e24ef5f0bfe24df54da7d77f1e3b2164083b3331
[ "CC0-1.0" ]
null
null
null
src/L00_Entry_tasks/entry_task3.py
VeraVol42/EduNet
e24ef5f0bfe24df54da7d77f1e3b2164083b3331
[ "CC0-1.0" ]
null
null
null
src/L00_Entry_tasks/entry_task3.py
VeraVol42/EduNet
e24ef5f0bfe24df54da7d77f1e3b2164083b3331
[ "CC0-1.0" ]
null
null
null
operand = input() st1 = input().split(' ') st2 = input().split(' ') x1, y1, z1, x2, y2, z2 = float(st1[0]), float(st1[1]), float(st1[2]), float(st2[0]), float(st2[1]), float(st2[2]) if operand == "+": res = (x1+x2, y1+y2, z1+z2) elif operand == "-": res = (x1-x2, y1-y2, z1-z2) elif operand == "@": res = (x1*x2, y1*y2, z1*z2) else: res = (float(y1*z2-y2*z1), float(-x1*z2+x2*z1), float(x1*y2-x2*y1)) print("%.2f %.2f %.2f" % res)
31.928571
113
0.536913
81
447
2.962963
0.259259
0.066667
0.15
0.175
0.308333
0.308333
0.308333
0.308333
0.308333
0.308333
0
0.143631
0.174497
447
13
114
34.384615
0.506775
0
0
0
0
0
0.042506
0
0
0
0
0
0
1
0
false
0
0
0
0
0.076923
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
d88fb1b1d25ac7b6d5954cd96c458c9d471fb3b6
4,109
py
Python
inb/tests/test_linkedin/test_driver.py
JoshiAyush/LinkedIn-Automator
6341867fb9bb974ecfe388d90d1860e9c85a3b3c
[ "MIT" ]
1
2021-01-05T17:29:02.000Z
2021-01-05T17:29:02.000Z
inb/tests/test_linkedin/test_driver.py
JoshiAyush/LinkedIn-Automator
6341867fb9bb974ecfe388d90d1860e9c85a3b3c
[ "MIT" ]
null
null
null
inb/tests/test_linkedin/test_driver.py
JoshiAyush/LinkedIn-Automator
6341867fb9bb974ecfe388d90d1860e9c85a3b3c
[ "MIT" ]
null
null
null
# MIT License # # Copyright (c) 2019 Creative Commons # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the "Software"), # to deal in the Software without restriction, including without limitation # the rights to use, copy, modify, merge, publish, distribute, sublicense, # and/or sell copies of the Software, and to permit persons to whom the # Software is furnished to do so, subject to the following conditions # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE. # from __future__ imports must occur at the beginning of the file. DO NOT CHANGE! from __future__ import annotations import os import stat import unittest from unittest.mock import call from unittest.mock import Mock from unittest.mock import patch from linkedin import Driver from lib import DRIVER_PATH from errors import WebDriverPathNotGivenException from errors import WebDriverNotExecutableException class TestDriverClass(unittest.TestCase): @unittest.skipIf(not os.getuid() == 0, "Requires root privileges!") def test_constructor_method_with_invalid_executable_path( self: TestDriverClass) -> None: paths = [1, (1, 2, 3), [1, 2, 3], {1: 1, 2: 2}] for path in paths: with self.assertRaises(WebDriverPathNotGivenException): driver = Driver(path) original_file_permissions = stat.S_IMODE( os.lstat(DRIVER_PATH).st_mode) def remove_execute_permissions(path): """Remove execute permissions from this path, while keeping all other permissions intact. Params: path: The path whose permissions to alter. """ NO_USER_EXECUTE = ~stat.S_IXUSR NO_GROUP_EXECUTE = ~stat.S_IXGRP NO_OTHER_EXECUTE = ~stat.S_IXOTH NO_EXECUTE = NO_USER_EXECUTE & NO_GROUP_EXECUTE & NO_OTHER_EXECUTE current_permissions = stat.S_IMODE(os.lstat(path).st_mode) os.chmod(path, current_permissions & NO_EXECUTE) remove_execute_permissions(DRIVER_PATH) with self.assertRaises(WebDriverNotExecutableException): driver = Driver(driver_path=DRIVER_PATH) # place the original file permissions back os.chmod(DRIVER_PATH, original_file_permissions) @patch("linkedin.Driver.enable_webdriver_chrome") def test_constructor_method_with_valid_chromedriver_path(self: TestDriverClass, mock_enable_webdriver_chrome: Mock) -> None: driver = Driver(driver_path=DRIVER_PATH) mock_enable_webdriver_chrome.assert_called() @patch("selenium.webdriver.ChromeOptions.add_argument") def test_constructor_method_add_argument_internal_calls( self: TestDriverClass, mock_add_argument: Mock) -> None: calls = [ call(Driver.HEADLESS), call(Driver.INCOGNITO), call(Driver.NO_SANDBOX), call(Driver.DISABLE_GPU), call(Driver.START_MAXIMIZED), call(Driver.DISABLE_INFOBARS), call(Driver.ENABLE_AUTOMATION), call(Driver.DISABLE_EXTENSIONS), call(Driver.DISABLE_NOTIFICATIONS), call(Driver.DISABLE_SETUID_SANDBOX), call(Driver.IGNORE_CERTIFICATE_ERRORS)] driver = Driver(driver_path=DRIVER_PATH, options=[ Driver.HEADLESS, Driver.INCOGNITO, Driver.NO_SANDBOX, Driver.DISABLE_GPU, Driver.START_MAXIMIZED, Driver.DISABLE_INFOBARS, Driver.ENABLE_AUTOMATION, Driver.DISABLE_EXTENSIONS, Driver.DISABLE_NOTIFICATIONS, Driver.DISABLE_SETUID_SANDBOX, Driver.IGNORE_CERTIFICATE_ERRORS]) mock_add_argument.assert_has_calls(calls)
40.683168
127
0.748601
531
4,109
5.595104
0.369115
0.037025
0.02861
0.022215
0.088522
0.051161
0
0
0
0
0
0.004755
0.181066
4,109
100
128
41.09
0.878158
0.324166
0
0.035714
0
0
0.040015
0.030837
0
0
0
0
0.071429
1
0.071429
false
0
0.196429
0
0.285714
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d894b0c0efe076de25e246895a699bb7e8b99808
417
py
Python
app/__init__.py
6ixBit/Personal-Website
0f51563b06c1955775ba2ac4e78786cad21cdefa
[ "MIT" ]
null
null
null
app/__init__.py
6ixBit/Personal-Website
0f51563b06c1955775ba2ac4e78786cad21cdefa
[ "MIT" ]
1
2021-06-02T00:29:28.000Z
2021-06-02T00:29:28.000Z
app/__init__.py
6ixBit/Personal-Website
0f51563b06c1955775ba2ac4e78786cad21cdefa
[ "MIT" ]
null
null
null
from flask import Flask from flask_bootstrap import Bootstrap from flask_nav import Nav from flask_sqlalchemy import SQLAlchemy from flask_migrate import Migrate from flask_mail import Mail from Config import Config app = Flask(__name__) app.config.from_object(Config) Bootstrap(app) nav = Nav(app) db = SQLAlchemy(app) migrate = Migrate(app, db) mail = Mail(app) from app import views, forms, models, errors, tasks
24.529412
51
0.805755
64
417
5.09375
0.28125
0.165644
0
0
0
0
0
0
0
0
0
0
0.131894
417
17
51
24.529412
0.900552
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.533333
0
0.533333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
1
d895cb920f81b7b33316df8ee7c07eb1ad364352
3,037
py
Python
example-tests/example_Grid.py
Indomerun/pyHiChi
fdceb238dfed6433ee350d5c593ca5e2cd4fbd2b
[ "MIT" ]
11
2019-08-22T12:47:40.000Z
2022-01-28T16:07:29.000Z
example-tests/example_Grid.py
Indomerun/pyHiChi
fdceb238dfed6433ee350d5c593ca5e2cd4fbd2b
[ "MIT" ]
14
2019-09-02T08:24:55.000Z
2022-02-14T11:40:43.000Z
example-tests/example_Grid.py
Indomerun/pyHiChi
fdceb238dfed6433ee350d5c593ca5e2cd4fbd2b
[ "MIT" ]
9
2019-07-31T13:25:20.000Z
2022-01-28T16:07:45.000Z
import sys sys.path.append("../bin/") import pyHiChi as hichi import numpy as np def valueE(x, y, z): E = hichi.Vector3d(0, np.cos(z), 0) #sin(x) return E def valueEx(x, y, z): Ex = 0 return Ex def valueEy(x, y, z): Ey = np.cos(z) return Ey def valueEz(x, y, z): Ez = 0 return Ez def valueB(x, y, z): B = hichi.Vector3d(-np.cos(z), 0, 0) return B def valueBx(x, y, z): Bx = -np.cos(z) return Bx def valueBy(x, y, z): By = 0 return By def valueBz(x, y, z): Bz = 0 return Bz field_size = hichi.Vector3d(5, 10, 11) min_coords = hichi.Vector3d(0.0, 1.0, 0.0) max_coords = hichi.Vector3d(3.5, 7.0, 2*np.pi) field_step = (max_coords - min_coords) / field_size time_step = 1e-16 field1 = hichi.YeeField(field_size, min_coords, field_step, time_step) field2 = hichi.YeeField(field_size, min_coords, field_step, time_step) field1.set_E(valueE) field1.set_B(valueB) field2.set_E(valueEx, valueEy, valueEz) field2.set_B(valueBx, valueBy, valueBz) #show import matplotlib.pyplot as plt N = 37 x = np.arange(0, 3.5, 3.5/N) z = np.arange(0, 2*np.pi, 2*np.pi/N) Ex1 = np.zeros(shape=(N,N)) Ex2 = np.zeros(shape=(N,N)) Ey1 = np.zeros(shape=(N,N)) Ey2 = np.zeros(shape=(N,N)) Bx1 = np.zeros(shape=(N,N)) Bx2 = np.zeros(shape=(N,N)) for ix in range(N): for iy in range(N): coord_xz = hichi.Vector3d(x[ix], 0.0, z[iy]) E1 = field1.get_E(coord_xz) Ex1[ix, iy] = E1.x Ey1[ix, iy] = E1.y Bx1[ix, iy] = field1.get_B(coord_xz).x E2 = field2.get_E(coord_xz) Ex2[ix, iy] = E2.x Ey2[ix, iy] = E2.y Bx2[ix, iy] = field2.get_B(coord_xz).x fig, axes = plt.subplots(ncols=3, nrows=2) bar11 = axes[0, 0].imshow(Ex1, cmap='RdBu', interpolation='none', extent=(0, 2*np.pi, 0, 3.5)) fig.colorbar(bar11, ax=axes[0, 0]) axes[0, 0].set_title("Ex1") axes[0, 0].set_xlabel("x") axes[0, 0].set_ylabel("z") bar12 = axes[0, 1].imshow(Ey1, cmap='RdBu', interpolation='none', extent=(0, 2*np.pi, 0, 3.5)) fig.colorbar(bar12, ax=axes[0, 1]) axes[0, 1].set_title("Ey1") axes[0, 1].set_xlabel("x") axes[0, 1].set_ylabel("z") bar13 = axes[0, 2].imshow(Bx1, cmap='RdBu', interpolation='none', extent=(0, 2*np.pi, 0, 3.5)) fig.colorbar(bar13, ax=axes[0, 2]) axes[0, 2].set_title("Bx1") axes[0, 2].set_xlabel("x") axes[0, 2].set_ylabel("z") bar21 = axes[1, 0].imshow(Ex2, cmap='RdBu', interpolation='none', extent=(0, 2*np.pi, 0, 3.5)) fig.colorbar(bar21, ax=axes[1, 0]) axes[1, 0].set_title("Ex2") axes[1, 0].set_xlabel("x") axes[1, 0].set_ylabel("z") bar22 = axes[1, 1].imshow(Ey2, cmap='RdBu', interpolation='none', extent=(0, 2*np.pi, 0, 3.5)) fig.colorbar(bar22, ax=axes[1, 1]) axes[1, 1].set_title("Ey2") axes[1, 1].set_xlabel("x") axes[1, 1].set_ylabel("z") bar23 = axes[1, 2].imshow(Bx2, cmap='RdBu', interpolation='none', extent=(0, 2*np.pi, 0, 3.5)) cbar = fig.colorbar(bar23, ax=axes[1, 2]) axes[1, 2].set_title("Bx2") axes[1, 2].set_xlabel("x") axes[1, 2].set_ylabel("z") plt.tight_layout() plt.show()
24.103175
94
0.622654
592
3,037
3.113176
0.179054
0.040695
0.024417
0.026044
0.322843
0.212154
0.212154
0.212154
0.212154
0.212154
0
0.079541
0.167929
3,037
125
95
24.296
0.649782
0.003293
0
0
0
0
0.028118
0
0
0
0
0
0
1
0.086022
false
0
0.043011
0
0.215054
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d8965469242d4e72828c54c19635f40c52cf043e
850
py
Python
douyu/douyu/spiders/spider.py
smujm/ScrapyProjects
04e9eb42c64805475893be595db4f3b6530ba597
[ "MIT" ]
null
null
null
douyu/douyu/spiders/spider.py
smujm/ScrapyProjects
04e9eb42c64805475893be595db4f3b6530ba597
[ "MIT" ]
null
null
null
douyu/douyu/spiders/spider.py
smujm/ScrapyProjects
04e9eb42c64805475893be595db4f3b6530ba597
[ "MIT" ]
null
null
null
import scrapy import json from douyu.items import DouyuItem class SpiderSpider(scrapy.Spider): name = 'douyu' allowed_domains = ['https://www.douyu.com'] base_url = 'http://capi.douyucdn.cn/api/v1/getVerticalRoom?limit=20&offset=' offset = 0 start_urls = [base_url + str(offset)] def parse(self, response): # extract the data data_list = json.loads(response.body)['data'] if len(data_list) == 0: return for data in data_list: item = DouyuItem() item['nickname'] = data['nickname'].encode('utf-8') item['vertical_src'] = data['vertical_src'] yield item self.offset += 20 url = self.base_url + str(self.offset) # callback: hand the response of this request back to parse for processing yield scrapy.Request(url=url, callback=self.parse, dont_filter=True)
29.310345
80
0.611765
105
850
4.847619
0.580952
0.041257
0.039293
0
0
0
0
0
0
0
0
0.012759
0.262353
850
28
81
30.357143
0.799043
0.038824
0
0
0
0
0.169533
0
0
0
0
0
0
1
0.047619
false
0
0.142857
0
0.52381
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d896eb0aaef01df589adcc60f76141166ae14cf0
1,716
py
Python
corehq/ex-submodules/dimagi/utils/parsing.py
dimagilg/commcare-hq
ea1786238eae556bb7f1cbd8d2460171af1b619c
[ "BSD-3-Clause" ]
471
2015-01-10T02:55:01.000Z
2022-03-29T18:07:18.000Z
corehq/ex-submodules/dimagi/utils/parsing.py
dimagilg/commcare-hq
ea1786238eae556bb7f1cbd8d2460171af1b619c
[ "BSD-3-Clause" ]
14,354
2015-01-01T07:38:23.000Z
2022-03-31T20:55:14.000Z
corehq/ex-submodules/dimagi/utils/parsing.py
dimagilg/commcare-hq
ea1786238eae556bb7f1cbd8d2460171af1b619c
[ "BSD-3-Clause" ]
175
2015-01-06T07:16:47.000Z
2022-03-29T13:27:01.000Z
from datetime import date, time, datetime from dateutil.parser import parse import dateutil.tz TRUE_STRINGS = ("true", "t", "yes", "y", "1") FALSE_STRINGS = ("false", "f", "no", "n", "0") def string_to_boolean(val): """ A very dumb string to boolean converter. Will fail hard if the conversion doesn't succeed. """ if val is None: return False if isinstance(val, bool): return val if val.lower().strip() in TRUE_STRINGS: return True elif val.lower().strip() in FALSE_STRINGS: return False raise ValueError("%s is not a parseable boolean!" % val) def string_to_datetime(val): """ Try to convert a string to a date. """ if isinstance(val, datetime): return val elif isinstance(val, date): return datetime.combine(val, time()) return parse(val) def string_to_utc_datetime(val): val = string_to_datetime(val) if val.tzinfo is None: return val return val.astimezone(dateutil.tz.tzutc()).replace(tzinfo=None) ISO_DATETIME_FORMAT = '%Y-%m-%dT%H:%M:%S.%fZ' ISO_DATE_FORMAT = '%Y-%m-%d' def json_format_datetime(dt): """ includes microseconds (always) >>> json_format_datetime(datetime.datetime(2015, 4, 8, 12, 0, 1)) '2015-04-08T12:00:01.000000Z' """ from dimagi.ext.jsonobject import _assert _assert(isinstance(dt, datetime), 'json_format_datetime expects a datetime: {!r}'.format(dt)) if isinstance(dt, datetime): _assert(dt.tzinfo is None, 'json_format_datetime expects offset-naive: {!r}'.format(dt)) return dt.strftime(ISO_DATETIME_FORMAT) def json_format_date(date_): return date_.strftime(ISO_DATE_FORMAT)
26.8125
77
0.654429
241
1,716
4.510373
0.365145
0.044158
0.066237
0.027599
0
0
0
0
0
0
0
0.023845
0.217949
1,716
63
78
27.238095
0.78614
0.147436
0
0.081081
0
0
0.121708
0.014947
0
0
0
0
0.081081
1
0.135135
false
0
0.108108
0.027027
0.513514
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
d897d34629e02e537f13d11f12451d99e9ab865b
521
py
Python
synonym.py
amber5634/Synonym-Generator-using-Word-Net
5ce0f71d4639bbae39ee0d279103e576065c094a
[ "MIT" ]
null
null
null
synonym.py
amber5634/Synonym-Generator-using-Word-Net
5ce0f71d4639bbae39ee0d279103e576065c094a
[ "MIT" ]
null
null
null
synonym.py
amber5634/Synonym-Generator-using-Word-Net
5ce0f71d4639bbae39ee0d279103e576065c094a
[ "MIT" ]
null
null
null
import nltk from nltk.corpus import wordnet class Keyword: def synonymn_generator(self): synonyms = [] antonyms = [] word = input("enter the word : ") for syn in wordnet.synsets(word): for l in syn.lemmas(): synonyms.append(l.name()) if l.antonyms(): antonyms.append(l.antonyms()[0].name()) print(set(synonyms)) print(set(antonyms)) p1 = Keyword() p1.synonymn_generator()
26.05
60
0.520154
55
521
4.890909
0.545455
0.126394
0
0
0
0
0
0
0
0
0
0.009063
0.364683
521
20
61
26.05
0.803625
0
0
0
0
0
0.033797
0
0
0
0
0
0
1
0.0625
false
0
0.125
0
0.25
0.125
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d89df34e44c6bfd5607bac84838a10b568961067
3,846
py
Python
scripts/src/__main__.py
9999years/dotfiles
763c2ca5f8aeb3b64eb28262e6708135e6cd2005
[ "MIT" ]
1
2020-09-09T15:06:43.000Z
2020-09-09T15:06:43.000Z
scripts/src/__main__.py
9999years/dotfiles
763c2ca5f8aeb3b64eb28262e6708135e6cd2005
[ "MIT" ]
2
2020-09-09T14:16:21.000Z
2020-09-29T17:31:15.000Z
scripts/src/__main__.py
9999years/dotfiles
763c2ca5f8aeb3b64eb28262e6708135e6cd2005
[ "MIT" ]
2
2020-09-04T14:55:57.000Z
2020-10-30T19:08:58.000Z
"""Entry point for linking dotfiles. """ from __future__ import annotations import argparse import os import subprocess import sys from dataclasses import dataclass from pathlib import Path from typing import Optional from . import log from .link import Linker from .resolver import Resolver from .scan import Scanner from .schema import DotfilesJson, PrettyPath def main() -> None: """Entry point. """ args = Args.parse_args() if args.dotfiles is None: repo_root = _get_repo_root() dotfiles_fh = open(repo_root / "dotfiles.json") else: repo_root = args.dotfiles.parent.absolute() dotfiles_fh = args.dotfiles.open() dotfiles = DotfilesJson.load_from_file(dotfiles_fh) dotfiles_fh.close() link_root = Path.home() if args.link_root is None else args.link_root resolver = Resolver( repo_root=repo_root, link_root=link_root, relative=not args.absolute ) resolved = resolver.resolve_all(dotfiles) if args.scan: log.warn("Scanning for dotfiles is an experimental feature.") scanner = Scanner(link_root, resolved.ignored, resolved.dotfiles) for p in scanner.find_dotfiles(): # TODO: Fill in scanner processing. # Actions: # - skip # - quit # - ignore the path # - move it to dotfiles # - if it's a directory, recurse # - if it's a file, cat it / display its stat # # Should also note if it's a directory or file. p_disp = str(PrettyPath.from_path(p).disp) if p.is_dir(): log.info("📁 " + p_disp) else: log.info(p_disp) # TODO: Offer to commit new files...? else: linker = Linker(verbose=args.verbose,) linker.link_all(resolved.dotfiles) @dataclass class Args: """Command-line arguments; see ``_argparser``. """ dotfiles: Optional[Path] link_root: Optional[Path] absolute: bool scan: bool verbose: bool @classmethod def parse_args(cls) -> Args: """Parse args from ``sys.argv``. """ args = _argparser().parse_args() return cls( dotfiles=args.dotfiles, link_root=args.link_root, absolute=args.absolute, scan=args.scan, verbose=args.verbose, ) def _argparser() -> argparse.ArgumentParser: """Command-line argument parser. """ parser = argparse.ArgumentParser(description="links dotfiles") parser.add_argument( "-d", "--dotfiles", type=Path, help="The dotfiles.json file to load", ) parser.add_argument( "-l", "--link-root", type=Path, help="Where to create links from; defaults to your home directory", ) parser.add_argument( "-a", "--absolute", action="store_true", help="Create absolute links, rather than relative ones", ) parser.add_argument( "-s", "--scan", action="store_true", help="Scan for untracked dotfiles", ) parser.add_argument( "-v", "--verbose", action="store_true", help="Make output more verbose", ) return parser def _get_repo_root() -> Path: try: proc = subprocess.run( ["git", "rev-parse", "--show-toplevel"], capture_output=True, text=True, check=False, ) except FileNotFoundError: log.fatal( "Couldn't run `git` to determine repo root; pass --dotfiles explicitly." ) sys.exit(1) if proc.returncode != 0: log.fatal("Couldn't get repo root from git; pass --dotfiles explicitly.") sys.exit(1) return Path(proc.stdout.strip()).absolute() if __name__ == "__main__": main()
26.524138
84
0.595684
450
3,846
4.957778
0.348889
0.035858
0.0381
0.008068
0.040341
0.026894
0
0
0
0
0
0.001103
0.292772
3,846
144
85
26.708333
0.81875
0.111544
0
0.1
0
0
0.152959
0
0
0
0
0.006944
0
1
0.04
false
0.02
0.13
0
0.26
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d89ebf9a6b581abb7634d793d29dd4afbd5a6f07
3,778
py
Python
verified.py
tophersmith/veracode-verified-checker
f2f85dbb4b8039c9ccd9848367a37b9caab0c9aa
[ "MIT" ]
null
null
null
verified.py
tophersmith/veracode-verified-checker
f2f85dbb4b8039c9ccd9848367a37b9caab0c9aa
[ "MIT" ]
null
null
null
verified.py
tophersmith/veracode-verified-checker
f2f85dbb4b8039c9ccd9848367a37b9caab0c9aa
[ "MIT" ]
null
null
null
import sys import json import requests from veracode_api_signing.plugin_requests import RequestsAuthPluginVeracodeHMAC from pprint import pprint from datetime import datetime from app_definition import AppDefinition from verified_check import VerifiedStandard, VerifiedTeam, VerifiedContinuous from verified_report import VerifiedReport, ConsoleReport from pprint import pprint url_base = 'https://api.veracode.com/appsec' min_severity = 3 # findings api only returns medium + def main(): if len(sys.argv) != 4: print('Usage: [API Key] [API Secret Key] [Check Type s=Standard t=Team c=Continuous a=All]') exit(1) auth = RequestsAuthPluginVeracodeHMAC(api_key_id=sys.argv[1], api_key_secret=sys.argv[2]) ''' Process: Make Veracode Verified Checks class Make reporter Get all policies Get all apps For each app Get findings for the app Check the app + policies based on the Verified level Report any failures from the Verified Checks ''' try: checks = make_checks(sys.argv[3]) report = ConsoleReport() policies_dict = get_policies_dict(auth) apps_list = get_applications_list(auth) apps_size = len(apps_list) print('%d apps found' % (apps_size)) count = 1 for app in apps_list: print('Checking %s (%d/%d)' % (app.name, count, apps_size)) add_findings_to_app(auth, app) check(app, policies_dict, report, checks) count = count + 1 report.output() except Exception as e: print('Error while scanning or uploading. ' + str(e)) raise e def get_policies_dict(auth): #Get all policies available to the user as a dict of 'policy_name': 'policy_json' done = False policies = {} page_count = 0 while not done: r = requests.get(url_base + '/v1/policies', auth=auth, params={'size':500, 'page': page_count}) if not r.ok: print(r.text) raise Exception('ERROR: Received status code %s while trying to get applications' % r.status_code) #Check pagination total_pages = r.json()['page']['total_pages'] page_count = page_count + 1 if page_count == total_pages: done = True policies.update({policy['name']:policy for policy in r.json()['_embedded']['policy_versions']}) return policies def make_checks(check_type): #Create the Verified Check class for the given check_type cases = {'s': [VerifiedStandard], 't': [VerifiedTeam], 'c': [VerifiedContinuous], 'a': [VerifiedStandard,VerifiedTeam, VerifiedContinuous]} if check_type in cases: return cases[check_type] else: raise Exception('Unknown case. Must be one of %s' % ( ', '.join(cases.keys()) )) def get_applications_list(auth): #Get all applications done = False apps_list = [] page_count = 0 while not done: r = requests.get(url_base + '/v1/applications', auth=auth, params={'size':500, 'page':page_count}) if not r.ok: print(r.text) raise Exception('ERROR: Received status code %s while trying to get applications' % r.status_code) #Check pagination total_pages = r.json()['page']['total_pages'] page_count = page_count + 1 if page_count == total_pages: done = True apps_list.extend([AppDefinition(application) for application in r.json()['_embedded']['applications']]) return apps_list def add_findings_to_app(auth, app): #Add the findings json to the app r = requests.get(url_base + ('/v2/applications/%s/findings' % app.guid), auth=auth, params={'severity_gte': min_severity}) if not r.ok: print(r.text) raise Exception('ERROR: Received status code %s while trying to get findings' % r.status_code) app.add_findings(r.json()) def check(app, policies_dict, report, checks): #Using the Verified Check, check the app + policies for check_func in checks: check = check_func(app, policies_dict) check.do_check(report) if __name__ == '__main__': sys.exit(main())
32.568966
123
0.724722
553
3,778
4.79566
0.264014
0.033937
0.016968
0.016968
0.274887
0.267722
0.226244
0.226244
0.226244
0.226244
0
0.006627
0.161196
3,778
116
124
32.568966
0.83023
0.080731
0
0.27907
0
0.011628
0.178705
0.008717
0
0
0
0
0
1
0.069767
false
0
0.116279
0
0.22093
0.104651
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d8a1ec63d4e8397c65e7cf35b675ad0d08be473f
3,850
py
Python
pulsemeeter/EqPopover.py
dacid44/pulsemeeter
5367fe910c8d1eea2a0523549c2c26c446547445
[ "MIT" ]
null
null
null
pulsemeeter/EqPopover.py
dacid44/pulsemeeter
5367fe910c8d1eea2a0523549c2c26c446547445
[ "MIT" ]
null
null
null
pulsemeeter/EqPopover.py
dacid44/pulsemeeter
5367fe910c8d1eea2a0523549c2c26c446547445
[ "MIT" ]
null
null
null
import os import sys from .settings import LAYOUT_DIR from gi import require_version as gi_require_version gi_require_version('Gtk', '3.0') from gi.repository import Gtk,Gdk class EqPopover(): def __init__(self, button, pulse, index): self.builder = Gtk.Builder() self.pulse = pulse self.layout = pulse.config['layout'] try: self.builder.add_objects_from_file( os.path.join(LAYOUT_DIR, f'{self.layout}.glade'), [ 'eq_popup', 'eq_50_hz_adjust', 'eq_100_hz_adjust', 'eq_156_hz_adjust', 'eq_220_hz_adjust', 'eq_311_hz_adjust', 'eq_440_hz_adjust', 'eq_622_hz_adjust', 'eq_880_hz_adjust', 'eq_1_25_khz_adjust', 'eq_1_75_khz_adjust', 'eq_2_5_khz_adjust', 'eq_3_5_khz_adjust', 'eq_5_khz_adjust', 'eq_10_khz_adjust', 'eq_20_khz_adjust', 'apply_eq_button', 'reset_eq_button', ] ) except Exception as ex: print('Error building main window!\n{}'.format(ex)) sys.exit(1) for i in range(1, 16): mark = self.builder.get_object(f'eq_{i}') mark.add_mark(0, Gtk.PositionType.TOP, '') self.eq = [] self.eq.append(self.builder.get_object('eq_50_hz_adjust')) self.eq.append(self.builder.get_object('eq_100_hz_adjust')) self.eq.append(self.builder.get_object('eq_156_hz_adjust')) self.eq.append(self.builder.get_object('eq_220_hz_adjust')) self.eq.append(self.builder.get_object('eq_311_hz_adjust')) self.eq.append(self.builder.get_object('eq_440_hz_adjust')) self.eq.append(self.builder.get_object('eq_622_hz_adjust')) self.eq.append(self.builder.get_object('eq_880_hz_adjust')) self.eq.append(self.builder.get_object('eq_1_25_khz_adjust')) self.eq.append(self.builder.get_object('eq_1_75_khz_adjust')) self.eq.append(self.builder.get_object('eq_2_5_khz_adjust')) self.eq.append(self.builder.get_object('eq_3_5_khz_adjust')) self.eq.append(self.builder.get_object('eq_5_khz_adjust')) self.eq.append(self.builder.get_object('eq_10_khz_adjust')) self.eq.append(self.builder.get_object('eq_20_khz_adjust')) self.Apply_EQ_Button = self.builder.get_object('apply_eq_button') self.Reset_EQ_Button = self.builder.get_object('reset_eq_button') control = self.pulse.config[index[0]][index[1]]['eq_control'] j = 0 if control != '': for i in control.split(','): self.eq[j].set_value(float(i)) j = j + 1 self.Apply_EQ_Button.connect('pressed', self.apply_eq, index) self.Reset_EQ_Button.connect('pressed', self.reset_eq) self.EQ_Popup = self.builder.get_object('eq_popup') self.EQ_Popup.set_relative_to(button) self.EQ_Popup.popup() self.builder.connect_signals(self) def apply_eq(self, widget, index): control='' for i in self.eq: control = control + ',' + str(i.get_value()) control = control[1:] if self.pulse.config[index[0]][index[1]]['use_eq'] == False: return self.pulse.apply_eq(index, control=control) def disable_eq(self, widget, index): self.pulse.remove_eq(index) def reset_eq(self, widget): for i in self.eq: i.set_value(0) def reset_value(self, widget, event): if event.type == Gdk.EventType.BUTTON_PRESS and event.button == 3: widget.set_value(0)
37.378641
73
0.585714
524
3,850
3.975191
0.192748
0.116179
0.1277
0.182429
0.432549
0.356697
0.329813
0.303889
0.287566
0.287566
0
0.034103
0.291688
3,850
102
74
37.745098
0.72974
0
0
0.023256
0
0
0.172468
0
0
0
0
0
0
1
0.05814
false
0
0.046512
0
0.127907
0.011628
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
d8a43b2abf45ed0ee34b929a0e82dda9a77e57e1
717
py
Python
ModfiyImagesSizes.py
Idolized22/Coco-llike-dataset-creator-
f0aa95710fdb339e31788b366d7360f0f994b88e
[ "MIT" ]
null
null
null
ModfiyImagesSizes.py
Idolized22/Coco-llike-dataset-creator-
f0aa95710fdb339e31788b366d7360f0f994b88e
[ "MIT" ]
null
null
null
ModfiyImagesSizes.py
Idolized22/Coco-llike-dataset-creator-
f0aa95710fdb339e31788b366d7360f0f994b88e
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 # -*- coding: utf-8 -*- """ @author: idolized22 """ import PIL.Image from matplotlib import pyplot import numpy as np def resize_image (Img, DesieredSize=[1300,1300]): #desired size to be passed as [width , height ] factor=1 while (Img.size[0] * Img.size[1])>( DesieredSize[0]* DesieredSize[1]): #reduce_size Img=Img.resize((Img.size[0]//2,Img.size[1]//2),resample=PIL.Image.LANCZOS) factor=factor+1 return [Img , Img.size] def resize_binary_mask(array, new_size): #from pycocotools on github image = PIL.Image.fromarray(array.astype(np.uint8)*255) image = image.resize(new_size) return np.asarray(image).astype(np.bool_)
24.724138
82
0.659693
104
717
4.480769
0.5
0.075107
0.034335
0
0
0
0
0
0
0
0
0.04475
0.189679
717
28
83
25.607143
0.757315
0.205021
0
0
0
0
0
0
0
0
0
0
0
1
0.153846
false
0
0.230769
0
0.538462
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
d8a4ffb4de362b2f4a2070e30f28d8fd00e06627
206
py
Python
try-except.py
arhue/python-learning
058c93315fd5aa76584e32432e7c80cb3972478e
[ "MIT" ]
null
null
null
try-except.py
arhue/python-learning
058c93315fd5aa76584e32432e7c80cb3972478e
[ "MIT" ]
null
null
null
try-except.py
arhue/python-learning
058c93315fd5aa76584e32432e7c80cb3972478e
[ "MIT" ]
null
null
null
x=input("Enter a no. I will convert to integer") z=1 try: y=int(float(x)) z="float" except: z="wrong" if z=="wrong": print("fix your input") else: print("int of your input is:", y)
15.846154
48
0.57767
37
206
3.216216
0.675676
0.10084
0
0
0
0
0
0
0
0
0
0.006452
0.247573
206
12
49
17.166667
0.76129
0
0
0
0
0
0.42233
0
0
0
0
0
0
1
0
false
0
0
0
0
0.181818
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d8a6e96e4ea3e27e49331d747cf1b4b5f9ab87ec
3,426
py
Python
merge_vcfs_individual.py
AndersenLab/pyPipeline
89af32c545f309686b6bbf2d1f0e9681136642ba
[ "MIT" ]
5
2015-09-18T15:48:01.000Z
2022-03-11T02:24:12.000Z
merge_vcfs_individual.py
AndersenLab/pyPipeline
89af32c545f309686b6bbf2d1f0e9681136642ba
[ "MIT" ]
null
null
null
merge_vcfs_individual.py
AndersenLab/pyPipeline
89af32c545f309686b6bbf2d1f0e9681136642ba
[ "MIT" ]
null
null
null
#!/usr/bin/python #SBATCH --nodes=1 #SBATCH --ntasks=1 #SBATCH --cpus-per-task=8 #SBATCH --nodes=1 #SBATCH --mem=8192 # # This script merges SNP and Indel Files called with Samtool and Freebayes that are called individually. # import sys import os from utils import * from utils.configuration import * from commands import * import glob from pprint import pprint as pp #====================# # Load Configuration # #====================# cf = config(sys.argv[1]) sf = cf.get_sample_file() eav = cf.eav #=========# # Command # #=========# # Merging requires that filters within individual vcfs have passed. merge_vcfs = """bcftools merge --apply-filters PASS -O z {vcf_set} > {merged_vcf_name}; bcftools index -f {merged_vcf_name}""" #=====================# # Merge SNP VCF Files # #=====================# # If a union variant file does not exist, merge vcfs and generate. for caller in cf.snp_callers: union_variant_all = cf.union_variants[caller]["ALL"] if not file_exists(union_variant_all): for variant_type in ["SNP", "INDEL"]: union_variant_file = cf.union_variants[caller][variant_type] vcf_set = [] merged_vcf_name = "{cf.vcf_dir}/{cf.config_name}.{variant_type}.{caller}.individual.vcf.gz".format(**locals()) for SM, data in sf.SM_Group_set.items(): vcf_file = data["vcf_files"][caller + "_individual"][variant_type] assert(file_exists(vcf_file)) vcf_set.append(vcf_file) vcf_set = ' '.join(vcf_set) comm = merge_vcfs.format(**locals()) cf.command(comm) # Create union variant set. comm = r"""bcftools query -f '%CHROM\t%POS\t%REF,%ALT\n' {merged_vcf_name} > {union_variant_file}""".format(**locals()) cf.command(comm) # Remove individual if file_exists(union_variant_file) and check_seq_file(merged_vcf_name): vcf_set = ' '.join([x for x in vcf_set.split(" ")] + [x + ".csi" for x in vcf_set.split(" ")]) comm = "rm " + vcf_set cf.command(comm) merge_varsets = """ for i in `cat {cf.config_name}.SNP.{caller}.union_variants.txt | cut -f 1 | uniq`; do touch {union_variant_all} for f in `ls {cf.config_name}.*.{caller}.union_variants.txt`; do egrep "^$i\t" $f >> $i.{cf.config_name}.union_variant_temp.txt done; cat $i.{cf.config_name}.union_variant_temp.txt | sort -k2,2n >> {union_variant_all} rm $i.{cf.config_name}.union_variant_temp.txt done; """.format(**locals()) cf.command(merge_varsets) else: for variant_type in ["SNP", "INDEL"]: union_variant_file = cf.union_variants[caller][variant_type] vcf_set = [] print "RUNNING UNION" # Run Union merged_vcf_name = "{cf.vcf_dir}/{cf.config_name}.{caller}.union.vcf.gz".format(**locals()) for SM, data in sf.SM_Group_set.items(): print data["vcf_files"][caller + "_union"][variant_type] vcf_file = data["vcf_files"][caller + "_union"][variant_type] assert(file_exists(vcf_file)) vcf_set.append(vcf_file) vcf_set = ' '.join(vcf_set) comm = merge_vcfs.format(**locals()) cf.command(comm)
38.066667
131
0.585231
448
3,426
4.254464
0.292411
0.094439
0.044071
0.027282
0.435992
0.395068
0.360441
0.330535
0.313746
0.241343
0
0.00471
0.256276
3,426
89
132
38.494382
0.743328
0.158786
0
0.385965
0
0.052632
0.318596
0.130175
0
0
0
0
0.035088
0
null
null
0.017544
0.122807
null
null
0.052632
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
d8a7d33720089c11a74552c8c79ff625254ee85a
769
py
Python
cuhk03/init_env.py
cwpeng-cn/TorchReID
e6cf1d38bfc3100ea19e3e92aa4306b79fd3517b
[ "MIT" ]
null
null
null
cuhk03/init_env.py
cwpeng-cn/TorchReID
e6cf1d38bfc3100ea19e3e92aa4306b79fd3517b
[ "MIT" ]
null
null
null
cuhk03/init_env.py
cwpeng-cn/TorchReID
e6cf1d38bfc3100ea19e3e92aa4306b79fd3517b
[ "MIT" ]
null
null
null
import zipfile
import os


def download_and_prepare():
    reid_path = "/content/drive/My Drive/Colab/datasets/reid.zip"
    file_zip = zipfile.ZipFile(reid_path, 'r')
    for file in file_zip.namelist():
        file_zip.extract(file, r'.')
    with open("/content/drive/My Drive/Colab/ReID works/CVPR fintuning/resnet_ibn_b.py", "rb") as f, open(
            './resnet_ibn_b.py', 'wb') as fw:
        fw.write(f.read())
    with open("/content/drive/My Drive/Colab/ReID works/CVPR fintuning/net_149.pth", "rb") as f, open('./net_149.pth', 'wb') as fw:
        fw.write(f.read())


if not os.path.exists('./resnet_ibn_b.py'):
    download_and_prepare()
34.954545
118
0.559168
106
769
3.896226
0.40566
0.087167
0.101695
0.138015
0.40678
0.348668
0.348668
0.261501
0.261501
0.261501
0
0.011132
0.29909
769
21
119
36.619048
0.755102
0
0
0.25
0
0
0.314694
0.097529
0
0
0
0
0
1
0.0625
false
0
0.125
0
0.1875
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d8a907f41af888797cb8bfb82d2555a46654432c
2,109
py
Python
myutils/dictionaries.py
joeledwardson/betfair-browser
b641f134e60307250a0e51bafa849422ecf5264b
[ "MIT" ]
3
2021-11-23T19:03:02.000Z
2021-11-24T08:44:23.000Z
myutils/dictionaries.py
joeledwardson/betfair-browser
b641f134e60307250a0e51bafa849422ecf5264b
[ "MIT" ]
2
2021-11-23T18:47:31.000Z
2021-12-08T15:36:11.000Z
myutils/dictionaries.py
joeledwardson/betfair-browser
b641f134e60307250a0e51bafa849422ecf5264b
[ "MIT" ]
null
null
null
from typing import Iterable, Dict
import copy
from collections.abc import Mapping
from .exceptions import DictException


def validate_config(cfg: Dict, cfg_spec: Dict):
    _cfg = copy.deepcopy(cfg)
    for k, spec in cfg_spec.items():
        exist = k in _cfg
        val = _cfg.pop(k, None)
        if not spec.get('optional'):
            if not exist:
                raise DictException(f'expected key "{k}" in configuration dict as per config spec: "{cfg_spec}"')
        if exist:
            # if 'type' in spec:
            if not isinstance(val, spec['type']):
                raise DictException(f'expected key "{k}" value to be type "{spec["type"]}", got "{type(val)}"')
    if _cfg:
        raise DictException(f'configuration dictionary has unexpected values: "{_cfg}"')


def is_dict_subset(x, y):
    """recursively determine if key value pairs in x are a subset of y"""
    for k, v in x.items():
        if k not in y:
            return False
        elif type(v) is dict:
            if not isinstance(y[k], Iterable):
                return False
            elif not is_dict_subset(v, y[k]):
                return False
        elif v != y[k]:
            return False
    return True


def dict_update(updates: Mapping, base_dict: Mapping):
    """recursively update key value pairs of base_dict with updates"""
    for k, v in updates.items():
        if type(v) is not dict:
            # value is not dict
            base_dict[k] = v
            continue
        # value is dict
        if k not in base_dict:
            # value is dict & key not found in y
            base_dict[k] = v
            continue
        # value is dict & key found in y
        if isinstance(base_dict[k], Iterable):
            # value is dict & key found in y & value in y is iterable
            dict_update(v, base_dict[k])
            continue
        # value is dict & key found in y & value in y is not iterable
        base_dict[k] = v


def dict_sort(d: dict, key=lambda item: item[1]) -> Dict:
    """sort a dictionary items"""
    return {k: v for k, v in sorted(d.items(), key=key)}
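A minimal usage sketch for the helpers in the file above (illustrative only, not part of the recorded file); note that dict_update mutates its second argument in place:
# Illustrative usage:
base = {'a': 1, 'b': {'x': 1}}
dict_update({'b': {'y': 2}}, base)            # deep-merges the nested dict into base
assert base == {'a': 1, 'b': {'x': 1, 'y': 2}}
assert is_dict_subset({'b': {'x': 1}}, base)  # nested key/value pairs are a subset
assert list(dict_sort({'a': 2, 'b': 1})) == ['b', 'a']  # sorted by value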
31.954545
113
0.573732
305
2,109
3.888525
0.239344
0.040472
0.037943
0.047218
0.194772
0.171164
0.118887
0.118887
0.053963
0.053963
0
0.000706
0.328118
2,109
66
114
31.954545
0.836274
0.181129
0
0.232558
0
0
0.124267
0
0
0
0
0
0
1
0.093023
false
0
0.093023
0
0.325581
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d8a962009dd9b86f4b5fe67a974e8bc3e24fe091
307
py
Python
Test/test_main_methods.py
abualrubbaraa/Baraa-Validator
bff356f4e35ea7de66de799e7f063c383e298d1f
[ "MIT" ]
null
null
null
Test/test_main_methods.py
abualrubbaraa/Baraa-Validator
bff356f4e35ea7de66de799e7f063c383e298d1f
[ "MIT" ]
null
null
null
Test/test_main_methods.py
abualrubbaraa/Baraa-Validator
bff356f4e35ea7de66de799e7f063c383e298d1f
[ "MIT" ]
null
null
null
from BaraaValidator.transactionValidators import validateTransactionsFolder, validateTransactionsFile
import os


def test_mainMethods():
    dirpath = os.path.dirname(__file__)
    filepath = os.path.join(dirpath, 'transaction.json')
    assert (validateTransactionsFile(filepath)) == True


test_mainMethods()
43.857143
101
0.807818
29
307
8.344828
0.689655
0.123967
0
0
0
0
0
0
0
0
0
0
0.104235
307
7
102
43.857143
0.88
0
0
0
0
0
0.051948
0
0
0
0
0
0.142857
1
0.142857
false
0
0.285714
0
0.428571
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
d8aa198eaa6cbdf332e325c46c63a1a3c514ea9f
2,020
py
Python
linux-distro/package/nuxleus/Source/Vendor/Microsoft/IronPython-2.0.1/Lib/headstock/api/activity.py
mdavid/nuxleus
653f1310d8bf08eaa5a7e3326c2349e56a6abdc2
[ "BSD-3-Clause" ]
1
2017-03-28T06:41:51.000Z
2017-03-28T06:41:51.000Z
linux-distro/package/nuxleus/Source/Vendor/Microsoft/IronPython-2.0.1/Lib/headstock/api/activity.py
mdavid/nuxleus
653f1310d8bf08eaa5a7e3326c2349e56a6abdc2
[ "BSD-3-Clause" ]
null
null
null
linux-distro/package/nuxleus/Source/Vendor/Microsoft/IronPython-2.0.1/Lib/headstock/api/activity.py
mdavid/nuxleus
653f1310d8bf08eaa5a7e3326c2349e56a6abdc2
[ "BSD-3-Clause" ]
1
2016-12-13T21:08:58.000Z
2016-12-13T21:08:58.000Z
# -*- coding: utf-8 -*-

# supports for XEP 0012

__all__ = ['Activity']

from headstock.api import Entity
from headstock.api.jid import JID
from headstock.lib.utils import generate_unique

from bridge import Element as E
from bridge import Attribute as A
from bridge.common import XMPP_LAST_NS, XMPP_CLIENT_NS


class Activity(Entity):
    def __init__(self, from_jid, to_jid, type=u'get', stanza_id=None,
                 seconds=None, message=None):
        Entity.__init__(self, from_jid, to_jid)
        self.seconds = seconds
        self.message = message
        self.type = type
        self.stanza_id = stanza_id or generate_unique()

    def __repr__(self):
        return '<Activity (%s) at %s>' % (self.stanza_id, hex(id(self)),)

    @staticmethod
    def from_element(e):
        activity = Activity(JID.parse(e.get_attribute_value('from')),
                            JID.parse(e.get_attribute_value('to')),
                            e.get_attribute_value('type'),
                            e.get_attribute_value('id'))
        for child in e.xml_children:
            if not isinstance(child, E):
                continue
            if child.xml_ns == XMPP_LAST_NS:
                seconds = child.get_attribute_value('seconds')
                if seconds != None:
                    activity.seconds = long(seconds)
                activity.message = child.xml_text
        return activity

    @staticmethod
    def to_element(a):
        attrs = {}
        if a.from_jid:
            attrs[u'from'] = unicode(a.from_jid)
        if a.to_jid:
            attrs[u'to'] = unicode(a.to_jid)
        if a.type:
            attrs[u'type'] = a.type
        if a.stanza_id:
            attrs[u'id'] = a.stanza_id
        iq = E(u'iq', attributes=attrs, namespace=XMPP_CLIENT_NS)
        attr = {}
        if a.seconds != None:
            attr[u'seconds'] = unicode(a.seconds)
        E(u'query', namespace=XMPP_LAST_NS, parent=iq,
          content=a.message, attributes=attr)
        return iq
30.606061
73
0.578713
260
2,020
4.276923
0.273077
0.043165
0.076439
0.064748
0.082734
0.082734
0
0
0
0
0
0.003605
0.313366
2,020
65
74
31.076923
0.798125
0.021287
0
0.04
1
0
0.039007
0
0
0
0
0
0
1
0.08
false
0
0.12
0.02
0.28
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
d8aa39d9d29606bfc3d0bf3b107305b6d1c667aa
3,406
py
Python
metallic/metalearners/mbml/base.py
Renovamen/metallic
c3992e4b322f9d41d9b7997c472baf99c843046c
[ "MIT" ]
5
2021-04-14T07:31:06.000Z
2021-12-11T08:12:10.000Z
metallic/metalearners/mbml/base.py
Renovamen/metallic
c3992e4b322f9d41d9b7997c472baf99c843046c
[ "MIT" ]
1
2021-04-14T07:44:36.000Z
2021-04-15T14:01:52.000Z
metallic/metalearners/mbml/base.py
Renovamen/metallic
c3992e4b322f9d41d9b7997c472baf99c843046c
[ "MIT" ]
null
null
null
import os from abc import ABC, abstractmethod from typing import Callable, Optional, Tuple import torch from torch import nn, optim from ..base import MetaLearner class MBML(MetaLearner, ABC): """ A base class for metric-based meta-learning algorithms. Parameters ---------- model : torch.nn.Module Model to be wrapped optim : torch.optim.Optimizer Optimizer root : str Root directory to save checkpoints save_basename : str, optional Base name of the saved checkpoints lr_scheduler : callable, optional Learning rate scheduler loss_function : callable, optional Loss function device : optional Device on which the model is defined. If `None`, device will be detected automatically. """ def __init__( self, model: nn.Module, optim: optim.Optimizer, root: Optional[str] = None, save_basename: Optional[str] = None, lr_scheduler: Optional[Callable] = None, loss_function: Optional[Callable] = None, device: Optional = None ) -> None: super(MBML, self).__init__( model = model, root = root, save_basename = save_basename, lr_scheduler = lr_scheduler, loss_function = loss_function, device = device ) self.optim = optim @classmethod def load(cls, model_path: str, **kwargs): """Load a trained model.""" state = torch.load(model_path) # load model and optimizers kwargs['model'] = state['model'] kwargs['optim'] = state['optim'] # model name and save path if 'root' not in kwargs: kwargs['root'] = os.path.dirname(model_path) if 'save_basename' not in kwargs: kwargs['save_basename'] = os.path.basename(model_path) return cls(**kwargs) def save(self, prefix: Optional[str] = None) -> str: """Save the trained model.""" if self.root is None or self.save_basename is None: raise RuntimeError('The root directory or save basename of the' 'checkpoints is not defined.') state = { 'model': self.model, 'optim': self.optim } name = self.save_basename if prefix is not None: name = prefix + name + '.pth.tar' path = os.path.join(self.root, name) torch.save(state, os.path.join(self.root, name)) return path def step(self, batch: dict, meta_train: bool = True) -> Tuple[float]: if meta_train: self.model.train() else: self.model.eval() task_batch, n_tasks = self.get_tasks(batch) losses, accuracies = 0., 0. self.optim.zero_grad() for task_data in task_batch: loss, accuracy = self.single_task(task_data) losses += loss.detach().item() accuracies += accuracy.item() if meta_train == True: (loss / n_tasks).backward() self.optim.step() # average the losses and accuracies losses /= n_tasks accuracies /= n_tasks return losses, accuracies @abstractmethod def single_task( self, task: Tuple[torch.Tensor], meta_train: bool = True ) -> Tuple[float]: pass
26.818898
75
0.579272
391
3,406
4.933504
0.2711
0.055988
0.023328
0.017626
0.050804
0.050804
0
0
0
0
0
0.00087
0.325308
3,406
126
76
27.031746
0.838555
0.192601
0
0
0
0
0.053127
0
0
0
0
0
0
1
0.068493
false
0.013699
0.082192
0
0.205479
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d8aa62a69b511bd6fe8eb61dcb1de9e114c05f4a
8,331
py
Python
backend/app/settings.py
draihal/main-pr
81814c5370b592963e91ad0683caa560b0ea9579
[ "MIT" ]
2
2021-01-28T08:23:15.000Z
2021-03-09T06:06:58.000Z
backend/app/settings.py
draihal/main-pr
81814c5370b592963e91ad0683caa560b0ea9579
[ "MIT" ]
9
2020-01-02T15:31:04.000Z
2021-12-09T01:59:26.000Z
backend/app/settings.py
draihal/main-pr
81814c5370b592963e91ad0683caa560b0ea9579
[ "MIT" ]
1
2021-03-09T06:11:16.000Z
2021-03-09T06:11:16.000Z
""" Django settings for app project. Generated by 'django-admin startproject' using Django 2.2.5. For more information on this file, see https://docs.djangoproject.com/en/2.2/topics/settings/ For the full list of settings and their values, see https://docs.djangoproject.com/en/2.2/ref/settings/ """ from datetime import timedelta import os # Build paths inside the project like this: os.path.join(BASE_DIR, ...) BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # Quick-start development settings - unsuitable for production # See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! SECRET_KEY = os.environ.get('SECRET_KEY', 'you-will-never-guess') # SECURITY WARNING: don't run with debug turned on in production! # To disable debug, remove the variable from the environment instead of trying to type cast DEBUG = int(os.environ.get("DEBUG", default=0)) # 'DJANGO_ALLOWED_HOSTS' should be a single string of hosts with a space between each. # For example: 'DJANGO_ALLOWED_HOSTS=localhost 127.0.0.1 [::1]' ALLOWED_HOSTS = os.environ.get("DJANGO_ALLOWED_HOSTS", "localhost 127.0.0.1 [::1]").split(" ") # Application definition INSTALLED_APPS = [ 'django.contrib.admin', 'django.contrib.auth', 'django.contrib.contenttypes', 'django.contrib.sessions', 'django.contrib.messages', 'django.contrib.staticfiles', 'corsheaders', 'solo.apps.SoloAppConfig', 'rest_framework', 'drf_yasg', 'djoser', 'django_celery_beat', 'django_celery_results', 'djcelery_email', # Local 'users.apps.UsersConfig', 'education.apps.EducationConfig', 'pages.apps.PagesConfig', 'django_cleanup.apps.CleanupConfig', ] AUTH_USER_MODEL = 'users.CustomUser' MIDDLEWARE = [ 'corsheaders.middleware.CorsMiddleware', 'django.middleware.security.SecurityMiddleware', 'django.contrib.sessions.middleware.SessionMiddleware', 'django.middleware.common.CommonMiddleware', 'django.middleware.csrf.CsrfViewMiddleware', 'django.contrib.auth.middleware.AuthenticationMiddleware', 'django.contrib.messages.middleware.MessageMiddleware', 'django.middleware.clickjacking.XFrameOptionsMiddleware', ] ROOT_URLCONF = 'app.urls' TEMPLATES = [ { 'BACKEND': 'django.template.backends.django.DjangoTemplates', 'DIRS': [os.path.join(BASE_DIR, 'templates')], 'APP_DIRS': True, 'OPTIONS': { 'context_processors': [ 'django.template.context_processors.debug', 'django.template.context_processors.request', 'django.contrib.auth.context_processors.auth', 'django.contrib.messages.context_processors.messages', ], }, }, ] WSGI_APPLICATION = 'app.wsgi.application' # Database # https://docs.djangoproject.com/en/2.2/ref/settings/#databases DATABASES = { "default": { "ENGINE": os.environ.get("SQL_ENGINE", "django.db.backends.sqlite3"), "NAME": os.environ.get("SQL_DATABASE", os.path.join(BASE_DIR, "db.sqlite3")), "USER": os.environ.get("SQL_USER", "user"), "PASSWORD": os.environ.get("SQL_PASSWORD", "password"), "HOST": os.environ.get("SQL_HOST", "localhost"), "PORT": os.environ.get("SQL_PORT", "5432"), } } # Password validation # https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', }, { 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', }, { 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', }, { 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', }, ] # Internationalization # 
https://docs.djangoproject.com/en/2.2/topics/i18n/ LANGUAGE_CODE = 'ru-RU' TIME_ZONE = 'Europe/Moscow' USE_I18N = True USE_L10N = True USE_TZ = True # Static files (CSS, JavaScript, Images) # https://docs.djangoproject.com/en/2.1/howto/static-files/ STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles') STATIC_URL = '/staticfiles/' # Extra places for collectstatic to find static files. STATICFILES_DIRS = ( os.path.join(BASE_DIR, 'static'), ) MEDIA_URL = '/mediafiles/' MEDIA_ROOT = os.path.join(BASE_DIR, "/mediafiles/") REST_FRAMEWORK = { 'DEFAULT_AUTHENTICATION_CLASSES': [ 'rest_framework_simplejwt.authentication.JWTAuthentication', 'rest_framework.authentication.SessionAuthentication', ], 'DEFAULT_PERMISSION_CLASSES': [ 'rest_framework.permissions.AllowAny', ], 'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.PageNumberPagination', 'PAGE_SIZE': 20, 'DEFAULT_SCHEMA_CLASS': 'rest_framework.schemas.coreapi.AutoSchema', } SIMPLE_JWT = { 'AUTH_HEADER_TYPES': ('JWT', 'Bearer'), 'ACCESS_TOKEN_LIFETIME': timedelta(hours=1), # timedelta(minutes=15), 'REFRESH_TOKEN_LIFETIME': timedelta(days=1), } DJOSER = { 'TOKEN_MODEL': None, # needed for JWT 'PERMISSIONS': { 'user_delete': ['users.permissions.IsAdminUser'], }, 'SERIALIZERS': { 'user_create': 'users.serializers.CustomUserCreateSerializer', 'user': 'users.serializers.CustomUserWithProfileSerializer', 'current_user': 'users.serializers.CustomUserSerializer', }, # 'HIDE_USERS': If set to True, listing /users/ enpoint by normal user will return only that user’s # profile in the list. Beside that, accessing /users/<id>/ endpoints by user without # proper permission will result in HTTP 404 instead of HTTP 403. 'HIDE_USERS': True, 'ACTIVATION_URL': 'users/activation/{uid}/{token}', # TODO: urls in frontend, POST to back 'PASSWORD_RESET_CONFIRM_URL': 'users/password/reset/confirm/{uid}/{token}', # TODO: urls in frontend, POST to back 'USERNAME_RESET_CONFIRM_URL': 'users/reset/confirm/{uid}/{token}', # TODO: urls in frontend, POST to back 'SEND_ACTIVATION_EMAIL': False, 'SEND_CONFIRMATION_EMAIL': False, 'PASSWORD_CHANGED_EMAIL_CONFIRMATION': False, 'USERNAME_CHANGED_EMAIL_CONFIRMATION': False, 'EMAIL': { 'activation': 'users.emails.CustomActivationEmail', 'confirmation': 'users.emails.CustomConfirmationEmail', 'password_reset': 'users.emails.CustomPasswordResetEmail', 'password_changed_confirmation': 'users.emails.CustomPasswordChangedConfirmationEmail', 'username_changed_confirmation': 'users.emails.CustomUsernameChangedConfirmationEmail', 'username_reset': 'users.emails.CustomUsernameResetEmail', } } SWAGGER_SETTINGS = { 'SECURITY_DEFINITIONS': { 'basic': { 'type': 'basic' } }, } CELERY_BROKER_URL = os.environ.get('REDIS_URL') CELERY_RESULT_BACKEND = os.environ.get('REDIS_URL') CELERY_ACCEPT_CONTENT = ['application/json'] CELERY_TASK_SERIALIZER = 'json' CELERY_RESULT_SERIALIZER = 'json' CELERY_TIMEZONE = TIME_ZONE EMAIL_BACKEND = 'djcelery_email.backends.CeleryEmailBackend' EMAIL_HOST = os.environ.get('EMAIL_HOST', 'smtp.sendgrid.net') EMAIL_PORT = os.environ.get('EMAIL_PORT', 587) EMAIL_HOST_USER = os.environ.get('EMAIL_HOST_USER') EMAIL_HOST_PASSWORD = os.environ.get('EMAIL_HOST_PASSWORD') EMAIL_USE_TLS = os.environ.get('EMAIL_USE_TLS', True) DEFAULT_FROM_EMAIL = os.environ.get('DEFAULT_FROM_EMAIL', 'noreply@webmaster') CORS_ORIGIN_ALLOW_ALL = False CORS_ORIGIN_WHITELIST = ( # can be like r'^https://\w+\.example\.com$' os.environ.get('CORS_ORIGIN_WHITELIST', 'http://localhost:3000'), ) if 'SENTRY_DSN' in os.environ: import sentry_sdk from 
sentry_sdk.integrations.django import DjangoIntegration sentry_sdk.init( dsn=os.environ['SENTRY_DSN'], integrations=[DjangoIntegration()] ) # try: # from local_settings import * # except ImportError as e: # # No local settings was found, skipping. # pass # if not DEBUG and len(SECRET_KEY) < 25: # print(f'The value of DJANGO_SECRET_KEY does not contain enough characters ({len(SECRET_KEY)} characters)') # raise RuntimeError(f'DJANGO_SECRET_KEY is not long enough (in environment variable "DJANGO_SECRET_KEY")')
31.79771
119
0.706638
967
8,331
5.910031
0.355739
0.031496
0.037795
0.030621
0.153456
0.130884
0.074891
0.074891
0.055993
0.016798
0
0.009302
0.161205
8,331
261
120
31.91954
0.808529
0.25099
0
0.018293
1
0
0.52228
0.36939
0
0
0
0.003831
0
1
0
false
0.067073
0.02439
0
0.02439
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
2
d8aadfacb7f4de5abfc2dccb19ef5736e4d36538
593
py
Python
python/sorting/group_0s_1s.py
amitsaha/playground
82cb5ac02ac90d3fa858a5153b0a5705187c14ce
[ "Unlicense" ]
4
2018-04-14T16:28:39.000Z
2021-11-14T12:08:02.000Z
python/sorting/group_0s_1s.py
amitsaha/playground
82cb5ac02ac90d3fa858a5153b0a5705187c14ce
[ "Unlicense" ]
3
2022-02-14T10:38:51.000Z
2022-02-27T16:01:16.000Z
python/sorting/group_0s_1s.py
amitsaha/playground
82cb5ac02ac90d3fa858a5153b0a5705187c14ce
[ "Unlicense" ]
4
2015-07-07T01:01:27.000Z
2019-04-12T05:38:26.000Z
'''
Groups the 0s and 1s together from a random array

Reference:
http://www.geeksforgeeks.org/segregate-0s-and-1s-in-an-array-by-traversing-array-once/
'''
from __future__ import print_function


def rearrange(arr):
    p1 = 0
    p2 = len(arr) - 1
    while p1 < p2:
        if arr[p1] == 0:
            p1 += 1
        if arr[p2] == 1:
            p2 -= 1
        if p1 < p2:
            arr[p1], arr[p2] = arr[p2], arr[p1]
    return arr


print(rearrange([0, 0, 1, 1]))
print(rearrange([1, 0, 0, 1, 1]))
print(rearrange([1, 0, 0, 0, 1, 0, 0]))
print(rearrange([0, 1, 0, 1, 0, 1, 0, 1]))
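Hand-traced expected output of the four calls above (zeros are grouped to the front):
# [0, 0, 1, 1]
# [0, 0, 1, 1, 1]
# [0, 0, 0, 0, 0, 1, 1]
# [0, 0, 0, 0, 1, 1, 1, 1]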
21.962963
97
0.548061
100
593
3.2
0.36
0.04375
0.0375
0.0375
0.15
0.15
0.125
0.125
0.125
0
0
0.111628
0.274874
593
26
98
22.807692
0.632558
0.247892
0
0
0
0
0
0
0
0
0
0
0
1
0.0625
false
0
0.0625
0
0.1875
0.3125
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d8ad33478b60fc223af35de65ba50412bd1bf355
3,039
py
Python
GABClient/GAB.Client/wwwroot/ml/pipeline1/mu.py
intelequia/GAB2019ScienceLab.Client
982bcfacc31c25201755eb2353aef2204923261b
[ "MIT" ]
null
null
null
GABClient/GAB.Client/wwwroot/ml/pipeline1/mu.py
intelequia/GAB2019ScienceLab.Client
982bcfacc31c25201755eb2353aef2204923261b
[ "MIT" ]
null
null
null
GABClient/GAB.Client/wwwroot/ml/pipeline1/mu.py
intelequia/GAB2019ScienceLab.Client
982bcfacc31c25201755eb2353aef2204923261b
[ "MIT" ]
null
null
null
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import numpy as np
from scipy.signal import savgol_filter
import sys


def Interpolate(time, mask, y):
    yy = np.array(y)
    t_ = np.delete(time, mask)
    y_ = np.delete(y, mask, axis = 0)
    if len(yy.shape) == 1:
        yy[mask] = np.interp(time[mask], t_, y_)
    elif len(yy.shape) == 2:
        for n in range(yy.shape[1]):
            yy[mask, n] = np.interp(time[mask], t_, y_[:, n])
    else:
        raise Exception("Array ``y`` must be either 1- or 2-d.")
    return yy


def Chunks(l, n, all = False):
    if all:
        jarr = range(0, n - 1)
    else:
        jarr = [0]
    for j in jarr:
        for i in range(j, len(l), n):
            if i + 2 * n <= len(l):
                yield l[i:i + n]
            else:
                if not all:
                    yield l[i:]
                break


def Smooth(x, window_len = 100, window = 'hanning'):
    if window_len == 0:
        return np.zeros_like(x)
    s = np.r_[2 * x[0] - x[window_len - 1::-1], x, 2 * x[-1] - x[-1:-window_len:-1]]
    if window == 'flat':
        w = np.ones(window_len, 'd')
    else:
        w = eval('np.' + window + '(window_len)')
    y = np.convolve(w / w.sum(), s, mode = 'same')
    return y[window_len:-window_len + 1]


def Scatter(y, win = 13, remove_outliers = False):
    if remove_outliers:
        if len(y) >= 50:
            ys = y - Smooth(y, 50)
        else:
            ys = y
        M = np.nanmedian(ys)
        MAD = 1.4826 * np.nanmedian(np.abs(ys - M))
        out = []
        for i, _ in enumerate(y):
            if (ys[i] > M + 5 * MAD) or (ys[i] < M - 5 * MAD):
                out.append(i)
        out = np.array(out, dtype = int)
        y = np.delete(y, out)
    if len(y):
        return 1.e6 * np.nanmedian([np.std(yi) / np.sqrt(win) for yi in Chunks(y, win, all = True)])
    else:
        return np.nan


def SavGol(y, win = 49):
    if len(y) >= win:
        return y - savgol_filter(y, win, 2) + np.nanmedian(y)
    else:
        return y


def _float(s):
    try:
        res = float(s)
    except:
        res = np.nan
    return res


def Downbin(x, newsize, axis = 0, operation = 'mean'):
    assert newsize < x.shape[axis], "The new size of the array must be smaller than the current size."
    oldsize = x.shape[axis]
    newshape = list(x.shape)
    newshape[axis] = newsize
    newshape.insert(axis + 1, oldsize // newsize)
    trim = oldsize % newsize
    if trim:
        xtrim = x[:-trim]
    else:
        xtrim = x
    if operation == 'mean':
        xbin = np.nanmean(xtrim.reshape(newshape), axis = axis + 1)
    elif operation == 'sum':
        xbin = np.nansum(xtrim.reshape(newshape), axis = axis + 1)
    elif operation == 'quadsum':
        xbin = np.sqrt(np.nansum(xtrim.reshape(newshape) ** 2, axis = axis + 1))
    elif operation == 'median':
        xbin = np.nanmedian(xtrim.reshape(newshape), axis = axis + 1)
    else:
        raise ValueError("`operation` must be either `mean`, `sum`, `quadsum`, or `median`.")
    return xbin
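A small usage sketch for Downbin from the file above (illustrative only, not part of the recorded file):
# Illustrative usage:
import numpy as np
x = np.arange(12, dtype=float)
print(Downbin(x, 3))                    # mean of each 4-sample bin -> [1.5 5.5 9.5]
print(Downbin(x, 3, operation='sum'))   # sum of each bin -> [ 6. 22. 38.]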
29.504854
102
0.524844
453
3,039
3.472406
0.284768
0.045772
0.050858
0.045772
0.159568
0.094723
0.053401
0.053401
0
0
0
0.023717
0.320171
3,039
102
103
29.794118
0.737657
0.01382
0
0.101124
0
0
0.07379
0
0
0
0
0
0.011236
1
0.078652
false
0
0.033708
0
0.213483
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d8ad658c4df19c485095900714b12cbc63dc40bd
544
py
Python
setup.py
farouk-muha/pav_bsc
f12e2365e97146d05a1e60f1a6112bb3e08295dd
[ "MIT" ]
null
null
null
setup.py
farouk-muha/pav_bsc
f12e2365e97146d05a1e60f1a6112bb3e08295dd
[ "MIT" ]
null
null
null
setup.py
farouk-muha/pav_bsc
f12e2365e97146d05a1e60f1a6112bb3e08295dd
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages

with open('requirements.txt') as f:
    install_requires = f.read().strip().split('\n')

# get version from __version__ variable in pav_bsc/__init__.py
from pav_bsc import __version__ as version

setup(
    name='pav_bsc',
    version=version,
    description='Partner ERPNext - Add Value On Balanced Scorecard',
    author='Farouk Muharram',
    author_email='farouk1dev@gmail.com',
    packages=find_packages(),
    zip_safe=False,
    include_package_data=True,
    install_requires=install_requires
)
25.904762
65
0.766544
75
544
5.24
0.693333
0.114504
0
0
0
0
0
0
0
0
0
0.004158
0.115809
544
20
66
27.2
0.81289
0.150735
0
0
0
0
0.237473
0
0
0
0
0
0
1
0
false
0
0.133333
0
0.133333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d8adc735050a0fd5a61d2b42aa76a945a006c221
2,957
py
Python
components/resnet-cmle/resnet/deploy.py
cbreuel/pipelines
22a85b4af642b896b57293c0d15d0f20c995be99
[ "Apache-2.0" ]
9
2019-03-28T02:20:45.000Z
2021-12-01T22:43:36.000Z
components/resnet-cmle/resnet/deploy.py
cbreuel/pipelines
22a85b4af642b896b57293c0d15d0f20c995be99
[ "Apache-2.0" ]
2
2019-10-17T16:51:43.000Z
2019-10-18T01:18:35.000Z
components/resnet-cmle/resnet/deploy.py
cbreuel/pipelines
22a85b4af642b896b57293c0d15d0f20c995be99
[ "Apache-2.0" ]
4
2019-04-11T12:09:59.000Z
2020-10-11T15:53:53.000Z
# Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import json
import argparse
import os
from time import gmtime, strftime
import time
import subprocess
import logging

logging.getLogger().setLevel(logging.INFO)


def parse_arguments():
    """Parse command line arguments."""
    parser = argparse.ArgumentParser()
    parser.add_argument('--model',
                        type = str,
                        default = 'flowers_model',
                        help = 'What to name your ml-engine model')
    parser.add_argument('--version',
                        type = str,
                        default = 'resnet',
                        help = 'What to name the version of the model')
    parser.add_argument('--model_dir',
                        type = str,
                        required=True,
                        help = 'The model directory generated by the train component.')
    parser.add_argument('--project_id',
                        type = str,
                        required = True,
                        default = '',
                        help = 'Pass in your project id.')
    parser.add_argument('--region',
                        type = str,
                        default = 'us-central1',
                        help = 'Region to use.')
    parser.add_argument('--TFVERSION',
                        type = str,
                        default = '1.8',
                        help = 'Version of TensorFlow to use.')
    args = parser.parse_args()
    return args


if __name__ == "__main__":
    args = parse_arguments()

    model_export_dir = os.path.join(args.model_dir, 'export')
    logging.info('Writing latest model directory name: ' + model_export_dir)
    subprocess.call('gsutil ls ' + model_export_dir + ' | tail -1 > model.txt', shell=True)
    with open("./model.txt", "r") as model_path_file:
        model_location = model_path_file.read()[:-1]

    logging.info('Deploying ' + args.model + ' ' + args.version + ' from ' + model_location + ' ... this will take a few minutes')
    subprocess.call('gcloud ml-engine versions delete ' + args.version + ' --model=' + args.model + ' --quiet', shell=True)
    subprocess.call('gcloud ml-engine models create ' + args.model + ' --regions ' + args.region, shell=True)
    subprocess.check_call('gcloud ml-engine versions create ' + args.version +
                          ' --model ' + args.model +
                          ' --origin ' + str(model_location) +
                          ' --runtime-version=' + args.TFVERSION, shell=True)
41.069444
131
0.600609
347
2,957
5.017291
0.420749
0.034463
0.058587
0.031017
0.080414
0
0
0
0
0
0
0.006185
0.289144
2,957
72
132
41.069444
0.822074
0.195807
0
0.163265
0
0
0.249894
0
0
0
0
0
0
1
0.020408
false
0.020408
0.142857
0
0.183673
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d8adf264910375ea507ebd88b7147dd9829ca904
3,506
py
Python
tests/test_quickbooks_payroll.py
fulfilio/trytond-quickbooks-payroll
18148e6f366025268b4335a89f07d2506ad5f446
[ "BSD-3-Clause" ]
null
null
null
tests/test_quickbooks_payroll.py
fulfilio/trytond-quickbooks-payroll
18148e6f366025268b4335a89f07d2506ad5f446
[ "BSD-3-Clause" ]
null
null
null
tests/test_quickbooks_payroll.py
fulfilio/trytond-quickbooks-payroll
18148e6f366025268b4335a89f07d2506ad5f446
[ "BSD-3-Clause" ]
null
null
null
# -*- coding: utf-8 -*- """ tests/test_quickbooks_payroll.py """ import csv import tempfile class TestQuickBooksPayroll: def test_views(self, install_module): "Test all tryton views" from trytond.tests.test_tryton import test_view test_view('quickbooks_payroll') def test_depends(self, install_module): "Test missing depends on fields" from trytond.tests.test_tryton import test_depends test_depends() def test_import_payroll_item(self, test_dataset, transaction): "Test import payroll item wizard" Date = self.POOL.get('ir.date') Account = self.POOL.get('account.account') Move = self.POOL.get('account.move') Employee = self.POOL.get('company.employee') QuickBooksPayroll = self.POOL.get('quickbooks.payroll_account') ImportPayrollItem = self.POOL.get( 'quickbooks.wizard_import_payroll_item', type='wizard' ) # Map quickbooks payroll item to tryton main_expense, = Account.search([('name', '=', 'Main Expense')]) main_expense.party_required = True main_expense.save() main_tax, = Account.search([('name', '=', 'Main Tax')]) main_tax.party_required = True main_tax.save() main_cash, = Account.search([('name', '=', 'Main Cash')]) QuickBooksPayroll.create([{ 'account': main_expense.id, 'payroll_item': 'Salary Expense', }, { 'account': main_tax.id, 'payroll_item': 'Federal Income Taxes Payable', }, { 'account': main_tax.id, 'payroll_item': 'State Income Taxes Payable', }, { 'account': main_tax.id, 'payroll_item': 'FICA Taxes Payable', }]) # Map employee to quickbooks source name employee, = Employee.search([]) employee.quickbooks_source_name = 'Pandey, Prakash' employee.save() credit_account, = Account.search([], limit=1) import_payroll_item = ImportPayrollItem( ImportPayrollItem.create()[0] ) import_payroll_item.start.credit_account = main_cash with tempfile.NamedTemporaryFile(delete=False) as csv_file: csv_writer = csv.writer(csv_file, quoting=csv.QUOTE_ALL) csv_writer.writerow([ 'Date', 'Num', 'Type', 'Source Name', 'Payroll Item', 'Wage Base', 'Amount', ]) csv_writer.writerow([ Date.today(), '309333', 'Cash', "Pandey, Prakash", 'Salary Expense', '', '-100000', ]) csv_writer.writerow([ '', '', '', "Pandey, Prakash", 'Federal Income Taxes Payable', '', 15000, ]) csv_writer.writerow([ '', '', '', "Pandey, Prakash", 'State Income Taxes Payable', '', 5000, ]) csv_writer.writerow([ '', '', '', "Pandey, Prakash", 'FICA Taxes Payable', '', 7650, ]) csv_writer.writerow([ '', '', '', '', '', '', 72350 ]) csv_file.flush() import_payroll_item.start.csv_file = \ buffer(open(csv_file.name).read()) _, res = import_payroll_item.do_import_(action=None) move, = Move.search([]) assert move.id in res['res_id'] assert len(move.lines) == 5 Move.post([move])
31.303571
78
0.553622
350
3,506
5.351429
0.302857
0.076348
0.063534
0.033636
0.148959
0.100908
0.086492
0.048051
0.048051
0
0
0.014114
0.312892
3,506
111
79
31.585586
0.763387
0.061894
0
0.222222
0
0
0.195892
0.018756
0
0
0
0
0.024691
1
0.037037
false
0
0.160494
0
0.209877
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d8aed52f5f4d4d6a14a346f71946749b037d0d84
4,284
py
Python
general/cc12m.py
robvanvolt/DALLE-datasets
527e54aeac879bc4da669fa5c5b64c9354890728
[ "MIT" ]
60
2021-05-09T02:51:10.000Z
2022-03-27T06:36:04.000Z
general/cc12m.py
robvanvolt/DALLE-datasets
527e54aeac879bc4da669fa5c5b64c9354890728
[ "MIT" ]
4
2021-07-07T21:24:33.000Z
2021-11-17T21:54:17.000Z
general/cc12m.py
robvanvolt/DALLE-datasets
527e54aeac879bc4da669fa5c5b64c9354890728
[ "MIT" ]
9
2021-05-20T14:38:59.000Z
2022-02-18T11:51:20.000Z
import pandas as pd
import os
import requests
from pathlib import Path
from PIL import Image
from tqdm import tqdm
from multiprocessing import Pool
import gc
import glob

cc_url = 'https://storage.googleapis.com/conceptual_12m/cc12m.tsv'
root_folder = './'
total = 12423374
maxwidth = 256
maxheight = 256
thread_count = 16
batch = 10000


def load_caption(x):
    name, caption, text_folder = x
    fid = str(int(int(name) / 10000))
    subdir = "0"*(5-len(fid)) + fid
    os.makedirs(Path(text_folder+"/"+subdir), exist_ok=True)
    fp = text_folder + '/' + subdir + "/" + "0"*(9-len(str(name))) + str(name) + '.txt'
    with open(fp, 'w') as f:
        f.write(caption)


def download_file(url):
    response = requests.get(url, stream=True)
    total_size_in_bytes = int(response.headers.get('content-length', 0))
    block_size = 1024
    progress_bar = tqdm(total=total_size_in_bytes, unit='iB', unit_scale=True)
    with open(Path(root_folder + '/cc12m.tsv'), 'wb') as file:
        for data in response.iter_content(block_size):
            progress_bar.update(len(data))
            file.write(data)
    progress_bar.close()
    if total_size_in_bytes != 0 and progress_bar.n != total_size_in_bytes:
        print("Error, something went wrong...")


def load_image(x):
    name, url, image_folder, skip_folder = x
    fid = str(int(int(name) / 10000))
    subdir = "0"*(5-len(fid)) + fid
    os.makedirs(Path(image_folder+"/"+subdir), exist_ok=True)
    id = subdir + "/" + "0"*(9-len(str(name))) + str(name)
    try:
        with Image.open(requests.get(url, headers={'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:72.0) Gecko/20100101 Firefox/72.0'}, stream=True, timeout=3).raw) as foo:
            a = max(maxwidth/foo.size[0], maxheight/foo.size[1])
            foo = foo.resize((int(foo.size[0] * a), int(foo.size[1] * a)), Image.ANTIALIAS)
            with open(Path(image_folder + "/" + id + '.jpg'), 'wb') as file:
                foo.save(file, optimize=True, quality=85)
    except Exception:
        os.makedirs(Path(skip_folder+"/"+subdir), exist_ok=True)
        open(Path(skip_folder + '/' + id), 'a').close
        pass


if __name__ == '__main__':
    if not os.path.isfile(Path(root_folder + '/cc12m.tsv')):
        print('Missing cc12m url-caption-dataset. Downloading...')
        download_file(cc_url)
    else:
        print('cc12m.tsv already downloaded. Proceeding with downloading images!')

    dfc = pd.read_csv(root_folder + "cc12m.tsv", sep='\t', names=["url", "caption"])

    image_folder = root_folder + '/images'
    text_folder = root_folder + '/texts'
    skip_folder = root_folder + '/skip'
    paths = [image_folder, text_folder, skip_folder]
    for path in paths:
        os.makedirs(path, exist_ok=True)

    def list_ids(path):
        return [int(os.path.splitext(os.path.basename(a))[0]) for a in glob.glob(path+"/**/*")]

    skiplist = list_ids(text_folder)
    remaining = total - len(skiplist)
    percent_remaining = 100 * (total - remaining) / total
    df = dfc.loc[~dfc.index.isin(skiplist)]
    print('Remaining {} captions to be written - {} ({:.5f} %) already written.'.format(remaining, len(skiplist), percent_remaining))

    if len(df) > 0:
        captions = zip(df.index, df["caption"], [text_folder]*len(df))
        pool = Pool(thread_count)
        for _ in tqdm(pool.imap_unordered(load_caption, captions), total=len(df)):
            pass
        pool.close()
        print('Done with captions!')

    skiplist = list_ids(skip_folder) + list_ids(image_folder)
    remaining = total - len(skiplist)
    percent_remaining = 100 * (total - remaining) / total
    df = dfc.loc[~dfc.index.isin(skiplist)]
    print('Remaining {} images to be downloaded - {} ({:.5f} %) already downloaded.'.format(remaining, len(skiplist), percent_remaining))

    images = list(zip(df.index, df["url"], [image_folder]*len(df), [skip_folder]*len(df)))
    for i in tqdm(range(0, len(df), batch)):
        pool = Pool(thread_count)
        for _ in tqdm(pool.imap_unordered(load_image, images[i:i+batch]), total=batch):
            pass
        pool.terminate()
        pool.join()
        del pool
        gc.collect()

    print('Finished downloading available images from conceptual images!')
37.578947
137
0.635854
595
4,284
4.433613
0.29916
0.026535
0.021228
0.024261
0.259666
0.216831
0.184989
0.184989
0.166035
0.166035
0
0.029325
0.211951
4,284
113
138
37.911504
0.752073
0
0
0.15625
0
0.010417
0.147292
0
0
0
0
0
0
1
0.041667
false
0.03125
0.09375
0.010417
0.145833
0.072917
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d8afbaf2b23594f1f8428b9469b3f739261f7278
518
py
Python
jwtauthenticator/test_jwtauthenticator.py
YuanZhencai/jwtauthenticator
0be5e45a3c63f479758ea2768035266d3de5ec41
[ "Apache-2.0" ]
null
null
null
jwtauthenticator/test_jwtauthenticator.py
YuanZhencai/jwtauthenticator
0be5e45a3c63f479758ea2768035266d3de5ec41
[ "Apache-2.0" ]
null
null
null
jwtauthenticator/test_jwtauthenticator.py
YuanZhencai/jwtauthenticator
0be5e45a3c63f479758ea2768035266d3de5ec41
[ "Apache-2.0" ]
null
null
null
from unittest import TestCase

from jose import jwt


class TestJSONWebTokenLoginHandler(TestCase):
    def test_parse_jwt(self):
        json_web_token = 'eyJhbGciOiJIUzUxMiJ9.eyJzdWIiOiJ5dWFuemhlbmNhaSIsImF1dGgiOiJST0xFX1VTRVIiLCJleHAiOjE1OTAyMjk5NTJ9.PcI6wGxXew-AASYqCKneUyW4ZUVHosgfE0qkWh0Y5pB4nNr1kneSC8yt8liJ31TSjhzt2VVAgyoYnci-_R-Wfw'
        secret = 'b939fce9c8879b8d41886695da17c363a0004bf7'
        data = jwt.decode(json_web_token, secret, 'HS512', options={'verify_signature': False})
        self.assertIsNotNone(data)
37
207
0.835907
42
518
10.119048
0.738095
0.032941
0.056471
0
0
0
0
0
0
0
0
0.106838
0.096525
518
13
208
39.846154
0.801282
0
0
0
0
0
0.474806
0.434109
0
0
0
0
0.125
1
0.125
false
0
0.25
0
0.5
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
d8b00b965eee02af4b8f3676c77e8a154d98eecb
5,707
py
Python
src/itint/widget.py
ColorsWind/iTint
48d18ed42d9ca44caa2c71104cf4f489fe54d98d
[ "MIT" ]
1
2022-01-15T07:01:41.000Z
2022-01-15T07:01:41.000Z
src/itint/widget.py
ColorsWind/iTint
48d18ed42d9ca44caa2c71104cf4f489fe54d98d
[ "MIT" ]
null
null
null
src/itint/widget.py
ColorsWind/iTint
48d18ed42d9ca44caa2c71104cf4f489fe54d98d
[ "MIT" ]
null
null
null
import numpy as np
from PySide2.QtCore import Qt, QUrl, QSize, QEventLoop
from PySide2.QtGui import QPixmap, QDropEvent, QDragEnterEvent, QMouseEvent, QResizeEvent, QHideEvent
from PySide2.QtWidgets import QApplication, QWidget, QHBoxLayout, QFileDialog, QWidgetItem

from itint.octree import Octree
from itint.ui_widget import Ui_MainWidget
from itint.widget_color_display import qimage_to_pil, ColorDisplayWidget
from itint.widget_screen_color_picker import ScreenColorPicker
from itint.widget_screenshot import WidgetScreenShot


class MainWidget(QWidget):
    def __init__(self, parent=None):
        super(MainWidget, self).__init__(parent)
        self.setAcceptDrops(True)
        self.internal_loader = Ui_MainWidget()
        self.internal_loader.setupUi(self)
        self.screen = WidgetScreenShot()
        self.picker = ScreenColorPicker()
        self.layout = QHBoxLayout(self.internal_loader.colorDisplayContent)
        self.layout.setAlignment(Qt.AlignLeft)
        self.internal_loader.btnFromScan.clicked.connect(self.btn_from_screen)
        self.default_text = self.internal_loader.labelImagePreview.text()
        self.internal_loader.labelImagePreview.mousePressEvent = self.btn_from_file
        self.internal_loader.btnColorPickup.clicked.connect(self.btn_from_screen_color_picker)
        self.internal_loader.btnFromClipboard.clicked.connect(self.btn_from_clipboard)
        self.pixmap = QPixmap()
        self.hide_callback = None

    def dropEvent(self, event: QDropEvent) -> None:
        url: QUrl = event.mimeData().urls()[0]
        self.pixmap.load(url.toLocalFile())
        self.update_image()
        if not self.pixmap.isNull():
            self.update_color_display(self.pixmap)

    def dragEnterEvent(self, event: QDragEnterEvent) -> None:
        if event.mimeData().hasUrls() and event.mimeData().urls()[0].isLocalFile():
            event.acceptProposedAction()

    def check_and_clear_color_display(self):
        if self.internal_loader.cBtnAutoClear.isChecked():
            for i in range(self.layout.count()):
                color_display_widget: QWidgetItem = self.layout.itemAt(i)
                color_display_widget.widget().deleteLater()

    def update_color_display(self, image: QPixmap):
        if image.isNull():
            return
        data = np.asarray(qimage_to_pil(image)).reshape((-1, 3))
        tree = Octree()
        tree.build(data, 8)
        colors = tree.get_color(tree.root)
        self.check_and_clear_color_display()
        for r, g, b in colors:
            color_display_widget = ColorDisplayWidget(r, g, b, self)
            self.layout.addWidget(color_display_widget)

    def resizeEvent(self, event: QResizeEvent):
        self.update_image()

    def hideEvent(self, event: QHideEvent):
        if self.hide_callback is not None:
            self.hide_callback()
            self.hide_callback = None

    def update_image(self):
        if self.pixmap.isNull():
            self.internal_loader.labelImagePreview.setText(self.default_text)
        else:
            pixel_ratio = QApplication.primaryScreen().devicePixelRatio()
            pixmap_aspect = self.pixmap.width() / self.pixmap.height()
            label_width = self.internal_loader.labelImagePreview.width() * pixel_ratio
            label_height = self.internal_loader.labelImagePreview.height() * pixel_ratio
            label_aspect = label_width / label_height
            if pixmap_aspect > label_aspect:
                pixmap = self.pixmap.scaled(
                    QSize(label_width, label_width / pixmap_aspect),
                    Qt.KeepAspectRatio,
                    Qt.SmoothTransformation,
                )
            else:
                pixmap = self.pixmap.scaled(
                    QSize(label_height * pixmap_aspect, label_height),
                    Qt.KeepAspectRatio,
                    Qt.SmoothTransformation,
                )
            self.internal_loader.labelImagePreview.setPixmap(pixmap)

    def btn_from_screen_color_picker(self):
        def callback_screen_color_picker(rgb):
            r, g, b = rgb
            color_display_widget = ColorDisplayWidget(r, g, b, self)
            self.layout.addWidget(color_display_widget)
            if self.internal_loader.cBtnAutoHide.isChecked():
                self.setVisible(True)
                self.setWindowOpacity(1.0)

        if self.internal_loader.cBtnAutoHide.isChecked():
            self.setVisible(False)
            self.setWindowOpacity(0.0)
            QApplication.processEvents(QEventLoop.AllEvents)
            # time.sleep(0.20)  # window animation
        self.picker.pick_color(callback=callback_screen_color_picker)

    def btn_from_screen(self):
        def callback_captured_image(pixmap: QPixmap):
            self.pixmap = pixmap
            self.update_image()
            if self.internal_loader.cBtnAutoHide.isChecked():
                self.setVisible(True)
            self.update_color_display(pixmap)

        if self.internal_loader.cBtnAutoHide.isChecked():
            self.setVisible(False)
        self.screen.capture_screen(callback=callback_captured_image)

    def btn_from_file(self, event: QMouseEvent):
        # Dialog caption and filter translated from the original Chinese UI strings.
        filepath, _ = QFileDialog.getOpenFileName(self, "Select file", "",
                                                  "Images (*.png;*.jpg;*.gif;*.bmp);;All types (*)")
        self.pixmap.load(filepath)
        self.update_image()
        if not self.pixmap.isNull():
            self.update_color_display(self.pixmap)

    def btn_from_clipboard(self):
        clipboard = QApplication.clipboard()
        self.pixmap = clipboard.pixmap()
        self.update_image()
        self.update_color_display(self.pixmap)
39.909091
109
0.656913
611
5,707
5.92144
0.255319
0.056385
0.084577
0.058043
0.264234
0.209232
0.153676
0.153676
0.153676
0.153676
0
0.003514
0.251971
5,707
142
110
40.190141
0.843992
0.003855
0
0.278261
0
0
0.007394
0.005458
0
0
0
0
0
1
0.121739
false
0
0.078261
0
0.217391
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d8b1c38d182c0ed927319fd435a35f4a6f89a701
162
py
Python
saleor/webhook/observability/exceptions.py
DevPoke/saleor
ced3a2249a18031f9f593e71d1d18aa787ec1060
[ "CC-BY-4.0" ]
null
null
null
saleor/webhook/observability/exceptions.py
DevPoke/saleor
ced3a2249a18031f9f593e71d1d18aa787ec1060
[ "CC-BY-4.0" ]
null
null
null
saleor/webhook/observability/exceptions.py
DevPoke/saleor
ced3a2249a18031f9f593e71d1d18aa787ec1060
[ "CC-BY-4.0" ]
null
null
null
class ObservabilityError(Exception):
    pass


class ConnectionNotConfigured(ObservabilityError):
    pass


class TruncationError(ObservabilityError):
    pass
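Because both concrete errors subclass ObservabilityError, callers can catch the whole family with one handler; a minimal sketch (illustrative only, not part of the recorded file):
try:
    raise TruncationError("payload too large")
except ObservabilityError as err:
    print(f"observability error handled: {err}")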
14.727273
50
0.790123
12
162
10.666667
0.5
0.140625
0
0
0
0
0
0
0
0
0
0
0.154321
162
10
51
16.2
0.934307
0
0
0.5
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
4
d8b1d59e248305d321ae1893e6fe00d702a94519
4,480
py
Python
public/PushPrices.py
efalken/OracleSwap
4884cf06f809103a22e39486fcc83c0b956ca7f5
[ "MIT" ]
5
2020-03-11T07:18:36.000Z
2020-09-14T23:57:21.000Z
public/PushPrices.py
efalken/OracleSwap
4884cf06f809103a22e39486fcc83c0b956ca7f5
[ "MIT" ]
null
null
null
public/PushPrices.py
efalken/OracleSwap
4884cf06f809103a22e39486fcc83c0b956ca7f5
[ "MIT" ]
1
2020-08-18T00:01:46.000Z
2020-08-18T00:01:46.000Z
import requests
from datetime import datetime, timedelta
import os
from pathlib import Path
from web3 import Web3, HTTPProvider
import time
import json

# put real private key and address here
wallet_private_key = "0x00000000000000000000000000000000"
wallet_address = "0x000000000000000000000000000000"

# put personal infura address here
# this only works for oracleContract
provider_url = "https://mainnet.infura.io/v3/00000000000000000000000000000000"

# change to your oracle address
contract_address = "0x000000000000000000000000000000"

start_time = time.time()

gas_url = "https://ethgasstation.info/json/ethgasAPI.json"
gas_p = 0
req = requests.get(gas_url)
if (req.status_code == 200):
    t = json.loads(req.content)
    gas_p = t['fast'] / 5
    print('gas price', gas_p)
else:
    gas_p = 15

w3 = Web3(HTTPProvider(provider_url))

# NOTE: `contract_abi` (a Path) is only assigned in the __main__ block below;
# as ordered in the recorded file, this block raises a NameError when run as a script.
with open(contract_abi) as f:
    contract_abi = json.load(f)

# 1 for mainnet, 3 for ropsten
contract_address = contract_abi['address']
contract_address = w3.toChecksumAddress(contract_address)
contract = w3.eth.contract(address=contract_address, abi=contract_abi['abi'])


def update_PriceBatch(ethprice, spxprice, btcprice, final_day):
    nonce = w3.eth.getTransactionCount(wallet_address)
    txn_dict = contract.functions.updatePrices(ethprice, spxprice, btcprice, final_day).buildTransaction({
        'gas': 500000,
        'gasPrice': w3.toWei(gas_p, 'gwei'),
        'nonce': nonce,
    })
    signed_txn = w3.eth.account.signTransaction(txn_dict, private_key=wallet_private_key)
    result = w3.eth.sendRawTransaction(signed_txn.rawTransaction)
    tx_receipt = w3.eth.waitForTransactionReceipt(result)
    if tx_receipt is None:
        return {'status': 'failed', 'error': 'timeout'}
    return {'status': 'added'}


def update_Settle(ethprice, spxprice, btcprice):
    nonce = w3.eth.getTransactionCount(wallet_address)
    txn_dict = contract.functions.settlePrice(ethprice, spxprice, btcprice).buildTransaction({
        'gas': 900000,
        'gasPrice': w3.toWei(gas_p, 'gwei'),
        'nonce': nonce,
    })
    signed_txn = w3.eth.account.signTransaction(txn_dict, private_key=wallet_private_key)
    result = w3.eth.sendRawTransaction(signed_txn.rawTransaction)
    tx_receipt = w3.eth.waitForTransactionReceipt(result)
    if tx_receipt is None:
        return {'status': 'failed', 'error': 'timeout'}
    return {'status': 'added'}


def is_final_day():
    date_num = datetime.today().weekday()
    if date_num == 3:
        return True
    else:
        return False


if __name__ == "__main__":
    curr_date = datetime.now().date()
    current_date = curr_date.strftime("%y%m%d")

    '''
    if a thursday, will report true, indicating tomorrow is a settlement price.
    If Thurs or Friday is a holiday, this needs to be adjusted so you will settle correctly
    '''
    is_final = is_final_day()

    eth_app = 'eth' + current_date + '.txt'
    btc_app = 'btc' + current_date + '.txt'
    spx_app = 'spx' + current_date + '.txt'

    eth_new = Path('/home/lorenzo/oracle/data/') / eth_app
    btc_new = Path('/home/lorenzo/oracle/data/') / btc_app
    spx_new = Path('/home/lorenzo/oracle/data/') / spx_app
    contract_abi = Path('/home/lorenzo/oracle/OracleMain.json')

    spx_final_f = open(spx_new, 'r')
    spx_final = spx_final_f.readline()
    spx_final_f.close()
    spx_final = float(spx_final)

    eth_final_f = open(eth_new, 'r')
    eth_final = eth_final_f.readline()
    eth_final_f.close()
    eth_final = float(eth_final)

    btc_final_f = open(btc_new, 'r')
    btc_final = btc_final_f.readline()
    btc_final_f.close()
    btc_final = float(btc_final)

    w3 = Web3(HTTPProvider(provider_url))
    with open(contract_abi) as f:
        contract_abi = json.load(f)
    contract = w3.eth.contract(address=contract_address, abi=contract_abi['abi'])

    isSettle = contract.functions.nextUpdateSettle().call()
    updateTime = contract.functions.lastUpdateTime().call()
    OracleContractDay = datetime.utcfromtimestamp(updateTime).strftime("%y%m%d")

    '''
    checks to see if date of last price is today. If so, it does not send
    '''
    if current_date != OracleContractDay:
        '''
        if not the settlement day, uses the intraweek price update function
        '''
        if not isSettle:
            eth_tx = update_PriceBatch(int(eth_final * 1e2), int(spx_final * 1e2), int(btc_final * 1e2), is_final)
        else:
            eth_tx = update_Settle(int(eth_final * 1e2), int(spx_final * 1e2), int(btc_final * 1e2))
36.422764
114
0.707143
595
4,480
5.112605
0.302521
0.016437
0.028928
0.027613
0.380671
0.359632
0.332018
0.332018
0.332018
0.332018
0
0.048959
0.174777
4,480
122
115
36.721311
0.773871
0.036607
0
0.329787
0
0
0.125157
0.053173
0
0
0.02458
0
0
0
null
null
0
0.074468
null
null
0.010638
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
d8b41261c2c681fcdb62fde84ac5266ed078c65f
816
py
Python
hwtLib/examples/statements/constDriver_test.py
optical-o/hwtLib
edad621f5ad4cdbea20a5751ff4468979afe2f77
[ "MIT" ]
null
null
null
hwtLib/examples/statements/constDriver_test.py
optical-o/hwtLib
edad621f5ad4cdbea20a5751ff4468979afe2f77
[ "MIT" ]
null
null
null
hwtLib/examples/statements/constDriver_test.py
optical-o/hwtLib
edad621f5ad4cdbea20a5751ff4468979afe2f77
[ "MIT" ]
null
null
null
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

from hwt.hdl.constants import Time
from hwt.simulator.simTestCase import SingleUnitSimTestCase
from hwtLib.examples.statements.constDriver import ConstDriverUnit


class ConstDriverTC(SingleUnitSimTestCase):
    @classmethod
    def getUnit(cls):
        cls.u = ConstDriverUnit()
        return cls.u

    def test_simple(self):
        u = self.u
        self.runSim(20 * Time.ns)

        self.assertValSequenceEqual(u.out0._ag.data, [0, 0])
        self.assertValSequenceEqual(u.out1._ag.data, [1, 1])


if __name__ == "__main__":
    import unittest

    suite = unittest.TestSuite()
    # suite.addTest(TwoCntrsTC('test_nothingEnable'))
    suite.addTest(unittest.makeSuite(ConstDriverTC))
    runner = unittest.TextTestRunner(verbosity=3)
    runner.run(suite)
26.322581
66
0.699755
93
816
6.010753
0.612903
0.025045
0.0322
0
0
0
0
0
0
0
0
0.016541
0.185049
816
30
67
27.2
0.82406
0.11152
0
0
0
0
0.01108
0
0
0
0
0
0.105263
1
0.105263
false
0
0.210526
0
0.421053
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d8b43126c4341230aae3fa4c8b5aa73490e76164
356
py
Python
uebung/bmi.py
wieerwill/Python-Intro
6b6f1d8b1b5c95590ffe15b0b4ddf188b680b491
[ "MIT" ]
3
2019-03-02T16:34:53.000Z
2021-11-15T11:43:53.000Z
uebung/bmi.py
wieerwill/Python-Intro
6b6f1d8b1b5c95590ffe15b0b4ddf188b680b491
[ "MIT" ]
null
null
null
uebung/bmi.py
wieerwill/Python-Intro
6b6f1d8b1b5c95590ffe15b0b4ddf188b680b491
[ "MIT" ]
null
null
null
# Calculate your Body-Mass-Index with Python
print("BMI - Calculator!")

weight_str = input("Please insert your weight (in kg): ")
height_str = input("Please insert your body's height (in m): ")

weight = float(weight_str.replace(",", "."))
height = float(height_str.replace(",", "."))

bmi = weight / height ** 2
print("Your BMI is: " + str(round(bmi, 1)))
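A worked example of the BMI formula used above (illustrative only):
# weight = 70 kg, height = 1.75 m
# bmi = 70 / 1.75 ** 2 = 70 / 3.0625 = 22.857... -> printed as 22.9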
29.666667
61
0.668539
51
356
4.588235
0.509804
0.076923
0.119658
0.17094
0.205128
0
0
0
0
0
0
0.006579
0.146067
356
12
62
29.666667
0.763158
0.117978
0
0
0
0
0.345048
0
0
0
0
0
0
1
0
false
0
0
0
0
0.285714
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
d8b44009ab655e1119911f81cd812061c34aa19f
491
py
Python
tutorial_web_scraper.py
mariusciurea/webscraping-tutorials
9fb53252c4cc08d5e2b8b0d46e67c2374e7c84c5
[ "Unlicense" ]
null
null
null
tutorial_web_scraper.py
mariusciurea/webscraping-tutorials
9fb53252c4cc08d5e2b8b0d46e67c2374e7c84c5
[ "Unlicense" ]
null
null
null
tutorial_web_scraper.py
mariusciurea/webscraping-tutorials
9fb53252c4cc08d5e2b8b0d46e67c2374e7c84c5
[ "Unlicense" ]
null
null
null
import requests
from bs4 import BeautifulSoup

# with open('index.html', 'rb') as hf:
#     soup = BeautifulSoup(hf, 'html.parser')

# print(soup.prettify())
# print(soup.head.title.text)
# print(soup.li.a.h2.text)
# print(soup.li.a.p.text)

source_code = requests.get('https://mariusciurea.github.io/links/')
soup = BeautifulSoup(source_code.content, 'lxml')

apps = soup.find_all('a', {'title': 'Ajuta un elev sa aleaga informat facultatea'})
for app in apps:
    print(app)
28.882353
83
0.684318
72
491
4.625
0.638889
0.108108
0.078078
0.09009
0.096096
0
0
0
0
0
0
0.004796
0.150713
491
16
84
30.6875
0.793765
0.366599
0
0
0
0
0.3125
0
0
0
0
0
0
1
0
false
0
0.285714
0
0.285714
0.142857
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d8b44da55092c3a84cde6a1b4d363471d58fc94a
333
py
Python
PBO_18126/tugas 3.1.py
nurhumairaabri21/PBO
12bbc2e3ca408330ac5ff2e02492bfa56b21c3eb
[ "MIT" ]
null
null
null
PBO_18126/tugas 3.1.py
nurhumairaabri21/PBO
12bbc2e3ca408330ac5ff2e02492bfa56b21c3eb
[ "MIT" ]
null
null
null
PBO_18126/tugas 3.1.py
nurhumairaabri21/PBO
12bbc2e3ca408330ac5ff2e02492bfa56b21c3eb
[ "MIT" ]
null
null
null
# input --> reads a line of input from the user and returns it as a string
# int --> converts a number or a numeric string into an integer
a = int(input("enter value A: "))
b = int(input("enter value B: "))
a += 3
print(a)
b -= 10
print(b)
a *= 4
print(a)
b **= 2
print(b)
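A hand-traced run of the augmented assignments above (illustrative), assuming the user enters A=5 and B=20:
# a = 5 + 3   -> prints 8
# b = 20 - 10 -> prints 10
# a = 8 * 4   -> prints 32
# b = 10 ** 2 -> prints 100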
19.588235
95
0.684685
52
333
4.384615
0.538462
0.026316
0.131579
0.175439
0
0
0
0
0
0
0
0.018587
0.192192
333
17
96
19.588235
0.828996
0.537538
0
0.4
0
0
0.248175
0
0
0
0
0
0
1
0
false
0
0
0
0
0.4
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
d8b5512902d4771f78aff0209b9c30789409995b
21,636
py
Python
tests/6_performance.py
Lucieno/SEAL-Python
9be8f65e945f43de03669a6550c26de0b098326b
[ "MIT" ]
3
2019-12-24T07:08:33.000Z
2020-05-13T17:40:28.000Z
tests/6_performance.py
adi-ar/LogisticRegression_SEAL-Python
047bcf7e2142bbdca178754de26cace4298c5936
[ "MIT" ]
null
null
null
tests/6_performance.py
adi-ar/LogisticRegression_SEAL-Python
047bcf7e2142bbdca178754de26cace4298c5936
[ "MIT" ]
null
null
null
import time
import math
import random

from seal import *
from seal_helper import *


def rand_int():
    return int(random.random() * (10 ** 10))


def bfv_performance_test(context):
    print_parameters(context)

    parms = context.first_context_data().parms()
    plain_modulus = parms.plain_modulus()
    poly_modulus_degree = parms.poly_modulus_degree()

    print("Generating secret/public keys: ", end="")
    keygen = KeyGenerator(context)
    print("Done")

    secret_key = keygen.secret_key()
    public_key = keygen.public_key()

    relin_keys = RelinKeys()
    gal_keys = GaloisKeys()
    if context.using_keyswitching():
        # Generate relinearization keys.
        print("Generating relinearization keys: ", end="")
        time_start = time.time()
        relin_keys = keygen.relin_keys()
        time_end = time.time()
        print("Done [" + "%.0f" % ((time_end - time_start) * 1000000) +
              " microseconds]")

        if not context.key_context_data().qualifiers().using_batching:
            print("Given encryption parameters do not support batching.")
            return 0

        print("Generating Galois keys: ", end="")
        time_start = time.time()
        gal_keys = keygen.galois_keys()
        time_end = time.time()
        print("Done [" + "%.0f" % ((time_end - time_start) * 1000000) +
              " microseconds]")

    encryptor = Encryptor(context, public_key)
    decryptor = Decryptor(context, secret_key)
    evaluator = Evaluator(context)
    batch_encoder = BatchEncoder(context)
    encoder = IntegerEncoder(context)

    # These will hold the total times used by each operation.
    time_batch_sum = 0
    time_unbatch_sum = 0
    time_encrypt_sum = 0
    time_decrypt_sum = 0
    time_add_sum = 0
    time_multiply_sum = 0
    time_multiply_plain_sum = 0
    time_square_sum = 0
    time_relinearize_sum = 0
    time_rotate_rows_one_step_sum = 0
    time_rotate_rows_random_sum = 0
    time_rotate_columns_sum = 0

    # How many times to run the test?
    count = 10

    # Populate a vector of values to batch.
    slot_count = batch_encoder.slot_count()
    pod_vector = uIntVector()
    for i in range(slot_count):
        pod_vector.push_back(rand_int() % plain_modulus.value())
    print("Running tests ", end="")

    for i in range(count):
        '''
        [Batching]
        There is nothing unusual here. We batch our random plaintext matrix
        into the polynomial. Note how the plaintext we create is of exactly
        the right size, so unnecessary reallocations are avoided.
        '''
        plain = Plaintext(parms.poly_modulus_degree(), 0)
        time_start = time.time()
        batch_encoder.encode(pod_vector, plain)
        time_end = time.time()
        time_batch_sum += (time_end - time_start) * 1000000

        '''
        [Unbatching]
        We unbatch what we just batched.
        '''
        pod_vector2 = uIntVector()
        time_start = time.time()
        batch_encoder.decode(plain, pod_vector2)
        time_end = time.time()
        time_unbatch_sum += (time_end - time_start) * 1000000
        for j in range(slot_count):
            if pod_vector[j] != pod_vector2[j]:
                raise Exception("Batch/unbatch failed. Something is wrong.")

        '''
        [Encryption]
        We make sure our ciphertext is already allocated and large enough to
        hold the encryption with these encryption parameters. We encrypt our
        random batched matrix here.
        '''
        encrypted = Ciphertext()
        time_start = time.time()
        encryptor.encrypt(plain, encrypted)
        time_end = time.time()
        time_encrypt_sum += (time_end - time_start) * 1000000

        '''
        [Decryption]
        We decrypt what we just encrypted.
        '''
        plain2 = Plaintext(poly_modulus_degree, 0)
        time_start = time.time()
        decryptor.decrypt(encrypted, plain2)
        time_end = time.time()
        time_decrypt_sum += (time_end - time_start) * 1000000
        if plain.to_string() != plain2.to_string():
            raise Exception("Encrypt/decrypt failed. Something is wrong.")

        '''
        [Add]
        We create two ciphertexts and perform a few additions with them.
        '''
        encrypted1 = Ciphertext()
        encryptor.encrypt(encoder.encode(i), encrypted1)
        encrypted2 = Ciphertext(context)
        encryptor.encrypt(encoder.encode(i + 1), encrypted2)

        time_start = time.time()
        evaluator.add_inplace(encrypted1, encrypted1)
        evaluator.add_inplace(encrypted2, encrypted2)
        evaluator.add_inplace(encrypted1, encrypted2)
        time_end = time.time()
        time_add_sum += (time_end - time_start) * 1000000

        '''
        [Multiply]
        We multiply two ciphertexts. Since the size of the result will be 3,
        and will overwrite the first argument, we first reserve enough memory
        to avoid reallocating during multiplication.
        '''
        encrypted1.reserve(3)
        time_start = time.time()
        evaluator.multiply_inplace(encrypted1, encrypted2)
        time_end = time.time()
        time_multiply_sum += (time_end - time_start) * 1000000

        '''
        [Multiply Plain]
        We multiply a ciphertext with a random plaintext. Recall that
        multiply_plain does not change the size of the ciphertext, so we use
        encrypted2 here.
        '''
        time_start = time.time()
        evaluator.multiply_plain_inplace(encrypted2, plain)
        time_end = time.time()
        time_multiply_plain_sum += (time_end - time_start) * 1000000

        '''
        [Square]
        We continue to use encrypted2. Now we square it; this should be
        faster than generic homomorphic multiplication.
        '''
        time_start = time.time()
        evaluator.square_inplace(encrypted2)
        time_end = time.time()
        time_square_sum += (time_end - time_start) * 1000000

        if context.using_keyswitching():
            '''
            [Relinearize]
            Time to get back to encrypted1. We now relinearize it back to
            size 2. Since the allocation is currently big enough to contain
            a ciphertext of size 3, no costly reallocations are needed in
            the process.
            '''
            time_start = time.time()
            evaluator.relinearize_inplace(encrypted1, relin_keys)
            time_end = time.time()
            time_relinearize_sum += (time_end - time_start) * 1000000

            '''
            [Rotate Rows One Step]
            We rotate matrix rows by one step left and measure the time.
            '''
            time_start = time.time()
            evaluator.rotate_rows_inplace(encrypted, 1, gal_keys)
            evaluator.rotate_rows_inplace(encrypted, -1, gal_keys)
            time_end = time.time()
            time_rotate_rows_one_step_sum += (time_end - time_start) * 1000000

            '''
            [Rotate Rows Random]
            We rotate matrix rows by a random number of steps. This is much
            more expensive than rotating by just one step.
            '''
            row_size = batch_encoder.slot_count() // 2
            random_rotation = int(rand_int() % row_size)
            time_start = time.time()
            evaluator.rotate_rows_inplace(
                encrypted, random_rotation, gal_keys)
            time_end = time.time()
            time_rotate_rows_random_sum += (time_end - time_start) * 1000000

            '''
            [Rotate Columns]
            Nothing surprising here.
            '''
            time_start = time.time()
            evaluator.rotate_columns_inplace(encrypted, gal_keys)
            time_end = time.time()
            time_rotate_columns_sum += (time_end - time_start) * 1000000

        # Print a dot to indicate progress.
        print(".", end="", flush=True)

    print(" Done", flush=True)

    avg_batch = time_batch_sum / count
    avg_unbatch = time_unbatch_sum / count
    avg_encrypt = time_encrypt_sum / count
    avg_decrypt = time_decrypt_sum / count
    avg_add = time_add_sum / (3 * count)
    avg_multiply = time_multiply_sum / count
    avg_multiply_plain = time_multiply_plain_sum / count
    avg_square = time_square_sum / count
    avg_relinearize = time_relinearize_sum / count
    avg_rotate_rows_one_step = time_rotate_rows_one_step_sum / (2 * count)
    avg_rotate_rows_random = time_rotate_rows_random_sum / count
    avg_rotate_columns = time_rotate_columns_sum / count

    print("Average batch: " + "%.0f" % avg_batch + " microseconds", flush=True)
    print("Average unbatch: " + "%.0f" % avg_unbatch + " microseconds", flush=True)
    print("Average encrypt: " + "%.0f" % avg_encrypt + " microseconds", flush=True)
    print("Average decrypt: " + "%.0f" % avg_decrypt + " microseconds", flush=True)
    print("Average add: " + "%.0f" % avg_add + " microseconds", flush=True)
    print("Average multiply: " + "%.0f" % avg_multiply + " microseconds", flush=True)
    print("Average multiply plain: " + "%.0f" % avg_multiply_plain + " microseconds", flush=True)
    print("Average square: " + "%.0f" % avg_square + " microseconds", flush=True)
    if context.using_keyswitching():
        print("Average relinearize: " + "%.0f" % avg_relinearize + " microseconds", flush=True)
        print("Average rotate rows one step: " + "%.0f" % avg_rotate_rows_one_step + " microseconds", flush=True)
        print("Average rotate rows random: " + "%.0f" % avg_rotate_rows_random + " microseconds", flush=True)
        print("Average rotate columns: " + "%.0f" % avg_rotate_columns + " microseconds", flush=True)


def ckks_performance_test(context):
    print_parameters(context)

    parms = context.first_context_data().parms()
    plain_modulus = parms.plain_modulus()
    poly_modulus_degree = parms.poly_modulus_degree()

    print("Generating secret/public keys: ", end="")
    keygen = KeyGenerator(context)
    print("Done")

    secret_key = keygen.secret_key()
    public_key = keygen.public_key()

    relin_keys = RelinKeys()
    gal_keys = GaloisKeys()
    if context.using_keyswitching():
        print("Generating relinearization keys: ", end="")
        time_start = time.time()
        relin_keys = keygen.relin_keys()
        time_end = time.time()
        print("Done [" + "%.0f" % ((time_end - time_start) * 1000000) +
              " microseconds]")

        if not context.key_context_data().qualifiers().using_batching:
            print("Given encryption parameters do not support batching.")
            return 0

        print("Generating Galois keys: ", end="")
        time_start = time.time()
        gal_keys = keygen.galois_keys()
        time_end = time.time()
        print("Done [" + "%.0f" % ((time_end - time_start) * 1000000) +
              " microseconds]")

    encryptor = Encryptor(context, public_key)
    decryptor = Decryptor(context, secret_key)
    evaluator = Evaluator(context)
    ckks_encoder = CKKSEncoder(context)

    time_encode_sum = 0
    time_decode_sum = 0
    time_encrypt_sum = 0
    time_decrypt_sum = 0
    time_add_sum = 0
    time_multiply_sum = 0
    time_multiply_plain_sum = 0
    time_square_sum = 0
    time_relinearize_sum = 0
    time_rescale_sum = 0
    time_rotate_one_step_sum = 0
    time_rotate_random_sum = 0
    time_conjugate_sum = 0

    # How many times to run the test?
    count = 10

    # Populate a vector of floating-point values to batch.
    pod_vector = DoubleVector()
    slot_count = ckks_encoder.slot_count()
    for i in range(slot_count):
        pod_vector.push_back(1.001 * float(i))
    print("Running tests ", end="")

    for i in range(count):
        '''
        [Encoding]
        For scale we use the square root of the last coeff_modulus prime
        from parms.
        '''
        plain = Plaintext(parms.poly_modulus_degree() *
                          len(parms.coeff_modulus()), 0)
        scale = math.sqrt(parms.coeff_modulus()[-1].value())
        time_start = time.time()
        ckks_encoder.encode(pod_vector, scale, plain)
        time_end = time.time()
        time_encode_sum += (time_end - time_start) * 1000000

        # [Decoding]
        pod_vector2 = DoubleVector()
        time_start = time.time()
        ckks_encoder.decode(plain, pod_vector2)
        time_end = time.time()
        time_decode_sum += (time_end - time_start) * 1000000

        # [Encryption]
        encrypted = Ciphertext(context)
        time_start = time.time()
        encryptor.encrypt(plain, encrypted)
        time_end = time.time()
        time_encrypt_sum += (time_end - time_start) * 1000000

        # [Decryption]
        plain2 = Plaintext(poly_modulus_degree, 0)
        time_start = time.time()
        decryptor.decrypt(encrypted, plain2)
        time_end = time.time()
        time_decrypt_sum += (time_end - time_start) * 1000000

        # [Add]
        encrypted1 = Ciphertext(context)
        ckks_encoder.encode(i + 1, plain)
        encryptor.encrypt(plain, encrypted1)
        encrypted2 = Ciphertext(context)
        ckks_encoder.encode(i + 1, plain2)
        encryptor.encrypt(plain2, encrypted2)

        time_start = time.time()
        evaluator.add_inplace(encrypted1, encrypted1)
        evaluator.add_inplace(encrypted2, encrypted2)
        evaluator.add_inplace(encrypted1, encrypted2)
        time_end = time.time()
        time_add_sum += (time_end - time_start) * 1000000

        # [Multiply]
        encrypted1.reserve(3)
        time_start = time.time()
        evaluator.multiply_inplace(encrypted1, encrypted2)
        time_end = time.time()
        time_multiply_sum += (time_end - time_start) * 1000000

        # [Multiply Plain]
        time_start = time.time()
        evaluator.multiply_plain_inplace(encrypted2, plain)
        time_end = time.time()
        time_multiply_plain_sum += (time_end - time_start) * 1000000

        # [Square]
        time_start = time.time()
        evaluator.square_inplace(encrypted2)
        time_end = time.time()
        time_square_sum += (time_end - time_start) * 1000000

        if context.using_keyswitching():
            # [Relinearize]
            time_start = time.time()
            evaluator.relinearize_inplace(encrypted1, relin_keys)
            time_end = time.time()
            time_relinearize_sum += (time_end - time_start) * 1000000

            # [Rescale]
            time_start = time.time()
            evaluator.rescale_to_next_inplace(encrypted1)
            time_end = time.time()
            time_rescale_sum += (time_end - time_start) * 1000000

            # [Rotate Vector]
            time_start = time.time()
            evaluator.rotate_vector_inplace(encrypted, 1, gal_keys)
            evaluator.rotate_vector_inplace(encrypted, -1, gal_keys)
            time_end = time.time()
            time_rotate_one_step_sum += (time_end - time_start) * 1000000

            # [Rotate Vector Random]
            random_rotation = int(rand_int() % ckks_encoder.slot_count())
            time_start = time.time()
            evaluator.rotate_vector_inplace(
                encrypted, random_rotation, gal_keys)
            time_end = time.time()
            time_rotate_random_sum += (time_end - time_start) * 1000000

            # [Complex Conjugate]
            time_start = time.time()
            evaluator.complex_conjugate_inplace(encrypted, gal_keys)
            time_end = time.time()
            time_conjugate_sum += (time_end - time_start) * 1000000

        # Print a dot to indicate progress.
        print(".", end="", flush=True)

    print(" Done\n", flush=True)

    avg_encode = time_encode_sum / count
    avg_decode = time_decode_sum / count
    avg_encrypt = time_encrypt_sum / count
    avg_decrypt = time_decrypt_sum / count
    avg_add = time_add_sum / (3 * count)
    avg_multiply = time_multiply_sum / count
    avg_multiply_plain = time_multiply_plain_sum / count
    avg_square = time_square_sum / count
    avg_relinearize = time_relinearize_sum / count
    avg_rescale = time_rescale_sum / count
    avg_rotate_one_step = time_rotate_one_step_sum / (2 * count)
    avg_rotate_random = time_rotate_random_sum / count
    avg_conjugate = time_conjugate_sum / count

    print("Average encode: " + "%.0f" % avg_encode + " microseconds", flush=True)
    print("Average decode: " + "%.0f" % avg_decode + " microseconds", flush=True)
    print("Average encrypt: " + "%.0f" % avg_encrypt + " microseconds", flush=True)
    print("Average decrypt: " + "%.0f" % avg_decrypt + " microseconds", flush=True)
    print("Average add: " + "%.0f" % avg_add + " microseconds", flush=True)
    print("Average multiply: " + "%.0f" % avg_multiply + " microseconds", flush=True)
    print("Average multiply plain: " + "%.0f" % avg_multiply_plain + " microseconds", flush=True)
    print("Average square: " + "%.0f" % avg_square + " microseconds", flush=True)
    if context.using_keyswitching():
        print("Average relinearize: " + "%.0f" % avg_relinearize + " microseconds", flush=True)
        print("Average rescale: " + "%.0f" % avg_rescale + " microseconds", flush=True)
        print("Average rotate vector one step: " + "%.0f" % avg_rotate_one_step + " microseconds", flush=True)
        print("Average rotate vector random: " + "%.0f" % avg_rotate_random + " microseconds", flush=True)
        print("Average complex conjugate: " + "%.0f" % avg_conjugate + " microseconds", flush=True)


def example_bfv_performance_default():
    print_example_banner(
        "BFV Performance Test with Degrees: 4096, 8192, and 16384")

    parms = EncryptionParameters(scheme_type.BFV)
    poly_modulus_degree = 4096
    parms.set_poly_modulus_degree(poly_modulus_degree)
    parms.set_coeff_modulus(CoeffModulus.BFVDefault(poly_modulus_degree))
    parms.set_plain_modulus(786433)
    bfv_performance_test(SEALContext.Create(parms))
    print()

    poly_modulus_degree = 8192
    parms.set_poly_modulus_degree(poly_modulus_degree)
    parms.set_coeff_modulus(CoeffModulus.BFVDefault(poly_modulus_degree))
    parms.set_plain_modulus(786433)
    bfv_performance_test(SEALContext.Create(parms))
    print()

    poly_modulus_degree = 16384
    parms.set_poly_modulus_degree(poly_modulus_degree)
    parms.set_coeff_modulus(CoeffModulus.BFVDefault(poly_modulus_degree))
    parms.set_plain_modulus(786433)
    bfv_performance_test(SEALContext.Create(parms))

    # Uncomment the following to run the largest example.
    # poly_modulus_degree = 32768


def example_bfv_performance_custom():
    print("\nSet poly_modulus_degree (1024, 2048, 4096, 8192, 16384, or 32768): ")
    poly_modulus_degree = input("Input the poly_modulus_degree: ").strip()
    if len(poly_modulus_degree) < 4 or not poly_modulus_degree.isdigit():
        print("Invalid option.")
        return 0
    poly_modulus_degree = int(poly_modulus_degree)
    # The power-of-two test needs parentheses around the bitwise AND, since
    # "&" binds more loosely than "!=".
    if poly_modulus_degree < 1024 or poly_modulus_degree > 32768 or \
            ((poly_modulus_degree & (poly_modulus_degree - 1)) != 0):
        print("Invalid option.")
        return 0

    print("BFV Performance Test with Degree: " + str(poly_modulus_degree))
    parms = EncryptionParameters(scheme_type.BFV)
    parms.set_poly_modulus_degree(poly_modulus_degree)
    parms.set_coeff_modulus(CoeffModulus.BFVDefault(poly_modulus_degree))
    if poly_modulus_degree == 1024:
        parms.set_plain_modulus(12289)
    else:
        parms.set_plain_modulus(786433)
    bfv_performance_test(SEALContext.Create(parms))


def example_ckks_performance_default():
    print_example_banner(
        "CKKS Performance Test with Degrees: 4096, 8192, and 16384")

    parms = EncryptionParameters(scheme_type.CKKS)
    poly_modulus_degree = 4096
    parms.set_poly_modulus_degree(poly_modulus_degree)
    parms.set_coeff_modulus(CoeffModulus.BFVDefault(poly_modulus_degree))
    ckks_performance_test(SEALContext.Create(parms))
    print()

    poly_modulus_degree = 8192
    parms.set_poly_modulus_degree(poly_modulus_degree)
    parms.set_coeff_modulus(CoeffModulus.BFVDefault(poly_modulus_degree))
    ckks_performance_test(SEALContext.Create(parms))

    poly_modulus_degree = 16384
    parms.set_poly_modulus_degree(poly_modulus_degree)
    parms.set_coeff_modulus(CoeffModulus.BFVDefault(poly_modulus_degree))
    ckks_performance_test(SEALContext.Create(parms))

    # Uncomment the following to run the largest example.
    # poly_modulus_degree = 32768


def example_ckks_performance_custom():
    print("\nSet poly_modulus_degree (1024, 2048, 4096, 8192, 16384, or 32768): ")
    poly_modulus_degree = input("Input the poly_modulus_degree: ").strip()
    if len(poly_modulus_degree) < 4 or not poly_modulus_degree.isdigit():
        print("Invalid option.")
        return 0
    poly_modulus_degree = int(poly_modulus_degree)
    if poly_modulus_degree < 1024 or poly_modulus_degree > 32768 or \
            ((poly_modulus_degree & (poly_modulus_degree - 1)) != 0):
        print("Invalid option.")
        return 0

    print("CKKS Performance Test with Degree: " + str(poly_modulus_degree))
    parms = EncryptionParameters(scheme_type.CKKS)
    parms.set_poly_modulus_degree(poly_modulus_degree)
    parms.set_coeff_modulus(CoeffModulus.BFVDefault(poly_modulus_degree))
    ckks_performance_test(SEALContext.Create(parms))


if __name__ == '__main__':
    print_example_banner("Example: Performance Test")
    example_bfv_performance_default()
    example_bfv_performance_custom()
    example_ckks_performance_default()
    example_ckks_performance_custom()
36.921502
123
0.650259
2,552
21,636
5.222571
0.10815
0.05042
0.082908
0.03699
0.76133
0.725015
0.685474
0.66139
0.641131
0.609469
0
0.031909
0.252588
21,636
585
124
36.984615
0.792282
0.028332
0
0.685504
0
0
0.101646
0
0
0
0
0
0
1
0.017199
false
0
0.012285
0.002457
0.046683
0.149877
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
d8b7b5fb9877032f2872e9552bf777e9672ad149
452
py
Python
semestr7/subd/model/client.py
ffedoroff/omgtu
0cad61a2ce6a7c9bb5c9cac72c16eccf33bb4b77
[ "MIT" ]
1
2017-08-01T03:49:15.000Z
2017-08-01T03:49:15.000Z
semestr7/subd/model/client.py
ffedoroff/omgtu
0cad61a2ce6a7c9bb5c9cac72c16eccf33bb4b77
[ "MIT" ]
null
null
null
semestr7/subd/model/client.py
ffedoroff/omgtu
0cad61a2ce6a7c9bb5c9cac72c16eccf33bb4b77
[ "MIT" ]
4
2020-05-22T17:10:38.000Z
2021-12-08T00:52:00.000Z
# -*- coding: utf-8 -*-
from django.db import models


class Client(models.Model):
    class Meta:
        app_label = "subd"
        verbose_name = "Client"
        verbose_name_plural = "Clients"

    fio = models.CharField("Full name", max_length=200)

    # Python 2 idiom: __str__ delegates to __unicode__.
    def __str__(self):
        return unicode(self).encode('utf-8')

    def __unicode__(self):
        return u"{0.fio}".format(self)
22.6
65
0.650442
56
452
5.017857
0.660714
0.02847
0.085409
0
0
0
0
0
0
0
0
0.017094
0.223451
452
19
66
23.789474
0.783476
0.04646
0
0
0
0
0.074592
0
0
0
0
0
0
1
0.166667
false
0
0.166667
0.166667
0.75
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
1
0
0
2
d8ba6e17bc85f2ea591e7b78c0b6ba596ae2eb60
2,866
py
Python
google_assist.py
eholic/dash-assistant
97204e1402fbb742fb7838e995110a22ea814ab5
[ "MIT" ]
null
null
null
google_assist.py
eholic/dash-assistant
97204e1402fbb742fb7838e995110a22ea814ab5
[ "MIT" ]
null
null
null
google_assist.py
eholic/dash-assistant
97204e1402fbb742fb7838e995110a22ea814ab5
[ "MIT" ]
null
null
null
import sys
import requests
import logging
import json

import google.auth.transport.grpc
import google.auth.transport.requests
import google.oauth2.credentials

from google.assistant.embedded.v1alpha2 import (
    embedded_assistant_pb2,
    embedded_assistant_pb2_grpc
)

from config import Config

# Ref: https://github.com/googlesamples/assistant-sdk-python/blob/master/google-assistant-sdk/googlesamples/assistant/grpc/textinput.py

ASSISTANT_API_ENDPOINT = 'embeddedassistant.googleapis.com'
DEFAULT_GRPC_DEADLINE = 60 * 3 + 5


def gassist(text_query, lang_code='en-US'):
    logging.info(text_query)

    # Load OAuth 2.0 credentials.
    try:
        with open(Config.CREDENTIALS, 'r') as f:
            credentials = google.oauth2.credentials.Credentials(
                token=None, **json.load(f))
        session = requests.Session()
        http_request = google.auth.transport.requests.Request(session)
        credentials.refresh(http_request)
    except Exception:
        logging.error('Error loading credentials', exc_info=True)
        sys.exit(-1)

    # Create an authorized gRPC channel.
    grpc_channel = google.auth.transport.grpc.secure_authorized_channel(
        credentials, http_request, ASSISTANT_API_ENDPOINT)

    # Create an assistant.
    assistant = embedded_assistant_pb2_grpc.EmbeddedAssistantStub(grpc_channel)

    def assist(text_query):
        def iter_assist_requests():
            config = embedded_assistant_pb2.AssistConfig(
                audio_out_config=embedded_assistant_pb2.AudioOutConfig(
                    encoding='LINEAR16',
                    sample_rate_hertz=16000,
                    volume_percentage=0,
                ),
                dialog_state_in=embedded_assistant_pb2.DialogStateIn(
                    language_code=lang_code,
                    conversation_state=None,
                    is_new_conversation=True,
                ),
                device_config=embedded_assistant_pb2.DeviceConfig(
                    device_id=Config.DEVICE_ID,
                    device_model_id=Config.DEVICE_MODEL_ID,
                ),
                text_query=text_query,
            )
            req = embedded_assistant_pb2.AssistRequest(config=config)
            yield req

        text_response = None
        html_response = None
        for resp in assistant.Assist(iter_assist_requests(),
                                     DEFAULT_GRPC_DEADLINE):
            if resp.screen_out.data:
                html_response = resp.screen_out.data
            if resp.dialog_state_out.supplemental_display_text:
                text_response = resp.dialog_state_out.supplemental_display_text
        return text_response, html_response

    text, html = assist(text_query)
    logging.info(text)
    grpc_channel.close()
    session.close()
    return text


if __name__ == '__main__':
    print(gassist('hello'))
34.95122
135
0.665736
316
2,866
5.756329
0.386076
0.074766
0.08796
0.042881
0.04508
0.04508
0.04508
0
0
0
0
0.0127
0.2582
2,866
81
136
35.382716
0.842897
0.075715
0
0.046154
0
0
0.03177
0.012103
0
0
0
0
0
1
0.046154
false
0
0.153846
0
0.230769
0.015385
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
d8bab771f310012235c06cb7dfdf54237fe96981
125
py
Python
src/pycrds/__init__.py
lokamigauti/metroclima-crds
70ad9d13810ed551deba1363e475e6a7271f3af1
[ "BSD-3-Clause" ]
null
null
null
src/pycrds/__init__.py
lokamigauti/metroclima-crds
70ad9d13810ed551deba1363e475e6a7271f3af1
[ "BSD-3-Clause" ]
null
null
null
src/pycrds/__init__.py
lokamigauti/metroclima-crds
70ad9d13810ed551deba1363e475e6a7271f3af1
[ "BSD-3-Clause" ]
null
null
null
__all__ = ["datafile", "flags", "graphs"] for module in __all__: __import__(__name__ + "." + module, globals(), locals())
41.666667
60
0.648
13
125
5
0.846154
0
0
0
0
0
0
0
0
0
0
0
0.152
125
3
60
41.666667
0.613208
0
0
0
0
0
0.15873
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
d8bbf917cb0bbef6cac30c8d90613ab18336f8eb
2,145
py
Python
d3d/box/__init__.py
minghanz/d3d
1d08013238b300489f61be57cdd20a105d16a632
[ "MIT" ]
null
null
null
d3d/box/__init__.py
minghanz/d3d
1d08013238b300489f61be57cdd20a105d16a632
[ "MIT" ]
null
null
null
d3d/box/__init__.py
minghanz/d3d
1d08013238b300489f61be57cdd20a105d16a632
[ "MIT" ]
null
null
null
import torch

from .box_impl import (
    iou2d as iou2d_cc,
    iou2d_cuda,
    nms2d as nms2d_cc,
    nms2d_cuda,
    rbox_2d_crop as rbox_2d_crop_cc,
    IouType,
    SupressionType
)


def box2d_iou(boxes1, boxes2, method="box"):
    '''
    :param method: 'box' - normal box, 'rbox' - rotated box
    '''
    if len(boxes1.shape) != 2 or len(boxes2.shape) != 2:
        raise ValueError("Input of box2d_iou should be 2-dimensional (Nx5) tensors!")
    if boxes1.shape[1] != 5 or boxes2.shape[1] != 5:
        raise ValueError("Input boxes should have 5 fields: x, y, w, h, r")

    iou_type = getattr(IouType, method.upper())
    if boxes1.is_cuda and boxes2.is_cuda:
        impl = iou2d_cuda
    else:
        impl = iou2d_cc
    return impl(boxes1, boxes2, iou_type)

# TODO: implement IoU loss, GIoU, DIoU, CIoU: https://zhuanlan.zhihu.com/p/104236411


def box2d_nms(boxes, scores, iou_method="box", supression_method="hard",
              iou_threshold=0, score_threshold=0, supression_param=0):
    '''
    :param iou_method: 'box' - normal box, 'rbox' - rotated box

    Soft-NMS: Bodla, Navaneeth, et al. "Soft-NMS--improving object detection
    with one line of code." Proceedings of the IEEE International Conference
    on Computer Vision. 2017.
    '''
    if len(boxes) != len(scores):
        raise ValueError("Numbers of boxes and scores are inconsistent!")
    if len(scores.shape) == 2:
        scores = scores.max(axis=1).values
    if boxes.numel() == 0:
        return torch.tensor([], dtype=torch.bool)

    iou_type = getattr(IouType, iou_method.upper())
    supression_type = getattr(SupressionType, supression_method.upper())
    if boxes.is_cuda and scores.is_cuda:
        impl = nms2d_cuda
    else:
        impl = nms2d_cc

    suppressed = impl(boxes, scores, iou_type, supression_type,
                      iou_threshold, score_threshold, supression_param)
    return ~suppressed


def box2d_crop(cloud, boxes):
    '''
    Crop point cloud points out of the given rotated boxes. The result is a
    list of index tensors, where each tensor holds the indices of the points
    lying in the corresponding box.
    '''
    result = rbox_2d_crop_cc(cloud, boxes)
    return result
34.047619
175
0.673193
304
2,145
4.611842
0.394737
0.017118
0.021398
0.017118
0.052782
0.052782
0.052782
0.052782
0
0
0
0.032316
0.220979
2,145
62
176
34.596774
0.806703
0.24662
0
0.051282
0
0
0.09283
0
0
0
0
0.016129
0
1
0.076923
false
0
0.051282
0
0.230769
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
d8bd67134893a262683665a0dbc9878a51447c79
15,809
py
Python
menu.py
Jasonlmx/Touhou-Star-Salvation
a8804450625957af7b81d0075873a68708374db8
[ "MIT" ]
4
2021-10-15T13:18:43.000Z
2022-03-05T10:49:47.000Z
menu.py
Jasonlmx/Touhou-Star-Salvation
a8804450625957af7b81d0075873a68708374db8
[ "MIT" ]
null
null
null
menu.py
Jasonlmx/Touhou-Star-Salvation
a8804450625957af7b81d0075873a68708374db8
[ "MIT" ]
1
2021-11-29T04:17:32.000Z
2021-11-29T04:17:32.000Z
import pygame
import sys
import random
import math
from pygame.locals import *
from pygame.sprite import Group
import gF
import Bullet
import DADcharacter
import Slave
import global_var
import Effect
import Item
import gameRule


class titleStar(pygame.sprite.Sprite):
    def __init__(self):
        super(titleStar, self).__init__()
        self.tx = 0.0
        self.ty = 0.0
        self.speedx = 0
        self.speedy = 0
        self.image = pygame.Surface((64, 64)).convert_alpha()
        self.image.fill((0, 0, 0, 0))
        self.image.blit(global_var.get_value('titleStar'), (0, 0), (0, 0, 64, 64))
        self.lastFrame = 0
        self.rAngle = random.random() * 360
        self.rDirection = random.randint(0, 1)
        if self.rDirection == 0:
            self.rDirection = -1
        self.rotation = (random.random() * 1.5 + 1.2) * self.rDirection
        self.maxFrame = 270 + random.randint(0, 80)
        self.shadowInt = 4
        self.voidifyFrame = 30
        self.speed = 0
        self.dDeg = -0.07 * random.random() - 0.07

    def initial(self, posx, posy):
        self.tx = posx
        self.ty = posy

    def movement(self):
        tick = global_var.get_value('DELTA_T')
        self.tx += self.speedx * 60 / 1000 * tick
        self.ty += self.speedy * 60 / 1000 * tick

    def speedAlter(self, speedx, speedy):
        self.speedx = speedx
        self.speedy = speedy

    def countAngle(self):
        deg = 0  # default so deg is defined even when speedx == speedy == 0
        if self.speedx != 0:
            t = self.speedy / self.speedx
            deg = math.atan(t) * 180 / math.pi
        else:
            if self.speedy > 0:
                deg = 90
            if self.speedy < 0:
                deg = 270
        if deg < 0:
            deg += 360
        if self.speedy > 0 and deg >= 180:
            deg = deg - 180
        if self.speedy < 0 and deg <= 180:
            deg = deg + 180
        if self.speedy == 0 and self.speedx < 0:
            deg = 180
        self.angle = deg

    def setSpeed(self, angle, speed):
        s = math.sin(math.radians(angle))
        c = math.cos(math.radians(angle))
        self.speedy = s * speed
        self.speedx = c * speed
        self.speed = speed

    def arc(self):
        if self.angle > 95:
            angle = self.angle + self.dDeg
            self.setSpeed(angle, self.speed)

    def checkValid(self):
        if self.lastFrame > self.maxFrame:
            self.kill()

    def update(self, screen, titleDec):
        self.lastFrame += 1
        self.rAngle += self.rotation
        self.movement()
        self.countAngle()
        self.arc()
        self.draw(screen)
        if self.lastFrame % self.shadowInt == 0:
            self.newShadow(titleDec)
        self.checkValid()

    def newShadow(self, titleDec):
        new_shadow = starShadow((self.tx, self.ty), 80, self.rAngle)
        titleDec.add(new_shadow)

    def draw(self, screen):
        pos = (round(self.tx) - 32, round(self.ty) - 32)
        if self.lastFrame <= self.voidifyFrame:
            # fade in
            tempImg = self.image
            alpha = round((256 - 56) * self.lastFrame / self.voidifyFrame + 56)
            tempImg.set_alpha(alpha)
            gF.drawRotation(tempImg, pos, self.rAngle, screen)
        elif (self.maxFrame - self.lastFrame) <= self.voidifyFrame:
            # fade out
            tempImg = self.image
            alpha = round((256 - 56) * (self.maxFrame - self.lastFrame) / self.voidifyFrame + 56)
            tempImg.set_alpha(alpha)
            gF.drawRotation(tempImg, pos, self.rAngle, screen)
        else:
            gF.drawRotation(self.image, pos, self.rAngle, screen)


class starShadow(pygame.sprite.Sprite):
    def __init__(self, pos, length=20, angle=0):
        super(starShadow, self).__init__()
        self.maxFrame = length
        self.angle = angle
        self.pos = pos
        self.image = pygame.Surface((64, 64)).convert_alpha()
        self.image.fill((0, 0, 0, 0))
        self.image.blit(global_var.get_value('titleStar'), (0, 0), (0, 0, 64, 64))
        self.lastFrame = 0

    def checkValid(self):
        if self.lastFrame >= self.maxFrame:
            self.kill()

    def update(self, screen, *arg):
        self.lastFrame += 1
        self.draw(screen)
        self.checkValid()

    def draw(self, screen):
        self.percentage = self.lastFrame / self.maxFrame
        self.alpha = round(120 * (1 - self.percentage))
        self.size = round(33 * (1 - self.percentage)) + 1
        tempImg = pygame.Surface((64, 64)).convert_alpha()
        tempImg.fill((0, 0, 0, 0))
        tempImg.blit(self.image, (0, 0), (0, 0, 64, 64))
        tempImg = pygame.transform.smoothscale(tempImg, (self.size, self.size))
        tempImg.set_alpha(self.alpha)
        x, y = self.pos
        pos = (round(x - self.size / 2), round(y - self.size / 2))
        gF.drawRotation(tempImg, pos, self.angle, screen)


class Menu():
    def __init__(self):
        super(Menu, self).__init__()
        self.image = pygame.image.load('resource/title/menu.png').convert()
        self.sign = global_var.get_value('menuSign')
        self.shadow = global_var.get_value('menuShadow')
        self.playerTitleImg = global_var.get_value('playerTitleImg')
        self.kanjiLogo = global_var.get_value('kanjiLogo')
        self.engLogo = global_var.get_value('engLogo')
        self.lightLogo = global_var.get_value('lightLogo')
        self.tachie = global_var.get_value('reimuLogo')
        self.selectImg = global_var.get_value('menuSelectImg')
        self.levelImg = global_var.get_value('levelImg')
        self.font = pygame.font.SysFont('arial', 20)
        self.selectNum = [0, 0, 0, 0]
        self.stairMax = [7, 0, 1, 1]
        # menuStair: 0 = main menu, 1 = stage selection,
        # 2 = player selection, 3 = practice menu
        self.menuStair = 0
        self.playerReset = False
        self.lightStrength = 0.0
        self.logoPosAdj = [0, 0]
        self.lastFrame = 0
        self.testSpellNum = 1
        self.ifSpell = False
        self.substract = False
        self.plus = False
        self.starInt = 180

    def update(self, screen, pressed_keys, pressed_keys_last, player, titleDec):
        self.lastFrame += 1
        self.addTitleStar(titleDec)
        if self.lastFrame > 360:
            self.lastFrame = self.lastFrame % 360
        screen.blit(self.image, (0, 0))
        self.alterSelect(pressed_keys, pressed_keys_last)
        self.drawSign(screen, titleDec)
        self.doSelection(pressed_keys, pressed_keys_last, player)

    def addTitleStar(self, titleDec):
        if self.lastFrame % self.starInt == 0:
            new_star = titleStar()
            i_x = 300 + random.random() * 660
            i_y = random.random() * 5 + 10
            new_star.initial(i_x, i_y)
            new_star.setSpeed(135 + random.random() * 10, 1.8 + 0.6 * random.random())
            titleDec.add(new_star)

    def alterSelect(self, pressed_keys, pressed_keys_last):
        if self.menuStair != 2 and self.menuStair != 3:
            if not (pressed_keys[K_UP] and pressed_keys_last[K_UP]):
                if pressed_keys[K_UP]:
                    self.selectNum[self.menuStair] -= 1
                    global_var.get_value('select_sound').stop()
                    global_var.get_value('select_sound').play()
            if not (pressed_keys[K_DOWN] and pressed_keys_last[K_DOWN]):
                if pressed_keys[K_DOWN]:
                    self.selectNum[self.menuStair] += 1
                    global_var.get_value('select_sound').stop()
                    global_var.get_value('select_sound').play()
        elif self.menuStair == 2:
            if not (pressed_keys[K_LEFT] and pressed_keys_last[K_LEFT]):
                if pressed_keys[K_LEFT]:
                    self.selectNum[self.menuStair] -= 1
                    global_var.get_value('select_sound').stop()
                    global_var.get_value('select_sound').play()
            if not (pressed_keys[K_RIGHT] and pressed_keys_last[K_RIGHT]):
                if pressed_keys[K_RIGHT]:
                    self.selectNum[self.menuStair] += 1
                    global_var.get_value('select_sound').stop()
                    global_var.get_value('select_sound').play()
        elif self.menuStair == 3:
            if not (pressed_keys[K_LEFT] and pressed_keys_last[K_LEFT]):
                if pressed_keys[K_LEFT]:
                    self.testSpellNum -= 1
                    self.substract = True
                    global_var.get_value('select_sound').stop()
                    global_var.get_value('select_sound').play()
            if not (pressed_keys[K_RIGHT] and pressed_keys_last[K_RIGHT]):
                if pressed_keys[K_RIGHT]:
                    self.testSpellNum += 1
                    self.plus = True
                    global_var.get_value('select_sound').stop()
                    global_var.get_value('select_sound').play()
            if self.testSpellNum > 10:
                self.testSpellNum = 1
            elif self.testSpellNum < 1:
                self.testSpellNum = 10
            if not (pressed_keys[K_DOWN] and pressed_keys_last[K_DOWN]):
                if pressed_keys[K_DOWN]:
                    self.ifSpell = False
                    global_var.get_value('select_sound').stop()
                    global_var.get_value('select_sound').play()
            if not (pressed_keys[K_UP] and pressed_keys_last[K_UP]):
                if pressed_keys[K_UP]:
                    self.ifSpell = True
                    global_var.get_value('select_sound').stop()
                    global_var.get_value('select_sound').play()
            # Non-spell practice has no entry No.10; step past it or
            # switch over to spell practice.
            if not self.ifSpell and self.testSpellNum == 10:
                if self.substract:
                    self.testSpellNum = 9
                elif self.plus:
                    self.testSpellNum = 1
                else:
                    self.ifSpell = True
            self.substract = False
            self.plus = False
        if (pressed_keys[K_ESCAPE] != pressed_keys_last[K_ESCAPE] and pressed_keys[K_ESCAPE]) or \
                (pressed_keys[K_x] != pressed_keys_last[K_x] and pressed_keys[K_x]):
            if self.menuStair > 0:
                self.menuStair -= 1
                global_var.get_value('cancel_sound').play()
            else:
                if self.selectNum[0] != 7:
                    self.selectNum[0] = 7
                    global_var.get_value('cancel_sound').play()
                else:
                    global_var.get_value('cancel_sound').play()
                    sys.exit()
        if self.selectNum[self.menuStair] > self.stairMax[self.menuStair]:
            self.selectNum[self.menuStair] = 0
        elif self.selectNum[self.menuStair] < 0:
            self.selectNum[self.menuStair] = self.stairMax[self.menuStair]

    def drawSign(self, screen, titleDec):
        # stars
        if self.menuStair != 0:
            for entity in titleDec:
                entity.update(screen, titleDec)
        if self.menuStair == 0:
            screen.blit(self.tachie, (600, 90))
            for entity in titleDec:
                entity.update(screen, titleDec)
            self.logoPosAdj = [math.sin(self.lastFrame * math.pi / 180) * 20,
                               math.sin(self.lastFrame * 0.5 * math.pi / 180) * 5]
            screen.blit(self.kanjiLogo, (100 + self.logoPosAdj[0], 30 + self.logoPosAdj[1]))
            self.lightStrength = 0.5 * math.sin(self.lastFrame * 2 * math.pi / 180) + 0.5
            alpha = round(self.lightStrength * 256)
            self.lightLogo.set_alpha(alpha)
            screen.blit(self.lightLogo, (100 - 5, 164))
            screen.blit(self.engLogo, (100, 164))
            for i in range(0, 8):
                if i != self.selectNum[self.menuStair]:
                    screen.blit(self.shadow[i], (100, 250 + i * 48))
                else:
                    screen.blit(self.sign[i], (100, 250 + i * 48))
        elif self.menuStair == 1:
            screen.blit(self.selectImg[0], (40, 10))
            screen.blit(self.levelImg[0], (288, 264))
        elif self.menuStair == 2:
            if self.selectNum[0] == 0 or self.selectNum[0] == 2:
                screen.blit(self.selectImg[1], (40, 10))
                for i in range(0, 2):
                    self.playerTitleImg[i].set_alpha(256)
                if self.selectNum[2] == 0:
                    self.playerTitleImg[1].set_alpha(100)
                elif self.selectNum[2] == 1:
                    self.playerTitleImg[0].set_alpha(100)
                for i in range(0, 2):
                    screen.blit(self.playerTitleImg[i], (450 * i, 120))
        elif self.menuStair == 3:
            if self.selectNum[0] == 2:
                if self.ifSpell:
                    pracText = self.font.render(
                        'Test: Start From Spell No.' + str(self.testSpellNum),
                        True, (255, 255, 255))
                else:
                    pracText = self.font.render(
                        'Test: Start From non-Spell No.' + str(self.testSpellNum),
                        True, (255, 255, 255))
                screen.blit(pracText, (200, 300))

    def doSelection(self, pressed_keys, pressed_keys_last, player):
        if pressed_keys[K_z] != pressed_keys_last[K_z] and pressed_keys[K_z]:
            if self.menuStair == 0:
                if self.selectNum[self.menuStair] == 0:
                    global_var.get_value('ok_sound').play()
                    self.menuStair += 1
                elif self.selectNum[self.menuStair] == 2:
                    global_var.get_value('ok_sound').play()
                    self.menuStair += 1
                elif self.selectNum[self.menuStair] == 7:
                    global_var.get_value('ok_sound').play()
                    pygame.quit()
                    sys.exit()
                else:
                    global_var.get_value('invalid_sound').stop()
                    global_var.get_value('invalid_sound').play()
            elif self.menuStair == 1:
                if self.selectNum[0] == 0 or self.selectNum[0] == 2:
                    if self.selectNum[self.menuStair] == 0:
                        global_var.get_value('ok_sound').play()
                        self.menuStair += 1
            elif self.menuStair == 2:
                if self.selectNum[0] == 0:
                    if self.selectNum[self.menuStair] == 0:
                        global_var.set_value('playerNum', 0)
                    elif self.selectNum[self.menuStair] == 1:
                        global_var.set_value('playerNum', 1)
                    global_var.get_value('ok_sound').play()
                    global_var.set_value('ifTest', False)
                    pygame.mixer.music.stop()
                    pygame.mixer.music.load('resource/bgm/lightnessOnTheWay.mp3')  # load the background music file
                    pygame.mixer.music.set_volume(0.6)  # set the background music volume
                    pygame.mixer.music.play(loops=-1)
                    self.menuStair = 0
                    global_var.set_value('menu', False)
                    self.playerReset = True
                if self.selectNum[0] == 2:
                    if self.selectNum[self.menuStair] == 0:
                        global_var.set_value('playerNum', 0)
                    elif self.selectNum[self.menuStair] == 1:
                        global_var.set_value('playerNum', 1)
                    global_var.get_value('ok_sound').play()
                    self.menuStair += 1
            elif self.menuStair == 3:
                if self.selectNum[0] == 2:
                    global_var.get_value('ok_sound').play()
                    global_var.set_value('ifTest', True)
                    global_var.set_value('ifSpellTest', self.ifSpell)
                    global_var.set_value('spellNum', self.testSpellNum)
                    pygame.mixer.music.stop()
                    pygame.mixer.music.load('resource/bgm/lightnessOnTheWay.mp3')  # load the background music file
                    pygame.mixer.music.set_volume(0.6)  # set the background music volume
                    pygame.mixer.music.play(loops=-1)
                    self.menuStair = 0
                    global_var.set_value('menu', False)
                    self.playerReset = True
42.727027
161
0.567651
1,953
15,809
4.456221
0.119304
0.053775
0.056532
0.080087
0.531541
0.493853
0.444904
0.427439
0.395266
0.369298
0
0.040084
0.310393
15,809
370
162
42.727027
0.75821
0.021001
0
0.427746
0
0
0.042475
0.005883
0
0
0
0
0
1
0.060694
false
0
0.037572
0
0.106936
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0