hexsha: string
size: int64
ext: string
lang: string
max_stars_repo_path: string
max_stars_repo_name: string
max_stars_repo_head_hexsha: string
max_stars_repo_licenses: list
max_stars_count: int64
max_stars_repo_stars_event_min_datetime: string
max_stars_repo_stars_event_max_datetime: string
max_issues_repo_path: string
max_issues_repo_name: string
max_issues_repo_head_hexsha: string
max_issues_repo_licenses: list
max_issues_count: int64
max_issues_repo_issues_event_min_datetime: string
max_issues_repo_issues_event_max_datetime: string
max_forks_repo_path: string
max_forks_repo_name: string
max_forks_repo_head_hexsha: string
max_forks_repo_licenses: list
max_forks_count: int64
max_forks_repo_forks_event_min_datetime: string
max_forks_repo_forks_event_max_datetime: string
content: string
avg_line_length: float64
max_line_length: int64
alphanum_fraction: float64
qsc_code_num_words_quality_signal: int64
qsc_code_num_chars_quality_signal: float64
qsc_code_mean_word_length_quality_signal: float64
qsc_code_frac_words_unique_quality_signal: float64
qsc_code_frac_chars_top_2grams_quality_signal: float64
qsc_code_frac_chars_top_3grams_quality_signal: float64
qsc_code_frac_chars_top_4grams_quality_signal: float64
qsc_code_frac_chars_dupe_5grams_quality_signal: float64
qsc_code_frac_chars_dupe_6grams_quality_signal: float64
qsc_code_frac_chars_dupe_7grams_quality_signal: float64
qsc_code_frac_chars_dupe_8grams_quality_signal: float64
qsc_code_frac_chars_dupe_9grams_quality_signal: float64
qsc_code_frac_chars_dupe_10grams_quality_signal: float64
qsc_code_frac_chars_replacement_symbols_quality_signal: float64
qsc_code_frac_chars_digital_quality_signal: float64
qsc_code_frac_chars_whitespace_quality_signal: float64
qsc_code_size_file_byte_quality_signal: float64
qsc_code_num_lines_quality_signal: float64
qsc_code_num_chars_line_max_quality_signal: float64
qsc_code_num_chars_line_mean_quality_signal: float64
qsc_code_frac_chars_alphabet_quality_signal: float64
qsc_code_frac_chars_comments_quality_signal: float64
qsc_code_cate_xml_start_quality_signal: float64
qsc_code_frac_lines_dupe_lines_quality_signal: float64
qsc_code_cate_autogen_quality_signal: float64
qsc_code_frac_lines_long_string_quality_signal: float64
qsc_code_frac_chars_string_length_quality_signal: float64
qsc_code_frac_chars_long_word_length_quality_signal: float64
qsc_code_frac_lines_string_concat_quality_signal: float64
qsc_code_cate_encoded_data_quality_signal: float64
qsc_code_frac_chars_hex_words_quality_signal: float64
qsc_code_frac_lines_prompt_comments_quality_signal: float64
qsc_code_frac_lines_assert_quality_signal: float64
qsc_codepython_cate_ast_quality_signal: float64
qsc_codepython_frac_lines_func_ratio_quality_signal: float64
qsc_codepython_cate_var_zero_quality_signal: bool
qsc_codepython_frac_lines_pass_quality_signal: float64
qsc_codepython_frac_lines_import_quality_signal: float64
qsc_codepython_frac_lines_simplefunc_quality_signal: float64
qsc_codepython_score_lines_no_logic_quality_signal: float64
qsc_codepython_frac_lines_print_quality_signal: float64
qsc_code_num_words: int64
qsc_code_num_chars: int64
qsc_code_mean_word_length: int64
qsc_code_frac_words_unique: null
qsc_code_frac_chars_top_2grams: int64
qsc_code_frac_chars_top_3grams: int64
qsc_code_frac_chars_top_4grams: int64
qsc_code_frac_chars_dupe_5grams: int64
qsc_code_frac_chars_dupe_6grams: int64
qsc_code_frac_chars_dupe_7grams: int64
qsc_code_frac_chars_dupe_8grams: int64
qsc_code_frac_chars_dupe_9grams: int64
qsc_code_frac_chars_dupe_10grams: int64
qsc_code_frac_chars_replacement_symbols: int64
qsc_code_frac_chars_digital: int64
qsc_code_frac_chars_whitespace: int64
qsc_code_size_file_byte: int64
qsc_code_num_lines: int64
qsc_code_num_chars_line_max: int64
qsc_code_num_chars_line_mean: int64
qsc_code_frac_chars_alphabet: int64
qsc_code_frac_chars_comments: int64
qsc_code_cate_xml_start: int64
qsc_code_frac_lines_dupe_lines: int64
qsc_code_cate_autogen: int64
qsc_code_frac_lines_long_string: int64
qsc_code_frac_chars_string_length: int64
qsc_code_frac_chars_long_word_length: int64
qsc_code_frac_lines_string_concat: null
qsc_code_cate_encoded_data: int64
qsc_code_frac_chars_hex_words: int64
qsc_code_frac_lines_prompt_comments: int64
qsc_code_frac_lines_assert: int64
qsc_codepython_cate_ast: int64
qsc_codepython_frac_lines_func_ratio: int64
qsc_codepython_cate_var_zero: int64
qsc_codepython_frac_lines_pass: int64
qsc_codepython_frac_lines_import: int64
qsc_codepython_frac_lines_simplefunc: int64
qsc_codepython_score_lines_no_logic: int64
qsc_codepython_frac_lines_print: int64
effective: string
hits: int64
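The listing above is a flattened schema: 113 column names paired with their Arrow dtypes. As a minimal sketch of how rows with these columns could be loaded and screened, the snippet below uses pandas; the parquet filename and the 0.5 thresholds are illustrative assumptions, not part of the source.

import pandas as pd

# Hypothetical shard name; substitute the file actually being inspected.
df = pd.read_parquet("shard.parquet")

# Keep Python rows whose text is mostly alphanumeric and not dominated by
# duplicated 10-grams; both columns appear in the schema above.
mask = (
    (df["lang"] == "Python")
    & (df["alphanum_fraction"] > 0.5)
    & (df["qsc_code_frac_chars_dupe_10grams_quality_signal"].fillna(0.0) < 0.5)
)
print(df.loc[mask, ["max_stars_repo_name", "size", "hits"]].head())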
58f7131f6e0460b873d8e3c9e2d3590bf118e404
173
py
Python
client/python-client/config.py
colesteere/updated-gabriel-proxy-server
d3c94ef03952f8612f76af1e7e42dd89f0c6f401
[ "Apache-2.0" ]
7
2020-02-04T10:58:58.000Z
2021-11-26T07:37:22.000Z
client/python-client/config.py
colesteere/updated-gabriel-proxy-server
d3c94ef03952f8612f76af1e7e42dd89f0c6f401
[ "Apache-2.0" ]
1
2021-02-19T03:56:10.000Z
2021-02-19T03:56:10.000Z
client/python-client/config.py
colesteere/updated-gabriel-proxy-server
d3c94ef03952f8612f76af1e7e42dd89f0c6f401
[ "Apache-2.0" ]
8
2019-12-05T10:05:36.000Z
2021-01-27T14:09:53.000Z
#!/usr/bin/env python

class Config(object):
    GABRIEL_IP='128.2.213.107'
    RECEIVE_FRAME=True
    VIDEO_STREAM_PORT = 9098
    RESULT_RECEIVING_PORT = 9101
    TOKEN=1
19.222222
32
0.699422
26
173
4.423077
0.961538
0
0
0
0
0
0
0
0
0
0
0.136691
0.196532
173
8
33
21.625
0.690647
0.115607
0
0
0
0
0.085526
0
0
0
0
0
0
1
0
false
0
0
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
3
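Each row stores summary statistics next to the raw content; for the config.py row above these are size 173, avg_line_length 19.222222, max_line_length 32 and alphanum_fraction 0.699422. Below is a hedged sketch of how such per-file statistics could be recomputed from the content cell; the dataset's exact formulas are not given in this dump, so treat it as an approximation.

def summary_stats(content: str) -> dict:
    # Approximate recomputation of the per-file statistics stored in each row.
    lines = content.split("\n")
    return {
        "size": len(content.encode("utf-8")),
        "avg_line_length": sum(len(line) for line in lines) / len(lines),
        "max_line_length": max(len(line) for line in lines),
        "alphanum_fraction": sum(ch.isalnum() for ch in content) / len(content),
    }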
450e839b5cbfe8975335738924db450b042c957b
608
py
Python
ibsng/handler/perm/delete_perm_template.py
ParspooyeshFanavar/pyibsng
d48bcf4f25e3f23461528bf0ff8870cc3d537444
[ "MIT" ]
6
2018-03-06T10:16:36.000Z
2021-12-05T12:43:10.000Z
ibsng/handler/perm/delete_perm_template.py
ParspooyeshFanavar/pyibsng
d48bcf4f25e3f23461528bf0ff8870cc3d537444
[ "MIT" ]
3
2018-03-06T10:27:08.000Z
2022-01-02T15:21:27.000Z
ibsng/handler/perm/delete_perm_template.py
ParspooyeshFanavar/pyibsng
d48bcf4f25e3f23461528bf0ff8870cc3d537444
[ "MIT" ]
3
2018-01-06T16:28:31.000Z
2018-09-17T19:47:19.000Z
"""Delete permission template API method.""" from ibsng.handler.handler import Handler class deletePermTemplate(Handler): """Delete permission template method class.""" def control(self): """Validate inputs after method setup. :return: None :rtype: None """ self.is_valid(self.perm_template_name, str) def setup(self, perm_template_name): """Setup required parameters. :param str perm_template_name: permission template name :return: None :rtype: None """ self.perm_template_name = perm_template_name
24.32
63
0.648026
67
608
5.716418
0.432836
0.18799
0.208877
0.156658
0.120104
0
0
0
0
0
0
0
0.258224
608
24
64
25.333333
0.849224
0.419408
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0.166667
0
0.666667
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
3
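The qsc_codepython_* columns suggest Python-specific checks, e.g. whether the file parses into an AST and what fraction of its lines are imports. The following is a speculative standard-library sketch of two such signals; the real signal definitions are not included in this dump.

import ast

def python_signals(content: str) -> dict:
    # cate_ast: 1 if the source parses as Python 3, else 0 (an assumption
    # about what the column encodes).
    try:
        ast.parse(content)
        cate_ast = 1
    except SyntaxError:
        cate_ast = 0
    # frac_lines_import: share of non-blank lines that begin with an import.
    lines = [line for line in content.split("\n") if line.strip()]
    frac_import = sum(
        line.lstrip().startswith(("import ", "from ")) for line in lines
    ) / max(len(lines), 1)
    return {"cate_ast": cate_ast, "frac_lines_import": frac_import}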
45260c369563b1835eb05a3453c5ee46286b3cca
4,721
py
Python
code/tests/unit/api/test_authorization.py
CiscoSecurity/tr-05-serverless-cyberprotect
f977f78e42d67994550bc5e6a3b04b0cd157d476
[ "MIT" ]
3
2020-06-24T08:17:58.000Z
2021-05-25T06:00:28.000Z
code/tests/unit/api/test_authorization.py
CiscoSecurity/tr-05-serverless-cyberprotect
f977f78e42d67994550bc5e6a3b04b0cd157d476
[ "MIT" ]
null
null
null
code/tests/unit/api/test_authorization.py
CiscoSecurity/tr-05-serverless-cyberprotect
f977f78e42d67994550bc5e6a3b04b0cd157d476
[ "MIT" ]
null
null
null
from http import HTTPStatus

from pytest import fixture
from requests.exceptions import InvalidURL, ConnectionError

from api.utils import (
    NO_AUTH_HEADER,
    WRONG_AUTH_TYPE,
    WRONG_JWKS_HOST,
    JWK_HOST_MISSING,
    WRONG_KEY,
    WRONG_JWT_STRUCTURE,
    WRONG_AUDIENCE,
    KID_NOT_FOUND
)
from .utils import headers
from ..conftest import cyberprotect_api_response
from ..mock_for_tests import (
    EXPECTED_RESPONSE_OF_JWKS_ENDPOINT,
    RESPONSE_OF_JWKS_ENDPOINT_WITH_WRONG_KEY
)


def routes():
    yield '/health'
    yield '/observe/observables'
    yield '/deliberate/observables'


@fixture(scope='module', params=routes(), ids=lambda route: f'POST {route}')
def route(request):
    return request.param


def test_call_with_authorization_header_failure(
        route, client, valid_json,
        authorization_errors_expected_payload
):
    response = client.post(route, json=valid_json)

    assert response.status_code == HTTPStatus.OK
    assert response.json == authorization_errors_expected_payload(
        NO_AUTH_HEADER
    )


def test_call_with_wrong_auth_type(
        route, client, valid_json, valid_jwt,
        authorization_errors_expected_payload
):
    response = client.post(
        route, json=valid_json,
        headers=headers(valid_jwt(), auth_type='not')
    )

    assert response.status_code == HTTPStatus.OK
    assert response.json == authorization_errors_expected_payload(
        WRONG_AUTH_TYPE
    )


def test_call_with_wrong_jwks_host(
        route, client, valid_json, valid_jwt, cyberprotect_api_request,
        authorization_errors_expected_payload
):
    for error in (ConnectionError, InvalidURL):
        cyberprotect_api_request.side_effect = error()

        response = client.post(
            route, json=valid_json, headers=headers(valid_jwt())
        )

        assert response.status_code == HTTPStatus.OK
        assert response.json == authorization_errors_expected_payload(
            WRONG_JWKS_HOST
        )


def test_call_with_missing_jwks_host(
        route, client, valid_json, valid_jwt, cyberprotect_api_request,
        authorization_errors_expected_payload,
):
    cyberprotect_api_request.return_value = \
        cyberprotect_api_response(payload=EXPECTED_RESPONSE_OF_JWKS_ENDPOINT)

    response = client.post(
        route, json=valid_json,
        headers=headers(valid_jwt(wrong_jwks_host=True))
    )

    assert response.status_code == HTTPStatus.OK
    assert response.json == authorization_errors_expected_payload(
        JWK_HOST_MISSING
    )


def test_call_with_wrong_key(
        route, client, valid_json, valid_jwt, cyberprotect_api_request,
        authorization_errors_expected_payload,
):
    cyberprotect_api_request.return_value = \
        cyberprotect_api_response(
            payload=RESPONSE_OF_JWKS_ENDPOINT_WITH_WRONG_KEY
        )

    response = client.post(
        route, json=valid_json, headers=headers(valid_jwt())
    )

    assert response.status_code == HTTPStatus.OK
    assert response.json == authorization_errors_expected_payload(
        WRONG_KEY
    )


def test_call_with_wrong_jwt_structure(
        route, client, valid_json, cyberprotect_api_request,
        authorization_errors_expected_payload,
):
    cyberprotect_api_request.return_value = \
        cyberprotect_api_response(payload=EXPECTED_RESPONSE_OF_JWKS_ENDPOINT)

    response = client.post(
        route, json=valid_json, headers=headers('valid_jwt()')
    )

    assert response.status_code == HTTPStatus.OK
    assert response.json == authorization_errors_expected_payload(
        WRONG_JWT_STRUCTURE
    )


def test_call_with_wrong_audience(
        route, client, valid_json, valid_jwt, cyberprotect_api_request,
        authorization_errors_expected_payload,
):
    cyberprotect_api_request.return_value = \
        cyberprotect_api_response(payload=EXPECTED_RESPONSE_OF_JWKS_ENDPOINT)

    response = client.post(
        route, json=valid_json,
        headers=headers(valid_jwt(aud='wrong_aud'))
    )

    assert response.status_code == HTTPStatus.OK
    assert response.json == authorization_errors_expected_payload(
        WRONG_AUDIENCE
    )


def test_call_with_wrong_kid(
        route, client, valid_json, valid_jwt, cyberprotect_api_request,
        authorization_errors_expected_payload,
):
    cyberprotect_api_request.return_value = \
        cyberprotect_api_response(payload=EXPECTED_RESPONSE_OF_JWKS_ENDPOINT)

    response = client.post(
        route, json=valid_json,
        headers=headers(valid_jwt(kid='wrong_kid'))
    )

    assert response.status_code == HTTPStatus.OK
    assert response.json == authorization_errors_expected_payload(
        KID_NOT_FOUND
    )
28.786585
77
0.728236
547
4,721
5.862888
0.140768
0.084191
0.134705
0.169629
0.753352
0.714687
0.696601
0.675398
0.675398
0.675398
0
0
0.201229
4,721
163
78
28.96319
0.850438
0
0
0.462121
0
0
0.021182
0.004872
0
0
0
0
0.121212
1
0.075758
false
0
0.05303
0.007576
0.136364
0
0
0
0
null
0
0
1
0
1
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
452d8122263635ca75335a981efe2ea56d796a77
536
py
Python
driving_force.py
bcgreen24/ten-lines-or-less
7a34ff7d7222fd3946e9cbb418afc992bc84e5e5
[ "MIT" ]
44
2018-08-15T08:32:43.000Z
2022-02-15T20:25:03.000Z
driving_force.py
bcgreen24/ten-lines-or-less
7a34ff7d7222fd3946e9cbb418afc992bc84e5e5
[ "MIT" ]
null
null
null
driving_force.py
bcgreen24/ten-lines-or-less
7a34ff7d7222fd3946e9cbb418afc992bc84e5e5
[ "MIT" ]
7
2018-09-08T20:05:58.000Z
2021-11-22T12:46:15.000Z
b1 >> fuzz([0, 2, 3, 5], dur=1/2, amp=0.8, lpf=linvar([100, 1000], 12), lpr=0.4, oct=3).every(16, "shuffle").every(8, "bubble")
d1 >> play("x o [xx] oxx o [xx] {oO} ", room=0.4).every(16, "shuffle")
d2 >> play("[--]", amp=[1.3, 0.5, 0.5, 0.5], hpf=linvar([6000, 10000], 8), hpr=0.4, spin=4)
p1 >> pasha([0, 2, 3, 5], dur=4, oct=4, amp=0.65, pan=[-0.5, 0.5], striate=16).every(9, "bubble")
m1 >> karp([0, 2, 3, 7, 9], dur=[1/2, 1, 1/2, 1, 1, 1/2]).every(12, "shuffle").every(7, "bubble")
p1.every(4, "stutter", 4)
b1.every(8, "rotate")
67
127
0.539179
119
536
2.428571
0.394958
0.034602
0.031142
0.041522
0.048443
0
0
0
0
0
0
0.196544
0.136194
536
7
128
76.571429
0.427646
0
0
0
0
0
0.151119
0
0
0
0
0
0
1
0
true
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
3
4535b19aaa2277a6f4dbe0469e53ec470e9bd860
23,317
py
Python
synthetic_expts/synthetic_expts.py
mvp18/gAL-MELEX
58fd26b41d40acac3ab98f37c34e4de2d757ac01
[ "MIT" ]
null
null
null
synthetic_expts/synthetic_expts.py
mvp18/gAL-MELEX
58fd26b41d40acac3ab98f37c34e4de2d757ac01
[ "MIT" ]
null
null
null
synthetic_expts/synthetic_expts.py
mvp18/gAL-MELEX
58fd26b41d40acac3ab98f37c34e4de2d757ac01
[ "MIT" ]
null
null
null
""" Author: saneem """ import pickle import tensorflow as tf import numpy as np import os, sys import csv from keras.preprocessing.image import ImageDataGenerator from keras.models import Sequential, load_model, Model from keras.layers import Conv2D, MaxPooling2D, ZeroPadding2D from keras.layers import Activation, Dropout, Flatten, Dense, Input, Concatenate from keras.callbacks import EarlyStopping import keras.backend as K from keras.optimizers import Adam, SGD from keras.layers.normalization import BatchNormalization from keras.regularizers import Regularizer from keras.applications.resnet50 import ResNet50 from keras.callbacks import Callback # early stopping import matplotlib.pyplot as plt import sklearn.linear_model as linear_model import time from sklearn.metrics import average_precision_score from sklearn.metrics import log_loss import argparse from copy import deepcopy from utils.utils import * from utils.utils import da_get_Yadv from utils.flipGrad import * from utils.RateScheduler import GradientReversalScheduler argparser = argparse.ArgumentParser( description="To train the model with our without our proposed adverserial regularizer.") argparser.add_argument( '-a', '--algo', help="algorithm name: reg, base, fs, adv", default='base') argparser.add_argument( '-hp', '--hyper_param', help="Hyper param list: will adv_weight for fs and adv and lamda for reg", default=[1.0], type=float, nargs='+') argparser.add_argument( '-f', '--data_file', help="data directory", nargs='+', default=None) argparser.add_argument( '-fs_test', '--tfile', help="what test to try", default=None) args = argparser.parse_args() num_feat = 5 num_attr = 2 comm_rep_dim = 2 # custom regularizers lamda = 0.2 def L12_reg(weight_matrix): return lamda* K.sum(K.square(K.sum(K.abs(weight_matrix), axis=1))) def Linf1_reg(weight_matrix): return lamda* K.sum(K.sqrt(K.max(K.abs(weight_matrix), axis=1))) def Lh1_reg(weight_matrix): return lamda* K.sum(K.square(K.sum(K.sqrt(K.abs(weight_matrix)), axis=1))) def evaluate_model(model, Xtest, Ytest): Ypred = model.predict(Xtest) if len(model.output_names) >= 2: Ypred = model.predict(Xtest)[model.output_names.index('task')] Ypred_task = Ypred[:,0] Ytest_task = np.reshape(Ytest[:,0], Ypred_task.shape) mAP = average_precision_score(Ytest_task, Ypred_task, average='macro') acc = np.mean((Ypred_task > 0.5) == Ytest_task) return {'map':mAP, 'acc':acc, 'Ypred':Ypred, 'Ytrue':Ytest} def save_results(model, Xtrain, Ytrain, Xtest, Ytest, filename, algo): res_train = evaluate_model(model, Xtrain, Ytrain) if type(Xtest) == dict: for tfile in Xtest: res = evaluate_model(model, Xtest[tfile], Ytest[tfile]) for key in res_train: res[key+'_train'] = res_train[key] result_file = '../synthetic_results/' + filename.split('.pckl')[0] + '__on__' +\ tfile.split('.pckl')[0] + '__' + algo + '.pckl' pickle.dump(res, open(result_file, 'w')) else: res = evaluate_model(model, Xtest, Ytest) for key in res_train: res[key+'_train'] = res_train[key] result_file = '../synthetic_results/' + filename.split('.pckl')[0] + '__' + algo + '.pckl' pickle.dump(res, open(result_file, 'w')) def keras_basic_model(Xtrain, Ytrain, if_hdh=False): input_s = (np.shape(Xtrain)[1],) # optim = Adam(lr = 005) optim = SGD(lr=0.005, nesterov=True) num_epochs = 1000 if len(np.shape(Ytrain)) == 1: num_label = 1 else: num_label = np.shape(Ytrain)[1] # model earlystopper = EarlyStopping(min_delta=0.00001, patience=20, verbose=1) input_layer = Input(shape=input_s) if if_hdh: layer = Dense(np.shape(Xtrain)[1], activation = 
'relu')(input_layer) output = Dense(num_label, activation='sigmoid')(layer) else: output = Dense(num_label, activation='sigmoid')(input_layer) model = Model(inputs=[input_layer], outputs=output) model.compile(loss='binary_crossentropy', #'mse', #'categorical_crossentropy', optimizer=optim, ) model.fit(x=Xtrain, y=Ytrain, epochs=num_epochs, batch_size=50, validation_data=(Xtrain, Ytrain), verbose=0, callbacks=[earlystopper]) return model def cross_entropy(Ytrue, Ypred): Ypred = np.clip(Ypred, 1e-7, 1.-1e-7) return np.mean(np.log(Ypred)*Ytrue + np.log(1-Ypred)*(1-Ytrue)) ################################################################################### def compete_LR(Xtrain, Ytrain): num_attributes = num_attr input_s = (num_feat*2,) adam = Adam(lr=0.01, decay=0.0) num_epochs = 2000 data_points = len(Ytrain) # model input_layer = Input(shape=input_s) common_rep = Dense(comm_rep_dim, name='rep', use_bias=False)(input_layer) task_out = Dense(num_attributes, activation='sigmoid', name='task', kernel_regularizer=L12_reg)(common_rep) model = Model(inputs=[input_layer], outputs=task_out) model.compile(loss='binary_crossentropy', #'mse', #'categorical_crossentropy', optimizer=adam, metrics=['binary_accuracy'] ) earlystopper = EarlyStopping(min_delta=0.00001, patience=20, verbose=1) model.fit(x=Xtrain, y=Ytrain, epochs=num_epochs, batch_size=50, validation_data=(Xtrain, Ytrain), verbose=0, callbacks=[earlystopper]) # Ypred_train = model.predict(Xtrain) # train_acc = 1 - np.mean(abs((Ypred_train > 0.5)- Ytrain), axis=0)[0] # # Ypred_test = model.predict(Xtest) # test_acc = 1 - np.mean(abs((Ypred_test > 0.5)- Ytest), axis=0)[0] return model def adv_wrapper_FS(Xtrain, Ytrain, adv_weight=1.0): def task_adv_obj(selected_feat, adv_weight=adv_weight): model = keras_basic_model(Xtrain[:,selected_feat], Ytrain[:,0]) Ypred = model.predict(Xtrain[:,selected_feat]) loss_task = cross_entropy(Ytrain[:,0], Ypred) model_adv = keras_basic_model(Xtrain[:,selected_feat], Ytrain[:,1]) Ypred = model_adv.predict(Xtrain[:,selected_feat]) loss_adv = cross_entropy(Ytrain[:,1], Ypred) return loss_task - adv_weight*loss_adv feat_num = Xtrain.shape[1] feat_list = range(feat_num) stop_flag = False sel_feat = [] obj_val = np.inf while not stop_flag: min_obj = np.inf min_f = None for f in feat_list: feat_set = deepcopy(sel_feat) feat_set.append(f) val = task_adv_obj(feat_set) feat_set.remove(f) if val < min_obj: min_obj = val min_f = f if min_obj < obj_val: obj_val = min_obj sel_feat.append(min_f) feat_list.remove(min_f) else: stop_flag = True if len(feat_list) == 0: stop_flag = True return keras_basic_model(Xtrain[:,sel_feat], Ytrain), sel_feat def logistic_regression(Xtrain, Ytrain): input_s = (Xtrain.shape[1],) optim = SGD(lr=0.01, decay=0.0) num_epochs = 1000 data_points = len(Ytrain) # model input_layer_init = Input(shape=input_s) common_rep_init = Dense(comm_rep_dim, name='rep', use_bias=False)(input_layer_init) task_out_init = Dense(1, activation='sigmoid', name='task')(common_rep_init) model = Model(inputs=[input_layer_init], outputs=task_out_init) model.compile(loss='binary_crossentropy', #'mse', #'categorical_crossentropy', optimizer=optim, metrics=['binary_accuracy'] ) earlystopper = EarlyStopping(min_delta=0.00001, patience=20, verbose=1) model.fit(x=Xtrain, y=Ytrain[:,0], epochs=num_epochs, batch_size=50, validation_data=(Xtrain, Ytrain[:,0]), verbose=1, callbacks=[earlystopper]) # train_acc = model_init.evaluate(x=Xtrain, y=Ytrain[:,0], verbose=0) # test_acc = model_init.evaluate(x=Xtest, y=Ytest[:,0], verbose=0) 
return model def adv_logistic(Xtrain, Ytrain, adv_weight=1.0, model_init=None): num_attributes = num_attr input_s = (num_feat*2,) adam = Adam(lr=0.01, decay=0.0) num_epochs = 2000 data_points = len(Ytrain) earlystopper = EarlyStopping(min_delta=0.00001, patience=20, verbose=1) if model_init == None: # init model input_layer_init = Input(shape=input_s) common_rep_init = Dense(comm_rep_dim, name='rep', use_bias=False)(input_layer_init) task_out_init = Dense(1, activation='sigmoid', name='task')(common_rep_init) model_init = Model(inputs=[input_layer_init], outputs=task_out_init) model_init.compile(loss='binary_crossentropy', #'mse', #'categorical_crossentropy', optimizer=adam, metrics=['binary_accuracy'] ) model_init.fit(x=Xtrain, y=Ytrain[:,0], epochs=num_epochs, batch_size=50, validation_data=(Xtrain, Ytrain[:,0]), verbose=0, callbacks=[earlystopper]) # adv model. Flip = GradientReversal(hp_lambda=1) input_layer_all = Input(shape=input_s) common_rep_all = Dense(comm_rep_dim, name='rep', use_bias=False)(input_layer_all) task_out_all = Dense(1, activation='sigmoid', name='task')(common_rep_all) adv = Flip(common_rep_all) adv_out_all = Dense(1, activation='sigmoid', name='adv')(adv) model_all = Model(inputs=[input_layer_all], outputs=[task_out_all, adv_out_all]) model_all.compile(loss={'task':'binary_crossentropy', 'adv':'binary_crossentropy'}, optimizer=adam, metrics=['binary_accuracy'], loss_weights={'task':1, 'adv': adv_weight} ) # copying values from init model to all model. model_all.layers[1].set_weights(model_init.layers[1].get_weights()) model_all.layers[3].set_weights(model_init.layers[2].get_weights()) # Ytrain_dict = {'task': Ytrain[:,0], # 'adv': Ytrain[:,1]} Ytrain_dict = {'task': Ytrain[:,0], 'adv': Ytrain[:,1]} model_all.fit(x=Xtrain, y=Ytrain_dict, epochs=num_epochs, batch_size=50, validation_data=(Xtrain, Ytrain_dict), verbose=0, callbacks=[earlystopper]) return model_all def adv_logistic_scheduler(Xtrain, Ytrain, adv_weight=1.0, increment_epochs=200): num_attributes = num_attr input_s = (num_feat*2,) adam = Adam(lr=0.01, decay=0.0) num_epochs = 1000 data_points = len(Ytrain) # adv model. lamda_sched = K.variable(0.0, dtype='float32') Flip = GradientReversal(lamda_sched) input_layer_all = Input(shape=input_s) common_rep_all = Dense(comm_rep_dim, name='rep', use_bias=False)(input_layer_all) task_out_all = Dense(1, activation='sigmoid', name='task')(common_rep_all) adv = Flip(common_rep_all) adv_out_all = Dense(1, activation='sigmoid', name='adv')(adv) model_all = Model(inputs=[input_layer_all], outputs=[task_out_all, adv_out_all]) model_all.compile(loss={'task':'binary_crossentropy', 'adv':'binary_crossentropy'}, optimizer=adam, metrics=['binary_accuracy'] ) Ytrain_dict = {'task': Ytrain[:,0], 'adv': Ytrain[:,1]} def l_scheduler(i): if i < int(increment_epochs): return adv_weight * float(i)/increment_epochs else: return adv_weight lscheduler = GradientReversalScheduler(lamda_sched, l_scheduler) model_all.fit(x=Xtrain, y=Ytrain_dict, epochs=num_epochs, batch_size=50, validation_data=(Xtrain, Ytrain_dict), verbose=0, callbacks=[lscheduler]) return model_all def da_logistic(Xtrain, Ytrain, adv_weight=1.0, Ytest=None, guess_type='uniform', increment_epochs=200): Yadv = da_get_Yadv(Ytrain, Ytest, guess_type) input_s = (num_feat*2,) adam = Adam(lr=0.01, decay=0.0) num_epochs = 1000 data_points = len(Ytrain) # adv model. 
lamda_sched = K.variable(0.0, dtype='float32') Flip = GradientReversal(lamda_sched) input_layer_all = Input(shape=input_s) common_rep_all = Dense(comm_rep_dim, name='rep', use_bias=False)(input_layer_all) task_out_all = Dense(1, activation='sigmoid', name='task')(common_rep_all) adv = Flip(common_rep_all) adv = Dense(comm_rep_dim, activation='relu')(adv) adv_out_all = Dense(1, activation='sigmoid', name='adv')(adv) model_all = Model(inputs=[input_layer_all], outputs=[task_out_all, adv_out_all]) model_all.compile(loss={'task':'binary_crossentropy', 'adv':'binary_crossentropy'}, optimizer=adam, metrics=['binary_accuracy'] ) Ytrain_dict = {'task': Ytrain[:,0], 'adv': Yadv} def l_scheduler(i): if i < int(increment_epochs): return adv_weight * float(i)/increment_epochs else: return adv_weight lscheduler = GradientReversalScheduler(lamda_sched, l_scheduler) model_all.fit(x=Xtrain, y=Ytrain_dict, epochs=num_epochs, batch_size=50, validation_data=(Xtrain, Ytrain_dict), verbose=0, callbacks=[lscheduler]) return model_all def da_balance(Xtrain, Ytrain, adv_weight=1.0, increment_epochs=200): Yadv = np.prod(Ytrain, axis=1) + np.prod(1-Ytrain, axis=1) input_s = (num_feat*2,) adam = Adam(lr=0.01, decay=0.0) num_epochs = 1000 data_points = len(Ytrain) # adv model. lamda_sched = K.variable(0.0, dtype='float32') Flip = GradientReversal(lamda_sched) input_layer_all = Input(shape=input_s) common_rep_all = Dense(comm_rep_dim, name='rep', use_bias=False)(input_layer_all) task_out_all = Dense(1, activation='sigmoid', name='task')(common_rep_all) adv = Flip(common_rep_all) adv = Dense(5, activation='relu')(adv) adv = Dense(2, activation='relu')(adv) adv_out_all = Dense(1, activation='sigmoid', name='adv')(adv) model_all = Model(inputs=[input_layer_all], outputs=[task_out_all, adv_out_all]) model_all.compile(loss={'task':'binary_crossentropy', 'adv':'binary_crossentropy'}, optimizer=adam, metrics=['binary_accuracy'] ) Ytrain_dict = {'task': Ytrain[:,0], 'adv': Yadv} def l_scheduler(i): if i < int(increment_epochs): return adv_weight * float(i)/increment_epochs else: return adv_weight lscheduler = GradientReversalScheduler(lamda_sched, l_scheduler) model_all.fit(x=Xtrain, y=Ytrain_dict, epochs=num_epochs, batch_size=50, validation_data=(Xtrain, Ytrain_dict), verbose=0, callbacks=[lscheduler]) return model_all def da_FS(Xtrain, Ytrain, adv_weight=1.0, Ytest=None, guess_type='uniform'): Yadv = da_get_Yadv(Ytrain, Ytest, guess_type) def da_obj(selected_feat, adv_weight=adv_weight): model = keras_basic_model(Xtrain[:,selected_feat], Ytrain[:,0]) Ypred = model.predict(Xtrain[:,selected_feat]) loss_task = cross_entropy(Ytrain[:,0], Ypred) model_adv = keras_basic_model(Xtrain[:,selected_feat], Yadv, if_hdh=True) Ypred = model_adv.predict(Xtrain[:,selected_feat]) loss_adv = cross_entropy(Yadv, Ypred) return loss_task - adv_weight*loss_adv feat_num = Xtrain.shape[1] feat_list = range(feat_num) stop_flag = False sel_feat = [] obj_val = np.inf while not stop_flag: min_obj = np.inf min_f = None for f in feat_list: feat_set = deepcopy(sel_feat) feat_set.append(f) val = da_obj(feat_set) feat_set.remove(f) if val < min_obj: min_obj = val min_f = f if min_obj < obj_val: obj_val = min_obj sel_feat.append(min_f) feat_list.remove(min_f) else: stop_flag = True if len(feat_list) == 0: stop_flag = True return keras_basic_model(Xtrain[:,sel_feat], Ytrain), sel_feat #, Xtest[:,sel_feat], Ytest) adv_weight = 1.0 algo = args.algo print algo print args.hyper_param print args.data_file data_direc = '../iccv_synthetic_data/' results_direc = 
'../iccv_synthetic_results' train_datasets = [d for d in os.listdir(data_direc) if 'train' in d] test_corr_expts = [d for d in os.listdir(data_direc) if 'test' in d and 'da-1.5' in d] all_results = os.listdir(results_direc) if args.data_file != None: train_datasets = [d for d in train_datasets if all(x in d for x in args.data_file)] start_time = time.time() for filename in train_datasets: print '\n' + filename data = pickle.load(open(data_direc + filename)) Xtrain = data['X'] Ytrain = data['Y'] # get all relevant test set # if filename == 'train_data_n-1000_cor-0.8_dp-1.5_da-1.5.pckl': if filename == 'train_data_n-1000_cor-0.8_dp-1.5_da-1.5.pckl': Xtest = {} Ytest = {} for tfile in test_corr_expts: data = pickle.load(open(data_direc + tfile)) Xtest[tfile] = data['X'] Ytest[tfile] = data['Y'] print 'Test dicts loaded' # gs_filename = tfile.split('.pckl')[0] + '___gold_std.pckl' # if gs_filename not in all_results: # print 'creating gold standard for ' + tfile # model_gs = logistic_regression(Xtest[tfile], Ytest[tfile]) # save_results(model_gs, Xtest[tfile], Ytest[tfile], Xtest[tfile], Ytest[tfile], tfile, '_gold_std') # else: # test_name = 'test_data_cor-0.5_dp' + filename.split('dp')[1] # data = pickle.load(open(data_direc + test_name)) # Xtest = data['X'] # Ytest = data['Y'] # gs_filename = test_name.split('.pckl')[0] + '___gold_std.pckl' # if gs_filename not in all_results: # model_gs = keras_basic_model(Xtest, Ytest) # save_results(model_gs, Xtest, Ytest, Xtest, Ytest, test_name, '_gold_std') # base model if algo == 'base': model_base = logistic_regression(Xtrain, Ytrain) model_base.save('../models/' + filename.split('.pckl')[0] + '__' + algo + '.h5') save_results(model_base, Xtrain, Ytrain, Xtest, Ytest, filename, algo) # task compete regularizer elif algo == 'reg': for lamda in args.hyper_param: def L12_reg(weight_matrix): return lamda* K.sum(K.square(K.sum(K.abs(weight_matrix), axis=1))) model_reg = compete_LR(Xtrain, Ytrain) model_reg.save('../models/' + filename.split('.pckl')[0] + '__' + algo + '_lam-' + str(lamda) + '.h5') save_results(model_reg, Xtrain, Ytrain, Xtest, Ytest, filename, algo + '_lam-' + str(lamda)) # adversarial FS elif algo == 'fs': for adv_weight in args.hyper_param: model_FS, sel_feat = adv_wrapper_FS(Xtrain, Ytrain, adv_weight) model_FS.save('../models/' + filename.split('.pckl')[0] + '__' + algo + '_aw-' + str(adv_weight) + '.h5') np.save('../models/Selected_features___' + filename.split('.pckl')[0] + '__' + algo + '_aw-' + str(adv_weight) + '.npy', sel_feat) save_results(model_FS, Xtrain[:,sel_feat], Ytrain, Xtest[:,sel_feat], Ytest, filename, algo + '_aw-' + str(adv_weight)) # proposed model elif algo == 'adv': for adv_weight in args.hyper_param: model_adv = adv_logistic(Xtrain, Ytrain, adv_weight) # model_adv.save('../models/' + filename.split('.pckl')[0] + '__' + algo + '_aw-' + str(adv_weight) + '.h5') save_results(model_adv, Xtrain, Ytrain, Xtest, Ytest, filename, algo + '_aw-' + str(adv_weight)) # proposed model with scheduler elif algo == 'adv_sched': for adv_weight in args.hyper_param: model_adv = adv_logistic_scheduler(Xtrain, Ytrain, adv_weight) # model_adv.save('../models/' + filename.split('.pckl')[0] + '__' + algo + '_aw-' + str(adv_weight) + '.h5') save_results(model_adv, Xtrain, Ytrain, Xtest, Ytest, filename, algo + '_aw-' + str(adv_weight)) # All domain adaptation variants go here elif 'da' in algo: for adv_weight in args.hyper_param: if 'FS' in algo: if 'uni' in algo: model_da, sel_feat = da_FS(Xtrain, Ytrain, adv_weight, Ytest=None, 
guess_type='uniform') np.save('../models/Selected_features___' + filename.split('.pckl')[0] + '__' + algo + '_aw-' + str(adv_weight) + '.npy', sel_feat) save_results(model_da, Xtrain[:,sel_feat], Ytrain, Xtest[:,sel_feat], Ytest, filename, algo + '_aw-' + str(adv_weight)) elif 'uncorr' in algo: model_da, sel_feat = da_FS(Xtrain, Ytrain, adv_weight, Ytest=None, guess_type='uncorrelated') np.save('../models/Selected_features___' + filename.split('.pckl')[0] + '__' + algo + '_aw-' + str(adv_weight) + '.npy', sel_feat) save_results(model_da, Xtrain[:,sel_feat], Ytrain, Xtest[:,sel_feat], Ytest, filename, algo + '_aw-' + str(adv_weight)) elif 'exact' in algo: if type(Ytest) == dict: keys = Ytest.keys() if args.tfile != None: keys = [k for k in keys if args.tfile in k]: for tfile in keys: model_da, sel_feat = da_FS(Xtrain, Ytrain, adv_weight, Ytest=Ytest[tfile]) np.save('../models/Selected_features___' + filename.split('.pckl')[0] + '__on__' + tfile.split('.pckl')[0] +\ algo + '_aw-' + str(adv_weight) + '.npy', sel_feat) save_results(model_da, Xtrain[:,sel_feat], Ytrain, Xtest[tfile][:,sel_feat], Ytest[tfile], filename.split('.pckl')[0] + '__on__' + tfile, algo + '_aw-' + str(adv_weight)) else: model_da, sel_feat = da_FS(Xtrain, Ytrain, adv_weight, Ytest=Ytest) np.save('../models/Selected_features___' + filename.split('.pckl')[0] + '__' + algo + '_aw-' + str(adv_weight) + '.npy', sel_feat) save_results(model_da, Xtrain[:,sel_feat], Ytrain, Xtest[:,sel_feat], Ytest, filename, algo + '_aw-' + str(adv_weight)) # model_da.save('../models/' + filename.split('.pckl')[0] + '__' + algo + '_aw-' + str(adv_weight) + '.h5') else: if 'uni' in algo: model_da = da_logistic(Xtrain, Ytrain, adv_weight, Ytest=None, guess_type='uniform') save_results(model_da, Xtrain, Ytrain, Xtest, Ytest, filename, algo + '_aw-' + str(adv_weight)) elif 'uncorr' in algo: model_da = da_logistic(Xtrain, Ytrain, adv_weight, Ytest=None, guess_type='uncorrelated') save_results(model_da, Xtrain, Ytrain, Xtest, Ytest, filename, algo + '_aw-' + str(adv_weight)) elif 'exact' in algo: if type(Ytest) == dict: for tfile in Ytest: model_da = da_logistic(Xtrain, Ytrain, adv_weight, Ytest=Ytest[tfile]) save_results(model_da, Xtrain, Ytrain, Xtest[tfile], Ytest[tfile], filename.split('.pckl')[0]+'__on__'+tfile, algo+'_aw-'+ str(adv_weight)) else: model_da = da_logistic(Xtrain, Ytrain, adv_weight, Ytest=Ytest) save_results(model_da, Xtrain, Ytrain, Xtest, Ytest, filename, algo + '_aw-' + str(adv_weight)) # model_da.save('../models/' + filename.split('.pckl')[0] + '__' + algo + '_aw-' + str(adv_weight) + '.h5') print str(time.time() - start_time) + ' seconds' # screen \-d \-m python synthetic_expts.py -a adv -hp 0.2 # screen \-d \-m python synthetic_expts.py -a adv -hp 0.4 # screen \-d \-m python synthetic_expts.py -a adv -hp 0.6 # screen \-d \-m python synthetic_expts.py -a adv -hp 0.8 # screen \-d \-m python synthetic_expts.py -a adv -hp 0.9 # screen \-d \-m python synthetic_expts.py -a adv -hp 1.0 # screen \-d \-m python synthetic_expts.py -a adv -hp 1.1 # screen \-d \-m python synthetic_expts.py -a adv -hp 1.5 # screen \-d \-m python synthetic_expts.py -a adv -hp 2.0 # screen \-d \-m python synthetic_expts.py -a adv -hp 3.0 # screen \-d \-m python synthetic_expts.py -a adv -hp 4.0 # screen \-d \-m python synthetic_expts.py -a fs -hp 0.5 # screen \-d \-m python synthetic_expts.py -a fs -hp 0.8 # screen \-d \-m python synthetic_expts.py -a fs -hp 0.85 # screen \-d \-m python synthetic_expts.py -a fs -hp 0.9 # screen \-d \-m python 
synthetic_expts.py -a fs -hp 0.95 # screen \-d \-m python synthetic_expts.py -a fs -hp 1.0 # screen \-d \-m python synthetic_expts.py -a fs -hp 1.05 # screen \-d \-m python synthetic_expts.py -a fs -hp 1.1 # screen \-d \-m python synthetic_expts.py -a fs -hp 1.2 # screen \-d \-m python synthetic_expts.py -a fs -hp 1.5 # screen \-d \-m python synthetic_expts.py -a fs -hp 2.0 # screen \-d \-m python synthetic_expts.py -a reg -hp 0.2 # screen \-d \-m python synthetic_expts.py -a reg -hp 0.5 # screen \-d \-m python synthetic_expts.py -a reg -hp 0.7 # screen \-d \-m python synthetic_expts.py -a reg -hp 1.0 # screen \-d \-m python synthetic_expts.py -a reg -hp 1.2 # screen \-d \-m python synthetic_expts.py -a reg -hp 1.4 # screen \-d \-m python synthetic_expts.py -a reg -hp 1.8 # screen \-d \-m python synthetic_expts.py -a reg -hp 2.0 # screen \-d \-m python synthetic_expts.py -a reg -hp 3.0 # screen \-d \-m python synthetic_expts.py -a reg -hp 4.0 # screen \-d \-m python synthetic_expts.py -a reg -hp 5.0
37.366987
136
0.697517
3,576
23,317
4.311521
0.088926
0.033273
0.017123
0.029965
0.738358
0.719484
0.687832
0.665326
0.652419
0.637112
0
0.019517
0.14959
23,317
623
137
37.426966
0.758031
0.168075
0
0.585106
0
0
0.082227
0.014789
0
0
0
0
0
0
null
null
0
0.057447
null
null
0.012766
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
3
453a2742018049e65a52d9001633ae9f304aa4fa
28
py
Python
constants.py
cenwangumass/cs590m-homework06
f2ca68318dcc54503e77dcb62ff973515eaf764a
[ "MIT" ]
null
null
null
constants.py
cenwangumass/cs590m-homework06
f2ca68318dcc54503e77dcb62ff973515eaf764a
[ "MIT" ]
null
null
null
constants.py
cenwangumass/cs590m-homework06
f2ca68318dcc54503e77dcb62ff973515eaf764a
[ "MIT" ]
null
null
null
IDLE = "IDLE"
BUSY = "BUSY"
9.333333
13
0.571429
4
28
4
0.5
0
0
0
0
0
0
0
0
0
0
0
0.214286
28
2
14
14
0.727273
0
0
0
0
0
0.285714
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
18a37a362f29ef2ae5519e1d68048127fb48f719
1,544
py
Python
leetcode/lessons/hash_table/349_intersection_of_two_arrays/__init__.py
wangkuntian/leetcode
e8dc9c8032c805a7d071ad19b94841ee8e52e834
[ "MIT" ]
null
null
null
leetcode/lessons/hash_table/349_intersection_of_two_arrays/__init__.py
wangkuntian/leetcode
e8dc9c8032c805a7d071ad19b94841ee8e52e834
[ "MIT" ]
2
2020-03-24T18:00:21.000Z
2020-03-26T11:33:51.000Z
leetcode/lessons/hash_table/349_intersection_of_two_arrays/__init__.py
wangkuntian/leetcode
e8dc9c8032c805a7d071ad19b94841ee8e52e834
[ "MIT" ]
null
null
null
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
__project__ = 'leetcode'
__file__ = '__init__.py'
__author__ = 'king'
__time__ = '2020/1/31 20:25'

                    _ooOoo_
                   o8888888o
                   88" . "88
                   (| -_- |)
                   O\  =  /O
                ____/`---'\____
              .'  \\|     |//  `.
             /  \\|||  :  |||//  \
            /  _||||| -:- |||||-  \
            |   | \\\  -  /// |   |
            | \_|  ''\---/''  |   |
            \  .-\__  `-`  ___/-. /
          ___`. .'  /--.--\  `. . __
       ."" '<  `.___\_<|>_/___.'  >'"".
      | | :  `- \`.;`\ _ /`;.`/ - ` : | |
      \  \ `-.   \_ __\ /__ _/   .-` /  /
 ======`-.____`-.___\_____/___.-`____.-'======
                    `=---='
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
           佛祖保佑       永无BUG
"""
"""
Difficulty: Easy

Given two arrays, write a function to compute their intersection.

Example 1:
Input: nums1 = [1,2,2,1], nums2 = [2,2]
Output: [2]

Example 2:
Input: nums1 = [4,9,5], nums2 = [9,4,9,8,4]
Output: [9,4]

Note:
Each element in the result must be unique.
The order of the result does not matter.
"""


class Solution(object):
    def intersection(self, nums1, nums2):
        """
        :type nums1: List[int]
        :type nums2: List[int]
        :rtype: List[int]
        """
        return list(set(nums1) & set(nums2))


print(Solution().intersection([4, 9, 5], [9, 4, 9, 8, 4]))
24.125
58
0.284326
100
1,544
3.62
0.55
0.022099
0.016575
0.022099
0.027624
0
0
0
0
0
0
0.07371
0.472798
1,544
63
59
24.507937
0.371007
0.720855
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0
0
0
0.75
0.25
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
3
18b2748d4db3f32aa2c547796c669a19b6491fcb
232
py
Python
setup.py
doandv-java/detect-yolo
e36dd94557431a3833220fdcfe0dad794c6bc828
[ "MIT" ]
null
null
null
setup.py
doandv-java/detect-yolo
e36dd94557431a3833220fdcfe0dad794c6bc828
[ "MIT" ]
null
null
null
setup.py
doandv-java/detect-yolo
e36dd94557431a3833220fdcfe0dad794c6bc828
[ "MIT" ]
null
null
null
from setuptools import setup

setup(name='yolov3_tf2',
      version='0.1',
      url='https://github.com/zzh8829/yolov3-tf2',
      author='Zihao Zhang',
      author_email='zzh8829@gmail.com',
      packages=['yolov3_tf2'])
29
51
0.633621
29
232
4.965517
0.724138
0.1875
0
0
0
0
0
0
0
0
0
0.086957
0.206897
232
8
52
29
0.695652
0
0
0
0
0
0.389381
0
0
0
0
0
0
1
0
true
0
0.142857
0
0.142857
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
3
18b70f312f7bc7a978f8b4dac5d2e717efd5e67a
635
py
Python
intake/container/ndarray.py
ah-/intake
1c971a9e579a18be603b4a74a71dbc111afbcb0c
[ "BSD-2-Clause" ]
null
null
null
intake/container/ndarray.py
ah-/intake
1c971a9e579a18be603b4a74a71dbc111afbcb0c
[ "BSD-2-Clause" ]
null
null
null
intake/container/ndarray.py
ah-/intake
1c971a9e579a18be603b4a74a71dbc111afbcb0c
[ "BSD-2-Clause" ]
null
null
null
import msgpack
import numpy as np
import msgpack_numpy

from .base import BaseContainer


class NdArray(BaseContainer):
    @staticmethod
    def merge(parts):
        raise Exception('Need to implement ndarray case')

    @staticmethod
    def to_dask(parts, dtype):
        raise Exception('Need to implement ndarray case')

    @staticmethod
    def encode(obj):
        return msgpack.packb(obj, default=msgpack_numpy.encode)

    @staticmethod
    def decode(bytestr):
        return msgpack.unpackb(bytestr, object_hook=msgpack_numpy.decode)

    @staticmethod
    def read(chunks):
        return np.concatenate(chunks, axis=0)
22.678571
73
0.703937
75
635
5.893333
0.493333
0.169683
0.081448
0.090498
0.248869
0.248869
0.248869
0.248869
0.248869
0
0
0.002012
0.217323
635
28
74
22.678571
0.887324
0
0
0.35
0
0
0.09434
0
0
0
0
0
0
1
0.25
false
0
0.2
0.15
0.65
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
18bcb33e2c36ca40bd83b6f3e06dad0e7e7042de
68
py
Python
app/src/main/python/youtubedl/version.py
daniel-sudz/youtubedl-mobile
c68f141a4c22b2f359eb5e15d855ed31f4faa6eb
[ "MIT" ]
4
2019-07-18T01:44:07.000Z
2019-11-22T14:11:39.000Z
app/src/main/python/youtubedl/version.py
daniel-sudz/youtubedl-mobile
c68f141a4c22b2f359eb5e15d855ed31f4faa6eb
[ "MIT" ]
null
null
null
app/src/main/python/youtubedl/version.py
daniel-sudz/youtubedl-mobile
c68f141a4c22b2f359eb5e15d855ed31f4faa6eb
[ "MIT" ]
1
2019-12-05T15:05:50.000Z
2019-12-05T15:05:50.000Z
from __future__ import unicode_literals

__version__ = '2019.07.12'
17
39
0.808824
9
68
5.111111
1
0
0
0
0
0
0
0
0
0
0
0.133333
0.117647
68
3
40
22.666667
0.633333
0
0
0
0
0
0.147059
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
18e6d6eb8c7c6f9128162d1bd1f24040f068a80e
460
py
Python
PRATICAR.py
klebervieirati/PYTHON
1bb03e775df2ff0d996aab0e3ce8f6f058bc5a05
[ "MIT" ]
null
null
null
PRATICAR.py
klebervieirati/PYTHON
1bb03e775df2ff0d996aab0e3ce8f6f058bc5a05
[ "MIT" ]
null
null
null
PRATICAR.py
klebervieirati/PYTHON
1bb03e775df2ff0d996aab0e3ce8f6f058bc5a05
[ "MIT" ]
null
null
null
print ('\n\nSistema de Cadastro Pessoal')
print ('______________________________________')

nome = input('\nESCREVA SEU NOME COMPLETO.: ')

idade = int(input('\nDIGA QUANTOS ANOS VOCÊ TEM?.: '))

peso = input('\nDIGITE SEU PESO !.: ')

print('========================================================\n')
print ('Seu nome é {}\nE você tem {} Anos\nE pesa {} Quilos'.format(nome,idade,peso))
print('\n========================================================\n')
35.384615
85
0.51087
46
460
4.282609
0.543478
0.091371
0.101523
0
0
0
0
0
0
0
0
0
0.102174
460
12
86
38.333333
0.476998
0
0
0
0
0
0.7
0.33913
0
0
0
0
0
1
0
false
0
0
0
0
0.625
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
3
18edb7e6dbf3862deeedbb272cb4330fb8412a96
210
py
Python
tests/slim_testing_config.py
avalentino/slim
4b037ebe37325069183b841242f83f00411eae61
[ "MIT" ]
1
2016-12-29T18:05:22.000Z
2016-12-29T18:05:22.000Z
tests/slim_testing_config.py
avalentino/slim
4b037ebe37325069183b841242f83f00411eae61
[ "MIT" ]
null
null
null
tests/slim_testing_config.py
avalentino/slim
4b037ebe37325069183b841242f83f00411eae61
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*-

TESTING = True
SECURITY_PASSWORD_HASH = 'plaintext'
SQLALCHEMY_DATABASE_URI = 'sqlite://'
SLIM_FILE_LOGGING_LEVEL = None
# LOGIN_DISABLED = True
# PRESERVE_CONTEXT_ON_EXCEPTION = False
23.333333
39
0.761905
26
210
5.730769
0.961538
0
0
0
0
0
0
0
0
0
0
0.005435
0.12381
210
8
40
26.25
0.804348
0.385714
0
0
0
0
0.144
0
0
0
0
0
0
1
0
false
0.25
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
3
e13ead3131e4730fd69f85d2db25383cd59c99c1
338
py
Python
backend/nodes/admin.py
solitariaa/CMPUT404-project-socialdistribution
f9e23a10e209f8bf7ed062e105f44038751f7c74
[ "W3C-20150513" ]
1
2022-03-01T03:03:40.000Z
2022-03-01T03:03:40.000Z
backend/nodes/admin.py
solitariaa/CMPUT404-project-socialdistribution
f9e23a10e209f8bf7ed062e105f44038751f7c74
[ "W3C-20150513" ]
51
2022-02-09T06:18:27.000Z
2022-03-28T19:01:54.000Z
backend/nodes/admin.py
solitariaa/CMPUT404-project-socialdistribution
f9e23a10e209f8bf7ed062e105f44038751f7c74
[ "W3C-20150513" ]
2
2022-03-13T20:58:10.000Z
2022-03-19T06:29:56.000Z
from django.contrib import admin
from django.urls import reverse
from django.utils.safestring import mark_safe

from .models import Node


class NodeAdmin(admin.ModelAdmin):
    ordering = ('name',)
    search_fields = ('name', 'host')
    list_display = ('name', 'host', 'username', 'password')


admin.site.register(Node, NodeAdmin)
24.142857
59
0.718935
42
338
5.714286
0.642857
0.125
0
0
0
0
0
0
0
0
0
0
0.156805
338
13
60
26
0.842105
0
0
0
0
0
0.106509
0
0
0
0
0
0
1
0
false
0.111111
0.444444
0
0.888889
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
1
0
1
0
0
3
e140b6621336f87088b76c9f3e87521bc3042832
224
py
Python
apps/usuarios/models.py
RagAndRoll/BlueList
12e9cc03ac35c2c82c96cb4bfe89d227646a15a3
[ "MIT" ]
null
null
null
apps/usuarios/models.py
RagAndRoll/BlueList
12e9cc03ac35c2c82c96cb4bfe89d227646a15a3
[ "MIT" ]
null
null
null
apps/usuarios/models.py
RagAndRoll/BlueList
12e9cc03ac35c2c82c96cb4bfe89d227646a15a3
[ "MIT" ]
null
null
null
# from django.db import models

# Create your models here.

# comments: will be a text field plus a comment-to-comment relation
# the items that can be commented on will hold the foreign key
# the delete is not cascaded
28
73
0.78125
35
224
5
0.942857
0
0
0
0
0
0
0
0
0
0
0
0.183036
224
7
74
32
0.956284
0.941964
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
3
e15ab695501902033016a101ef60aca490b44121
1,985
py
Python
test/unit/test_config.py
managedbyq/q-dbt
01f1918fe5cbf3036b7197b8e3211960403718f3
[ "Apache-2.0" ]
1
2018-06-20T17:51:20.000Z
2018-06-20T17:51:20.000Z
test/unit/test_config.py
managedbyq/q-dbt
01f1918fe5cbf3036b7197b8e3211960403718f3
[ "Apache-2.0" ]
null
null
null
test/unit/test_config.py
managedbyq/q-dbt
01f1918fe5cbf3036b7197b8e3211960403718f3
[ "Apache-2.0" ]
1
2018-10-18T18:45:38.000Z
2018-10-18T18:45:38.000Z
import os
import unittest
import yaml

import dbt.config

if os.name == 'nt':
    TMPDIR = 'c:/Windows/TEMP'
else:
    TMPDIR = '/tmp'


class ConfigTest(unittest.TestCase):

    def set_up_empty_config(self):
        profiles_path = '{}/profiles.yml'.format(TMPDIR)
        with open(profiles_path, 'w') as f:
            f.write(yaml.dump({}))

    def set_up_config_options(self, **kwargs):
        profiles_path = '{}/profiles.yml'.format(TMPDIR)
        config = {
            'config': kwargs
        }
        with open(profiles_path, 'w') as f:
            f.write(yaml.dump(config))

    def tearDown(self):
        profiles_path = '{}/profiles.yml'.format(TMPDIR)
        try:
            os.remove(profiles_path)
        except:
            pass

    def test__implicit_opt_in(self):
        self.set_up_empty_config()
        config = dbt.config.read_config(TMPDIR)
        self.assertTrue(dbt.config.send_anonymous_usage_stats(config))

    def test__explicit_opt_out(self):
        self.set_up_config_options(send_anonymous_usage_stats=False)
        config = dbt.config.read_config(TMPDIR)
        self.assertFalse(dbt.config.send_anonymous_usage_stats(config))

    def test__explicit_opt_in(self):
        self.set_up_config_options(send_anonymous_usage_stats=True)
        config = dbt.config.read_config(TMPDIR)
        self.assertTrue(dbt.config.send_anonymous_usage_stats(config))

    def test__implicit_colors(self):
        self.set_up_empty_config()
        config = dbt.config.read_config(TMPDIR)
        self.assertTrue(dbt.config.colorize_output(config))

    def test__explicit_opt_out(self):
        self.set_up_config_options(use_colors=False)
        config = dbt.config.read_config(TMPDIR)
        self.assertFalse(dbt.config.colorize_output(config))

    def test__explicit_opt_in(self):
        self.set_up_config_options(use_colors=True)
        config = dbt.config.read_config(TMPDIR)
        self.assertTrue(dbt.config.colorize_output(config))
29.626866
71
0.672544
256
1,985
4.90625
0.234375
0.093153
0.052548
0.062102
0.788217
0.788217
0.756369
0.694268
0.679936
0.66879
0
0
0.220151
1,985
66
72
30.075758
0.81137
0
0
0.42
0
0
0.03728
0
0
0
0
0
0.12
1
0.18
false
0.02
0.08
0
0.28
0
0
0
0
null
0
0
0
0
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
e162a57aecad5276810ea272b886660613b5a8a8
315
py
Python
vmec/example_call_surface.py
gjhartwell/cth-python
558148a5755fd0bd3b12e1380d365f8bf51efa19
[ "MIT" ]
null
null
null
vmec/example_call_surface.py
gjhartwell/cth-python
558148a5755fd0bd3b12e1380d365f8bf51efa19
[ "MIT" ]
null
null
null
vmec/example_call_surface.py
gjhartwell/cth-python
558148a5755fd0bd3b12e1380d365f8bf51efa19
[ "MIT" ]
null
null
null
import numpy as np
import xarray as ncdata
import matplotlib.pyplot as plt
from mayavi import mlab # to overrid plt.mlab
from surface import *
import os
cwd = os.getcwd()
name = cwd + "/wout_vmec.nc"
#x, y , z = plot_vmec_surface(name,plottype='surface3d')
plot_vmec_surface(name,plottype='cross-section',zeta=360)
28.636364
57
0.765079
52
315
4.538462
0.615385
0.067797
0.127119
0.161017
0.228814
0
0
0
0
0
0
0.014599
0.130159
315
10
58
31.5
0.846715
0.234921
0
0
0
0
0.109244
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
3
e17ddfee1dc335b3323bbf28dd3955b87c67fa4a
21
py
Python
projects/borrow-a-book/api/src/secret.py
anouaraissani/H4ckT0b3rF3st-2k20
1f77652add0effdc7462c829dfb88d5f6818d07e
[ "MIT" ]
27
2020-10-04T14:11:54.000Z
2021-05-14T03:51:43.000Z
projects/borrow-a-book/api/src/secret.py
anouaraissani/H4ckT0b3rF3st-2k20
1f77652add0effdc7462c829dfb88d5f6818d07e
[ "MIT" ]
56
2020-10-06T06:38:54.000Z
2020-11-25T10:22:40.000Z
projects/borrow-a-book/api/src/secret.py
anouaraissani/H4ckT0b3rF3st-2k20
1f77652add0effdc7462c829dfb88d5f6818d07e
[ "MIT" ]
63
2020-10-04T12:46:09.000Z
2021-01-10T16:35:53.000Z
SALT = 'change this'
10.5
20
0.666667
3
21
4.666667
1
0
0
0
0
0
0
0
0
0
0
0
0.190476
21
1
21
21
0.823529
0
0
0
0
0
0.52381
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
e182755a60efe85eb57572745046ed933f34905a
662
py
Python
data_resource_api/api/core/versioned_resource.py
brighthive/tpot-programs-api
425861f279fa25e3b42b57eb6894fc9f34b08b28
[ "MIT" ]
null
null
null
data_resource_api/api/core/versioned_resource.py
brighthive/tpot-programs-api
425861f279fa25e3b42b57eb6894fc9f34b08b28
[ "MIT" ]
1
2021-06-01T23:48:03.000Z
2021-06-01T23:48:03.000Z
data_resource_api/api/core/versioned_resource.py
brighthive/tpot-programs-api
425861f279fa25e3b42b57eb6894fc9f34b08b28
[ "MIT" ]
1
2020-04-29T18:19:09.000Z
2020-04-29T18:19:09.000Z
""" Versioned Resource This class extends the Flask-Restful Resource class with the ability to look up the API version number in a request header. """ from flask_restful import Resource from data_resource_api.config import Config class VersionedResource(Resource): def __init__(self): Resource.__init__(self) def get_api_version(self, headers): try: api_version = headers['X-Api-Version'] except Exception: api_version = Config.get_api_version() return api_version def get_request_handler(self, headers): """ Override this method to provide the request handler. """ pass
25.461538
76
0.693353
84
662
5.22619
0.47619
0.159453
0.059226
0
0
0
0
0
0
0
0
0
0.23716
662
25
77
26.48
0.869307
0.297583
0
0
0
0
0.028889
0
0
0
0
0
0
1
0.230769
false
0.076923
0.153846
0
0.538462
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
3
e1933330f218ac04a3231dde5003a755445cb62b
1,680
py
Python
src/warrior.py
marcinadd/corewars
6a50c1d7ab3ae9836e0a3aed1c10236bda737ca4
[ "MIT" ]
null
null
null
src/warrior.py
marcinadd/corewars
6a50c1d7ab3ae9836e0a3aed1c10236bda737ca4
[ "MIT" ]
null
null
null
src/warrior.py
marcinadd/corewars
6a50c1d7ab3ae9836e0a3aed1c10236bda737ca4
[ "MIT" ]
null
null
null
from src.config import WARRIOR_DEFAULT_NAME
from src.gui.colors import Color


class Warrior:
    def __init__(self, instructions=None, processes=None, color=Color.WARRIOR_DEFAULT.value, name=None):
        """
        :param instructions: Warrior instruction list; Optional to simplify testing
        :param processes: Init warrior with queued processes for testing purposes
        """
        self._instructions = instructions if instructions else []
        self._processes = processes if processes else []
        self._color = color
        self._warrior_info = WarriorInfo(name if name else WARRIOR_DEFAULT_NAME)

    def add_process(self, position):
        """
        Adds process to warrior processes queue list
        :param position:int: Address in core
        """
        self._processes.append(position)

    def processes(self):
        return self._processes

    def color(self):
        return self._color

    def set_color(self, color):
        self._color = color

    def warrior_info(self):
        return self._warrior_info

    def instructions(self):
        return self._instructions


class WarriorInfo:
    """
    Warrior details name, wins, ties, loses for game results
    """

    def __init__(self, name):
        self._name = name
        self._wins = 0
        self._ties = 0
        self._loses = 0

    def inc_wins(self):
        self._wins += 1

    def inc_ties(self):
        self._ties += 1

    def inc_loses(self):
        self._loses += 1

    def wins(self):
        return self._wins

    def ties(self):
        return self._ties

    def loses(self):
        return self._loses

    def name(self):
        return self._name
23.661972
104
0.630952
202
1,680
5.039604
0.267327
0.078585
0.11002
0
0
0
0
0
0
0
0
0.005013
0.2875
1,680
70
105
24
0.845447
0.171429
0
0.05
0
0
0
0
0
0
0
0
0
1
0.375
false
0
0.05
0.2
0.675
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
e1a932f43d464f192e8bea26a578d81268c5cb3a
171
py
Python
tests/py_test_lib/test_package/firstdir/call_from_first.py
iduartgomez/rustypy
b6d0f5ba7e4961bed75f454a21dd3ce25243faee
[ "BSD-3-Clause" ]
40
2016-07-02T13:30:50.000Z
2021-03-19T06:33:50.000Z
tests/py_test_lib/test_package/firstdir/call_from_first.py
iduartgomez/rustypy
b6d0f5ba7e4961bed75f454a21dd3ce25243faee
[ "BSD-3-Clause" ]
5
2017-06-17T17:13:57.000Z
2020-04-28T08:15:33.000Z
tests/py_test_lib/test_package/firstdir/call_from_first.py
iduartgomez/rustypy
b6d0f5ba7e4961bed75f454a21dd3ce25243faee
[ "BSD-3-Clause" ]
5
2016-09-07T07:46:56.000Z
2019-10-14T14:54:53.000Z
from rustypy.pywrapper import rust_bind


@rust_bind
def first_module() -> None:
    print('... called from first module')


if __name__ == "__main__":
    first_module()
15.545455
41
0.695906
22
171
4.863636
0.681818
0.308411
0
0
0
0
0
0
0
0
0
0
0.181287
171
10
42
17.1
0.764286
0
0
0
0
0
0.210526
0
0
0
0
0
0
1
0.166667
true
0
0.166667
0
0.333333
0.166667
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
3
e1bd09b9c9cedb9212065bc232e9a3e67d43fa60
44,906
py
Python
test/integration/wikiclir.py
eugene-yang/ir_datasets
2b5a42edfb9ab8c4ee8f11674ffe14d60f41ec1e
[ "Apache-2.0" ]
null
null
null
test/integration/wikiclir.py
eugene-yang/ir_datasets
2b5a42edfb9ab8c4ee8f11674ffe14d60f41ec1e
[ "Apache-2.0" ]
null
null
null
test/integration/wikiclir.py
eugene-yang/ir_datasets
2b5a42edfb9ab8c4ee8f11674ffe14d60f41ec1e
[ "Apache-2.0" ]
null
null
null
import re import unittest from ir_datasets.datasets.wikiclir import WikiClirDoc, WikiClirQuery from ir_datasets.formats import TrecQrel from .base import DatasetIntegrationTest class TestWikiclir(DatasetIntegrationTest): def test_docs(self): self._test_docs('wikiclir/ar', count=535118, items={ 0: WikiClirDoc('7', 'ماء', re.compile('^ الماء هو سائل شفاف لا لون لهُ ولا رائحة، ويوجد في الكرة الأرضية في المسطّحات المائيّة من الجداول وا.{959}يّة في المناطق القطبيّة، في حين تتواجد 0\\.3 % من الماء العذب في الأنهار والبحيرات وفي الغلاف الجوّي \\.$', flags=48)), 9: WikiClirDoc('90', 'عنكبوت', re.compile('^ الرُتَيْلاوات رتبة من صف العنكبيات، وهي أكبر رتبة في هذا الصف، إذا تشمل أكثر من 40,000 نوع في 3700 .{1001}نها تقضي معظم وقتها في اصطياد الحشرات والفتك بها \\. فلولاها لتكاثرت الحشرات وأتت على الأخضر واليابس \\.$', flags=48)), 535117: WikiClirDoc('3769734', 'ريجينا سنيفر', re.compile("^ ريجينا سنيفرهي كاتبة ولدت في لبنان في عام 1962 \\. كتبت العديد من الكتب، ونشر آخرها في عام 2013، '' ب.{341} الحرب '' في لبنان، كتاب ترجم إلى اللغة العربية وحرره الفارابي في يوليو / تموز 2008 وقبله جورج قرم \\.$", flags=48)), }) self._test_docs('wikiclir/ca', count=548722, items={ 0: WikiClirDoc('1', 'Àbac', re.compile("^àbac l'àbac \\( del llatí `` abăcus '' , i grec άβαξ\\-ακος , que significa `` taula '' \\) és una eina pe.{962}e napier i permeten llegir directament el resultat de la multiplicació sense fer sumes intermèdies \\.$", flags=48)), 9: WikiClirDoc('18', 'Aeròbic', re.compile("^ laeròbic és una modalitat de gimnàstica sueca amb acompanyament musical que consisteix en una sèrie.{931} la combinació d'exercicis aeròbics amb tonificació dels músculs , també anomenat `` body power \\. ''$", flags=48)), 548721: WikiClirDoc('1514683', 'Bepink-Cogeas', re.compile('^ el bepink\\-cogeas \\( codi uci : bpk \\) és un equip ciclista femení italià \\. creat al 2012 , té categor.{367}\\- web oficial \\- plantilles i resultats a cyclebase\\.nl \\- plantilles i resultats a procyclingstats\\.com$', flags=48)), }) self._test_docs('wikiclir/zh', count=951480, items={ 0: WikiClirDoc('13', '数学', re.compile('^ 数学是利用符号语言研究数量、结构、变化以及空间等概念的一门学科,从某种角度看属于形式科学的一种。数学透过抽象化和逻辑推理的使用,由计数、计算、量度和对物体形状及运动的观察而产生。数学家们拓展这些概念.{939}问 ” )。 史前的人类就已尝试用自然的法则来衡量物质的多少、时间的长短等抽象的数量关系,比如时间单位有日、季节和年等。算术(加减乘除)也自然而然地产生了。古代的石碑及泥版亦证实了当时已有几何的知识。$', flags=48)), 9: WikiClirDoc('53', '经济学', re.compile('^ 经济学是一门对产品和服务的生产、分配以及消费进行研究的社会科学。西方语言中的 “ 经济学 ” 一词源于古希腊的。起初这一领域被称为政治经济学,但19世纪经济学家採用简短的「经济学」一词来代表「经济科.{881}过度广泛,而且无法将分析的范围侷限在对于市场的研究上。然而,自从1960年代起,由于理性选择理论和其引发的赛局理论不断将经济学的研究领域扩张,这个定义已经获得广泛认同,尽管仍有一些对此定义的批评存在。$', flags=48)), 951479: WikiClirDoc('5795145', '族群', re.compile('^ 族群(),是指一群人,他们认为彼此共享了相同的祖先、血缘、外貌、历史、文化、习俗、语言、地域、宗教、生活习惯与国家体验等,因此形成一个共同的群体。为区分我族及「他者」的分类方式之一。族群含义在20世.{689}或方言分类,如客家人、闽南人。 宗教层面 \\. \\- 按宗教信仰对人群分类,如信仰伊斯兰教的群体穆斯林。 参见 \\. \\- 国民(nation) \\- 人种(race) \\- 民系 \\- 氏族 \\- 部落 \\- 原住民$', flags=48)), }) self._test_docs('wikiclir/cs', count=386906, items={ 0: WikiClirDoc('10', 'Astronomie', re.compile('^ astronomie , řecky αστρονομία z άστρον \\( astron \\) hvězda a νόμος \\( nomos \\) zákon , česky též hvězdá.{845} , například planet ve sluneční soustavě \\. základem nebeské mechaniky jsou práce keplera a newtona \\.$', flags=48)), 9: WikiClirDoc('21', 'Matematika', re.compile("^ matematika \\( z řeckého \\( `` mathematikós '' \\) = `` milující poznání '' ; \\( `` máthema '' \\) = `` věd.{1089}kem v reálném světě \\. 
některé obory čisté matematiky se nacházejí na pomezí s logikou či filozofií \\.$", flags=48)), 386905: WikiClirDoc('1319204', 'Helena Rubinsteinová', re.compile("^ helena rubinsteinová , rodným jménem chaja rubinsteinová \\( 25\\. prosince 1872 , krakov – 1\\. dubna 19.{1121}aci `` helena rubinstein foundation '' , kterou založila \\. byla sestřenicí filozofa martina bubera \\.$", flags=48)), }) self._test_docs('wikiclir/nl', count=1908260, items={ 0: WikiClirDoc('1', 'Albert Speer', re.compile('^ berthold konrad hermann albert speer \\( mannheim , 19 maart 1905 – londen , 1 september 1981 \\) was e.{1195}ald in 1927 bleef speer nog meerdere jaren , als tessenows assistent , aan de hogeschool verbonden \\.$', flags=48)), 9: WikiClirDoc('13', 'Astronomie', re.compile('^ astronomie of sterrenkunde is de wetenschap die zich bezighoudt met de observatie en de studie van .{949}eten bijvoorbeeld zijn meestal ontleend aan amateurastronomen die deze komeet als eerste waarnamen \\.$', flags=48)), 1908259: WikiClirDoc('4848272', 'Karen Briggs (judoka)', re.compile('^karen briggs \\( judoka \\) karen valerie briggs \\( kingston upon hull , 11 april 1963 \\) is een voormalig.{884}land \\( – 48 kg \\) \\- – 1991 praag , tsjecho\\-slowakije \\( – 48 kg \\) \\- – 1981 madrid , spanje \\( – 48 kg \\)$', flags=48)), }) self._test_docs('wikiclir/fi', count=418677, items={ 0: WikiClirDoc('1', 'Amsterdam', re.compile('^ amsterdam on alankomaiden pääkaupunki \\. amsterdam on väkiluvultaan alankomaiden suurin kaupunki , h.{1173}voi , kun se pystyi viemään yhä useampaa tuotetta muualle eurooppaan vapaasti , esimerkiksi olutta \\.$', flags=48)), 9: WikiClirDoc('14', 'Aleksis Kivi', re.compile('^ aleksis kivi \\( oikealta nimeltään alexis stenvall \\) \\( 10\\. lokakuuta 1834 nurmijärvi – 31\\. joulukuut.{1117}johan stenvall oli merimies \\. kirjailijan oma isä erik stenvall oli asunut lapsuutensa helsingissä \\.$', flags=48)), 418676: WikiClirDoc('1401493', 'Jordan Rowley', ' jordan rowley ( s. 3. huhtikuuta 1990 edmonton ) on jääkiekkoilija , joka pelaa lahden pelicansissa .'), }) self._test_docs('wikiclir/fr', count=1894397, items={ 0: WikiClirDoc('3', 'Antoine Meillet', re.compile("^ paul jules antoine meillet , né le à moulins \\( allier \\) et mort le à châteaumeillant \\( cher \\) , est.{884}a au linguiste auguste carrière à la tête de la chaire d'arménien à l'école des langues orientales \\.$", flags=48)), 9: WikiClirDoc('19', 'Algorithme', re.compile("^ un algorithme est une suite finie et non ambiguë d ’ opérations ou d'instructions permettant de rés.{1688}nécessaire pour amener un algorithme à son terme , en fonction de la quantité de données à traiter \\.$", flags=48)), 1894396: WikiClirDoc('11055655', 'Elisabeth Maxwell', re.compile("^ elisabeth `` betty '' maxwell , née meynard , née le et morte le , est une historienne d'origine fr.{866}mpire de la presse \\. elle donne naissance à 9 enfants , mais deux d ’ entre eux meurent en bas\\-âge \\.$", flags=48)), }) self._test_docs('wikiclir/de', count=2091278, items={ 0: WikiClirDoc('1', 'Alan Smithee', re.compile("^ alan smithee steht als pseudonym für einen fiktiven regisseur , der filme verantwortet , bei denen .{1223}ariante `` alan smithee '' war das anagramm `` the alias men '' vermutlich kein entstehungsgrund \\) \\.$", flags=48)), 9: WikiClirDoc('17', 'Liste von Autoren/K', ' kh . 
- yasmina khadra ( * 1955 )'), 2091277: WikiClirDoc('10015849', 'Soli (BiH)', re.compile('^soli \\( bih \\) soli war ein bosnisches gebiet \\( oblast \\) und eine gespanschaft im mittelalter \\. das ze.{1109} teil des sandžaks zvornik \\( zvornički sandžak \\) und des kadiluk srebrenik \\( srebrenički kadiluk \\) \\.$', flags=48)), }) self._test_docs('wikiclir/it', count=1347011, items={ 0: WikiClirDoc('2', 'Armonium', re.compile("^ l'armonium o armonio \\( in francese , `` harmonium '' \\) è un tipo di organo costituito da una tastie.{1119}quella più alta e , rispettivamente , svolgono l'azione verso l'ottava bassa e verso l'ottava alta \\.$", flags=48)), 9: WikiClirDoc('20', 'Abbie Hoffman', re.compile('^ di origini ebraiche , dotato di una personalità sardonica e vulcanica , di orientamento anarchico e.{937}ers \\( degli attori diventati attivisti sociali \\) , distribuendo cibo gratis e organizzando alloggi \\.$', flags=48)), 1347010: WikiClirDoc('6494686', 'Superflat', re.compile("^ il `` superflat '' è un movimento artistico postmoderno , influenzato dai manga e dagli anime , fon.{959}e boy : the arts of japan ’ s exploding subculture \\. new york : japan society \\. isbn 0\\-913304\\-57\\-3 \\.$", flags=48)), }) self._test_docs('wikiclir/ja', count=1071292, items={ 0: WikiClirDoc('5', 'アンパサンド', re.compile("^ アンパサンド \\( , \\& \\) とは「…と…」を意味する記号である。ラテン語の の合字で、trebuchet msフォントでは、と表示され `` et '' の合字であることが容易にわかる。amper.{874}グル \\) (アンド)は、浜崎あゆみが2003年に発売した4曲入りマキシシングル。 \\- \\& \\( 一青窈のアルバム \\) (アンド)は、一青窈が2005年に発売したアルバム、及び同アルバムに収録された楽曲。$", flags=48)), 9: WikiClirDoc('43', 'コケ植物', re.compile('^ コケ植物(コケしょくぶつ、)とは、陸上植物かつ非維管束植物であるような植物の総称、もしくはそこに含まれる植物のこと。コケ類(コケるい)や蘚苔類(せんたいるい)、蘚苔植物(せんたいしょくぶつ)などとも.{861}糸状の原糸体(げんしたい、protonema)というものを形成する。原糸体は葉緑体をもち、基質表面に伸びた後、その上に植物体が発達を始め配偶体となる。なお、一部に生涯にわたって原糸体を持つものがある。$', flags=48)), 1071291: WikiClirDoc('3641139', '若杉明', re.compile('^ 若杉明(わかすぎ あきら、1929年11月19日\\- \\) は、日本の会計学者、横浜国立大学名誉教授。 横須賀市出身。1958年東京大学大学院経済学会計学博士課程満期退学。68年「実現概念の展開 その会.{814} \\& aの財務・会計戦略』編著 ビジネス教育出版社 1989 \\- 『ソフト化社会と会計』編著 ビジネス教育出版社 1989 \\- 『リストラクチャリングの財務・会計戦略』編 ビジネス教育出版社 1991$', flags=48)), }) self._test_docs('wikiclir/ko', count=394177, items={ 0: WikiClirDoc('5', '지미 카터', re.compile("^ 제임스 얼 `` 지미 '' 카터 주니어 \\( , 1924년 10월 1일 \\~ \\) 는 민주당 출신 미국 39번째 대통령 \\( 1977년 \\~ 1981년 \\) 이다 \\. 지미 카터는 조지아 주.{1114}신 6,000명을 감축하는 데 그쳤다 \\. 또한 박정희 정권의 인권 문제 등과의 논란으로 불협화음을 냈으나 , 1979년 6월 하순 , 대한민국을 방문하여 관계가 다소 회복되었다 \\.$", flags=48)), 9: WikiClirDoc('31', '음계', re.compile('^ 음계 \\( 音階 \\) 는 음악에서 음높이 \\( pitch \\) 순서로 된 음의 집합을 말한다 \\. 악곡을 주로 구성하는 음을 나타낸 것이며 음계의 종류에 따라 곡의 분위기가 달라진다 \\. .{888} 變徵 \\) \\-올림화 \\( fa \\) ·치\\-솔·우\\-라·변궁 \\( 變宮 \\) \\-시로 7음계를 많이 쓴다 \\. 한국 전통 음악에서는 5음계 외에도 3음계 또는 악계통에서는 7음계 등이 쓰인다 \\.$', flags=48)), 394176: WikiClirDoc('1824675', '안세호', re.compile('^ 안세호 \\( 1981년 2월 17일 \\~ \\) 는 대한민국의 배우이다 \\. 학력 \\. \\- 대진대학교 연극영화학부 출연작 \\. 영화 \\. \\- 《골든슬럼버》 \\( 2017년 \\) \\- 《군함도》 \\( .{445}링》 \\( 2005년 \\) \\- 찌라시청년 역 드라마 \\. \\- 《나의 판타스틱한 장례식》 \\( 2015년 , sbs \\) \\- 동수네 작업반 반장 역 \\- 《삼총사》 \\( 2014년 , tvn \\)$', flags=48)), }) self._test_docs('wikiclir/no', count=471420, items={ 0: WikiClirDoc('2', 'Akershus', re.compile("^ akershus \\( fra norrønt `` akr '' , åker , og `` hús '' , borg eller kastell \\) er et norsk fylke , s.{1272}lsen fylke \\. i 1948 ble aker herred overført fra akershus til å bli en del av oslo \\( by og fylke \\) \\.$", flags=48)), 9: WikiClirDoc('18', 'Atalanta BC', re.compile('^ atalanta bergamasca calcio er en italiensk fotballklubb \\. 
den ble grunnlagt i 1907 i byen bergamo i.{383} 1978–79 \\) \\- glenn strömberg \\( 1984–85 \\) \\- filippo inzaghi \\( 1996–97 \\) \\- christian vieri \\( 2006–07 \\)$', flags=48)), 471419: WikiClirDoc('1521098', 'VM i vektløfting 1910', ' vm i vektløfting 1910 ( verdensmesterskapet i vektløfting ) ble arrangert i düsseldorf og wien i to forskjellige turneringer i 1910 .'), }) self._test_docs('wikiclir/nn', count=133290, items={ 0: WikiClirDoc('1', 'Hovudside', ' __ingainnhaldsliste__ __ingabolkredigering__'), 9: WikiClirDoc('28', 'Jødedommen', re.compile('^ jødedommen er den religiøse kulturen åt det jødiske folket \\. han er ein av dei først dokumenterte m.{1106} utøvinga av desse lovane og boda slik dei blir tolka av dei ulike antikke og moderne autoritetane \\.$', flags=48)), 133289: WikiClirDoc('341641', 'Harry Danielsen', re.compile('^ harry danielsen var ein norsk skulemann og politikar frå rødøy i nordland \\. han representerte nordl.{883} då medlem i forbrukar\\- og administrasjonskomitéen \\. sommaren 1987 melde danielsen seg ut av høgre \\.$', flags=48)), }) self._test_docs('wikiclir/pl', count=1234316, items={ 0: WikiClirDoc('2', 'AWK', re.compile('^ awk – interpretowany język programowania , którego główną funkcją jest wyszukiwanie i przetwarzanie.{895}dice_2 \\) i wartości , które mają być udostępnione w predefiniowanych zmiennych codice_3 i codice_4 \\.$', flags=48)), 9: WikiClirDoc('15', 'AmigaOS', re.compile('^ amigaos – system operacyjny opracowany przez firmę commodore international dla produkowanych przez .{942}ta implementacja systemu amigaos pod nazwą aros \\. dostępna jest ona między innymi na platformę x86 \\.$', flags=48)), 1234315: WikiClirDoc('4059443', 'Sóweczka ekwadorska', re.compile("^ sóweczka ekwadorska \\( `` glaucidium nubicola '' \\) – gatunek małego ptaka z rodziny puszczykowatych .{1006}żej spokrewnione są z dwuplamistymi \\( `` g\\. gnoma '' \\) i kostarykańskimi \\( `` g\\. costaricanum '' \\) \\.$", flags=48)), }) self._test_docs('wikiclir/pt', count=973057, items={ 0: WikiClirDoc('220', 'Astronomia', re.compile("^ astronomia é uma ciência natural que estuda corpos celestes \\( como estrelas , planetas , cometas , .{963}vium '' que , junto com o `` trivium '' , compunha a metodologia de ensino das sete artes liberais \\.$", flags=48)), 9: WikiClirDoc('235', 'Lista de padrões de arquivo gráfico', re.compile('^ \\- amiga interchange file format \\( iff \\) \\- adobe photoshop image \\( psd \\) \\- compuserv graphics interc.{293}f/tiff \\) \\- truevision targa \\( tga \\) \\- windows and os/2 bitmap \\( bmp/dib \\) \\- zsoft paintbrush \\( pcx \\)$', flags=48)), 973056: WikiClirDoc('5499216', 'Chaudhry Muhammad Ali', re.compile('^ chaudhry mohammad ali \\( punjabi , urdu : چوہدری محمد علی\u200e ; 15 de julho de 1905 – 2 de dezembro de .{189}e estado em 1958\\. 
ao longo da sua carreira , foi também ministro das finanças e ministro da defesa \\.$', flags=48)), }) self._test_docs('wikiclir/ro', count=376655, items={ 0: WikiClirDoc('1', 'Rocarta', re.compile('^ rocarta este o enciclopedie în format electronic care conține articole legate de românia , republic.{1032}imilare tipărite , precum și albume de imagini în care s\\-au investit mai mulți sau mai puțini bani \\.$', flags=48)), 9: WikiClirDoc('24', 'Romania (dezambiguizare)', re.compile('^romania \\( dezambiguizare \\) romania , în această grafie , se poate referi la : \\- capul , mina de baux.{948}ui roman de răsărit în secolele vi și vii , când restul italiei trecuse sub stăpânirea lombarzilor \\.$', flags=48)), 376654: WikiClirDoc('2013894', 'Rezonanță (chimie)', re.compile('^rezonanță \\( chimie \\) în chimie , rezonanța sau mezomeria face referire la oscilarea structurii chimi.{805}r , și nu prin poziția nucleelor \\. vezi și \\. \\- aromaticitate \\- tautomerie \\- delocalizare electronică$', flags=48)), }) self._test_docs('wikiclir/ru', count=1413945, items={ 0: WikiClirDoc('7', 'Литва', re.compile('^литва литва́ \\( \\) , официальное название — лито́вская респу́блика \\( \\) — государство , географически р.{867}олм аукштояс \\( \\) \\( или аукштасис калнас \\( \\) \\) в юго\\-восточной части страны , в 23,5 км от вильнюса \\.$', flags=48)), 9: WikiClirDoc('27', 'Киевская Русь', re.compile("^киевская русь ки́евская русь , древнеру́сское госуда́рство , дре́вняя русь \\( ' , ' , , др\\.\\-сканд \\. `.{909}ической дезинтеграции , что впоследствии сыграло важную роль в процессе объединения русских земель \\.$", flags=48)), 1413944: WikiClirDoc('7070375', 'Перекрёстки (телесериал, 1994)', re.compile('^перекрёстки \\( телесериал , 1994 \\) перекрёстки \\( \\) — мексиканский 68 серийный телесериал 1994 года те.{840}chir \\- orlando soles \\- isabel andrade \\- celia álvarez de soles \\- héctor cruz lara \\- reynaldo álvarez$', flags=48)), }) self._test_docs('wikiclir/en-simple', count=127089, items={ 0: WikiClirDoc('1', 'April', re.compile('^ april is the fourth month of the year , and comes between march and may \\. it is one of four months .{1212}mmediately after that , april finishes on the same day of the week as january of the previous year \\.$', flags=48)), 9: WikiClirDoc('18', 'Andouille', re.compile('^ andouille is a type of pork sausage \\. it is spicy \\( hot in taste \\) and smoked \\. there are different.{560}ane for a maximum of seven or eight hours , at about 175 degrees fahrenheit \\( 80 degrees celsius \\) \\.$', flags=48)), 127088: WikiClirDoc('594702', 'Digital video', re.compile('^ digital video is a representation of moving visual images in the form of encoded \\. this is in contr.{831}include hdmi , displayport , digital visual interface \\( dvi \\) and serial digital interface \\( sdi \\) \\.$', flags=48)), }) self._test_docs('wikiclir/es', count=1302958, items={ 0: WikiClirDoc('7', 'Andorra', re.compile('^ andorra , oficialmente principado de andorra \\( \\) , es un pequeño país soberano del suroeste de euro.{831}a oficial es el catalán que convive con el español y en menor medida con el francés y el portugués \\.$', flags=48)), 9: WikiClirDoc('24', 'Arquitectura', re.compile("^ la arquitectura es el arte y la técnica de proyectar , diseñar , construir y modificar el hábitat h.{1568}ángulos y llevada a término por una mente y una inteligencia culta '' '' \\( del lib \\. i , cap \\. 
i \\) \\.$", flags=48)), 1302957: WikiClirDoc('8045476', 'Inés Márquez Moreno', re.compile('^ poetisa cuencana nacida el 7 de junio de 1916 , hija del dr\\. ricardo márquez tapia y de la sra \\. ro.{1166}o vega , humberto mata , enrique noboa arízaga , rigoberto cordero león y jacinto cordero espinoza \\.$', flags=48)), }) self._test_docs('wikiclir/sw', count=37079, items={ 0: WikiClirDoc('2', 'Akiolojia', re.compile("^ akiolojia \\( kutoka kiyunani αρχαίος = `` zamani '' na λόγος = `` neno , usemi '' \\) ni somo linalohu.{1003}i ya kiroma , lakini mji uliharibika kabisa na kufunikwa na majivu ya volkeno vesuvio mwaka 79 b\\.k \\.$", flags=48)), 9: WikiClirDoc('33', 'Lugha asilia', re.compile("^ 1\\. lugha asilia ni lugha ambayo ilikua kama sehemu ya utamaduni wa umma fulani ambao watu wake wana.{131} kuzungumzwa na watu , na lugha za kompyuta na za kuandaa programu zinaitwa `` lugha za kuundwa '' \\.$", flags=48)), 37078: WikiClirDoc('92114', 'Kaptura', re.compile('^ kaptura ni vazi lililovaliwa na wanaume na wanawake juu ya eneo la pelvic yao , wakizunguka kiuno n.{272}joto au katika mazingira ambapo faraja na mtiririko wa hewa ni muhimu zaidi kuliko ulinzi wa miguu \\.$', flags=48)), }) self._test_docs('wikiclir/sv', count=3785412, items={ 0: WikiClirDoc('1', 'Amager', re.compile('^ amager är en dansk ö i öresund \\. öns norra och västra delar tillhör köpenhamn , medan övriga delar .{1153}i många köpenhamnsbors ögon , men inställningen håller på att ändras i takt med stigande huspriser \\.$', flags=48)), 9: WikiClirDoc('12', '1 april', re.compile('^ 1 april är den 91 : a dagen på året i den gregorianska kalendern \\( 92 : a under skottår \\) \\. det åte.{923}en fransk abbot från 1100\\-talet , på dagens datum före 1747 , då det utgick till förmån för harald \\.$', flags=48)), 3785411: WikiClirDoc('8048978', 'Surçina', ' surçina ( albanska : surçina , serbiska : svrčina ) är en by i kosovo . den ligger i kommunen ferizaj . enligt den senaste folkräkningen år 2011 fanns det 222 invånare .'), }) self._test_docs('wikiclir/tl', count=79008, items={ 0: WikiClirDoc('5', 'Wikipedia', re.compile('^ ang wikipedia ay isang ensiklopedya na may basehang wiki at may malayang nilalaman \\. ito ay tinataw.{845}l , at apache \\) \\. ang mga kalahok sa wikipediang sumusunod , at pinagtitibay , ang ilang patakaran \\.$', flags=48)), 9: WikiClirDoc('603', 'Astronomiya', re.compile("^ ang dalubtalaan \\( astronomiya \\) ay isang agham na kinapapalooban ng pagmamasid at pagpapaliwanag ng.{963}' = `` astron '' \\+ `` nomos '' , na mayroong literal na kahulugang `` '' batas ng mga bituin '' '' \\.$", flags=48)), 79007: WikiClirDoc('267691', 'Nao Iwadate', ' si nao iwadate ( ipinaganak agosto 17 , 1988 ) ay isang manlalaro ng putbol sa hapon .'), }) self._test_docs('wikiclir/tr', count=295593, items={ 0: WikiClirDoc('10', 'Cengiz Han', re.compile("^ cengiz han \\( `` cenghis khan '' , `` çinggis haan '' ya da doğum adıyla temuçin \\( anlamı : demirci .{1648}a çağırmış ve moğolca için uygur alfabesini uyarlatarak bunu çocuklarına da öğretmesini istemiştir \\.$", flags=48)), 9: WikiClirDoc('40', 'Beşiktaş JK', re.compile("^ beşiktaş jimnastik kulübü , 1903 yılında istanbul'da kurulan spor kulübüdür \\. bereket jimnastik kul.{864}ndan biridir \\. 
armasında türk bayrağı amblemi taşıma hakkını elde etmiş az sayıda takımdan biridir \\.$", flags=48)), 295592: WikiClirDoc('2268203', 'Prachatice İlçesi', re.compile("^prachatice ilçesi prachatice ilçesi , çek cumhuriyeti'nin güney bohemya bölgesinde bulunan ilçedir \\..{707}zí \\- volary \\- vrbice \\- záblatí \\- zábrdí \\- zálezly \\- zbytiny \\- zdíkov \\- žárovná \\- želnava \\- žernovice$", flags=48)), }) self._test_docs('wikiclir/uk', count=704903, items={ 0: WikiClirDoc('3', 'Головна сторінка', 'головна сторінка'), 9: WikiClirDoc('592', 'Біологія', re.compile('^біологія біоло́гія \\( — життя , — слово ; наука \\) — система наук , що вивчає життя в усіх його проява.{1135}авляють самостійні дисципліни — анатомія , фізіологія , гістологія , біохімія , мікробіологія тощо \\.$', flags=48)), 704902: WikiClirDoc('2485891', 'Хліб, любов і фантазія', re.compile("^хліб , любов і фантазія « хліб .*, що не любить чужої жалості , розриває її .$", flags=48)), }) self._test_docs('wikiclir/vi', count=1392152, items={ 0: WikiClirDoc('4', 'Internet Society', re.compile("^ internet society hay isoc là một tổ chức quốc tế hoạt động phi lợi nhuận , phi chính phủ và bao gồm.{820}d the internet society '' \\- về internet engineering task force và isoc , bài của vint cerf 18/6/1995$", flags=48)), 9: WikiClirDoc('56', 'Lào', re.compile("^ lào \\( , , `` lāo '' \\) , tên chính thức là nước cộng hoà dân chủ nhân dân lào , \\( tiếng lào : ສາທາລະ.{922} kết quả là chấm dứt chế độ quân chủ , phong trào pathet lào theo chủ nghĩa cộng sản lên nắm quyền \\.$", flags=48)), 1392151: WikiClirDoc('6111969', 'Sơn lục đậu', re.compile('^ còn gọi là vọng giang nam , cốt khí mồng , dương giác đậu , giang nam đậu , thạch quyết minh , dã b.{875} ta dùng toàn bộ cây , hay chỉ lá , hái hạt về phơi khô \\. 
ở việt nam người ta chưa chú ý khai thác \\.$', flags=48)), }) def test_queries(self): self._test_queries('wikiclir/ar', count=324489, items={ 0: WikiClirQuery('12', 'Anarchism', 'is a political philosophy that advocates self-governed societies based on voluntary institutions.'), 9: WikiClirQuery('324', 'Academy Awards', 'the , now known officially as the oscars, is a set of twenty-four for artistic and technical merit in the american film industry, given annually by the of motion picture arts and sciences (ampas), to recognize excellence in cinematic achievements as assessed by the voting membership.'), 324488: WikiClirQuery('54964051', 'Tal Afar offensive (2017)', 'the is an ongoing announced on 20 august 2017 by iraqi prime minister haider al-abadi in order to liberate the region from the islamic state of iraq and the levant (isil).'), }) self._test_queries('wikiclir/ca', count=339586, items={ 0: WikiClirQuery('12', 'Anarchism', 'is a political philosophy that advocates self-governed societies based on voluntary institutions.'), 9: WikiClirQuery('316', 'Academy Award for Best Production Design', 'the recognizes achievement for art direction in film.'), 339585: WikiClirQuery('54965687', 'Karl Heinrich Gräffe', '(1799-1873) was a german mathematician, who was professor at university of zurich.'), }) self._test_queries('wikiclir/zh', count=463273, items={ 0: WikiClirQuery('12', 'Anarchism', 'is a political philosophy that advocates self-governed societies based on voluntary institutions.'), 9: WikiClirQuery('316', 'Academy Award for Best Production Design', 'the recognizes achievement for art direction in film.'), 463272: WikiClirQuery('54967133', 'United Nations Security Council Resolution 2371', 'the unanimously adopted on august 5, 2017, with approval of all the five permanent members and the ten non-permanent members in response to north korea’s july 2017 missile tests.'), }) self._test_queries('wikiclir/cs', count=233553, items={ 0: WikiClirQuery('12', 'Anarchism', 'is a political philosophy that advocates self-governed societies based on voluntary institutions.'), 9: WikiClirQuery('334', 'International Atomic Time', "(tai, from the french name ) is a high-precision coordinate standard based on the notional passage of proper on earth's geoid."), 233552: WikiClirQuery('54961893', 'Vincenzo Legrenzio Ciampi', '(piacenza, 2 april 1719 – venice, 30 march 1762) was an italian composer.'), }) self._test_queries('wikiclir/nl', count=687718, items={ 0: WikiClirQuery('12', 'Anarchism', 'is a political philosophy that advocates self-governed societies based on voluntary institutions.'), 9: WikiClirQuery('324', 'Academy Awards', 'the , now known officially as the oscars, is a set of twenty-four for artistic and technical merit in the american film industry, given annually by the of motion picture arts and sciences (ampas), to recognize excellence in cinematic achievements as assessed by the voting membership.'), 687717: WikiClirQuery('54967572', 'SV Marken', 'sportvereniging (dutch for "sport club marken", commonly shortened to cv marken, or just marken) is an association football club from marken, netherlands.'), }) self._test_queries('wikiclir/fi', count=273819, items={ 0: WikiClirQuery('12', 'Anarchism', 'is a political philosophy that advocates self-governed societies based on voluntary institutions.'), 9: WikiClirQuery('316', 'Academy Award for Best Production Design', 'the recognizes achievement for art direction in film.'), 273818: WikiClirQuery('54966570', 'Nadezhda Babkina', 'georgieva 
(; born 19 march, 1950, chyorny yar, astrakhan oblast, soviet union) is а soviet and russian folk and pop singer.'), }) self._test_queries('wikiclir/fr', count=1089179, items={ 0: WikiClirQuery('12', 'Anarchism', 'is a political philosophy that advocates self-governed societies based on voluntary institutions.'), 9: WikiClirQuery('316', 'Academy Award for Best Production Design', 'the recognizes achievement for art direction in film.'), 1089178: WikiClirQuery('54967313', 'Lilly Wood and The Prick au Trianon', 'is a 2013 french musical movie directed by benjamin lemaire.'), }) self._test_queries('wikiclir/de', count=938217, items={ 0: WikiClirQuery('12', 'Anarchism', 'is a political philosophy that advocates self-governed societies based on voluntary institutions.'), 9: WikiClirQuery('316', 'Academy Award for Best Production Design', 'the recognizes achievement for art direction in film.'), 938216: WikiClirQuery('54967235', 'Journal of Risk and Uncertainty', 'the is a bimonthly peer-reviewed academic covering the study of analysis and decision-making under uncertainty.'), }) self._test_queries('wikiclir/it', count=808605, items={ 0: WikiClirQuery('12', 'Anarchism', 'is a political philosophy that advocates self-governed societies based on voluntary institutions.'), 9: WikiClirQuery('316', 'Academy Award for Best Production Design', 'the recognizes achievement for art direction in film.'), 808604: WikiClirQuery('54967555', '1999 Merano Open – Doubles', 'lucas arnold ker and jaime oncins win the title by defeating marc-kevin goellner and eric taino 6–4, 7–6 in the final.'), }) self._test_queries('wikiclir/ja', count=426431, items={ 0: WikiClirQuery('12', 'Anarchism', 'is a political philosophy that advocates self-governed societies based on voluntary institutions.'), 9: WikiClirQuery('316', 'Academy Award for Best Production Design', 'the recognizes achievement for art direction in film.'), 426430: WikiClirQuery('54966134', "Pu'an Signal Station", '() is a railway on the taiwan railways administration (tra) south-link line located in daren township, taitung county, taiwan.'), }) self._test_queries('wikiclir/ko', count=224855, items={ 0: WikiClirQuery('12', 'Anarchism', 'is a political philosophy that advocates self-governed societies based on voluntary institutions.'), 9: WikiClirQuery('316', 'Academy Award for Best Production Design', 'the recognizes achievement for art direction in film.'), 224854: WikiClirQuery('54965501', 'Lee Yoo-jin (actor)', '(hangul: ; born april 6, 1992) is a south korean actor.'), }) self._test_queries('wikiclir/no', count=299897, items={ 0: WikiClirQuery('12', 'Anarchism', 'is a political philosophy that advocates self-governed societies based on voluntary institutions.'), 9: WikiClirQuery('324', 'Academy Awards', 'the , now known officially as the oscars, is a set of twenty-four for artistic and technical merit in the american film industry, given annually by the of motion picture arts and sciences (ampas), to recognize excellence in cinematic achievements as assessed by the voting membership.'), 299896: WikiClirQuery('54967547', 'Wassana Panyapuek', '(born 14 december 1968) is a thai sprinter.'), }) self._test_queries('wikiclir/nn', count=99493, items={ 0: WikiClirQuery('12', 'Anarchism', 'is a political philosophy that advocates self-governed societies based on voluntary institutions.'), 9: WikiClirQuery('339', 'Ayn Rand', "(; born alisa zinov'yevna rosenbaum, ; – march 6, 1982) was a russian-american novelist, philosopher, playwright, and screenwriter."), 
99492: WikiClirQuery('54952283', 'Lekamøya', 'is a mountain in the municipality of leka in nord-trøndelag, norway.'), }) self._test_queries('wikiclir/pl', count=693656, items={ 0: WikiClirQuery('12', 'Anarchism', 'is a political philosophy that advocates self-governed societies based on voluntary institutions.'), 9: WikiClirQuery('324', 'Academy Awards', 'the , now known officially as the oscars, is a set of twenty-four for artistic and technical merit in the american film industry, given annually by the of motion picture arts and sciences (ampas), to recognize excellence in cinematic achievements as assessed by the voting membership.'), 693655: WikiClirQuery('54966439', 'Top fermentation', 'or high is a brewing method for beer whereby the yeast floats on of the wort.'), }) self._test_queries('wikiclir/pt', count=611732, items={ 0: WikiClirQuery('12', 'Anarchism', 'is a political philosophy that advocates self-governed societies based on voluntary institutions.'), 9: WikiClirQuery('324', 'Academy Awards', 'the , now known officially as the oscars, is a set of twenty-four for artistic and technical merit in the american film industry, given annually by the of motion picture arts and sciences (ampas), to recognize excellence in cinematic achievements as assessed by the voting membership.'), 611731: WikiClirQuery('54964827', 'Monalysa Alcântara', '(born 26 january 1999) is a brazilian model and beauty pageant titleholder who won miss brasil 2017.'), }) self._test_queries('wikiclir/ro', count=199264, items={ 0: WikiClirQuery('12', 'Anarchism', 'is a political philosophy that advocates self-governed societies based on voluntary institutions.'), 9: WikiClirQuery('316', 'Academy Award for Best Production Design', 'the recognizes achievement for art direction in film.'), 199263: WikiClirQuery('54965687', 'Karl Heinrich Gräffe', '(1799-1873) was a german mathematician, who was professor at university of zurich.'), }) self._test_queries('wikiclir/ru', count=664924, items={ 0: WikiClirQuery('12', 'Anarchism', 'is a political philosophy that advocates self-governed societies based on voluntary institutions.'), 9: WikiClirQuery('324', 'Academy Awards', 'the , now known officially as the oscars, is a set of twenty-four for artistic and technical merit in the american film industry, given annually by the of motion picture arts and sciences (ampas), to recognize excellence in cinematic achievements as assessed by the voting membership.'), 664923: WikiClirQuery('54966570', 'Nadezhda Babkina', 'georgieva (; born 19 march, 1950, chyorny yar, astrakhan oblast, soviet union) is а soviet and russian folk and pop singer.'), }) self._test_queries('wikiclir/en-simple', count=114572, items={ 0: WikiClirQuery('12', 'Anarchism', 'is a political philosophy that advocates self-governed societies based on voluntary institutions.'), 9: WikiClirQuery('324', 'Academy Awards', 'the , now known officially as the oscars, is a set of twenty-four for artistic and technical merit in the american film industry, given annually by the of motion picture arts and sciences (ampas), to recognize excellence in cinematic achievements as assessed by the voting membership.'), 114571: WikiClirQuery('54964009', 'Pyotr Deynekin', 'stepanovich (14 december 1937 – 19 august 2017) was a russian military general.'), }) self._test_queries('wikiclir/es', count=781642, items={ 0: WikiClirQuery('12', 'Anarchism', 'is a political philosophy that advocates self-governed societies based on voluntary institutions.'), 9: WikiClirQuery('324', 'Academy 
Awards', 'the , now known officially as the oscars, is a set of twenty-four for artistic and technical merit in the american film industry, given annually by the of motion picture arts and sciences (ampas), to recognize excellence in cinematic achievements as assessed by the voting membership.'), 781641: WikiClirQuery('54966770', 'Selene Johnson', '(february 20, 1876-december 11, 1960) was an american stage and silent film actress born in philadelphia, pennsylvania (usa) as knapp johnson.'), }) self._test_queries('wikiclir/sw', count=22860, items={ 0: WikiClirQuery('12', 'Anarchism', 'is a political philosophy that advocates self-governed societies based on voluntary institutions.'), 9: WikiClirQuery('580', 'Astronomer', 'an is a scientist in the field of astronomy who concentrates their studies on a specific question or field outside the scope of earth.'), 22859: WikiClirQuery('54716724', 'Tirax language', 'is an oceanic spoken in north east malakula, vanuatu.'), }) self._test_queries('wikiclir/sv', count=639073, items={ 0: WikiClirQuery('12', 'Anarchism', 'is a political philosophy that advocates self-governed societies based on voluntary institutions.'), 9: WikiClirQuery('316', 'Academy Award for Best Production Design', 'the recognizes achievement for art direction in film.'), 639072: WikiClirQuery('54963595', 'Sirkka Selja', '(sirkka-liisa tulonen; 20 march 1920 – 17 august 2017) was a finnish poet and writer.'), }) self._test_queries('wikiclir/tl', count=48930, items={ 0: WikiClirQuery('12', 'Anarchism', 'is a political philosophy that advocates self-governed societies based on voluntary institutions.'), 9: WikiClirQuery('358', 'Algeria', "( '; , '; ), officially the people's democratic republic of algeria, is a sovereign state in north africa on the mediterranean coast."), 48929: WikiClirQuery('54959191', 'Miho Yoshioka (tarento)', 'she was born from higashiōsaka, osaka prefecture.'), }) self._test_queries('wikiclir/tr', count=185388, items={ 0: WikiClirQuery('12', 'Anarchism', 'is a political philosophy that advocates self-governed societies based on voluntary institutions.'), 9: WikiClirQuery('324', 'Academy Awards', 'the , now known officially as the oscars, is a set of twenty-four for artistic and technical merit in the american film industry, given annually by the of motion picture arts and sciences (ampas), to recognize excellence in cinematic achievements as assessed by the voting membership.'), 185387: WikiClirQuery('54965031', 'Himmet Karadağ', '(born 1974, denizli, turkey) is a turkish bureaucrat and chairman of borsa istanbul the sole exchange entity of turkey.'), }) self._test_queries('wikiclir/uk', count=348222, items={ 0: WikiClirQuery('12', 'Anarchism', 'is a political philosophy that advocates self-governed societies based on voluntary institutions.'), 9: WikiClirQuery('324', 'Academy Awards', 'the , now known officially as the oscars, is a set of twenty-four for artistic and technical merit in the american film industry, given annually by the of motion picture arts and sciences (ampas), to recognize excellence in cinematic achievements as assessed by the voting membership.'), 348221: WikiClirQuery('54966570', 'Nadezhda Babkina', 'georgieva (; born 19 march, 1950, chyorny yar, astrakhan oblast, soviet union) is а soviet and russian folk and pop singer.'), }) self._test_queries('wikiclir/vi', count=354312, items={ 0: WikiClirQuery('12', 'Anarchism', 'is a political philosophy that advocates self-governed societies based on voluntary institutions.'), 9: 
WikiClirQuery('324', 'Academy Awards', 'the , now known officially as the oscars, is a set of twenty-four for artistic and technical merit in the american film industry, given annually by the of motion picture arts and sciences (ampas), to recognize excellence in cinematic achievements as assessed by the voting membership.'), 354311: WikiClirQuery('54960571', 'Dictyocaryum lamarckianum', 'is a species of flowering plant in the arecaceae family.'), }) def test_qrels(self): self._test_qrels('wikiclir/ar', count=519269, items={ 0: TrecQrel('12', '23571', 2, 'Q0'), 9: TrecQrel('12', '804785', 1, 'Q0'), 519268: TrecQrel('54964051', '3769457', 2, 'Q0'), }) self._test_qrels('wikiclir/ca', count=965233, items={ 0: TrecQrel('12', '15902', 2, 'Q0'), 9: TrecQrel('12', '1010721', 1, 'Q0'), 965232: TrecQrel('54965687', '1423451', 2, 'Q0'), }) self._test_qrels('wikiclir/zh', count=926130, items={ 0: TrecQrel('12', '87200', 2, 'Q0'), 9: TrecQrel('12', '16304', 1, 'Q0'), 926129: TrecQrel('54967133', '5795012', 2, 'Q0'), }) self._test_qrels('wikiclir/cs', count=954370, items={ 0: TrecQrel('12', '12682', 2, 'Q0'), 9: TrecQrel('12', '430366', 1, 'Q0'), 954369: TrecQrel('54961893', '2646', 1, 'Q0'), }) self._test_qrels('wikiclir/nl', count=2334644, items={ 0: TrecQrel('12', '11036', 2, 'Q0'), 9: TrecQrel('12', '134021', 1, 'Q0'), 2334643: TrecQrel('54967572', '2716534', 2, 'Q0'), }) self._test_qrels('wikiclir/fi', count=939613, items={ 0: TrecQrel('12', '7556', 2, 'Q0'), 9: TrecQrel('12', '1101970', 1, 'Q0'), 939612: TrecQrel('54966570', '529972', 2, 'Q0'), }) self._test_qrels('wikiclir/fr', count=5137366, items={ 0: TrecQrel('12', '178', 2, 'Q0'), 9: TrecQrel('12', '1312543', 1, 'Q0'), 5137365: TrecQrel('54967313', '6378662', 1, 'Q0'), }) self._test_qrels('wikiclir/de', count=5550454, items={ 0: TrecQrel('12', '24409', 2, 'Q0'), 9: TrecQrel('12', '3103271', 1, 'Q0'), 5550453: TrecQrel('54967235', '7427899', 1, 'Q0'), }) self._test_qrels('wikiclir/it', count=3443633, items={ 0: TrecQrel('12', '22305', 2, 'Q0'), 9: TrecQrel('12', '14627', 1, 'Q0'), 3443632: TrecQrel('54967555', '3455512', 2, 'Q0'), }) self._test_qrels('wikiclir/ja', count=3338667, items={ 0: TrecQrel('12', '1430709', 2, 'Q0'), 9: TrecQrel('12', '2963727', 1, 'Q0'), 3338666: TrecQrel('54966134', '1664146', 1, 'Q0'), }) self._test_qrels('wikiclir/ko', count=568205, items={ 0: TrecQrel('12', '10071', 2, 'Q0'), 9: TrecQrel('12', '86969', 1, 'Q0'), 568204: TrecQrel('54965501', '1824430', 1, 'Q0'), }) self._test_qrels('wikiclir/no', count=963514, items={ 0: TrecQrel('12', '31', 2, 'Q0'), 9: TrecQrel('12', '285079', 1, 'Q0'), 963513: TrecQrel('54967547', '1387292', 2, 'Q0'), }) self._test_qrels('wikiclir/nn', count=250141, items={ 0: TrecQrel('12', '10770', 2, 'Q0'), 9: TrecQrel('12', '130318', 1, 'Q0'), 250140: TrecQrel('54952283', '2757', 1, 'Q0'), }) self._test_qrels('wikiclir/pl', count=2471360, items={ 0: TrecQrel('12', '25', 2, 'Q0'), 9: TrecQrel('12', '14226', 1, 'Q0'), 2471359: TrecQrel('54966439', '1937710', 1, 'Q0'), }) self._test_qrels('wikiclir/pt', count=1741889, items={ 0: TrecQrel('12', '230', 2, 'Q0'), 9: TrecQrel('12', '2121768', 1, 'Q0'), 1741888: TrecQrel('54964827', '1311522', 1, 'Q0'), }) self._test_qrels('wikiclir/ro', count=451180, items={ 0: TrecQrel('12', '23210', 2, 'Q0'), 9: TrecQrel('12', '226810', 1, 'Q0'), 451179: TrecQrel('54965687', '1736377', 2, 'Q0'), }) self._test_qrels('wikiclir/ru', count=2321384, items={ 0: TrecQrel('12', '3021', 2, 'Q0'), 9: TrecQrel('12', '2051069', 1, 'Q0'), 2321383: TrecQrel('54966570', 
'3117631', 1, 'Q0'), }) self._test_qrels('wikiclir/en-simple', count=250380, items={ 0: TrecQrel('12', '4807', 2, 'Q0'), 9: TrecQrel('25', '46790', 1, 'Q0'), 250379: TrecQrel('54964009', '594669', 2, 'Q0'), }) self._test_qrels('wikiclir/es', count=2894807, items={ 0: TrecQrel('12', '2190809', 2, 'Q0'), 9: TrecQrel('12', '221716', 1, 'Q0'), 2894806: TrecQrel('54966770', '8045048', 2, 'Q0'), }) self._test_qrels('wikiclir/sw', count=57924, items={ 0: TrecQrel('12', '16420', 2, 'Q0'), 9: TrecQrel('303', '6834', 1, 'Q0'), 57923: TrecQrel('54716724', '74685', 2, 'Q0'), }) self._test_qrels('wikiclir/sv', count=2069453, items={ 0: TrecQrel('12', '149', 2, 'Q0'), 9: TrecQrel('12', '79772', 1, 'Q0'), 2069452: TrecQrel('54963595', '263597', 1, 'Q0'), }) self._test_qrels('wikiclir/tl', count=72359, items={ 0: TrecQrel('12', '87382', 2, 'Q0'), 9: TrecQrel('305', '202908', 1, 'Q0'), 72358: TrecQrel('54959191', '155814', 2, 'Q0'), }) self._test_qrels('wikiclir/tr', count=380651, items={ 0: TrecQrel('12', '21889', 2, 'Q0'), 9: TrecQrel('12', '54359', 1, 'Q0'), 380650: TrecQrel('54965031', '2098262', 2, 'Q0'), }) self._test_qrels('wikiclir/uk', count=913358, items={ 0: TrecQrel('12', '12101', 2, 'Q0'), 9: TrecQrel('12', '1370301', 1, 'Q0'), 913357: TrecQrel('54966570', '2004654', 1, 'Q0'), }) self._test_qrels('wikiclir/vi', count=611355, items={ 0: TrecQrel('12', '307178', 2, 'Q0'), 9: TrecQrel('303', '33804', 2, 'Q0'), 611354: TrecQrel('54960571', '2174311', 2, 'Q0'), }) if __name__ == '__main__': unittest.main()
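For reference, every regular expression in the test file above follows one convention: flags=48 is re.DOTALL | re.UNICODE (16 + 32), and a quantified wildcard such as .{959} elides the middle of a long document while anchoring its exact start and end. A minimal self-contained sketch of the same idiom, using made-up sample text rather than the dataset's:

import re

# 48 is the flags value baked into every compiled pattern in the test above
assert re.DOTALL | re.UNICODE == 48

doc = 'head of a long document ... middle elided ... tail of the document .'
# pin the start and end exactly; let .{10,100} swallow the elided middle
pattern = re.compile(r'^head of a long document .{10,100}tail of the document \.$', flags=48)
assert pattern.match(doc) is not None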
113.686076
342
0.655191
5,789
44,906
5.064778
0.414407
0.020464
0.010232
0.017053
0.308458
0.292667
0.254604
0.254604
0.254604
0.254604
0
0.094593
0.205941
44,906
394
343
113.974619
0.725644
0
0
0.313472
0
0.246114
0.642119
0.024963
0
0
0
0
0
1
0.007772
false
0.002591
0.012953
0
0.023316
0.002591
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
becd6a72d53984442dca6d578e97cfc2a8721647
152
py
Python
wagtailpurge/testapp/urls.py
ababic/wagtail-purge
9f48b1fa11f738fc11b6f3b708190daef3e80926
[ "MIT" ]
null
null
null
wagtailpurge/testapp/urls.py
ababic/wagtail-purge
9f48b1fa11f738fc11b6f3b708190daef3e80926
[ "MIT" ]
2
2021-08-24T18:38:43.000Z
2021-08-24T18:43:25.000Z
wagtailpurge/testapp/urls.py
ababic/wagtailpurge
9f48b1fa11f738fc11b6f3b708190daef3e80926
[ "MIT" ]
null
null
null
from django.urls import include, path

urlpatterns = [
    path("admin/", include("wagtail.admin.urls")),
    path("", include("wagtail.core.urls")),
]
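As light context for the record above: this testapp URLconf mounts the Wagtail admin at /admin/ and Wagtail's page-serving routes at the site root (wagtail.core.urls is the pre-Wagtail-3 module path; Wagtail 3.0 renamed it to wagtail.urls). A minimal, abbreviated settings sketch showing how a test project might point Django at it; the app list is an assumption for illustration, not taken from the repository:

# hypothetical test-project settings fragment
ROOT_URLCONF = "wagtailpurge.testapp.urls"

INSTALLED_APPS = [
    "django.contrib.contenttypes",
    "django.contrib.auth",
    "wagtail.admin",
    "wagtail.core",  # Wagtail < 3.0 naming, matching the URLconf above
    "wagtailpurge",
    # ... remaining Django/Wagtail apps omitted
]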
21.714286
50
0.657895
18
152
5.555556
0.555556
0.28
0
0
0
0
0
0
0
0
0
0
0.144737
152
6
51
25.333333
0.769231
0
0
0
0
0
0.269737
0
0
0
0
0
0
1
0
false
0
0.2
0
0.2
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
bef28dbbf9186487d2d137351d753f60f9b49cd8
256
py
Python
abpytools/core/__init__.py
gf712/AbPyTools
9ff0d4346ad80487d43875bc77d99fbe76170db4
[ "MIT" ]
13
2017-06-13T12:31:47.000Z
2022-03-23T02:14:01.000Z
abpytools/core/__init__.py
gf712/AbPyTools
9ff0d4346ad80487d43875bc77d99fbe76170db4
[ "MIT" ]
8
2018-02-21T22:15:35.000Z
2022-02-01T12:27:58.000Z
abpytools/core/__init__.py
gf712/AbPyTools
9ff0d4346ad80487d43875bc77d99fbe76170db4
[ "MIT" ]
3
2018-04-10T08:01:39.000Z
2021-10-10T14:37:43.000Z
from .cache import Cache
from .base import CollectionBase
from .chain import Chain
from .chain_collection import ChainCollection
from .fab_collection import FabCollection
from .fab import Fab

__all__ = ["Chain", "ChainCollection", "FabCollection", "Fab"]
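A small usage sketch for the package __init__ above (runnable only with AbPyTools installed): all six names are importable from abpytools.core, but the export list deliberately exposes only the four public classes to wildcard imports.

import abpytools.core as core

print(core.__all__)  # ['Chain', 'ChainCollection', 'FabCollection', 'Fab']
# Cache and CollectionBase are importable directly, but are excluded
# from 'from abpytools.core import *' because they are not in __all__
print(core.Cache, core.CollectionBase)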
28.444444
62
0.800781
31
256
6.419355
0.354839
0.090452
0
0
0
0
0
0
0
0
0
0
0.121094
256
8
63
32
0.884444
0
0
0
0
0
0.140625
0
0
0
0
0
0
1
0
false
0
0.857143
0
0.857143
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
3
bef2e498f04136c43b5154c82d0e901213789638
89
py
Python
educore/apps.py
LawrenceDarko/School-management-system
2ff4334c97b3103e7bc279b48b0109bbed61ac95
[ "MIT" ]
null
null
null
educore/apps.py
LawrenceDarko/School-management-system
2ff4334c97b3103e7bc279b48b0109bbed61ac95
[ "MIT" ]
1
2021-02-26T21:26:08.000Z
2021-02-26T21:27:07.000Z
educore/apps.py
LawrenceDarko/School-management-system
2ff4334c97b3103e7bc279b48b0109bbed61ac95
[ "MIT" ]
null
null
null
from django.apps import AppConfig


class EducoreConfig(AppConfig):
    name = 'educore'
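For context, an AppConfig like the one above is activated through INSTALLED_APPS; a hypothetical settings fragment (either form works, and since Django 3.2 the plain app name auto-discovers the config class):

INSTALLED_APPS = [
    # ...
    'educore',                       # auto-discovers EducoreConfig on Django >= 3.2
    # 'educore.apps.EducoreConfig',  # explicit form, needed on older Django
]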
14.833333
33
0.752809
10
89
6.7
0.9
0
0
0
0
0
0
0
0
0
0
0
0.168539
89
5
34
17.8
0.905405
0
0
0
0
0
0.078652
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
bef55aac98dfc679be00280b4eae9156ee20e888
5,379
py
Python
app/machine_learning/hmm_smoother.py
ChristopherGS/sensor_readings
1695349276292ba16206cbc9efb94c10b65d439e
[ "BSD-3-Clause" ]
2
2015-10-01T09:03:31.000Z
2019-07-23T19:42:00.000Z
app/machine_learning/hmm_smoother.py
ChristopherGS/sensor_readings
1695349276292ba16206cbc9efb94c10b65d439e
[ "BSD-3-Clause" ]
null
null
null
app/machine_learning/hmm_smoother.py
ChristopherGS/sensor_readings
1695349276292ba16206cbc9efb94c10b65d439e
[ "BSD-3-Clause" ]
3
2016-01-13T13:07:26.000Z
2019-04-12T05:43:21.000Z
import numpy as np
import pandas as pd
import os

from hmmlearn import hmm

from utilities import convert_to_words

n_components = 8  # ('ybc', 'ymount', 'ysc', 'ycg', 'ocg', 'osc_mount', 'obc', 'other')

startprob = np.array([0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.05, 0.65])  # users will probably turn on sensor standing

"""
probability of these positions given current state:

'your_mount' if v == 0
else 'your_side_control' if v == 1
else 'your_closed_guard' if v == 2
else 'your_back_control' if v == 3
else 'opponent_mount_or_sc' if v == 4
else 'opponent_closed_guard' if v == 5
else 'opponent_back_control' if v == 6
else 'OTHER' if v == 7

transition_probability = {
    'ymt': {'ymount': 0.800, 'ysc': 0.050, 'ycg': 0.010, 'ybc': 0.050, 'osc_mount': 0.001, 'ocg': 0.050, 'obc': 0.001, 'other': 0.038},
    'ysc': {'ymount': 0.100, 'ysc': 0.800, 'ycg': 0.010, 'ybc': 0.010, 'osc_mount': 0.001, 'ocg': 0.050, 'obc': 0.001, 'other': 0.028},
    'ycg': {'ymount': 0.010, 'ysc': 0.050, 'ycg': 0.800, 'ybc': 0.010, 'osc_mount': 0.050, 'ocg': 0.001, 'obc': 0.001, 'other': 0.078},
    'ybc': {'ymount': 0.050, 'ysc': 0.010, 'ycg': 0.050, 'ybc': 0.800, 'osc_mount': 0.001, 'ocg': 0.010, 'obc': 0.001, 'other': 0.078},
    'omt': {'ymount': 0.001, 'ysc': 0.050, 'ycg': 0.010, 'ybc': 0.001, 'osc_mount': 0.800, 'ocg': 0.050, 'obc': 0.050, 'other': 0.038},
    'ocg': {'ymount': 0.100, 'ysc': 0.050, 'ycg': 0.010, 'ybc': 0.010, 'osc_mount': 0.001, 'ocg': 0.800, 'obc': 0.001, 'other': 0.028},
    'obc': {'ymount': 0.010, 'ysc': 0.050, 'ycg': 0.001, 'ybc': 0.010, 'osc_mount': 0.050, 'ocg': 0.001, 'obc': 0.800, 'other': 0.078},
    'oth': {'ymount': 0.050, 'ysc': 0.010, 'ycg': 0.050, 'ybc': 0.078, 'osc_mount': 0.001, 'ocg': 0.010, 'obc': 0.001, 'other': 0.800}
}
"""

transmat = np.array([
    [0.800, 0.050, 0.010, 0.050, 0.001, 0.050, 0.001, 0.038],
    [0.100, 0.800, 0.010, 0.010, 0.001, 0.050, 0.001, 0.028],
    [0.010, 0.050, 0.800, 0.010, 0.050, 0.001, 0.001, 0.078],
    [0.050, 0.010, 0.050, 0.800, 0.001, 0.010, 0.001, 0.078],
    [0.001, 0.050, 0.010, 0.001, 0.800, 0.050, 0.050, 0.038],
    [0.100, 0.050, 0.010, 0.010, 0.001, 0.800, 0.001, 0.028],
    [0.010, 0.050, 0.001, 0.010, 0.050, 0.001, 0.800, 0.078],
    [0.050, 0.010, 0.050, 0.078, 0.001, 0.010, 0.001, 0.800],
])

"""
probability of these positions given current state:

'your_mount' if v == 0
else 'your_side_control' if v == 1
else 'your_closed_guard' if v == 2
else 'your_back_control' if v == 3
else 'opponent_mount_or_sc' if v == 4
else 'opponent_closed_guard' if v == 5
else 'opponent_back_control' if v == 6
else 'OTHER' if v == 7

emission_probability = {
    'ymt': {'ymount': 0.500, 'ysc': 0.050, 'ycg': 0.010, 'ybc': 0.050, 'osc_mount': 0.001, 'ocg': 0.350, 'obc': 0.001, 'other': 0.038},
    'ysc': {'ymount': 0.100, 'ysc': 0.800, 'ycg': 0.010, 'ybc': 0.010, 'osc_mount': 0.001, 'ocg': 0.050, 'obc': 0.001, 'other': 0.028},
    'ycg': {'ymount': 0.010, 'ysc': 0.050, 'ycg': 0.400, 'ybc': 0.010, 'osc_mount': 0.500, 'ocg': 0.001, 'obc': 0.001, 'other': 0.078},
    'ybc': {'ymount': 0.050, 'ysc': 0.010, 'ycg': 0.050, 'ybc': 0.600, 'osc_mount': 0.001, 'ocg': 0.010, 'obc': 0.201, 'other': 0.078},
    'omt': {'ymount': 0.001, 'ysc': 0.050, 'ycg': 0.210, 'ybc': 0.050, 'osc_mount': 0.600, 'ocg': 0.050, 'obc': 0.001, 'other': 0.038},
    'ocg': {'ymount': 0.400, 'ysc': 0.050, 'ycg': 0.010, 'ybc': 0.010, 'osc_mount': 0.001, 'ocg': 0.400, 'obc': 0.001, 'other': 0.028},
    'obc': {'ymount': 0.010, 'ysc': 0.050, 'ycg': 0.001, 'ybc': 0.110, 'osc_mount': 0.050, 'ocg': 0.001, 'obc': 0.700, 'other': 0.078},
    'oth': {'ymount': 0.050, 'ysc': 0.010, 'ycg': 0.050, 'ybc': 0.078, 'osc_mount': 0.001, 'ocg': 0.010, 'obc': 0.001, 'other': 0.800}
}
"""

emissionprob = np.array([
    [0.500, 0.050, 0.010, 0.050, 0.001, 0.350, 0.001, 0.038],
    [0.100, 0.800, 0.010, 0.010, 0.001, 0.050, 0.001, 0.028],
    [0.010, 0.050, 0.350, 0.010, 0.500, 0.001, 0.001, 0.078],
    [0.050, 0.010, 0.050, 0.700, 0.001, 0.010, 0.101, 0.078],
    [0.001, 0.050, 0.210, 0.050, 0.600, 0.050, 0.001, 0.038],
    [0.400, 0.050, 0.010, 0.010, 0.001, 0.400, 0.001, 0.028],
    [0.010, 0.050, 0.001, 0.110, 0.050, 0.001, 0.700, 0.078],
    [0.050, 0.010, 0.050, 0.078, 0.001, 0.010, 0.001, 0.800],
])

# Hidden Markov Model with multinomial (discrete) emissions
model = hmm.MultinomialHMM(n_components=n_components, n_iter=10, verbose=False)
model.startprob_ = startprob
model.transmat_ = transmat
model.emissionprob_ = emissionprob


def apply_hmm(data):
    """smooth the predictions from the Random Forest classifier using HMM logic"""
    data_ = np.array(data)
    n_samples = len(data)
    raw_data = data_.reshape((n_samples, -1))
    # decode() returns (log-probability of the best path, state sequence)
    result = model.decode(raw_data, algorithm='viterbi')
    result_words = convert_to_words(result[1])
    # the original Python 2 print statements are written as print() calls here
    print('result words: {}'.format(result_words))
    print('result accuracy: {}'.format(result[0]))
    return result[1]
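An illustrative call to the module above (toy input, not taken from the repository): the upstream Random Forest classifier emits one integer state label in 0..7 per sensor window, and apply_hmm returns the Viterbi-smoothed label sequence.

# hypothetical per-window classifier output; 7 is the 'other' state
raw_predictions = [7, 7, 0, 3, 0, 0, 4, 4, 4, 7]
smoothed_states = apply_hmm(raw_predictions)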
51.721154
140
0.535044
947
5,379
2.968321
0.129884
0.088225
0.055141
0.051227
0.688367
0.670224
0.648524
0.614728
0.612238
0.563501
0
0.261029
0.237219
5,379
103
141
52.223301
0.42408
0.031418
0
0.142857
0
0
0.019535
0
0
0
0
0
0
0
null
null
0
0.119048
null
null
0.047619
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
3
831298201fc70935ecadc3a7cce9eaaec65f095c
31,413
py
Python
ooiui/core/routes/science.py
oceanobservatories/ooi-ui
028fc317c240dca3a1d2c6c66e54135265f23ee9
[ "MIT" ]
6
2015-02-12T13:23:00.000Z
2019-02-19T06:55:56.000Z
ooiui/core/routes/science.py
oceanobservatories/ooi-ui
028fc317c240dca3a1d2c6c66e54135265f23ee9
[ "MIT" ]
442
2015-01-09T14:33:33.000Z
2020-01-23T15:57:05.000Z
ooiui/core/routes/science.py
oceanobservatories/ooi-ui
028fc317c240dca3a1d2c6c66e54135265f23ee9
[ "MIT" ]
13
2015-02-05T00:32:05.000Z
2017-05-22T18:23:49.000Z
#!/usr/bin/env python ''' ooiui.core.routes.science Defines the application routes ''' from ooiui.core.app import app from flask import request, render_template, Response, jsonify from flask import stream_with_context from ooiui.core.routes.common import get_login, login_required import requests from ooiui.core.routes.decorators import login_required, scope_required import json @app.route('/cilogonhome') def new_index(): return render_template('common/home.html', tracking=app.config['GOOGLE_ANALYTICS']) @app.route('/landing/pioneer') @login_required() def landing_pioneer(): return render_template('landing/pioneer.html', tracking=app.config['GOOGLE_ANALYTICS']) # @app.route('/assets/list') # @app.route('/assets/list/') # @login_required() # def instr_index(): # return render_template('asset_management/assetslist.html', tracking=app.config['GOOGLE_ANALYTICS']) @app.route('/assets/management') @app.route('/assets/management/') def assets_management(): return render_template('asset_management/asset_management.html', tracking=app.config['GOOGLE_ANALYTICS']) @app.route('/assets/cruises') @app.route('/assets/cruises/') def asset_management_cruises(): return render_template('asset_management/cruises.html', tracking=app.config['GOOGLE_ANALYTICS']) @app.route('/assets/deployments') @app.route('/assets/deployments/') def asset_management_deployments(): return render_template('asset_management/deployments.html', tracking=app.config['GOOGLE_ANALYTICS']) @app.route('/events/list/') @login_required() def event_list(): return render_template('asset_management/eventslist.html', tracking=app.config['GOOGLE_ANALYTICS']) @app.route('/event/<int:id>', methods=['GET']) @login_required() def event_index(id): return render_template('asset_management/event.html', id=id, tracking=app.config['GOOGLE_ANALYTICS']) @app.route('/event/<string:new>/<int:aid>/<string:aclass>', methods=['GET']) @login_required() def event_new(new, aid, aclass): return render_template('asset_management/event.html', id=str(new), assetid=aid, aclass=str(aclass), tracking=app.config['GOOGLE_ANALYTICS']) @app.route('/streams/') def streams_page(): return render_template('science/streams.html', tracking=app.config['GOOGLE_ANALYTICS']) @app.route('/streamingdata/') @app.route('/streamingdata') def streaming_data_page(): return render_template('science/streaming_data.html', tracking=app.config['GOOGLE_ANALYTICS']) @app.route('/antelope_acoustic/') def acoustics_page(): return render_template('science/antelope_acoustic.html', tracking=app.config['GOOGLE_ANALYTICS']) # @app.route('/plot', methods=['GET']) # @app.route('/plot/', methods=['GET']) # def show_plot_no_path(): # return plot_page(None) # # # @app.route('/plot/<path:path>', methods=['GET']) # def plot_page(path): # return render_template('science/plot.html', tracking=app.config['GOOGLE_ANALYTICS']) @app.route('/datacatalog/') @app.route('/plot', methods=['GET']) @app.route('/plot/', methods=['GET']) @app.route('/data_access', methods=['GET']) @app.route('/data_access/', methods=['GET']) def show_data_access_no_path(): return render_template('science/data_access.html', tracking=app.config['GOOGLE_ANALYTICS']) # @app.route('/data_access/<path:path>', methods=['GET']) # def data_access(path): # return render_template('science/data_access.html', tracking=app.config['GOOGLE_ANALYTICS']) @app.route('/getdata/') def getData(): ''' gets data in the google chart format ''' instr = request.args['instrument'] stream = request.args['stream'] #std = request.args['startdate'] #edd = 
request.args['enddate'] #param = request.args['variables'] #ann = request.args['annotaton'] ann = "?annotation=true" response = requests.get(app.config['SERVICES_URL'] + '/uframe/get_data'+"/"+instr+"/"+stream+ann, params=request.args) return response.text, response.status_code @app.route('/api/get_data', methods=['GET']) def getUframeDataProxy(): ''' gets data in the google chart format ''' try: instr = request.args['instrument'] stream = request.args['stream'] # comma list xvars = request.args['xvars'] yvars = request.args['yvars'] # there should be a start and end date in the params # ?startdate=2015-01-21T22:01:48.103Z&enddate=2015-04-29T10:10:51.563Z data_url = "/".join([app.config['SERVICES_URL'], 'uframe/get_data', instr, stream, xvars, yvars]) response = requests.get(data_url, params=request.args) data_text = response.text data_text = data_text.replace("NaN", "null") return data_text, response.status_code, dict(response.headers) except Exception, e: return jsonify(error=str(e)) @app.route('/api/get_multistream', methods=['GET']) def getUframeMultiStreamInterp(): ''' Makes a request to the backend services to get the multi stream interpolated data Example request: /uframe/get_multistream/CP05MOAS-GL340-03-CTDGVM000/CP05MOAS-GL340-02-FLORTM000/telemetered_ctdgv_m_glider_instrument/ telemetered_flort_m_glider_instrument/sci_water_pressure/sci_flbbcd_chlor_units?startdate=2015-05-07T02:49:22.745Z&enddate=2015-06-28T04:00:41.282Z ''' try: # Parse the parameters ref_des1 = request.args['ref_des1'] ref_des2 = request.args['ref_des2'] instr1 = request.args['instr1'] instr2 = request.args['instr2'] var1 = request.args['var1'] var2 = request.args['var2'] startdate = request.args['startdate'] enddate = request.args['enddate'] # Build the URL params = '?startdate=%s&enddate=%s' % (startdate, enddate) # http://localhost:4000/uframe/get_multistream/CP05MOAS-GL340-03-CTDGVM000/CP05MOAS-GL340-02-FLORTM000/telemetered_ctdgv_m_glider_instrument/ # telemetered_flort_m_glider_instrument/sci_water_pressure/sci_flbbcd_chlor_units?startdate=2015-05-07T02:49:22.745Z&enddate=2015-06-28T04:00:41.282Z data_url = "/".join([app.config['SERVICES_URL'], 'uframe/get_multistream', ref_des1, ref_des2, instr1, instr2, var1, var2 + params]) print data_url # Get the response response = requests.get(data_url, params=request.args) data_text = response.text data_text = data_text.replace("NaN", "null") return data_text, response.status_code, dict(response.headers) except Exception, e: return jsonify(error=str(e)) # TODO @app.route('/api/get_large_format_data', methods=['GET']) def get_uframe_large_format_data(): ''' Make a request to the services to get the listing of large format data Example request: /uframe/get_large_format_files_by_ref/RS03ASHS-MJ03B-05-OBSSPA302/2015-11-30 ''' try: # Parse the parameters ref_des = request.args['ref_des'] date = request.args['date'] # Expecting ISO format <yyyy-mm-dd> # Build the URL data_url = "/".join([app.config['SERVICES_URL'], 'uframe/get_large_format_files_by_rd', ref_des, date]) # Get the response response = requests.get(data_url, params=request.args) return response.text, response.status_code, dict(response.headers) except Exception, e: return jsonify(error=str(e)) @app.route('/api/annotation', methods=['GET']) def get_annotations(): try: response = requests.get(app.config['SERVICES_URL'] + '/annotation', params=request.args) return response.text, response.status_code, dict(response.headers) except Exception, e: return jsonify(error=str(e)) @scope_required('annotate') 
@login_required() @app.route('/api/annotation', methods=['POST']) def post_annotation(): token = get_login() headers = {'Content-Type': 'application/json'} url = app.config['SERVICES_URL'] + '/annotation' print request.data response = requests.post(url, auth=(token, ''), data=request.data, headers=headers) return response.text, response.status_code, dict(response.headers) @scope_required('annotate') @login_required() @app.route('/api/annotation/<string:id>', methods=['PUT']) def put_annotation(id): token = get_login() headers = {'Content-Type': 'application/json'} url = app.config['SERVICES_URL'] + '/annotation/%s' % id response = requests.put(url, auth=(token, ''), data=request.data, headers=headers) return response.text, response.status_code @scope_required('annotate') @login_required() @app.route('/api/annotation/<string:id>', methods=['DELETE']) def delete_annotation(id): token = get_login() headers = {'Content-Type': 'application/json'} url = app.config['SERVICES_URL'] + '/annotation/delete/%s' % id response = requests.get(url, auth=(token, ''), data=request.data, headers=headers) return response.text, response.status_code @app.route('/api/annotation/qcflags', methods=['GET']) def get_qcflags(): try: response = requests.get(app.config['SERVICES_URL'] + '/annotation/qcflags', params=request.args) return response.text, response.status_code, dict(response.headers) except Exception, e: return jsonify(error=str(e)) # old @app.route('/api/array') def array_proxy(): response = requests.get(app.config['SERVICES_URL'] + '/arrays', params=request.args) # response = requests.get(app.config['SERVICES_URL'] + '/uframe/status/arrays', params=request.args) return response.text, response.status_code @app.route('/api/uframe/status/arrays') def status_arrays(): try: response = requests.get(app.config['SERVICES_URL'] + '/uframe/status/arrays', params=request.args) return response.text, response.status_code except Exception, e: print "error" + e.message return "Error getting array status from services", 400 @app.route('/api/uframe/status/sites/<string:array_code>') def status_sites(array_code): if request.args: array_code = request.args['node'] response = requests.get(app.config['SERVICES_URL'] + '/uframe/status/sites/%s' % array_code, params=request.args) return response.text, response.status_code @app.route('/api/uframe/status/sites') def status_sites_tree(): if request.args: array_code = request.args['node'] else: return "Bad node parameter", 400 response = requests.get(app.config['SERVICES_URL'] + '/uframe/status/sites/%s' % array_code, params=request.args) return response.text, response.status_code @app.route('/api/uframe/status/platforms/<string:array_code>') def status_platforms(array_code): if array_code: response = requests.get(app.config['SERVICES_URL'] + '/uframe/status/platforms/%s' % array_code, params=request.args) return response.text, response.status_code else: return "No platform status response.", 500 @app.route('/api/uframe/status/instrument/<string:ref_des>') def status_instrument(ref_des): response = requests.get(app.config['SERVICES_URL'] + '/uframe/status/instrument/%s' % ref_des, params=request.args) return response.text, response.status_code @app.route('/api/uframe/get_structured_toc') def structured_toc_proxy(): response = requests.get(app.config['SERVICES_URL'] + '/uframe/get_structured_toc', params=request.args) return response.text, response.status_code # old @app.route('/api/platform_deployment') def platform_deployment_proxy(): response = 
requests.get(app.config['SERVICES_URL'] + '/platform_deployments', params=request.args) return response.text, response.status_code # @app.route('/api/display_name') # def display_name(): # ref = request.args['reference_designator'] # response = requests.get(app.config['SERVICES_URL'] + '/display_name'+"?reference_designator="+ref, params=request.args) # return response.text, response.status_code # Assets @app.route('/api/asset_deployment', methods=['GET']) def instrument_deployment_proxy(): response = requests.get(app.config['SERVICES_URL'] + '/uframe/assets', params=request.args) if 'export' in request.args: return Response(response.text, mimetype='application/json', headers={'Content-Disposition':'attachment;filename=filtered_assets.json'}) else: return response.text, response.status_code @app.route('/api/asset_deployment/<int:id>', methods=['GET']) def instrument_deployment_get(id): response = requests.get(app.config['SERVICES_URL'] + '/uframe/assets/%s' % id, data=request.data) return response.text, response.status_code @app.route('/api/asset_deployment/<int:id>', methods=['PUT']) @scope_required('asset_manager') @login_required() def instrument_deployment_put(id): # print request.data response = requests.put(app.config['SERVICES_URL'] + '/uframe/assets/%s' % id, data=request.data) return response.text, response.status_code @app.route('/api/asset_deployment/edit_phase_values', methods=['GET']) @scope_required('asset_manager') @login_required() def asset_edit_phase_values(): response = requests.get(app.config['SERVICES_URL'] + '/uframe/assets/edit_phase_values', data=request.data) select = create_html_select_from_list(json.loads(response.text)['values']) return select, response.status_code @app.route('/api/asset_deployment/asset_type_values', methods=['GET']) @scope_required('asset_manager') @login_required() def asset_types_values(): response = requests.get(app.config['SERVICES_URL'] + '/uframe/assets/types/supported', data=request.data) select = create_html_select_from_list(json.loads(response.text)['asset_types']) # print select return select, response.status_code @app.route('/api/asset_deployment/ajax', methods=['POST']) @scope_required('asset_manager') @login_required() def instrument_deployment_put_ajax(): token = get_login() # print request.form # print request.data json_data = '' if (len(request.data) > 0): json_data = dot_to_json(json.loads(request.data)) if (len(request.form) > 0): json_data = dot_to_json(json.loads(json.dumps(request.form.to_dict()))) if len(json_data) > 0: clean_data = {k:v for k,v in json_data.iteritems() if (k != 'oper')} # print json_data # print clean_data else: return 'No operation type found in data!', 500 # print 'eventId' # print clean_data['eventId'] if 'oper' in json_data: operation_type = json_data['oper'] # print operation_type else: return 'No operation type found in data!', 500 if operation_type == 'edit': # print 'edit record' # print clean_data['eventId'] # print app.config['SERVICES_URL'] + '/uframe/events/%s' % clean_data['eventId'] # json.dumps(clean_data) response = requests.put(app.config['SERVICES_URL'] + '/uframe/assets/%s' % clean_data['id'], auth=(token, ''), data=json.dumps(clean_data)) return response.text, response.status_code # return 'Edit record operation', 200 if operation_type == 'add': # print 'add record' clean_data = {k:v for k,v in clean_data.iteritems() if (k != 'id' and k != 'lastModifiedTimestamp')} # print clean_data response = requests.post(app.config['SERVICES_URL'] + '/uframe/assets', auth=(token, ''), 
data=json.dumps(clean_data)) return response.text, response.status_code # return 'Add record operation!', 200 return 'No operation performed!', 200 def dot_to_json(a): output = {} for key, value in a.iteritems(): path = key.split('.') if path[0] == 'json': path = path[1:] target = reduce(lambda d, k: d.setdefault(k, {}), path[:-1], output) target[path[-1]] = value return output def create_html_select_from_list(the_values): output = "<select>" for value in the_values: output += '<option value="%s">%s</option>' % (value, value) output += "</select>" return output @app.route('/api/asset_deployment', methods=['POST']) @scope_required('asset_manager') @login_required() def instrument_deployment_post(): # print request.data response = requests.post(app.config['SERVICES_URL'] + '/uframe/assets', data=request.data) return response.text, response.status_code # not working/using now @app.route('/api/asset_deployment/<int:id>', methods=['DELETE']) @scope_required('asset_manager') @login_required() def instrument_deployment_delete(id): response = requests.delete(app.config['SERVICES_URL'] + '/uframe/assets/%s' % id, data=request.data) return response.text, response.status_code # Events @app.route('/api/asset_events', methods=['GET']) def event_deployments_proxy(): response = requests.get(app.config['SERVICES_URL'] + '/uframe/events', params=request.args) return response.text, response.status_code @app.route('/api/asset_events/<int:id>', methods=['GET']) def event_deployment_get(id): response = requests.get(app.config['SERVICES_URL'] + '/uframe/assets/%s/events' % id, params=request.args) # print response.text return response.text, response.status_code @app.route('/api/asset_events/<int:id>', methods=['PUT']) @scope_required('asset_manager') @login_required() def asset_event_put(id): token = get_login() response = requests.put(app.config['SERVICES_URL'] + '/uframe/events/%s' % id, auth=(token, ''), data=request.data) return response.text, response.status_code @app.route('/api/uframe/events/operational_status_values', methods=['GET']) def get_operational_status_values(): response = requests.get(app.config['SERVICES_URL'] + '/uframe/events/operational_status_values', params=request.args) select = create_html_select_from_list(json.loads(response.text)['operational_status_values']) return select, response.status_code @app.route('/api/asset_events', methods=['POST']) @scope_required('asset_manager') @login_required() def asset_event_post(): token = get_login() # print request.form # print request.data json_data = '' if (len(request.data) > 0): json_data = dot_to_json(json.loads(request.data)) if (len(request.form) > 0): json_data = dot_to_json(json.loads(json.dumps(request.form.to_dict()))) if len(json_data) > 0: clean_data = {k:v for k,v in json_data.iteritems() if (k != 'oper' and k != 'id')} # print json_data # print clean_data else: return 'No operation type found in data!', 500 # print 'eventId' # print clean_data['eventId'] if 'oper' in json_data: operation_type = json_data['oper'] # print operation_type else: return 'No operation type found in data!', 500 if operation_type == 'edit': # print 'edit record' # print clean_data['eventId'] # print app.config['SERVICES_URL'] + '/uframe/events/%s' % clean_data['eventId'] # print json.dumps(clean_data) response = requests.put(app.config['SERVICES_URL'] + '/uframe/events/%s' % clean_data['eventId'], auth=(token, ''), data=json.dumps(clean_data)) return response.text, response.status_code # return 'Edit record operation', 200 if operation_type == 'add': # 
print 'add record' clean_data = {k:v for k,v in clean_data.iteritems() if (k != 'eventId' and k != 'lastModifiedTimestamp')} # print clean_data response = requests.post(app.config['SERVICES_URL'] + '/uframe/events', auth=(token, ''), data=json.dumps(clean_data)) return response.text, response.status_code # return 'Add record operation!', 200 return 'No operation performed!', 200 @app.route('/api/events', methods=['GET']) def get_event_by_ref_des(): response = requests.get(app.config['SERVICES_URL'] + '/uframe/events?ref_des=%s' % request.args.get('ref_des'), data=request.args) return response.text, response.status_code @app.route('/opLog.html') def op_log(): return render_template('common/opLog.html', tracking=app.config['GOOGLE_ANALYTICS']) @app.route('/api/uframe/streams_for/<string:reference_designator>', methods=['GET']) def streams_for(reference_designator): token = get_login() response = requests.get(app.config['SERVICES_URL'] + '/uframe/streams_for/%s' % reference_designator, auth=(token, ''), params=request.args) return response.text, response.status_code @app.route('/api/uframe/instrument_list', methods=['GET']) def instrument_list(): token = get_login() response = requests.get(app.config['SERVICES_URL'] + '/uframe/instrument_list', auth=(token, ''), params=request.args) return response.text, response.status_code @app.route('/api/uframe/stream', methods=['GET']) def stream_proxy(): token = get_login() search_request_arg = request.args.get('search','') if len(search_request_arg) > 0 or len(request.args) == 0: response = requests.get(app.config['SERVICES_URL'] + '/uframe/stream', auth=(token, ''), params=request.args) return response.text, response.status_code else: return {}, 400 @app.route('/api/uframe/get_stream_for_model', methods=['GET']) def stream_for_model(): token = get_login() if len(request.args) > 0: response = requests.get(app.config['SERVICES_URL'] + '/uframe/get_stream_for_model/%s/%s/%s' % (request.args.get('ref_des',''), request.args.get('stream_method',''), request.args.get('stream','')), auth=(token, ''), params=request.args) return response.text, response.status_code else: return {}, 400 @app.route('/api/antelope_acoustic/list', methods=['GET']) def get_acoustic_datalist(): token = get_login() response = requests.get(app.config['SERVICES_URL'] + '/uframe/antelope_acoustic/list', auth=(token, ''), params=request.args) return response.text, response.status_code @app.route('/api/uframe/get_metadata/<string:stream_name>/<string:reference_designator>', methods=['GET']) def metadata_proxy(stream_name, reference_designator): ''' get metadata for a given ref and stream ''' token = get_login() response = requests.get(app.config['SERVICES_URL'] + '/uframe/get_metadata/%s/%s' % (stream_name, reference_designator), auth=(token, ''), params=request.args) return response.text, response.status_code @app.route('/api/uframe/get_metadata_times/<string:stream_name>/<string:reference_designator>', methods=['GET']) def metadata_times_proxy(stream_name, reference_designator): ''' get metadata times for a given ref and stream ''' token = get_login() response = requests.get(app.config['SERVICES_URL'] + '/uframe/get_metadata_times/%s/%s' % (stream_name, reference_designator), auth=(token, ''), params=request.args) return response.text, response.status_code @app.route('/api/uframe/stream/parameters/<string:reference_des>/<string:stream_method>/<string:stream>', methods=['GET']) def get_stream_parameters(reference_des, stream_method, stream): ''' get metadata times for a given ref and 
stream ''' token = get_login() response = requests.get(app.config['SERVICES_URL'] + '/uframe/stream/parameters/%s/%s/%s' % (reference_des, stream_method, stream), auth=(token, ''), params=request.args) return response.text, response.status_code @app.route('/api/uframe/get_csv/<string:stream_name>/<string:reference_designator>/<string:start>/<string:end>') def get_csv(stream_name, reference_designator, start, end): token = get_login() dpa = "1" user = request.args.get('user', '') email = request.args.get('email', '') parameters = request.args.get('parameters', '') estimate_only = request.args.get('estimate', 'false') url = app.config['SERVICES_URL'] + '/uframe/get_csv/%s/%s/%s/%s/%s?user=%s&email=%s&parameters=%s&estimate_only=%s' \ % (stream_name, reference_designator, start, end, dpa, user, email, parameters, estimate_only) req = requests.get(url, auth=(token, ''), stream=True) return Response(stream_with_context(req.iter_content(chunk_size=1024*1024*4)), headers=dict(req.headers)) @app.route('/api/uframe/get_json/<string:stream_name>/<string:reference_designator>/<string:start>/<string:end>/<string:provenance>/<string:annotations>') def get_json(stream_name, reference_designator, start, end, provenance, annotations): token = get_login() dpa = "0" user = request.args.get('user', '') email = request.args.get('email', '') parameters = request.args.get('parameters', '') estimate_only = request.args.get('estimate', 'false') url = app.config['SERVICES_URL'] + '/uframe/get_json/%s/%s/%s/%s/%s/%s/%s?user=%s&email=%s&parameters=%s&estimate_only=%s' \ % (stream_name, reference_designator, start, end, dpa, provenance, annotations, user, email, parameters, estimate_only) req = requests.get(url, auth=(token, ''), stream=True, params=request.args) return Response(stream_with_context(req.iter_content(chunk_size=1024*1024*4)), headers=dict(req.headers)) @app.route('/api/uframe/get_netcdf/<string:stream_name>/<string:reference_designator>/<string:start>/<string:end>/<string:provenance>/<string:annotations>') def get_netcdf(stream_name, reference_designator, start, end, provenance, annotations): token = get_login() dpa = "0" user = request.args.get('user', '') email = request.args.get('email', '') parameters = request.args.get('parameters', '') estimate_only = request.args.get('estimate', 'false') req = requests.get(app.config['SERVICES_URL'] + '/uframe/get_netcdf/%s/%s/%s/%s/%s/%s/%s?user=%s&email=%s&parameters=%s&estimate_only=%s' % (stream_name, reference_designator, start, end, dpa, provenance, annotations, user, email, parameters, estimate_only), auth=(token, ''), stream=True) return Response(stream_with_context(req.iter_content(chunk_size=1024*1024*4)), headers=dict(req.headers)) @app.route('/api/uframe/get_profiles/<string:stream_name>/<string:reference_designator>') def get_profiles(stream_name, reference_designator): token = get_login() req = requests.get(app.config['SERVICES_URL'] + '/uframe/get_profiles/%s/%s/%s/%s' % (stream_name, reference_designator), auth=(token, ''), stream=True) return Response(stream_with_context(req.iter_content(chunk_size=1024*1024*4)), headers=dict(req.headers)) @app.route('/svg/plot/<string:instrument>/<string:stream>', methods=['GET']) def get_plotdemo(instrument, stream): token = get_login() import time t0 = time.time() req = requests.get(app.config['SERVICES_URL'] + '/uframe/plot/%s/%s' % (instrument, stream), auth=(token, ''), params=request.args) t1 = time.time() # they fake the response to 200 return req.content, req.status_code, dict(req.headers) # C2 Routes 
@app.route('/api/c2/array_display/<string:array_code>', methods=['GET']) def get_c2_array_display(array_code): response = requests.get(app.config['SERVICES_URL'] + '/c2/array_display/%s' % (array_code)) return response.text, response.status_code @app.route('/api/c2/platform_display/<string:reference_designator>', methods=['GET']) def get_c2_platform_display(reference_designator): response = requests.get(app.config['SERVICES_URL'] + '/c2/platform_display/%s' % (reference_designator)) return response.text, response.status_code @app.route('/api/c2/instrument_display/<string:reference_designator>', methods=['GET']) def get_c2_instrument_display(reference_designator): response = requests.get(app.config['SERVICES_URL'] + '/c2/instrument_display/%s' % (reference_designator)) return response.text, response.status_code @app.route('/api/c2/instrument/<string:reference_designator>/<string:stream_name>', methods=['GET']) def get_c2_instrument_fields(reference_designator, stream_name): response = requests.get(app.config['SERVICES_URL'] + '/c2/instrument/%s/%s/fields' % (reference_designator, stream_name)) return response.text, response.status_code @app.route('/api/cruises', methods=['GET']) def get_cruises(): response = requests.get(app.config['SERVICES_URL'] + '/uframe/cruises') return response.text, response.status_code @app.route('/api/cruises/<string:eventId>/deployments', methods=['GET']) def get_cruise_deployments(eventId): response = requests.get(app.config['SERVICES_URL'] + '/uframe/cruises/%s/deployments' % (eventId)) return response.text, response.status_code @app.route('/api/deployments/subsites', methods=['GET']) def get_deployments_inv(): response = requests.get(app.config['SERVICES_URL'] + '/uframe/deployments/inv') return response.text, response.status_code @app.route('/api/deployments/<string:subsiteRd>/nodes', methods=['GET']) def get_deployments_nodes(subsiteRd): response = requests.get(app.config['SERVICES_URL'] + '/uframe/deployments/inv/' + subsiteRd) return response.text, response.status_code @app.route('/api/deployments/<string:subsiteRd>/<string:nodeRd>/sensors', methods=['GET']) def get_deployments_sensors(subsiteRd, nodeRd): response = requests.get(app.config['SERVICES_URL'] + '/uframe/deployments/inv/' + subsiteRd + '/' + nodeRd) return response.text, response.status_code @app.route('/api/deployments/<string:rd>', methods=['GET']) def get_deployments_by_rd(rd): response = requests.get(app.config['SERVICES_URL'] + '/uframe/deployments/' + rd) return response.text, response.status_code @app.route('/api/deployments/ajax', methods=['POST']) @scope_required('asset_manager') @login_required() def deployment_post_ajax(): token = get_login() # print request.form # print request.data json_data = '' if (len(request.data) > 0): json_data = dot_to_json(json.loads(request.data)) if (len(request.form) > 0): json_data = dot_to_json(json.loads(json.dumps(request.form.to_dict()))) if len(json_data) > 0: clean_data = {k:v for k,v in json_data.iteritems() if (k != 'oper')} # print json_data # print clean_data else: return 'No operation type found in data!', 500 # print 'eventId' # print clean_data['eventId'] if 'oper' in json_data: operation_type = json_data['oper'] # print operation_type else: return 'No operation type found in data!', 500 if operation_type == 'edit': # print 'edit record' # print clean_data['eventId'] # print app.config['SERVICES_URL'] + '/uframe/events/%s' % clean_data['eventId'] print json.dumps(clean_data) response = requests.put(app.config['SERVICES_URL'] + 
'/uframe/deployments/%s' % clean_data['eventId'], auth=(token, ''), data=json.dumps(clean_data)) return response.text, response.status_code # return 'Edit record operation', 200 if operation_type == 'add': # print 'add record' clean_data = {k:v for k,v in clean_data.iteritems() if (k != 'id' and k != 'lastModifiedTimestamp')} # print clean_data response = requests.post(app.config['SERVICES_URL'] + '/uframe/deployments', auth=(token, ''), data=json.dumps(clean_data)) return response.text, response.status_code # return 'Add record operation!', 200 return 'No operation performed!', 200
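For readability, a short worked example of what the dot_to_json helper defined above produces; the input keys here are hypothetical, but the output follows directly from its reduce/setdefault logic:

# dot_to_json({'json.asset.id': 5, 'json.asset.name': 'CTD'})
# -> {'asset': {'id': 5, 'name': 'CTD'}}
# The leading 'json' path segment is stripped; the remaining dotted segments
# become nested dict keys, with the final segment holding the value.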
39.713021
244
0.694139
4,079
31,413
5.172836
0.082128
0.031469
0.050758
0.059716
0.780711
0.731564
0.703697
0.695213
0.653602
0.570332
0
0.011738
0.148442
31,413
790
245
39.763291
0.777047
0.097189
0
0.435484
0
0.014113
0.239451
0.130467
0
0
0
0.001266
0
0
null
null
0
0.016129
null
null
0.008065
0
0
0
null
0
0
0
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
3
833a72069a3b407d6e65677670ecedfdfd6040e6
336
py
Python
tests/run_tests.py
zdu863/3dcorrelation
1683ee0af665e68924e67a11bffda26ab3269fe5
[ "BSD-3-Clause" ]
null
null
null
tests/run_tests.py
zdu863/3dcorrelation
1683ee0af665e68924e67a11bffda26ab3269fe5
[ "BSD-3-Clause" ]
null
null
null
tests/run_tests.py
zdu863/3dcorrelation
1683ee0af665e68924e67a11bffda26ab3269fe5
[ "BSD-3-Clause" ]
null
null
null
from numpy.testing import run_module_suite

# Per-module accuracy and input correctness tests
from ccl_test_distances import *
from ccl_test_growth import *
from ccl_test_core import *

# Overall interface functionality tests
from ccl_test_pyccl_interface import *

if __name__ == "__main__":
    # Run all tests
    run_module_suite()
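As a hedged sketch of what one of the imported ccl test modules is expected to look like (module and test names here are illustrative, not from the repo): run_module_suite collects nose-style module-level test_* functions, so each module just needs to expose them.

# hypothetical ccl_test_example.py
import numpy as np
from numpy.testing import assert_allclose

def test_scaling_roundtrip():
    # any module-level test_* function is discovered by run_module_suite
    assert_allclose(np.array([1.0, 2.0]) * 0.5, np.array([0.5, 1.0]))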
24
49
0.797619
48
336
5.145833
0.541667
0.11336
0.178138
0.129555
0
0
0
0
0
0
0
0
0.154762
336
13
50
25.846154
0.869718
0.294643
0
0
0
0
0.034335
0
0
0
0
0
0
1
0
true
0
0.714286
0
0.714286
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
3
835321ec0544eecbe2598eb29a8145edb42be3e7
54
py
Python
{{cookiecutter.publication_slug}}/process/preprocess/example.py
mberz/cookiecutter-publication
94b2ac661df4ae4678037b234e8d39d8b35fd5c4
[ "MIT" ]
null
null
null
{{cookiecutter.publication_slug}}/process/preprocess/example.py
mberz/cookiecutter-publication
94b2ac661df4ae4678037b234e8d39d8b35fd5c4
[ "MIT" ]
null
null
null
{{cookiecutter.publication_slug}}/process/preprocess/example.py
mberz/cookiecutter-publication
94b2ac661df4ae4678037b234e8d39d8b35fd5c4
[ "MIT" ]
null
null
null
# %%
__depends__ = []
__dest__ = ['../../data/']

# %%
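A hedged sketch of how a pipeline driver could consume the __depends__/__dest__ declarations above; the driver itself is hypothetical, only the two dunder names come from the stub:

import importlib.util

spec = importlib.util.spec_from_file_location("preprocess_example", "example.py")
mod = importlib.util.module_from_spec(spec)
spec.loader.exec_module(mod)
print("inputs:", mod.__depends__)   # []
print("outputs:", mod.__dest__)     # ['../../data/']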
10.8
26
0.425926
3
54
5
1
0
0
0
0
0
0
0
0
0
0
0
0.185185
54
5
27
10.8
0.340909
0.092593
0
0
0
0
0.234043
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
55d7870fa4a418f045eefc0ad2933a1681f87050
21
py
Python
scale_client/sensors/__init__.py
prav33nv/scale_client
dcbd6ed4c8f4a27606ebef5b5f9dabb2e4f3b806
[ "BSD-2-Clause-FreeBSD" ]
3
2018-05-24T00:59:05.000Z
2020-01-03T08:03:33.000Z
scale_client/sensors/__init__.py
prav33nv/scale_client
dcbd6ed4c8f4a27606ebef5b5f9dabb2e4f3b806
[ "BSD-2-Clause-FreeBSD" ]
26
2015-01-19T22:47:07.000Z
2017-05-03T01:43:10.000Z
scale_client/sensors/__init__.py
prav33nv/scale_client
dcbd6ed4c8f4a27606ebef5b5f9dabb2e4f3b806
[ "BSD-2-Clause-FreeBSD" ]
6
2015-01-20T20:05:09.000Z
2017-06-01T02:19:01.000Z
__author__ = 'kyle'
7
19
0.666667
2
21
5
1
0
0
0
0
0
0
0
0
0
0
0
0.190476
21
2
20
10.5
0.588235
0
0
0
0
0
0.2
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
55de47e8ec0b9759b4584edf9723ccc898f5dee9
405
py
Python
clean_architecture_helper/serializers.py
HerlanAssis/django-clean-architecture-helper
194c5bd524eb4a66c71904fc57cfef6f25916c0e
[ "Apache-2.0" ]
7
2019-11-08T20:08:24.000Z
2022-03-27T17:43:04.000Z
clean_architecture_helper/serializers.py
HerlanAssis/django-clean-architecture-helper
194c5bd524eb4a66c71904fc57cfef6f25916c0e
[ "Apache-2.0" ]
null
null
null
clean_architecture_helper/serializers.py
HerlanAssis/django-clean-architecture-helper
194c5bd524eb4a66c71904fc57cfef6f25916c0e
[ "Apache-2.0" ]
1
2019-10-31T20:45:32.000Z
2019-10-31T20:45:32.000Z
from rest_framework import serializers


class BaseSerializer(serializers.Serializer):
    '''
    Base serializer
    '''
    id = serializers.CharField(read_only=True)
    created_at = serializers.DateTimeField(read_only=True)
    updated_at = serializers.DateTimeField(read_only=True)
    deleted_at = serializers.DateTimeField(read_only=True)
    is_deleted = serializers.BooleanField(read_only=True)
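A minimal usage sketch, with field names invented for illustration: concrete serializers inherit the read-only audit fields from BaseSerializer and add their own writable ones.

class NoteSerializer(BaseSerializer):
    title = serializers.CharField(max_length=120)
    body = serializers.CharField(allow_blank=True)

s = NoteSerializer(data={"title": "hello", "body": ""})
assert s.is_valid()  # id/created_at/... are read_only, so input may omit them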
31.153846
58
0.767901
45
405
6.688889
0.466667
0.13289
0.199336
0.299003
0.378738
0.378738
0
0
0
0
0
0
0.145679
405
12
59
33.75
0.869942
0.037037
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.142857
0
1
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
3
55ea1ea8e3cc7600d46244e81fbf44f82f1dd700
309
py
Python
codeforces.com/50A/solution.py
zubtsov/competitive-programming
919d63130144347d7f6eddcf8f5bc2afb85fddf3
[ "MIT" ]
null
null
null
codeforces.com/50A/solution.py
zubtsov/competitive-programming
919d63130144347d7f6eddcf8f5bc2afb85fddf3
[ "MIT" ]
null
null
null
codeforces.com/50A/solution.py
zubtsov/competitive-programming
919d63130144347d7f6eddcf8f5bc2afb85fddf3
[ "MIT" ]
null
null
null
board_width, board_length = list(map(int, input().split()))
domino_length = 2

if board_width % 2 == 0:
    print((board_width // domino_length) * board_length)
elif board_length % 2 == 0:
    print((board_length // domino_length) * board_width)
else:
    print((board_width * board_length) // domino_length)
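Worth noting: all three branches compute floor(width * length / 2), since a 2x1 domino always covers exactly two cells, so the whole program could collapse to the final line. A quick check of the odd-by-odd case:

w, l = 3, 3
assert (w * l) // 2 == 4  # a 3x3 board fits 4 dominoes, one cell left uncovered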
30.9
59
0.702265
44
309
4.613636
0.340909
0.246305
0.147783
0.206897
0
0
0
0
0
0
0
0.019231
0.158576
309
9
60
34.333333
0.761538
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.375
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
55eb0d1aa42b32acbb82b840ccd0d8010c5a895a
200
py
Python
progress_bars.py
JonatasFontele/Algorithms_Structured-and-OOP-Exercises
338f1c2f0bd14d52b183dd2572e2ac59bad13d17
[ "MIT" ]
null
null
null
progress_bars.py
JonatasFontele/Algorithms_Structured-and-OOP-Exercises
338f1c2f0bd14d52b183dd2572e2ac59bad13d17
[ "MIT" ]
null
null
null
progress_bars.py
JonatasFontele/Algorithms_Structured-and-OOP-Exercises
338f1c2f0bd14d52b183dd2572e2ac59bad13d17
[ "MIT" ]
null
null
null
from tqdm import tqdm  # pip install tqdm
import time


def complicated_function():
    time.sleep(2)  # Simulating some complicated processing


for i in tqdm(range(100)):
    complicated_function()
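Beyond wrapping an iterable, tqdm also supports manual updates; a small sketch using its standard API:

from tqdm import tqdm
import time

with tqdm(total=100) as pbar:
    for _ in range(10):
        time.sleep(0.1)
        pbar.update(10)  # advance the bar by 10 units per chunk of work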
25
58
0.74
27
200
5.407407
0.703704
0.136986
0
0
0
0
0
0
0
0
0
0.02454
0.185
200
8
59
25
0.871166
0.27
0
0
0
0
0
0
0
0
0
0
0
1
0.166667
false
0
0.333333
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
55f622587bf6c18638ebd98e606f876766d28408
91
py
Python
test/test_run.py
tusharvaja/Uplabel
c832253b03673d0356b257d60ea3719ab7941575
[ "MIT" ]
3
2019-06-08T00:35:03.000Z
2021-01-25T13:17:22.000Z
test/test_run.py
tusharvaja/Uplabel
c832253b03673d0356b257d60ea3719ab7941575
[ "MIT" ]
null
null
null
test/test_run.py
tusharvaja/Uplabel
c832253b03673d0356b257d60ea3719ab7941575
[ "MIT" ]
1
2019-06-06T20:13:58.000Z
2019-06-06T20:13:58.000Z
import sys
sys.path.append('../code')

import main as m

ul = m.Main('tal', debug_iter_id=1)
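A hedged aside on the sys.path.append('../code') pattern: it only resolves correctly when the script is launched from inside the test/ directory. An equivalent, cwd-independent invocation (assuming the same repo layout) is:

# run from the repository root instead:
# PYTHONPATH=code python test/test_run.py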
18.2
35
0.692308
18
91
3.388889
0.777778
0
0
0
0
0
0
0
0
0
0
0.0125
0.120879
91
5
35
18.2
0.75
0
0
0
0
0
0.108696
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
55fd7d996fb641c455f339dc1789a387490a230a
214
py
Python
resttorrent/commands/get_session.py
shlee322/resttorrent
781c1c1810140c27c4e29c7a0b421e86e2ed8d1d
[ "MIT" ]
null
null
null
resttorrent/commands/get_session.py
shlee322/resttorrent
781c1c1810140c27c4e29c7a0b421e86e2ed8d1d
[ "MIT" ]
null
null
null
resttorrent/commands/get_session.py
shlee322/resttorrent
781c1c1810140c27c4e29c7a0b421e86e2ed8d1d
[ "MIT" ]
null
null
null
from resttorrent.decorators import command


@command('1', '/sessions/<session_id>')
def get_session(session_id):
    from resttorrent.modules.torrent import get_session as get_info
    return get_info(session_id)
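A hedged sketch of what the command decorator plausibly does, registering a handler under an API version and route pattern; the registry shown is illustrative, not resttorrent's actual implementation:

_COMMANDS = {}

def command(version, path):
    def wrap(fn):
        _COMMANDS[(version, path)] = fn  # e.g. ('1', '/sessions/<session_id>')
        return fn
    return wrap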
26.75
67
0.785047
30
214
5.366667
0.533333
0.167702
0
0
0
0
0
0
0
0
0
0.005319
0.121495
214
7
68
30.571429
0.851064
0
0
0
0
0
0.107477
0.102804
0
0
0
0
0
1
0.2
false
0
0.4
0
0.8
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
3
361f3ee422406ebcd127d9e2f10596fbf0daf888
854
py
Python
backend/polls/migrations/0002_auto_20181107_1347.py
IINamelessII/YesOrNo
0ebbdfbae73f0be7c807a8f6ca0ec7c2040cca19
[ "Apache-2.0" ]
3
2019-02-17T01:25:19.000Z
2019-04-01T12:57:00.000Z
backend/polls/migrations/0002_auto_20181107_1347.py
IINamelessII/YesOrNo
0ebbdfbae73f0be7c807a8f6ca0ec7c2040cca19
[ "Apache-2.0" ]
3
2021-03-08T23:44:34.000Z
2022-02-12T05:07:13.000Z
backend/polls/migrations/0002_auto_20181107_1347.py
IINamelessII/YesOrNo
0ebbdfbae73f0be7c807a8f6ca0ec7c2040cca19
[ "Apache-2.0" ]
2
2018-12-12T19:24:59.000Z
2018-12-14T20:01:42.000Z
# Generated by Django 2.1.2 on 2018-11-07 13:47

from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('polls', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='poll',
            name='agree',
            field=models.PositiveIntegerField(default=1),
        ),
        migrations.AlterField(
            model_name='poll',
            name='disagree',
            field=models.PositiveIntegerField(default=1),
        ),
        migrations.AlterField(
            model_name='poll',
            name='dislikes',
            field=models.PositiveIntegerField(default=1),
        ),
        migrations.AlterField(
            model_name='poll',
            name='likes',
            field=models.PositiveIntegerField(default=1),
        ),
    ]
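For context, the standard Django CLI workflow that produces and applies an auto-generated migration like this one:

# python manage.py makemigrations polls   # diffs the models, emits 0002_auto_...
# python manage.py migrate polls          # applies the four AlterField operations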
25.117647
57
0.555035
75
854
6.253333
0.453333
0.170576
0.21322
0.247335
0.648188
0.565032
0.486141
0.486141
0.486141
0.486141
0
0.04021
0.330211
854
33
58
25.878788
0.77972
0.052693
0
0.592593
1
0
0.07311
0
0
0
0
0
0
1
0
false
0
0.037037
0
0.148148
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
362234037271924ee91ae7a661d9cb4f1dbcebf0
365
py
Python
example.py
jinserk/PyASR
97fceed74852a0b8ab54ee2fa593d3fc0a5a49ba
[ "Apache-2.0" ]
null
null
null
example.py
jinserk/PyASR
97fceed74852a0b8ab54ee2fa593d3fc0a5a49ba
[ "Apache-2.0" ]
null
null
null
example.py
jinserk/PyASR
97fceed74852a0b8ab54ee2fa593d3fc0a5a49ba
[ "Apache-2.0" ]
null
null
null
import os.path as op

from pyasr.features import mfcc
from pyasr.features import logfbank
import scipy.io.wavfile as wav

timit_base = "/home/leo/work/neural/timit/TIMIT"
wav_file = op.join(timit_base, "TRAIN/DR1/FCJF0/SI1027.WAV")
print(wav_file)  # parenthesized so the script also runs under Python 3

(rate, sig) = wav.read(wav_file)
mfcc_feat = mfcc(sig, rate)
fbank_feat = logfbank(sig, rate)
print(fbank_feat[1:3, :])
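A hedged shape note, assuming pyasr mirrors the python_speech_features-style API its signatures suggest:

# mfcc_feat.shape  == (num_frames, num_cepstra)
# fbank_feat.shape == (num_frames, num_filters)
# so fbank_feat[1:3, :] prints the filterbank energies of frames 1 and 2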
22.8125
60
0.764384
64
365
4.234375
0.53125
0.077491
0.125461
0.169742
0
0
0
0
0
0
0
0.02454
0.106849
365
15
61
24.333333
0.806748
0
0
0
0
0
0.161644
0.161644
0
0
0
0
0
0
null
null
0
0.363636
null
null
0.181818
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
3
362508fa2aa4e3ee5235324f2687470d014503d9
411
py
Python
bds/argparser.py
ervitis/bdir-data-service
95b9ae3dbf7bd4dc2583d415ae2500593d914bea
[ "MIT" ]
null
null
null
bds/argparser.py
ervitis/bdir-data-service
95b9ae3dbf7bd4dc2583d415ae2500593d914bea
[ "MIT" ]
null
null
null
bds/argparser.py
ervitis/bdir-data-service
95b9ae3dbf7bd4dc2583d415ae2500593d914bea
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*-
from argparse import ArgumentParser


class DataServiceArgumentParser(object):

    def __init__(self):
        self._parser = ArgumentParser()

    def parse_build(self, short_description, long_description, **kwargs):
        self._parser.add_argument(short_description, long_description, **kwargs)
        return self

    @property
    def parser(self):
        return self._parser
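A usage sketch with hypothetical flag names, showing the fluent chaining that parse_build's return self enables:

parser = (DataServiceArgumentParser()
          .parse_build('-p', '--path', help='input directory')
          .parse_build('-v', '--verbose', action='store_true')
          .parser)
args = parser.parse_args(['-p', '/tmp/data', '-v'])
print(args.path, args.verbose)  # /tmp/data True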
21.631579
80
0.693431
43
411
6.325581
0.55814
0.110294
0.147059
0.227941
0.272059
0
0
0
0
0
0
0.003077
0.209246
411
18
81
22.833333
0.833846
0.051095
0
0
0
0
0
0
0
0
0
0
0
1
0.3
false
0
0.1
0.1
0.7
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
3
3629126f10ebd6a700f0bb4ea76af84257329415
259
py
Python
evm/utils/hexadecimal.py
zixuanzh/py-evm
de05e73036c663e85083316bc503549044792892
[ "MIT" ]
137
2017-03-17T11:37:51.000Z
2022-03-07T07:51:28.000Z
evm/utils/hexadecimal.py
zixuanzh/py-evm
de05e73036c663e85083316bc503549044792892
[ "MIT" ]
102
2017-04-07T10:43:03.000Z
2018-11-11T18:01:56.000Z
evm/utils/hexadecimal.py
zixuanzh/py-evm
de05e73036c663e85083316bc503549044792892
[ "MIT" ]
39
2017-03-17T11:38:52.000Z
2021-02-18T23:05:17.000Z
from __future__ import unicode_literals

import codecs


def encode_hex(value):
    return '0x' + codecs.decode(codecs.encode(value, 'hex'), 'utf8')


def decode_hex(value):
    _, _, hex_part = value.rpartition('x')
    return codecs.decode(hex_part, 'hex')
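A round-trip sketch; note these helpers date from the Python 2 era of the 'hex' codec, where str and bytes coincide, so exact types under Python 3 should be treated as an assumption:

# encode_hex(b'\x01\xff') -> '0x01ff'
# decode_hex('0x01ff')    -> b'\x01\xff'   (rpartition('x') strips the '0x' prefix,
#                                           so bare hex strings work as well)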
19.923077
68
0.706564
35
259
4.914286
0.485714
0.093023
0
0
0
0
0
0
0
0
0
0.009174
0.158301
259
12
69
21.583333
0.779817
0
0
0
0
0
0.050193
0
0
0
0
0
0
1
0.285714
false
0
0.285714
0.142857
0.857143
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
362fc78ccc8facbb517d515ac0e5b9560b2ea751
5,605
py
Python
saleor/graphql/payment/tests/mutations/test_payment_capture.py
eanknd/saleor
08aa724176be00d7aaf654f14e9ae99dd4327f97
[ "CC-BY-4.0" ]
1,392
2021-10-06T15:54:28.000Z
2022-03-31T20:50:55.000Z
saleor/graphql/payment/tests/mutations/test_payment_capture.py
eanknd/saleor
08aa724176be00d7aaf654f14e9ae99dd4327f97
[ "CC-BY-4.0" ]
888
2021-10-06T10:48:54.000Z
2022-03-31T11:00:30.000Z
saleor/graphql/payment/tests/mutations/test_payment_capture.py
gustavoarmoa/saleor
f81b2f347e4c7a624cd68a1eca3b0a5611498f6e
[ "CC-BY-4.0" ]
538
2021-10-07T16:21:27.000Z
2022-03-31T22:58:57.000Z
from unittest.mock import patch

import graphene

from .....payment import TransactionKind
from .....payment.gateways.dummy_credit_card import (
    TOKEN_EXPIRED,
    TOKEN_VALIDATION_MAPPING,
)
from .....payment.models import ChargeStatus
from ....tests.utils import get_graphql_content

CAPTURE_QUERY = """
    mutation PaymentCapture($paymentId: ID!, $amount: PositiveDecimal) {
        paymentCapture(paymentId: $paymentId, amount: $amount) {
            payment {
                id, chargeStatus
            }
            errors {
                field
                message
                code
            }
        }
    }
"""


def test_payment_capture_success(
    staff_api_client, permission_manage_orders, payment_txn_preauth
):
    payment = payment_txn_preauth
    assert payment.charge_status == ChargeStatus.NOT_CHARGED
    payment_id = graphene.Node.to_global_id("Payment", payment.pk)

    variables = {"paymentId": payment_id, "amount": str(payment_txn_preauth.total)}
    response = staff_api_client.post_graphql(
        CAPTURE_QUERY, variables, permissions=[permission_manage_orders]
    )
    content = get_graphql_content(response)
    data = content["data"]["paymentCapture"]
    assert not data["errors"]
    payment_txn_preauth.refresh_from_db()
    assert payment.charge_status == ChargeStatus.FULLY_CHARGED
    assert payment.transactions.count() == 2
    txn = payment.transactions.last()
    assert txn.kind == TransactionKind.CAPTURE


def test_payment_capture_with_invalid_argument(
    staff_api_client, permission_manage_orders, payment_txn_preauth
):
    payment = payment_txn_preauth
    assert payment.charge_status == ChargeStatus.NOT_CHARGED
    payment_id = graphene.Node.to_global_id("Payment", payment.pk)

    variables = {"paymentId": payment_id, "amount": 0}
    response = staff_api_client.post_graphql(
        CAPTURE_QUERY, variables, permissions=[permission_manage_orders]
    )
    content = get_graphql_content(response)
    data = content["data"]["paymentCapture"]
    assert len(data["errors"]) == 1
    assert data["errors"][0]["message"] == "Amount should be a positive number."


def test_payment_capture_with_payment_non_authorized_yet(
    staff_api_client, permission_manage_orders, payment_dummy
):
    """Ensure that capturing a payment which has not yet been authorized
    fails with the proper error message.
    """
    payment = payment_dummy
    assert payment.charge_status == ChargeStatus.NOT_CHARGED

    payment_id = graphene.Node.to_global_id("Payment", payment.pk)
    variables = {"paymentId": payment_id, "amount": 1}
    response = staff_api_client.post_graphql(
        CAPTURE_QUERY, variables, permissions=[permission_manage_orders]
    )
    content = get_graphql_content(response)
    data = content["data"]["paymentCapture"]
    assert data["errors"] == [
        {
            "field": None,
            "message": "Cannot find successful auth transaction.",
            "code": "PAYMENT_ERROR",
        }
    ]


def test_payment_capture_gateway_error(
    staff_api_client, permission_manage_orders, payment_txn_preauth, monkeypatch
):
    # given
    payment = payment_txn_preauth
    assert payment.charge_status == ChargeStatus.NOT_CHARGED
    payment_id = graphene.Node.to_global_id("Payment", payment.pk)
    variables = {"paymentId": payment_id, "amount": str(payment_txn_preauth.total)}
    monkeypatch.setattr("saleor.payment.gateways.dummy.dummy_success", lambda: False)

    # when
    response = staff_api_client.post_graphql(
        CAPTURE_QUERY, variables, permissions=[permission_manage_orders]
    )

    # then
    content = get_graphql_content(response)
    data = content["data"]["paymentCapture"]
    assert data["errors"] == [
        {"field": None, "message": "Unable to process capture", "code": "PAYMENT_ERROR"}
    ]
    payment_txn_preauth.refresh_from_db()
    assert payment.charge_status == ChargeStatus.NOT_CHARGED
    assert payment.transactions.count() == 2
    txn = payment.transactions.last()
    assert txn.kind == TransactionKind.CAPTURE
    assert not txn.is_success


@patch(
    "saleor.payment.gateways.dummy_credit_card.plugin."
    "DummyCreditCardGatewayPlugin.DEFAULT_ACTIVE",
    True,
)
def test_payment_capture_gateway_dummy_credit_card_error(
    staff_api_client, permission_manage_orders, payment_txn_preauth, monkeypatch
):
    # given
    token = TOKEN_EXPIRED
    error = TOKEN_VALIDATION_MAPPING[token]

    payment = payment_txn_preauth
    payment.gateway = "mirumee.payments.dummy_credit_card"
    payment.save()

    transaction = payment.transactions.last()
    transaction.token = token
    transaction.save()

    assert payment.charge_status == ChargeStatus.NOT_CHARGED
    payment_id = graphene.Node.to_global_id("Payment", payment.pk)
    variables = {"paymentId": payment_id, "amount": str(payment_txn_preauth.total)}
    monkeypatch.setattr(
        "saleor.payment.gateways.dummy_credit_card.dummy_success", lambda: False
    )

    # when
    response = staff_api_client.post_graphql(
        CAPTURE_QUERY, variables, permissions=[permission_manage_orders]
    )

    # then
    content = get_graphql_content(response)
    data = content["data"]["paymentCapture"]
    assert data["errors"] == [
        {"field": None, "message": error, "code": "PAYMENT_ERROR"}
    ]
    payment_txn_preauth.refresh_from_db()
    assert payment.charge_status == ChargeStatus.NOT_CHARGED
    assert payment.transactions.count() == 2
    txn = payment.transactions.last()
    assert txn.kind == TransactionKind.CAPTURE
    assert not txn.is_success
33.16568
88
0.706869
623
5,605
6.065811
0.189406
0.037047
0.06298
0.052924
0.750728
0.714739
0.702567
0.691188
0.691188
0.691188
0
0.001547
0.192685
5,605
168
89
33.363095
0.833591
0.022302
0
0.488722
0
0
0.182418
0.050366
0
0
0
0
0.165414
1
0.037594
false
0
0.045113
0
0.082707
0
0
0
0
null
0
0
0
0
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
36312c10c0caad738290f24f167f61c90e7c1847
54,386
py
Python
code/lib/tfbldr/datasets/audio/datasets.py
dendisuhubdy/representation_mixing
146ddc7a2cc34544bb4516149ccfcbe72eedd102
[ "BSD-3-Clause" ]
51
2018-11-16T16:11:40.000Z
2020-04-07T08:12:26.000Z
code/lib/tfbldr/datasets/audio/datasets.py
dendisuhubdy/representation_mixing
146ddc7a2cc34544bb4516149ccfcbe72eedd102
[ "BSD-3-Clause" ]
1
2018-11-30T08:36:58.000Z
2018-12-18T16:37:07.000Z
code/lib/tfbldr/datasets/audio/datasets.py
dendisuhubdy/representation_mixing
146ddc7a2cc34544bb4516149ccfcbe72eedd102
[ "BSD-3-Clause" ]
9
2018-11-21T12:54:42.000Z
2020-07-17T07:39:24.000Z
from .audio_tools import stft from .audio_tools import linear_to_mel_weight_matrix from .audio_tools import stft from .audio_tools import iterate_invert_spectrogram from .audio_tools import soundsc from ..text import pronounce_chars from ..text.cleaning import text_to_sequence from ..text.cleaning import sequence_to_text from ..text.cleaning import cleaners from ..text.cleaning import get_vocabulary_sizes from ...core import get_logger from scipy.io import wavfile import numpy as np import copy import os import json logger = get_logger() # As originally seen in sklearn.utils.extmath # Credit to the sklearn team def _incremental_mean_and_var(X, last_mean=.0, last_variance=None, last_sample_count=0): """Calculate mean update and a Youngs and Cramer variance update. last_mean and last_variance are statistics computed at the last step by the function. Both must be initialized to 0.0. In case no scaling is required last_variance can be None. The mean is always required and returned because necessary for the calculation of the variance. last_n_samples_seen is the number of samples encountered until now. From the paper "Algorithms for computing the sample variance: analysis and recommendations", by Chan, Golub, and LeVeque. Parameters ---------- X : array-like, shape (n_samples, n_features) Data to use for variance update last_mean : array-like, shape: (n_features,) last_variance : array-like, shape: (n_features,) last_sample_count : int Returns ------- updated_mean : array, shape (n_features,) updated_variance : array, shape (n_features,) If None, only mean is computed updated_sample_count : int References ---------- T. Chan, G. Golub, R. LeVeque. Algorithms for computing the sample variance: recommendations, The American Statistician, Vol. 37, No. 3, pp. 
242-247 Also, see the sparse implementation of this in `utils.sparsefuncs.incr_mean_variance_axis` and `utils.sparsefuncs_fast.incr_mean_variance_axis0` """ # old = stats until now # new = the current increment # updated = the aggregated stats last_sum = last_mean * last_sample_count new_sum = X.sum(axis=0) new_sample_count = X.shape[0] updated_sample_count = last_sample_count + new_sample_count updated_mean = (last_sum + new_sum) / updated_sample_count if last_variance is None: updated_variance = None else: new_unnormalized_variance = X.var(axis=0) * new_sample_count if last_sample_count == 0: # Avoid division by 0 updated_unnormalized_variance = new_unnormalized_variance else: last_over_new_count = last_sample_count / new_sample_count last_unnormalized_variance = last_variance * last_sample_count updated_unnormalized_variance = ( last_unnormalized_variance + new_unnormalized_variance + last_over_new_count / updated_sample_count * (last_sum / last_over_new_count - new_sum) ** 2) updated_variance = updated_unnormalized_variance / updated_sample_count return updated_mean, updated_variance, updated_sample_count class wavfile_caching_mel_tbptt_iterator(object): def __init__(self, wavfile_list, txtfile_list, batch_size, truncation_length, audio_processing="default", symbol_processing="blended", wav_scale = 2 ** 15, window_size=512, window_step=128, n_mel_filters=80, return_normalized=True, lower_edge_hertz=125.0, upper_edge_hertz=7800.0, start_index=0, stop_index=None, cache_dir_base="/Tmp/kastner/tfbldr_cache", shuffle=False, random_state=None): self.wavfile_list = wavfile_list self.wav_scale = wav_scale self.txtfile_list = txtfile_list self.batch_size = batch_size self.truncation_length = truncation_length self.random_state = random_state self.shuffle = shuffle self.cache_dir_base = cache_dir_base self.return_normalized = return_normalized self.lower_edge_hertz = lower_edge_hertz self.upper_edge_hertz = upper_edge_hertz self.audio_processing = audio_processing self.symbol_processing = symbol_processing symbol_opts = ["blended_pref", "blended", "chars_only", "phones_only", "both"] if symbol_processing not in symbol_opts: raise ValueError("symbol_processing set to invalid argument {}, should be one of {}".format(symbol_processing, symbol_opts)) if audio_processing != "default": raise ValueError("Non-default settings not supported yet") clean_names = ["english_cleaners", "english_phone_cleaners"] self.clean_names = clean_names self.vocabulary_sizes = get_vocabulary_sizes(clean_names) self._special_chars = "!,:?" self.window_size = window_size self.window_step = window_step self.n_mel_filters = n_mel_filters self.start_index = start_index self.stop_index = stop_index if shuffle and self.random_state == None: raise ValueError("Must pass random_state in") if txtfile_list is not None: # try to match every txt file and every wav file by name wv_names_and_bases = sorted([(wv.split(os.sep)[-1], str(os.sep).join(wv.split(os.sep)[:-1])) for wv in self.wavfile_list]) tx_names_and_bases = sorted([(tx.split(os.sep)[-1], str(os.sep).join(tx.split(os.sep)[:-1])) for tx in self.txtfile_list]) wv_i = 0 tx_i = 0 wv_match = [] tx_match = [] wv_lu = {} tx_lu = {} for txnb in tx_names_and_bases: if "." 
in txnb[0]: tx_part = ".".join(txnb[0].split(".")[:1]) else: # support txt files with no ext tx_part = txnb[0] tx_lu[tx_part] = txnb[1] + os.sep + txnb[0] for wvnb in wv_names_and_bases: wv_part = ".".join(wvnb[0].split(".")[:1]) wv_lu[wv_part] = wvnb[1] + os.sep + wvnb[0] # set of in common keys shared_k = sorted([k for k in wv_lu.keys() if k in tx_lu]) if self.symbol_processing == "blended_pref": # no pruning needed for preferential blending pass elif self.symbol_processing == "blended": # no pruning needed for blending pass elif self.symbol_processing == "chars_only": # all txt files will have chars pass elif self.symbol_processing in ["phones_only", "both"]: # not all files will have valid phones, need to prune the set of files up front to avoid complex issues later print("Pruning files to only phone results...") to_remove = [] for n, sk in enumerate(shared_k): txtpath = tx_lu[sk] if not txtpath.endswith(".json"): raise ValueError("Expected .json file, path given was {}".format(txtpath)) with open(txtpath, "rb") as f: tj = json.load(f) no_phones = [False if "phones" in word else True for word in tj["words"]] if any(no_phones): to_remove.append(sk) if n % 1000 == 0: print("File {} of {} inspected".format(n + 1, len(shared_k))) for tr in to_remove: del wv_lu[tr] if tr in tx_lu: del tx_lu[tr] shared_k = sorted([k for k in wv_lu.keys() if k in tx_lu]) else: raise ValueError("Unknown value for self.symbol_processing {}".format(self.symbol_processing)) for k in shared_k: wv_match.append(wv_lu[k]) tx_match.append(tx_lu[k]) self.wavfile_list = wv_match self.txtfile_list = tx_match self.cache = self.cache_dir_base + os.sep + "-".join(self.wavfile_list[0].split(os.sep)[1:-1]) if not os.path.exists(self.cache): os.makedirs(self.cache) if 0 < self.start_index < 1: self.start_index = int(len(self.wavfile_list) * self.start_index) elif self.start_index >= 1: self.start_index = int(self.start_index) if self.start_index >= len(self.wavfile_list): raise ValueError("start_index {} >= length of wavfile list {}".format(self.start_index, len(self.wavfile_list))) elif self.start_index == 0: self.start_index = int(self.start_index) else: raise ValueError("Invalid value for start_index : {}".format(self.start_index)) if self.stop_index == None: self.stop_index = len(self.wavfile_list) elif 0 < self.stop_index < 1: self.stop_index = int(len(self.wavfile_list) * self.stop_index) elif self.stop_index >= 1: self.stop_index = int(self.stop_index) if self.stop_index >= len(self.wavfile_list): raise ValueError("stop_index {} >= length of wavfile list {}".format(self.stop_index, len(self.wavfile_list))) else: raise ValueError("Invalid value for stop_index : {}".format(self.stop_index)) # could match sizes here... 
self.wavfile_sizes_mbytes = [os.stat(wf).st_size // 1024 for wf in self.wavfile_list] if return_normalized: self.return_normalized = False # reset random seed here cur_random = self.random_state.get_state() # set up for train / test splits self.all_indices_ = np.arange(len(self.wavfile_list)) self.random_state.shuffle(self.all_indices_) self.all_indices_ = sorted(self.all_indices_[self.start_index:self.stop_index]) self.current_indices_ = [self.random_state.choice(self.all_indices_) for i in range(self.batch_size)] self.current_offset_ = [0] * self.batch_size self.current_read_ = [self.cache_read_wav_and_txt_features(self.wavfile_list[i], self.txtfile_list[i]) for i in self.current_indices_] self.to_reset_ = [0] * self.batch_size mean, std = self.cache_calculate_mean_and_std_normalization() self._mean = mean self._std = std self.random_state = np.random.RandomState() self.random_state.set_state(cur_random) self.return_normalized = True # set up for train / test splits self.all_indices_ = np.arange(len(self.wavfile_list)) self.random_state.shuffle(self.all_indices_) self.all_indices_ = sorted(self.all_indices_[self.start_index:self.stop_index]) self.current_indices_ = [self.random_state.choice(self.all_indices_) for i in range(self.batch_size)] self.current_offset_ = [0] * self.batch_size self.current_read_ = [self.cache_read_wav_and_txt_features(self.wavfile_list[i], self.txtfile_list[i]) for i in self.current_indices_] self.to_reset_ = [0] * self.batch_size def next_batch(self): mel_batch = np.zeros((self.truncation_length, self.batch_size, self.n_mel_filters)) resets = np.ones((self.batch_size, 1)) texts = [] masks = [] for bi in range(self.batch_size): wf, txf, mf = self.current_read_[bi] if self.to_reset_[bi] == 1: self.to_reset_[bi] = 0 resets[bi] = 0. 
# get a new sample while True: self.current_indices_[bi] = self.random_state.choice(self.all_indices_) self.current_offset_[bi] = 0 try: self.current_read_[bi] = self.cache_read_wav_and_txt_features(self.wavfile_list[self.current_indices_[bi]], self.txtfile_list[self.current_indices_[bi]]) except: logger.info("FILE / TEXT READ ERROR {}:{}".format(self.wavfile_list[self.current_indices_[bi]], self.txtfile_list[self.current_indices_[bi]])) try: self.current_read_[bi] = self.cache_read_wav_and_txt_features(self.wavfile_list[self.current_indices_[bi]], self.txtfile_list[self.current_indices_[bi]], force_refresh=True) logger.info("CORRECTED FILE / TEXT READ ERROR VIA CACHE REFRESH") except: logger.info("STILL FILE / TEXT READ ERROR AFTER REFRESH {}:{}".format(self.wavfile_list[self.current_indices_[bi]], self.txtfile_list[self.current_indices_[bi]])) continue wf, txf, mf = self.current_read_[bi] if len(wf) > self.truncation_length: break trunc = self.current_offset_[bi] + self.truncation_length if trunc >= len(wf): self.to_reset_[bi] = 1 wf_sub = wf[self.current_offset_[bi]:trunc] self.current_offset_[bi] = trunc mel_batch[:len(wf_sub), bi] = wf_sub texts.append(txf) masks.append(mf) if self.symbol_processing == "both": tlen = max([len(t) for t in texts]) tlen2 = max([len(t) for t in texts]) text_batch = np.zeros((tlen, self.batch_size, 1)) text_batch2 = np.zeros((tlen2, self.batch_size, 1)) text_lengths = [] text_lengths2 = [] for bi in range(len(texts)): txt = texts[bi] # masks are overloaded to be phones / other text repr txt2 = masks[bi] text_lengths.append(len(txt)) text_lengths2.append(len(txt2)) text_batch[:len(txt), bi, 0] = txt text_batch2[:len(txt2), bi, 0] = txt2 return mel_batch, text_batch, text_batch2, text_lengths, text_lengths2, resets else: mlen = max([len(t) for t in texts]) text_batch = np.zeros((mlen, self.batch_size, 1)) type_mask_batch = np.zeros((mlen, self.batch_size, 1)) text_lengths = [] for bi in range(len(texts)): txt = texts[bi] mask = masks[bi] text_lengths.append(len(txt)) text_batch[:len(txt), bi, 0] = txt type_mask_batch[:len(mask), bi, 0] = mask return mel_batch, text_batch, type_mask_batch, text_lengths, resets def next_masked_batch(self): if self.symbol_processing == "both": m, t, t2, tl, tl2, r = self.next_batch() m_mask = np.ones_like(m[..., 0]) # not ideal, in theory could also hit on 0 mels but we aren't using this for now # should find contiguous chunk starting from the end m_mask[np.sum(m, axis=-1) == 0] = 0. t_mask = np.zeros_like(t[..., 0]) t2_mask = np.zeros_like(t2[..., 0]) # was [:tli], making mask of all 1s... for mbi, tli in enumerate(tl): t_mask[:tli, mbi] = 1. for mbi, tli in enumerate(tl2): t2_mask[:tli, mbi] = 1. return m, m_mask, t, t_mask, t2, t2_mask, r else: m, t, ma, tl, r = self.next_batch() m_mask = np.ones_like(m[..., 0]) # not ideal, in theory could also hit on 0 mels but we aren't using this for now # should find contiguous chunk starting from the end m_mask[np.sum(m, axis=-1) == 0] = 0. t_mask = np.zeros_like(t[..., 0]) ma_mask = np.zeros_like(ma[..., 0]) # was [:tli], making mask of all 1s... for mbi, tli in enumerate(tl): t_mask[:tli, mbi] = 1. ma_mask[:tli, mbi] = 1. 
return m, m_mask, t, t_mask, ma, ma_mask, r def cache_calculate_mean_and_std_normalization(self, n_estimate=1000): normpath = self._fpathmaker("norm-mean-std") if not os.path.exists(normpath): logger.info("Calculating normalization per-dim mean and std") for i in range(n_estimate): if (i % 10) == 0: logger.info("Normalization batch {} of {}".format(i, n_estimate)) m, m_mask, t, t_mask, ma, ma_mask, r = self.next_masked_batch() m = m[m_mask > 0] m = m.reshape(-1, m.shape[-1]) if i == 0: normalization_mean = np.mean(m, axis=0) normalization_std = np.std(m, axis=0) normalization_count = len(m) else: nmean, nstd, ncount = _incremental_mean_and_var( m, normalization_mean, normalization_std, normalization_count) normalization_mean = nmean normalization_std = nstd normalization_count = ncount d = {} d["mean"] = normalization_mean d["std"] = normalization_std d["count"] = normalization_count np.savez(normpath, **d) norms = np.load(normpath) mean = norms["mean"] std = norms["std"] norms.close() return mean, std def calculate_log_mel_features(self, sample_rate, waveform, window_size, window_step, lower_edge_hertz, upper_edge_hertz, n_mel_filters): res = np.abs(stft(waveform, windowsize=window_size, step=window_step, real=False, compute_onesided=True)) mels = linear_to_mel_weight_matrix( res.shape[1], sample_rate, lower_edge_hertz=lower_edge_hertz, upper_edge_hertz=min(float(sample_rate) // 2, upper_edge_hertz), n_filts=n_mel_filters, dtype=np.float64) mel_res = np.dot(res, mels) log_mel_res = np.log1p(mel_res) return log_mel_res def _fpathmaker(self, fname): melpart = "-logmel-wsz{}-wst{}-leh{}-ueh{}-nmel{}.npz".format(self.window_size, self.window_step, int(self.lower_edge_hertz), int(self.upper_edge_hertz), self.n_mel_filters) if self.txtfile_list is not None: txtpart = "-txt-clean{}".format(str("".join(self.clean_names))) npzpath = self.cache + os.sep + fname + txtpart + melpart else: npzpath = self.cache + os.sep + fname + melpart return npzpath def cache_read_wav_and_txt_features(self, wavpath, txtpath, force_refresh=False): wavfeats, npzfile, npzpath = self.cache_read_wav_features(wavpath, return_npz=True, force_refresh=force_refresh) txtfeats, txtmask = self.cache_read_txt_features(txtpath, npzfile=npzfile, npzpath=npzpath, force_refresh=force_refresh) npzfile.close() return wavfeats, txtfeats, txtmask def cache_read_wav_features(self, wavpath, return_npz=False, force_refresh=False): fname = ".".join(wavpath.split(os.sep)[-1].split(".")[:-1]) npzpath = self._fpathmaker(fname) if force_refresh or not os.path.exists(npzpath): sr, d = wavfile.read(wavpath) d = d.astype("float64") d = d / float(self.wav_scale) log_mels = self.calculate_log_mel_features(sr, d, self.window_size, self.window_step, self.lower_edge_hertz, self.upper_edge_hertz, self.n_mel_filters) np.savez(npzpath, wavpath=wavpath, sample_rate=sr, log_mels=log_mels) npzfile = np.load(npzpath) log_mels = npzfile["log_mels"] if self.return_normalized is True: log_mels = (log_mels - self._mean) / self._std if return_npz: return log_mels, npzfile, npzpath else: return log_mels def cache_read_txt_features(self, txtpath, npzfile=None, npzpath=None, force_refresh=False): if npzfile is None or "word_list" not in npzfile: if not txtpath.endswith(".json"): raise ValueError("Expected .json file, path given was {}".format(txtpath)) with open(txtpath, "rb") as f: tj = json.load(f) # loaded json, now we need info char_txt = tj["transcript"] char_txt = char_txt.replace(u"\u2018", "'").replace(u"\u2019", "'") char_txt = char_txt.replace("-", " ") 
        char_txt = char_txt.encode("ascii", "replace")
        try:
            clean_char_txt = cleaners.english_cleaners(char_txt)
        except:
            print("unicode devil in cache read txt features")
            from IPython import embed; embed(); raise ValueError()

        clean_char_txt_split = clean_char_txt.split(" ")
        # need to get all the words and their paired phones, but also re-inject punctuations not found after cleaning... oy
        # triplets of transcript word, aligned word, and tuple of phones
        amalgam = []
        int_clean_char_chunks = []
        int_clean_phone_chunks = []
        # offset to handle edge case with "uh/ah" recognition
        offset = 0
        for i in range(len(tj["words"])):
            if i + offset >= len(clean_char_txt_split):
                # edge case for 'uh' at the end of sentence
                break
            this_word = tj["words"][i]
            this_base = this_word["word"]
            if this_word["case"] == "not-found-in-transcript":
                # we skip this...
                offset -= 1
                continue
            if "alignedWord" in this_word:
                this_align = this_word["alignedWord"]
            elif this_word["case"] == "not-found-in-transcript":
                this_align = this_base
            elif this_word["case"] == "not-found-in-audio":
                # if its not in the audio skip it
                continue
            else:
                print("new case in cache read txt features")
                from IPython import embed; embed(); raise ValueError()
            try:
                this_join_chars = str(clean_char_txt_split[i + offset])
            except:
                print("another except in cache read txt features")
                from IPython import embed; embed(); raise ValueError()
            int_clean_char_chunks.append(text_to_sequence(this_join_chars, [self.clean_names[0]])[:-1])
            if "phones" in this_word:
                this_phones = this_word["phones"]
                hack_phones = [tp.split("_")[0] for tp in [_["phone"] for _ in this_phones]]
                # add leading @
                this_join_phones = "@" + "@".join(hack_phones)
                specials = "!?.,;:"
                if this_join_chars[-1] in specials:
                    this_join_phones += this_join_chars[-1]
                int_clean_phone_chunks.append(text_to_sequence(this_join_phones, [self.clean_names[1]])[:-2])
            else:
                this_join_phones = [None]
                this_phones = [None]
                int_clean_phone_chunks.append([None])
            amalgam.append((this_base, this_align, this_join_chars, this_join_phones, this_phones))
        # check inversion is OK
        #print(sequence_to_text(int_clean_char_chunks[i], [self.clean_names[0]]))
        #print(sequence_to_text(int_clean_phone_chunks[i], [self.clean_names[1]]))
        #aa = [sequence_to_text(int_clean_char_chunks[i], [self.clean_names[0]]) for i in range(len(int_clean_char_chunks))]
        #cc = [sequence_to_text(int_clean_phone_chunks[i], [self.clean_names[1]]) for i in range(len(int_clean_phone_chunks))]
        #bb = [a[2] for a in amalgam]
        #dd = [a[3] for a in amalgam]
        # check inversion is OK
        #assert(aa == bb)
        #assert(cc == dd)
        word_list_invert = [sequence_to_text(int_clean_char_chunks[i], [self.clean_names[0]]) for i in range(len(int_clean_char_chunks))]
        phone_list_invert = [sequence_to_text(int_clean_phone_chunks[i], [self.clean_names[1]]) for i in range(len(int_clean_phone_chunks))]
        word_list = [a[2] for a in amalgam]
        phone_list = [a[3] for a in amalgam]
        # TODO: put em all in the npz, then figure out how / what to do on load...
        if force_refresh or (npzfile is not None and "word_list" not in npzfile):
            d = {k: v for k, v in npzfile.items()}
            npzfile.close()
            d["transcript"] = char_txt
            d["clean_transcript"] = clean_char_txt
            d["word_list"] = word_list
            d["word_list_invert"] = word_list_invert
            d["phone_list"] = phone_list
            d["phone_list_invert"] = phone_list_invert
            d["int_phone_chunks"] = int_clean_phone_chunks
            d["int_char_chunks"] = int_clean_char_chunks
            d["cleaners"] = "+".join(self.clean_names)
            np.savez(npzpath, **d)
            npzfile = np.load(npzpath)
        int_char_chunks = [list(c) for c in npzfile["int_char_chunks"]]
        int_phone_chunks = [list(p) for p in npzfile["int_phone_chunks"]]
        if len(int_char_chunks) != len(int_phone_chunks):
            # will need to handle edge case of no valid phones here...
            print("handle the char / phone different length edge case here cache read txt features")
            from IPython import embed; embed(); raise ValueError()
        else:
            if self.symbol_processing == "both":
                spc = text_to_sequence(" ", [self.clean_names[0]])[0]
                spc2 = text_to_sequence(" ", [self.clean_names[1]])[0]
                first_char = int_char_chunks[0]
                first_phones = int_phone_chunks[0]
                for ii in range(len(int_char_chunks) - 1):
                    first_char += [spc]
                    first_char += int_char_chunks[ii + 1]
                for ii in range(len(int_phone_chunks) - 1):
                    first_phones += [spc2]
                    first_phones += int_phone_chunks[ii + 1]
                return first_char, first_phones
            #w = [sequence_to_text(int_char_chunks[i], [self.clean_names[0]]) for i in range(len(int_char_chunks))]
            #p = [sequence_to_text(int_phone_chunks[i], [self.clean_names[1]]) for i in range(len(int_phone_chunks))]
            # 50/50 split right now for blended, allow this balance to be set manually?
            char_phone_mask = [0] * len(int_char_chunks) + [1] * len(int_phone_chunks)
            self.random_state.shuffle(char_phone_mask)
            char_phone_mask = char_phone_mask[:len(int_char_chunks)]
            # setting char_phone_mask to 0 will use chars, 1 will use phones
            # these if statements override the default for blended... (above)
            if self.symbol_processing == "blended_pref":
                char_phone_mask = [0 if len(int_phone_chunks[i]) == 0 else 1 for i in range(len(int_char_chunks))]
            elif self.symbol_processing == "phones_only":
                # set the mask to use only phones
                # all files should have phones because of earlier preproc...
                char_phone_mask = [1 for i in range(len(char_phone_mask))]
            elif self.symbol_processing == "chars_only":
                # only use chars
                char_phone_mask = [0 for i in range(len(char_phone_mask))]
            # if the phones entry is None, the word was OOV or not recognized
            char_phone_int_seq = [int_char_chunks[i] if (len(int_phone_chunks[i]) == 0 or char_phone_mask[i] == 0) else int_phone_chunks[i] for i in range(len(int_char_chunks))]
            # check the inverse is ok
            #char_phone_txt = [sequence_to_text(char_phone_int_seq[i], [self.clean_names[char_phone_mask[i]]]) for i in range(len(char_phone_int_seq))]
            # combine into 1 sequence
            cphi = char_phone_int_seq[0]
            cpm = [char_phone_mask[0]] * len(char_phone_int_seq[0])
            if self.symbol_processing != "phones_only":
                spc = text_to_sequence(" ", [self.clean_names[0]])[0]
            else:
                spc = text_to_sequence(" ", [self.clean_names[1]])[0]
            for i in range(len(char_phone_int_seq[1:])):
                # add space
                cphi += [spc]
                # always treat space as char unless in phones only mode
                if self.symbol_processing != "phones_only":
                    cpm += [0]
                else:
                    cpm += [1]
                cphi += char_phone_int_seq[i + 1]
                cpm += [char_phone_mask[i + 1]] * len(char_phone_int_seq[i + 1])
            # check inverse
            #cpt = "".join([sequence_to_text([cphi[i]], [self.clean_names[cpm[i]]]) for i in range(len(cphi))])
            return cphi, cpm

    def transform_txt(self, char_seq, auto_pronounce=True, phone_seq=None, force_char_spc=True):
        """
        chars format example: "i am learning english."
        phone_seq format example: "@ay @ae@m @l@er@n@ih@ng @ih@ng@g@l@ih@sh"

        phone_seq formatting can be gotten from text, using the pronounce_chars function
        with 'from tfbldr.datasets.text import pronounce_chars'

        Uses cmudict to do pronunciation
        """
        if phone_seq is None and auto_pronounce is False and self.symbol_processing != "chars_only":
            raise ValueError("phone_seq argument must be provided for iterator with self.symbol_processing != 'chars_only', currently '{}'".format(self.symbol_processing))
        clean_char_seq = cleaners.english_cleaners(char_seq)
        char_seq_chunk = clean_char_seq.split(" ")
        dirty_seq_chunk = char_seq.split(" ")
        if auto_pronounce is True:
            if phone_seq is not None:
                raise ValueError("auto_pronounce set to True, but phone_seq was provided! Pass phone_seq=None for auto_pronounce=True")
            # take out specials then put them back...
            specials = "!?.,;:"
            puncts = "!?."
            tsc = []
            for n, csc in enumerate(char_seq_chunk):
                broke = False
                for s in specials:
                    if s in csc:
                        new = csc.replace(s, "")
                        tsc.append(new)
                        broke = True
                        break
                if not broke:
                    tsc.append(csc)
            if self.symbol_processing == "blended_pref":
                chunky_phone_seq_chunk = [pronounce_chars(w, raw_line=dirty_seq_chunk[ii], cmu_only=True) for ii, w in enumerate(tsc)]
                phone_seq_chunk = [cpsc[0] if cpsc != None else None for cpsc in chunky_phone_seq_chunk]
            else:
                phone_seq_chunk = [pronounce_chars(w) for w in tsc]
            for n, psc in enumerate(phone_seq_chunk):
                for s in specials:
                    if char_seq_chunk[n][-1] == s and phone_seq_chunk[n] != None:
                        phone_seq_chunk[n] += char_seq_chunk[n][-1]
                        #if char_seq_chunk[n][-1] in puncts and n != (len(phone_seq_chunk) - 1):
                        #    # add eos
                        #    char_seq_chunk[n] += "~"
                        #    phone_seq_chunk[n] += "~"
                        break
        else:
            raise ValueError("Non auto_pronounce setting not yet configured")

        if len(char_seq_chunk) != len(phone_seq_chunk):
            raise ValueError("Char and phone chunking resulted in different lengths {} and {}!\n{}\n{}".format(len(char_seq_chunk), len(phone_seq_chunk), char_seq_chunk, phone_seq_chunk))

        if self.symbol_processing != "phones_only":
            spc = text_to_sequence(" ", [self.clean_names[0]])[0]
        else:
            spc = text_to_sequence(" ", [self.clean_names[1]])[0]
        int_char_chunks = []
        int_phone_chunks = []
        for n in range(len(char_seq_chunk)):
            int_char_chunks.append(text_to_sequence(char_seq_chunk[n], [self.clean_names[0]])[:-1])
            if phone_seq_chunk[n] == None:
                int_phone_chunks.append([])
            else:
                int_phone_chunks.append(text_to_sequence(phone_seq_chunk[n], [self.clean_names[1]])[:-2])
        # check inverses
        # w = [sequence_to_text(int_char_chunks[i], [self.clean_names[0]]) for i in range(len(int_char_chunks))]
        # p = [sequence_to_text(int_phone_chunks[i], [self.clean_names[1]]) for i in range(len(int_phone_chunks))]
        # TODO: Unify the two functions?
        char_phone_mask = [0] * len(int_char_chunks) + [1] * len(int_phone_chunks)
        self.random_state.shuffle(char_phone_mask)
        char_phone_mask = char_phone_mask[:len(int_char_chunks)]
        # setting char_phone_mask to 0 will use chars, 1 will use phones
        # these if statements override the default for blended... (above)
        if self.symbol_processing == "blended_pref":
            char_phone_mask = [0 if len(int_phone_chunks[i]) == 0 else 1 for i in range(len(int_char_chunks))]
        elif self.symbol_processing == "phones_only":
            # set the mask to use only phones
            # all files should have phones because of earlier preproc...
            char_phone_mask = [1 for i in range(len(char_phone_mask))]
        elif self.symbol_processing == "chars_only":
            # only use chars
            char_phone_mask = [0 for i in range(len(char_phone_mask))]
        # if the phones entry is None, the word was OOV or not recognized
        char_phone_int_seq = [int_char_chunks[i] if (len(int_phone_chunks[i]) == 0 or char_phone_mask[i] == 0) else int_phone_chunks[i] for i in range(len(int_char_chunks))]
        # check the inverse is ok
        # char_phone_txt = [sequence_to_text(char_phone_int_seq[i], [self.clean_names[char_phone_mask[i]]]) for i in range(len(char_phone_int_seq))]
        # combine into 1 sequence
        cphi = char_phone_int_seq[0]
        cpm = [char_phone_mask[0]] * len(char_phone_int_seq[0])
        if force_char_spc or self.symbol_processing != "phones_only":
            spc = text_to_sequence(" ", [self.clean_names[0]])[0]
        else:
            spc = text_to_sequence(" ", [self.clean_names[1]])[0]
        for i in range(len(char_phone_int_seq[1:])):
            # add space
            cphi += [spc]
            # always treat space as char unless in phones only mode
            if force_char_spc or self.symbol_processing != "phones_only":
                cpm += [0]
            else:
                cpm += [1]
            cphi += char_phone_int_seq[i + 1]
            cpm += [char_phone_mask[i + 1]] * len(char_phone_int_seq[i + 1])
        # trailing space
        #cphi = cphi + [spc]
        # trailing eos
        cphi = cphi + [1]
        # add trailing symbol
        if self.symbol_processing != "phones_only":
            cpm += [0]
        else:
            cpm += [1]
        # check inverse
        #cpt = "".join([sequence_to_text([cphi[i]], [self.clean_names[cpm[i]]]) for i in range(len(cphi))])
        #if None in phone_seq_chunk:
        #    print("NUN")
        #    print(cpt)
        #    from IPython import embed; embed(); raise ValueError()
        return cphi, cpm

    def inverse_transform_txt(self, int_seq, mask):
        """
        mask set to zero will use chars, mask set to 1 will use phones
        should invert the transform_txt function
        """
        cphi = int_seq
        cpm = mask
        cpt = "".join([sequence_to_text([cphi[i]], [self.clean_names[cpm[i]]]) for i in range(len(cphi))])
        return cpt

    # setting char_phone_mask to 0 will use chars, 1 will use phones


class old_wavfile_caching_mel_tbptt_iterator(object):
    def __init__(self, wavfile_list, txtfile_list,
                 batch_size, truncation_length, clean_names,
                 wav_scale=2 ** 15,
                 window_size=512, window_step=128,
                 n_mel_filters=80,
                 return_normalized=True,
                 lower_edge_hertz=125.0, upper_edge_hertz=7800.0,
                 start_index=0, stop_index=None,
                 cache_dir_base="/Tmp/kastner/tfbldr_cache",
                 shuffle=False, random_state=None):
        self.wavfile_list = wavfile_list
        self.wav_scale = wav_scale
        self.txtfile_list = txtfile_list
        self.batch_size = batch_size
        self.truncation_length = truncation_length
        self.random_state = random_state
        self.shuffle = shuffle
        self.cache_dir_base = cache_dir_base
        self.return_normalized = return_normalized
        self.lower_edge_hertz = lower_edge_hertz
        self.upper_edge_hertz = upper_edge_hertz
        self.clean_names = clean_names
        self.vocabulary_size = get_vocabulary_size(clean_names)
        self.window_size = window_size
        self.window_step = window_step
        self.n_mel_filters = n_mel_filters
        self.start_index = start_index
        self.stop_index = stop_index
        if shuffle and self.random_state == None:
            raise ValueError("Must pass random_state in")

        if txtfile_list is not None:
            # try to match every txt file and every wav file by name
            wv_names_and_bases = sorted([(wv.split(os.sep)[-1], str(os.sep).join(wv.split(os.sep)[:-1])) for wv in self.wavfile_list])
            tx_names_and_bases = sorted([(tx.split(os.sep)[-1], str(os.sep).join(tx.split(os.sep)[:-1])) for tx in self.txtfile_list])
            wv_i = 0
            tx_i = 0
            wv_match = []
            tx_match = []
            wv_lu = {}
            tx_lu = {}
            for txnb in tx_names_and_bases:
                if "." in txnb[0]:
                    tx_part = ".".join(txnb[0].split(".")[:1])
                else:
                    # support txt files with no ext
                    tx_part = txnb[0]
                tx_lu[tx_part] = txnb[1] + os.sep + txnb[0]
            for wvnb in wv_names_and_bases:
                wv_part = ".".join(wvnb[0].split(".")[:1])
                wv_lu[wv_part] = wvnb[1] + os.sep + wvnb[0]
            # set of in common keys
            shared_k = sorted([k for k in wv_lu.keys() if k in tx_lu])
            for k in shared_k:
                wv_match.append(wv_lu[k])
                tx_match.append(tx_lu[k])
            self.wavfile_list = wv_match
            self.txtfile_list = tx_match

        self.cache = self.cache_dir_base + os.sep + "-".join(self.wavfile_list[0].split(os.sep)[1:-1])
        if not os.path.exists(self.cache):
            os.makedirs(self.cache)

        if 0 < self.start_index < 1:
            self.start_index = int(len(self.wavfile_list) * self.start_index)
        elif self.start_index >= 1:
            self.start_index = int(self.start_index)
            if self.start_index >= len(self.wavfile_list):
                raise ValueError("start_index {} >= length of wavfile list {}".format(self.start_index, len(self.wavfile_list)))
        elif self.start_index == 0:
            self.start_index = int(self.start_index)
        else:
            raise ValueError("Invalid value for start_index : {}".format(self.start_index))

        if self.stop_index == None:
            self.stop_index = len(self.wavfile_list)
        elif 0 < self.stop_index < 1:
            self.stop_index = int(len(self.wavfile_list) * self.stop_index)
        elif self.stop_index >= 1:
            self.stop_index = int(self.stop_index)
            if self.stop_index >= len(self.wavfile_list):
                raise ValueError("stop_index {} >= length of wavfile list {}".format(self.stop_index, len(self.wavfile_list)))
        else:
            raise ValueError("Invalid value for stop_index : {}".format(self.stop_index))

        # could match sizes here...
        self.wavfile_sizes_mbytes = [os.stat(wf).st_size // 1024 for wf in self.wavfile_list]

        if return_normalized:
            self.return_normalized = False
            # reset random seed here
            cur_random = self.random_state.get_state()
            # set up for train / test splits
            self.all_indices_ = np.arange(len(self.wavfile_list))
            self.random_state.shuffle(self.all_indices_)
            self.all_indices_ = sorted(self.all_indices_[self.start_index:self.stop_index])
            self.current_indices_ = [self.random_state.choice(self.all_indices_) for i in range(self.batch_size)]
            self.current_offset_ = [0] * self.batch_size
            self.current_read_ = [self.cache_read_wav_and_txt_features(self.wavfile_list[i], self.txtfile_list[i]) for i in self.current_indices_]
            self.to_reset_ = [0] * self.batch_size

            mean, std = self.cache_calculate_mean_and_std_normalization()
            self._mean = mean
            self._std = std

            self.random_state = np.random.RandomState()
            self.random_state.set_state(cur_random)
            self.return_normalized = True

        # set up for train / test splits
        self.all_indices_ = np.arange(len(self.wavfile_list))
        self.random_state.shuffle(self.all_indices_)
        self.all_indices_ = sorted(self.all_indices_[self.start_index:self.stop_index])
        self.current_indices_ = [self.random_state.choice(self.all_indices_) for i in range(self.batch_size)]
        self.current_offset_ = [0] * self.batch_size
        self.current_read_ = [self.cache_read_wav_and_txt_features(self.wavfile_list[i], self.txtfile_list[i]) for i in self.current_indices_]
        self.to_reset_ = [0] * self.batch_size

    def next_batch(self):
        mel_batch = np.zeros((self.truncation_length, self.batch_size, self.n_mel_filters))
        resets = np.ones((self.batch_size, 1))
        texts = []
        for bi in range(self.batch_size):
            wf, txf = self.current_read_[bi]
            if self.to_reset_[bi] == 1:
                self.to_reset_[bi] = 0
                resets[bi] = 0.
                # get a new sample
                while True:
                    self.current_indices_[bi] = self.random_state.choice(self.all_indices_)
                    self.current_offset_[bi] = 0
                    try:
                        self.current_read_[bi] = self.cache_read_wav_and_txt_features(self.wavfile_list[self.current_indices_[bi]], self.txtfile_list[self.current_indices_[bi]])
                    except:
                        logger.info("FILE / TEXT READ ERROR {}:{}".format(self.wavfile_list[self.current_indices_[bi]], self.txtfile_list[self.current_indices_[bi]]))
                        try:
                            self.current_read_[bi] = self.cache_read_wav_and_txt_features(self.wavfile_list[self.current_indices_[bi]], self.txtfile_list[self.current_indices_[bi]], force_refresh=True)
                            logger.info("CORRECTED FILE / TEXT READ ERROR VIA CACHE REFRESH")
                        except:
                            logger.info("STILL FILE / TEXT READ ERROR AFTER REFRESH {}:{}".format(self.wavfile_list[self.current_indices_[bi]], self.txtfile_list[self.current_indices_[bi]]))
                            continue
                    wf, txf = self.current_read_[bi]
                    if len(wf) > self.truncation_length:
                        break
            trunc = self.current_offset_[bi] + self.truncation_length
            if trunc >= len(wf):
                self.to_reset_[bi] = 1
            wf_sub = wf[self.current_offset_[bi]:trunc]
            self.current_offset_[bi] = trunc
            mel_batch[:len(wf_sub), bi] = wf_sub
            texts.append(txf)
        mlen = max([len(t) for t in texts])
        text_batch = np.zeros((mlen, self.batch_size, 1))
        for bi, txt in enumerate(texts):
            text_batch[:len(txt), bi, 0] = txt
        return mel_batch, text_batch, resets

    def next_masked_batch(self):
        m, t, r = self.next_batch()
        m_mask = np.ones_like(m[..., 0])
        # not ideal, in theory could also hit on 0 mels but we aren't using this for now
        # should find contiguous chunk starting from the end
        m_mask[np.sum(m, axis=-1) == 0] = 0.
        t_mask = np.zeros_like(t[..., 0])
        t_mask[t[..., 0] > 0] = 1.
        return m, m_mask, t, t_mask, r

    def cache_calculate_mean_and_std_normalization(self, n_estimate=1000):
        normpath = self._fpathmaker("norm-mean-std")
        if not os.path.exists(normpath):
            logger.info("Calculating normalization per-dim mean and std")
            for i in range(n_estimate):
                if (i % 10) == 0:
                    logger.info("Normalization batch {} of {}".format(i, n_estimate))
                m, m_mask, t, t_mask, r = self.next_masked_batch()
                m = m[m_mask > 0]
                m = m.reshape(-1, m.shape[-1])
                if i == 0:
                    normalization_mean = np.mean(m, axis=0)
                    normalization_std = np.std(m, axis=0)
                    normalization_count = len(m)
                else:
                    nmean, nstd, ncount = _incremental_mean_and_var(
                        m, normalization_mean, normalization_std, normalization_count)
                    normalization_mean = nmean
                    normalization_std = nstd
                    normalization_count = ncount
            d = {}
            d["mean"] = normalization_mean
            d["std"] = normalization_std
            d["count"] = normalization_count
            np.savez(normpath, **d)
        norms = np.load(normpath)
        mean = norms["mean"]
        std = norms["std"]
        norms.close()
        return mean, std

    def calculate_log_mel_features(self, sample_rate, waveform, window_size, window_step, lower_edge_hertz, upper_edge_hertz, n_mel_filters):
        res = np.abs(stft(waveform, windowsize=window_size, step=window_step, real=False, compute_onesided=True))
        mels = linear_to_mel_weight_matrix(
            res.shape[1],
            sample_rate,
            lower_edge_hertz=lower_edge_hertz,
            upper_edge_hertz=min(float(sample_rate) // 2, upper_edge_hertz),
            n_filts=n_mel_filters, dtype=np.float64)
        mel_res = np.dot(res, mels)
        log_mel_res = np.log1p(mel_res)
        return log_mel_res

    def _fpathmaker(self, fname):
        melpart = "-logmel-wsz{}-wst{}-leh{}-ueh{}-nmel{}.npz".format(self.window_size, self.window_step, int(self.lower_edge_hertz), int(self.upper_edge_hertz), self.n_mel_filters)
        if self.txtfile_list is not None:
            txtpart = "-txt-clean{}".format(str("".join(self.clean_names)))
            npzpath = self.cache + os.sep + fname + txtpart + melpart
        else:
            npzpath = self.cache + os.sep + fname + melpart
        return npzpath

    def cache_read_wav_features(self, wavpath, return_npz=False, force_refresh=False):
        fname = ".".join(wavpath.split(os.sep)[-1].split(".")[:-1])
        npzpath = self._fpathmaker(fname)
        if force_refresh or not os.path.exists(npzpath):
            sr, d = wavfile.read(wavpath)
            d = d.astype("float64")
            d = d / float(self.wav_scale)
            log_mels = self.calculate_log_mel_features(sr, d, self.window_size, self.window_step, self.lower_edge_hertz, self.upper_edge_hertz, self.n_mel_filters)
            np.savez(npzpath, wavpath=wavpath, sample_rate=sr, log_mels=log_mels)
        npzfile = np.load(npzpath)
        log_mels = npzfile["log_mels"]
        if self.return_normalized is True:
            log_mels = (log_mels - self._mean) / self._std
        if return_npz:
            return log_mels, npzfile, npzpath
        else:
            return log_mels

    def transform_txt(self, line, txt_line=None, timing_sym_list=None):
        if txt_line == None and timing_sym_list == None:
            int_txt = text_to_sequence(line, self.clean_names)
        elif timing_sym_list == None:
            clean_orig_chunks = txt_line.split(" ")
            raw_chunks = line.split(" ")
            if len(raw_chunks) == len(clean_orig_chunks):
                mutated = raw_chunks
                for chunk_i in range(len(mutated)):
                    for special in "!,:?":
                        if special in clean_orig_chunks[chunk_i]:
                            if clean_orig_chunks[chunk_i][0] == special:
                                mutated[chunk_i] = special + mutated[chunk_i]
                            elif clean_orig_chunks[chunk_i][-1] == special:
                                mutated[chunk_i] = mutated[chunk_i] + special
                            #if it's in the middle we don't really know what to do... skip it
                res_txt = " ".join(mutated)
            else:
                res_txt = line
            int_txt = text_to_sequence(res_txt, self.clean_names)
        else:
            clean_orig_chunks = txt_line.split(" ")
            raw_chunks = line.split(" ")
            if len(raw_chunks) == len(clean_orig_chunks) and len(raw_chunks) == (len(timing_sym_list) - 1):
                mutated = raw_chunks
                for chunk_i in range(len(mutated)):
                    for special in ["1", "2", "3", "4"]:
                        if special in clean_orig_chunks[chunk_i]:
                            if clean_orig_chunks[chunk_i][0] == special:
                                mutated[chunk_i] = special + mutated[chunk_i]
                            elif clean_orig_chunks[chunk_i][-1] == special:
                                mutated[chunk_i] = mutated[chunk_i] + special
                            #if it's in the middle we don't really know what to do... skip it
                res_txt = []
                res_txt.append(timing_sym_list[0])
                res_txt += [a.strip() + b for a, b in zip(mutated, timing_sym_list[1:])]
                res_txt = "".join(res_txt)
                #int_txt = text_to_sequence(res_txt, self.clean_names)
                #rr = sequence_to_text(int_txt, self.clean_names)
            else:
                res_txt = line
            int_txt = text_to_sequence(res_txt, self.clean_names)
        return int_txt

    def inverse_transform_txt(self, int_line):
        clean_txt = sequence_to_text(int_line, self.clean_names)
        return clean_txt

    def cache_read_txt_features(self, txtpath, npzfile=None, npzpath=None, force_refresh=False):
        if npzfile is None or "raw_txt" not in npzfile:
            with open(txtpath, "rb") as f:
                lines = f.readlines()
            raw_txt = lines[0]
            # insert commas, semicolons, punctuation, etc from original transcript...
            if "english_phone_cleaners" in self.clean_names:
                if len(lines) < 2:
                    raise ValueError("Original text not commented on second line, necessary for phone transcript")
                # skip '# '
                orig_txt = lines[1][2:]
                clean_orig_txt = cleaners.english_cleaners(orig_txt)
                int_txt = self.transform_txt(raw_txt, clean_orig_txt)
            elif "english_phone_pause_cleaners" in self.clean_names:
                if len(lines) < 3:
                    raise ValueError("Original text not commented on second line, timing double not commented on third line, necessary for phone pause with transcript")
                timings = np.array([float(si) for si in lines[2][3:].split(" ")])
                # centers gotten from preprocessing code
                # 0.00 / 0.01 / 0.02 / 0.08 / 0.25
                timing_centers = np.array([0.00, 0.01, 0.02, 0.08, 0.25])
                timing_symbols = np.array([" ", "1", "2", "3", "4"])
                center_indices = np.argmin(np.abs(timings - timing_centers[:, None]), axis=0)
                timings_quantized = timing_centers[center_indices]
                symbols_quantized = [str(ts) for ts in timing_symbols[center_indices]]
                orig_txt = lines[1][2:]
                clean_orig_txt = cleaners.english_cleaners(orig_txt)
                int_txt = self.transform_txt(raw_txt, clean_orig_txt, symbols_quantized)
            else:
                int_txt = text_to_sequence(raw_txt, self.clean_names)
            clean_txt = sequence_to_text(int_txt, self.clean_names)
        if force_refresh or (npzfile is not None and "raw_txt" not in npzfile):
            d = {k: v for k, v in npzfile.items()}
            npzfile.close()
            d["raw_txt"] = raw_txt
            d["clean_txt"] = clean_txt
            d["int_txt"] = int_txt
            d["cleaners"] = "+".join(self.clean_names)
            np.savez(npzpath, **d)
            npzfile = np.load(npzpath)
        int_txt = npzfile["int_txt"]
        return int_txt

    def cache_read_wav_and_txt_features(self, wavpath, txtpath, force_refresh=False):
        wavfeats, npzfile, npzpath = self.cache_read_wav_features(wavpath, return_npz=True, force_refresh=force_refresh)
        txtfeats = self.cache_read_txt_features(txtpath, npzfile=npzfile, npzpath=npzpath, force_refresh=force_refresh)
        npzfile.close()
        return wavfeats, txtfeats
49.804029
201
0.580572
7,055
54,386
4.20978
0.085188
0.018519
0.020269
0.011111
0.753098
0.725084
0.694646
0.679798
0.668182
0.654276
0
0.011389
0.318722
54,386
1,091
202
49.849679
0.790187
0.122219
0
0.704626
0
0.001186
0.060203
0.006242
0.001186
0
0
0.001833
0
1
0.027284
false
0.007117
0.023725
0
0.084223
0.007117
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
365c7727441f3ce45c6c2000a56516dcbb47ecda
411
py
Python
test.py
kumarsibi/pystache
0fe2c72ce73fcae8962c975336a90f3ae172bead
[ "MIT" ]
null
null
null
test.py
kumarsibi/pystache
0fe2c72ce73fcae8962c975336a90f3ae172bead
[ "MIT" ]
null
null
null
test.py
kumarsibi/pystache
0fe2c72ce73fcae8962c975336a90f3ae172bead
[ "MIT" ]
1
2020-11-07T18:27:14.000Z
2020-11-07T18:27:14.000Z
#!/usr/bin/env python
# coding: utf-8

import sys
import os

sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))

import unittest
import xmlrunner


def runner(output='python_tests_xml'):
    return xmlrunner.XMLTestRunner(
        output=output
    )


def find_tests():
    return unittest.TestLoader().discover('pystache')


if __name__ == '__main__':
    runner().run(find_tests())
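A small sketch (not part of the repo above) of the same runner pattern on a single TestCase: XMLTestRunner writes a JUnit-style XML report per test run into the given directory, which is why this style of script is common in CI setups.

import unittest
import xmlrunner

class SmokeTest(unittest.TestCase):
    def test_truth(self):
        self.assertTrue(True)

if __name__ == "__main__":
    suite = unittest.TestLoader().loadTestsFromTestCase(SmokeTest)
    # reports/ is an arbitrary output directory for this illustration
    xmlrunner.XMLTestRunner(output="reports").run(suite)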
21.631579
82
0.705596
55
411
4.981818
0.618182
0.065693
0
0
0
0
0
0
0
0
0
0.005666
0.141119
411
18
83
22.833333
0.770538
0.082725
0
0
0
0
0.090667
0
0
0
0
0
0
1
0.153846
false
0
0.307692
0.153846
0.615385
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
1
1
0
0
3
3661027196354098e56a0da8e7b8ce7a0c21d9f2
545
py
Python
tests/test_managers/test_experiment.py
DXist/polyaxon-cli
0b01512548f9faea77fb60cb7c6bd327e0638b13
[ "MIT" ]
null
null
null
tests/test_managers/test_experiment.py
DXist/polyaxon-cli
0b01512548f9faea77fb60cb7c6bd327e0638b13
[ "MIT" ]
null
null
null
tests/test_managers/test_experiment.py
DXist/polyaxon-cli
0b01512548f9faea77fb60cb7c6bd327e0638b13
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function

from unittest import TestCase

from polyaxon_cli.managers.experiment import ExperimentManager
from polyaxon_cli.schemas import ExperimentConfig


class TestExperimentManager(TestCase):
    def test_default_props(self):
        assert ExperimentManager.IS_GLOBAL is False
        assert ExperimentManager.IS_POLYAXON_DIR is True
        assert ExperimentManager.CONFIG_FILE_NAME == '.polyaxonxp'
        assert ExperimentManager.CONFIG == ExperimentConfig
34.0625
66
0.790826
59
545
7.050847
0.610169
0.221154
0.072115
0
0
0
0
0
0
0
0
0.002165
0.152294
545
15
67
36.333333
0.898268
0.038532
0
0
0
0
0.021073
0
0
0
0
0
0.4
1
0.1
false
0
0.4
0
0.6
0.1
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
3
36717bfecfbfd8a9a0b115372c8d020c263787e1
321
py
Python
Famcy/_responses_/RedirectPageTab/RedirectPageTab.py
nexuni/Famcy
80f8f18fe1614ab3c203ca3466b9506b494470bf
[ "Apache-2.0" ]
null
null
null
Famcy/_responses_/RedirectPageTab/RedirectPageTab.py
nexuni/Famcy
80f8f18fe1614ab3c203ca3466b9506b494470bf
[ "Apache-2.0" ]
12
2022-02-05T04:56:44.000Z
2022-03-30T09:59:26.000Z
Famcy/_responses_/RedirectPageTab/RedirectPageTab.py
nexuni/Famcy
80f8f18fe1614ab3c203ca3466b9506b494470bf
[ "Apache-2.0" ]
null
null
null
import Famcy
# url_for is used in response() below but was not imported in the original file;
# assuming Famcy apps run under Flask (url_for("main.generate_tab_page", ...) is
# Flask-style endpoint routing), this import is required.
from flask import url_for


class RedirectPageTab(Famcy.FamcyResponse):
    def __init__(self, redirect_tab="", target=None):
        super(RedirectPageTab, self).__init__(target=target)
        self.redirect_tab = redirect_tab

    def response(self, sijax_response):
        sijax_response.redirect(url_for("main.generate_tab_page", tab=self.redirect_tab))
32.1
83
0.797508
42
321
5.690476
0.47619
0.1841
0.188285
0
0
0
0
0
0
0
0
0
0.087227
321
10
83
32.1
0.8157
0
0
0
0
0
0.068323
0.068323
0
0
0
0
0
1
0.285714
false
0
0.142857
0
0.571429
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
3
367ef0e282ef3bd732149918a371ed1d4fda5724
341
py
Python
agentq/utils/memory.py
AAorris/agentq
dff689ef7d29730abef319b6455ba089019ed581
[ "MIT" ]
null
null
null
agentq/utils/memory.py
AAorris/agentq
dff689ef7d29730abef319b6455ba089019ed581
[ "MIT" ]
null
null
null
agentq/utils/memory.py
AAorris/agentq
dff689ef7d29730abef319b6455ba089019ed581
[ "MIT" ]
null
null
null
"""Basic memory stream with random sampling.""" class MemoryStream(object): def __init__(self, size): self.memory = deque([]) def add(self, item): self.memory.append(item) if len(self.memory) > size: memory.popleft() def sample(self, shape): return np.random.sample(memory, shape)
22.733333
47
0.607038
41
341
4.95122
0.585366
0.147783
0
0
0
0
0
0
0
0
0
0
0.260997
341
14
48
24.357143
0.805556
0.120235
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0
0.111111
0.555556
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
36a2c17bc2fcaea528a3d9f3bd08b506e08b2f3f
11,253
py
Python
tests/boto3_api_responses.py
parallelstream/kines
d7a879df96babf0deeb54d9607e14c020332f78c
[ "MIT" ]
null
null
null
tests/boto3_api_responses.py
parallelstream/kines
d7a879df96babf0deeb54d9607e14c020332f78c
[ "MIT" ]
null
null
null
tests/boto3_api_responses.py
parallelstream/kines
d7a879df96babf0deeb54d9607e14c020332f78c
[ "MIT" ]
null
null
null
import datetime

from dateutil.tz import tzutc, tzlocal

LIST_SHARDS_8_RESPONSE = {
    "Shards": [
        {
            "ShardId": "shardId-000000000007", "ParentShardId": "shardId-000000000003",
            "HashKeyRange": {"StartingHashKey": "0",
                             "EndingHashKey": "42535295865117307932921825928971026431"},
            "SequenceNumberRange": {"StartingSequenceNumber": "49599683191123462322666423956522487359138715784273985650"},
        },
        {
            "ShardId": "shardId-000000000008", "ParentShardId": "shardId-000000000003",
            "HashKeyRange": {"StartingHashKey": "42535295865117307932921825928971026432",
                             "EndingHashKey": "85070591730234615865843651857942052863"},
            "SequenceNumberRange": {"StartingSequenceNumber": "49599683191145763067864954579664023077411364145779966082"},
        },
        {
            "ShardId": "shardId-000000000009", "ParentShardId": "shardId-000000000004",
            "HashKeyRange": {"StartingHashKey": "85070591730234615865843651857942052864",
                             "EndingHashKey": "127605887595351923798765477786913079295"},
            "SequenceNumberRange": {"StartingSequenceNumber": "49599683201872421508358184310742703566555228091740848274"},
        },
        {
            "ShardId": "shardId-000000000010", "ParentShardId": "shardId-000000000004",
            "HashKeyRange": {"StartingHashKey": "127605887595351923798765477786913079296",
                             "EndingHashKey": "170141183460469231731687303715884105727"},
            "SequenceNumberRange": {"StartingSequenceNumber": "49599683201894722253556714933884239284827876453246828706"},
        },
        {
            "ShardId": "shardId-000000000011", "ParentShardId": "shardId-000000000005",
            "HashKeyRange": {"StartingHashKey": "170141183460469231731687303715884105728",
                             "EndingHashKey": "212676479325586539664609129644855132159"},
            "SequenceNumberRange": {"StartingSequenceNumber": "49599683204057894537814185378613203957274767931643789490"},
        },
        {
            "ShardId": "shardId-000000000012", "ParentShardId": "shardId-000000000005",
            "HashKeyRange": {"StartingHashKey": "212676479325586539664609129644855132160",
                             "EndingHashKey": "255211775190703847597530955573826158591"},
            "SequenceNumberRange": {"StartingSequenceNumber": "49599683204080195283012716001754739675547416293149769922"},
        },
        {
            "ShardId": "shardId-000000000013", "ParentShardId": "shardId-000000000006",
            "HashKeyRange": {"StartingHashKey": "255211775190703847597530955573826158592",
                             "EndingHashKey": "297747071055821155530452781502797185023"},
            "SequenceNumberRange": {"StartingSequenceNumber": "49599683205886555644093696476219132855631933918731567314"},
        },
        {
            "ShardId": "shardId-000000000014", "ParentShardId": "shardId-000000000006",
            "HashKeyRange": {"StartingHashKey": "297747071055821155530452781502797185024",
                             "EndingHashKey": "340282366920938463463374607431768211455"},
            "SequenceNumberRange": {"StartingSequenceNumber": "49599683205908856389292227099360668573904582280237547746"},
        },
    ]
}

CLOUDWATCH_RESPONSE_H3_P3 = {
    "MetricDataResults": [
        {
            "Id": "ir",
            "Label": "IncomingRecords",
            "Timestamps": [
                datetime.datetime(2019, 10, 11, 5, 0, tzinfo=tzutc()),
                datetime.datetime(2019, 10, 11, 5, 30, tzinfo=tzutc()),
                datetime.datetime(2019, 10, 11, 6, 0, tzinfo=tzutc()),
                datetime.datetime(2019, 10, 11, 6, 30, tzinfo=tzutc()),
                datetime.datetime(2019, 10, 11, 7, 0, tzinfo=tzutc()),
                datetime.datetime(2019, 10, 11, 7, 30, tzinfo=tzutc()),
                datetime.datetime(2019, 10, 11, 8, 0, tzinfo=tzutc()),
            ],
            "Values": [297200.0, 265471.0, 255861.0, 250698.0, 241889.0, 305792.0, 189147.0],
            "StatusCode": "Complete",
        },
        {
            "Id": "ib",
            "Label": "IncomingBytes",
            "Timestamps": [
                datetime.datetime(2019, 10, 11, 5, 0, tzinfo=tzutc()),
                datetime.datetime(2019, 10, 11, 5, 30, tzinfo=tzutc()),
                datetime.datetime(2019, 10, 11, 6, 0, tzinfo=tzutc()),
                datetime.datetime(2019, 10, 11, 6, 30, tzinfo=tzutc()),
                datetime.datetime(2019, 10, 11, 7, 0, tzinfo=tzutc()),
                datetime.datetime(2019, 10, 11, 7, 30, tzinfo=tzutc()),
                datetime.datetime(2019, 10, 11, 8, 0, tzinfo=tzutc()),
            ],
            "Values": [971488736.0, 877812818.0, 847037776.0, 830774392.0, 798839755.0, 999749339.0, 623121754.0],
            "StatusCode": "Complete",
        },
        {
            "Id": "grr",
            "Label": "GetRecords.Records",
            "Timestamps": [
                datetime.datetime(2019, 10, 11, 5, 0, tzinfo=tzutc()),
                datetime.datetime(2019, 10, 11, 5, 30, tzinfo=tzutc()),
                datetime.datetime(2019, 10, 11, 6, 0, tzinfo=tzutc()),
                datetime.datetime(2019, 10, 11, 6, 30, tzinfo=tzutc()),
                datetime.datetime(2019, 10, 11, 7, 0, tzinfo=tzutc()),
                datetime.datetime(2019, 10, 11, 7, 30, tzinfo=tzutc()),
                datetime.datetime(2019, 10, 11, 8, 0, tzinfo=tzutc()),
            ],
            "Values": [2079782.0, 1858115.0, 1791687.0, 1754570.0, 1693413.0, 2140395.0, 1323267.0],
            "StatusCode": "Complete",
        },
        {
            "Id": "wpte",
            "Label": "WriteProvisionedThroughputExceeded",
            "Timestamps": [
                datetime.datetime(2019, 10, 11, 5, 0, tzinfo=tzutc()),
                datetime.datetime(2019, 10, 11, 5, 30, tzinfo=tzutc()),
                datetime.datetime(2019, 10, 11, 6, 0, tzinfo=tzutc()),
                datetime.datetime(2019, 10, 11, 6, 30, tzinfo=tzutc()),
                datetime.datetime(2019, 10, 11, 7, 0, tzinfo=tzutc()),
                datetime.datetime(2019, 10, 11, 7, 30, tzinfo=tzutc()),
                datetime.datetime(2019, 10, 11, 8, 0, tzinfo=tzutc()),
            ],
            "Values": [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
            "StatusCode": "Complete",
        },
        {
            "Id": "rpte",
            "Label": "ReadProvisionedThroughputExceeded",
            "Timestamps": [
                datetime.datetime(2019, 10, 11, 5, 0, tzinfo=tzutc()),
                datetime.datetime(2019, 10, 11, 5, 30, tzinfo=tzutc()),
                datetime.datetime(2019, 10, 11, 6, 0, tzinfo=tzutc()),
                datetime.datetime(2019, 10, 11, 6, 30, tzinfo=tzutc()),
                datetime.datetime(2019, 10, 11, 7, 0, tzinfo=tzutc()),
                datetime.datetime(2019, 10, 11, 7, 30, tzinfo=tzutc()),
                datetime.datetime(2019, 10, 11, 8, 0, tzinfo=tzutc()),
            ],
            "Values": [0.0, 1.0, 0.0, 3.0, 1.0, 3.0, 5.0],
            "StatusCode": "Complete",
        },
        {
            "Id": "giam",
            "Label": "GetRecords.IteratorAgeMilliseconds",
            "Timestamps": [
                datetime.datetime(2019, 10, 11, 5, 0, tzinfo=tzutc()),
                datetime.datetime(2019, 10, 11, 5, 30, tzinfo=tzutc()),
                datetime.datetime(2019, 10, 11, 6, 0, tzinfo=tzutc()),
                datetime.datetime(2019, 10, 11, 6, 30, tzinfo=tzutc()),
                datetime.datetime(2019, 10, 11, 7, 0, tzinfo=tzutc()),
                datetime.datetime(2019, 10, 11, 7, 30, tzinfo=tzutc()),
                datetime.datetime(2019, 10, 11, 8, 0, tzinfo=tzutc()),
            ],
            "Values": [0.0, 0.0, 0.0, 0.0, 0.0, 36000.0, 0.0],
            "StatusCode": "Complete",
        },
    ],
    "Messages": [],
}

KINESIS_RECORDS_ITERATOR_1_RESPONSE = {
    "Records": [
        {
            "SequenceNumber": "49600282682944895786267660693075522538255370376250918498",
            "ApproximateArrivalTimestamp": datetime.datetime(2019, 10, 10, 16, 22, 41, 761000, tzinfo=tzlocal()),
            "Data": b"eyJldmVudCI6ICIxIn0K",
            "PartitionKey": "4439109",
        },
        {
            "SequenceNumber": "49600282682944895786267660697997059549906526021357667938",
            "ApproximateArrivalTimestamp": datetime.datetime(2019, 10, 10, 16, 22, 45, 180000, tzinfo=tzlocal()),
            "Data": b"eyJldmVudCI6ICIyIn0K",
            "PartitionKey": "4439109",
        },
        {
            "SequenceNumber": "49600282682944895786267660702176316108314299215755871842",
            "ApproximateArrivalTimestamp": datetime.datetime(2019, 10, 10, 16, 22, 48, 83000, tzinfo=tzlocal()),
            "Data": b"eyJldmVudCI6ICIzIn0K",
            "PartitionKey": "4439109",
        },
        {
            "SequenceNumber": "49600282682944895786267660702634498993948243810408466018",
            "ApproximateArrivalTimestamp": datetime.datetime(2019, 10, 10, 16, 22, 48, 407000, tzinfo=tzlocal()),
            "Data": b"eyJldmVudCI6ICI0In0K",
            "PartitionKey": "4439109",
        },
        {
            "SequenceNumber": "49600282682944895786267660705672529578639807063884039778",
            "ApproximateArrivalTimestamp": datetime.datetime(2019, 10, 10, 16, 22, 50, 666000, tzinfo=tzlocal()),
            "Data": b"eyJldmVudCI6ICI1In0K",
            "PartitionKey": "4439109",
        },
    ],
    "NextShardIterator": "AAAA-Shard-Iterator-2",
    "MillisBehindLatest": 86346000,
    "ResponseMetadata": {
        "RequestId": "f66ad0f9-3ade-3f3e-a070-b1f5c9043ac3",
        "HTTPStatusCode": 200,
        "HTTPHeaders": {
            "x-amzn-requestid": "f66ad0f9-3ade-3f3e-a070-b1f5c9043ac3",
            "x-amz-id-2": "y6rEwcgmg3F05eSo8bEqr2OMvkyFQwc8vCjClMZRRfU0fEq2vbR1lD2FC7v0rp43b1LY1acrEaQWjnjOXgBq/DkW39Fbznto",
            "date": "Fri, 11 Oct 2019 10:51:56 GMT",
            "content-type": "application/x-amz-json-1.1",
            "content-length": "91441",
        },
        "RetryAttempts": 0,
    },
}
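A sketch of how fixtures like these are typically consumed in tests (the names here are illustrative, not from this repo): stub the boto3 client with a Mock so the code under test sees canned API responses instead of calling AWS.

from unittest import mock

kinesis = mock.Mock()
kinesis.list_shards.return_value = LIST_SHARDS_8_RESPONSE
kinesis.get_records.return_value = KINESIS_RECORDS_ITERATOR_1_RESPONSE

shards = kinesis.list_shards(StreamName="my-stream")["Shards"]
assert len(shards) == 8
records = kinesis.get_records(ShardIterator="AAAA-Shard-Iterator-1")["Records"]
assert records[0]["PartitionKey"] == "4439109"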
40.771739
125
0.519239
797
11,253
7.318695
0.217064
0.049374
0.161152
0.177267
0.439911
0.352306
0.338248
0.338248
0.309961
0.290417
0
0.327772
0.350129
11,253
275
126
40.92
0.469848
0
0
0.394834
0
0
0.329779
0.172043
0
0
0
0
0
1
0
false
0
0.00738
0
0.00738
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
36a3d5ae6db6459664904cb57891bd2976f9ba9d
181
py
Python
setup.py
jameskabbes/ml_pipeline
48e0cbb4a03a5293e3d0ddc97ec511be636a7760
[ "MIT" ]
null
null
null
setup.py
jameskabbes/ml_pipeline
48e0cbb4a03a5293e3d0ddc97ec511be636a7760
[ "MIT" ]
null
null
null
setup.py
jameskabbes/ml_pipeline
48e0cbb4a03a5293e3d0ddc97ec511be636a7760
[ "MIT" ]
null
null
null
from setuptools import setup

if __name__ == '__main__':
    setup(
        package_data={'ml_pipeline': [
            'Templates/*.ipynb',
            'Templates/*.xlsx'
        ]}
    )
22.625
55
0.541436
16
181
5.5
0.875
0
0
0
0
0
0
0
0
0
0
0
0.314917
181
8
56
22.625
0.709677
0
0
0
0
0
0.285714
0
0
0
0
0
0
1
0
true
0
0.142857
0
0.142857
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
3
36b787f8089a07535875a9852a26a17e6b73d187
136
py
Python
26_dirstructure/main.py
maornesimi/python-course-examples
f2e606f142a9d331075db73fd451c4418dba45ed
[ "MIT" ]
2
2016-07-06T08:47:01.000Z
2019-12-15T05:09:24.000Z
26_dirstructure/main.py
maornesimi/python-course-examples
f2e606f142a9d331075db73fd451c4418dba45ed
[ "MIT" ]
143
2016-10-14T07:33:55.000Z
2018-11-06T19:13:52.000Z
26_dirstructure/main.py
maornesimi/python-course-examples
f2e606f142a9d331075db73fd451c4418dba45ed
[ "MIT" ]
43
2016-10-13T15:49:47.000Z
2019-09-10T09:14:52.000Z
""" Main project file """ import lib.demo as demo if __name__ == "__main__": res = demo.add(10,20) print "Result = %d" % res
13.6
29
0.595588
20
136
3.65
0.8
0
0
0
0
0
0
0
0
0
0
0.038835
0.242647
136
9
30
15.111111
0.669903
0
0
0
0
0
0.172727
0
0
0
0
0
0
0
null
null
0
0.25
null
null
0.25
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
3
36cd6eeee254cffeff46b866cc779346e40a5772
143
py
Python
coco/admin/apps.py
ipynbsrv/ipynbsrv
6cd99719c2a9b3814ee37d87a38adcbf30172141
[ "BSD-3-Clause" ]
null
null
null
coco/admin/apps.py
ipynbsrv/ipynbsrv
6cd99719c2a9b3814ee37d87a38adcbf30172141
[ "BSD-3-Clause" ]
null
null
null
coco/admin/apps.py
ipynbsrv/ipynbsrv
6cd99719c2a9b3814ee37d87a38adcbf30172141
[ "BSD-3-Clause" ]
null
null
null
from django.apps import AppConfig


class MyAdmin(AppConfig):
    """
    :inherit.
    """
    name = 'coco.admin'
    label = 'coco-admin'
11.916667
33
0.587413
15
143
5.6
0.8
0.214286
0
0
0
0
0
0
0
0
0
0
0.272727
143
11
34
13
0.807692
0.062937
0
0
0
0
0.169492
0
0
0
0
0
0
1
0
false
0
0.25
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
3
36ddc2b7c182b116276dc1debb737c7fee9997d0
5,305
py
Python
etl/parsers/etw/Microsoft_Windows_WiFiHotspotService.py
IMULMUL/etl-parser
76b7c046866ce0469cd129ee3f7bb3799b34e271
[ "Apache-2.0" ]
104
2020-03-04T14:31:31.000Z
2022-03-28T02:59:36.000Z
etl/parsers/etw/Microsoft_Windows_WiFiHotspotService.py
IMULMUL/etl-parser
76b7c046866ce0469cd129ee3f7bb3799b34e271
[ "Apache-2.0" ]
7
2020-04-20T09:18:39.000Z
2022-03-19T17:06:19.000Z
etl/parsers/etw/Microsoft_Windows_WiFiHotspotService.py
IMULMUL/etl-parser
76b7c046866ce0469cd129ee3f7bb3799b34e271
[ "Apache-2.0" ]
16
2020-03-05T18:55:59.000Z
2022-03-01T10:19:28.000Z
# -*- coding: utf-8 -*-
"""
Microsoft-Windows-WiFiHotspotService
GUID : 814182fe-58f7-11e1-853c-78e7d1ca7337
"""
from construct import Int8sl, Int8ul, Int16ul, Int16sl, Int32sl, Int32ul, Int64sl, Int64ul, Bytes, Double, Float32l, Struct
from etl.utils import WString, CString, SystemTime, Guid
from etl.dtyp import Sid
from etl.parsers.etw.core import Etw, declare, guid


@declare(guid=guid("814182fe-58f7-11e1-853c-78e7d1ca7337"), event_id=1003, version=0)
class Microsoft_Windows_WiFiHotspotService_1003_0(Etw):
    pattern = Struct("uString" / WString, "Dword" / Int32ul)


@declare(guid=guid("814182fe-58f7-11e1-853c-78e7d1ca7337"), event_id=1004, version=0)
class Microsoft_Windows_WiFiHotspotService_1004_0(Etw):
    pattern = Struct("uString" / WString, "Dword" / Int32ul)


@declare(guid=guid("814182fe-58f7-11e1-853c-78e7d1ca7337"), event_id=1005, version=0)
class Microsoft_Windows_WiFiHotspotService_1005_0(Etw):
    pattern = Struct("Ptr1" / Int64ul, "Ptr2" / Int64ul)


@declare(guid=guid("814182fe-58f7-11e1-853c-78e7d1ca7337"), event_id=1006, version=0)
class Microsoft_Windows_WiFiHotspotService_1006_0(Etw):
    pattern = Struct("uString" / WString, "Dword" / Int32ul)


@declare(guid=guid("814182fe-58f7-11e1-853c-78e7d1ca7337"), event_id=1007, version=0)
class Microsoft_Windows_WiFiHotspotService_1007_0(Etw):
    pattern = Struct("uString" / WString, "Dword" / Int32ul)


@declare(guid=guid("814182fe-58f7-11e1-853c-78e7d1ca7337"), event_id=1008, version=0)
class Microsoft_Windows_WiFiHotspotService_1008_0(Etw):
    pattern = Struct("uString" / WString, "Dword" / Int32ul)


@declare(guid=guid("814182fe-58f7-11e1-853c-78e7d1ca7337"), event_id=1009, version=0)
class Microsoft_Windows_WiFiHotspotService_1009_0(Etw):
    pattern = Struct("uString" / WString, "Dword" / Int32ul)


@declare(guid=guid("814182fe-58f7-11e1-853c-78e7d1ca7337"), event_id=1010, version=0)
class Microsoft_Windows_WiFiHotspotService_1010_0(Etw):
    pattern = Struct("uString" / WString, "Dword" / Int32ul)


@declare(guid=guid("814182fe-58f7-11e1-853c-78e7d1ca7337"), event_id=1011, version=0)
class Microsoft_Windows_WiFiHotspotService_1011_0(Etw):
    pattern = Struct("uString" / WString, "Dword" / Int32ul)


@declare(guid=guid("814182fe-58f7-11e1-853c-78e7d1ca7337"), event_id=1012, version=0)
class Microsoft_Windows_WiFiHotspotService_1012_0(Etw):
    pattern = Struct("uString" / WString, "Dword" / Int32ul)


@declare(guid=guid("814182fe-58f7-11e1-853c-78e7d1ca7337"), event_id=2000, version=0)
class Microsoft_Windows_WiFiHotspotService_2000_0(Etw):
    pattern = Struct(
        "uString1" / WString,
        "uString2" / WString,
        "Dword1" / Int32ul,
        "Dword2" / Int32ul,
        "Dword3" / Int32ul
    )


@declare(guid=guid("814182fe-58f7-11e1-853c-78e7d1ca7337"), event_id=3000, version=0)
class Microsoft_Windows_WiFiHotspotService_3000_0(Etw):
    pattern = Struct("Dword1" / Int32ul, "Dword2" / Int32ul)


@declare(guid=guid("814182fe-58f7-11e1-853c-78e7d1ca7337"), event_id=3001, version=0)
class Microsoft_Windows_WiFiHotspotService_3001_0(Etw):
    pattern = Struct("Ptr" / Int64ul, "Dword" / Int32ul)


@declare(guid=guid("814182fe-58f7-11e1-853c-78e7d1ca7337"), event_id=3002, version=0)
class Microsoft_Windows_WiFiHotspotService_3002_0(Etw):
    pattern = Struct("Ptr" / Int64ul)


@declare(guid=guid("814182fe-58f7-11e1-853c-78e7d1ca7337"), event_id=3003, version=0)
class Microsoft_Windows_WiFiHotspotService_3003_0(Etw):
    pattern = Struct("uString" / WString, "Dword" / Int32ul)


@declare(guid=guid("814182fe-58f7-11e1-853c-78e7d1ca7337"), event_id=3004, version=0)
class Microsoft_Windows_WiFiHotspotService_3004_0(Etw):
    pattern = Struct("uString" / WString, "Dword" / Int32ul)


@declare(guid=guid("814182fe-58f7-11e1-853c-78e7d1ca7337"), event_id=4000, version=0)
class Microsoft_Windows_WiFiHotspotService_4000_0(Etw):
    pattern = Struct("aString" / CString)


@declare(guid=guid("814182fe-58f7-11e1-853c-78e7d1ca7337"), event_id=4001, version=0)
class Microsoft_Windows_WiFiHotspotService_4001_0(Etw):
    pattern = Struct("uString" / WString, "Dword" / Int32ul)


@declare(guid=guid("814182fe-58f7-11e1-853c-78e7d1ca7337"), event_id=4002, version=0)
class Microsoft_Windows_WiFiHotspotService_4002_0(Etw):
    pattern = Struct("uString" / WString, "Dword" / Int32ul)


@declare(guid=guid("814182fe-58f7-11e1-853c-78e7d1ca7337"), event_id=4003, version=0)
class Microsoft_Windows_WiFiHotspotService_4003_0(Etw):
    pattern = Struct(
        "Dword1" / Int32ul,
        "Dword2" / Int32ul,
        "aString1" / CString,
        "Dword3" / Int32ul,
        "Dword4" / Int32ul,
        "Dword5" / Int32ul,
        "uString1" / WString
    )


@declare(guid=guid("814182fe-58f7-11e1-853c-78e7d1ca7337"), event_id=4004, version=0)
class Microsoft_Windows_WiFiHotspotService_4004_0(Etw):
    pattern = Struct("uString" / WString, "Dword" / Int32ul)
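A minimal sketch of how one of these `pattern` structs decodes a payload, using only the construct library (WString in the file above is a null-terminated UTF-16 string; construct's own CString with a utf-16 encoding plays the same role in this illustration, and the values are invented):

from construct import CString, Int32ul, Struct

pattern = Struct("uString" / CString("utf_16_le"), "Dword" / Int32ul)
# round-trip: build a payload, then parse it back into named fields
payload = pattern.build(dict(uString="hi", Dword=7))
parsed = pattern.parse(payload)
print(parsed.uString, parsed.Dword)  # hi 7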
28.675676
123
0.689538
607
5,305
5.853377
0.130148
0.099071
0.210526
0.123839
0.823248
0.823248
0.522094
0.522094
0.487757
0.487757
0
0.186202
0.183035
5,305
184
124
28.831522
0.633595
0.019416
0
0.404412
0
0
0.200655
0.145581
0
0
0
0
0
1
0
false
0
0.029412
0
0.338235
0
0
0
0
null
0
1
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
36e08c383b2228e78dcdd10016159086d222c565
337
py
Python
0x08-python-more_classes/test/2-main.py
malu17/alx-higher_level_programming
75a24d98c51116b737f339697c75855e34254d3a
[ "MIT" ]
1
2022-02-07T12:13:18.000Z
2022-02-07T12:13:18.000Z
0x08-python-more_classes/test/2-main.py
malu17/alx-higher_level_programming
75a24d98c51116b737f339697c75855e34254d3a
[ "MIT" ]
null
null
null
0x08-python-more_classes/test/2-main.py
malu17/alx-higher_level_programming
75a24d98c51116b737f339697c75855e34254d3a
[ "MIT" ]
1
2021-12-06T18:15:54.000Z
2021-12-06T18:15:54.000Z
#!/usr/bin/python3
Rectangle = __import__('2-rectangle').Rectangle

my_rectangle = Rectangle(2, 4)
print("Area: {} - Perimeter: {}".format(my_rectangle.area(), my_rectangle.perimeter()))
print("--")
my_rectangle.width = 10
my_rectangle.height = 3
print("Area: {} - Perimeter: {}".format(my_rectangle.area(), my_rectangle.perimeter()))
28.083333
87
0.709199
42
337
5.428571
0.380952
0.337719
0.157895
0.210526
0.517544
0.517544
0.517544
0.517544
0.517544
0.517544
0
0.022801
0.089021
337
11
88
30.636364
0.71987
0.050445
0
0.285714
0
0
0.191223
0
0
0
0
0
0
1
0
false
0
0.142857
0
0.142857
0.428571
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
3
36f3044484c99cffa133bdd943fd744ee9444ee9
871
py
Python
PersonalSite/PersonalSite/views.py
joeyuan19/flaming-bear
8cf73a079ed98579f8e1f9e960740c94cd2ad39a
[ "Apache-2.0" ]
null
null
null
PersonalSite/PersonalSite/views.py
joeyuan19/flaming-bear
8cf73a079ed98579f8e1f9e960740c94cd2ad39a
[ "Apache-2.0" ]
null
null
null
PersonalSite/PersonalSite/views.py
joeyuan19/flaming-bear
8cf73a079ed98579f8e1f9e960740c94cd2ad39a
[ "Apache-2.0" ]
null
null
null
from django.shortcuts import render
from dajaxice.core import dajaxice_functions


def homepage(request):
    return render(request, 'index.html')


def test(request):
    return render(request, 'test2.html')


def presentation(request):
    return render(request, 'presentation.html')


def homepage_redirect(request):
    return render(request, 'error.html')


from django.template import RequestContext
from django.shortcuts import render_to_response
from content.models import ResumeCategory


def django_test(request):
    categories = [cat for cat in ResumeCategory.objects.all()]
    debug = str(len(categories))
    canary = "canary"
    return render_to_response(
        'content/resume_django.html',
        {
            'debug': debug,
            'canary': "Made it!",
            'categories': categories,
        },
        context_instance=RequestContext(request)
    )
22.333333
62
0.706085
98
871
6.183673
0.418367
0.09901
0.125413
0.171617
0.10231
0
0
0
0
0
0
0.001425
0.19403
871
38
63
22.921053
0.861823
0
0
0
0
0
0.124281
0.029919
0
0
0
0
0
1
0.192308
false
0
0.192308
0.153846
0.576923
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
1
0
0
3
36f6594566ad4b49e1182a8f96d350e8abd533fd
713
py
Python
pl_bolts/models/rl/__init__.py
lavoiems/lightning-bolts
208e92ba3dcdbc029afd37e09ec9461fbcf3f293
[ "Apache-2.0" ]
822
2020-04-21T03:30:43.000Z
2021-03-07T06:41:31.000Z
pl_bolts/models/rl/__init__.py
lavoiems/lightning-bolts
208e92ba3dcdbc029afd37e09ec9461fbcf3f293
[ "Apache-2.0" ]
538
2020-04-18T01:07:58.000Z
2021-03-09T13:48:50.000Z
pl_bolts/models/rl/__init__.py
lavoiems/lightning-bolts
208e92ba3dcdbc029afd37e09ec9461fbcf3f293
[ "Apache-2.0" ]
162
2020-04-17T15:44:54.000Z
2021-03-09T14:04:02.000Z
from pl_bolts.models.rl.advantage_actor_critic_model import AdvantageActorCritic
from pl_bolts.models.rl.double_dqn_model import DoubleDQN
from pl_bolts.models.rl.dqn_model import DQN
from pl_bolts.models.rl.dueling_dqn_model import DuelingDQN
from pl_bolts.models.rl.noisy_dqn_model import NoisyDQN
from pl_bolts.models.rl.per_dqn_model import PERDQN
from pl_bolts.models.rl.reinforce_model import Reinforce
from pl_bolts.models.rl.sac_model import SAC
from pl_bolts.models.rl.vanilla_policy_gradient_model import VanillaPolicyGradient

__all__ = [
    "AdvantageActorCritic",
    "DoubleDQN",
    "DQN",
    "DuelingDQN",
    "NoisyDQN",
    "PERDQN",
    "Reinforce",
    "SAC",
    "VanillaPolicyGradient",
]
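Aggregator __init__ modules like this one are what let callers import from the package root instead of spelling out the submodule path; __all__ additionally pins what a star-import exposes. A sketch of the resulting usage (requires pl_bolts installed):

import importlib

rl = importlib.import_module("pl_bolts.models.rl")
assert set(rl.__all__) >= {"DQN", "DoubleDQN", "SAC"}
DQN = getattr(rl, "DQN")  # same object as pl_bolts.models.rl.dqn_model.DQN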
32.409091
82
0.799439
99
713
5.454545
0.272727
0.1
0.183333
0.283333
0.316667
0
0
0
0
0
0
0
0.120617
713
21
83
33.952381
0.861244
0
0
0
0
0
0.124825
0.029453
0
0
0
0
0
1
0
false
0
0.45
0
0.45
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
7fdfe7c2dd91c71d77f1620c886844f294dbd20f
95
py
Python
api/run.py
mdiener/battleship
a8075e156013621fbe7f316f60a21011a0d959a2
[ "MIT" ]
null
null
null
api/run.py
mdiener/battleship
a8075e156013621fbe7f316f60a21011a0d959a2
[ "MIT" ]
null
null
null
api/run.py
mdiener/battleship
a8075e156013621fbe7f316f60a21011a0d959a2
[ "MIT" ]
null
null
null
from websocket import run


def main():
    run(8000, False)


if __name__ == '__main__':
    main()
11.875
26
0.663158
13
95
4.230769
0.769231
0
0
0
0
0
0
0
0
0
0
0.052632
0.2
95
7
27
13.571429
0.671053
0
0
0
0
0
0.084211
0
0
0
0
0
0
1
0.2
true
0
0.2
0
0.4
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
3
7ff133f40441bc6decf2c24c77cf40b81c47c648
226
py
Python
Python/FoxAndSnake.py
Zardosh/code-forces-solutions
ea1446b8e4f391f3e9ef63094816c7bdaded1557
[ "MIT" ]
null
null
null
Python/FoxAndSnake.py
Zardosh/code-forces-solutions
ea1446b8e4f391f3e9ef63094816c7bdaded1557
[ "MIT" ]
null
null
null
Python/FoxAndSnake.py
Zardosh/code-forces-solutions
ea1446b8e4f391f3e9ef63094816c7bdaded1557
[ "MIT" ]
null
null
null
n, m = map(int, input().split())

for i in range(n):
    if i % 2 == 0:
        print('#' * m)
    else:
        if (i + 1) % 4 == 0:
            print('#' + '.' * (m - 1))
        else:
            print('.' * (m - 1) + '#')
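The same zig-zag logic as a pure function, to make the expected output easy to check without stdin (a sketch; the original reads "n m" from input()):

def snake(n, m):
    rows = []
    for i in range(n):
        if i % 2 == 0:
            rows.append('#' * m)          # full row of the snake's body
        elif (i + 1) % 4 == 0:
            rows.append('#' + '.' * (m - 1))  # turn on the left edge
        else:
            rows.append('.' * (m - 1) + '#')  # turn on the right edge
    return "\n".join(rows)

print(snake(3, 3))
# ###
# ..#
# ###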
22.6
39
0.30531
30
226
2.3
0.566667
0.26087
0.202899
0
0
0
0
0
0
0
0
0.055118
0.438053
226
10
40
22.6
0.488189
0
0
0.222222
0
0
0.022026
0
0
0
0
0
0
1
0
false
0
0
0
0
0.333333
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
3d0b55a34abfbeb991e68a8a56f9ae27b5dfec93
89
py
Python
respa_exchange/__init__.py
HotStew/respa
04f39efb15b4f4206a122e665f8377c7198e1f25
[ "MIT" ]
49
2015-10-21T06:25:31.000Z
2022-03-20T07:24:20.000Z
respa_exchange/__init__.py
HotStew/respa
04f39efb15b4f4206a122e665f8377c7198e1f25
[ "MIT" ]
728
2015-06-24T13:26:54.000Z
2022-03-24T12:18:41.000Z
respa_exchange/__init__.py
digipointtku/respa
a529e0df4d3f072df7801adb5bf97a5f4abd1243
[ "MIT" ]
46
2015-06-26T10:52:57.000Z
2021-12-17T09:38:25.000Z
__version__ = "0.1.0" default_app_config = 'respa_exchange.apps.RespaExchangeAppConfig'
22.25
65
0.808989
11
89
5.909091
0.909091
0
0
0
0
0
0
0
0
0
0
0.036585
0.078652
89
3
66
29.666667
0.756098
0
0
0
0
0
0.52809
0.47191
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
3d0b9cdaec39fdab7c52e01ba07e1ef8ac3d99c2
1,615
py
Python
pyalgorithms/permutations/simple_recursion.py
tpagit/pyalgorithms
ef62fb5bbe431af93cfe45fd0549aa453aee82d1
[ "MIT" ]
null
null
null
pyalgorithms/permutations/simple_recursion.py
tpagit/pyalgorithms
ef62fb5bbe431af93cfe45fd0549aa453aee82d1
[ "MIT" ]
null
null
null
pyalgorithms/permutations/simple_recursion.py
tpagit/pyalgorithms
ef62fb5bbe431af93cfe45fd0549aa453aee82d1
[ "MIT" ]
null
null
null
#
# recursion tree for "abc" (spacing approximated; the original diagram's exact
# layout was lost in extraction):
#
#                       abc
#            /           |           \
#        a -> a        a -> b       a -> c
#           |             |            |
#         [a]bc         [b]ac        [c]ba
#         /    \        /    \       /    \
#     [ab]c  [ac]b  [ba]c  [bc]a  [cb]a  [ca]b
#
from typing import List


def _simple_recursion_permutation(text: List[str], start: int, end: int):
    if start == end:
        yield "".join(text)
        return

    for i in range(start, end + 1):
        text[start], text[i] = text[i], text[start]
        yield from _simple_recursion_permutation(text, start + 1, end)
        text[start], text[i] = text[i], text[start]


def simple_recursion_permutation(text: str) -> str:
    yield from _simple_recursion_permutation(list(text), 0, len(text) - 1)


if __name__ == "__main__":
    from datetime import datetime

    string = "abcdefghij"
    start = datetime.now()
    for next_value in simple_recursion_permutation(string):
        print(next_value)
    end = datetime.now()
    print((end - start).total_seconds())
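For a quick sanity check of the swap-based ordering (a short sketch, verified by hand for three symbols):

print(list(simple_recursion_permutation("abc")))
# ['abc', 'acb', 'bac', 'bca', 'cba', 'cab']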
31.666667
74
0.337461
128
1,615
4.070313
0.351563
0.143954
0.24952
0.172745
0.368522
0.107486
0.107486
0.107486
0
0
0
0.005362
0.538081
1,615
50
75
32.3
0.693029
0.477399
0
0.105263
0
0
0.022585
0
0
0
0
0
0
1
0.105263
false
0
0.105263
0
0.263158
0.105263
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
3d197c2bab8b805629d86050240e697832a2f604
5,862
py
Python
stable_baselines/common/filters.py
BruceK4t1qbit/stable-baselines
d997d659de54bd14129d0af8df07e7c875cba7e5
[ "MIT" ]
49
2020-07-24T18:17:12.000Z
2022-01-04T15:30:52.000Z
stable_baselines/common/filters.py
BillChan226/POAR-SRL-4-Robot
a6a8052e105369656d34fffc4f7ca4475dcc38df
[ "MIT" ]
14
2020-07-21T20:21:08.000Z
2022-03-12T00:42:18.000Z
stable_baselines/common/filters.py
BillChan226/POAR-SRL-4-Robot
a6a8052e105369656d34fffc4f7ca4475dcc38df
[ "MIT" ]
6
2020-01-07T02:23:52.000Z
2020-10-11T15:42:43.000Z
from collections import deque

import numpy as np

from .running_stat import RunningStat


class Filter(object):
    """
    takes a value 'x' (np.ndarray), applies the filter, and returns the new value.

    Can pass kwarg: 'update' (bool) if the filter can update from the value
    """
    def __call__(self, arr, update=True):
        raise NotImplementedError

    def reset(self):
        """
        resets the filter
        """
        pass

    def output_shape(self, input_space):
        """
        returns the output shape

        :param input_space: (numpy int)
        :return: (numpy int) output shape
        """
        raise NotImplementedError


class IdentityFilter(Filter):
    """
    A filter that implements an identity function

    takes a value 'x' (np.ndarray), applies the filter, and returns the new value.

    Can pass kwarg: 'update' (bool) if the filter can update from the value
    """
    def __call__(self, arr, update=True):
        return arr

    def output_shape(self, input_space):
        return input_space.shape


class CompositionFilter(Filter):
    def __init__(self, functions):
        """
        A filter that implements a composition with other functions

        takes a value 'x' (np.ndarray), applies the filter, and returns the new value.

        Can pass kwarg: 'update' (bool) if the filter can update from the value

        :param functions: ([function]) composition of these functions and the input
        """
        self.functions = functions

    def __call__(self, arr, update=True):
        for func in self.functions:
            arr = func(arr)
        return arr

    def output_shape(self, input_space):
        out = input_space.shape
        for func in self.functions:
            out = func.output_shape(out)
        return out


class ZFilter(Filter):
    def __init__(self, shape, demean=True, destd=True, clip=10.0):
        """
        A filter that implements a z-filter
        y = (x-mean)/std
        using running estimates of mean,std

        takes a value 'x' (np.ndarray), applies the filter, and returns the new value.

        Can pass kwarg: 'update' (bool) if the filter can update from the value

        :param shape: ([int]) the shape of the input
        :param demean: (bool) filter mean
        :param destd: (bool) filter standard deviation
        :param clip: (float) clip filter absolute value to this value
        """
        self.demean = demean
        self.destd = destd
        self.clip = clip

        self.running_stat = RunningStat(shape)

    def __call__(self, arr, update=True):
        if update:
            self.running_stat.push(arr)
        if self.demean:
            arr = arr - self.running_stat.mean
        if self.destd:
            arr = arr / (self.running_stat.std + 1e-8)
        if self.clip:
            arr = np.clip(arr, -self.clip, self.clip)
        return arr

    def output_shape(self, input_space):
        return input_space.shape


class AddClock(Filter):
    def __init__(self):
        """
        A filter that appends a counter to the input

        takes a value 'x' (np.ndarray), applies the filter, and returns the new value.

        Can pass kwarg: 'update' (bool) if the filter can update from the value
        """
        self.count = 0

    def reset(self):
        self.count = 0

    def __call__(self, arr, update=True):
        return np.append(arr, self.count / 100.0)

    def output_shape(self, input_space):
        return input_space.shape[0] + 1,


class FlattenFilter(Filter):
    """
    A filter that flattens the input

    takes a value 'x' (np.ndarray), applies the filter, and returns the new value.

    Can pass kwarg: 'update' (bool) if the filter can update from the value
    """
    def __call__(self, arr, update=True):
        return arr.ravel()

    def output_shape(self, input_space):
        return int(np.prod(input_space.shape)),


class Ind2OneHotFilter(Filter):
    def __init__(self, n_cat):
        """
        A filter that turns indices to onehot encoding

        takes a value 'x' (np.ndarray), applies the filter, and returns the new value.

        Can pass kwarg: 'update' (bool) if the filter can update from the value

        :param n_cat: (int) the number of categories
        """
        self.n_cat = n_cat

    def __call__(self, arr, update=True):
        out = np.zeros(self.n_cat)
        out[arr] = 1
        return out

    def output_shape(self, input_space):
        return input_space.n,


class DivFilter(Filter):
    def __init__(self, divisor):
        """
        A filter that divides the input from a value

        takes a value 'x' (np.ndarray), applies the filter, and returns the new value.

        Can pass kwarg: 'update' (bool) if the filter can update from the value

        :param divisor: (float) the number you want to divide by
        """
        self.divisor = divisor

    def __call__(self, arr, update=True):
        return arr / self.divisor

    def output_shape(self, input_space):
        return input_space.shape


class StackFilter(Filter):
    def __init__(self, length):
        """
        A filter that runs a stacking of a 'length' inputs

        takes a value 'x' (np.ndarray), applies the filter, and returns the new value.

        Can pass kwarg: 'update' (bool) if the filter can update from the value

        :param length: (int) the number of inputs to stack
        """
        self.stack = deque(maxlen=length)

    def reset(self):
        self.stack.clear()

    def __call__(self, arr, update=True):
        self.stack.append(arr)
        while len(self.stack) < self.stack.maxlen:
            self.stack.append(arr)
        return np.concatenate(self.stack, axis=-1)

    def output_shape(self, input_space):
        return input_space.shape[:-1] + (input_space.shape[-1] * self.stack.maxlen,)
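A small usage sketch for ZFilter above (shapes and values invented; relies on the same RunningStat helper the file imports): each call updates the running mean/std and returns a normalized, clipped observation.

import numpy as np

zf = ZFilter(shape=(3,), clip=5.0)
for step in range(100):
    obs = np.random.randn(3) * 2.0 + 1.0
    norm_obs = zf(obs)                      # updates running stats, then z-normalizes
frozen = zf(np.ones(3), update=False)       # evaluate without touching the stats
print(norm_obs.shape, frozen.shape)         # (3,) (3,)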
27.650943
86
0.620096
790
5,862
4.475949
0.156962
0.04836
0.027998
0.030543
0.520362
0.483597
0.448529
0.43043
0.410633
0.39819
0
0.004309
0.287445
5,862
211
87
27.781991
0.842231
0.396281
0
0.425287
0
0
0
0
0
0
0
0
0
1
0.310345
false
0.011494
0.034483
0.126437
0.632184
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
3d19a10a5b191e21762896516127a9429aeb0871
816
py
Python
deeptensor/model/cifar/cifar.py
cicicici/deeptensor
efcd7b9ca2d758cb2461b64fa5ba1268685e4dab
[ "MIT" ]
1
2018-04-04T09:00:45.000Z
2018-04-04T09:00:45.000Z
deeptensor/model/cifar/cifar.py
cicicici/deeptensor
efcd7b9ca2d758cb2461b64fa5ba1268685e4dab
[ "MIT" ]
null
null
null
deeptensor/model/cifar/cifar.py
cicicici/deeptensor
efcd7b9ca2d758cb2461b64fa5ba1268685e4dab
[ "MIT" ]
null
null
null
from .lenet import LeNet
from .vgg import VGG
from .dpn import DPN, DPN26, DPN92
from .densenet import DenseNet, DenseNet121, DenseNet169, DenseNet201, DenseNet161
from .googlenet import Inception, GoogLeNet
from .mobilenet import MobileNet
from .mobilenetv2 import MobileNetV2
from .resnet import ResNet, ResNet18, ResNet34, ResNet50, ResNet101, ResNet152
from .resnext import ResNeXt, ResNeXt29_2x64d, ResNeXt29_4x64d, ResNeXt29_8x64d, ResNeXt29_32x4d
from .preact_resnet import PreActResNet, PreActResNet18, PreActResNet34, PreActResNet50, PreActResNet101, PreActResNet152
from .senet import SENet, SENet18
from .shufflenet import ShuffleNet, ShuffleNetG2, ShuffleNetG3
from .shufflenetv2 import ShuffleNetV2
from .efficientnet import EfficientNet, EfficientNetB0
from .pnasnet import PNASNet, PNASNetA, PNASNetB
51
121
0.841912
92
816
7.413043
0.5
0.035191
0
0
0
0
0
0
0
0
0
0.09465
0.106618
816
15
122
54.4
0.840878
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
3
3d398daaf478bf2a1d0435cc6799d92e01893439
358
py
Python
metaci/plan/migrations/0004_remove_plan_devhub.py
sfdc-qbranch/MetaCI
78ac0d2bccd2db381998321ebd71029dd5d9ab39
[ "BSD-3-Clause" ]
48
2018-10-24T14:52:06.000Z
2022-03-25T21:14:50.000Z
metaci/plan/migrations/0004_remove_plan_devhub.py
sfdc-qbranch/MetaCI
78ac0d2bccd2db381998321ebd71029dd5d9ab39
[ "BSD-3-Clause" ]
2,034
2018-10-31T20:59:16.000Z
2022-03-22T21:38:03.000Z
metaci/plan/migrations/0004_remove_plan_devhub.py
sfdc-qbranch/MetaCI
78ac0d2bccd2db381998321ebd71029dd5d9ab39
[ "BSD-3-Clause" ]
27
2018-12-24T18:16:23.000Z
2021-12-15T17:57:27.000Z
# -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2017-05-18 18:39
from __future__ import unicode_literals

from django.db import migrations


class Migration(migrations.Migration):
    # skip non-existent 0003_planschedule

    dependencies = [("plan", "0002_plan_devhub")]

    operations = [migrations.RemoveField(model_name="plan", name="devhub")]
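This migration drops the devhub field from the plan app's Plan model and, per its comment, depends directly on 0002 because 0003_planschedule does not exist. A hypothetical way to apply it programmatically; the usual route is the manage.py CLI:

# Hypothetical: apply this app's migrations up to 0004 from Python;
# equivalent to `python manage.py migrate plan 0004_remove_plan_devhub`.
from django.core.management import call_command

call_command("migrate", "plan", "0004_remove_plan_devhub")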
25.571429
75
0.72905
47
358
5.361702
0.765957
0
0
0
0
0
0
0
0
0
0
0.081699
0.145251
358
13
76
27.538462
0.74183
0.290503
0
0
1
0
0.12
0
0
0
0
0
0
1
0
false
0
0.4
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
3
3d3fbb20a22c4b3af53283c42432e5fe77b2875f
130
py
Python
HelloPySelenium/pytest1.py
lintyleo/seleniumpro
feb643143b6b19a0f1d99151c74504edf736d110
[ "MIT" ]
10
2017-04-05T19:57:51.000Z
2020-12-04T06:36:46.000Z
HelloPySelenium/pytest1.py
lintyleo/seleniumpro
feb643143b6b19a0f1d99151c74504edf736d110
[ "MIT" ]
null
null
null
HelloPySelenium/pytest1.py
lintyleo/seleniumpro
feb643143b6b19a0f1d99151c74504edf736d110
[ "MIT" ]
6
2017-11-01T00:54:50.000Z
2019-12-18T05:26:47.000Z
# webdriver: the webdriver module contains a class named Firefox
from selenium import webdriver

xx = webdriver.Firefox()
xx.get("http://www.51testing.com")
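A slightly more defensive sketch of the same snippet; it assumes geckodriver is installed on PATH and uses only standard Selenium calls (get, title, quit):

from selenium import webdriver

driver = webdriver.Firefox()  # assumes geckodriver is available on PATH
try:
    driver.get("http://www.51testing.com")
    print(driver.title)       # quick sanity check that the page loaded
finally:
    driver.quit()             # always release the browser session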
21.666667
37
0.776923
16
130
6.3125
0.8125
0
0
0
0
0
0
0
0
0
0
0.017094
0.1
130
5
38
26
0.846154
0.269231
0
0
0
0
0.258065
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
3d4dc2746907fa24b9f8d89a81d0e4c48e0bf480
69,350
py
Python
src/hedera_proto/response_code_pb2.py
HbarStudio/hedera-protobufs-python
f8a503d2c4c5b7c441ddf48607f7ee563b3f931a
[ "Apache-2.0" ]
null
null
null
src/hedera_proto/response_code_pb2.py
HbarStudio/hedera-protobufs-python
f8a503d2c4c5b7c441ddf48607f7ee563b3f931a
[ "Apache-2.0" ]
null
null
null
src/hedera_proto/response_code_pb2.py
HbarStudio/hedera-protobufs-python
f8a503d2c4c5b7c441ddf48607f7ee563b3f931a
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- # Generated by the protocol buffer compiler. DO NOT EDIT! # source: response_code.proto """Generated protocol buffer code.""" from google.protobuf.internal import enum_type_wrapper from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database # @@protoc_insertion_point(imports) _sym_db = _symbol_database.Default() DESCRIPTOR = _descriptor.FileDescriptor( name='response_code.proto', package='proto', syntax='proto3', serialized_options=b'\n\"com.hederahashgraph.api.proto.javaP\001', create_key=_descriptor._internal_create_key, serialized_pb=b'\n\x13response_code.proto\x12\x05proto*\xed<\n\x10ResponseCodeEnum\x12\x06\n\x02OK\x10\x00\x12\x17\n\x13INVALID_TRANSACTION\x10\x01\x12\x1b\n\x17PAYER_ACCOUNT_NOT_FOUND\x10\x02\x12\x18\n\x14INVALID_NODE_ACCOUNT\x10\x03\x12\x17\n\x13TRANSACTION_EXPIRED\x10\x04\x12\x1d\n\x19INVALID_TRANSACTION_START\x10\x05\x12 \n\x1cINVALID_TRANSACTION_DURATION\x10\x06\x12\x15\n\x11INVALID_SIGNATURE\x10\x07\x12\x11\n\rMEMO_TOO_LONG\x10\x08\x12\x17\n\x13INSUFFICIENT_TX_FEE\x10\t\x12\x1e\n\x1aINSUFFICIENT_PAYER_BALANCE\x10\n\x12\x19\n\x15\x44UPLICATE_TRANSACTION\x10\x0b\x12\x08\n\x04\x42USY\x10\x0c\x12\x11\n\rNOT_SUPPORTED\x10\r\x12\x13\n\x0fINVALID_FILE_ID\x10\x0e\x12\x16\n\x12INVALID_ACCOUNT_ID\x10\x0f\x12\x17\n\x13INVALID_CONTRACT_ID\x10\x10\x12\x1a\n\x16INVALID_TRANSACTION_ID\x10\x11\x12\x15\n\x11RECEIPT_NOT_FOUND\x10\x12\x12\x14\n\x10RECORD_NOT_FOUND\x10\x13\x12\x17\n\x13INVALID_SOLIDITY_ID\x10\x14\x12\x0b\n\x07UNKNOWN\x10\x15\x12\x0b\n\x07SUCCESS\x10\x16\x12\x10\n\x0c\x46\x41IL_INVALID\x10\x17\x12\x0c\n\x08\x46\x41IL_FEE\x10\x18\x12\x10\n\x0c\x46\x41IL_BALANCE\x10\x19\x12\x10\n\x0cKEY_REQUIRED\x10\x1a\x12\x10\n\x0c\x42\x41\x44_ENCODING\x10\x1b\x12 \n\x1cINSUFFICIENT_ACCOUNT_BALANCE\x10\x1c\x12\x1c\n\x18INVALID_SOLIDITY_ADDRESS\x10\x1d\x12\x14\n\x10INSUFFICIENT_GAS\x10\x1e\x12 \n\x1c\x43ONTRACT_SIZE_LIMIT_EXCEEDED\x10\x1f\x12%\n!LOCAL_CALL_MODIFICATION_EXCEPTION\x10 \x12\x1c\n\x18\x43ONTRACT_REVERT_EXECUTED\x10!\x12 
\n\x1c\x43ONTRACT_EXECUTION_EXCEPTION\x10\"\x12\"\n\x1eINVALID_RECEIVING_NODE_ACCOUNT\x10#\x12\x18\n\x14MISSING_QUERY_HEADER\x10$\x12\x19\n\x15\x41\x43\x43OUNT_UPDATE_FAILED\x10%\x12\x18\n\x14INVALID_KEY_ENCODING\x10&\x12\x19\n\x15NULL_SOLIDITY_ADDRESS\x10\'\x12\x1a\n\x16\x43ONTRACT_UPDATE_FAILED\x10(\x12\x18\n\x14INVALID_QUERY_HEADER\x10)\x12\x19\n\x15INVALID_FEE_SUBMITTED\x10*\x12\x1b\n\x17INVALID_PAYER_SIGNATURE\x10+\x12\x14\n\x10KEY_NOT_PROVIDED\x10,\x12\x1b\n\x17INVALID_EXPIRATION_TIME\x10-\x12\x0f\n\x0bNO_WACL_KEY\x10.\x12\x16\n\x12\x46ILE_CONTENT_EMPTY\x10/\x12\x1b\n\x17INVALID_ACCOUNT_AMOUNTS\x10\x30\x12\x1a\n\x16\x45MPTY_TRANSACTION_BODY\x10\x31\x12\x1c\n\x18INVALID_TRANSACTION_BODY\x10\x32\x12*\n&INVALID_SIGNATURE_TYPE_MISMATCHING_KEY\x10\x33\x12+\n\'INVALID_SIGNATURE_COUNT_MISMATCHING_KEY\x10\x34\x12\x18\n\x14\x45MPTY_LIVE_HASH_BODY\x10\x35\x12\x13\n\x0f\x45MPTY_LIVE_HASH\x10\x36\x12\x18\n\x14\x45MPTY_LIVE_HASH_KEYS\x10\x37\x12\x1a\n\x16INVALID_LIVE_HASH_SIZE\x10\x38\x12\x14\n\x10\x45MPTY_QUERY_BODY\x10\x39\x12\x19\n\x15\x45MPTY_LIVE_HASH_QUERY\x10:\x12\x17\n\x13LIVE_HASH_NOT_FOUND\x10;\x12\x1d\n\x19\x41\x43\x43OUNT_ID_DOES_NOT_EXIST\x10<\x12\x1c\n\x18LIVE_HASH_ALREADY_EXISTS\x10=\x12\x15\n\x11INVALID_FILE_WACL\x10>\x12\x18\n\x14SERIALIZATION_FAILED\x10?\x12\x18\n\x14TRANSACTION_OVERSIZE\x10@\x12\x1f\n\x1bTRANSACTION_TOO_MANY_LAYERS\x10\x41\x12\x14\n\x10\x43ONTRACT_DELETED\x10\x42\x12\x17\n\x13PLATFORM_NOT_ACTIVE\x10\x43\x12\x17\n\x13KEY_PREFIX_MISMATCH\x10\x44\x12$\n PLATFORM_TRANSACTION_NOT_CREATED\x10\x45\x12\x1a\n\x16INVALID_RENEWAL_PERIOD\x10\x46\x12\x1c\n\x18INVALID_PAYER_ACCOUNT_ID\x10G\x12\x13\n\x0f\x41\x43\x43OUNT_DELETED\x10H\x12\x10\n\x0c\x46ILE_DELETED\x10I\x12\'\n#ACCOUNT_REPEATED_IN_ACCOUNT_AMOUNTS\x10J\x12$\n SETTING_NEGATIVE_ACCOUNT_BALANCE\x10K\x12\x15\n\x11OBTAINER_REQUIRED\x10L\x12\x1d\n\x19OBTAINER_SAME_CONTRACT_ID\x10M\x12\x1b\n\x17OBTAINER_DOES_NOT_EXIST\x10N\x12 \n\x1cMODIFYING_IMMUTABLE_CONTRACT\x10O\x12\x19\n\x15\x46ILE_SYSTEM_EXCEPTION\x10P\x12#\n\x1f\x41UTORENEW_DURATION_NOT_IN_RANGE\x10Q\x12\x1d\n\x19\x45RROR_DECODING_BYTESTRING\x10R\x12\x17\n\x13\x43ONTRACT_FILE_EMPTY\x10S\x12\x1b\n\x17\x43ONTRACT_BYTECODE_EMPTY\x10T\x12\x1b\n\x17INVALID_INITIAL_BALANCE\x10U\x12(\n INVALID_RECEIVE_RECORD_THRESHOLD\x10V\x1a\x02\x08\x01\x12%\n\x1dINVALID_SEND_RECORD_THRESHOLD\x10W\x1a\x02\x08\x01\x12\"\n\x1e\x41\x43\x43OUNT_IS_NOT_GENESIS_ACCOUNT\x10X\x12\x1e\n\x1aPAYER_ACCOUNT_UNAUTHORIZED\x10Y\x12#\n\x1fINVALID_FREEZE_TRANSACTION_BODY\x10Z\x12%\n!FREEZE_TRANSACTION_BODY_NOT_FOUND\x10[\x12%\n!TRANSFER_LIST_SIZE_LIMIT_EXCEEDED\x10\\\x12\x1e\n\x1aRESULT_SIZE_LIMIT_EXCEEDED\x10]\x12\x17\n\x13NOT_SPECIAL_ACCOUNT\x10^\x12\x19\n\x15\x43ONTRACT_NEGATIVE_GAS\x10_\x12\x1b\n\x17\x43ONTRACT_NEGATIVE_VALUE\x10`\x12\x14\n\x10INVALID_FEE_FILE\x10\x61\x12\x1e\n\x1aINVALID_EXCHANGE_RATE_FILE\x10\x62\x12\x1f\n\x1bINSUFFICIENT_LOCAL_CALL_GAS\x10\x63\x12 \n\x1c\x45NTITY_NOT_ALLOWED_TO_DELETE\x10\x64\x12\x18\n\x14\x41UTHORIZATION_FAILED\x10\x65\x12\x1f\n\x1b\x46ILE_UPLOADED_PROTO_INVALID\x10\x66\x12)\n%FILE_UPLOADED_PROTO_NOT_SAVED_TO_DISK\x10g\x12#\n\x1f\x46\x45\x45_SCHEDULE_FILE_PART_UPLOADED\x10h\x12\'\n#EXCHANGE_RATE_CHANGE_LIMIT_EXCEEDED\x10i\x12!\n\x1dMAX_CONTRACT_STORAGE_EXCEEDED\x10j\x12+\n\'TRANSFER_ACCOUNT_SAME_AS_DELETE_ACCOUNT\x10k\x12 \n\x1cTOTAL_LEDGER_BALANCE_INVALID\x10l\x12$\n 
EXPIRATION_REDUCTION_NOT_ALLOWED\x10n\x12\x1a\n\x16MAX_GAS_LIMIT_EXCEEDED\x10o\x12\x1a\n\x16MAX_FILE_SIZE_EXCEEDED\x10p\x12\x19\n\x15RECEIVER_SIG_REQUIRED\x10q\x12\x15\n\x10INVALID_TOPIC_ID\x10\x96\x01\x12\x16\n\x11INVALID_ADMIN_KEY\x10\x9b\x01\x12\x17\n\x12INVALID_SUBMIT_KEY\x10\x9c\x01\x12\x11\n\x0cUNAUTHORIZED\x10\x9d\x01\x12\x1a\n\x15INVALID_TOPIC_MESSAGE\x10\x9e\x01\x12\x1e\n\x19INVALID_AUTORENEW_ACCOUNT\x10\x9f\x01\x12\"\n\x1d\x41UTORENEW_ACCOUNT_NOT_ALLOWED\x10\xa0\x01\x12\x12\n\rTOPIC_EXPIRED\x10\xa2\x01\x12\x19\n\x14INVALID_CHUNK_NUMBER\x10\xa3\x01\x12!\n\x1cINVALID_CHUNK_TRANSACTION_ID\x10\xa4\x01\x12\x1d\n\x18\x41\x43\x43OUNT_FROZEN_FOR_TOKEN\x10\xa5\x01\x12&\n!TOKENS_PER_ACCOUNT_LIMIT_EXCEEDED\x10\xa6\x01\x12\x15\n\x10INVALID_TOKEN_ID\x10\xa7\x01\x12\x1b\n\x16INVALID_TOKEN_DECIMALS\x10\xa8\x01\x12!\n\x1cINVALID_TOKEN_INITIAL_SUPPLY\x10\xa9\x01\x12\'\n\"INVALID_TREASURY_ACCOUNT_FOR_TOKEN\x10\xaa\x01\x12\x19\n\x14INVALID_TOKEN_SYMBOL\x10\xab\x01\x12\x1c\n\x17TOKEN_HAS_NO_FREEZE_KEY\x10\xac\x01\x12%\n TRANSFERS_NOT_ZERO_SUM_FOR_TOKEN\x10\xad\x01\x12\x19\n\x14MISSING_TOKEN_SYMBOL\x10\xae\x01\x12\x1a\n\x15TOKEN_SYMBOL_TOO_LONG\x10\xaf\x01\x12&\n!ACCOUNT_KYC_NOT_GRANTED_FOR_TOKEN\x10\xb0\x01\x12\x19\n\x14TOKEN_HAS_NO_KYC_KEY\x10\xb1\x01\x12\x1f\n\x1aINSUFFICIENT_TOKEN_BALANCE\x10\xb2\x01\x12\x16\n\x11TOKEN_WAS_DELETED\x10\xb3\x01\x12\x1c\n\x17TOKEN_HAS_NO_SUPPLY_KEY\x10\xb4\x01\x12\x1a\n\x15TOKEN_HAS_NO_WIPE_KEY\x10\xb5\x01\x12\x1e\n\x19INVALID_TOKEN_MINT_AMOUNT\x10\xb6\x01\x12\x1e\n\x19INVALID_TOKEN_BURN_AMOUNT\x10\xb7\x01\x12$\n\x1fTOKEN_NOT_ASSOCIATED_TO_ACCOUNT\x10\xb8\x01\x12\'\n\"CANNOT_WIPE_TOKEN_TREASURY_ACCOUNT\x10\xb9\x01\x12\x14\n\x0fINVALID_KYC_KEY\x10\xba\x01\x12\x15\n\x10INVALID_WIPE_KEY\x10\xbb\x01\x12\x17\n\x12INVALID_FREEZE_KEY\x10\xbc\x01\x12\x17\n\x12INVALID_SUPPLY_KEY\x10\xbd\x01\x12\x17\n\x12MISSING_TOKEN_NAME\x10\xbe\x01\x12\x18\n\x13TOKEN_NAME_TOO_LONG\x10\xbf\x01\x12\x1a\n\x15INVALID_WIPING_AMOUNT\x10\xc0\x01\x12\x17\n\x12TOKEN_IS_IMMUTABLE\x10\xc1\x01\x12(\n#TOKEN_ALREADY_ASSOCIATED_TO_ACCOUNT\x10\xc2\x01\x12-\n(TRANSACTION_REQUIRES_ZERO_TOKEN_BALANCES\x10\xc3\x01\x12\x18\n\x13\x41\x43\x43OUNT_IS_TREASURY\x10\xc4\x01\x12$\n\x1fTOKEN_ID_REPEATED_IN_TOKEN_LIST\x10\xc5\x01\x12,\n\'TOKEN_TRANSFER_LIST_SIZE_LIMIT_EXCEEDED\x10\xc6\x01\x12\x1e\n\x19\x45MPTY_TOKEN_TRANSFER_BODY\x10\xc7\x01\x12)\n$EMPTY_TOKEN_TRANSFER_ACCOUNT_AMOUNTS\x10\xc8\x01\x12\x18\n\x13INVALID_SCHEDULE_ID\x10\xc9\x01\x12\x1a\n\x15SCHEDULE_IS_IMMUTABLE\x10\xca\x01\x12\x1e\n\x19INVALID_SCHEDULE_PAYER_ID\x10\xcb\x01\x12 \n\x1bINVALID_SCHEDULE_ACCOUNT_ID\x10\xcc\x01\x12\x1c\n\x17NO_NEW_VALID_SIGNATURES\x10\xcd\x01\x12\"\n\x1dUNRESOLVABLE_REQUIRED_SIGNERS\x10\xce\x01\x12+\n&SCHEDULED_TRANSACTION_NOT_IN_WHITELIST\x10\xcf\x01\x12!\n\x1cSOME_SIGNATURES_WERE_INVALID\x10\xd0\x01\x12%\n TRANSACTION_ID_FIELD_NOT_ALLOWED\x10\xd1\x01\x12\'\n\"IDENTICAL_SCHEDULE_ALREADY_CREATED\x10\xd2\x01\x12 \n\x1bINVALID_ZERO_BYTE_IN_STRING\x10\xd3\x01\x12\x1d\n\x18SCHEDULE_ALREADY_DELETED\x10\xd4\x01\x12\x1e\n\x19SCHEDULE_ALREADY_EXECUTED\x10\xd5\x01\x12\x1b\n\x16MESSAGE_SIZE_TOO_LARGE\x10\xd6\x01\x12(\n#OPERATION_REPEATED_IN_BUCKET_GROUPS\x10\xd7\x01\x12\x1d\n\x18\x42UCKET_CAPACITY_OVERFLOW\x10\xd8\x01\x12/\n*NODE_CAPACITY_NOT_SUFFICIENT_FOR_OPERATION\x10\xd9\x01\x12\"\n\x1d\x42UCKET_HAS_NO_THROTTLE_GROUPS\x10\xda\x01\x12(\n#THROTTLE_GROUP_HAS_ZERO_OPS_PER_SEC\x10\xdb\x01\x12+\n&SUCCESS_BUT_MISSING_EXPECTED_OPERATION\x10\xdc\x01\x12%\n 
UNPARSEABLE_THROTTLE_DEFINITIONS\x10\xdd\x01\x12!\n\x1cINVALID_THROTTLE_DEFINITIONS\x10\xde\x01\x12(\n#ACCOUNT_EXPIRED_AND_PENDING_REMOVAL\x10\xdf\x01\x12\x1d\n\x18INVALID_TOKEN_MAX_SUPPLY\x10\xe0\x01\x12$\n\x1fINVALID_TOKEN_NFT_SERIAL_NUMBER\x10\xe1\x01\x12\x13\n\x0eINVALID_NFT_ID\x10\xe2\x01\x12\x16\n\x11METADATA_TOO_LONG\x10\xe3\x01\x12\x1e\n\x19\x42\x41TCH_SIZE_LIMIT_EXCEEDED\x10\xe4\x01\x12\x18\n\x13INVALID_QUERY_RANGE\x10\xe5\x01\x12\x1d\n\x18\x46RACTION_DIVIDES_BY_ZERO\x10\xe6\x01\x12\x32\n)INSUFFICIENT_PAYER_BALANCE_FOR_CUSTOM_FEE\x10\xe7\x01\x1a\x02\x08\x01\x12\x1e\n\x19\x43USTOM_FEES_LIST_TOO_LONG\x10\xe8\x01\x12!\n\x1cINVALID_CUSTOM_FEE_COLLECTOR\x10\xe9\x01\x12$\n\x1fINVALID_TOKEN_ID_IN_CUSTOM_FEES\x10\xea\x01\x12*\n%TOKEN_NOT_ASSOCIATED_TO_FEE_COLLECTOR\x10\xeb\x01\x12\x1d\n\x18TOKEN_MAX_SUPPLY_REACHED\x10\xec\x01\x12&\n!SENDER_DOES_NOT_OWN_NFT_SERIAL_NO\x10\xed\x01\x12#\n\x1e\x43USTOM_FEE_NOT_FULLY_SPECIFIED\x10\xee\x01\x12 \n\x1b\x43USTOM_FEE_MUST_BE_POSITIVE\x10\xef\x01\x12\"\n\x1dTOKEN_HAS_NO_FEE_SCHEDULE_KEY\x10\xf0\x01\x12%\n CUSTOM_FEE_OUTSIDE_NUMERIC_RANGE\x10\xf1\x01\x12\'\n\"ROYALTY_FRACTION_CANNOT_EXCEED_ONE\x10\xf2\x01\x12\x33\n.FRACTIONAL_FEE_MAX_AMOUNT_LESS_THAN_MIN_AMOUNT\x10\xf3\x01\x12(\n#CUSTOM_SCHEDULE_ALREADY_HAS_NO_FEES\x10\xf4\x01\x12\x34\n/CUSTOM_FEE_DENOMINATION_MUST_BE_FUNGIBLE_COMMON\x10\xf5\x01\x12;\n6CUSTOM_FRACTIONAL_FEE_ONLY_ALLOWED_FOR_FUNGIBLE_COMMON\x10\xf6\x01\x12$\n\x1fINVALID_CUSTOM_FEE_SCHEDULE_KEY\x10\xf7\x01\x12 \n\x1bINVALID_TOKEN_MINT_METADATA\x10\xf8\x01\x12 \n\x1bINVALID_TOKEN_BURN_METADATA\x10\xf9\x01\x12%\n CURRENT_TREASURY_STILL_OWNS_NFTS\x10\xfa\x01\x12\x1c\n\x17\x41\x43\x43OUNT_STILL_OWNS_NFTS\x10\xfb\x01\x12!\n\x1cTREASURY_MUST_OWN_BURNED_NFT\x10\xfc\x01\x12#\n\x1e\x41\x43\x43OUNT_DOES_NOT_OWN_WIPED_NFT\x10\xfd\x01\x12>\n9ACCOUNT_AMOUNT_TRANSFERS_ONLY_ALLOWED_FOR_FUNGIBLE_COMMON\x10\xfe\x01\x12.\n)MAX_NFTS_IN_PRICE_REGIME_HAVE_BEEN_MINTED\x10\xff\x01\x12\x1a\n\x15PAYER_ACCOUNT_DELETED\x10\x80\x02\x12\x35\n0CUSTOM_FEE_CHARGING_EXCEEDED_MAX_RECURSION_DEPTH\x10\x81\x02\x12\x35\n0CUSTOM_FEE_CHARGING_EXCEEDED_MAX_ACCOUNT_AMOUNTS\x10\x82\x02\x12\x37\n2INSUFFICIENT_SENDER_ACCOUNT_BALANCE_FOR_CUSTOM_FEE\x10\x83\x02\x12 \n\x1bSERIAL_NUMBER_LIMIT_REACHED\x10\x84\x02\x12<\n7CUSTOM_ROYALTY_FEE_ONLY_ALLOWED_FOR_NON_FUNGIBLE_UNIQUE\x10\x85\x02\x12(\n#NO_REMAINING_AUTOMATIC_ASSOCIATIONS\x10\x86\x02\x12\x37\n2EXISTING_AUTOMATIC_ASSOCIATIONS_EXCEED_GIVEN_LIMIT\x10\x87\x02\x12\x43\n>REQUESTED_NUM_AUTOMATIC_ASSOCIATIONS_EXCEEDS_ASSOCIATION_LIMIT\x10\x88\x02\x12\x14\n\x0fTOKEN_IS_PAUSED\x10\x89\x02\x12\x1b\n\x16TOKEN_HAS_NO_PAUSE_KEY\x10\x8a\x02\x12\x16\n\x11INVALID_PAUSE_KEY\x10\x8b\x02\x12&\n!FREEZE_UPDATE_FILE_DOES_NOT_EXIST\x10\x8c\x02\x12+\n&FREEZE_UPDATE_FILE_HASH_DOES_NOT_MATCH\x10\x8d\x02\x12!\n\x1cNO_UPGRADE_HAS_BEEN_PREPARED\x10\x8e\x02\x12\x1b\n\x16NO_FREEZE_IS_SCHEDULED\x10\x8f\x02\x12\x33\n.UPDATE_FILE_HASH_CHANGED_SINCE_PREPARE_UPGRADE\x10\x90\x02\x12%\n FREEZE_START_TIME_MUST_BE_FUTURE\x10\x91\x02\x12&\n!PREPARED_UPDATE_FILE_IS_IMMUTABLE\x10\x92\x02\x12\x1d\n\x18\x46REEZE_ALREADY_SCHEDULED\x10\x93\x02\x12\x1f\n\x1a\x46REEZE_UPGRADE_IN_PROGRESS\x10\x94\x02\x12+\n&UPDATE_FILE_ID_DOES_NOT_MATCH_PREPARED\x10\x95\x02\x12-\n(UPDATE_FILE_HASH_DOES_NOT_MATCH_PREPARED\x10\x96\x02\x12\x1c\n\x17\x43ONSENSUS_GAS_EXHAUSTED\x10\x97\x02\x12\x15\n\x10REVERTED_SUCCESS\x10\x98\x02\x12.\n)MAX_STORAGE_IN_PRICE_REGIME_HAS_BEEN_USED\x10\x99\x02\x12\x16\n\x11INVALID_ALIAS_KEY\x10\x9a\x02\x42&\n\"com.hederahashgraph.api.proto.javaP\x01\x62\x06proto3' ) 
_RESPONSECODEENUM = _descriptor.EnumDescriptor( name='ResponseCodeEnum', full_name='proto.ResponseCodeEnum', filename=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key, values=[ _descriptor.EnumValueDescriptor( name='OK', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_TRANSACTION', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='PAYER_ACCOUNT_NOT_FOUND', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_NODE_ACCOUNT', index=3, number=3, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='TRANSACTION_EXPIRED', index=4, number=4, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_TRANSACTION_START', index=5, number=5, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_TRANSACTION_DURATION', index=6, number=6, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_SIGNATURE', index=7, number=7, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='MEMO_TOO_LONG', index=8, number=8, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INSUFFICIENT_TX_FEE', index=9, number=9, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INSUFFICIENT_PAYER_BALANCE', index=10, number=10, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='DUPLICATE_TRANSACTION', index=11, number=11, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='BUSY', index=12, number=12, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='NOT_SUPPORTED', index=13, number=13, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_FILE_ID', index=14, number=14, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_ACCOUNT_ID', index=15, number=15, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_CONTRACT_ID', index=16, number=16, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_TRANSACTION_ID', index=17, number=17, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='RECEIPT_NOT_FOUND', index=18, number=18, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='RECORD_NOT_FOUND', index=19, number=19, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_SOLIDITY_ID', index=20, number=20, serialized_options=None, type=None, 
create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='UNKNOWN', index=21, number=21, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='SUCCESS', index=22, number=22, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='FAIL_INVALID', index=23, number=23, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='FAIL_FEE', index=24, number=24, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='FAIL_BALANCE', index=25, number=25, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='KEY_REQUIRED', index=26, number=26, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='BAD_ENCODING', index=27, number=27, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INSUFFICIENT_ACCOUNT_BALANCE', index=28, number=28, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_SOLIDITY_ADDRESS', index=29, number=29, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INSUFFICIENT_GAS', index=30, number=30, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='CONTRACT_SIZE_LIMIT_EXCEEDED', index=31, number=31, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='LOCAL_CALL_MODIFICATION_EXCEPTION', index=32, number=32, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='CONTRACT_REVERT_EXECUTED', index=33, number=33, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='CONTRACT_EXECUTION_EXCEPTION', index=34, number=34, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_RECEIVING_NODE_ACCOUNT', index=35, number=35, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='MISSING_QUERY_HEADER', index=36, number=36, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='ACCOUNT_UPDATE_FAILED', index=37, number=37, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_KEY_ENCODING', index=38, number=38, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='NULL_SOLIDITY_ADDRESS', index=39, number=39, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='CONTRACT_UPDATE_FAILED', index=40, number=40, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_QUERY_HEADER', index=41, number=41, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( 
name='INVALID_FEE_SUBMITTED', index=42, number=42, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_PAYER_SIGNATURE', index=43, number=43, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='KEY_NOT_PROVIDED', index=44, number=44, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_EXPIRATION_TIME', index=45, number=45, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='NO_WACL_KEY', index=46, number=46, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='FILE_CONTENT_EMPTY', index=47, number=47, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_ACCOUNT_AMOUNTS', index=48, number=48, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='EMPTY_TRANSACTION_BODY', index=49, number=49, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_TRANSACTION_BODY', index=50, number=50, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_SIGNATURE_TYPE_MISMATCHING_KEY', index=51, number=51, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_SIGNATURE_COUNT_MISMATCHING_KEY', index=52, number=52, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='EMPTY_LIVE_HASH_BODY', index=53, number=53, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='EMPTY_LIVE_HASH', index=54, number=54, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='EMPTY_LIVE_HASH_KEYS', index=55, number=55, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_LIVE_HASH_SIZE', index=56, number=56, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='EMPTY_QUERY_BODY', index=57, number=57, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='EMPTY_LIVE_HASH_QUERY', index=58, number=58, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='LIVE_HASH_NOT_FOUND', index=59, number=59, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='ACCOUNT_ID_DOES_NOT_EXIST', index=60, number=60, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='LIVE_HASH_ALREADY_EXISTS', index=61, number=61, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_FILE_WACL', index=62, number=62, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='SERIALIZATION_FAILED', index=63, number=63, 
serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='TRANSACTION_OVERSIZE', index=64, number=64, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='TRANSACTION_TOO_MANY_LAYERS', index=65, number=65, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='CONTRACT_DELETED', index=66, number=66, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='PLATFORM_NOT_ACTIVE', index=67, number=67, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='KEY_PREFIX_MISMATCH', index=68, number=68, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='PLATFORM_TRANSACTION_NOT_CREATED', index=69, number=69, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_RENEWAL_PERIOD', index=70, number=70, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_PAYER_ACCOUNT_ID', index=71, number=71, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='ACCOUNT_DELETED', index=72, number=72, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='FILE_DELETED', index=73, number=73, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='ACCOUNT_REPEATED_IN_ACCOUNT_AMOUNTS', index=74, number=74, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='SETTING_NEGATIVE_ACCOUNT_BALANCE', index=75, number=75, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='OBTAINER_REQUIRED', index=76, number=76, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='OBTAINER_SAME_CONTRACT_ID', index=77, number=77, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='OBTAINER_DOES_NOT_EXIST', index=78, number=78, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='MODIFYING_IMMUTABLE_CONTRACT', index=79, number=79, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='FILE_SYSTEM_EXCEPTION', index=80, number=80, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='AUTORENEW_DURATION_NOT_IN_RANGE', index=81, number=81, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='ERROR_DECODING_BYTESTRING', index=82, number=82, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='CONTRACT_FILE_EMPTY', index=83, number=83, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='CONTRACT_BYTECODE_EMPTY', index=84, number=84, serialized_options=None, 
type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_INITIAL_BALANCE', index=85, number=85, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_RECEIVE_RECORD_THRESHOLD', index=86, number=86, serialized_options=b'\010\001', type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_SEND_RECORD_THRESHOLD', index=87, number=87, serialized_options=b'\010\001', type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='ACCOUNT_IS_NOT_GENESIS_ACCOUNT', index=88, number=88, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='PAYER_ACCOUNT_UNAUTHORIZED', index=89, number=89, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_FREEZE_TRANSACTION_BODY', index=90, number=90, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='FREEZE_TRANSACTION_BODY_NOT_FOUND', index=91, number=91, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='TRANSFER_LIST_SIZE_LIMIT_EXCEEDED', index=92, number=92, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='RESULT_SIZE_LIMIT_EXCEEDED', index=93, number=93, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='NOT_SPECIAL_ACCOUNT', index=94, number=94, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='CONTRACT_NEGATIVE_GAS', index=95, number=95, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='CONTRACT_NEGATIVE_VALUE', index=96, number=96, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_FEE_FILE', index=97, number=97, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_EXCHANGE_RATE_FILE', index=98, number=98, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INSUFFICIENT_LOCAL_CALL_GAS', index=99, number=99, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='ENTITY_NOT_ALLOWED_TO_DELETE', index=100, number=100, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='AUTHORIZATION_FAILED', index=101, number=101, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='FILE_UPLOADED_PROTO_INVALID', index=102, number=102, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='FILE_UPLOADED_PROTO_NOT_SAVED_TO_DISK', index=103, number=103, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='FEE_SCHEDULE_FILE_PART_UPLOADED', index=104, number=104, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( 
name='EXCHANGE_RATE_CHANGE_LIMIT_EXCEEDED', index=105, number=105, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='MAX_CONTRACT_STORAGE_EXCEEDED', index=106, number=106, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='TRANSFER_ACCOUNT_SAME_AS_DELETE_ACCOUNT', index=107, number=107, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='TOTAL_LEDGER_BALANCE_INVALID', index=108, number=108, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='EXPIRATION_REDUCTION_NOT_ALLOWED', index=109, number=110, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='MAX_GAS_LIMIT_EXCEEDED', index=110, number=111, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='MAX_FILE_SIZE_EXCEEDED', index=111, number=112, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='RECEIVER_SIG_REQUIRED', index=112, number=113, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_TOPIC_ID', index=113, number=150, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_ADMIN_KEY', index=114, number=155, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_SUBMIT_KEY', index=115, number=156, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='UNAUTHORIZED', index=116, number=157, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_TOPIC_MESSAGE', index=117, number=158, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_AUTORENEW_ACCOUNT', index=118, number=159, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='AUTORENEW_ACCOUNT_NOT_ALLOWED', index=119, number=160, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='TOPIC_EXPIRED', index=120, number=162, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_CHUNK_NUMBER', index=121, number=163, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_CHUNK_TRANSACTION_ID', index=122, number=164, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='ACCOUNT_FROZEN_FOR_TOKEN', index=123, number=165, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='TOKENS_PER_ACCOUNT_LIMIT_EXCEEDED', index=124, number=166, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_TOKEN_ID', index=125, number=167, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), 
_descriptor.EnumValueDescriptor( name='INVALID_TOKEN_DECIMALS', index=126, number=168, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_TOKEN_INITIAL_SUPPLY', index=127, number=169, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_TREASURY_ACCOUNT_FOR_TOKEN', index=128, number=170, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_TOKEN_SYMBOL', index=129, number=171, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='TOKEN_HAS_NO_FREEZE_KEY', index=130, number=172, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='TRANSFERS_NOT_ZERO_SUM_FOR_TOKEN', index=131, number=173, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='MISSING_TOKEN_SYMBOL', index=132, number=174, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='TOKEN_SYMBOL_TOO_LONG', index=133, number=175, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='ACCOUNT_KYC_NOT_GRANTED_FOR_TOKEN', index=134, number=176, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='TOKEN_HAS_NO_KYC_KEY', index=135, number=177, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INSUFFICIENT_TOKEN_BALANCE', index=136, number=178, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='TOKEN_WAS_DELETED', index=137, number=179, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='TOKEN_HAS_NO_SUPPLY_KEY', index=138, number=180, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='TOKEN_HAS_NO_WIPE_KEY', index=139, number=181, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_TOKEN_MINT_AMOUNT', index=140, number=182, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_TOKEN_BURN_AMOUNT', index=141, number=183, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='TOKEN_NOT_ASSOCIATED_TO_ACCOUNT', index=142, number=184, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='CANNOT_WIPE_TOKEN_TREASURY_ACCOUNT', index=143, number=185, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_KYC_KEY', index=144, number=186, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_WIPE_KEY', index=145, number=187, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_FREEZE_KEY', index=146, number=188, serialized_options=None, type=None, 
create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_SUPPLY_KEY', index=147, number=189, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='MISSING_TOKEN_NAME', index=148, number=190, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='TOKEN_NAME_TOO_LONG', index=149, number=191, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_WIPING_AMOUNT', index=150, number=192, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='TOKEN_IS_IMMUTABLE', index=151, number=193, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='TOKEN_ALREADY_ASSOCIATED_TO_ACCOUNT', index=152, number=194, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='TRANSACTION_REQUIRES_ZERO_TOKEN_BALANCES', index=153, number=195, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='ACCOUNT_IS_TREASURY', index=154, number=196, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='TOKEN_ID_REPEATED_IN_TOKEN_LIST', index=155, number=197, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='TOKEN_TRANSFER_LIST_SIZE_LIMIT_EXCEEDED', index=156, number=198, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='EMPTY_TOKEN_TRANSFER_BODY', index=157, number=199, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='EMPTY_TOKEN_TRANSFER_ACCOUNT_AMOUNTS', index=158, number=200, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_SCHEDULE_ID', index=159, number=201, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='SCHEDULE_IS_IMMUTABLE', index=160, number=202, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_SCHEDULE_PAYER_ID', index=161, number=203, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_SCHEDULE_ACCOUNT_ID', index=162, number=204, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='NO_NEW_VALID_SIGNATURES', index=163, number=205, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='UNRESOLVABLE_REQUIRED_SIGNERS', index=164, number=206, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='SCHEDULED_TRANSACTION_NOT_IN_WHITELIST', index=165, number=207, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='SOME_SIGNATURES_WERE_INVALID', index=166, number=208, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( 
name='TRANSACTION_ID_FIELD_NOT_ALLOWED', index=167, number=209, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='IDENTICAL_SCHEDULE_ALREADY_CREATED', index=168, number=210, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_ZERO_BYTE_IN_STRING', index=169, number=211, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='SCHEDULE_ALREADY_DELETED', index=170, number=212, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='SCHEDULE_ALREADY_EXECUTED', index=171, number=213, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='MESSAGE_SIZE_TOO_LARGE', index=172, number=214, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='OPERATION_REPEATED_IN_BUCKET_GROUPS', index=173, number=215, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='BUCKET_CAPACITY_OVERFLOW', index=174, number=216, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='NODE_CAPACITY_NOT_SUFFICIENT_FOR_OPERATION', index=175, number=217, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='BUCKET_HAS_NO_THROTTLE_GROUPS', index=176, number=218, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='THROTTLE_GROUP_HAS_ZERO_OPS_PER_SEC', index=177, number=219, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='SUCCESS_BUT_MISSING_EXPECTED_OPERATION', index=178, number=220, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='UNPARSEABLE_THROTTLE_DEFINITIONS', index=179, number=221, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_THROTTLE_DEFINITIONS', index=180, number=222, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='ACCOUNT_EXPIRED_AND_PENDING_REMOVAL', index=181, number=223, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_TOKEN_MAX_SUPPLY', index=182, number=224, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_TOKEN_NFT_SERIAL_NUMBER', index=183, number=225, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_NFT_ID', index=184, number=226, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='METADATA_TOO_LONG', index=185, number=227, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='BATCH_SIZE_LIMIT_EXCEEDED', index=186, number=228, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_QUERY_RANGE', index=187, 
number=229, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='FRACTION_DIVIDES_BY_ZERO', index=188, number=230, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INSUFFICIENT_PAYER_BALANCE_FOR_CUSTOM_FEE', index=189, number=231, serialized_options=b'\010\001', type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='CUSTOM_FEES_LIST_TOO_LONG', index=190, number=232, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_CUSTOM_FEE_COLLECTOR', index=191, number=233, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_TOKEN_ID_IN_CUSTOM_FEES', index=192, number=234, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='TOKEN_NOT_ASSOCIATED_TO_FEE_COLLECTOR', index=193, number=235, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='TOKEN_MAX_SUPPLY_REACHED', index=194, number=236, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='SENDER_DOES_NOT_OWN_NFT_SERIAL_NO', index=195, number=237, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='CUSTOM_FEE_NOT_FULLY_SPECIFIED', index=196, number=238, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='CUSTOM_FEE_MUST_BE_POSITIVE', index=197, number=239, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='TOKEN_HAS_NO_FEE_SCHEDULE_KEY', index=198, number=240, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='CUSTOM_FEE_OUTSIDE_NUMERIC_RANGE', index=199, number=241, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='ROYALTY_FRACTION_CANNOT_EXCEED_ONE', index=200, number=242, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='FRACTIONAL_FEE_MAX_AMOUNT_LESS_THAN_MIN_AMOUNT', index=201, number=243, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='CUSTOM_SCHEDULE_ALREADY_HAS_NO_FEES', index=202, number=244, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='CUSTOM_FEE_DENOMINATION_MUST_BE_FUNGIBLE_COMMON', index=203, number=245, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='CUSTOM_FRACTIONAL_FEE_ONLY_ALLOWED_FOR_FUNGIBLE_COMMON', index=204, number=246, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_CUSTOM_FEE_SCHEDULE_KEY', index=205, number=247, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_TOKEN_MINT_METADATA', index=206, number=248, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), 
_descriptor.EnumValueDescriptor( name='INVALID_TOKEN_BURN_METADATA', index=207, number=249, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='CURRENT_TREASURY_STILL_OWNS_NFTS', index=208, number=250, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='ACCOUNT_STILL_OWNS_NFTS', index=209, number=251, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='TREASURY_MUST_OWN_BURNED_NFT', index=210, number=252, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='ACCOUNT_DOES_NOT_OWN_WIPED_NFT', index=211, number=253, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='ACCOUNT_AMOUNT_TRANSFERS_ONLY_ALLOWED_FOR_FUNGIBLE_COMMON', index=212, number=254, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='MAX_NFTS_IN_PRICE_REGIME_HAVE_BEEN_MINTED', index=213, number=255, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='PAYER_ACCOUNT_DELETED', index=214, number=256, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='CUSTOM_FEE_CHARGING_EXCEEDED_MAX_RECURSION_DEPTH', index=215, number=257, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='CUSTOM_FEE_CHARGING_EXCEEDED_MAX_ACCOUNT_AMOUNTS', index=216, number=258, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INSUFFICIENT_SENDER_ACCOUNT_BALANCE_FOR_CUSTOM_FEE', index=217, number=259, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='SERIAL_NUMBER_LIMIT_REACHED', index=218, number=260, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='CUSTOM_ROYALTY_FEE_ONLY_ALLOWED_FOR_NON_FUNGIBLE_UNIQUE', index=219, number=261, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='NO_REMAINING_AUTOMATIC_ASSOCIATIONS', index=220, number=262, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='EXISTING_AUTOMATIC_ASSOCIATIONS_EXCEED_GIVEN_LIMIT', index=221, number=263, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='REQUESTED_NUM_AUTOMATIC_ASSOCIATIONS_EXCEEDS_ASSOCIATION_LIMIT', index=222, number=264, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='TOKEN_IS_PAUSED', index=223, number=265, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='TOKEN_HAS_NO_PAUSE_KEY', index=224, number=266, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_PAUSE_KEY', index=225, number=267, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( 
name='FREEZE_UPDATE_FILE_DOES_NOT_EXIST', index=226, number=268, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='FREEZE_UPDATE_FILE_HASH_DOES_NOT_MATCH', index=227, number=269, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='NO_UPGRADE_HAS_BEEN_PREPARED', index=228, number=270, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='NO_FREEZE_IS_SCHEDULED', index=229, number=271, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='UPDATE_FILE_HASH_CHANGED_SINCE_PREPARE_UPGRADE', index=230, number=272, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='FREEZE_START_TIME_MUST_BE_FUTURE', index=231, number=273, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='PREPARED_UPDATE_FILE_IS_IMMUTABLE', index=232, number=274, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='FREEZE_ALREADY_SCHEDULED', index=233, number=275, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='FREEZE_UPGRADE_IN_PROGRESS', index=234, number=276, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='UPDATE_FILE_ID_DOES_NOT_MATCH_PREPARED', index=235, number=277, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='UPDATE_FILE_HASH_DOES_NOT_MATCH_PREPARED', index=236, number=278, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='CONSENSUS_GAS_EXHAUSTED', index=237, number=279, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='REVERTED_SUCCESS', index=238, number=280, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='MAX_STORAGE_IN_PRICE_REGIME_HAS_BEEN_USED', index=239, number=281, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), _descriptor.EnumValueDescriptor( name='INVALID_ALIAS_KEY', index=240, number=282, serialized_options=None, type=None, create_key=_descriptor._internal_create_key), ], containing_type=None, serialized_options=None, serialized_start=31, serialized_end=7820, ) _sym_db.RegisterEnumDescriptor(_RESPONSECODEENUM) ResponseCodeEnum = enum_type_wrapper.EnumTypeWrapper(_RESPONSECODEENUM) OK = 0 INVALID_TRANSACTION = 1 PAYER_ACCOUNT_NOT_FOUND = 2 INVALID_NODE_ACCOUNT = 3 TRANSACTION_EXPIRED = 4 INVALID_TRANSACTION_START = 5 INVALID_TRANSACTION_DURATION = 6 INVALID_SIGNATURE = 7 MEMO_TOO_LONG = 8 INSUFFICIENT_TX_FEE = 9 INSUFFICIENT_PAYER_BALANCE = 10 DUPLICATE_TRANSACTION = 11 BUSY = 12 NOT_SUPPORTED = 13 INVALID_FILE_ID = 14 INVALID_ACCOUNT_ID = 15 INVALID_CONTRACT_ID = 16 INVALID_TRANSACTION_ID = 17 RECEIPT_NOT_FOUND = 18 RECORD_NOT_FOUND = 19 INVALID_SOLIDITY_ID = 20 UNKNOWN = 21 SUCCESS = 22 FAIL_INVALID = 23 FAIL_FEE = 24 FAIL_BALANCE = 25 KEY_REQUIRED = 26 BAD_ENCODING = 27 INSUFFICIENT_ACCOUNT_BALANCE = 28 INVALID_SOLIDITY_ADDRESS = 29 INSUFFICIENT_GAS = 30 
CONTRACT_SIZE_LIMIT_EXCEEDED = 31 LOCAL_CALL_MODIFICATION_EXCEPTION = 32 CONTRACT_REVERT_EXECUTED = 33 CONTRACT_EXECUTION_EXCEPTION = 34 INVALID_RECEIVING_NODE_ACCOUNT = 35 MISSING_QUERY_HEADER = 36 ACCOUNT_UPDATE_FAILED = 37 INVALID_KEY_ENCODING = 38 NULL_SOLIDITY_ADDRESS = 39 CONTRACT_UPDATE_FAILED = 40 INVALID_QUERY_HEADER = 41 INVALID_FEE_SUBMITTED = 42 INVALID_PAYER_SIGNATURE = 43 KEY_NOT_PROVIDED = 44 INVALID_EXPIRATION_TIME = 45 NO_WACL_KEY = 46 FILE_CONTENT_EMPTY = 47 INVALID_ACCOUNT_AMOUNTS = 48 EMPTY_TRANSACTION_BODY = 49 INVALID_TRANSACTION_BODY = 50 INVALID_SIGNATURE_TYPE_MISMATCHING_KEY = 51 INVALID_SIGNATURE_COUNT_MISMATCHING_KEY = 52 EMPTY_LIVE_HASH_BODY = 53 EMPTY_LIVE_HASH = 54 EMPTY_LIVE_HASH_KEYS = 55 INVALID_LIVE_HASH_SIZE = 56 EMPTY_QUERY_BODY = 57 EMPTY_LIVE_HASH_QUERY = 58 LIVE_HASH_NOT_FOUND = 59 ACCOUNT_ID_DOES_NOT_EXIST = 60 LIVE_HASH_ALREADY_EXISTS = 61 INVALID_FILE_WACL = 62 SERIALIZATION_FAILED = 63 TRANSACTION_OVERSIZE = 64 TRANSACTION_TOO_MANY_LAYERS = 65 CONTRACT_DELETED = 66 PLATFORM_NOT_ACTIVE = 67 KEY_PREFIX_MISMATCH = 68 PLATFORM_TRANSACTION_NOT_CREATED = 69 INVALID_RENEWAL_PERIOD = 70 INVALID_PAYER_ACCOUNT_ID = 71 ACCOUNT_DELETED = 72 FILE_DELETED = 73 ACCOUNT_REPEATED_IN_ACCOUNT_AMOUNTS = 74 SETTING_NEGATIVE_ACCOUNT_BALANCE = 75 OBTAINER_REQUIRED = 76 OBTAINER_SAME_CONTRACT_ID = 77 OBTAINER_DOES_NOT_EXIST = 78 MODIFYING_IMMUTABLE_CONTRACT = 79 FILE_SYSTEM_EXCEPTION = 80 AUTORENEW_DURATION_NOT_IN_RANGE = 81 ERROR_DECODING_BYTESTRING = 82 CONTRACT_FILE_EMPTY = 83 CONTRACT_BYTECODE_EMPTY = 84 INVALID_INITIAL_BALANCE = 85 INVALID_RECEIVE_RECORD_THRESHOLD = 86 INVALID_SEND_RECORD_THRESHOLD = 87 ACCOUNT_IS_NOT_GENESIS_ACCOUNT = 88 PAYER_ACCOUNT_UNAUTHORIZED = 89 INVALID_FREEZE_TRANSACTION_BODY = 90 FREEZE_TRANSACTION_BODY_NOT_FOUND = 91 TRANSFER_LIST_SIZE_LIMIT_EXCEEDED = 92 RESULT_SIZE_LIMIT_EXCEEDED = 93 NOT_SPECIAL_ACCOUNT = 94 CONTRACT_NEGATIVE_GAS = 95 CONTRACT_NEGATIVE_VALUE = 96 INVALID_FEE_FILE = 97 INVALID_EXCHANGE_RATE_FILE = 98 INSUFFICIENT_LOCAL_CALL_GAS = 99 ENTITY_NOT_ALLOWED_TO_DELETE = 100 AUTHORIZATION_FAILED = 101 FILE_UPLOADED_PROTO_INVALID = 102 FILE_UPLOADED_PROTO_NOT_SAVED_TO_DISK = 103 FEE_SCHEDULE_FILE_PART_UPLOADED = 104 EXCHANGE_RATE_CHANGE_LIMIT_EXCEEDED = 105 MAX_CONTRACT_STORAGE_EXCEEDED = 106 TRANSFER_ACCOUNT_SAME_AS_DELETE_ACCOUNT = 107 TOTAL_LEDGER_BALANCE_INVALID = 108 EXPIRATION_REDUCTION_NOT_ALLOWED = 110 MAX_GAS_LIMIT_EXCEEDED = 111 MAX_FILE_SIZE_EXCEEDED = 112 RECEIVER_SIG_REQUIRED = 113 INVALID_TOPIC_ID = 150 INVALID_ADMIN_KEY = 155 INVALID_SUBMIT_KEY = 156 UNAUTHORIZED = 157 INVALID_TOPIC_MESSAGE = 158 INVALID_AUTORENEW_ACCOUNT = 159 AUTORENEW_ACCOUNT_NOT_ALLOWED = 160 TOPIC_EXPIRED = 162 INVALID_CHUNK_NUMBER = 163 INVALID_CHUNK_TRANSACTION_ID = 164 ACCOUNT_FROZEN_FOR_TOKEN = 165 TOKENS_PER_ACCOUNT_LIMIT_EXCEEDED = 166 INVALID_TOKEN_ID = 167 INVALID_TOKEN_DECIMALS = 168 INVALID_TOKEN_INITIAL_SUPPLY = 169 INVALID_TREASURY_ACCOUNT_FOR_TOKEN = 170 INVALID_TOKEN_SYMBOL = 171 TOKEN_HAS_NO_FREEZE_KEY = 172 TRANSFERS_NOT_ZERO_SUM_FOR_TOKEN = 173 MISSING_TOKEN_SYMBOL = 174 TOKEN_SYMBOL_TOO_LONG = 175 ACCOUNT_KYC_NOT_GRANTED_FOR_TOKEN = 176 TOKEN_HAS_NO_KYC_KEY = 177 INSUFFICIENT_TOKEN_BALANCE = 178 TOKEN_WAS_DELETED = 179 TOKEN_HAS_NO_SUPPLY_KEY = 180 TOKEN_HAS_NO_WIPE_KEY = 181 INVALID_TOKEN_MINT_AMOUNT = 182 INVALID_TOKEN_BURN_AMOUNT = 183 TOKEN_NOT_ASSOCIATED_TO_ACCOUNT = 184 CANNOT_WIPE_TOKEN_TREASURY_ACCOUNT = 185 INVALID_KYC_KEY = 186 INVALID_WIPE_KEY = 187 INVALID_FREEZE_KEY = 188 INVALID_SUPPLY_KEY = 189 
MISSING_TOKEN_NAME = 190 TOKEN_NAME_TOO_LONG = 191 INVALID_WIPING_AMOUNT = 192 TOKEN_IS_IMMUTABLE = 193 TOKEN_ALREADY_ASSOCIATED_TO_ACCOUNT = 194 TRANSACTION_REQUIRES_ZERO_TOKEN_BALANCES = 195 ACCOUNT_IS_TREASURY = 196 TOKEN_ID_REPEATED_IN_TOKEN_LIST = 197 TOKEN_TRANSFER_LIST_SIZE_LIMIT_EXCEEDED = 198 EMPTY_TOKEN_TRANSFER_BODY = 199 EMPTY_TOKEN_TRANSFER_ACCOUNT_AMOUNTS = 200 INVALID_SCHEDULE_ID = 201 SCHEDULE_IS_IMMUTABLE = 202 INVALID_SCHEDULE_PAYER_ID = 203 INVALID_SCHEDULE_ACCOUNT_ID = 204 NO_NEW_VALID_SIGNATURES = 205 UNRESOLVABLE_REQUIRED_SIGNERS = 206 SCHEDULED_TRANSACTION_NOT_IN_WHITELIST = 207 SOME_SIGNATURES_WERE_INVALID = 208 TRANSACTION_ID_FIELD_NOT_ALLOWED = 209 IDENTICAL_SCHEDULE_ALREADY_CREATED = 210 INVALID_ZERO_BYTE_IN_STRING = 211 SCHEDULE_ALREADY_DELETED = 212 SCHEDULE_ALREADY_EXECUTED = 213 MESSAGE_SIZE_TOO_LARGE = 214 OPERATION_REPEATED_IN_BUCKET_GROUPS = 215 BUCKET_CAPACITY_OVERFLOW = 216 NODE_CAPACITY_NOT_SUFFICIENT_FOR_OPERATION = 217 BUCKET_HAS_NO_THROTTLE_GROUPS = 218 THROTTLE_GROUP_HAS_ZERO_OPS_PER_SEC = 219 SUCCESS_BUT_MISSING_EXPECTED_OPERATION = 220 UNPARSEABLE_THROTTLE_DEFINITIONS = 221 INVALID_THROTTLE_DEFINITIONS = 222 ACCOUNT_EXPIRED_AND_PENDING_REMOVAL = 223 INVALID_TOKEN_MAX_SUPPLY = 224 INVALID_TOKEN_NFT_SERIAL_NUMBER = 225 INVALID_NFT_ID = 226 METADATA_TOO_LONG = 227 BATCH_SIZE_LIMIT_EXCEEDED = 228 INVALID_QUERY_RANGE = 229 FRACTION_DIVIDES_BY_ZERO = 230 INSUFFICIENT_PAYER_BALANCE_FOR_CUSTOM_FEE = 231 CUSTOM_FEES_LIST_TOO_LONG = 232 INVALID_CUSTOM_FEE_COLLECTOR = 233 INVALID_TOKEN_ID_IN_CUSTOM_FEES = 234 TOKEN_NOT_ASSOCIATED_TO_FEE_COLLECTOR = 235 TOKEN_MAX_SUPPLY_REACHED = 236 SENDER_DOES_NOT_OWN_NFT_SERIAL_NO = 237 CUSTOM_FEE_NOT_FULLY_SPECIFIED = 238 CUSTOM_FEE_MUST_BE_POSITIVE = 239 TOKEN_HAS_NO_FEE_SCHEDULE_KEY = 240 CUSTOM_FEE_OUTSIDE_NUMERIC_RANGE = 241 ROYALTY_FRACTION_CANNOT_EXCEED_ONE = 242 FRACTIONAL_FEE_MAX_AMOUNT_LESS_THAN_MIN_AMOUNT = 243 CUSTOM_SCHEDULE_ALREADY_HAS_NO_FEES = 244 CUSTOM_FEE_DENOMINATION_MUST_BE_FUNGIBLE_COMMON = 245 CUSTOM_FRACTIONAL_FEE_ONLY_ALLOWED_FOR_FUNGIBLE_COMMON = 246 INVALID_CUSTOM_FEE_SCHEDULE_KEY = 247 INVALID_TOKEN_MINT_METADATA = 248 INVALID_TOKEN_BURN_METADATA = 249 CURRENT_TREASURY_STILL_OWNS_NFTS = 250 ACCOUNT_STILL_OWNS_NFTS = 251 TREASURY_MUST_OWN_BURNED_NFT = 252 ACCOUNT_DOES_NOT_OWN_WIPED_NFT = 253 ACCOUNT_AMOUNT_TRANSFERS_ONLY_ALLOWED_FOR_FUNGIBLE_COMMON = 254 MAX_NFTS_IN_PRICE_REGIME_HAVE_BEEN_MINTED = 255 PAYER_ACCOUNT_DELETED = 256 CUSTOM_FEE_CHARGING_EXCEEDED_MAX_RECURSION_DEPTH = 257 CUSTOM_FEE_CHARGING_EXCEEDED_MAX_ACCOUNT_AMOUNTS = 258 INSUFFICIENT_SENDER_ACCOUNT_BALANCE_FOR_CUSTOM_FEE = 259 SERIAL_NUMBER_LIMIT_REACHED = 260 CUSTOM_ROYALTY_FEE_ONLY_ALLOWED_FOR_NON_FUNGIBLE_UNIQUE = 261 NO_REMAINING_AUTOMATIC_ASSOCIATIONS = 262 EXISTING_AUTOMATIC_ASSOCIATIONS_EXCEED_GIVEN_LIMIT = 263 REQUESTED_NUM_AUTOMATIC_ASSOCIATIONS_EXCEEDS_ASSOCIATION_LIMIT = 264 TOKEN_IS_PAUSED = 265 TOKEN_HAS_NO_PAUSE_KEY = 266 INVALID_PAUSE_KEY = 267 FREEZE_UPDATE_FILE_DOES_NOT_EXIST = 268 FREEZE_UPDATE_FILE_HASH_DOES_NOT_MATCH = 269 NO_UPGRADE_HAS_BEEN_PREPARED = 270 NO_FREEZE_IS_SCHEDULED = 271 UPDATE_FILE_HASH_CHANGED_SINCE_PREPARE_UPGRADE = 272 FREEZE_START_TIME_MUST_BE_FUTURE = 273 PREPARED_UPDATE_FILE_IS_IMMUTABLE = 274 FREEZE_ALREADY_SCHEDULED = 275 FREEZE_UPGRADE_IN_PROGRESS = 276 UPDATE_FILE_ID_DOES_NOT_MATCH_PREPARED = 277 UPDATE_FILE_HASH_DOES_NOT_MATCH_PREPARED = 278 CONSENSUS_GAS_EXHAUSTED = 279 REVERTED_SUCCESS = 280 MAX_STORAGE_IN_PRICE_REGIME_HAS_BEEN_USED = 281 INVALID_ALIAS_KEY = 282 
DESCRIPTOR.enum_types_by_name['ResponseCodeEnum'] = _RESPONSECODEENUM _sym_db.RegisterFileDescriptor(DESCRIPTOR) DESCRIPTOR._options = None _RESPONSECODEENUM.values_by_name["INVALID_RECEIVE_RECORD_THRESHOLD"]._options = None _RESPONSECODEENUM.values_by_name["INVALID_SEND_RECORD_THRESHOLD"]._options = None _RESPONSECODEENUM.values_by_name["INSUFFICIENT_PAYER_BALANCE_FOR_CUSTOM_FEE"]._options = None # @@protoc_insertion_point(module_scope)
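The generated module above wraps `_RESPONSECODEENUM` in protobuf's `EnumTypeWrapper`, which provides name/number lookups. A minimal usage sketch follows; the import path `response_code_pb2` is illustrative, since the record does not show the module's actual file name:

```python
# Hypothetical usage of the generated enum wrapper shown above.
# The module name "response_code_pb2" is an assumption; the record
# does not reveal the real import path.
from response_code_pb2 import ResponseCodeEnum

print(ResponseCodeEnum.Value('OK'))    # 0   (name -> number)
print(ResponseCodeEnum.Name(22))       # 'SUCCESS'   (number -> name)
print(ResponseCodeEnum.Name(282))      # 'INVALID_ALIAS_KEY'
```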
46.264176
11,814
0.773453
8,922
69,350
5.574535
0.103676
0.087944
0.184514
0.131917
0.657431
0.588909
0.558026
0.528571
0.514034
0.512265
0
0.067094
0.134535
69,350
1,498
11,815
46.29506
0.761563
0.003057
0
0.652937
1
0.004727
0.236445
0.217495
0
0
0
0
0
1
0
false
0
0.003376
0
0.003376
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
3d508ea692ff4c0fe012224a02611729b8b6ea1c
850
py
Python
heliotrope/view/api/hitomi/info.py
ombe1229/Heliotrope
d8160a23fd12e8987f80edfacfa4d3fb7de7be21
[ "MIT" ]
null
null
null
heliotrope/view/api/hitomi/info.py
ombe1229/Heliotrope
d8160a23fd12e8987f80edfacfa4d3fb7de7be21
[ "MIT" ]
null
null
null
heliotrope/view/api/hitomi/info.py
ombe1229/Heliotrope
d8160a23fd12e8987f80edfacfa4d3fb7de7be21
[ "MIT" ]
null
null
null
from sanic.blueprints import Blueprint from sanic.response import HTTPResponse, json from sanic.views import HTTPMethodView from sanic_openapi import openapi # type: ignore from heliotrope.sanic import HeliotropeRequest hitomi_info = Blueprint("hitomi_info", url_prefix="/info") class HitomiInfoView(HTTPMethodView): @openapi.summary("Get hitomi info") # type: ignore @openapi.tag("hitomi") # type: ignore async def get(self, request: HeliotropeRequest, index_id: int) -> HTTPResponse: if info := await request.app.ctx.nosql_query.find_info(index_id): return json({"status": 200, **info}) return request.app.ctx.response.not_found # TODO: add_route is partially unknown and as_view is partially unknown Need PR Sanic hitomi_info.add_route(HitomiInfoView.as_view(), "/<index_id:int>") # type: ignore
36.956522
85
0.747059
112
850
5.535714
0.482143
0.058065
0.032258
0
0
0
0
0
0
0
0
0.004167
0.152941
850
22
86
38.636364
0.856944
0.158824
0
0
0
0
0.081805
0
0
0
0
0.045455
0
1
0
false
0
0.357143
0
0.571429
0.142857
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
1
0
1
0
0
3
3d61d07fc4bf1fa9b87854a6777b4c7445d518bd
4,777
py
Python
jmetal/problem/multiobjective/zdt.py
12yuens2/jMetalPy
6f54940cb205df831f5498e2eac2520b331ee4fd
[ "MIT" ]
335
2017-03-16T19:44:50.000Z
2022-03-30T08:50:46.000Z
jmetal/problem/multiobjective/zdt.py
12yuens2/jMetalPy
6f54940cb205df831f5498e2eac2520b331ee4fd
[ "MIT" ]
85
2017-05-16T06:40:51.000Z
2022-02-05T23:43:49.000Z
jmetal/problem/multiobjective/zdt.py
12yuens2/jMetalPy
6f54940cb205df831f5498e2eac2520b331ee4fd
[ "MIT" ]
130
2017-02-08T01:19:15.000Z
2022-03-25T08:32:08.000Z
from math import sqrt, pow, sin, pi, cos from jmetal.core.problem import FloatProblem from jmetal.core.solution import FloatSolution """ .. module:: ZDT :platform: Unix, Windows :synopsis: ZDT problem family of multi-objective problems. .. moduleauthor:: Antonio J. Nebro <antonio@lcc.uma.es> """ class ZDT1(FloatProblem): """ Problem ZDT1. .. note:: Bi-objective unconstrained problem. The default number of variables is 30. .. note:: Continuous problem having a convex Pareto front """ def __init__(self, number_of_variables: int=30): """ :param number_of_variables: Number of decision variables of the problem. """ super(ZDT1, self).__init__() self.number_of_variables = number_of_variables self.number_of_objectives = 2 self.number_of_constraints = 0 self.obj_directions = [self.MINIMIZE, self.MINIMIZE] self.obj_labels = ['x', 'y'] self.lower_bound = self.number_of_variables * [0.0] self.upper_bound = self.number_of_variables * [1.0] def evaluate(self, solution: FloatSolution) -> FloatSolution: g = self.eval_g(solution) h = self.eval_h(solution.variables[0], g) solution.objectives[0] = solution.variables[0] solution.objectives[1] = h * g return solution def eval_g(self, solution: FloatSolution): g = sum(solution.variables) - solution.variables[0] constant = 9.0 / (solution.number_of_variables - 1) return constant * g + 1.0 def eval_h(self, f: float, g: float) -> float: return 1.0 - sqrt(f / g) def get_name(self): return 'ZDT1' class ZDT1Modified(ZDT1): """ Problem ZDT1Modified. .. note:: Version including a loop for increasing the computing time of the evaluation functions. """ def __init__(self, number_of_variables = 30): super(ZDT1Modified, self).__init__(number_of_variables) def evaluate(self, solution:FloatSolution) -> FloatSolution: s: float = 0.0 for i in range(1000): for j in range(10000): s += i * 0.235 / 1.234 + 1.23525 * j return super().evaluate(solution) class ZDT2(ZDT1): """ Problem ZDT2. .. note:: Bi-objective unconstrained problem. The default number of variables is 30. .. note:: Continuous problem having a non-convex Pareto front """ def eval_h(self, f: float, g: float) -> float: return 1.0 - pow(f / g, 2.0) def get_name(self): return 'ZDT2' class ZDT3(ZDT1): """ Problem ZDT3. .. note:: Bi-objective unconstrained problem. The default number of variables is 30. .. note:: Continuous problem having a partitioned Pareto front """ def eval_h(self, f: float, g: float) -> float: return 1.0 - sqrt(f / g) - (f / g) * sin(10.0 * f * pi) def get_name(self): return 'ZDT3' class ZDT4(ZDT1): """ Problem ZDT4. .. note:: Bi-objective unconstrained problem. The default number of variables is 10. .. note:: Continuous multi-modal problem having a convex Pareto front """ def __init__(self, number_of_variables: int=10): """ :param number_of_variables: Number of decision variables of the problem. """ super(ZDT4, self).__init__(number_of_variables=number_of_variables) self.lower_bound = self.number_of_variables * [-5.0] self.upper_bound = self.number_of_variables * [5.0] self.lower_bound[0] = 0.0 self.upper_bound[0] = 1.0 def eval_g(self, solution: FloatSolution): g = 0.0 for i in range(1, solution.number_of_variables): g += pow(solution.variables[i], 2.0) - 10.0 * cos(4.0 * pi * solution.variables[i]) g += 1.0 + 10.0 * (solution.number_of_variables - 1) return g def eval_h(self, f: float, g: float) -> float: return 1.0 - sqrt(f / g) def get_name(self): return 'ZDT4' class ZDT6(ZDT1): """ Problem ZDT6. .. note:: Bi-objective unconstrained problem. The default number of variables is 10. .. 
note:: Continuous problem having a non-convex Pareto front """ def __init__(self, number_of_variables: int=10): """ :param number_of_variables: Number of decision variables of the problem. """ super(ZDT6, self).__init__(number_of_variables=number_of_variables) def eval_g(self, solution: FloatSolution): g = sum(solution.variables) - solution.variables[0] g = g / (solution.number_of_variables - 1) g = pow(g, 0.25) g = 9.0 * g g = 1.0 + g return g def eval_h(self, f: float, g: float) -> float: return 1.0 - pow(f / g, 2.0) def get_name(self): return 'ZDT6'
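Because `ZDT1.evaluate` above is fully determined by `eval_g` and `eval_h`, the objective formulas can be sanity-checked with a short library-free sketch (an illustration added here, not part of jMetalPy):

```python
from math import sqrt

# Standalone check of the ZDT1 formulas implemented above,
# independent of jMetalPy so it runs without the library installed.
def zdt1_objectives(x):
    n = len(x)
    g = 1.0 + 9.0 * sum(x[1:]) / (n - 1)   # eval_g: constant * g + 1.0
    f1 = x[0]
    h = 1.0 - sqrt(f1 / g)                  # eval_h for ZDT1
    return f1, h * g                        # objectives[0], objectives[1]

# On the Pareto front (x[1:] all zero) g == 1, so f2 == 1 - sqrt(f1):
print(zdt1_objectives([0.25] + [0.0] * 29))  # (0.25, 0.5)
```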
29.670807
101
0.628009
649
4,777
4.46379
0.172573
0.088367
0.15844
0.06524
0.659993
0.626855
0.574387
0.513635
0.448395
0.448395
0
0.038483
0.254762
4,777
160
102
29.85625
0.775281
0.245342
0
0.324675
0
0
0.006673
0
0
0
0
0
0
1
0.246753
false
0
0.038961
0.12987
0.558442
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
e9eacbec0983f71ad935211a09d14567cc6a0fcf
3,438
py
Python
unittest/test.py
DongHoonPark/ltspice_pytool
5055d50d4106d5aba473713f3a3138622c6ad4a7
[ "MIT" ]
65
2018-12-31T15:20:24.000Z
2022-03-25T17:01:01.000Z
unittest/test.py
DongHoonPark/ltspice_pytool
5055d50d4106d5aba473713f3a3138622c6ad4a7
[ "MIT" ]
21
2019-09-03T15:23:11.000Z
2022-02-08T14:35:21.000Z
unittest/test.py
DongHoonPark/ltspice_pytool
5055d50d4106d5aba473713f3a3138622c6ad4a7
[ "MIT" ]
22
2019-06-16T09:26:33.000Z
2021-08-15T09:25:21.000Z
import unittest from ltspice import Ltspice import os from os import path import numpy as np class TestLtspiceMethods(unittest.TestCase): def __init__(self, methodName: str) -> None: super().__init__(methodName=methodName) self.__file_path = os.path.dirname(__file__) def test_loading(self): passed = True try: Ltspice(path.join(self.__file_path, 'rl_circuit_tran.raw')).parse() Ltspice(path.join(self.__file_path, 'rl_circuit_tran64b.raw')).parse() Ltspice(path.join(self.__file_path, 'rl_circuit_ac.raw')).parse() Ltspice(path.join(self.__file_path, 'rl_circuit_fft.fft')).parse() Ltspice(path.join(self.__file_path, 'rl_circuit_tranascii.raw')).parse() Ltspice(path.join(self.__file_path, 'rl_circuit_acascii.raw')).parse() Ltspice(path.join(self.__file_path, 'rl_circuit_fftascii.fft')).parse() except Exception as e: print(e) passed = False self.assertTrue(passed) def test_transient_data(self): lt1 = Ltspice(path.join(self.__file_path, 'rl_circuit_tran.raw')).parse() lt2 = Ltspice(path.join(self.__file_path, 'rl_circuit_tran64b.raw')).parse() lt3 = Ltspice(path.join(self.__file_path, 'rl_circuit_tranascii.raw')).parse() self.assertTrue(abs(lt1.get_data('V(R1)', time=0.1) - lt2.get_data('V(R1)', time=0.1)) < 2e-3) self.assertTrue(abs(lt2.get_data('V(R1)', time=0.1) - lt3.get_data('V(R1)', time=0.1)) < 2e-3) self.assertTrue(abs(lt3.get_data('V(R1)', time=0.1) - lt1.get_data('V(R1)', time=0.1)) < 2e-3) t1 = lt1.get_x() tp_last = 0 for tp in t1: self.assertLessEqual(tp_last, tp) tp_last = tp t2 = lt2.get_x() tp_last = 0 for tp in t2: self.assertLessEqual(tp_last, tp) tp_last = tp t3 = lt3.get_x() tp_last = 0 for tp in t3: self.assertLessEqual(tp_last, tp) tp_last = tp def test_fft_data(self): lt1 = Ltspice(path.join(self.__file_path, 'rl_circuit_fft.fft')).parse() lt2 = Ltspice(path.join(self.__file_path, 'rl_circuit_fftascii.fft')).parse() freq = np.linspace(1000, 130000, 130) x1 = lt1.get_data('V(R1)', frequency=freq) x2 = lt2.get_data('V(R1)', frequency=freq) self.assertTrue(np.isclose(0, np.max(abs(x1) - abs(x2)), atol=1e-5)) self.assertTrue(np.isclose(0, np.max(np.angle(x1) - np.angle(x2)), atol=1e-2)) def test_ac_data(self): lt1 = Ltspice(path.join(self.__file_path, 'rl_circuit_acascii.raw')).parse() lt2 = Ltspice(path.join(self.__file_path, 'rl_circuit_ac.raw')).parse() freq = np.linspace(0, 1300, 1301) x1 = lt1.get_data('V(R1)', frequency=freq) x2 = lt2.get_data('V(R1)', frequency=freq) self.assertTrue(np.isclose(0, np.max(abs(x1) - abs(x2)))) self.assertTrue(np.isclose(0, np.max(np.angle(x1) - np.angle(x2)))) def test_reverse_x_dc_analysis(self): passed = True try: lt1 = Ltspice(path.join(self.__file_path, 'reverse_x_analysis.raw')).parse() except Exception: passed = False self.assertTrue(passed) self.assertTrue(lt1.case_count == 28) if __name__ == '__main__': unittest.main()
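Distilled from the tests above, a minimal sketch of the `Ltspice` API they exercise (the file name `circuit.raw` is illustrative; the calls themselves all appear in the test code):

```python
# Hypothetical usage based on the calls exercised in the tests above.
from ltspice import Ltspice

lt = Ltspice('circuit.raw').parse()    # parse() returns the object (chained above)
t = lt.get_x()                         # simulation x-axis (time for a .tran run)
v = lt.get_data('V(R1)', time=0.1)     # trace value sampled at t = 0.1 s
```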
36.967742
102
0.605876
489
3,438
3.989775
0.192229
0.065607
0.098411
0.146079
0.732957
0.701179
0.701179
0.685802
0.570477
0.560738
0
0.041297
0.246364
3,438
92
103
37.369565
0.711694
0
0
0.295775
0
0
0.107652
0.059354
0
0
0
0
0.183099
1
0.084507
false
0.084507
0.070423
0
0.169014
0.014085
0
0
0
null
0
0
0
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
3
e9ffd8ea4934d502e01ad1d28577b5d32a45d8b7
9,545
py
Python
venv/lib/python3.8/site-packages/dateparser/data/numeral_translation_data/zh.py
yuta-komura/vishnu
67173b674d5f4f3be189474103612447ef69ab44
[ "MIT" ]
1
2021-11-17T04:55:14.000Z
2021-11-17T04:55:14.000Z
dateparser/data/numeral_translation_data/zh.py
cool-RR/dateparser
c38336df521cc57d947dc2c9111539a72f801652
[ "BSD-3-Clause" ]
null
null
null
dateparser/data/numeral_translation_data/zh.py
cool-RR/dateparser
c38336df521cc57d947dc2c9111539a72f801652
[ "BSD-3-Clause" ]
null
null
null
# -*- coding: utf-8 -*- info = { "%%cardinal-alternate2-13": { "(10, 19)": "零一=%spellout-cardinal-alternate2=;", "(20, 999999999999)": "零=%spellout-cardinal-alternate2=;", "(1000000000000, 'inf')": "=%spellout-cardinal-alternate2=;" }, "%%cardinal-alternate2-2": { "(1, 9)": "零=%spellout-numbering=;", "(10, 19)": "一=%spellout-numbering=;", "(20, 'inf')": "=%spellout-numbering=;" }, "%%cardinal-alternate2-3": { "(1, 9)": "零=%spellout-numbering=;", "(10, 19)": "零一=%spellout-cardinal-alternate2=;", "(20, 99)": "零=%spellout-cardinal-alternate2=;", "(100, 'inf')": "=%spellout-cardinal-alternate2=;" }, "%%cardinal-alternate2-4": { "(1, 9)": "零=%spellout-numbering=;", "(10, 19)": "零一=%spellout-cardinal-alternate2=;", "(20, 999)": "零=%spellout-cardinal-alternate2=;", "(1000, 'inf')": "=%spellout-cardinal-alternate2=;" }, "%%cardinal-alternate2-5": { "(1, 9)": "零=%spellout-numbering=;", "(10, 19)": "零一=%spellout-cardinal-alternate2=;", "(20, 9999)": "零=%spellout-cardinal-alternate2=;", "(10000, 'inf')": "=%spellout-cardinal-alternate2=;" }, "%%cardinal-alternate2-8": { "(1, 9)": "零=%spellout-numbering=;", "(10, 19)": "零一=%spellout-cardinal-alternate2=;", "(20, 9999999)": "零=%spellout-cardinal-alternate2=;", "(10000000, 'inf')": "=%spellout-cardinal-alternate2=;" }, "%%cardinal13": { "(10, 19)": "零一=%spellout-cardinal=;", "(20, 999999999999)": "零=%spellout-cardinal=;", "(1000000000000, 'inf')": "=%spellout-cardinal=;" }, "%%cardinal2": { "(1, 9)": "零=%spellout-numbering=;", "(10, 19)": "一=%spellout-numbering=;", "(20, 'inf')": "=%spellout-numbering=;" }, "%%cardinal3": { "(1, 9)": "零=%spellout-numbering=;", "(10, 19)": "零一=%spellout-cardinal=;", "(20, 99)": "零=%spellout-cardinal=;", "(100, 'inf')": "=%spellout-cardinal=;" }, "%%cardinal4": { "(1, 9)": "零=%spellout-numbering=;", "(10, 19)": "零一=%spellout-cardinal=;", "(20, 999)": "零=%spellout-cardinal=;", "(1000, 'inf')": "=%spellout-cardinal=;" }, "%%cardinal5": { "(1, 9)": "零=%spellout-numbering=;", "(10, 19)": "零一=%spellout-cardinal=;", "(20, 9999)": "零=%spellout-cardinal=;", "(10000, 'inf')": "=%spellout-cardinal=;" }, "%%cardinal8": { "(1, 9)": "零=%spellout-numbering=;", "(10, 19)": "零一=%spellout-cardinal=;", "(20, 9999999)": "零=%spellout-cardinal=;", "(10000000, 'inf')": "=%spellout-cardinal=;" }, "%%financialnumber13": { "(10, 19)": "零壹=%spellout-cardinal-financial=;", "(20, 999999999999)": "零=%spellout-cardinal-financial=;", "(1000000000000, 'inf')": "=%spellout-cardinal-financial=;" }, "%%financialnumber2": { "(1, 9)": "零=%spellout-cardinal-financial=;", "(10, 19)": "壹=%spellout-cardinal-financial=;", "(20, 'inf')": "=%spellout-cardinal-financial=;" }, "%%financialnumber3": { "(1, 9)": "零=%spellout-cardinal-financial=;", "(10, 19)": "零壹=%spellout-cardinal-financial=;", "(20, 99)": "零=%spellout-cardinal-financial=;", "(100, 'inf')": "=%spellout-cardinal-financial=;" }, "%%financialnumber4": { "(1, 9)": "零=%spellout-cardinal-financial=;", "(10, 19)": "零壹=%spellout-cardinal-financial=;", "(20, 999)": "零=%spellout-cardinal-financial=;", "(1000, 'inf')": "=%spellout-cardinal-financial=;" }, "%%financialnumber5": { "(1, 9)": "零=%spellout-cardinal-financial=;", "(10, 19)": "零壹=%spellout-cardinal-financial=;", "(20, 9999)": "零=%spellout-cardinal-financial=;", "(10000, 'inf')": "=%spellout-cardinal-financial=;" }, "%%financialnumber8": { "(1, 9)": "零=%spellout-cardinal-financial=;", "(10, 19)": "零壹=%spellout-cardinal-financial=;", "(20, 9999999)": "零=%spellout-cardinal-financial=;", "(10000000, 'inf')": "=%spellout-cardinal-financial=;" }, 
"%%number13": { "(10, 19)": "〇一=%spellout-numbering=;", "(20, 999999999999)": "〇=%spellout-numbering=;", "(1000000000000, 'inf')": "=%spellout-numbering=;" }, "%%number2": { "(1, 9)": "〇=%spellout-numbering=;", "(10, 19)": "一=%spellout-numbering=;", "(20, 'inf')": "=%spellout-numbering=;" }, "%%number3": { "(1, 9)": "〇=%spellout-numbering=;", "(10, 19)": "〇一=%spellout-numbering=;", "(20, 99)": "〇=%spellout-numbering=;", "(100, 'inf')": "=%spellout-numbering=;" }, "%%number4": { "(1, 9)": "〇=%spellout-numbering=;", "(10, 19)": "〇一=%spellout-numbering=;", "(20, 999)": "〇=%spellout-numbering=;", "(1000, 'inf')": "=%spellout-numbering=;" }, "%%number5": { "(1, 9)": "〇=%spellout-numbering=;", "(10, 19)": "〇一=%spellout-numbering=;", "(20, 9999)": "〇=%spellout-numbering=;", "(10000, 'inf')": "=%spellout-numbering=;" }, "%%number8": { "(1, 9)": "〇=%spellout-numbering=;", "(10, 19)": "〇一=%spellout-numbering=;", "(20, 9999999)": "〇=%spellout-numbering=;", "(10000000, 'inf')": "=%spellout-numbering=;" }, "%%numbering-days": { "(0, 20)": "=%spellout-numbering=;", "(21, 29)": "廿>>;", "30": "<<十;", "(31, 39)": "丗>>;", "40": "<<十;", "(41, 49)": "卌>>;", "(50, 'inf')": "=%spellout-numbering=;" }, "%%spellout-numbering-year-digits": { "(0, 9)": "=%spellout-numbering=;", "(10, 99)": "<<>>>;", "(100, 999)": "<<>>>;", "(1000, 'inf')": "<<>>>;" }, "%spellout-cardinal": { "0": "零;", "1": "一;", "2": "二;", "3": "三;", "4": "四;", "5": "五;", "6": "六;", "7": "七;", "8": "八;", "9": "九;", "(10, 99)": "=%spellout-numbering=;", "(100, 999)": "<<百[>%%cardinal2>];", "(1000, 9999)": "<<千[>%%cardinal3>];", "(10000, 99999999)": "<<万[>%%cardinal4>];", "(100000000, 999999999999)": "<<亿[>%%cardinal5>];", "(1000000000000, 9999999999999999)": "<<兆[>%%cardinal8>];", "(10000000000000000, 999999999999999999)": "<<京[>%%cardinal13>];", "(1000000000000000000, 'inf')": "=#,##0=;" }, "%spellout-cardinal-alternate2": { "3": "三;", "4": "四;", "5": "五;", "6": "六;", "7": "七;", "8": "八;", "9": "九;", "(10, 99)": "=%spellout-numbering=;", "(100, 999)": "<<百[>%%cardinal-alternate2-2>];", "(1000, 9999)": "<<千[>%%cardinal-alternate2-3>];", "(10000, 99999999)": "<<万[>%%cardinal-alternate2-4>];", "(100000000, 999999999999)": "<<亿[>%%cardinal-alternate2-5>];", "(1000000000000, 9999999999999999)": "<<兆[>%%cardinal-alternate2-8>];", "(10000000000000000, 999999999999999999)": "<<京[>%%cardinal-alternate2-13>];", "(1000000000000000000, 'inf')": "=#,##0=;" }, "%spellout-cardinal-financial": { "0": "零;", "1": "壹;", "2": "贰;", "3": "叁;", "4": "肆;", "5": "伍;", "6": "陆;", "7": "柒;", "8": "捌;", "9": "玖;", "(10, 19)": "拾[>>];", "(20, 99)": "<<拾[>>];", "(100, 999)": "<<佰[>%%financialnumber2>];", "(1000, 9999)": "<<仟[>%%financialnumber3>];", "(10000, 99999999)": "<<万[>%%financialnumber4>];", "(100000000, 999999999999)": "<<亿[>%%financialnumber5>];", "(1000000000000, 9999999999999999)": "<<兆[>%%financialnumber8>];", "(10000000000000000, 999999999999999999)": "<<京[>%%financialnumber13>];", "(1000000000000000000, 'inf')": "=#,##0=;" }, "%spellout-numbering": { "0": "〇;", "1": "一;", "2": "二;", "3": "三;", "4": "四;", "5": "五;", "6": "六;", "7": "七;", "8": "八;", "9": "九;", "(10, 19)": "十[>>];", "(20, 99)": "<<十[>>];", "(100, 999)": "<<百[>%%number2>];", "(1000, 9999)": "<<千[>%%number3>];", "(10000, 99999999)": "<<万[>%%number4>];", "(100000000, 999999999999)": "<<亿[>%%number5>];", "(1000000000000, 9999999999999999)": "<<兆[>%%number8>];", "(10000000000000000, 999999999999999999)": "<<京[>%%number13>];", "(1000000000000000000, 'inf')": "=#,##0=;" }, "%spellout-numbering-days": { 
"0": "〇;", "(1, 10)": "初=%spellout-numbering=;", "(11, 20)": "=%spellout-numbering=;", "(21, 'inf')": "=%%numbering-days=;" }, "%spellout-numbering-year": { "(0, 999)": "=%spellout-numbering=;", "(1000, 9999)": "=%%spellout-numbering-year-digits=;", "(10000, 'inf')": "=%spellout-numbering=;" }, "%spellout-ordinal": { "(0, 'inf')": "第=%spellout-numbering=;" }, "cardinal-alternate2-13": { "(1, 'inf')": "零=%spellout-numbering=;" }, "cardinal13": { "(1, 'inf')": "零=%spellout-numbering=;" }, "financialnumber13": { "(1, 'inf')": "零=%spellout-cardinal-financial=;" }, "number13": { "(1, 'inf')": "〇=%spellout-numbering=;" }, "spellout-cardinal-alternate2": { "(2, 'inf')": "两;" } }
36.292776
86
0.454374
829
9,545
5.231604
0.13269
0.210284
0.138344
0.038045
0.459534
0.334794
0.280148
0.264007
0.256629
0.256629
0
0.16543
0.251441
9,545
262
87
36.431298
0.439048
0.0022
0
0.325671
0
0
0.627494
0.363894
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
1813a5c1d3d1d900ef7f6d0d5ffdbad09e80ed1a
5,842
py
Python
idaes/generic_models/properties/core/reactions/tests/test_rate_forms.py
carldlaird/idaes-pse
cc7a32ca9fa788f483fa8ef85f3d1186ef4a596f
[ "RSA-MD" ]
112
2019-02-11T23:16:36.000Z
2022-03-23T20:59:57.000Z
idaes/generic_models/properties/core/reactions/tests/test_rate_forms.py
carldlaird/idaes-pse
cc7a32ca9fa788f483fa8ef85f3d1186ef4a596f
[ "RSA-MD" ]
621
2019-03-01T14:44:12.000Z
2022-03-31T19:49:25.000Z
idaes/generic_models/properties/core/reactions/tests/test_rate_forms.py
carldlaird/idaes-pse
cc7a32ca9fa788f483fa8ef85f3d1186ef4a596f
[ "RSA-MD" ]
154
2019-02-01T23:46:33.000Z
2022-03-23T15:07:10.000Z
################################################################################# # The Institute for the Design of Advanced Energy Systems Integrated Platform # Framework (IDAES IP) was produced under the DOE Institute for the # Design of Advanced Energy Systems (IDAES), and is copyright (c) 2018-2021 # by the software owners: The Regents of the University of California, through # Lawrence Berkeley National Laboratory, National Technology & Engineering # Solutions of Sandia, LLC, Carnegie Mellon University, West Virginia University # Research Corporation, et al. All rights reserved. # # Please see the files COPYRIGHT.md and LICENSE.md for full copyright and # license information. ################################################################################# """ Tests for rate forms """ import pytest from pyomo.environ import Block, ConcreteModel, Var, units as pyunits from idaes.generic_models.properties.core.generic.generic_reaction import \ GenericReactionParameterBlock, ConcentrationForm from idaes.generic_models.properties.core.reactions.rate_forms import * from idaes.core.util.testing import PhysicalParameterTestBlock from idaes.core.util.misc import add_object_reference @pytest.mark.unit def test_power_law_rate_no_order(): m = ConcreteModel() # # Add a test thermo package for validation m.pparams = PhysicalParameterTestBlock() m.thermo = m.pparams.build_state_block([1]) m.rparams = GenericReactionParameterBlock(default={ "property_package": m.pparams, "base_units": {"time": pyunits.s, "mass": pyunits.kg, "amount": pyunits.mol, "length": pyunits.m, "temperature": pyunits.K}, "rate_reactions": { "r1": {"stoichiometry": {("p1", "c1"): -1, ("p1", "c2"): 2}, "rate_form": power_law_rate, "concentration_form": ConcentrationForm.moleFraction}}}) # Create a dummy state block m.rxn = Block([1]) add_object_reference( m.rxn[1], "phase_component_set", m.pparams._phase_component_set) add_object_reference(m.rxn[1], "params", m.rparams) add_object_reference(m.rxn[1], "state_ref", m.thermo[1]) m.rxn[1].k_rxn = Var(["r1"], initialize=1) power_law_rate.build_parameters( m.rparams.reaction_r1, m.rparams.config.rate_reactions["r1"]) # Check parameter construction assert isinstance(m.rparams.reaction_r1.reaction_order, Var) assert len(m.rparams.reaction_r1.reaction_order) == 4 for i, v in m.rparams.reaction_r1.reaction_order.items(): try: stoic = m.rparams.config.rate_reactions.r1.stoichiometry[i] except KeyError: stoic = 0 if stoic < 1: assert v.value == -stoic else: assert v.value == 0 # Check reaction form rform = power_law_rate.return_expression( m.rxn[1], m.rparams.reaction_r1, "r1", 300) assert str(rform) == str( m.rxn[1].k_rxn["r1"] * m.thermo[1].mole_frac_phase_comp["p1", "c1"] ** m.rparams.reaction_r1.reaction_order["p1", "c1"]) @pytest.mark.unit def test_power_law_rate_with_order(): m = ConcreteModel() # # Add a test thermo package for validation m.pparams = PhysicalParameterTestBlock() m.thermo = m.pparams.build_state_block([1]) m.rparams = GenericReactionParameterBlock(default={ "property_package": m.pparams, "base_units": {"time": pyunits.s, "mass": pyunits.kg, "amount": pyunits.mol, "length": pyunits.m, "temperature": pyunits.K}, "rate_reactions": { "r1": {"stoichiometry": {("p1", "c1"): -1, ("p1", "c2"): 2}, "rate_form": power_law_rate, "concentration_form": ConcentrationForm.moleFraction, "parameter_data": { "reaction_order": {("p1", "c1"): 1, ("p1", "c2"): 2, ("p2", "c1"): 3, ("p2", "c2"): 4}}}}}) # Create a dummy state block m.rxn = Block([1]) add_object_reference( m.rxn[1], 
"phase_component_set", m.pparams._phase_component_set) add_object_reference(m.rxn[1], "params", m.rparams) add_object_reference(m.rxn[1], "state_ref", m.thermo[1]) m.rxn[1].k_rxn = Var(["r1"], initialize=1) power_law_rate.build_parameters( m.rparams.reaction_r1, m.rparams.config.rate_reactions["r1"]) # Check parameter construction assert isinstance(m.rparams.reaction_r1.reaction_order, Var) assert len(m.rparams.reaction_r1.reaction_order) == 4 assert m.rparams.reaction_r1.reaction_order["p1", "c1"].value == 1 assert m.rparams.reaction_r1.reaction_order["p1", "c2"].value == 2 assert m.rparams.reaction_r1.reaction_order["p2", "c1"].value == 3 assert m.rparams.reaction_r1.reaction_order["p2", "c2"].value == 4 # Check reaction form rform = power_law_rate.return_expression( m.rxn[1], m.rparams.reaction_r1, "r1", 300) assert str(rform) == str( m.rxn[1].k_rxn["r1"] * ( m.thermo[1].mole_frac_phase_comp["p1", "c1"] ** m.rparams.reaction_r1.reaction_order["p1", "c1"] * m.thermo[1].mole_frac_phase_comp["p1", "c2"] ** m.rparams.reaction_r1.reaction_order["p1", "c2"] * m.thermo[1].mole_frac_phase_comp["p2", "c1"] ** m.rparams.reaction_r1.reaction_order["p2", "c1"] * m.thermo[1].mole_frac_phase_comp["p2", "c2"] ** m.rparams.reaction_r1.reaction_order["p2", "c2"]))
40.013699
81
0.5962
701
5,842
4.788873
0.238231
0.059577
0.085791
0.096515
0.748883
0.748883
0.706583
0.704796
0.554662
0.554662
0
0.029412
0.24923
5,842
145
82
40.289655
0.735978
0.144985
0
0.597938
0
0
0.088639
0
0
0
0
0
0.123711
1
0.020619
false
0
0.061856
0
0.082474
0
0
0
0
null
0
0
0
0
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
182c8d361b05a3870102076dd21ec42104903358
144
py
Python
lambda/helloworld.py
binxio/blog-lambda-python-37-runtime
bd4c228f0aee64a0ae5d5a6287c530be834fe930
[ "Apache-2.0" ]
null
null
null
lambda/helloworld.py
binxio/blog-lambda-python-37-runtime
bd4c228f0aee64a0ae5d5a6287c530be834fe930
[ "Apache-2.0" ]
null
null
null
lambda/helloworld.py
binxio/blog-lambda-python-37-runtime
bd4c228f0aee64a0ae5d5a6287c530be834fe930
[ "Apache-2.0" ]
null
null
null
import json def handler(event: dict, context) -> dict: return { 'statusCode': 200, 'body': json.dumps('Hello World') }
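A quick local check of the handler above; the import assumes the module is importable as `helloworld`, and note that `json.dumps` on a bare string yields a quoted JSON string in `body`:

```python
# Local invocation sketch: the AWS Lambda runtime normally supplies
# the event and context arguments. The "helloworld" module name is an
# assumption based on the record's file path.
from helloworld import handler

print(handler({}, None))
# -> {'statusCode': 200, 'body': '"Hello World"'}
```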
18
42
0.569444
16
144
5.125
0.875
0
0
0
0
0
0
0
0
0
0
0.029126
0.284722
144
8
43
18
0.76699
0
0
0
0
0
0.172414
0
0
0
0
0
0
1
0.166667
false
0
0.166667
0.166667
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
0
0
0
3
1871e9130f89b538c72d2dcb47979a5ca024971f
113
py
Python
deployment/uwsgi/wsgi.py
victorskl/gen3-indexd
c22ffb1f1472e0732d3b44cd39b1241ff8156cc6
[ "Apache-2.0" ]
11
2018-05-31T06:29:44.000Z
2020-10-21T14:09:36.000Z
deployment/uwsgi/wsgi.py
victorskl/gen3-indexd
c22ffb1f1472e0732d3b44cd39b1241ff8156cc6
[ "Apache-2.0" ]
171
2017-11-13T16:56:35.000Z
2022-03-29T19:37:35.000Z
deployment/uwsgi/wsgi.py
victorskl/gen3-indexd
c22ffb1f1472e0732d3b44cd39b1241ff8156cc6
[ "Apache-2.0" ]
25
2018-03-06T19:03:24.000Z
2021-11-27T19:39:49.000Z
from indexd import get_app import os os.environ["INDEXD_SETTINGS"] = "/var/www/indexd/" application = get_app()
18.833333
50
0.752212
17
113
4.823529
0.647059
0.146341
0
0
0
0
0
0
0
0
0
0
0.115044
113
5
51
22.6
0.82
0
0
0
0
0
0.274336
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
18740bcecea258c86fbe06939d7387c41561ca8e
2,308
py
Python
qrainbowstyle/colorsystem.py
desty2k/QDarkStyleSheet
4a8cd42acf5e9e7fce5fabbe37b1f97d89d203b2
[ "CC-BY-4.0" ]
10
2020-12-10T08:11:16.000Z
2022-03-30T09:29:34.000Z
qrainbowstyle/colorsystem.py
desty2k/QDarkStyleSheet
4a8cd42acf5e9e7fce5fabbe37b1f97d89d203b2
[ "CC-BY-4.0" ]
3
2021-05-21T15:04:10.000Z
2022-02-13T20:26:59.000Z
qrainbowstyle/colorsystem.py
desty2k/QDarkStyleSheet
4a8cd42acf5e9e7fce5fabbe37b1f97d89d203b2
[ "CC-BY-4.0" ]
2
2021-02-27T16:08:47.000Z
2022-02-22T15:05:10.000Z
# colorsystem.py is the full list of colors that can be used to easily create themes. class Gray: B0 = '#000000' B10 = '#19232D' B20 = '#293544' B30 = '#37414F' B40 = '#455364' B50 = '#54687A' B60 = '#60798B' B70 = '#788D9C' B80 = '#9DA9B5' B90 = '#ACB1B6' B100 = '#B9BDC1' B110 = '#C9CDD0' B120 = '#CED1D4' B130 = '#E0E1E3' B140 = '#FAFAFA' B150 = '#FFFFFF' class Blue: B0 = '#000000' B10 = '#062647' B20 = '#26486B' B30 = '#375A7F' B40 = '#346792' B50 = '#1A72BB' B60 = '#057DCE' B70 = '#259AE9' B80 = '#37AEFE' B90 = '#73C7FF' B100 = '#9FCBFF' B110 = '#C2DFFA' B120 = '#CEE8FF' B130 = '#DAEDFF' B140 = '#F5FAFF' B150 = '#FFFFFF' class Green: B0 = '#000000' B10 = '#064738' B20 = '#055C49' B30 = '#007A5E' B40 = '#008760' B50 = '#019D70' B60 = '#02BA85' B70 = '#20C997' B80 = '#44DEB0' B90 = '#3BEBB7' B100 = '#88F2D3' B110 = '#B0F5E1' B120 = '#D1FBEE' B130 = '#E4FFF7' B140 = '#F5FFFD' B150 = '#FFFFFF' class Red: B0 = '#000000' B10 = '#470606' B20 = '#760B0B' B30 = '#AF0F0F' B40 = '#D4140B' B50 = '#DE321F' B60 = '#E24232' B70 = '#E74C3C' B80 = '#F66657' B90 = '#F88478' B100 = '#FFACA4' B110 = '#FFC3BD' B120 = '#FEDDDA' B130 = '#FFEEEE' B140 = '#FFF5F5' B150 = '#FFFFFF' class Orange: B0 = '#000000' B10 = '#471D06' B20 = '#692907' B30 = '#AB3E00' B40 = '#CE4B01' B50 = '#E05E15' B60 = '#E57004' B70 = '#F37E12' B80 = '#FF993B' B90 = '#FFB950' B100 = '#FFCF84' B110 = '#FFDDA7' B120 = '#FFEACA' B130 = '#FFF3E2' B140 = '#FFFBF5' B150 = '#FFFFFF' class GroupDark: B10 = '#E11C1C' B20 = '#FF8A00' B30 = '#88BA00' B40 = '#2DB500' B50 = '#3FC6F0' B60 = '#107EEC' B70 = '#5C47E0' B80 = '#7F27C5' B90 = '#C88AFA' B100 = '#AF2294' B110 = '#DB4D8E' B120 = '#38D4A4' class GroupLight: B10 = '#FF6700' B20 = '#FFB000' B30 = '#FFE600' B40 = '#7FDD05' B50 = '#00A585' B60 = '#22BCF2' B70 = '#1256CC' B80 = '#803AD0' B90 = '#B568F2' B100 = '#CC2782' B110 = '#FF71BF' B120 = '#7EE8C7'
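A short sketch of how such a palette is consumed: the classes act as plain namespaces, so colors are simple attribute lookups (the import path is taken from the record's file path; the two values shown come from the palette above):

```python
# Attribute lookups on the namespace classes defined above.
from qrainbowstyle.colorsystem import Gray, Blue

print(Gray.B10)   # '#19232D'
print(Blue.B50)   # '#1A72BB'
```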
18.31746
85
0.480936
238
2,308
4.663866
0.571429
0.036036
0.04955
0
0
0
0
0
0
0
0
0.374105
0.334055
2,308
125
86
18.464
0.348081
0.035962
0
0.081081
0
0
0.327935
0
0
0
0
0
0
1
0
false
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
3
1878dfab3e7da20bc7f22218e3c0e340b2516a2b
252
py
Python
pull_backup/__init__.py
MochaTechnologies/pull_backup
7310eac2e73fd360a264b2fb9e83a58cbb4f7ffb
[ "MIT" ]
2
2021-07-29T05:36:09.000Z
2021-11-15T11:45:22.000Z
pull_backup/__init__.py
MochaTechnologies/pull_backup
7310eac2e73fd360a264b2fb9e83a58cbb4f7ffb
[ "MIT" ]
null
null
null
pull_backup/__init__.py
MochaTechnologies/pull_backup
7310eac2e73fd360a264b2fb9e83a58cbb4f7ffb
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- from __future__ import unicode_literals import frappe __version__ = '0.0.1' @frappe.whitelist() def download_backup(filename): from frappe.utils.response import download_backup return download_backup("/backups/"+filename)
19.384615
50
0.765873
32
252
5.65625
0.65625
0.232044
0
0
0
0
0
0
0
0
0
0.017937
0.115079
252
12
51
21
0.793722
0.083333
0
0
0
0
0.061404
0
0
0
0
0
0
1
0.142857
false
0
0.428571
0
0.714286
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
3
18800ca8ce96ae7d03620bf76730cf19f9e7ceff
67
py
Python
savethepages/connection.py
MichaelCurrin/save-the-pages
6e4bb572b3d1168f45dc198da69ea2c5f4806153
[ "MIT" ]
null
null
null
savethepages/connection.py
MichaelCurrin/save-the-pages
6e4bb572b3d1168f45dc198da69ea2c5f4806153
[ "MIT" ]
1
2019-11-22T11:34:18.000Z
2019-11-22T11:34:18.000Z
savethepages/connection.py
MichaelCurrin/save-the-pages
6e4bb572b3d1168f45dc198da69ea2c5f4806153
[ "MIT" ]
null
null
null
""" Connection module. """ from lib import connect DB = connect()
9.571429
23
0.671642
8
67
5.625
0.875
0
0
0
0
0
0
0
0
0
0
0
0.179104
67
6
24
11.166667
0.818182
0.268657
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
1881dd0e6574bc142c837c27bfc9d9ae67a9bca6
808
py
Python
greentest/test__ares_host_result.py
Eugeny/gevent
adb7b838ed66c13abe5059605730bb4b4531bbcd
[ "MIT" ]
2
2015-12-19T01:34:43.000Z
2018-02-02T12:32:01.000Z
greentest/test__ares_host_result.py
Eugeny/gevent
adb7b838ed66c13abe5059605730bb4b4531bbcd
[ "MIT" ]
null
null
null
greentest/test__ares_host_result.py
Eugeny/gevent
adb7b838ed66c13abe5059605730bb4b4531bbcd
[ "MIT" ]
2
2019-11-24T12:11:50.000Z
2020-12-26T19:00:20.000Z
import pickle import greentest from gevent.ares import ares_host_result class TestPickle(greentest.TestCase): # Issue 104: ares.ares_host_result unpickleable def _test(self, protocol): r = ares_host_result('family', ('arg1', 'arg2', )) dumped = pickle.dumps(r, protocol) loaded = pickle.loads(dumped) assert r == loaded, (r, loaded) assert r.family == loaded.family, (r, loaded) def test0(self): return self._test(0) def test1(self): return self._test(1) def test2(self): return self._test(2) if pickle.HIGHEST_PROTOCOL == 3: def test3(self): return self._test(3) else: assert pickle.HIGHEST_PROTOCOL == 2, pickle.HIGHEST_PROTOCOL if __name__ == '__main__': greentest.main()
23.764706
68
0.631188
101
808
4.831683
0.405941
0.081967
0.114754
0.147541
0
0
0
0
0
0
0
0.024958
0.256188
808
33
69
24.484848
0.787022
0.055693
0
0
0
0
0.028909
0
0
0
0
0
0.130435
1
0.217391
false
0
0.130435
0.173913
0.565217
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
43e6d5db8390c0b23e43f94230f0d63124922d1e
27,020
py
Python
_build/jupyter_execute/ipynb/01a-introducao.py
gcpeixoto/FMECD
9bca72574c6630d1594396fffef31cfb8d58dec2
[ "CC0-1.0" ]
null
null
null
_build/jupyter_execute/ipynb/01a-introducao.py
gcpeixoto/FMECD
9bca72574c6630d1594396fffef31cfb8d58dec2
[ "CC0-1.0" ]
null
null
null
_build/jupyter_execute/ipynb/01a-introducao.py
gcpeixoto/FMECD
9bca72574c6630d1594396fffef31cfb8d58dec2
[ "CC0-1.0" ]
null
null
null
# Introduction to Data Science ## Data science in the 21st century The dynamics of the globalized world have raised the importance of data and information to a scale never before seen in human history, owing to the exponential evolution of technological resources, of the means of communication and, above all, of high-performance computing. The *Information Age* is upon us, in which data are regarded as the indispensable raw material. Just as land was the fundamental resource for agriculture, and iron was for industry, data have become an asset of inestimable value for people, companies, governments and for science itself. With the expansion of the *Big Data* phenomenon, several niches of knowledge began to emerge, bringing with them a series of elegant names such as *business intelligence*, *data analytics*, *data warehouse* and *data engineering*. Even so, *data science* stands out as the most reasonable concept for denoting the scientific aspect of data. In an academic context, it sits at the intersection of other areas of knowledge and at the core of a larger chain involving process management and scientific thinking. It is hard to establish a unified holistic model that exactly captures how thoroughly data science permeates modern activities. With that in mind, the figure below attempts to illustrate, for our purposes, how data science relates to other domains of knowledge in a multidisciplinary way. ```{figure} ../figs/01/cycle-ds.png --- width: 300px name: cycle-ds --- Holistic model of data science. ``` The diagram shown in {numref}`cycle-ds` has three layers. The innermost layer shows how traditional areas of knowledge intersect to shape what we call *data science*. Here we emphasize three large sets: 1. **Mathematics/Statistics**, which provides the mathematical and statistical models fundamental to the study, analysis and inference of data, to which machine learning techniques are added; 2. **Computer Science/Software Engineering**, which provides the basic hardware and software elements for designing solutions for data exchange, storage and security, for example. 3. **Domain Knowledge/Expertise**, which is the very branch of application of the knowledge being sought through the data in question, to which *data reporting*, business intelligence, *marketing* and data communication in general attach themselves in support of decision making. The intermediate layer concerns the management of processes in the data chain, which involve governance, curation, storage and reuse of data, for example; that is, all aspects related to the preservation, maintenance, destruction and sharing of data. In the outermost shell, we have the layer concerning the scientific method of searching for solutions to a given problem. With some adaptation, the processes involved in this layer satisfactorily represent both the idea of *data-driven solutions*, widely used in business and industrial contexts, in which innovative tools are built to deliver products and solutions specially aimed at a particular segment or audience on the basis of careful customer mapping, and the sharing and reproducibility of scientific research. Broadly speaking, this cycle comprises the following processes:
1. **Problem definition**, the stage in which a "big question" is asked which, in principle, can be answered by combing through a specific dataset. 2. **Data acquisition**, the stage in which all the information related to the problem posed in the previous stage is collected. 3. **Data processing**, the stage in which the acquired data are processed for analysis. At this stage a genuine treatment of the data takes place (cleaning, formatting and organization). 4. **Data analysis**, the stage in which the data are analyzed and scrutinized by means of mining, grouping and clustering techniques. This is the moment when hypothesis tests and inference mechanisms are used. 5. **Data discovery**, the stage in which discoveries are made, such as correlations between variables, distinctive behaviors and clearly identifiable trends, allowing knowledge to be generated from the information. 6. **Solution**, the final stage of the cycle, in which the discoveries can be converted into products and value-added assets for the domain of the proposed problem. ### The COVID-19 case The pandemic caused by COVID-19 that recently ravaged the world can be taken as a singular case study for applying the data analysis process described in the previous section. From a scientific point of view, we could raise several questions about the virus with respect to the speed of contagion, the impact on economic activities and changes in social behavior, among others. Epidemiological models indicate that interaction between people is one of the main mechanisms of viral transmission. Starting from this premise and taking our country into account, one question we could ask in order to guide a data science investigation would be: _is the contagion rate of the virus among people living near a commercial center located in a rural area lower than among people living near a commercial center located in an urban area?_ Evidently, to answer a question like this with scientific precision, we would need definitions and a lot of data. How would we delimit the urban area? Should the commercial center be characterized as a cluster of small shops? Street markets? A place of commerce through which 100 people circulate per hour, every day? Besides, in this case, how would we collect the information we need? From the IBGE database? From DATASUS? Data acquisition can be a harder task than one imagines. In the case at hand, we would certainly look for information in public-sector databases, from municipal secretariats and state agencies all the way to specialized institutions at the federal level. However, in the case of Brazil, not all regions (perhaps not even the country as a whole) enjoy broad, accurate databases from which the primary variables needed for data analysis can be easily obtained. Supposing we had in hand the health information about the inhabitants of the rural and urban areas needed for our COVID-19 research, the next step to take is data processing. In what form does the database present itself? As an endless collection of Excel spreadsheets with no particular formatting? Structured .csv files categorized by age group, municipality and population density? Is all the information hierarchical, in HDF5 files? For each situation, we must have specific, suitable tools for manipulating, organizing, cleaning and structuring the data.
All this data treatment generally happens in one of two ways: pre-existing solutions (programs, resources, interfaces, frameworks, _open source_ projects etc. already available on the market or in academia) or customized solutions, created by the data scientist to meet specific demands not covered by the pre-existing ones. Once processed, the data reach a condition at least minimally reasonable for being scrutinized, that is, analyzed in detail. In this phase, the intellect of whoever analyzes the data runs at full steam, since a mix of technical knowledge, experience and creativity are the ingredients for making discoveries. The data are carried from one side to the other, mathematical expressions are computed here and there, statistical tests are run once, twice, three times, n times, until surprising conclusions may emerge. The spread of a virus is a nonlinear phenomenon susceptible to almost unpredictable dynamics. Therefore, in searching for the answer to a question as hard as the one we posed above, we may discover patterns and trends we never imagined, capable of answering even questions belonging to other problems. We might conclude, for example, that the contagion rate in the urban area is affected by the architectural characteristics of the commercial center: poor ventilation, narrow circulation corridors, highly concentrated points of sale, etc. At the end of the cycle, the expectation is that answers are obtained so that solutions can be proposed and decisions made responsibly. When the subject is people's health, ethical and moral questions become extremely sensitive. The role of data scientists and analysts in particular situations such as COVID-19 is to equip managers and leaders with recommendations grounded in the evidence shown by the data. Nevertheless, it is important to say that mathematical models are estimates of reality and also have degrees of fallibility. Therefore, balancing the discoveries against the weight of the decisions is essential for reaching adequate solutions. Several projects focused on data science and analysis applied to the study of COVID-19 are currently under way around the world. One of the pioneers was the _Coronavirus Resource Center_ of _Johns Hopkins University_ [[CRC-JHU]](https://coronavirus.jhu.edu/map.html). Initiatives in Brazil include the following: _Observatório Covid-19 BR_ [[COVID19BR]](https://covid19br.github.io/index.html), _Observatório Covid-19 Fiocruz_ [[FIOCRUZ]](https://portal.fiocruz.br/observatorio-covid-19), CoronaVIS-UFRGS [[CoronaVIS-UFRGS]](https://covid19.ufrgs.dev/dashboard/#/dashboard), CovidBR-UFCG [[CovidBR-UFCG]](http://covid.lsi.ufcg.edu.br), among others. At UFPB, we highlight the LEAPIG page [[LEAPIG-UFPB]](http://www.de.ufpb.br/~leapig/projetos/covid_19.html#PB). COVID-19 will surely establish itself as one of the greatest case studies in world history for data science and analysis, given the computational power of our times. ### Data scientist vs. data analyst vs. data engineer Professional careers in this new world of data have branched into many specialties. There are three profiles in particular we would like to comment on: the _data scientist_, the _data analyst_ and the _data engineer_. However, before diving into this alphabet soup, it is worth understanding a little about how data science as a whole is perceived by people around the world.
In the United States, a joint effort among representatives of universities, government, industry and other segments culminated in Special Publication No. 1500-1 (2015) of the _National Institute of Standards and Technology_ (NIST), which defined several concepts related to data science [[NIST 1500-1 (2015)]](https://bigdatawg.nist.gov/_uploadfiles/NIST.SP.1500-1.pdf). According to this document, > _"A **data scientist** is a professional with sufficient knowledge of business needs and domain knowledge, together with analytical, software and systems engineering skills, to manage, end to end, the processes involved in the data life cycle."_ As can be seen, the identity of the data scientist is defined by an intersection of competencies. All these competencies are distributed, in a certain way, across the three large areas of knowledge we mentioned above. On the other hand, what exactly is _data science_? According to the same document, > _"**Data science** is the extraction of useful knowledge directly from data through a process of discovery, or of hypothesis formulation and testing."_ From this we realize that _information_ is not synonymous with _knowledge_. To see this distinction more clearly, it is enough to reflect on our daily use of the cell phone. The number of text messages, photos, audio clips and videos we exchange with other people through instant messaging apps, social networks or e-mail is gigantic. How many of us have not had to delete content saved on our phone to free up space! Sometimes we have no idea how much information we exchange per minute with three or four colleagues. The central question is: of all this information, what fraction would be considered useful? That is, what could we judge to be usable knowledge? The answer may well be an astonishing "nothing"... Therefore, having plenty of information at hand does not necessarily mean possessing knowledge. Just as studying in order to learn is a difficult exercise for the brain, mining knowledge out of a sea of information is a task that demands patience, analysis, deductive reasoning and creativity. Speaking of data analysis, let us understand a bit about the term _analytics_, frequently used in the job market. _Analytics_ can be translated literally as "analysis" and, according to document NIST 1500-1, is defined as the "process of synthesizing knowledge from information". Given that, we can say that > _"A **data analyst** is the professional capable of synthesizing knowledge from information and converting it into exploitable assets."_ A third strand that emerged with the evolution of _Big Data_ was _data engineering_, whose goal is to design tools, devices and systems robust enough to handle the great mass of data in circulation. We can say that > _"A **data engineer** is the professional who explores independent resources to build scalable systems capable of storing, manipulating and analyzing data efficiently, and to develop new architectures whenever the nature of the database demands them."_ Although these three specializations have distinctive characteristics, they are treated as parts of a larger body, which is Data Science.
The [EDISON](https://edison-project.eu) project, coordinated by the University of Amsterdam, in the Netherlands, for example, was responsible for mapping definitions and taxonomies in order to build professional groups in data science to occupy positions in research centers and industry in Europe. According to the _EDISON Data Science Framework_ [[EDSF]](https://edison-project.eu/sites/edison-project.eu/files/attached_files/node-5/edison2017poster02-dsp-profiles-v03.pdf), the professional groups are divided among managers (CEOs, research leaders), general professionals (business analysts, data engineers etc.), database professionals (cloud computing designers, database designers etc.), curation professionals (librarians, archivists etc.), technical professionals (equipment operators, _warehouse_ maintainers etc.) and support professionals (user support, system feeders, attendants etc.). #### Who does what? Below we summarize the main tasks assigned to data scientists, analysts and engineers based on articles from specialized channels [[DataQuest]](https://www.dataquest.io/blog/data-analyst-data-scientist-data-engineer/), [[NCube]](https://ncube.com/blog/data-engineer-data-scientist-data-analyst-what-is-the-difference), [[Medium]](https://medium.com/@gdesantis7/decoding-the-data-scientist-51b353a01443), [[Data Science Academy]](http://datascienceacademy.com.br/blog/qual-a-diferenca-entre-cientista-de-dados-e-engenheiro-de-machine-learning/), [[Data Flair]](https://data-flair.training/blogs/data-scientist-vs-data-engineer-vs-data-analyst/). An important distinction among the profiles concerns how the data are organized. While data scientists and analysts deal with _structured_ data (organized, well-defined data that allow easy search), data engineers work with _unstructured_ data. ##### Data scientist - Performs data pre-processing, transformation and cleaning; - Uses machine learning tools to discover patterns in the data; - Refines and optimizes machine learning algorithms; - Formulates research questions based on requirements from the knowledge domain; ##### Data analyst - Analyzes data by means of descriptive statistics; - Uses database query languages to retrieve and manipulate information; - Produces reports using data visualization; - Takes part in the business-understanding process; ##### Data engineer - Develops, builds and maintains data architectures; - Performs large-scale tests on data platforms; - Handles raw, unstructured data; - Develops _pipelines_ for data modeling, mining and production; - Takes care of supporting data scientists and analysts; #### Which tools are used? The tools used by each of these professionals are varied and constantly evolving. In the list below, we mention a few. ##### Data scientist - R, Python, Hadoop, SQL tools (Oracle, PostgreSQL, MySQL etc.) - Algebra, Statistics, Machine Learning - Data visualization tools ##### Data analyst - R, Python - Excel, Pandas - Data visualization tools (Tableau, Infogram, PowerBi etc.) - Reporting and communication tools ##### Data engineer - SQL and noSQL tools (Oracle NoSQL, MongoDB, Cassandra etc.) - ETL (Extract/Transform/Load) solutions (AWS Glue, xPlenty, Stitch etc.) - Python, Scala, Java etc. - Spark, Hadoop etc.
- Spark, Hadoop, etc.

### Mathematics behind the data

In the real world we deal with a great diversity of data, but we do not always notice how Mathematics is at work behind every little piece of information. Throughout your undergraduate studies in data science, you will learn new abstract concepts and deepen others you already know, such as vectors and matrices.

You have probably heard that the digital computer operates on a _binary_ language, in which every message is encoded as a sequence of the digits 0 and 1. That is where the name _bit_ comes from, short for _binary digit_. In terms of _bits_, the sentence "Ciência de dados é legal!" ("Data science is cool!"), for example, is written as `1000011110100111101010110111011000111101001110000110000011001001100101100000110010011000011100100110111111100111000001110100110000011011001100101110011111000011101100100001`. Interesting, isn't it?

Let us look at other examples. In your Physics classes you learned that a vector has an origin and an endpoint, both of which are "points" in space. Now imagine a Cartesian-style grid in which we read each pair $(x, y)$ as (row, column), with rows counted downward and columns to the right. If a vector's origin is the point $O = (0,0)$ and its endpoint is the point $B = (0,2)$, you can trace it from $O$ to $B$ by walking two units to the right; this vector clearly lies along the first row of the grid. Now imagine that you could use each unit as a "box" in which to "store" a piece of information about yourself. That is, in $O = (0,0)$ you place your name, in $A = (0,1)$ your age, and in $B = (0,2)$ your CPF (the Brazilian taxpayer ID). You would have a "vector" with 3 values. Furthermore, suppose you could do the same for 9 more people in your family, repeating this process in 9 other vectors parallel to the first. You would now have $3 + 9 \times 3 = 3 + 27 = 30$ boxes in which to store information.

OK, so what? What we have just illustrated is the fundamental idea behind structuring tables, Excel spreadsheets, or _dataframes_ (which you will learn about in this course). They are all matrices, that is, information organized into rows and columns! Each row is like a vector with 3 positions (the columns), and each column holds one kind of record. So, say your mother is the next family member you think of. Her name would go into the "box" at the point $(1,0)$, her age into the "box" at $(1,1)$, and her CPF into the "box" at $(1,2)$. Repeating this for all the remaining members, you would conclude that the CPF of the 10th family member belongs in the "box" associated with the point $(9,2)$.

To a computer, vectors are called _arrays_, and a list of things can also be compared to an _array_. Note above that the first coordinate of the first vector (the one holding the boxes with information about you) is always zero; it stays fixed. This means the data resembles something **one-dimensional**: it suffices to count elements along a single direction, much as in the set of positive integers $\mathbb{Z}_{+}$. When, however, we insert the additional vectors (the information about your family members), the first coordinate also changes. The data now resembles something **two-dimensional**, that is, the counting of elements happens along two directions. Since our grid's vertical axis points downward rather than upward, as is customary for matrices, the numbers grow not only to the right but also downward. Hence we have a second count, based on another copy of the positive integers $\mathbb{Z}_{+}$, independent of the first. What are we talking about here? The concept of an _ordered pair_: any point $(x,y)$ with $x \in \mathbb{Z}_{+}$ and $y \in \mathbb{Z}_{+}$ is a "place" holding a little box in which we can store information independently of the others. A matrix is exactly that.
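As a minimal sketch of the two ideas above, the Python snippet below first writes each character of the sentence as its Unicode code point in binary (which reproduces the bit string shown earlier; real encodings such as UTF-8 pad every byte to 8 bits, so their output looks slightly different), and then indexes a small (row, column) table of family data. The names and values are, of course, hypothetical.

```python
# 1) Text as bits: each character's code point written in binary.
phrase = "Ciência de dados é legal!"
bits = ''.join(f'{ord(ch):b}' for ch in phrase)
print(bits)  # the long string of 0s and 1s shown in the text

# 2) A table as "boxes" indexed by (row, column):
#    row = family member, column = field (0: name, 1: age, 2: CPF).
family = [
    ["You",         20, "111.111.111-11"],   # hypothetical values
    ["Your mother", 45, "222.222.222-22"],
    # ... and so on, up to 10 members (rows 0 through 9)
]
print(family[1][2])  # the "box" at the point (1, 2): your mother's CPF
```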
The images you see on television, the _selfies_ and photographs you take with your phone, and the figures in this book can all be described as matrices. Each element of the matrix is identified with a position $(x,y)$, which we call a _pixel_; an image is, in turn, a grid of pixels. Images, however, are not just "addresses" of pixels: they have color, shades of gray, or are monochromatic (black and white). Colors are represented by "channels" and, believe it or not, each channel is also a matrix of data! At the end of the day, a color image is a "matrix made of other matrices"!

A matrix built out of other matrices is an example of **three-dimensional** data. Sequential data, such as a film or an animation, takes this idea further. The number of _frames per second_ (FPS) is so high nowadays that our eyes cannot perceive that what we watch at the movies, on TV, or on YouTube is exactly a rapid, successive change of several image "frames" every second.

As you will see throughout this course, many of the mathematical concepts you learned in high school will begin to make more sense through the applications.
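To make the "matrix of matrices" idea concrete, here is a minimal sketch using *numpy* (introduced below among the course modules); the image dimensions and pixel values are made up for illustration.

```python
import numpy as np

# A color image: height x width x 3 channels (R, G, B); dimensions are made up.
image = np.zeros((480, 640, 3), dtype=np.uint8)
image[100, 200] = [255, 0, 0]    # the pixel at row 100, column 200 becomes pure red
red_channel = image[:, :, 0]     # each channel is itself a 480 x 640 matrix

# A (very short) video: a stack of frames, one image per frame.
video = np.stack([image] * 24)   # e.g. one second of footage at 24 FPS
print(image.shape, red_channel.shape, video.shape)
# -> (480, 640, 3) (480, 640) (24, 480, 640, 3)
```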
## Computational tools of the course

In this course we will use Python 3.x (where x denotes a version number) as the programming language. Because Python is an interpreted language, interacting with it is easier than with a compiled one. A minimal working Python setup consists of the language _core_, a command terminal, and a text editor. While experienced programmers tend to rely on fewer visual aids, for teaching purposes we will use friendlier, more interactive interfaces that are proven learning environments.

### _iPython_ and _Jupyter Notebook_

[[iPython]](http://ipython.org) was a project started in 2001 to develop a Python interpreter with improved interactivity. It was integrated as a _kernel_ into the [[Jupyter]](http://jupyter.org) project, developed in 2014, allowing text, code, and graphical elements to be combined in interactive notebooks. _Jupyter notebooks_ are interfaces in which we can run code in different languages, provided we switch _kernels_. The word _Jupyter_ is a blend of the initials of _Julia_, _Python_, and _R_, the programming languages most commonly used in data science.

### *Anaconda*

In 2012 the [[Anaconda]](https://www.anaconda.com) project was started with the goal of providing a complete tool for working with Python. By 2020, by then a leading company, it had become one of the pioneers in providing individual and enterprise platforms for data science. According to the company, the [[Individual Edition]](https://www.anaconda.com/products/individual), its open-use version, is the most popular in the world, with more than 20 million users. We recommend that you follow the installation instructions for this version. Once it is installed, simply launch the tools from the _Anaconda Navigator_ dashboard.

### *Jupyter Lab*

A tool that improved on Jupyter's interactivity is _Jupyter Lab_, which provides a high degree of integration. This [[article]](https://blog.jupyter.org/jupyterlab-is-ready-for-users-5a6f039b8906) discusses the features of Jupyter Lab, among which it is worth mentioning drag-and-drop for reordering notebook cells and copying them between notebooks.

### *Binder*

The [[Binder]](https://mybinder.org) project works as an online service, based on the _Jupyter Hub_ technology, for serving interactive notebooks online. Through Binder it is possible to run code "in the cloud" without installing anything, although sessions are lost after use.

### *Google Colab*

[[Google Colab]](http://colab.research.google.com), short for _Colaboratory_, is a tool that mixes characteristics of the _Jupyter notebook_ and of _Binder_, but it also lets users tap Google's high-performance computing infrastructure (GPUs and TPUs). One advantage is that holders of Google accounts can sync files directly with Google Drive.

### Main modules

In this course, the tool ecosystem becomes complete with the addition of a few modules considered essential for the contemporary practice of data science and data analysis (a small demonstration of how they work together appears after the list):

- *numpy* (*NUMeric PYthon*): serves numerical computing, operating fundamentally on vectors, matrices, and linear algebra.
- *pandas* (*Python for Data Analysis*): the Python library for data analysis, which operates on *dataframes* efficiently.
- *sympy* (*SYMbolic PYthon*): a module for symbolic mathematics that plays the role of a true computer algebra system.
- *matplotlib*: aimed at plotting and data visualization, it was one of the first Python modules for this purpose.
- *scipy* (*SCIentific PYthon*): *scipy* can in fact be seen as a broader module that integrates the previous ones. In particular, it is used for numerical integration, interpolation, optimization, and statistics.
- *seaborn*: a data visualization module built on top of *matplotlib*, but with better visual capabilities.

Data visualization is a topic of utmost importance for the results of exploratory data analysis in statistics. A recommended site for researching the best data analysis tools is [[PyViz]](https://pyviz.org).
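As a taste of how these modules cooperate, the minimal sketch below (with made-up data; none of it comes from the course material) builds a small *pandas* dataframe on top of *numpy* arrays, summarizes it with descriptive statistics, and plots it with *matplotlib*.

```python
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt

# Hypothetical data: 50 salaries drawn at random, purely for illustration.
rng = np.random.default_rng(42)
df = pd.DataFrame({"salary": rng.normal(loc=3000, scale=500, size=50)})

print(df.describe())  # descriptive statistics of the dataframe

# A quick histogram of the salary column.
df["salary"].plot(kind="hist", bins=10, title="Salary distribution")
plt.xlabel("salary")
plt.show()
```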
128.056872
1,102
0.797335
4,114
27,020
5.210258
0.347837
0.027758
0.011756
0.002799
0.040121
0.019361
0.019361
0.011943
0.009237
0.005785
0
0.014212
0.151073
27,020
211
1,103
128.056872
0.920089
0
0
0.02
0
0.04
0.04235
0
0
0
0
0
0
0
null
null
0.02
0.04
null
null
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
3
43ff9c7834317ca88fdd10a2fa19c148b2543d32
276
py
Python
visualizations.py
msdevana/Self_Organizing_Maps
73dec256ca9c07a24af24c9a1b61a690c3cb7ce7
[ "MIT" ]
null
null
null
visualizations.py
msdevana/Self_Organizing_Maps
73dec256ca9c07a24af24c9a1b61a690c3cb7ce7
[ "MIT" ]
null
null
null
visualizations.py
msdevana/Self_Organizing_Maps
73dec256ca9c07a24af24c9a1b61a690c3cb7ce7
[ "MIT" ]
null
null
null
""" Created By Manish S. Devana Visualization tools for the SOMS """ import matplotlib.pyplot as plt import numpy as np import seaborn as sns def umatrix(som): """ """ def hitmap(som): """ """ def plot_weights(som): """ """
8.117647
32
0.554348
33
276
4.606061
0.757576
0.078947
0
0
0
0
0
0
0
0
0
0
0.318841
276
34
33
8.117647
0.808511
0.221014
0
0
0
0
0
0
0
0
0
0
0
1
0.5
false
0
0.5
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
0
1
0
0
3
a125c27d0459677dc021bf1fa462f0e654de07c3
269
py
Python
pandas/types/common.py
vimalromeo/pandas
7c14e4f14aff216be558bf5d4d2d00b4838c2360
[ "PSF-2.0", "Apache-2.0", "BSD-3-Clause-No-Nuclear-License-2014", "MIT", "ECL-2.0", "BSD-3-Clause" ]
69
2020-03-31T06:40:17.000Z
2022-02-25T11:48:18.000Z
venv/lib/python3.7/site-packages/pandas/types/common.py
John1001Song/Big-Data-Robo-Adviser
9444dce96954c546333d5aecc92a06c3bfd19aa5
[ "MIT" ]
8
2019-12-04T23:44:11.000Z
2022-02-10T08:31:40.000Z
venv/lib/python3.7/site-packages/pandas/types/common.py
John1001Song/Big-Data-Robo-Adviser
9444dce96954c546333d5aecc92a06c3bfd19aa5
[ "MIT" ]
28
2020-04-15T15:24:17.000Z
2021-12-26T04:05:02.000Z
import warnings

warnings.warn("pandas.types.common is deprecated and will be "
              "removed in a future version, import "
              "from pandas.api.types",
              DeprecationWarning, stacklevel=3)

from pandas.core.dtypes.common import *  # noqa
29.888889
62
0.654275
32
269
5.5
0.75
0.113636
0
0
0
0
0
0
0
0
0
0.005051
0.263941
269
8
63
33.625
0.883838
0.01487
0
0
0
0
0.391635
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
3
a142ba7e30a25e454afb0eeaef27a2699539f282
809
py
Python
torch_glow/tests/nodes/dropout_test.py
dreiss/glow
54c7625053bfb14955c4402ab8d9bca85195d53c
[ "Apache-2.0" ]
2
2022-03-04T17:32:39.000Z
2022-03-04T18:15:37.000Z
torch_glow/tests/nodes/dropout_test.py
dreiss/glow
54c7625053bfb14955c4402ab8d9bca85195d53c
[ "Apache-2.0" ]
10
2020-08-09T11:46:55.000Z
2020-12-03T04:38:52.000Z
torch_glow/tests/nodes/dropout_test.py
dreiss/glow
54c7625053bfb14955c4402ab8d9bca85195d53c
[ "Apache-2.0" ]
null
null
null
from __future__ import absolute_import, division, print_function, unicode_literals

import torch
import torch.nn.functional as F
from tests.utils import jitVsGlow
import unittest


class TestDropout(unittest.TestCase):
    def test_dropout(self):
        """Basic test of the PyTorch aten::dropout Node on Glow."""

        def test_f(a):
            return F.dropout(a + a, p=0.5, training=False)

        x = torch.randn(6, 4, 10)

        jitVsGlow(test_f, x, expected_fused_ops={"aten::dropout"})

    def test_dropout_inplace(self):
        """Basic test of the PyTorch aten::dropout_ Node on Glow."""

        def test_f(a):
            return F.dropout(a + a, p=0.5, training=False, inplace=True)

        x = torch.randn(6, 4, 10)

        jitVsGlow(test_f, x, expected_fused_ops={"aten::dropout_"})
26.966667
82
0.657602
118
809
4.338983
0.423729
0.054688
0.054688
0.058594
0.5625
0.5625
0.5625
0.5625
0.5625
0.5625
0
0.019169
0.226205
809
29
83
27.896552
0.798722
0.133498
0
0.25
0
0
0.03913
0
0
0
0
0
0
1
0.25
false
0
0.3125
0.125
0.75
0.0625
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
0
0
0
3
a1474b6aeca5189ad6bb26ab492d50b2d1e5a565
1,728
py
Python
tests/test_get.py
cryptomaniac512/falcon-toolkit
da4950cf93b4923a0733e8903df0cb54879932ec
[ "MIT" ]
3
2018-02-08T09:00:37.000Z
2018-02-13T09:17:17.000Z
tests/test_get.py
cryptomaniac512/pytest-falcon-client
da4950cf93b4923a0733e8903df0cb54879932ec
[ "MIT" ]
1
2019-02-08T13:26:08.000Z
2019-02-08T13:26:08.000Z
tests/test_get.py
sivakov512/pytest-falcon-client
da4950cf93b4923a0733e8903df0cb54879932ec
[ "MIT" ]
null
null
null
import pytest


@pytest.mark.parametrize("client_fixture", ("client", "cors_client"))
def test_getting_json(request, client_fixture):
    client = request.getfixturevalue(client_fixture)

    got = client.get("/example/20/")

    assert got == "20 ok"


@pytest.mark.parametrize("client_fixture", ("client", "cors_client"))
def test_getting_response(request, client_fixture):
    client = request.getfixturevalue(client_fixture)

    response = client.get("/example/20/", as_response=True)

    assert response.status_code == 200
    assert response.json == "20 ok"


@pytest.mark.parametrize("client_fixture", ("client", "cors_client"))
def test_raises_for_failed_request(request, client_fixture):
    client = request.getfixturevalue(client_fixture)

    with pytest.raises(AssertionError):
        client.get("/example/42/")


@pytest.mark.parametrize("client_fixture", ("client", "cors_client"))
def test_getting_response_for_failed_request(request, client_fixture):
    client = request.getfixturevalue(client_fixture)

    response = client.get("/example/42/", as_response=True)

    assert response.status_code == 400
    assert response.json == "Invalid id"


@pytest.mark.parametrize("client_fixture", ("client", "cors_client"))
def test_getting_json_for_failed_request(request, client_fixture):
    client = request.getfixturevalue(client_fixture)

    got = client.get("/example/42/", expected_statuses=[400])

    assert got == "Invalid id"


def test_raises_with_default_client_for_cors_api(make_client, cors_api):
    """Will raise `KeyError` because `Origin` header is not provided by default client."""
    client = make_client(cors_api)

    with pytest.raises(KeyError):
        client.get("/example/20/")
30.315789
72
0.735532
213
1,728
5.713615
0.230047
0.16023
0.156122
0.110929
0.706656
0.706656
0.706656
0.644207
0.644207
0.644207
0
0.016711
0.134259
1,728
56
73
30.857143
0.796791
0.046296
0
0.3125
0
0
0.156899
0
0
0
0
0
0.21875
1
0.1875
false
0
0.03125
0
0.21875
0
0
0
0
null
0
0
0
0
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
a14e615dfb6b88fbe3ee9b2ee0c8898d500bdf49
583
py
Python
PythonCode/Laboratorio de programacao/7.py
CrystianPrintes20/ProjetoUri
92a88ae2671a556f4d418c3605e9a2c6933dc9d8
[ "MIT" ]
null
null
null
PythonCode/Laboratorio de programacao/7.py
CrystianPrintes20/ProjetoUri
92a88ae2671a556f4d418c3605e9a2c6933dc9d8
[ "MIT" ]
null
null
null
PythonCode/Laboratorio de programacao/7.py
CrystianPrintes20/ProjetoUri
92a88ae2671a556f4d418c3605e9a2c6933dc9d8
[ "MIT" ]
null
null
null
import random


def Ler_vetor():
    funcionarios = 50
    return random.sample(range(100), funcionarios)


def Somar(v):
    soma = sum(v)
    return soma


def Maior(v):
    return max(v)


def Menor(v):
    return min(v)


def Media(v):
    return (sum(v) / len(v))


vetor = Ler_vetor()
print("Salarios: {}".format(vetor))
print('Valor da folha salarial da empresa: {} R$'.format(Somar(vetor)))
print('Valor do maior salário: {} R$'.format(Maior(vetor)))
print('Valor do menor salário: {} R$'.format(Menor(vetor)))
print('Valor da Média dos salários: {:.0f}'.format(Media(vetor)))
18.21875
71
0.653516
87
583
4.356322
0.413793
0.131926
0.158311
0.08971
0
0
0
0
0
0
0
0.012422
0.171527
583
31
72
18.806452
0.772257
0
0
0
0
0
0.250429
0
0
0
0
0
0
1
0.263158
false
0
0.052632
0.157895
0.578947
0.263158
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
a15d3d5148df0189099c5f0b0ea7b98144e3636d
67
py
Python
exercicios/ex037.py
evertoncorreadias/aulas
3a99a7c4223caa7cbda7a891b619875135847e9d
[ "MIT" ]
null
null
null
exercicios/ex037.py
evertoncorreadias/aulas
3a99a7c4223caa7cbda7a891b619875135847e9d
[ "MIT" ]
null
null
null
exercicios/ex037.py
evertoncorreadias/aulas
3a99a7c4223caa7cbda7a891b619875135847e9d
[ "MIT" ]
null
null
null
nome = input('qual seu nome').strip()
print('correa' in nome.lower())
33.5
35
0.701493
11
67
4.272727
0.818182
0
0
0
0
0
0
0
0
0
0
0
0.074627
67
2
36
33.5
0.758065
0
0
0
0
0
0.279412
0
0
0
0
0
0
1
0
false
0
0
0
0
0.5
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
3
a17c10da92206173ff4407c8744df462918759a6
660
py
Python
7KYU/cool_string.py
yaznasivasai/python_codewars
25493591dde4649dc9c1ec3bece8191a3bed6818
[ "MIT" ]
4
2021-07-17T22:48:03.000Z
2022-03-25T14:10:58.000Z
7KYU/cool_string.py
yaznasivasai/python_codewars
25493591dde4649dc9c1ec3bece8191a3bed6818
[ "MIT" ]
null
null
null
7KYU/cool_string.py
yaznasivasai/python_codewars
25493591dde4649dc9c1ec3bece8191a3bed6818
[ "MIT" ]
3
2021-06-14T14:18:16.000Z
2022-03-16T06:02:02.000Z
def cool_string(s: str) -> bool:
    """ Let's call a string cool if it is formed only by Latin letters and no two lowercase and no two
    uppercase letters are in adjacent positions. Given a string, check if it is cool. """
    string_without_whitespace = ''.join(s.split())
    if string_without_whitespace.isalpha():
        for i in range(len(string_without_whitespace) - 1):
            if (string_without_whitespace[i].islower() and string_without_whitespace[i + 1].islower()) or \
                    (string_without_whitespace[i].isupper() and string_without_whitespace[i + 1].isupper()):
                return False
        return True
    return False
50.769231
190
0.690909
95
660
4.642105
0.484211
0.206349
0.365079
0.217687
0.126984
0.126984
0
0
0
0
0
0.005758
0.210606
660
12
191
55
0.840691
0.268182
0
0.25
0
0
0
0
0
0
0
0
0
1
0.125
false
0
0
0
0.5
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
a19337afbe026927a8882a88c3e4ae782bae32da
64
py
Python
public/app/constants.py
iconation/server-status
e10d7764163ec8bdba89740676a5bd25a900bc7b
[ "Apache-2.0" ]
null
null
null
public/app/constants.py
iconation/server-status
e10d7764163ec8bdba89740676a5bd25a900bc7b
[ "Apache-2.0" ]
null
null
null
public/app/constants.py
iconation/server-status
e10d7764163ec8bdba89740676a5bd25a900bc7b
[ "Apache-2.0" ]
null
null
null
GITHUB_REPOSITORY = "https://github.com/ICONation/server-status"
64
64
0.8125
8
64
6.375
0.875
0
0
0
0
0
0
0
0
0
0
0
0.03125
64
1
64
64
0.822581
0
0
0
0
0
0.646154
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
a1a70aa5d483e05a450efe95d9c97acafd7ee844
2,292
py
Python
api/fatsecretapi/views.py
Abiwax/CalorieCheckerIonic4
4f027aa19916d7a24c3a6d50ded812a1abde081b
[ "Apache-2.0" ]
null
null
null
api/fatsecretapi/views.py
Abiwax/CalorieCheckerIonic4
4f027aa19916d7a24c3a6d50ded812a1abde081b
[ "Apache-2.0" ]
7
2020-06-05T20:27:04.000Z
2022-03-02T06:52:54.000Z
api/fatsecretapi/views.py
Abiwax/CalorieCheckerIonic4
4f027aa19916d7a24c3a6d50ded812a1abde081b
[ "Apache-2.0" ]
null
null
null
# Create your views here.
from fatsecret import Fatsecret
from django.http import JsonResponse
from django.conf import settings

fs = Fatsecret(settings.FATSECRET_ACCESS_KEY, settings.FATSECRET_SECRET_KEY)


def foods(request):
    if request.method == 'GET':
        search = request.GET["search"]
        foods_detail = []
        try:
            foods_detail = fs.foods_search(search, max_results=50, page_number=1)
        except:
            foods_detail = {"error": "An error occured while processing."}
        response = JsonResponse({"results": foods_detail})
        return response
    elif request.method == 'POST':
        response = JsonResponse({"error": "must be a get method"})
        return response


def food(request):
    if request.method == 'GET':
        search = request.GET["search"]
        foods = []
        try:
            foods = fs.food_get(search)
        except Exception as ex:
            print(ex)
            foods = {"error": "An error occured while processing."}
        response = JsonResponse({"results": foods})
        return response
    elif request.method == 'POST':
        response = JsonResponse({"error": "must be a get method"})
        return response


def recipes(request):
    if request.method == 'GET':
        search = request.GET["search"]
        recipes_result = []
        try:
            recipes_result = fs.recipes_search(search, max_results=50, page_number=1)
        except Exception as ex:
            print(ex)
            recipes_result = {"error": "An error occured while processing."}
        response = JsonResponse({"results": recipes_result})
        return response
    elif request.method == 'POST':
        response = JsonResponse({"error": "must be a get method"})
        return response


def recipe(request):
    if request.method == 'GET':
        search = request.GET["search"]
        recipe_result = []
        try:
            recipe_result = fs.recipe_get(search)
        except Exception as ex:
            print(ex)
            recipe_result = {"error": "An error occured while processing."}
        response = JsonResponse({"results": recipe_result})
        return response
    elif request.method == 'POST':
        response = JsonResponse({"error": "must be a get method"})
        return response
32.742857
85
0.609948
251
2,292
5.474104
0.207171
0.065502
0.046579
0.064047
0.740175
0.740175
0.725619
0.725619
0.674672
0.546579
0
0.003636
0.280105
2,292
69
86
33.217391
0.829091
0.010035
0
0.576271
0
0
0.14841
0
0
0
0
0
0
1
0.067797
false
0
0.050847
0
0.254237
0.050847
0
0
0
null
0
0
0
0
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
a1ac20e362a8de936d8dcf7e4305859b84a5faa1
1,187
py
Python
tests/config/test_manager.py
NishanBanga/JobFunnel
d1b326e803fb79ffcdb2d31f9cb17913959c1922
[ "MIT" ]
1,652
2019-07-13T14:41:37.000Z
2022-03-29T04:25:06.000Z
tests/config/test_manager.py
GlobalOpenSourceSociety/JobFunnel
45b3a8784313f1af1f21536df77c63868ffcdb7d
[ "MIT" ]
124
2019-07-04T16:36:04.000Z
2022-03-22T19:52:58.000Z
tests/config/test_manager.py
GlobalOpenSourceSociety/JobFunnel
45b3a8784313f1af1f21536df77c63868ffcdb7d
[ "MIT" ]
215
2019-11-12T01:07:24.000Z
2022-03-15T20:23:10.000Z
# FIXME: need to break down config manager testing a bit more
# @pytest.mark.parametrize('pass_del_cfg', (True, False))
# def test_config_manager_init(mocker, pass_del_cfg):
#     """NOTE: unlike other configs this one validates itself on creation
#     """
#     # Mocks
#     patch_del_cfg = mocker.patch('jobfunnel.config.manager.DelayConfig')
#     patch_os = mocker.patch('jobfunnel.config.manager.os')
#     patch_os.path.exists.return_value = False  # check it makes all paths
#     mock_master_csv = mocker.Mock()
#     mock_block_list = mocker.Mock()
#     mock_dupe_list = mocker.Mock()
#     mock_cache_folder = mocker.Mock()
#     mock_search_cfg = mocker.Mock()
#     mock_proxy_cfg = mocker.Mock()
#     mock_del_cfg = mocker.Mock()
#
#     # FUT
#     cfg = JobFunnelConfigManager(
#         master_csv_file=mock_master_csv,
#         user_block_list_file=mock_block_list,
#         duplicates_list_file=mock_dupe_list,
#         cache_folder=mock_cache_folder,
#         search_config=mock_search_cfg,
#         delay_config=mock_del_cfg if pass_del_cfg else None,
#         proxy_config=mock_proxy_cfg,
#         log_file='',  # TODO optional?
#     )
#
#     # Assertions
38.290323
75
0.679865
156
1,187
4.826923
0.448718
0.092961
0.111554
0.069057
0.087649
0
0
0
0
0
0
0
0.2123
1,187
30
76
39.566667
0.805348
0.946083
0
null
0
null
0
0
null
0
0
0.033333
null
1
null
true
0
0
null
null
null
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
1
0
0
0
1
0
0
0
0
0
0
3
a1bf02611c6a82153d263112d9c736b3b648875b
172
py
Python
Python/prova/fibro 2.py
marcelosilva7/Primeiro-repositorio
2550c86d07101bed84e0d110599a9492b1069263
[ "MIT" ]
null
null
null
Python/prova/fibro 2.py
marcelosilva7/Primeiro-repositorio
2550c86d07101bed84e0d110599a9492b1069263
[ "MIT" ]
null
null
null
Python/prova/fibro 2.py
marcelosilva7/Primeiro-repositorio
2550c86d07101bed84e0d110599a9492b1069263
[ "MIT" ]
null
null
null
t1 = 0
t2 = 1
print(f'{t1} -> {t2} -> ', end='')
for c in range(1, 21):
    t3 = t1 + t2
    print(f'{t3} -> ', end='')
    t1 = t2
    t2 = t3
print('FIM', end='')
19.111111
35
0.401163
29
172
2.37931
0.482759
0.173913
0
0
0
0
0
0
0
0
0
0.147826
0.331395
172
9
36
19.111111
0.452174
0
0
0
0
0
0.163636
0
0
0
0
0
0
1
0
false
0
0
0
0
0.333333
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
a1d0457b1dc1d0b5cbd54a40bd10967ee66ed22a
42,034
py
Python
plogical/vhost.py
uzairAK/serverom-panel
3dcde05ad618e6bef280db7d3180f926fe2ab1db
[ "MIT" ]
null
null
null
plogical/vhost.py
uzairAK/serverom-panel
3dcde05ad618e6bef280db7d3180f926fe2ab1db
[ "MIT" ]
null
null
null
plogical/vhost.py
uzairAK/serverom-panel
3dcde05ad618e6bef280db7d3180f926fe2ab1db
[ "MIT" ]
null
null
null
#!/usr/local/CyberCP/bin/python import os import os.path import sys import django sys.path.append('/usr/local/CyberCP') os.environ.setdefault("DJANGO_SETTINGS_MODULE", "CyberCP.settings") try: django.setup() except: pass import shutil from plogical import installUtilities import subprocess import shlex from plogical import CyberCPLogFileWriter as logging from plogical.mysqlUtilities import mysqlUtilities from plogical.dnsUtilities import DNS from random import randint from plogical.processUtilities import ProcessUtilities from managePHP.phpManager import PHPManager from plogical.vhostConfs import vhostConfs from ApachController.ApacheVhosts import ApacheVhost try: from websiteFunctions.models import Websites, ChildDomains, aliasDomains from databases.models import Databases except: pass import pwd import grp ## If you want justice, you have come to the wrong place. class vhost: Server_root = "/usr/local/lsws" cyberPanel = "/usr/local/CyberCP" redisConf = '/usr/local/lsws/conf/dvhost_redis.conf' @staticmethod def addUser(virtualHostUser, path): try: FNULL = open(os.devnull, 'w') if os.path.exists("/etc/lsb-release"): command = 'adduser --no-create-home --home ' + path + ' --disabled-login --gecos "" ' + virtualHostUser else: command = "adduser " + virtualHostUser + " -M -d " + path ProcessUtilities.executioner(command) command = "groupadd " + virtualHostUser ProcessUtilities.executioner(command) command = "usermod -a -G " + virtualHostUser + " " + virtualHostUser ProcessUtilities.executioner(command) except BaseException as msg: logging.CyberCPLogFileWriter.writeToFile(str(msg) + " [addingUsers]") @staticmethod def createDirectories(path, virtualHostUser, pathHTML, pathLogs, confPath, completePathToConfigFile): try: FNULL = open(os.devnull, 'w') try: command = 'chmod 711 /home' cmd = shlex.split(command) subprocess.call(cmd, stdout=FNULL, stderr=subprocess.STDOUT) except: pass try: os.makedirs(path) command = "chown " + virtualHostUser + ":" + virtualHostUser + " " + path cmd = shlex.split(command) subprocess.call(cmd, stdout=FNULL, stderr=subprocess.STDOUT) command = "chmod 711 " + path cmd = shlex.split(command) subprocess.call(cmd, stdout=FNULL, stderr=subprocess.STDOUT) except OSError as msg: logging.CyberCPLogFileWriter.writeToFile( str(msg) + " [27 Not able create to directories for virtual host [createDirectories]]") return [0, "[27 Not able to directories for virtual host [createDirectories]]"] try: os.makedirs(pathHTML) if ProcessUtilities.decideDistro() == ProcessUtilities.centos: groupName = 'nobody' else: groupName = 'nogroup' command = "chown " + virtualHostUser + ":%s " % (groupName) + pathHTML cmd = shlex.split(command) subprocess.call(cmd, stdout=FNULL, stderr=subprocess.STDOUT) command = "chmod 750 %s" % (pathHTML) cmd = shlex.split(command) subprocess.call(cmd, stdout=FNULL, stderr=subprocess.STDOUT) except OSError as msg: logging.CyberCPLogFileWriter.writeToFile( str(msg) + " [33 Not able to directories for virtual host [createDirectories]]") return [0, "[33 Not able to directories for virtual host [createDirectories]]"] try: os.makedirs(pathLogs) if ProcessUtilities.decideDistro() == ProcessUtilities.centos: groupName = 'nobody' else: groupName = 'nogroup' command = "chown %s:%s %s" % ('root', groupName, pathLogs) cmd = shlex.split(command) subprocess.call(cmd, stdout=FNULL, stderr=subprocess.STDOUT) if ProcessUtilities.decideServer() == ProcessUtilities.OLS: command = "chmod -R 750 " + pathLogs else: command = "chmod -R 750 " + pathLogs cmd = shlex.split(command) 
subprocess.call(cmd, stdout=FNULL, stderr=subprocess.STDOUT) except OSError as msg: logging.CyberCPLogFileWriter.writeToFile( str(msg) + " [39 Not able to directories for virtual host [createDirectories]]") return [0, "[39 Not able to directories for virtual host [createDirectories]]"] try: ## For configuration files permissions will be changed later globally. if not os.path.exists(confPath): os.makedirs(confPath) except OSError as msg: logging.CyberCPLogFileWriter.writeToFile( str(msg) + " [45 Not able to directories for virtual host [createDirectories]]") return [0, "[45 Not able to directories for virtual host [createDirectories]]"] try: ## For configuration files permissions will be changed later globally. file = open(completePathToConfigFile, "w+") command = "chown " + "lsadm" + ":" + "lsadm" + " " + completePathToConfigFile cmd = shlex.split(command) subprocess.call(cmd, stdout=FNULL, stderr=subprocess.STDOUT) command = 'chmod 600 %s' % (completePathToConfigFile) cmd = shlex.split(command) subprocess.call(cmd, stdout=FNULL, stderr=subprocess.STDOUT) except IOError as msg: logging.CyberCPLogFileWriter.writeToFile(str(msg) + " [createDirectories]]") return [0, "[45 Not able to directories for virtual host [createDirectories]]"] return [1, 'None'] except BaseException as msg: logging.CyberCPLogFileWriter.writeToFile(str(msg) + " [createDirectories]") return [0, str(msg)] @staticmethod def finalizeVhostCreation(virtualHostName, virtualHostUser): try: FNULL = open(os.devnull, 'w') shutil.copy("/usr/local/CyberCP/index.html", "/home/" + virtualHostName + "/public_html/index.html") command = "chown " + virtualHostUser + ":" + virtualHostUser + " " + "/home/" + virtualHostName + "/public_html/index.html" cmd = shlex.split(command) subprocess.call(cmd, stdout=FNULL, stderr=subprocess.STDOUT) vhostPath = vhost.Server_root + "/conf/vhosts" command = "chown -R " + "lsadm" + ":" + "lsadm" + " " + vhostPath cmd = shlex.split(command) subprocess.call(cmd, stdout=FNULL, stderr=subprocess.STDOUT) except BaseException as msg: logging.CyberCPLogFileWriter.writeToFile(str(msg) + " [finalizeVhostCreation]") @staticmethod def createDirectoryForVirtualHost(virtualHostName,administratorEmail,virtualHostUser, phpVersion, openBasedir): path = "/home/" + virtualHostName pathHTML = "/home/" + virtualHostName + "/public_html" pathLogs = "/home/" + virtualHostName + "/logs" confPath = vhost.Server_root + "/conf/vhosts/"+virtualHostName completePathToConfigFile = confPath +"/vhost.conf" ## adding user vhost.addUser(virtualHostUser, path) ## Creating Directories result = vhost.createDirectories(path, virtualHostUser, pathHTML, pathLogs, confPath, completePathToConfigFile) if result[0] == 0: return [0, result[1]] ## Creating Per vhost Configuration File if vhost.perHostVirtualConf(completePathToConfigFile,administratorEmail,virtualHostUser,phpVersion, virtualHostName, openBasedir) == 1: return [1,"None"] else: return [0,"[61 Not able to create per host virtual configurations [perHostVirtualConf]"] @staticmethod def perHostVirtualConf(vhFile, administratorEmail,virtualHostUser, phpVersion, virtualHostName, openBasedir): # General Configurations tab if ProcessUtilities.decideServer() == ProcessUtilities.OLS: try: confFile = open(vhFile, "w+") php = PHPManager.getPHPString(phpVersion) currentConf = vhostConfs.olsMasterConf currentConf = currentConf.replace('{adminEmails}', administratorEmail) currentConf = currentConf.replace('{virtualHostUser}', virtualHostUser) currentConf = currentConf.replace('{php}', php) 
currentConf = currentConf.replace('{adminEmails}', administratorEmail) currentConf = currentConf.replace('{php}', php) if openBasedir == 1: currentConf = currentConf.replace('{open_basedir}', 'php_admin_value open_basedir "/tmp:$VH_ROOT"') else: currentConf = currentConf.replace('{open_basedir}', '') confFile.write(currentConf) confFile.close() except BaseException as msg: logging.CyberCPLogFileWriter.writeToFile( str(msg) + " [IO Error with per host config file [perHostVirtualConf]]") return 0 return 1 else: try: if not os.path.exists(vhost.redisConf): confFile = open(vhFile, "w+") php = PHPManager.getPHPString(phpVersion) currentConf = vhostConfs.lswsMasterConf currentConf = currentConf.replace('{virtualHostName}', virtualHostName) currentConf = currentConf.replace('{administratorEmail}', administratorEmail) currentConf = currentConf.replace('{externalApp}', virtualHostUser) currentConf = currentConf.replace('{php}', php) confFile.write(currentConf) confFile.close() else: ## Non-www currentConf = vhostConfs.lswsRediConfMaster currentConf = currentConf.replace('{virtualHostName}', virtualHostName) currentConf = currentConf.replace('{administratorEmail}', administratorEmail) currentConf = currentConf.replace('{externalApp}', virtualHostUser) currentConf = currentConf.replace('{php}', phpVersion.lstrip('PHP ')) currentConf = currentConf.replace('{uid}', str(pwd.getpwnam(virtualHostUser).pw_uid)) currentConf = currentConf.replace('{gid}', str(grp.getgrnam(virtualHostUser).gr_gid)) command = 'redis-cli set %s' % (currentConf) ProcessUtilities.executioner(command) ## WWW currentConf = vhostConfs.lswsRediConfMasterWWW currentConf = currentConf.replace('{virtualHostName}', 'www.%s' % (virtualHostName)) currentConf = currentConf.replace('{master}', virtualHostName) currentConf = currentConf.replace('{administratorEmail}', administratorEmail) currentConf = currentConf.replace('{externalApp}', virtualHostUser) currentConf = currentConf.replace('{php}', phpVersion.lstrip('PHP ')) currentConf = currentConf.replace('{uid}', str(pwd.getpwnam(virtualHostUser).pw_uid)) currentConf = currentConf.replace('{gid}', str(grp.getgrnam(virtualHostUser).gr_gid)) command = 'redis-cli set %s' % (currentConf) ProcessUtilities.executioner(command) except BaseException as msg: logging.CyberCPLogFileWriter.writeToFile( str(msg) + " [IO Error with per host config file [perHostVirtualConf]]") return 0 return 1 @staticmethod def createNONSSLMapEntry(virtualHostName): try: data = open("/usr/local/lsws/conf/httpd_config.conf").readlines() writeDataToFile = open("/usr/local/lsws/conf/httpd_config.conf", 'w') map = " map " + virtualHostName + " " + virtualHostName + "\n" mapchecker = 1 for items in data: if (mapchecker == 1 and (items.find("listener") > -1 and items.find("Default") > -1)): writeDataToFile.writelines(items) writeDataToFile.writelines(map) mapchecker = 0 else: writeDataToFile.writelines(items) return 1 except BaseException as msg: logging.CyberCPLogFileWriter.writeToFile(str(msg)) return 0 @staticmethod def createConfigInMainVirtualHostFile(virtualHostName): if ProcessUtilities.decideServer() == ProcessUtilities.OLS: try: if vhost.createNONSSLMapEntry(virtualHostName) == 0: return [0, "Failed to create NON SSL Map Entry [createConfigInMainVirtualHostFile]"] writeDataToFile = open("/usr/local/lsws/conf/httpd_config.conf", 'a') currentConf = vhostConfs.olsMasterMainConf currentConf = currentConf.replace('{virtualHostName}', virtualHostName) writeDataToFile.write(currentConf) writeDataToFile.close() return 
[1,"None"] except BaseException as msg: logging.CyberCPLogFileWriter.writeToFile(str(msg) + "223 [IO Error with main config file [createConfigInMainVirtualHostFile]]") return [0,"223 [IO Error with main config file [createConfigInMainVirtualHostFile]]"] else: try: writeDataToFile = open("/usr/local/lsws/conf/httpd.conf", 'a') configFile = 'Include /usr/local/lsws/conf/vhosts/' + virtualHostName + '/vhost.conf\n' writeDataToFile.writelines(configFile) writeDataToFile.close() writeDataToFile.close() return [1, "None"] except BaseException as msg: logging.CyberCPLogFileWriter.writeToFile( str(msg) + "223 [IO Error with main config file [createConfigInMainVirtualHostFile]]") return [0, "223 [IO Error with main config file [createConfigInMainVirtualHostFile]]"] @staticmethod def deleteVirtualHostConfigurations(virtualHostName): if ProcessUtilities.decideServer() == ProcessUtilities.OLS: try: ## Deleting master conf numberOfSites = str(Websites.objects.count() + ChildDomains.objects.count()) vhost.deleteCoreConf(virtualHostName, numberOfSites) delWebsite = Websites.objects.get(domain=virtualHostName) externalApp = delWebsite.externalApp ## databases = Databases.objects.filter(website=delWebsite) childDomains = delWebsite.childdomains_set.all() ## Deleting child domains for items in childDomains: numberOfSites = Websites.objects.count() + ChildDomains.objects.count() vhost.deleteCoreConf(items.domain, numberOfSites) for items in databases: mysqlUtilities.deleteDatabase(items.dbName, items.dbUser) delWebsite.delete() ## Deleting DNS Zone if there is any. DNS.deleteDNSZone(virtualHostName) if not os.path.exists(vhost.redisConf): installUtilities.installUtilities.reStartLiteSpeed() ## Delete mail accounts command = "rm -rf /home/vmail/" + virtualHostName subprocess.call(shlex.split(command)) ## if ProcessUtilities.decideDistro() == ProcessUtilities.centos: command = 'userdel -r -f %s' % (externalApp) else: command = 'deluser %s' % (externalApp) ProcessUtilities.executioner(command) # command = 'groupdel %s' % (externalApp) ProcessUtilities.executioner(command) ## Remove git conf folder if present gitPath = '/home/cyberpanel/git/%s' % (virtualHostName) if os.path.exists(gitPath): shutil.rmtree(gitPath) except BaseException as msg: logging.CyberCPLogFileWriter.writeToFile(str(msg) + " [Not able to remove virtual host configuration from main configuration file.]") return 0 return 1 else: try: ## Deleting master conf numberOfSites = str(Websites.objects.count() + ChildDomains.objects.count()) vhost.deleteCoreConf(virtualHostName, numberOfSites) delWebsite = Websites.objects.get(domain=virtualHostName) externalApp = delWebsite.externalApp ## Cagefs command = '/usr/sbin/cagefsctl --disable %s' % (delWebsite.externalApp) ProcessUtilities.normalExecutioner(command) databases = Databases.objects.filter(website=delWebsite) childDomains = delWebsite.childdomains_set.all() ## Deleting child domains for items in childDomains: numberOfSites = Websites.objects.count() + ChildDomains.objects.count() vhost.deleteCoreConf(items.domain, numberOfSites) for items in databases: mysqlUtilities.deleteDatabase(items.dbName, items.dbUser) delWebsite.delete() ## Deleting DNS Zone if there is any. 
DNS.deleteDNSZone(virtualHostName) installUtilities.installUtilities.reStartLiteSpeed() ## Delete mail accounts command = "rm -rf /home/vmail/" + virtualHostName subprocess.call(shlex.split(command)) ## if ProcessUtilities.decideDistro() == ProcessUtilities.centos: command = 'userdel -r -f %s' % (externalApp) else: command = 'deluser %s' % (externalApp) ProcessUtilities.executioner(command) # command = 'groupdel %s' % (externalApp) ProcessUtilities.executioner(command) except BaseException as msg: logging.CyberCPLogFileWriter.writeToFile( str(msg) + " [Not able to remove virtual host configuration from main configuration file.]") return 0 return 1 @staticmethod def deleteCoreConf(virtualHostName, numberOfSites): if ProcessUtilities.decideServer() == ProcessUtilities.OLS: try: virtualHostPath = "/home/" + virtualHostName if os.path.exists(virtualHostPath): shutil.rmtree(virtualHostPath) confPath = vhost.Server_root + "/conf/vhosts/" + virtualHostName if os.path.exists(confPath): shutil.rmtree(confPath) data = open("/usr/local/lsws/conf/httpd_config.conf").readlines() writeDataToFile = open("/usr/local/lsws/conf/httpd_config.conf", 'w') check = 1 sslCheck = 1 for items in data: if numberOfSites == 1: if (items.find(' ' + virtualHostName) > -1 and items.find(" map " + virtualHostName) > -1): continue if (items.find(' ' + virtualHostName) > -1 and (items.find("virtualHost") > -1 or items.find("virtualhost") > -1)): check = 0 if items.find("listener") > -1 and items.find("SSL") > -1: sslCheck = 0 if (check == 1 and sslCheck == 1): writeDataToFile.writelines(items) if (items.find("}") > -1 and (check == 0 or sslCheck == 0)): check = 1 sslCheck = 1 else: if (items.find(' ' + virtualHostName) > -1 and items.find(" map " + virtualHostName) > -1): continue if (items.find(' ' + virtualHostName) > -1 and (items.find("virtualHost") > -1 or items.find("virtualhost") > -1)): check = 0 if (check == 1): writeDataToFile.writelines(items) if (items.find("}") > -1 and check == 0): check = 1 ## Delete Apache Conf ApacheVhost.DeleteApacheVhost(virtualHostName) except BaseException as msg: logging.CyberCPLogFileWriter.writeToFile( str(msg) + " [Not able to remove virtual host configuration from main configuration file.]") return 0 return 1 else: virtualHostPath = "/home/" + virtualHostName try: shutil.rmtree(virtualHostPath) except BaseException as msg: logging.CyberCPLogFileWriter.writeToFile( str(msg) + " [Not able to remove virtual host directory from /home continuing..]") if not os.path.exists(vhost.redisConf): try: confPath = vhost.Server_root + "/conf/vhosts/" + virtualHostName shutil.rmtree(confPath) except BaseException as msg: logging.CyberCPLogFileWriter.writeToFile( str(msg) + " [Not able to remove virtual host configuration directory from /conf ]") try: data = open("/usr/local/lsws/conf/httpd.conf").readlines() writeDataToFile = open("/usr/local/lsws/conf/httpd.conf", 'w') for items in data: if items.find('/' + virtualHostName + '/') > -1: pass else: writeDataToFile.writelines(items) writeDataToFile.close() except BaseException as msg: logging.CyberCPLogFileWriter.writeToFile( str(msg) + " [Not able to remove virtual host configuration from main configuration file.]") return 0 return 1 else: command = 'redis-cli delete "vhost:%s"' % (virtualHostName) ProcessUtilities.executioner(command) command = 'redis-cli delete "vhost:www.%s"' % (virtualHostName) ProcessUtilities.executioner(command) @staticmethod def checkIfVirtualHostExists(virtualHostName): if os.path.exists("/home/" + virtualHostName): return 
1 return 0 @staticmethod def changePHP(vhFile, phpVersion): phpDetachUpdatePath = '/home/%s/.lsphp_restart.txt' % (vhFile.split('/')[-2]) if ProcessUtilities.decideServer() == ProcessUtilities.OLS: try: if ApacheVhost.changePHP(phpVersion, vhFile) == 0: data = open(vhFile, "r").readlines() php = PHPManager.getPHPString(phpVersion) if not os.path.exists("/usr/local/lsws/lsphp" + str(php) + "/bin/lsphp"): print(0, 'This PHP version is not available on your CyberPanel.') return [0, "[This PHP version is not available on your CyberPanel. [changePHP]"] writeDataToFile = open(vhFile, "w") path = " path /usr/local/lsws/lsphp" + str(php) + "/bin/lsphp\n" for items in data: if items.find("/usr/local/lsws/lsphp") > -1 and items.find("path") > -1: writeDataToFile.writelines(path) else: writeDataToFile.writelines(items) writeDataToFile.close() writeToFile = open(phpDetachUpdatePath, 'w') writeToFile.close() installUtilities.installUtilities.reStartLiteSpeed() try: os.remove(phpDetachUpdatePath) except: pass else: php = PHPManager.getPHPString(phpVersion) command = "systemctl restart php%s-php-fpm" % (php) ProcessUtilities.normalExecutioner(command) print("1,None") return 1,'None' except BaseException as msg: logging.CyberCPLogFileWriter.writeToFile( str(msg) + " [IO Error with per host config file [changePHP]") print(0,str(msg)) return [0, str(msg) + " [IO Error with per host config file [changePHP]"] else: try: if not os.path.exists(vhost.redisConf): data = open(vhFile, "r").readlines() php = PHPManager.getPHPString(phpVersion) if not os.path.exists("/usr/local/lsws/lsphp" + str(php) + "/bin/lsphp"): print(0, 'This PHP version is not available on your CyberPanel.') return [0, "[This PHP version is not available on your CyberPanel. [changePHP]"] writeDataToFile = open(vhFile, "w") finalString = ' AddHandler application/x-httpd-php' + str(php) + ' .php\n' for items in data: if items.find("AddHandler application/x-httpd") > -1: writeDataToFile.writelines(finalString) else: writeDataToFile.writelines(items) writeDataToFile.close() writeToFile = open(phpDetachUpdatePath, 'w') writeToFile.close() installUtilities.installUtilities.reStartLiteSpeed() try: os.remove(phpDetachUpdatePath) except: pass else: command = 'redis-cli get "vhost:%s"' % (vhFile.split('/')[-2]) configData = ProcessUtilities.outputExecutioner(command) import re configData = re.sub(r'"phpVersion": .*,', '"phpVersion": %s,' % (phpVersion.lstrip('PHP ')), configData) command = "redis-cli set vhost:%s '%s'" % (vhFile.split('/')[-2], configData) ProcessUtilities.executioner(command) print("1,None") return 1, 'None' except BaseException as msg: logging.CyberCPLogFileWriter.writeToFile( str(msg) + " [IO Error with per host config file [changePHP]]") print(0, str(msg)) return [0, str(msg) + " [IO Error with per host config file [changePHP]]"] @staticmethod def addRewriteRules(virtualHostName, fileName=None): try: pass except BaseException as msg: logging.CyberCPLogFileWriter.writeToFile(str(msg) + " [IO Error with per host config file [changePHP]]") return 0 return 1 @staticmethod def checkIfRewriteEnabled(data): try: for items in data: if items.find(".htaccess") > -1: return 1 return 0 except BaseException as msg: logging.CyberCPLogFileWriter.writeToFile( str(msg) + " [IO Error with per host config file [checkIfRewriteEnabled]]") return 0 @staticmethod def findDomainBW(domainName, totalAllowed): try: path = "/home/" + domainName + "/logs/" + domainName + ".access_log" if not os.path.exists("/home/" + domainName + "/logs"): print("0,0") bwmeta = 
"/home/" + domainName + "/logs/bwmeta" if not os.path.exists(path): print("0,0") if os.path.exists(bwmeta): try: data = open(bwmeta).readlines() currentUsed = int(data[0].strip("\n")) inMB = int(float(currentUsed) / (1024.0 * 1024.0)) if totalAllowed == 0: totalAllowed = 999999 percentage = float(100) / float(totalAllowed) percentage = float(percentage) * float(inMB) except: print("0,0") if percentage > 100.0: percentage = 100 print(str(inMB) + "," + str(percentage)) else: print("0,0") except OSError as msg: logging.CyberCPLogFileWriter.writeToFile(str(msg) + " [findDomainBW]") print("0,0") except ValueError as msg: logging.CyberCPLogFileWriter.writeToFile(str(msg) + " [findDomainBW]") print("0,0") @staticmethod def permissionControl(path): try: command = 'sudo chown -R cyberpanel:cyberpanel ' + path cmd = shlex.split(command) res = subprocess.call(cmd) except BaseException as msg: logging.CyberCPLogFileWriter.writeToFile(str(msg)) @staticmethod def leaveControl(path): try: command = 'sudo chown -R root:root ' + path cmd = shlex.split(command) res = subprocess.call(cmd) except BaseException as msg: logging.CyberCPLogFileWriter.writeToFile(str(msg)) @staticmethod def checkIfAliasExists(aliasDomain): try: alias = aliasDomains.objects.get(aliasDomain=aliasDomain) return 1 except BaseException as msg: return 0 @staticmethod def checkIfSSLAliasExists(data, aliasDomain): try: for items in data: if items.strip(',').strip('\n') == aliasDomain: return 1 return 0 except BaseException as msg: logging.CyberCPLogFileWriter.writeToFile(str(msg) + " [checkIfSSLAliasExists]") return 1 @staticmethod def createAliasSSLMap(confPath, masterDomain, aliasDomain): try: data = open(confPath, 'r').readlines() writeToFile = open(confPath, 'w') sslCheck = 0 for items in data: if (items.find("listener SSL") > -1): sslCheck = 1 if items.find(masterDomain) > -1 and items.find('map') > -1 and sslCheck == 1: data = [_f for _f in items.split(" ") if _f] if data[1] == masterDomain: if vhost.checkIfSSLAliasExists(data, aliasDomain) == 0: writeToFile.writelines(items.rstrip('\n') + ", " + aliasDomain + "\n") sslCheck = 0 else: writeToFile.writelines(items) else: writeToFile.writelines(items) writeToFile.close() installUtilities.installUtilities.reStartLiteSpeed() except BaseException as msg: logging.CyberCPLogFileWriter.writeToFile(str(msg) + " [createAliasSSLMap]") ## Child Domain Functions @staticmethod def finalizeDomainCreation(virtualHostUser, path): try: FNULL = open(os.devnull, 'w') shutil.copy("/usr/local/CyberCP/index.html", path + "/index.html") command = "chown " + virtualHostUser + ":" + virtualHostUser + " " + path + "/index.html" cmd = shlex.split(command) subprocess.call(cmd, stdout=FNULL, stderr=subprocess.STDOUT) vhostPath = vhost.Server_root + "/conf/vhosts" command = "chown -R " + "lsadm" + ":" + "lsadm" + " " + vhostPath cmd = shlex.split(command) subprocess.call(cmd, stdout=FNULL, stderr=subprocess.STDOUT) except BaseException as msg: logging.CyberCPLogFileWriter.writeToFile(str(msg) + " [finalizeDomainCreation]") @staticmethod def createDirectoryForDomain(masterDomain, domain, phpVersion, path, administratorEmail, virtualHostUser, openBasedir): FNULL = open(os.devnull, 'w') confPath = vhost.Server_root + "/conf/vhosts/" + domain completePathToConfigFile = confPath + "/vhost.conf" try: os.makedirs(path) if ProcessUtilities.decideDistro() == ProcessUtilities.centos: groupName = 'nobody' else: groupName = 'nogroup' command = "chown " + virtualHostUser + ":%s " % (groupName) + path cmd = 
shlex.split(command) subprocess.call(cmd, stdout=FNULL, stderr=subprocess.STDOUT) command = "chmod 755 %s" % (path) cmd = shlex.split(command) subprocess.call(cmd, stdout=FNULL, stderr=subprocess.STDOUT) except OSError as msg: logging.CyberCPLogFileWriter.writeToFile( str(msg) + "329 [Not able to create directories for virtual host [createDirectoryForDomain]]") try: ## For configuration files permissions will be changed later globally. os.makedirs(confPath) except OSError as msg: logging.CyberCPLogFileWriter.writeToFile( str(msg) + "335 [Not able to create directories for virtual host [createDirectoryForDomain]]") return [0, "[344 Not able to directories for virtual host [createDirectoryForDomain]]"] try: ## For configuration files permissions will be changed later globally. file = open(completePathToConfigFile, "w+") except IOError as msg: logging.CyberCPLogFileWriter.writeToFile(str(msg) + " [createDirectoryForDomain]]") return [0, "[351 Not able to directories for virtual host [createDirectoryForDomain]]"] if vhost.perHostDomainConf(path, masterDomain, domain, completePathToConfigFile, administratorEmail, phpVersion, virtualHostUser, openBasedir) == 1: return [1, "None"] else: return [0, "[359 Not able to create per host virtual configurations [createDirectoryForDomain]"] @staticmethod def perHostDomainConf(path, masterDomain, domain, vhFile, administratorEmail, phpVersion, virtualHostUser, openBasedir): if ProcessUtilities.decideServer() == ProcessUtilities.OLS: try: php = PHPManager.getPHPString(phpVersion) externalApp = virtualHostUser + str(randint(1000, 9999)) currentConf = vhostConfs.olsChildConf currentConf = currentConf.replace('{path}', path) currentConf = currentConf.replace('{masterDomain}', masterDomain) currentConf = currentConf.replace('{adminEmails}', administratorEmail) currentConf = currentConf.replace('{externalApp}', externalApp) currentConf = currentConf.replace('{externalAppMaster}', virtualHostUser) currentConf = currentConf.replace('{php}', php) currentConf = currentConf.replace('{adminEmails}', administratorEmail) currentConf = currentConf.replace('{php}', php) if openBasedir == 1: currentConf = currentConf.replace('{open_basedir}', 'php_admin_value open_basedir "/tmp:$VH_ROOT"') else: currentConf = currentConf.replace('{open_basedir}', '') confFile = open(vhFile, "w+") confFile.write(currentConf) confFile.close() except BaseException as msg: logging.CyberCPLogFileWriter.writeToFile( str(msg) + " [IO Error with per host config file [perHostDomainConf]]") return 0 return 1 else: try: if not os.path.exists(vhost.redisConf): confFile = open(vhFile, "w+") php = PHPManager.getPHPString(phpVersion) currentConf = vhostConfs.lswsChildConf currentConf = currentConf.replace('{virtualHostName}', domain) currentConf = currentConf.replace('{masterDomain}', masterDomain) currentConf = currentConf.replace('{administratorEmail}', administratorEmail) currentConf = currentConf.replace('{externalApp}', virtualHostUser) currentConf = currentConf.replace('{path}', path) currentConf = currentConf.replace('{php}', php) confFile.write(currentConf) confFile.close() else: ## Non www currentConf = vhostConfs.lswsRediConfChild currentConf = currentConf.replace('{virtualHostName}', domain) currentConf = currentConf.replace('{masterDomain}', masterDomain) currentConf = currentConf.replace('{administratorEmail}', administratorEmail) currentConf = currentConf.replace('{path}', path) currentConf = currentConf.replace('{externalApp}', virtualHostUser) currentConf = currentConf.replace('{php}', 
phpVersion.lstrip('PHP ')) currentConf = currentConf.replace('{uid}', str(pwd.getpwnam(virtualHostUser).pw_uid)) currentConf = currentConf.replace('{gid}', str(grp.getgrnam(virtualHostUser).gr_gid)) command = 'redis-cli set %s' % (currentConf) ProcessUtilities.executioner(command) ## www currentConf = vhostConfs.lswsRediConfChildWWW currentConf = currentConf.replace('{virtualHostName}', 'www.%s' % (domain)) currentConf = currentConf.replace('{masterDomain}', masterDomain) currentConf = currentConf.replace('{administratorEmail}', administratorEmail) currentConf = currentConf.replace('{path}', path) currentConf = currentConf.replace('{externalApp}', virtualHostUser) currentConf = currentConf.replace('{php}', phpVersion.lstrip('PHP ')) currentConf = currentConf.replace('{uid}', str(pwd.getpwnam(virtualHostUser).pw_uid)) currentConf = currentConf.replace('{gid}', str(grp.getgrnam(virtualHostUser).gr_gid)) command = 'redis-cli set %s' % (currentConf) ProcessUtilities.executioner(command) except BaseException as msg: logging.CyberCPLogFileWriter.writeToFile( str(msg) + " [IO Error with per host config file [perHostDomainConf]]") return 0 return 1 @staticmethod def createConfigInMainDomainHostFile(domain, masterDomain): if ProcessUtilities.decideServer() == ProcessUtilities.OLS: try: if vhost.createNONSSLMapEntry(domain) == 0: return [0, "Failed to create NON SSL Map Entry [createConfigInMainVirtualHostFile]"] writeDataToFile = open("/usr/local/lsws/conf/httpd_config.conf", 'a') currentConf = vhostConfs.olsChildMainConf currentConf = currentConf.replace('{virtualHostName}', domain) currentConf = currentConf.replace('{masterDomain}', masterDomain) writeDataToFile.write(currentConf) writeDataToFile.close() return [1, "None"] except BaseException as msg: logging.CyberCPLogFileWriter.writeToFile( str(msg) + "223 [IO Error with main config file [createConfigInMainDomainHostFile]]") return [0, "223 [IO Error with main config file [createConfigInMainDomainHostFile]]"] else: try: writeDataToFile = open("/usr/local/lsws/conf/httpd.conf", 'a') configFile = 'Include /usr/local/lsws/conf/vhosts/' + domain + '/vhost.conf\n' writeDataToFile.writelines(configFile) writeDataToFile.close() return [1, "None"] except BaseException as msg: logging.CyberCPLogFileWriter.writeToFile( str(msg) + "223 [IO Error with main config file [createConfigInMainDomainHostFile]]") return [0, "223 [IO Error with main config file [createConfigInMainDomainHostFile]]"]
41.372047
149
0.560142
3,475
42,034
6.762302
0.103597
0.055236
0.072812
0.050385
0.756798
0.722967
0.689434
0.660964
0.625388
0.601813
0
0.009246
0.343888
42,034
1,015
150
41.412808
0.842815
0.018128
0
0.720109
0
0
0.152812
0.033993
0.005435
0
0
0
0
1
0.03125
false
0.009511
0.028533
0
0.146739
0.017663
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
a1ee69696f35228df34de656c28f874e810cd996
279
py
Python
web/verify.py
bzd111/oh-my-rss
e81beb66e5e51f8a7d100b2c506111138d17451a
[ "MIT" ]
null
null
null
web/verify.py
bzd111/oh-my-rss
e81beb66e5e51f8a7d100b2c506111138d17451a
[ "MIT" ]
null
null
null
web/verify.py
bzd111/oh-my-rss
e81beb66e5e51f8a7d100b2c506111138d17451a
[ "MIT" ]
null
null
null
from django.http import HttpResponseForbidden


def verify_request(func):
    """
    verify user request
    """
    def wrapper(request):
        if not request.POST.get('uid', ''):
            return HttpResponseForbidden()
        return func(request)
    return wrapper
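A short usage sketch for the decorator above; the view name and response body are hypothetical, and in a real Django project the view would receive an HttpRequest:

from django.http import HttpResponse

@verify_request
def submit_feed(request):
    # Hypothetical view: only reached when request.POST carries a non-empty 'uid'.
    return HttpResponse('ok')

One design note: the plain wrapper discards the wrapped view's __name__ and docstring; decorating wrapper with functools.wraps(func) would preserve them.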
16.411765
45
0.62724
28
279
6.214286
0.607143
0
0
0
0
0
0
0
0
0
0
0
0.275986
279
16
46
17.4375
0.861386
0.0681
0
0
0
0
0.012397
0
0
0
0
0
0
1
0.285714
false
0
0.142857
0
0.857143
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
3
b803012fe5a7d1f802d9306cd1bc2613b7fa5585
3,726
py
Python
pycatia/knowledge_interfaces/knowledge_activate_object.py
evereux/catia_python
08948585899b12587b0415ce3c9191a408b34897
[ "MIT" ]
90
2019-02-21T10:05:28.000Z
2022-03-19T01:53:41.000Z
pycatia/knowledge_interfaces/knowledge_activate_object.py
Luanee/pycatia
ea5eef8178f73de12404561c00baf7a7ca30da59
[ "MIT" ]
99
2019-05-21T08:29:12.000Z
2022-03-25T09:55:15.000Z
pycatia/knowledge_interfaces/knowledge_activate_object.py
Luanee/pycatia
ea5eef8178f73de12404561c00baf7a7ca30da59
[ "MIT" ]
26
2019-04-04T06:31:36.000Z
2022-03-30T07:24:47.000Z
#! /usr/bin/python3.6

"""
    Module initially auto generated using V5Automation files from CATIA V5 R28 on 2020-06-11 12:40:47.360445

    .. warning::
        The notes denoted "CAA V5 Visual Basic Help" are to be used as reference only.
        They are there as a guide as to how the visual basic / catscript functions work
        and thus help debugging in pycatia.

"""

from pycatia.knowledge_interfaces.knowledge_object import KnowledgeObject


class KnowledgeActivateObject(KnowledgeObject):
    """
        .. note::
            :class: toggle

            CAA V5 Visual Basic Help (2020-06-11 12:40:47.360445)

                | System.IUnknown
                |     System.IDispatch
                |         System.CATBaseUnknown
                |             System.CATBaseDispatch
                |                 System.AnyObject
                |                     KnowledgeInterfaces.KnowledgeObject
                |                         KnowledgeActivateObject
                |
                | Interface to access a CATIAKnowledgeActivableObject.

    """

    def __init__(self, com_object):
        super().__init__(com_object)
        self.knowledge_activate_object = com_object

    @property
    def activated(self) -> bool:
        """
        .. note::
            :class: toggle

            CAA V5 Visual Basic Help (2020-06-11 12:40:47.360445)
                | o Property Activated() As boolean (Read Only)
                |
                |     Returns whether the relation is activated.
                |     True if the relation is activated. An activated relation is processed
                |     whenever the value of one of its input parameter is
                |     modified.
                |
                |     Example:
                |         This example retrieves whether the maximummass relation is activated,
                |         and if true, displays the result in a message box:
                |
                |         If ( maximummass.Activated ) Then
                |             MsgBox "maximummass is activated"
                |         End If

        :return: bool
        :rtype: bool
        """

        return self.knowledge_activate_object.Activated

    def activate(self) -> None:
        """
        .. note::
            :class: toggle

            CAA V5 Visual Basic Help (2020-06-11 12:40:47.360445)
                | o Sub Activate()
                |
                |     Activates the relation. The relation will be processed whenever the value
                |     of one of its input parameter is modified.
                |
                |     Example:
                |         This example activates the maximummass relation:
                |
                |         maximummass.Activate()

        :return: None
        :rtype: None
        """
        return self.knowledge_activate_object.Activate()

    def deactivate(self) -> None:
        """
        .. note::
            :class: toggle

            CAA V5 Visual Basic Help (2020-06-11 12:40:47.360445)
                | o Sub Deactivate()
                |
                |     Deactivates the relation. The relation will no longer be processed when the
                |     value of one of its input parameter is modified.
                |
                |     Example:
                |         This example deactivates the maximummass relation:
                |
                |         maximummass.Deactivate()

        :return: None
        :rtype: None
        """
        return self.knowledge_activate_object.Deactivate()

    def __repr__(self):
        return f'KnowledgeActivateObject(name="{ self.name }")'
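KnowledgeActivateObject is a thin delegate around a CATIA COM object. A CATIA-free sketch of the same delegation pattern, using a hypothetical stand-in object so it runs anywhere (nothing here touches a real CATIA session):

class ActivateWrapper:
    """Same shape as KnowledgeActivateObject, minus the COM layer."""

    def __init__(self, com_object):
        self._com = com_object

    @property
    def activated(self) -> bool:
        # Delegate the read-only property to the wrapped object.
        return self._com.Activated

    def activate(self) -> None:
        self._com.Activate()


class _FakeRelation:
    # Hypothetical stand-in for the CATIA relation COM object.
    def __init__(self):
        self.Activated = False

    def Activate(self):
        self.Activated = True


relation = ActivateWrapper(_FakeRelation())
relation.activate()
assert relation.activated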
33.567568
108
0.506978
345
3,726
5.402899
0.356522
0.035408
0.021459
0.026824
0.369099
0.312768
0.312768
0.302039
0.302039
0.246245
0
0.051845
0.425389
3,726
110
109
33.872727
0.818776
0.670156
0
0
1
0
0.067771
0.046687
0
0
0
0
0
1
0.357143
false
0
0.071429
0.071429
0.785714
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
3
b80870a0c7b59e072a80d92516b1cea8e83af866
145
py
Python
6KYU/is_prime.py
yaznasivasai/python_codewars
25493591dde4649dc9c1ec3bece8191a3bed6818
[ "MIT" ]
4
2021-07-17T22:48:03.000Z
2022-03-25T14:10:58.000Z
6KYU/is_prime.py
yaznasivasai/python_codewars
25493591dde4649dc9c1ec3bece8191a3bed6818
[ "MIT" ]
null
null
null
6KYU/is_prime.py
yaznasivasai/python_codewars
25493591dde4649dc9c1ec3bece8191a3bed6818
[ "MIT" ]
3
2021-06-14T14:18:16.000Z
2022-03-16T06:02:02.000Z
def is_prime(num):
    # Trial division: a number n > 1 is prime iff no d with d * d <= n divides it.
    if num <= 1:
        return False
    d = 2
    while d * d <= num and num % d != 0:
        d += 1
    # Loop ended either because d * d exceeded num (prime) or a divisor was found.
    return d * d > num
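A few spot checks for the trial-division test above, runnable as-is right after the function:

assert is_prime(2) and is_prime(13) and is_prime(97)
assert not is_prime(1) and not is_prime(91)   # 91 = 7 * 13
print("is_prime spot checks passed")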
20.714286
40
0.441379
25
145
2.52
0.52
0.222222
0.15873
0
0
0
0
0
0
0
0
0.04878
0.434483
145
7
41
20.714286
0.719512
0
0
0
0
0
0
0
0
0
0
0
0
1
0.142857
false
0
0
0
0.428571
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
6297f326a0bb92b7ea38d33b46f5792ba2473d47
499
py
Python
accepted/Find_All_Numbers_Disappeared_in_Array.py
sheagk/leetcode_solutions
7571bd13f4274f6b4b622b43a414d56fc26d3be0
[ "MIT" ]
null
null
null
accepted/Find_All_Numbers_Disappeared_in_Array.py
sheagk/leetcode_solutions
7571bd13f4274f6b4b622b43a414d56fc26d3be0
[ "MIT" ]
null
null
null
accepted/Find_All_Numbers_Disappeared_in_Array.py
sheagk/leetcode_solutions
7571bd13f4274f6b4b622b43a414d56fc26d3be0
[ "MIT" ]
1
2020-09-03T14:26:00.000Z
2020-09-03T14:26:00.000Z
## https://leetcode.com/submissions/detail/230651834/

## problem is to find the numbers between 1 and length of the
## array that aren't in the array.  simple way to do that is to
## do the set difference between range(1, len(ar)+1) and the
## input numbers

## hits 98th percentile in terms of runtime, though only
## 14th percentile in memory usage

from typing import List   # needed for the List[int] annotations below


class Solution:
    def findDisappearedNumbers(self, nums: List[int]) -> List[int]:
        return list(set(range(1, len(nums)+1)) - set(nums))
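A quick check of the set-difference approach above (runnable as-is; sorted() is used because set order is not guaranteed):

result = Solution().findDisappearedNumbers([4, 3, 2, 7, 8, 2, 3, 1])
assert sorted(result) == [5, 6]   # 5 and 6 never appear in the 8-element input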
38.384615
67
0.709419
80
499
4.425
0.625
0.022599
0.050847
0
0
0
0
0
0
0
0
0.043902
0.178357
499
13
68
38.384615
0.819512
0.663327
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0
0.333333
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3
62a6274e4ab46635bafe5141a202e2107793372e
206
py
Python
xmemeAPI/admin.py
sukanta-nandi/XMEME-Backend
a532cfca17ccde0aae9faf2a4b937644f61b9894
[ "MIT" ]
null
null
null
xmemeAPI/admin.py
sukanta-nandi/XMEME-Backend
a532cfca17ccde0aae9faf2a4b937644f61b9894
[ "MIT" ]
null
null
null
xmemeAPI/admin.py
sukanta-nandi/XMEME-Backend
a532cfca17ccde0aae9faf2a4b937644f61b9894
[ "MIT" ]
null
null
null
from django.contrib import admin

from .models import memeData

# Register your models here.


@admin.register(memeData)
class memeDataAdmin(admin.ModelAdmin):
    list_display = ['id', 'name']
20.6
38
0.728155
25
206
5.96
0.72
0
0
0
0
0
0
0
0
0
0
0
0.169903
206
10
39
20.6
0.871345
0.145631
0
0
0
0
0.034483
0
0
0
0
0
0
1
0
false
0
0.4
0
0.8
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
3
62c200b5cdc57483caa359cc9d19aa50b3e6655f
497
py
Python
Ejercicios para practicar/Cadenas/Ejercicio 3.py
AlexPC23/Python
77689d74c5444faa1aa253a122602307e52ac581
[ "Apache-2.0" ]
null
null
null
Ejercicios para practicar/Cadenas/Ejercicio 3.py
AlexPC23/Python
77689d74c5444faa1aa253a122602307e52ac581
[ "Apache-2.0" ]
null
null
null
Ejercicios para practicar/Cadenas/Ejercicio 3.py
AlexPC23/Python
77689d74c5444faa1aa253a122602307e52ac581
[ "Apache-2.0" ]
null
null
null
# Write a program that asks for the user's full name at the console and then
# prints the full name three times: once in all lowercase, once in all
# uppercase, and once with only the first letter of the first and last names
# capitalized. The user may enter their name mixing upper and lower case
# however they like.

nombre = str(input('What is your name?: '))

# The three case variants the exercise asks for:
print(nombre.lower())
print(nombre.upper())
print(nombre.title())

print(nombre.upper() + ' has ' + str(len(nombre)) + ' letters')
165.666667
390
0.778672
79
497
4.911392
0.632911
0.041237
0.082474
0.097938
0.134021
0
0
0
0
0
0
0
0.158954
497
3
391
165.666667
0.925837
0.782696
0
0
0
0
0.296296
0
0
0
0
0
0
1
0
false
0
0
0
0
0.5
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
1
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
3
62c4e2a641c937e152a6639d961812119ca22d47
277
py
Python
endpoints/cotect-endpoints/cotect_endpoints/local_test.py
JNKielmann/cotect
1b213459b41ef18119948633385ebad2cc16e9e2
[ "MIT" ]
19
2020-03-18T15:49:58.000Z
2021-02-11T12:07:22.000Z
endpoints/cotect-endpoints/cotect_endpoints/local_test.py
JNKielmann/cotect
1b213459b41ef18119948633385ebad2cc16e9e2
[ "MIT" ]
6
2020-03-21T18:50:29.000Z
2022-02-27T01:38:20.000Z
endpoints/cotect-endpoints/cotect_endpoints/local_test.py
JNKielmann/cotect
1b213459b41ef18119948633385ebad2cc16e9e2
[ "MIT" ]
7
2020-03-24T14:42:35.000Z
2020-04-06T13:22:29.000Z
import os

os.environ["GOOGLE_APPLICATION_CREDENTIALS"] = "/workspace/firebase-secret.json"
os.environ["ID_GENERATION_SECRET"] = "secret"
os.environ["NEO4J_URI"] = "bolt://docker.for.mac.localhost:7687"
os.environ["NEO4J_AUTH"] = ""

from cotect_endpoints.endpoints import app
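The point of that layout is ordering: the environment variables must be set before the application module is imported, because the module reads them at import time. A minimal self-contained sketch of the same idea (the variable name and settings function are hypothetical):

import os

os.environ["DEMO_SECRET"] = "secret"   # set first...

def load_settings():
    # ...because code like this reads the environment when it runs.
    return {"secret": os.environ["DEMO_SECRET"]}

assert load_settings()["secret"] == "secret"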
30.777778
80
0.768953
37
277
5.567568
0.675676
0.174757
0.135922
0
0
0
0
0
0
0
0
0.023346
0.072202
277
8
81
34.625
0.77821
0
0
0
0
0
0.512635
0.350181
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
3
62c8a9488ea348f6324d1c7461f6119553870629
25,023
py
Python
lte/protos/sctpd_pb2.py
aweimeow/enodebd
e1cd20693153e6b85e5d1bf9d21af2501c358601
[ "Apache-2.0", "BSD-3-Clause" ]
null
null
null
lte/protos/sctpd_pb2.py
aweimeow/enodebd
e1cd20693153e6b85e5d1bf9d21af2501c358601
[ "Apache-2.0", "BSD-3-Clause" ]
null
null
null
lte/protos/sctpd_pb2.py
aweimeow/enodebd
e1cd20693153e6b85e5d1bf9d21af2501c358601
[ "Apache-2.0", "BSD-3-Clause" ]
null
null
null
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: lte/protos/sctpd.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


DESCRIPTOR = _descriptor.FileDescriptor(
  name='lte/protos/sctpd.proto',
  package='magma.sctpd',
  syntax='proto3',
  serialized_options=b'Z\031magma/lte/cloud/go/protos',
  create_key=_descriptor._internal_create_key,
  serialized_pb=b'\n\x16lte/protos/sctpd.proto\x12\x0bmagma.sctpd\"\x88\x01\n\x07InitReq\x12\x10\n\x08use_ipv4\x18\x01 \x01(\x08\x12\x10\n\x08use_ipv6\x18\x02 \x01(\x08\x12\x12\n\nipv4_addrs\x18\x03 \x03(\t\x12\x12\n\nipv6_addrs\x18\x04 \x03(\t\x12\x0c\n\x04port\x18\x05 \x01(\r\x12\x0c\n\x04ppid\x18\x06 \x01(\r\x12\x15\n\rforce_restart\x18\x07 \x01(\x08\"v\n\x07InitRes\x12/\n\x06result\x18\x01 \x01(\x0e\x32\x1f.magma.sctpd.InitRes.InitResult\":\n\nInitResult\x12\x10\n\x0cINIT_UNKNOWN\x10\x00\x12\x0b\n\x07INIT_OK\x10\x01\x12\r\n\tINIT_FAIL\x10\x02\"L\n\tSendDlReq\x12\x10\n\x08\x61ssoc_id\x18\x01 \x01(\r\x12\x0e\n\x06stream\x18\x02 \x01(\r\x12\x0f\n\x07payload\x18\x03 \x01(\x0c\x12\x0c\n\x04ppid\x18\x04 \x01(\r\"\x87\x01\n\tSendDlRes\x12\x33\n\x06result\x18\x01 \x01(\x0e\x32#.magma.sctpd.SendDlRes.SendDlResult\"E\n\x0cSendDlResult\x12\x13\n\x0fSEND_DL_UNKNOWN\x10\x00\x12\x0e\n\nSEND_DL_OK\x10\x01\x12\x10\n\x0cSEND_DL_FAIL\x10\x02\"L\n\tSendUlReq\x12\x10\n\x08\x61ssoc_id\x18\x01 \x01(\r\x12\x0e\n\x06stream\x18\x02 \x01(\r\x12\x0f\n\x07payload\x18\x03 \x01(\x0c\x12\x0c\n\x04ppid\x18\x04 \x01(\r\"\x0b\n\tSendUlRes\"k\n\x0bNewAssocReq\x12\x10\n\x08\x61ssoc_id\x18\x01 \x01(\r\x12\x11\n\tinstreams\x18\x02 \x01(\r\x12\x12\n\noutstreams\x18\x03 \x01(\r\x12\x15\n\rran_cp_ipaddr\x18\x04 \x01(\x0c\x12\x0c\n\x04ppid\x18\x05 \x01(\r\"\r\n\x0bNewAssocRes\"A\n\rCloseAssocReq\x12\x10\n\x08\x61ssoc_id\x18\x01 \x01(\r\x12\x10\n\x08is_reset\x18\x02 \x01(\x08\x12\x0c\n\x04ppid\x18\x03 \x01(\r\"\x0f\n\rCloseAssocRes2\x81\x01\n\rSctpdDownlink\x12\x34\n\x04Init\x12\x14.magma.sctpd.InitReq\x1a\x14.magma.sctpd.InitRes\"\x00\x12:\n\x06SendDl\x12\x16.magma.sctpd.SendDlReq\x1a\x16.magma.sctpd.SendDlRes\"\x00\x32\xd3\x01\n\x0bSctpdUplink\x12:\n\x06SendUl\x12\x16.magma.sctpd.SendUlReq\x1a\x16.magma.sctpd.SendUlRes\"\x00\x12@\n\x08NewAssoc\x12\x18.magma.sctpd.NewAssocReq\x1a\x18.magma.sctpd.NewAssocRes\"\x00\x12\x46\n\nCloseAssoc\x12\x1a.magma.sctpd.CloseAssocReq\x1a\x1a.magma.sctpd.CloseAssocRes\"\x00\x42\x1bZ\x19magma/lte/cloud/go/protosb\x06proto3'
)


_INITRES_INITRESULT = _descriptor.EnumDescriptor(
  name='InitResult',
  full_name='magma.sctpd.InitRes.InitResult',
  filename=None,
  file=DESCRIPTOR,
  create_key=_descriptor._internal_create_key,
  values=[
    _descriptor.EnumValueDescriptor(name='INIT_UNKNOWN', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(name='INIT_OK', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(name='INIT_FAIL', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key),
  ],
  containing_type=None,
  serialized_options=None,
  serialized_start=238,
  serialized_end=296,
)
_sym_db.RegisterEnumDescriptor(_INITRES_INITRESULT)

_SENDDLRES_SENDDLRESULT = _descriptor.EnumDescriptor(
  name='SendDlResult',
  full_name='magma.sctpd.SendDlRes.SendDlResult',
  filename=None,
  file=DESCRIPTOR,
  create_key=_descriptor._internal_create_key,
  values=[
    _descriptor.EnumValueDescriptor(name='SEND_DL_UNKNOWN', index=0, number=0, serialized_options=None, type=None, create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(name='SEND_DL_OK', index=1, number=1, serialized_options=None, type=None, create_key=_descriptor._internal_create_key),
    _descriptor.EnumValueDescriptor(name='SEND_DL_FAIL', index=2, number=2, serialized_options=None, type=None, create_key=_descriptor._internal_create_key),
  ],
  containing_type=None,
  serialized_options=None,
  serialized_start=443,
  serialized_end=512,
)
_sym_db.RegisterEnumDescriptor(_SENDDLRES_SENDDLRESULT)


_INITREQ = _descriptor.Descriptor(
  name='InitReq',
  full_name='magma.sctpd.InitReq',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(name='use_ipv4', full_name='magma.sctpd.InitReq.use_ipv4', index=0, number=1, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(name='use_ipv6', full_name='magma.sctpd.InitReq.use_ipv6', index=1, number=2, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(name='ipv4_addrs', full_name='magma.sctpd.InitReq.ipv4_addrs', index=2, number=3, type=9, cpp_type=9, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(name='ipv6_addrs', full_name='magma.sctpd.InitReq.ipv6_addrs', index=3, number=4, type=9, cpp_type=9, label=3, has_default_value=False, default_value=[], message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(name='port', full_name='magma.sctpd.InitReq.port', index=4, number=5, type=13, cpp_type=3, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(name='ppid', full_name='magma.sctpd.InitReq.ppid', index=5, number=6, type=13, cpp_type=3, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(name='force_restart', full_name='magma.sctpd.InitReq.force_restart', index=6, number=7, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[],
  nested_types=[],
  enum_types=[],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[],
  serialized_start=40,
  serialized_end=176,
)

_INITRES = _descriptor.Descriptor(
  name='InitRes',
  full_name='magma.sctpd.InitRes',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(name='result', full_name='magma.sctpd.InitRes.result', index=0, number=1, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[],
  nested_types=[],
  enum_types=[
    _INITRES_INITRESULT,
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[],
  serialized_start=178,
  serialized_end=296,
)

_SENDDLREQ = _descriptor.Descriptor(
  name='SendDlReq',
  full_name='magma.sctpd.SendDlReq',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(name='assoc_id', full_name='magma.sctpd.SendDlReq.assoc_id', index=0, number=1, type=13, cpp_type=3, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(name='stream', full_name='magma.sctpd.SendDlReq.stream', index=1, number=2, type=13, cpp_type=3, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(name='payload', full_name='magma.sctpd.SendDlReq.payload', index=2, number=3, type=12, cpp_type=9, label=1, has_default_value=False, default_value=b"", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(name='ppid', full_name='magma.sctpd.SendDlReq.ppid', index=3, number=4, type=13, cpp_type=3, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[],
  nested_types=[],
  enum_types=[],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[],
  serialized_start=298,
  serialized_end=374,
)

_SENDDLRES = _descriptor.Descriptor(
  name='SendDlRes',
  full_name='magma.sctpd.SendDlRes',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(name='result', full_name='magma.sctpd.SendDlRes.result', index=0, number=1, type=14, cpp_type=8, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[],
  nested_types=[],
  enum_types=[
    _SENDDLRES_SENDDLRESULT,
  ],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[],
  serialized_start=377,
  serialized_end=512,
)

_SENDULREQ = _descriptor.Descriptor(
  name='SendUlReq',
  full_name='magma.sctpd.SendUlReq',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(name='assoc_id', full_name='magma.sctpd.SendUlReq.assoc_id', index=0, number=1, type=13, cpp_type=3, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(name='stream', full_name='magma.sctpd.SendUlReq.stream', index=1, number=2, type=13, cpp_type=3, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(name='payload', full_name='magma.sctpd.SendUlReq.payload', index=2, number=3, type=12, cpp_type=9, label=1, has_default_value=False, default_value=b"", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(name='ppid', full_name='magma.sctpd.SendUlReq.ppid', index=3, number=4, type=13, cpp_type=3, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[],
  nested_types=[],
  enum_types=[],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[],
  serialized_start=514,
  serialized_end=590,
)

_SENDULRES = _descriptor.Descriptor(
  name='SendUlRes',
  full_name='magma.sctpd.SendUlRes',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[],
  extensions=[],
  nested_types=[],
  enum_types=[],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[],
  serialized_start=592,
  serialized_end=603,
)

_NEWASSOCREQ = _descriptor.Descriptor(
  name='NewAssocReq',
  full_name='magma.sctpd.NewAssocReq',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(name='assoc_id', full_name='magma.sctpd.NewAssocReq.assoc_id', index=0, number=1, type=13, cpp_type=3, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(name='instreams', full_name='magma.sctpd.NewAssocReq.instreams', index=1, number=2, type=13, cpp_type=3, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(name='outstreams', full_name='magma.sctpd.NewAssocReq.outstreams', index=2, number=3, type=13, cpp_type=3, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(name='ran_cp_ipaddr', full_name='magma.sctpd.NewAssocReq.ran_cp_ipaddr', index=3, number=4, type=12, cpp_type=9, label=1, has_default_value=False, default_value=b"", message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(name='ppid', full_name='magma.sctpd.NewAssocReq.ppid', index=4, number=5, type=13, cpp_type=3, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[],
  nested_types=[],
  enum_types=[],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[],
  serialized_start=605,
  serialized_end=712,
)

_NEWASSOCRES = _descriptor.Descriptor(
  name='NewAssocRes',
  full_name='magma.sctpd.NewAssocRes',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[],
  extensions=[],
  nested_types=[],
  enum_types=[],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[],
  serialized_start=714,
  serialized_end=727,
)

_CLOSEASSOCREQ = _descriptor.Descriptor(
  name='CloseAssocReq',
  full_name='magma.sctpd.CloseAssocReq',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[
    _descriptor.FieldDescriptor(name='assoc_id', full_name='magma.sctpd.CloseAssocReq.assoc_id', index=0, number=1, type=13, cpp_type=3, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(name='is_reset', full_name='magma.sctpd.CloseAssocReq.is_reset', index=1, number=2, type=8, cpp_type=7, label=1, has_default_value=False, default_value=False, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
    _descriptor.FieldDescriptor(name='ppid', full_name='magma.sctpd.CloseAssocReq.ppid', index=2, number=3, type=13, cpp_type=3, label=1, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
  ],
  extensions=[],
  nested_types=[],
  enum_types=[],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[],
  serialized_start=729,
  serialized_end=794,
)

_CLOSEASSOCRES = _descriptor.Descriptor(
  name='CloseAssocRes',
  full_name='magma.sctpd.CloseAssocRes',
  filename=None,
  file=DESCRIPTOR,
  containing_type=None,
  create_key=_descriptor._internal_create_key,
  fields=[],
  extensions=[],
  nested_types=[],
  enum_types=[],
  serialized_options=None,
  is_extendable=False,
  syntax='proto3',
  extension_ranges=[],
  oneofs=[],
  serialized_start=796,
  serialized_end=811,
)

_INITRES.fields_by_name['result'].enum_type = _INITRES_INITRESULT
_INITRES_INITRESULT.containing_type = _INITRES
_SENDDLRES.fields_by_name['result'].enum_type = _SENDDLRES_SENDDLRESULT
_SENDDLRES_SENDDLRESULT.containing_type = _SENDDLRES
DESCRIPTOR.message_types_by_name['InitReq'] = _INITREQ
DESCRIPTOR.message_types_by_name['InitRes'] = _INITRES
DESCRIPTOR.message_types_by_name['SendDlReq'] = _SENDDLREQ
DESCRIPTOR.message_types_by_name['SendDlRes'] = _SENDDLRES
DESCRIPTOR.message_types_by_name['SendUlReq'] = _SENDULREQ
DESCRIPTOR.message_types_by_name['SendUlRes'] = _SENDULRES
DESCRIPTOR.message_types_by_name['NewAssocReq'] = _NEWASSOCREQ
DESCRIPTOR.message_types_by_name['NewAssocRes'] = _NEWASSOCRES
DESCRIPTOR.message_types_by_name['CloseAssocReq'] = _CLOSEASSOCREQ
DESCRIPTOR.message_types_by_name['CloseAssocRes'] = _CLOSEASSOCRES
_sym_db.RegisterFileDescriptor(DESCRIPTOR)

InitReq = _reflection.GeneratedProtocolMessageType('InitReq', (_message.Message,), {
  'DESCRIPTOR' : _INITREQ,
  '__module__' : 'lte.protos.sctpd_pb2'
  # @@protoc_insertion_point(class_scope:magma.sctpd.InitReq)
  })
_sym_db.RegisterMessage(InitReq)

InitRes = _reflection.GeneratedProtocolMessageType('InitRes', (_message.Message,), {
  'DESCRIPTOR' : _INITRES,
  '__module__' : 'lte.protos.sctpd_pb2'
  # @@protoc_insertion_point(class_scope:magma.sctpd.InitRes)
  })
_sym_db.RegisterMessage(InitRes)

SendDlReq = _reflection.GeneratedProtocolMessageType('SendDlReq', (_message.Message,), {
  'DESCRIPTOR' : _SENDDLREQ,
  '__module__' : 'lte.protos.sctpd_pb2'
  # @@protoc_insertion_point(class_scope:magma.sctpd.SendDlReq)
  })
_sym_db.RegisterMessage(SendDlReq)

SendDlRes = _reflection.GeneratedProtocolMessageType('SendDlRes', (_message.Message,), {
  'DESCRIPTOR' : _SENDDLRES,
  '__module__' : 'lte.protos.sctpd_pb2'
  # @@protoc_insertion_point(class_scope:magma.sctpd.SendDlRes)
  })
_sym_db.RegisterMessage(SendDlRes)

SendUlReq = _reflection.GeneratedProtocolMessageType('SendUlReq', (_message.Message,), {
  'DESCRIPTOR' : _SENDULREQ,
  '__module__' : 'lte.protos.sctpd_pb2'
  # @@protoc_insertion_point(class_scope:magma.sctpd.SendUlReq)
  })
_sym_db.RegisterMessage(SendUlReq)

SendUlRes = _reflection.GeneratedProtocolMessageType('SendUlRes', (_message.Message,), {
  'DESCRIPTOR' : _SENDULRES,
  '__module__' : 'lte.protos.sctpd_pb2'
  # @@protoc_insertion_point(class_scope:magma.sctpd.SendUlRes)
  })
_sym_db.RegisterMessage(SendUlRes)

NewAssocReq = _reflection.GeneratedProtocolMessageType('NewAssocReq', (_message.Message,), {
  'DESCRIPTOR' : _NEWASSOCREQ,
  '__module__' : 'lte.protos.sctpd_pb2'
  # @@protoc_insertion_point(class_scope:magma.sctpd.NewAssocReq)
  })
_sym_db.RegisterMessage(NewAssocReq)

NewAssocRes = _reflection.GeneratedProtocolMessageType('NewAssocRes', (_message.Message,), {
  'DESCRIPTOR' : _NEWASSOCRES,
  '__module__' : 'lte.protos.sctpd_pb2'
  # @@protoc_insertion_point(class_scope:magma.sctpd.NewAssocRes)
  })
_sym_db.RegisterMessage(NewAssocRes)

CloseAssocReq = _reflection.GeneratedProtocolMessageType('CloseAssocReq', (_message.Message,), {
  'DESCRIPTOR' : _CLOSEASSOCREQ,
  '__module__' : 'lte.protos.sctpd_pb2'
  # @@protoc_insertion_point(class_scope:magma.sctpd.CloseAssocReq)
  })
_sym_db.RegisterMessage(CloseAssocReq)

CloseAssocRes = _reflection.GeneratedProtocolMessageType('CloseAssocRes', (_message.Message,), {
  'DESCRIPTOR' : _CLOSEASSOCRES,
  '__module__' : 'lte.protos.sctpd_pb2'
  # @@protoc_insertion_point(class_scope:magma.sctpd.CloseAssocRes)
  })
_sym_db.RegisterMessage(CloseAssocRes)

DESCRIPTOR._options = None

_SCTPDDOWNLINK = _descriptor.ServiceDescriptor(
  name='SctpdDownlink',
  full_name='magma.sctpd.SctpdDownlink',
  file=DESCRIPTOR,
  index=0,
  serialized_options=None,
  create_key=_descriptor._internal_create_key,
  serialized_start=814,
  serialized_end=943,
  methods=[
    _descriptor.MethodDescriptor(name='Init', full_name='magma.sctpd.SctpdDownlink.Init', index=0, containing_service=None, input_type=_INITREQ, output_type=_INITRES, serialized_options=None, create_key=_descriptor._internal_create_key),
    _descriptor.MethodDescriptor(name='SendDl', full_name='magma.sctpd.SctpdDownlink.SendDl', index=1, containing_service=None, input_type=_SENDDLREQ, output_type=_SENDDLRES, serialized_options=None, create_key=_descriptor._internal_create_key),
])
_sym_db.RegisterServiceDescriptor(_SCTPDDOWNLINK)

DESCRIPTOR.services_by_name['SctpdDownlink'] = _SCTPDDOWNLINK

_SCTPDUPLINK = _descriptor.ServiceDescriptor(
  name='SctpdUplink',
  full_name='magma.sctpd.SctpdUplink',
  file=DESCRIPTOR,
  index=1,
  serialized_options=None,
  create_key=_descriptor._internal_create_key,
  serialized_start=946,
  serialized_end=1157,
  methods=[
    _descriptor.MethodDescriptor(name='SendUl', full_name='magma.sctpd.SctpdUplink.SendUl', index=0, containing_service=None, input_type=_SENDULREQ, output_type=_SENDULRES, serialized_options=None, create_key=_descriptor._internal_create_key),
    _descriptor.MethodDescriptor(name='NewAssoc', full_name='magma.sctpd.SctpdUplink.NewAssoc', index=1, containing_service=None, input_type=_NEWASSOCREQ, output_type=_NEWASSOCRES, serialized_options=None, create_key=_descriptor._internal_create_key),
    _descriptor.MethodDescriptor(name='CloseAssoc', full_name='magma.sctpd.SctpdUplink.CloseAssoc', index=2, containing_service=None, input_type=_CLOSEASSOCREQ, output_type=_CLOSEASSOCRES, serialized_options=None, create_key=_descriptor._internal_create_key),
])
_sym_db.RegisterServiceDescriptor(_SCTPDUPLINK)

DESCRIPTOR.services_by_name['SctpdUplink'] = _SCTPDUPLINK

# @@protoc_insertion_point(module_scope)
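A short usage sketch for the generated classes above, assuming the module is importable as lte.protos.sctpd_pb2 and a compatible protobuf runtime is installed (this descriptor-style generated code predates protobuf 4, so an older runtime is likely required). The field values are made up; only the message/field names come from the file itself:

from lte.protos import sctpd_pb2

# Build a request, round-trip it through the proto3 wire format.
req = sctpd_pb2.InitReq(use_ipv4=True, ipv4_addrs=["127.0.0.1"], port=36412, ppid=18)
wire = req.SerializeToString()

decoded = sctpd_pb2.InitReq.FromString(wire)
assert decoded.port == 36412 and decoded.ipv4_addrs == ["127.0.0.1"]

# Nested enum values are attributes of the containing message class.
res = sctpd_pb2.InitRes(result=sctpd_pb2.InitRes.INIT_OK)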
36.529927
2,054
0.74999
3,125
25,023
5.67456
0.08416
0.051768
0.08143
0.077652
0.713359
0.645971
0.632324
0.617493
0.617493
0.617493
0
0.034634
0.126524
25,023
684
2,055
36.583333
0.776685
0.03289
0
0.675806
1
0.001613
0.183995
0.13445
0
0
0
0
0
1
0
false
0
0.006452
0
0.006452
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
62dfad9618218c197076118daf8205205ec79a99
2,997
py
Python
project/editorial/models/__init__.py
ProjectFacet/facet
dc6bc79d450f7e2bdf59cfbcd306d05a736e4db9
[ "MIT" ]
25
2015-07-13T22:16:36.000Z
2021-11-11T02:45:32.000Z
project/editorial/models/__init__.py
ProjectFacet/facet
dc6bc79d450f7e2bdf59cfbcd306d05a736e4db9
[ "MIT" ]
74
2015-12-01T18:57:47.000Z
2022-03-11T23:25:47.000Z
project/editorial/models/__init__.py
ProjectFacet/facet
dc6bc79d450f7e2bdf59cfbcd306d05a736e4db9
[ "MIT" ]
6
2016-01-08T21:12:43.000Z
2019-05-20T16:07:56.000Z
""" Model for editorial application. Models ====== People: User, Organization, Network Contributors: ContributorInfo, OrganizationContributorInfo Platforms: Platform, PlatformAccount Projects: Project Story: Story Facet: FacetTemplate, Facet, FacetContributor, ContentLicense - (Temp: Versions of above for web, print, audio, video facets) Copy: StoryCopyDetail, FacetCopyDetail, ImageAssetCopyDetail, DocumentAssetCopyDetail, AudioAssetCopyDetail, VideoAssetCopyDetail - (Temp: Versions of above for web, print, audio, video facets) Tasks: Task Events: Event Assets: BaseAsset, BaseAssetMetadata, BaseImage, ImageAsset, SimpleImage, BaseDocument, DocumentAsset, DocumentImage, BaseAudio, AudioAsset, SimpleAudio, BaseVideo, VideoAsset, SimpleVideo, Notes: (Base)Note, NetworkNote, OrganizationNote, UserNote, ProjectNote, StoryNote Discussion: Discussion, PrivateDiscussion, PrivateMessage, Comment, CommentReadStatus """ from .user import User from .organization import Organization, OrganizationPublicProfile from .network import Network from .assets import ImageAsset, DocumentAsset, AudioAsset, VideoAsset from .assets import SimpleImage, SimpleDocument, SimpleAudio, SimpleVideo from .discussion import Discussion, Comment, PrivateMessage from .projects import Project from .story import Story from .facets import Facet, FacetTemplate, ContentLicense from .notes import Note from .platforms import Platform, PlatformAccount from .tasks import Task from .events import Event from .copy import * # XXX from .contractors import ContractorProfile, TalentEditorProfile, OrganizationContractorAffiliation, Pitch, Call, Assignment from .platforms import Platform, PlatformAccount from .subscription import OrganizationSubscription, ContractorSubscription #-----------------------------------------------------------------------# # SOCIAL POST #-----------------------------------------------------------------------# # XXX Leaving this commented out for now to think about how to best # relate or make use of Platform and PlatformAccount in the options. # class SocialPost(models.Model): # """A social post. # # A social post to promote a project, story or event. # """ # # FACEBOOK = 'Facebook' # TWITTER = 'Twitter' # INSTAGRAM = 'Instagram' # SOCIAL_ACCOUNT_CHOICES = ( # (FACEBOOK, 'Facebook'), # (TWITTER, 'Twitter'), # (INSTAGRAM, 'Instagram'), # ) # # social_platform = models.CharField( # max_length=50, # choices=SOCIAL_ACCOUNT_CHOICES, # help_text='Platform the post is created for.' # ) # # text = models.TextField( # help_text='Content of the post.' # ) # # # a social post can be associated with a project, story or an event. # # Add connection to P, Se, St, or E # # # Add Image assets for social posts to Assets section. #
36.54878
123
0.682349
287
2,997
7.097561
0.480836
0.019637
0.0162
0.018655
0.143348
0.143348
0.098184
0.045164
0.045164
0.045164
0
0.00083
0.195529
2,997
81
124
37
0.844048
0.665999
0
0.117647
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
3
62e238270ed059d2479949cf0bad5480892fad83
597
py
Python
optgbm/__init__.py
kmedved/OptGBM
7f37f4becea3e6d23dbc663b0c2c212d54ae413c
[ "MIT" ]
26
2019-09-17T06:16:40.000Z
2022-01-08T01:17:40.000Z
optgbm/__init__.py
kmedved/OptGBM
7f37f4becea3e6d23dbc663b0c2c212d54ae413c
[ "MIT" ]
62
2019-09-16T17:33:37.000Z
2021-06-18T00:31:38.000Z
optgbm/__init__.py
kmedved/OptGBM
7f37f4becea3e6d23dbc663b0c2c212d54ae413c
[ "MIT" ]
3
2020-01-25T05:44:14.000Z
2021-10-01T04:12:29.000Z
"""OptGBM package.""" import logging from pkg_resources import DistributionNotFound from pkg_resources import get_distribution try: distribution = get_distribution(__name__) __version__ = distribution.version except DistributionNotFound: pass from lightgbm import * # noqa from . import basic # noqa from . import compat # noqa from . import sklearn # noqa from . import typing # noqa from . import utils # noqa from .sklearn import * # noqa logger = logging.getLogger(__name__) handler = logging.StreamHandler() logger.addHandler(handler) logger.setLevel(logging.INFO)
20.586207
46
0.760469
68
597
6.441176
0.426471
0.109589
0.159817
0.100457
0
0
0
0
0
0
0
0
0.165829
597
28
47
21.321429
0.879518
0.085427
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0.052632
0.526316
0
0.526316
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
1
0
1
0
0
3
62e60984c03d7d6173d5e1b94902e5e5564f46d7
503
py
Python
countries/migrations/0006_auto_20180218_2036.py
kneirinck/django-countries-flavor
446f1f8eccb40b2f1e413905d319a5dc19bbea3b
[ "MIT" ]
null
null
null
countries/migrations/0006_auto_20180218_2036.py
kneirinck/django-countries-flavor
446f1f8eccb40b2f1e413905d319a5dc19bbea3b
[ "MIT" ]
null
null
null
countries/migrations/0006_auto_20180218_2036.py
kneirinck/django-countries-flavor
446f1f8eccb40b2f1e413905d319a5dc19bbea3b
[ "MIT" ]
null
null
null
# Generated by Django 2.0.1 on 2018-02-18 20:36

from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('countries', '0005_remove_locale_data'),
    ]

    operations = [
        migrations.RenameField(
            model_name='country',
            old_name='mpoly',
            new_name='outlines',
        ),
        migrations.RenameField(
            model_name='division',
            old_name='poly',
            new_name='bbox',
        ),
    ]
20.958333
49
0.554672
50
503
5.4
0.72
0.155556
0.192593
0.222222
0
0
0
0
0
0
0
0.056548
0.332008
503
23
50
21.869565
0.747024
0.089463
0
0.235294
1
0
0.149123
0.050439
0
0
0
0
0
1
0
false
0
0.058824
0
0.235294
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
62f5004bdab4005c86fc2d9311630af54d7663dc
303
py
Python
tests/test_version.py
fy0/querylayer
424411a7d69ef2732b3c6e2f90030f93eb45f059
[ "MIT" ]
4
2020-11-09T03:11:45.000Z
2021-04-21T12:37:12.000Z
tests/test_version.py
fy0/pycurd
424411a7d69ef2732b3c6e2f90030f93eb45f059
[ "MIT" ]
null
null
null
tests/test_version.py
fy0/pycurd
424411a7d69ef2732b3c6e2f90030f93eb45f059
[ "MIT" ]
1
2022-03-27T06:43:21.000Z
2022-03-27T06:43:21.000Z
import os
import re

from pycrud import __version__


def test_version():
    project_file = open(os.path.abspath(os.path.join(os.path.dirname(__file__), '..', 'pyproject.toml')), 'r',
                        encoding='utf-8').read()
    m = re.search(r"version = \"(.+?)\"", project_file)
    assert m.group(1) == __version__
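A self-contained check of the regex used above, run against an inline stand-in for pyproject.toml (runnable as-is):

import re

sample = 'name = "pycrud"\nversion = "1.0.3"\n'
m = re.search(r"version = \"(.+?)\"", sample)
assert m and m.group(1) == "1.0.3"   # non-greedy group captures just the version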
27.545455
135
0.663366
43
303
4.325581
0.627907
0.096774
0.193548
0
0
0
0
0
0
0
0
0.007663
0.138614
303
10
136
30.3
0.704981
0
0
0
0
0
0.108911
0
0
0
0
0
0.142857
1
0.142857
false
0
0.428571
0
0.571429
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
3
62f79e9b7586592240ecaefc5a5f9f424eecd6a0
266
py
Python
v2/crawlers/factory.py
DucPhamTV/MaiTet
44a1465a3239808f6640592ba666d9c5449c0ef4
[ "MIT" ]
null
null
null
v2/crawlers/factory.py
DucPhamTV/MaiTet
44a1465a3239808f6640592ba666d9c5449c0ef4
[ "MIT" ]
15
2021-02-20T12:03:33.000Z
2021-07-26T10:15:03.000Z
v2/crawlers/factory.py
DucPhamTV/MaiTet
44a1465a3239808f6640592ba666d9c5449c0ef4
[ "MIT" ]
null
null
null
from .simple_crawler import SimpleCrawler


class Factory:
    def __init__(self, url):
        self.url = url

    @staticmethod
    def create_crawler(url):
        # TODO: Analyse url
        # Hard code to return simple crawler
        return SimpleCrawler(url)
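The TODO above points at URL-based dispatch. A self-contained sketch of what that could look like; the crawler classes here are hypothetical stand-ins, not part of the repo:

class SimpleCrawlerStub:
    def __init__(self, url):
        self.url = url

class FeedCrawlerStub(SimpleCrawlerStub):
    pass

def create_crawler(url):
    # Dispatch on the URL instead of hard-coding one crawler class.
    if url.endswith((".rss", ".xml")):
        return FeedCrawlerStub(url)
    return SimpleCrawlerStub(url)

assert isinstance(create_crawler("https://example.com/feed.xml"), FeedCrawlerStub)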
20.461538
44
0.654135
31
266
5.419355
0.612903
0.154762
0
0
0
0
0
0
0
0
0
0
0.281955
266
12
45
22.166667
0.879581
0.195489
0
0
0
0
0
0
0
0
0
0.083333
0
1
0.285714
false
0
0.142857
0.142857
0.714286
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
1
0
0
0
1
0
0
0
3
1a0188a2b25ce0ef3cf47b11f2718dba5c77a126
1,088
py
Python
paddleseg/datasets/__init__.py
QinchengZhang/PaddleSeg
73321822b3c6836540281f801e1e163abee91cdc
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
paddleseg/datasets/__init__.py
QinchengZhang/PaddleSeg
73321822b3c6836540281f801e1e163abee91cdc
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
paddleseg/datasets/__init__.py
QinchengZhang/PaddleSeg
73321822b3c6836540281f801e1e163abee91cdc
[ "ECL-2.0", "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*-
'''
Author: TJUZQC
Date: 2021-01-12 15:07:21
LastEditors: TJUZQC
LastEditTime: 2021-01-12 15:23:04
Description: None
'''
# Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from .dataset import Dataset
from .cityscapes import Cityscapes
from .voc import PascalVOC
from .ade import ADE20K
from .optic_disc_seg import OpticDiscSeg
from .bjsclc import BJSCLC
from .breast_cancer import BreastCancer
from .remotesensing import RemoteSensing
from .segpc2021 import SegPC2021
from .tn_scui2020 import TN_SCUI2020
34
74
0.779412
162
1,088
5.203704
0.648148
0.071174
0.01898
0.023725
0
0
0
0
0
0
0
0.059331
0.147978
1,088
32
75
34
0.850054
0.659926
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
3
1a075cc25df39a7dc4988fc582cb5c537d9e4c24
1,648
py
Python
chronometry/estimate/Measurement.py
idin/chronometry
e022a1d06d8e8be990483130bc314a189de2f149
[ "MIT" ]
null
null
null
chronometry/estimate/Measurement.py
idin/chronometry
e022a1d06d8e8be990483130bc314a189de2f149
[ "MIT" ]
null
null
null
chronometry/estimate/Measurement.py
idin/chronometry
e022a1d06d8e8be990483130bc314a189de2f149
[ "MIT" ]
null
null
null
class Measurement:
    def __init__(self, x, result, elapsed, timeout_error, other_error):
        """
        :type x: dict
        :type result: object
        :type elapsed: float
        :type timeout_error: bool
        """
        if not isinstance(x, dict):
            raise TypeError('x should be a dictionary!')

        self._x = x
        self._result = result
        self._elapsed_time = elapsed
        self._timeout_error = timeout_error
        self._other_error = other_error
        self._weight = None

    @property
    def weight(self):
        return self._weight

    @property
    def timeout_error(self):
        return self._timeout_error

    @property
    def other_error(self):
        return self._other_error

    def __lt__(self, other):
        return self.elapsed_time < other.elapsed_time

    def __le__(self, other):
        return self.elapsed_time <= other.elapsed_time

    def __eq__(self, other):
        return self.elapsed_time == other.elapsed_time

    def __gt__(self, other):
        return self.elapsed_time > other.elapsed_time

    def __ge__(self, other):
        return self.elapsed_time >= other.elapsed_time

    def __ne__(self, other):
        return self.elapsed_time != other.elapsed_time

    @property
    def x(self):
        """
        :rtype: dict
        """
        return self._x

    @property
    def elapsed_time(self):
        return self._elapsed_time

    @property
    def dictionary(self):
        if isinstance(self._result, dict) and 'elapsed' not in self._result and 'x' not in self._result:
            return dict(
                x=self._x,
                time=self.elapsed_time,
                timeout_error=self._timeout_error,
                other_error=self._other_error,
                **self._result
            )
        else:
            return {
                **self._x,
                'time': self.elapsed_time,
                'result': self._result,
                'timeout_error': self._timeout_error,
                'other_error': self._other_error
            }
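Because all six rich comparisons above delegate to elapsed_time, measurements sort by elapsed time out of the box. A quick check, runnable right after the class definition (the argument values are made up):

fast = Measurement({'n': 1}, result=None, elapsed=0.2, timeout_error=False, other_error=False)
slow = Measurement({'n': 2}, result=None, elapsed=1.5, timeout_error=False, other_error=False)

assert fast < slow                        # __lt__ compares elapsed_time
assert sorted([slow, fast])[0] is fast    # so sorted() orders by elapsed time
assert fast.dictionary['time'] == 0.2     # non-dict result takes the else branch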
23.211268
104
0.720874
232
1,648
4.75
0.172414
0.169691
0.136116
0.133394
0.401996
0.401996
0.358439
0.358439
0.358439
0.316697
0
0
0.169296
1,648
70
105
23.542857
0.804967
0.057039
0
0.122449
0
0
0.043934
0
0
0
0
0
0
1
0.265306
false
0
0
0.204082
0.55102
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
3