hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
7a5efdd3fa22d91097ebe01edb43cd8a8309df98
2,817
py
Python
main.py
devlocalhost/covidpy
4c2e8ce8831a389b9ca5bad8ed64aa3c4fb5c60c
[ "MIT" ]
null
null
null
main.py
devlocalhost/covidpy
4c2e8ce8831a389b9ca5bad8ed64aa3c4fb5c60c
[ "MIT" ]
null
null
null
main.py
devlocalhost/covidpy
4c2e8ce8831a389b9ca5bad8ed64aa3c4fb5c60c
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 from traceback import format_exc from requests import get from colorhex import colorex, BOLD from datetime import datetime from sys import exit from os import system BLURPLE = '7289da' GREEN = '43b581' YELLOW = 'fdcc4b' RED = 'f04947' def main(): system('clear') country = input(colorex('Enter a countries name, press enter without typing anything to auto detect your country or e to exit\n -> ', GREEN, BOLD)) if country == '': try: auto_country = get('http://www.geoplugin.net/json.gp').json() except Exception as exc: system('clear') print(colorex(f'An error occured while trying to auto detect country. Please try again or enter the countries name and make sure you have internet access\nTraceback: {exc}', RED, BOLD)) input(colorex('Press enter to go back\n-> ', GREEN, BOLD)) system('clear') main() country = auto_country['geoplugin_countryName'] getcovidstats(country) elif country == 'e': system('clear') exit() elif country != '': getcovidstats(country) def getcovidstats(country): try: resp = get(f'https://disease.sh/v3/covid-19/countries/{country}').json() except Exception as exc: system('clear') print(colorex(f'An error occured while trying to get covid 19 stats. Please try again later and make sure you have internet access\nTraceback: {exc}', RED, BOLD)) input(colorex('Press enter to go back\n-> ', GREEN, BOLD)) system('clear') main() try: country_name = resp['country'] except KeyError as exc: system('clear') print(colorex(f'Invalid country name, or the country doesnt have stats. 
Please try again\nTraceback: {format_exc()}', RED, BOLD)) input(colorex('Press enter to go back\n-> ', GREEN, BOLD)) system('clear') main() short_country_name = resp['countryInfo']['iso2'] country_population = resp['population'] total_cases = resp['cases'] cases_today = resp['todayCases'] total_deaths = resp['deaths'] deaths_today = resp['todayDeaths'] total_recovered = resp['recovered'] today_recovered = resp['todayRecovered'] continent = resp['continent'] updated_at = datetime.fromtimestamp(resp['updated'] / 1000.0).strftime('%d %B %Y at %I:%M:%S %p') system('clear') print(colorex(f'Country: {country_name} ({short_country_name})', BLURPLE, BOLD)) print(colorex(f'Continent: {continent}', BLURPLE, BOLD)) print(colorex(f'Population: {country_population}', GREEN, BOLD)) print(colorex(f'Total cases: {total_cases}, Today: {cases_today}', RED, BOLD)) print(colorex(f'Total deaths: {total_deaths}, Today: {deaths_today}', RED, BOLD)) print(colorex(f'Total recovered: {total_recovered}, Today: {today_recovered}', GREEN, BOLD)) print(colorex(f'Updated at: {updated_at}', YELLOW, BOLD)) input(colorex('Press enter to go back\n-> ', GREEN, BOLD)) system('clear') main() main()
29.652632
188
0.709265
394
2,817
5.002538
0.30203
0.055809
0.065956
0.05175
0.354135
0.292745
0.292745
0.24759
0.24759
0.24759
0
0.011213
0.14519
2,817
95
189
29.652632
0.807309
0.007455
0
0.382353
0
0.044118
0.424535
0.015379
0
0
0
0
0
1
0.029412
false
0
0.088235
0
0.117647
0.147059
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7a60e3f7182adc1bb57a1b4a891e3d337828847a
676
py
Python
Proj1/test.py
sebemery/EE559-DeepLearning-MiniProjects
34ea114884e2e4f1416d63fa53466619e6a5c5fd
[ "MIT" ]
null
null
null
Proj1/test.py
sebemery/EE559-DeepLearning-MiniProjects
34ea114884e2e4f1416d63fa53466619e6a5c5fd
[ "MIT" ]
null
null
null
Proj1/test.py
sebemery/EE559-DeepLearning-MiniProjects
34ea114884e2e4f1416d63fa53466619e6a5c5fd
[ "MIT" ]
1
2020-03-18T18:58:02.000Z
2020-03-18T18:58:02.000Z
from models.Nets import * from models.Basic import * from models.Inception_Net import * from models.Le_Net import * from utils.dlc_practical_prologue import * from utils.Evaluate import * from utils.grid_search import * from utils.loader import * from utils.metrics import * from utils.plot import * from utils.training import * import argparse if __name__ == "__main__": Nets_default = Nets() seeds = [1,2,3,4,5,6,7,8,9,10] train_results, test_losses,test_accuracies = evaluate_model(Nets_default.LeNet_sharing_aux, seeds , plot =False, rotate = True,translate=True,swap_channel = True)
30.727273
117
0.683432
91
676
4.835165
0.549451
0.227273
0.238636
0
0
0
0
0
0
0
0
0.021195
0.232249
676
21
118
32.190476
0.82659
0
0
0
0
0
0.011834
0
0
0
0
0
0
1
0
false
0
0.705882
0
0.705882
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
7a60ea9079d8b4f42edfd2138f6895976593e4dd
905
py
Python
examples/example4/sub_processt.py
bb515/probabilistic-peridynamics-project
263c81b8736fd8308d3faf096714b12a2f787322
[ "MIT" ]
null
null
null
examples/example4/sub_processt.py
bb515/probabilistic-peridynamics-project
263c81b8736fd8308d3faf096714b12a2f787322
[ "MIT" ]
null
null
null
examples/example4/sub_processt.py
bb515/probabilistic-peridynamics-project
263c81b8736fd8308d3faf096714b12a2f787322
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- """ Created on Sat May 2 10:21:16 2020 @author: Ben Boys """ import subprocess beams = ['3300beam952t.msh', '3300beam2970t.msh', '3300beam4392t.msh', '3300beam6048t.msh', '3300beam11836t.msh', '3300beam17600t.msh', '3300beam31680t.msh', '3300beam64350t.msh', '3300beam149600t.msh'] with open("data_force_3300t.txt", "w+") as output: for beam in beams: subprocess.call(["python", "./example4t.py", beam, "--profile"], stdout=output); # ============================================================================= # with open("data_displacement_optimised_3300.txt", "w+") as output: # for beam in beams: # subprocess.call(["python", "./example4d.py", beam, "--optimised", "--profile"], stdout=output); # =============================================================================
33.518519
105
0.499448
82
905
5.45122
0.609756
0.035794
0.053691
0.053691
0.205817
0.205817
0.205817
0.205817
0.205817
0.205817
0
0.133965
0.183425
905
26
106
34.807692
0.470907
0.471823
0
0
0
0
0.449462
0
0
0
0
0
0
1
0
false
0
0.076923
0
0.076923
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
7a60ed8d5b51faf0acae3eb73b2aefe676b8f9fb
407
py
Python
data-detective-airflow/dags/dags/dummy/code/code.py
dmitriy-e/metadata-governance
018a879951dee3f3c2c05ac8e05b8360dd7f4ab3
[ "Apache-2.0" ]
5
2021-12-01T09:55:23.000Z
2021-12-21T16:23:33.000Z
data-detective-airflow/dags/dags/dummy/code/code.py
dmitriy-e/metadata-governance
018a879951dee3f3c2c05ac8e05b8360dd7f4ab3
[ "Apache-2.0" ]
1
2022-03-14T16:50:41.000Z
2022-03-14T16:50:41.000Z
data-detective-airflow/dags/dags/dummy/code/code.py
dmitriy-e/metadata-governance
018a879951dee3f3c2c05ac8e05b8360dd7f4ab3
[ "Apache-2.0" ]
2
2021-11-03T09:43:09.000Z
2021-11-17T10:16:29.000Z
import yaml from pandas import DataFrame def val_translate(context, in_df: DataFrame, file_name: str) -> DataFrame: task = context.get('task') out_df = in_df.copy() with open(f'{task.dag.etc_dir}/{file_name}', 'r', encoding='utf-8') as cfg: config = yaml.safe_load(cfg) out_df['test'] = out_df.apply( lambda row: config[row['test']], axis=1 ) return out_df
27.133333
79
0.633907
62
407
3.983871
0.645161
0.080972
0
0
0
0
0
0
0
0
0
0.006289
0.218673
407
14
80
29.071429
0.77044
0
0
0
0
0
0.117936
0.07371
0
0
0
0
0
1
0.083333
false
0
0.166667
0
0.333333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7a61638ec41b218d13f9ed19e5fe88c4dfeca072
331
py
Python
Regex/3_Group_Groups_ Groupdict.py
FaranakAlikhah/ADM-HW1
f4255112c58a4a200d04c943c74f096cc31e9dad
[ "MIT" ]
null
null
null
Regex/3_Group_Groups_ Groupdict.py
FaranakAlikhah/ADM-HW1
f4255112c58a4a200d04c943c74f096cc31e9dad
[ "MIT" ]
null
null
null
Regex/3_Group_Groups_ Groupdict.py
FaranakAlikhah/ADM-HW1
f4255112c58a4a200d04c943c74f096cc31e9dad
[ "MIT" ]
null
null
null
#!/usr/bin/env python # coding: utf-8 # # section 13.Regex and Parsing challenges : # # ### writer : Faranak Alikhah 1954128 # ### 3. Group(), Groups() & Groupdict() : # In[ ]: import re s= input() pattern=r'([A-Z a-z 0-9])\1+'#alphabet numeric m=re.search(pattern,s) if m: print(m.group(1)) else: print(-1) #
13.791667
47
0.589124
51
331
3.823529
0.784314
0.020513
0
0
0
0
0
0
0
0
0
0.060837
0.205438
331
23
48
14.391304
0.680608
0.528701
0
0
0
0
0.130435
0
0
0
0
0
0
1
0
false
0
0.125
0
0.125
0.25
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7a631c8b774ce30119cf2fb20a75fd6b612229ac
2,238
py
Python
checker/__main__.py
vyahello/pep8-checker
fe5d0746201d3a26ec2ae0c9c4a70203700af1f0
[ "MIT" ]
3
2020-08-09T15:17:44.000Z
2022-03-19T22:16:25.000Z
checker/__main__.py
vyahello/pep8-checker
fe5d0746201d3a26ec2ae0c9c4a70203700af1f0
[ "MIT" ]
4
2020-08-12T21:25:16.000Z
2021-04-17T10:57:57.000Z
checker/__main__.py
vyahello/pep8-checker
fe5d0746201d3a26ec2ae0c9c4a70203700af1f0
[ "MIT" ]
1
2020-12-17T10:06:37.000Z
2020-12-17T10:06:37.000Z
"""Represents executable entrypoint for `pep8-checker` application.""" import http import os from typing import Any, Dict, Optional from pathlib import Path import attr from bottle import TEMPLATE_PATH, abort, request, route, run, view import requests TEMPLATE_PATH.append(str(Path('./') / 'checker' / 'views')) def api_url() -> str: """Returns AWS_ENDPOINT URL.""" url: str = os.environ.get('AWS_ENDPOINT', '') if not url: raise RuntimeError('Please set API_URL environment variable') return url @attr.dataclass(frozen=True, slots=True) class Server: """The class represents a server endpoint.""" host: str = '0.0.0.0' port: str = os.environ.get('PORT', '5050') is_debug: bool = True reloader: bool = True def as_json(self) -> Dict[str, Any]: """Returns server configuration as a dict.""" return { 'host': self.host, 'port': self.port, 'is_debug': self.is_debug, 'reloader': self.reloader, } @route('/', method=('GET', 'POST')) @view(tpl_name='index') def index() -> Dict[str, str]: """Specify index page view. Returns: <dict[str, str]> response from AWS lambda server. """ title = 'PEP8 Checker' code: str = request.forms.get('code', '') # pylint: disable=no-member if code: response: Dict[Any, Any] = requests.post( url=api_url(), json={'code': code} ).json() error: Optional[str] = response.get('errorMessage') exception: Optional[str] = response.get('errorType') if error and exception: abort( code=int(http.HTTPStatus.BAD_REQUEST), text=f'Lambda function returned status {exception} exception', ) return {'title': title, 'code': code, 'pep_errors': response['body']} return {'title': title, 'code': code, 'pep_errors': ''} def easyrun(server: Server = Server()) -> None: """Launches a web application. Args: server: <Server> a given server configuration. """ run( host=server.host, port=server.port, debug=server.is_debug, reloader=server.reloader, ) if __name__ == '__main__': easyrun()
27.975
78
0.602324
268
2,238
4.940299
0.391791
0.021148
0.018127
0.022659
0.049849
0.049849
0.049849
0
0
0
0
0.005974
0.252011
2,238
79
79
28.329114
0.784946
0.164433
0
0
0
0
0.143643
0
0
0
0
0
0
1
0.075472
false
0
0.132075
0
0.377358
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7a645f8adcca004eb3891c7ce1a1f1c15e442c29
4,758
py
Python
python/experiment.py
khoadoan/adversarial-hashing
eeeeb464b4fe7084efd38a6257499d674a9a7194
[ "Apache-2.0" ]
6
2020-06-26T09:51:07.000Z
2021-09-09T09:48:55.000Z
python/experiment.py
khoadoan/adversarial-hashing
eeeeb464b4fe7084efd38a6257499d674a9a7194
[ "Apache-2.0" ]
null
null
null
python/experiment.py
khoadoan/adversarial-hashing
eeeeb464b4fe7084efd38a6257499d674a9a7194
[ "Apache-2.0" ]
1
2020-07-21T19:34:38.000Z
2020-07-21T19:34:38.000Z
import torch from torch import nn import torchvision.transforms as transforms from tqdm import tqdm import numpy as np from train import model_helper from models import mlp, dcgan import data_manual def get_numpy_data(dataloader): x, y = [], [] for batch_x, batch_y in tqdm(iter(dataloader)): x.append(batch_x.numpy()) y.append(batch_y.numpy()) x = np.vstack(x) y = np.concatenate(y) return x, y def get_mnist_dataloaders(image_size, batch_size, dataroot, workers=0, data_transforms=None): if data_transforms is None: data_transforms = transforms.Compose([ transforms.Resize(image_size), transforms.ToTensor() ]) train_dataloader = data_manual.get_dataloader('mnist', image_size, batch_size, dataroot=dataroot, workers=0, data_transforms=None, type='train', shuffle=True, clear_cache=True) db_dataloader = data_manual.get_dataloader('mnist', image_size, batch_size, dataroot=dataroot, workers=0, data_transforms=None, type='db', shuffle=False, clear_cache=True) query_dataloader = data_manual.get_dataloader('mnist', image_size, batch_size, dataroot=dataroot, workers=0, data_transforms=None, type='query', shuffle=False, clear_cache=True) return train_dataloader, db_dataloader, query_dataloader def create_mlp_encoder_nobn(args, device): net = mlp.Encoder(args.image_size, args.nc, args.enc_layers, args.nz, activation=nn.LeakyReLU(0.2), use_bn=False, dropout=0) net.apply(model_helper.weights_init) print(net) optimizer = model_helper.get_optimizer(args, net.parameters()) if torch.cuda.is_available(): net = net.type(torch.cuda.FloatTensor) return net, optimizer def create_mlp_decoder_nobn(args, device): net = mlp.Decoder(args.nz, args.dec_layers, args.nc, args.image_size, activation=nn.LeakyReLU(0.2), output_activation=nn.Tanh(), use_bn=False, dropout=0) net.apply(model_helper.weights_init) print(net) optimizer = model_helper.get_optimizer(args, net.parameters()) if torch.cuda.is_available(): net = net.type(torch.cuda.FloatTensor) return net, optimizer def create_mlp_encoder(args, 
device): net = mlp.Encoder(args.image_size, args.nc, args.enc_layers, args.nz, activation=nn.LeakyReLU(0.2), use_bn=True, dropout=0) net.apply(model_helper.weights_init) print(net) optimizer = model_helper.get_optimizer(args, net.parameters()) if torch.cuda.is_available(): net = net.type(torch.cuda.FloatTensor) return net, optimizer def create_mlp_decoder(args, device): net = mlp.Decoder(args.nz, args.dec_layers, args.nc, args.image_size, activation=nn.LeakyReLU(0.2), output_activation=nn.Tanh(), use_bn=True, dropout=0) net.apply(model_helper.weights_init) print(net) optimizer = model_helper.get_optimizer(args, net.parameters()) if torch.cuda.is_available(): net = net.type(torch.cuda.FloatTensor) return net, optimizer def create_dcgan_encoder(args, device): net = dcgan.Encoder(args.image_size, args.nc, args.ndf, args.nz, args.n_extra_layers) net.apply(model_helper.weights_init) print(net) optimizer = model_helper.get_optimizer(args, net.parameters()) if torch.cuda.is_available(): net = net.type(torch.cuda.FloatTensor) return net, optimizer def create_dcgan_decoder(args, device): net = dcgan.Decoder(args.nz, args.ngf, args.nc, args.image_size, args.n_extra_layers) net.apply(model_helper.weights_init) print(net) optimizer = model_helper.get_optimizer(args, net.parameters()) if torch.cuda.is_available(): net = net.type(torch.cuda.FloatTensor) return net, optimizer def summarize_results(loss, metrics, ncols=5, figsize=(5 * 4, 3)): if type(loss) != dict: loss = loss.__dict__ metrics = metrics.__dict__ nrows = np.ceil(len(loss) / ncols).astype(int) fig, axes = plt.subplots(nrows, ncols, figsize=figsize) for i, (k, v) in enumerate(loss.items()): if len(axes.shape) > 1: ax = axes[int(i / nrows), i % ncols] else: ax = axes[i % ncols] if len(v) > 0: x, y = list(zip(*v.items())) if 'grad' in k: y = [e[0] for e in y] #only get the max norm ax = sns.lineplot(x[10:], y[10:], ax=ax) ax.set_title('{}: {:.4f}'.format(k, np.min(y))) fig.suptitle('Losses') plt.tight_layout() 
plt.show()
37.171875
121
0.648802
644
4,758
4.614907
0.208075
0.048116
0.026245
0.038358
0.663526
0.630888
0.619448
0.609354
0.609354
0.609354
0
0.007671
0.232871
4,758
128
122
37.171875
0.806575
0.004414
0
0.407767
0
0
0.009922
0
0
0
0
0
0
1
0.087379
false
0
0.07767
0
0.242718
0.058252
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
7a651dc10038b543bed65a91a976010e9fef4523
492
py
Python
modules/vtk_basic/vtkDataObjectReader.py
chrisidefix/devide
99bfe156e710fa47ba7ae88b0ce1eef592a3a439
[ "BSD-3-Clause" ]
25
2015-08-24T16:05:14.000Z
2020-12-09T20:07:14.000Z
modules/vtk_basic/vtkDataObjectReader.py
chrisidefix/devide
99bfe156e710fa47ba7ae88b0ce1eef592a3a439
[ "BSD-3-Clause" ]
1
2016-02-16T21:18:10.000Z
2016-02-16T21:18:10.000Z
modules/vtk_basic/vtkDataObjectReader.py
chrisidefix/devide
99bfe156e710fa47ba7ae88b0ce1eef592a3a439
[ "BSD-3-Clause" ]
5
2016-02-16T20:05:37.000Z
2020-01-31T11:27:39.000Z
# class generated by DeVIDE::createDeVIDEModuleFromVTKObject from module_kits.vtk_kit.mixins import SimpleVTKClassModuleBase import vtk class vtkDataObjectReader(SimpleVTKClassModuleBase): def __init__(self, module_manager): SimpleVTKClassModuleBase.__init__( self, module_manager, vtk.vtkDataObjectReader(), 'Reading vtkDataObject.', (), ('vtkDataObject',), replaceDoc=True, inputFunctions=None, outputFunctions=None)
37.846154
64
0.71748
39
492
8.74359
0.641026
0.046921
0.082111
0.123167
0
0
0
0
0
0
0
0
0.203252
492
12
65
41
0.869898
0.117886
0
0
1
0
0.081019
0
0
0
0
0
0
1
0.1
false
0
0.2
0
0.4
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
7a6651f10741e2f55d41c5d49f4a11f863eca070
8,311
py
Python
venv/lib/python3.7/site-packages/twilio/rest/preview/trusted_comms/brands_information.py
uosorio/heroku_face
7d6465e71dba17a15d8edaef520adb2fcd09d91e
[ "Apache-2.0" ]
1,362
2015-01-04T10:25:18.000Z
2022-03-24T10:07:08.000Z
venv/lib/python3.7/site-packages/twilio/rest/preview/trusted_comms/brands_information.py
uosorio/heroku_face
7d6465e71dba17a15d8edaef520adb2fcd09d91e
[ "Apache-2.0" ]
299
2015-01-30T09:52:39.000Z
2022-03-31T23:03:02.000Z
venv/lib/python3.7/site-packages/twilio/rest/preview/trusted_comms/brands_information.py
uosorio/heroku_face
7d6465e71dba17a15d8edaef520adb2fcd09d91e
[ "Apache-2.0" ]
622
2015-01-03T04:43:09.000Z
2022-03-29T14:11:00.000Z
# coding=utf-8 r""" This code was generated by \ / _ _ _| _ _ | (_)\/(_)(_|\/| |(/_ v1.0.0 / / """ from twilio.base import deserialize from twilio.base import values from twilio.base.instance_context import InstanceContext from twilio.base.instance_resource import InstanceResource from twilio.base.list_resource import ListResource from twilio.base.page import Page class BrandsInformationList(ListResource): """ PLEASE NOTE that this class contains preview products that are subject to change. Use them with caution. If you currently do not have developer preview access, please contact help@twilio.com. """ def __init__(self, version): """ Initialize the BrandsInformationList :param Version version: Version that contains the resource :returns: twilio.rest.preview.trusted_comms.brands_information.BrandsInformationList :rtype: twilio.rest.preview.trusted_comms.brands_information.BrandsInformationList """ super(BrandsInformationList, self).__init__(version) # Path Solution self._solution = {} def get(self): """ Constructs a BrandsInformationContext :returns: twilio.rest.preview.trusted_comms.brands_information.BrandsInformationContext :rtype: twilio.rest.preview.trusted_comms.brands_information.BrandsInformationContext """ return BrandsInformationContext(self._version, ) def __call__(self): """ Constructs a BrandsInformationContext :returns: twilio.rest.preview.trusted_comms.brands_information.BrandsInformationContext :rtype: twilio.rest.preview.trusted_comms.brands_information.BrandsInformationContext """ return BrandsInformationContext(self._version, ) def __repr__(self): """ Provide a friendly representation :returns: Machine friendly representation :rtype: str """ return '<Twilio.Preview.TrustedComms.BrandsInformationList>' class BrandsInformationPage(Page): """ PLEASE NOTE that this class contains preview products that are subject to change. Use them with caution. If you currently do not have developer preview access, please contact help@twilio.com. 
""" def __init__(self, version, response, solution): """ Initialize the BrandsInformationPage :param Version version: Version that contains the resource :param Response response: Response from the API :returns: twilio.rest.preview.trusted_comms.brands_information.BrandsInformationPage :rtype: twilio.rest.preview.trusted_comms.brands_information.BrandsInformationPage """ super(BrandsInformationPage, self).__init__(version, response) # Path Solution self._solution = solution def get_instance(self, payload): """ Build an instance of BrandsInformationInstance :param dict payload: Payload response from the API :returns: twilio.rest.preview.trusted_comms.brands_information.BrandsInformationInstance :rtype: twilio.rest.preview.trusted_comms.brands_information.BrandsInformationInstance """ return BrandsInformationInstance(self._version, payload, ) def __repr__(self): """ Provide a friendly representation :returns: Machine friendly representation :rtype: str """ return '<Twilio.Preview.TrustedComms.BrandsInformationPage>' class BrandsInformationContext(InstanceContext): """ PLEASE NOTE that this class contains preview products that are subject to change. Use them with caution. If you currently do not have developer preview access, please contact help@twilio.com. 
""" def __init__(self, version): """ Initialize the BrandsInformationContext :param Version version: Version that contains the resource :returns: twilio.rest.preview.trusted_comms.brands_information.BrandsInformationContext :rtype: twilio.rest.preview.trusted_comms.brands_information.BrandsInformationContext """ super(BrandsInformationContext, self).__init__(version) # Path Solution self._solution = {} self._uri = '/BrandsInformation'.format(**self._solution) def fetch(self, if_none_match=values.unset): """ Fetch the BrandsInformationInstance :param unicode if_none_match: Standard `If-None-Match` HTTP header :returns: The fetched BrandsInformationInstance :rtype: twilio.rest.preview.trusted_comms.brands_information.BrandsInformationInstance """ headers = values.of({'If-None-Match': if_none_match, }) payload = self._version.fetch(method='GET', uri=self._uri, headers=headers, ) return BrandsInformationInstance(self._version, payload, ) def __repr__(self): """ Provide a friendly representation :returns: Machine friendly representation :rtype: str """ context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items()) return '<Twilio.Preview.TrustedComms.BrandsInformationContext {}>'.format(context) class BrandsInformationInstance(InstanceResource): """ PLEASE NOTE that this class contains preview products that are subject to change. Use them with caution. If you currently do not have developer preview access, please contact help@twilio.com. 
""" def __init__(self, version, payload): """ Initialize the BrandsInformationInstance :returns: twilio.rest.preview.trusted_comms.brands_information.BrandsInformationInstance :rtype: twilio.rest.preview.trusted_comms.brands_information.BrandsInformationInstance """ super(BrandsInformationInstance, self).__init__(version) # Marshaled Properties self._properties = { 'update_time': deserialize.iso8601_datetime(payload.get('update_time')), 'file_link': payload.get('file_link'), 'file_link_ttl_in_seconds': payload.get('file_link_ttl_in_seconds'), 'url': payload.get('url'), } # Context self._context = None self._solution = {} @property def _proxy(self): """ Generate an instance context for the instance, the context is capable of performing various actions. All instance actions are proxied to the context :returns: BrandsInformationContext for this BrandsInformationInstance :rtype: twilio.rest.preview.trusted_comms.brands_information.BrandsInformationContext """ if self._context is None: self._context = BrandsInformationContext(self._version, ) return self._context @property def update_time(self): """ :returns: Creation time of the information retrieved :rtype: datetime """ return self._properties['update_time'] @property def file_link(self): """ :returns: The URL to the brands information :rtype: unicode """ return self._properties['file_link'] @property def file_link_ttl_in_seconds(self): """ :returns: How long will be the `file_link` valid :rtype: unicode """ return self._properties['file_link_ttl_in_seconds'] @property def url(self): """ :returns: The URL of this resource :rtype: unicode """ return self._properties['url'] def fetch(self, if_none_match=values.unset): """ Fetch the BrandsInformationInstance :param unicode if_none_match: Standard `If-None-Match` HTTP header :returns: The fetched BrandsInformationInstance :rtype: twilio.rest.preview.trusted_comms.brands_information.BrandsInformationInstance """ return self._proxy.fetch(if_none_match=if_none_match, ) def 
__repr__(self): """ Provide a friendly representation :returns: Machine friendly representation :rtype: str """ context = ' '.join('{}={}'.format(k, v) for k, v in self._solution.items()) return '<Twilio.Preview.TrustedComms.BrandsInformationInstance {}>'.format(context)
34.342975
96
0.679461
837
8,311
6.549582
0.166069
0.055819
0.052718
0.074425
0.668004
0.649033
0.641007
0.612185
0.569318
0.551076
0
0.001255
0.233185
8,311
241
97
34.485477
0.858936
0.495247
0
0.314286
1
0
0.119104
0.082842
0
0
0
0
0
1
0.257143
false
0
0.085714
0
0.6
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
3
7a66f09c5519425a30cfc0792ff8af6ceb340d6d
15,676
py
Python
Projects/Space Invaders/main c clara.py
busterbeam/pygame
5dbbfd8dff5785e5d1010909aec51e9ef8d1cf5c
[ "MIT" ]
6
2018-05-26T13:06:34.000Z
2021-11-08T11:44:28.000Z
Projects/Space Invaders/main c clara.py
busterbeam/pygame
5dbbfd8dff5785e5d1010909aec51e9ef8d1cf5c
[ "MIT" ]
1
2018-09-11T18:35:56.000Z
2019-05-06T13:41:32.000Z
Projects/Space Invaders/main c clara.py
busterbeam/pygame
5dbbfd8dff5785e5d1010909aec51e9ef8d1cf5c
[ "MIT" ]
11
2018-06-01T01:53:11.000Z
2021-08-11T01:00:17.000Z
import pygame from pygame.locals import * from os.path import realpath, dirname from time import time from random import randint def main(): running, settings = load() while running: settings = update(settings) draw(**settings) running = check_exit(**settings) pygame.quit() quit() def load(): screen_size = (450, 333) screen = pygame.display.set_mode(screen_size) pygame.display.set_caption("Shoot'n up") game_object = { 'player' : Player(), 'enemy' : [], 'shoot' : [], 'jump_scare': [], 'hit_effect': [], 'shoot_effect':[], 'bg' : Background(), 'HUD' : [Sprite(dirname(realpath(__file__))+'/assets/img/effects/HUD_Vidas.png', 20, 300)], } game_object = load_level(game_object, 1) path = dirname(realpath(__file__)) last_shoot = time() level = 1 return True, { 'screen_size' : screen_size, 'screen' : screen, 'game_object' : game_object, 'path' : path, 'exit_request' : False, 'last_shoot' : last_shoot, 'level' : level, 'enemy_last_shoot': time() } def load_level(game_object, what_level): path = dirname(realpath(__file__)) if what_level==1: for j in range(10): if j%2: game_object['enemy'].append(Enemy(20+40*j, 40, 0, 0.5)) else: game_object['enemy'].append(Enemy(20+40*j, 40, 0, 1)) elif what_level==2: for i in range(3): for j in range(10): game_object['enemy'].append(Enemy(20+40*j, -70-80*i, i, .8)) elif what_level==3: for i in range(5): if i%2: for j in range(5): x = 20+(440/5*j) game_object['enemy'].append(Enemy(x, -70-80*i, i%3, .8)) else: for j in range(10): x = 20+(440/10*j) game_object['enemy'].append(Enemy(x, -70-80*i, i%3, .8)) else: for i in range(randint(2, 8)): foo = randint(5,10) for j in range(foo): x = 20+(440/foo*j) if (i ==1 or i ==5) and j%2: game_object['enemy'].append(Enemy(x, -70-80*i, i%3, 2)) else: game_object['enemy'].append(Enemy(x, -70-80*i, i%3, 1)) return game_object def update(settings): settings = check_keys(settings) if len(settings['game_object']['enemy'])==0: settings['level'] +=1 load_level(settings['game_object'], settings['level']) 
settings['game_object']['player'].load_img() settings['game_object']['shoot'] = update_shoot(settings['game_object']['shoot']) settings['game_object']['bg'].tile, settings['game_object']['bg'].time = update_bg(settings['game_object']['bg'].tile, settings['game_object']['bg'].time) settings['game_object']['bg'] = parallax(settings['game_object']['player'], settings['game_object']['bg']) settings['game_object']['enemy'] = update_enemy(settings['game_object']['enemy'], settings['screen_size'], settings) settings['game_object'] = collider(settings['game_object']) for fire in settings['game_object']['player'].fires: fire.animation.update() for gO in settings['game_object']['enemy']: gO.fire.animation.update() for explosion in settings['game_object']['hit_effect']: explosion.animation.update() if explosion.animation.pos == 7: settings['game_object']['hit_effect'].remove(explosion) for gO in settings['game_object']['shoot_effect']: gO.animation.update() gO.x += settings['game_object']['player'].x_speed if gO.animation.pos == 6: settings['game_object']['shoot_effect'].remove(gO) return settings def collider(game_object): for shoot in game_object['shoot']: if shoot.origin=='player': for enemy in game_object['enemy']: if (shoot.x>enemy.x and shoot.x<enemy.x+enemy.width) or \ (shoot.x+shoot.width>enemy.x and shoot.x+shoot.width<enemy.x+enemy.width): if (shoot.y<enemy.y+enemy.height and shoot.y>enemy.y): x, y = shoot.x-53/2, shoot.y-25 game_object['hit_effect'].append(Hit_effect(x, y)) enemy.hit_demage() game_object['shoot'].remove(shoot) if enemy.hp<=0: game_object['enemy'].remove(enemy) break if shoot.y<0: try: game_object['shoot'].remove(shoot) except:None if shoot.origin=='enemy': player = game_object['player'] if (shoot.x>player.x and shoot.x<player.x+player.width) or \ (shoot.x+shoot.width>player.x and shoot.x+shoot.width<player.x+player.width): if (shoot.y<player.y+player.height and shoot.y>player.y): x, y = shoot.x-53/2, shoot.y-25 
game_object['hit_effect'].append(Hit_effect(x, y)) game_object['shoot'].remove(shoot) game_object['player'].hp -=1 break if shoot.y<0: try: game_object['shoot'].remove(shoot) except:None return game_object def update_enemy(enemy, screen_size, settings): enemy_who_gonna_shoot = randint(0, len(enemy)) index =0 for gO in enemy: if index == enemy_who_gonna_shoot and time()-settings['enemy_last_shoot']>0.5: x = gO.x+gO.width/2-8 y = gO.y+gO.height settings['game_object']['shoot'].append(Shoot(x,y, 'enemy')) settings['game_object']['shoot_effect'].append(Shoot_effect(x,y, 'enemy')) settings['enemy_last_shoot'] = time() gO.y += gO.y_speed if time()-gO.init>0.1 and gO.hit_mark: gO.image_return() gO.hit_mark = False if gO.y>screen_size[1]: enemy.remove(gO) index +=1 return enemy def parallax(player, bg): middle = player.x foo = -middle/225.00*25 bg.x = foo return bg def update_shoot(shoot): for gO in shoot: gO.y+=gO.y_speed return shoot def check_keys(settings): k = pygame.key.get_pressed() settings['game_object']['player'].player_move_key(k, settings['screen_size']) for e in pygame.event.get(): if e.type == QUIT or (e.type == KEYDOWN and e.key == K_ESCAPE): settings['exit_request'] = True if k[K_SPACE] and time()-settings['last_shoot']>0.24: x, y = settings['game_object']['player'].x, settings['game_object']['player'].y settings['game_object']['shoot'].append(Shoot(x,y+3, 'player')) settings['game_object']['shoot'].append(Shoot(x+24,y+3, 'player')) settings['game_object']['shoot_effect'].append(Shoot_effect(x, y-14, 'player')) settings['game_object']['shoot_effect'].append(Shoot_effect(x+22, y-14, 'player')) settings['last_shoot'] = time() return settings def update_bg(tile, last_time): if time()-last_time>0.02: tile = (tile+1)%200 last_time = time() return tile, last_time def draw(game_object, screen, screen_size, path, **kwargs): draw_bg(screen, game_object['bg']) draw_enemy(screen, game_object['enemy']) draw_shoot_effect(screen, game_object['shoot_effect']) 
draw_player(screen, game_object['player']) draw_HUD(screen, game_object['HUD'], game_object['player'].hp) draw_shoot(screen, game_object['shoot']) draw_hit_effect(screen, game_object['hit_effect']) pygame.display.flip() fps(60) pass def draw_shoot_effect(screen, effect): for gO in effect: screen.blit(gO.img, (int(gO.x), int(gO.y))) def draw_hit_effect(screen, explosion): for gO in explosion: screen.blit(gO.img, (int(gO.x), int(gO.y))) def draw_shoot(screen, shoot): for gO in shoot: screen.blit(gO.img, (int(gO.x), int(gO.y))) def draw_enemy(screen, enemy): for gO in enemy: screen.blit(gO.fire.img, (int(gO.x+16), int(gO.y-7))) screen.blit(gO.img, (int(gO.x),int(gO.y))) def draw_HUD(screen, HUD, lifes): for gO in HUD: if gO.__class__==Sprite: x= gO.x screen.blit(gO.img, (int(gO.x), int(gO.y))) img = pygame.image.load(dirname(realpath(__file__)) + '/assets/img/effects/life.png') for i in range(lifes-1): screen.blit(img, (int(x+5+22*i), int(305))) def draw_player(screen, player): screen.blit(player.img, (int(player.x), int(player.y))) y = player.fires[0].y+player.height-5 if player.pos == 'M': x = player.x+2 #compensar o offset do primeiro fogo x_offset = 25 #compensar o offset do segundo fogo else: x = player.x+4 #compensar o offset do primeiro fogo x_offset = 18 #compensar o offset do segundo fogo screen.blit(player.fires[0].img, (int(x), int(y))) screen.blit(player.fires[0].img, (int(x+x_offset), int(y))) def fps(frames): pygame.time.Clock().tick(frames) def draw_bg(screen, bg): screen.blit(bg.img[bg.tile], (int(bg.x),int(bg.y))) pass def check_exit(exit_request, **kwargs): return not exit_request class Hit_effect: def __init__(self,x,y): self.x = x self.y = y path = dirname(realpath(__file__))+'/assets/img/effects' self.img = pygame.image.load(path+'/explosion0.png') self.animation = Animation({'explosion' : [8, 0.01]}, path, 'explosion', self) class Shoot_effect: def __init__(self, x,y, origin): self.x = x self.y = y self.origin = origin path = 
dirname(realpath(__file__)) if origin == 'player': self.img = pygame.image.load(path+'/assets/img/effects/fire_effectPlayer0.png') self.animation = Animation({'fire_effectPlayer' : [7, 0]}, path+'/assets/img/effects', 'fire_effectPlayer', self) else: self.img = pygame.image.load(path+'/assets/img/effects/fire_effectEnemy0.png') self.animation = Animation({'fire_effectEnemy' : [7, 0]}, path+'/assets/img/effects', 'fire_effectEnemy', self) class Shoot: def __init__(self, x, y, origin): self.x = x self.y = y self.origin = origin if origin == 'player': self.img = pygame.image.load(dirname(realpath(__file__))+'/assets/img/effects/shootPlayer.png') self.y_speed = -4 else: self.img = pygame.image.load(dirname(realpath(__file__))+'/assets/img/effects/shootEnemy.png') self.y_speed = 4 self.width = self.img.get_width() self.height= self.img.get_height() class Sprite: def __init__(self, path, x, y): self.x = x self.y = y self.img = pygame.image.load(path) class Explosion: def __init__(self, x, y): self.x = x self.y = y path = dirname(realpath(__file__)) self.img = pygame.image.load(path+'/assets/img/effects/explosion0.png') self.animation = Animation({'explosion' : [7, 0.2]}, path, 'explosion', self) class Enemy: def __init__(self, x, y, type, y_speed): self.x = x self.x_speed = 0 self.y = y self.hp = type+2 self.y_speed = y_speed self.type = type self.img = pygame.image.load(dirname(realpath(__file__))+'/assets/img/enemy/enemy'+str(type)+'.png').convert_alpha() self.width = self.img.get_width() self.height = self.img.get_height() self.fire = Fire(self) self.init = time() self.hit_mark = False def hit_demage(self): if randint(1,2)%2: self.img = white(self.img) else: self.img = red(self.img) self.hit_mark = True self.hp -= 1 self.init = time() def image_return(self): self.img = pygame.image.load(dirname(realpath(__file__))+'/assets/img/enemy/enemy'+str(self.type)+'.png').convert_alpha() def white(surface): for row in range(surface.get_height()): for column in 
range(surface.get_width()): if surface.get_at((column, row))[3] == 255: surface.set_at((column, row), (255, 255, 255)) return surface def red(surface): for row in range(surface.get_height()): for column in range(surface.get_width()): if surface.get_at((column, row))[3] == 255: surface.set_at((column, row), (255, 130, 130)) return surface class Player: def __init__(self): self.pos = 'M' self.hp = 4 self.x = 200 self.x_speed = 0 self.y = 280 self.tiles = {} self.spaw_effect = False self.spaw_effect_start = time() self.fires= [ Fire(self), Fire(self) ] path = dirname(realpath(__file__)) for sides in ['L', 'M', 'R']: for i in range(4): k = i+1 self.tiles[str(k)+sides]= (pygame.image.load(path+'/assets/img/ships/ship' + str(k) + sides + '.png')) self.load_img() self.width = self.img.get_width() self.height = self.img.get_height() def load_img(self): self.img = self.tiles[str(self.hp)+self.pos] def player_move_key(self, k, screen_size): if k[K_d]: self.x_speed += 1.4 self.pos = 'R' elif k[K_a]: self.x_speed -= 1.4 self.pos = 'L' else: self.x_speed /= 1.1 self.pos = 'M' if abs(self.x_speed)>5: if self.x_speed>0: self.x_speed = 5 else: self.x_speed = -5 self.x+=self.x_speed if self.x+self.width>screen_size[0]: self.x = screen_size[0]-self.width self.pos = 'M' if self.x < 0: self.x = 0 self.pos = 'M' class Fire: def __init__(self, obj): self.x = obj.x self.y = obj.y self.img = '' self.animation = Animation({'fire' : [4, 0.02]}, dirname(realpath(__file__))+'/assets/img/effects', 'fire', self) class Animation(): def __init__(self, sprites, path, first, obj): self.sprites = sprites self.path = path self.tile = first self.pos = 0 self.last_update = time() self.obj = obj self.obj.img = pygame.image.load(path + '/' + first + str(self.pos) + '.png') def change(self, tile, pos=0): self.tile = tile self.pos = 0 self.obj.img = pygame.image.load(self.path + '/' + tile + str(pos) + '.png') def update(self): if time()-self.last_update>self.sprites[self.tile][1]: if self.pos == 
self.sprites[self.tile][0]-1: self.pos = 0 else: self.pos += 1 self.obj.img = pygame.image.load(self.path + '/' + self.tile + str(self.pos) + '.png') self.last_update = time() class Background: def __init__(self): self.x = -25 self.y = 0 self.tile = 0 self.time = time() self.img = [] path = dirname(realpath(__file__)) for i in range(200): self.img.append(pygame.image.load(path+'/assets/img/bg/b0553b276f5049bec4808d6a012e32bc-' + str(i)+'.png')) main()
36.37123
158
0.559773
2,124
15,676
3.974576
0.093691
0.08055
0.06823
0.027719
0.41874
0.346245
0.277422
0.260602
0.219972
0.214996
0
0.02628
0.288785
15,676
431
159
36.37123
0.730918
0.008803
0
0.265985
0
0
0.101577
0.023367
0
0
0
0
0
1
0.097187
false
0.005115
0.012788
0.002558
0.16624
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7a685ed0387ff3acf55274c958313caca3731d6f
209
py
Python
ADVECTOR/enums/forcings.py
john-science/ADVECTOR
5c5ca7595c2c051f1a088b1f0e694936c3da3610
[ "MIT" ]
7
2021-09-07T02:32:00.000Z
2022-01-15T11:35:02.000Z
ADVECTOR/enums/forcings.py
TheOceanCleanupAlgorithms/ADVECT
e27ce15da6a2fcbccbe363f8c2415b0122696d1f
[ "MIT" ]
1
2021-12-24T15:16:26.000Z
2021-12-24T15:16:26.000Z
ADVECTOR/enums/forcings.py
TheOceanCleanupAlgorithms/ADVECT
e27ce15da6a2fcbccbe363f8c2415b0122696d1f
[ "MIT" ]
1
2021-12-12T15:13:52.000Z
2021-12-12T15:13:52.000Z
from enum import Enum class Forcing(Enum): """use .name for variable name, .value for human readable name""" current = "current" wind = "10-meter wind" seawater_density = "seawater density"
20.9
69
0.674641
27
209
5.185185
0.666667
0.214286
0
0
0
0
0
0
0
0
0
0.01227
0.220096
209
9
70
23.222222
0.846626
0.282297
0
0
0
0
0.25
0
0
0
0
0
0
1
0
false
0
0.2
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
4
7a68824134279f8b6dbfeea1e35c104956a0af71
16,219
py
Python
tests/test_routes.py
sternie-devops-squad/wishlists
5383dbc9afa0d3a1bb18993553986828ce8c591b
[ "Apache-2.0" ]
3
2022-02-16T23:05:07.000Z
2022-02-25T19:16:57.000Z
tests/test_routes.py
sternie-devops-squad/wishlists
5383dbc9afa0d3a1bb18993553986828ce8c591b
[ "Apache-2.0" ]
41
2022-02-23T13:08:31.000Z
2022-03-31T21:59:44.000Z
tests/test_routes.py
sternie-devops-squad/wishlists
5383dbc9afa0d3a1bb18993553986828ce8c591b
[ "Apache-2.0" ]
null
null
null
""" <your resource name> API Service Test Suite Test cases can be run with the following: nosetests -v --with-spec --spec-color coverage report -m """ import os import logging from unittest import TestCase from unittest.mock import MagicMock, patch from tests.factories import WishlistFactory, ItemFactory from service import status # HTTP Status Codes from service.models import db from service.routes import app, init_db DATABASE_URI = os.getenv( "DATABASE_URI", "postgresql://postgres:postgres@localhost:5432/postgres" ) BASE_URL = "/wishlists" CONTENT_TYPE_JSON = "application/json" ###################################################################### # T E S T C A S E S ###################################################################### class TestWishlistService(TestCase): """ Wishlist Service Tests """ @classmethod def setUpClass(cls): """ Run once before all tests """ app.config['TESTING'] = True app.config['DEBUG'] = False app.config["SQLALCHEMY_DATABASE_URI"] = DATABASE_URI app.logger.setLevel(logging.CRITICAL) init_db() @classmethod def tearDownClass(cls): """ Runs once before test suite """ pass def setUp(self): """ Runs before each test """ db.drop_all() # clean up the last tests db.create_all() # create new tables self.app = app.test_client() def tearDown(self): """ Runs once after each test case """ db.session.remove() db.drop_all() ###################################################################### # H E L P E R M E T H O D S ###################################################################### def _create_wishlists(self, count): """ Factory method to create wishlists in bulk """ wishlists = [] for _ in range(count): wishlist = WishlistFactory() resp = self.app.post( BASE_URL, json=wishlist.serialize(), content_type="application/json" ) self.assertEqual( resp.status_code, status.HTTP_201_CREATED, "Could not create test Wishlist" ) new_wishlist = resp.get_json() wishlist.id = new_wishlist["id"] wishlists.append(wishlist) return wishlists 
###################################################################### # W I S H L I S T T E S T C A S E S ###################################################################### def test_index(self): """ Test index call """ resp = self.app.get("/") self.assertEqual(resp.status_code, status.HTTP_200_OK) def test_get_wishlist_list(self): """ Get a list of Wishlists """ self._create_wishlists(5) resp = self.app.get(BASE_URL) self.assertEqual(resp.status_code, status.HTTP_200_OK) data = resp.get_json() self.assertEqual(len(data), 5) def test_get_wishlist_by_name(self): """ Get a Wishlist by Name """ wishlists = self._create_wishlists(3) resp = self.app.get( BASE_URL, query_string=f"name={wishlists[1].name}" ) self.assertEqual(resp.status_code, status.HTTP_200_OK) data = resp.get_json() self.assertEqual(data[0]["name"], wishlists[1].name) def test_get_wishlist(self): """ Get a single Wishlist """ # get the id of an wishlist wishlist = self._create_wishlists(1)[0] resp = self.app.get( f"{BASE_URL}/{wishlist.id}", content_type="application/json" ) self.assertEqual(resp.status_code, status.HTTP_200_OK) data = resp.get_json() self.assertEqual(data["name"], wishlist.name) def test_get_wishlist_not_found(self): """Get a Wishlist that is not found""" resp = self.app.get(f"{BASE_URL}/0") self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND) def test_create_wishlist(self): """ Create a new Wishlist """ wishlist = WishlistFactory() resp = self.app.post( BASE_URL, json=wishlist.serialize(), content_type="application/json" ) self.assertEqual(resp.status_code, status.HTTP_201_CREATED) # Make sure location header is set location = resp.headers.get("Location", None) self.assertIsNotNone(location) # Check the data is correct new_wishlist = resp.get_json() self.assertEqual(new_wishlist["name"], wishlist.name, "Names does not match") self.assertEqual(new_wishlist["type"], wishlist.type, "Type does not match") self.assertEqual(new_wishlist["items"], wishlist.items, "Item does not 
match") self.assertEqual(new_wishlist["user_id"], wishlist.user_id, "user_id does not match") self.assertEqual(new_wishlist["created_date"], str(wishlist.created_date), "Created Date does not match") # Check that the location header was correct by getting it resp = self.app.get(location, content_type="application/json") self.assertEqual(resp.status_code, status.HTTP_200_OK) new_wishlist = resp.get_json() self.assertEqual(new_wishlist["name"], wishlist.name, "Names does not match") self.assertEqual(new_wishlist["type"], wishlist.type, "Type does not match") self.assertEqual(new_wishlist["items"], wishlist.items, "Item does not match") self.assertEqual(new_wishlist["user_id"], wishlist.user_id, "user_id does not match") self.assertEqual(new_wishlist["created_date"], str(wishlist.created_date), "Created Date does not match") def test_update_wishlist(self): """ Update (Edit) an existing Wishlist """ # create a wishlist to update test_wishlist = WishlistFactory() resp = self.app.post( BASE_URL, json=test_wishlist.serialize(), content_type="application/json" ) self.assertEqual(resp.status_code, status.HTTP_201_CREATED) # update the wishlist new_wishlist = resp.get_json() new_wishlist["name"] = "Pets" new_wishlist_id = new_wishlist["id"] resp = self.app.put( f"{BASE_URL}/{new_wishlist_id}", json=new_wishlist, content_type="application/json", ) self.assertEqual(resp.status_code, status.HTTP_200_OK) updated_wishlist = resp.get_json() self.assertEqual(updated_wishlist["name"], "Pets") def test_update_wishlist_not_found(self): """Update a Wishlist that does not exist""" new_wishlist = WishlistFactory() resp = self.app.put( f"{BASE_URL}/0", json=new_wishlist.serialize(), content_type="application/json" ) self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND) def test_delete_wishlist(self): """ Delete an Wishlist """ # get the id of an wishlist wishlist = self._create_wishlists(1)[0] resp = self.app.delete( f"{BASE_URL}/{wishlist.id}", content_type="application/json" ) 
self.assertEqual(resp.status_code, status.HTTP_204_NO_CONTENT) # Error handler testing code below based on the Service_Accounts code example def test_bad_request(self): """ Send wrong media type """ wishlist = WishlistFactory() resp = self.app.post( BASE_URL, json={"name": "not enough data"}, content_type="application/json" ) self.assertEqual(resp.status_code, status.HTTP_400_BAD_REQUEST) def test_unsupported_media_type(self): """ Send wrong media type """ wishlist = WishlistFactory() resp = self.app.post( BASE_URL, json=wishlist.serialize(), content_type="test/html" ) self.assertEqual(resp.status_code, status.HTTP_415_UNSUPPORTED_MEDIA_TYPE) def test_method_not_allowed(self): """ Make an illegal method call """ resp = self.app.put( BASE_URL, json={"not": "today"}, content_type="application/json" ) self.assertEqual(resp.status_code, status.HTTP_405_METHOD_NOT_ALLOWED) ###################################################################### # I T E M T E S T C A S E S ###################################################################### def test_get_item_list(self): """ Get a list of Items """ # add two items to wishlist wishlist = self._create_wishlists(1)[0] item_list = ItemFactory.create_batch(2) # Create item 1 resp = self.app.post( f"{BASE_URL}/{wishlist.id}/items", json=item_list[0].serialize(), content_type="application/json" ) self.assertEqual(resp.status_code, status.HTTP_201_CREATED) # Create item 2 resp = self.app.post( f"{BASE_URL}/{wishlist.id}/items", json=item_list[1].serialize(), content_type="application/json" ) self.assertEqual(resp.status_code, status.HTTP_201_CREATED) # get the list back and make sure there are 2 resp = self.app.get( f"{BASE_URL}/{wishlist.id}/items", content_type="application/json" ) self.assertEqual(resp.status_code, status.HTTP_200_OK) data = resp.get_json() self.assertEqual(len(data), 2) def test_add_item(self): """ Add an item to a wishlist """ wishlist = self._create_wishlists(1)[0] item = ItemFactory() resp = self.app.post( 
f"{BASE_URL}/{wishlist.id}/items", json=item.serialize(), content_type="application/json" ) self.assertEqual(resp.status_code, status.HTTP_201_CREATED) data = resp.get_json() logging.debug(data) self.assertEqual(data["wishlist_id"], wishlist.id) self.assertEqual(data["name"], item.name) self.assertEqual(data["category"], item.category) self.assertEqual(data["price"], item.price) # self.assertEqual(data["in_stock"], item.in_stock) # self.assertEqual(data["purchased"], item.purchased) def test_get_item(self): """ Get an item from an wishlist """ # create a known item wishlist = self._create_wishlists(1)[0] item = ItemFactory() resp = self.app.post( f"{BASE_URL}/{wishlist.id}/items", json=item.serialize(), content_type="application/json" ) self.assertEqual(resp.status_code, status.HTTP_201_CREATED) data = resp.get_json() logging.debug(data) item_id = data["id"] # retrieve it back resp = self.app.get( f"{BASE_URL}/{wishlist.id}/items/{item_id}", content_type="application/json" ) self.assertEqual(resp.status_code, status.HTTP_200_OK) data = resp.get_json() logging.debug(data) self.assertEqual(data["wishlist_id"], wishlist.id) self.assertEqual(data["name"], item.name) self.assertEqual(data["category"], item.category) self.assertEqual(data["price"], item.price) # self.assertEqual(data["in_stock"], item.in_stock) # self.assertEqual(data["purchased"], item.purchased) def test_update_item(self): """ Update an item on an wishlist """ # create a known item wishlist = self._create_wishlists(1)[0] item = ItemFactory() resp = self.app.post( f"{BASE_URL}/{wishlist.id}/items", json=item.serialize(), content_type="application/json" ) self.assertEqual(resp.status_code, status.HTTP_201_CREATED) data = resp.get_json() logging.debug(data) item_id = data["id"] data["name"] = "XXXX" # send the update back resp = self.app.put( f"{BASE_URL}/{wishlist.id}/items/{item_id}", json=data, content_type="application/json" ) self.assertEqual(resp.status_code, status.HTTP_200_OK) # retrieve it back 
resp = self.app.get( f"{BASE_URL}/{wishlist.id}/items/{item_id}", content_type="application/json" ) self.assertEqual(resp.status_code, status.HTTP_200_OK) data = resp.get_json() logging.debug(data) self.assertEqual(data["id"], item_id) self.assertEqual(data["wishlist_id"], wishlist.id) self.assertEqual(data["name"], "XXXX") def test_delete_item(self): """ Delete an Item """ wishlist = self._create_wishlists(1)[0] item = ItemFactory() resp = self.app.post( f"{BASE_URL}/{wishlist.id}/items", json=item.serialize(), content_type="application/json" ) self.assertEqual(resp.status_code, status.HTTP_201_CREATED) data = resp.get_json() logging.debug(data) item_id = data["id"] # send delete request resp = self.app.delete( f"{BASE_URL}/{wishlist.id}/items/{item_id}", content_type="application/json" ) self.assertEqual(resp.status_code, status.HTTP_204_NO_CONTENT) # retrieve it back and make sure item is not there resp = self.app.get( f"{BASE_URL}/{wishlist.id}/items/{item_id}", content_type="application/json" ) self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND) ###################################################################### # T E S T A C T I O N S ###################################################################### def test_purchase_a_item(self): """Purchase an Item""" wishlist = self._create_wishlists(1)[0] item = ItemFactory() item.in_stock = True resp = self.app.post( f"{BASE_URL}/{wishlist.id}/items", json=item.serialize(), content_type="application/json" ) self.assertEqual(resp.status_code, status.HTTP_201_CREATED) item_data = resp.get_json() item_id = item_data["id"] logging.info(f"Created Item with id {item_id} = {item_data}") # Request to purchase a Item resp = self.app.put(f"{BASE_URL}/{wishlist.id}/items/{item_id}/purchase") self.assertEqual(resp.status_code, status.HTTP_200_OK) # Retrieve the Item and make sue it is purchased resp = self.app.get(f"{BASE_URL}/{wishlist.id}/items/{item_id}") self.assertEqual(resp.status_code, status.HTTP_200_OK) 
item_data = resp.get_json() self.assertEqual(item_data["id"], item_id) self.assertEqual(item_data["purchased"], True) # Note: FIX ME PLEASE! # def test_purchase_not_available(self): # """Purchase a Item that is not in stock""" # wishlist = self._create_wishlists(1)[0] # item = ItemFactory() # item.in_stock = False # resp = self.app.post( # f"{BASE_URL}/{wishlist.id}/items", # json=item.serialize(), # content_type="application/json" # ) # self.assertEqual(resp.status_code, status.HTTP_201_CREATED) # item_data = resp.get_json() # item_id = item_data["id"] # item_is = item_data["in_stock"] # logging.info(f"Created Item with id {item_id} = {item_data}") # logging.info(f"Item in stock {item_is}") # # Request to purchase a Item should fail # resp = self.app.put(f"{BASE_URL}/{wishlist.id}/items/{item_id}/purchase") # self.assertEqual(resp.status_code, status.HTTP_409_CONFLICT) def test_purchase_a_item_not_found(self): """Purchase a Item not found""" wishlist = self._create_wishlists(1)[0] resp = self.app.put(f"{BASE_URL}/{wishlist.id}/items/{0}/purchase") self.assertEqual(resp.status_code, status.HTTP_404_NOT_FOUND)
36.94533
113
0.580924
1,932
16,219
4.689441
0.119048
0.107616
0.040066
0.09106
0.707837
0.68245
0.656291
0.644702
0.638411
0.616225
0
0.011631
0.263148
16,219
439
114
36.94533
0.746465
0.163574
0
0.56701
0
0
0.135861
0.059163
0
0
0
0
0.206186
1
0.082474
false
0.003436
0.027491
0
0.116838
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
7a699d75b7048967367caac0285a0e4744e00927
2,168
py
Python
src/redrawing/components/debug.py
ReDrawing/redrawing
20743f0c8d64d9d2e15cefa840423c9698c74653
[ "MIT" ]
1
2021-04-20T00:00:15.000Z
2021-04-20T00:00:15.000Z
src/redrawing/components/debug.py
ReDrawing/redrawing
20743f0c8d64d9d2e15cefa840423c9698c74653
[ "MIT" ]
null
null
null
src/redrawing/components/debug.py
ReDrawing/redrawing
20743f0c8d64d9d2e15cefa840423c9698c74653
[ "MIT" ]
1
2021-07-18T03:57:01.000Z
2021-07-18T03:57:01.000Z
import time from .stage import Stage class Debug_Stage(Stage): '''! Stage for debugging, print messages in setup and process ''' configs_default={"name":"debug_stage", "blank_line":False, "wait_key": False, "wait_seconds": 0, "context_debug":"context"} def __init__(self, configs={}): '''! Constructor @param configs: name: Stage name, will be printed in the screen (default: "debug_stage") blank_line: Print a blank line in the screen (default: False) wait_key: Wait for a key to be pressed after print (default: False) wait_seconds: Wait for a number of seconds after print (default: 0, no wait) context_debug: Word that will be placed in context, can be used for debbunging substages (default: "context") ''' super().__init__(configs) def setup(self): '''! Intiialize the stage Print the name of the stage, and according to the settings, wait for a key to be pressed or print a blank line ''' print(self._configs["name"], "setup") if self._configs["blank_line"]: print() if self._configs["wait_key"]: input("Type anything to continue: ") if self._configs["wait_seconds"] != 0: time.sleep(self._configs["wait_seconds"]) self.set_context("context_debug", self._configs["context_debug"]) def process(self, context={}): '''! Prints the name of the stage, and according to the settings, wait for a key to be pressed or print a blank line If "context_debug" key is in the context, print the value. ''' print(self._configs["name"], "process") if "context_debug" in context: print(context["context_debug"]) if self._configs["blank_line"]: print() if self._configs["wait_key"]: input("Type anything to continue: ") if self._configs["wait_seconds"] != 0: time.sleep(self._configs["wait_seconds"])
32.358209
127
0.579336
264
2,168
4.587121
0.242424
0.109001
0.06441
0.056152
0.366639
0.366639
0.366639
0.348472
0.348472
0.348472
0
0.002695
0.315498
2,168
66
128
32.848485
0.813342
0.374077
0
0.48
0
0
0.24144
0
0
0
0
0
0
1
0.12
false
0
0.08
0
0.28
0.2
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7a69a21aa05fe2df5e6fa114f41410a45c4bd7d5
6,659
py
Python
bin/build.py
mgijax/mgv-data
99a2f5a8f56d7ebb6a014431d8a20df721df58e2
[ "MIT" ]
1
2022-03-04T06:20:33.000Z
2022-03-04T06:20:33.000Z
bin/build.py
JoelRichardson/mgv-data
99a2f5a8f56d7ebb6a014431d8a20df721df58e2
[ "MIT" ]
1
2021-04-06T13:17:47.000Z
2021-04-06T13:17:47.000Z
bin/build.py
JoelRichardson/mgv-data
99a2f5a8f56d7ebb6a014431d8a20df721df58e2
[ "MIT" ]
1
2022-03-04T11:49:53.000Z
2022-03-04T11:49:53.000Z
# # build.py # # Builds a back end for MGV based on a config file. # import os import sys import time import json from argparse import ArgumentParser import re from urllib.request import urlopen import gzip from lib.Config import ConfigFileReader from lib.Downloader import downloaderNameMap from lib.Importer import importerNameMap from lib.Deployer import Deployer ### ------------------------------------------------------------------ class MgvDataBuilder : VALID_TYPES = ["assembly", "models", "orthologs"] VALID_PHASES = ["download", "import", "deploy"] def __init__ (self) : self.logfile = sys.stderr self.genome_re = None def log(self, s, newline='\n', timestamp=True) : if timestamp: ts = time.asctime(time.localtime(time.time())) self.logfile.write(ts + " ") self.logfile.write(str(s)) self.logfile.write(newline) self.logfile.flush() def getArgs (self) : parser = ArgumentParser("Builds the backend for MGV based on a config file.") parser.add_argument( "-b", "--build-config", required=True, help = "Build config file. Required.") parser.add_argument( "-g", "--genome", default = ".*", help = "Which genomes to build. By default, builds all genomes. Specify a regex pattern used to match the genome names.") parser.add_argument( "-p", "--phase", choices = self.VALID_PHASES, action = "append", default = [], help = "Which phase to run. One of: %(choices)s. If not specified, runs all phases.") parser.add_argument( "-t", "--type", choices = self.VALID_TYPES, default = None, help = "Which datatype to process. One of: %(choices)s. If not specified, processes all types.") parser.add_argument( "-l", "--log-file", default = None, help = "Where to write log messages. By default, logs to stderr.") parser.add_argument( "-d", "--downloads-dir", default = "./downloads", help = "Where downloaded files go. Default = %(default)s") parser.add_argument( "-o", "--output-dir", default = "./output", help = "Where the output files go. 
Default = %(default)s") parser.add_argument( "-w", "--web-dir", help = "Web accessible directory containing data generated files. Default = same as --output-dir.") parser.add_argument( "--cgi-dir", help = "Place to put the CGI scripts used by MGV Default = same as --web-dir.") parser.add_argument( "--snapshot-file", help = "Alliance release snapshot file to use in lieu of querying API. (default = get snapshot from Alliance API)") parser.add_argument( "-D", "--debug", action = "store_true", default = False, help = "Run in debug mode.") args = parser.parse_args() args.downloads_dir = os.path.abspath(args.downloads_dir) args.output_dir = os.path.abspath(args.output_dir) args.web_dir = os.path.abspath(args.web_dir) if args.web_dir else args.output_dir args.cgi_dir = os.path.abspath(args.cgi_dir) if args.cgi_dir else args.web_dir if len(args.phase) == 0: args.phase = self.VALID_PHASES return args def deepCopy (self, obj) : return json.loads(json.dumps(obj)) def ensureDirectory (self, d, empty = False): if self.args.debug: return if not os.path.exists(d): os.makedirs(d) if empty: cmd = "rm -fr %s/*" % d self.log(cmd) os.system(cmd) def process(self, g) : self.log("Processing cfg: " + str(g)) gn = g["name"] for t in self.VALID_TYPES: if self.args.type in [t, None] : if not t in g: continue # if type(g[t]) is str and g[t].startswith("="): if "deploy" in self.args.phase: gg = self.getCfg(g[t][1:]) tgtPath = os.path.join(self.args.web_dir, gg["name"], t) lnkPath = os.path.join(self.args.web_dir, g["name"], t) cmd = 'ln -s %s %s' % (tgtPath, lnkPath) self.log("Creating symlink: " + cmd) continue sname = g[t].get("source","UrlDownloader") cls = downloaderNameMap[sname] downloader = cls(self, g, t, self.args.debug) # Download data if "download" in self.args.phase: downloader.go() # Import data if "import" in self.args.phase: icls = importerNameMap[t] importer = icls(self, t, g, self.args.output_dir, self.args.debug) importer.go() # Deploy if "deploy" in self.args.phase: 
deployer = Deployer(self, t, g, self.args.output_dir, self.args.web_dir, self.args.cgi_dir, debug=self.args.debug) deployer.go() def getCfg (self, name = None) : if name is None: return self.cfg else: return self.name2cfg.get(name, None) def main (self) : # self.args = self.getArgs() if self.args.log_file: self.logfile = open(self.args.log_file, 'w') self.log("\n\nThis is the MGV back end data builder.") self.log("Arguments: " + str(self.args)) self.genome_re = re.compile('^' + self.args.genome + '$') # self.cfg = ConfigFileReader(self.args.build_config).read() if self.args.debug: self.log("Running in DEBUG mode. No commands will be executed.") # self.name2cfg = {} for g in self.cfg: self.name2cfg[g["name"]] = g # for g in self.cfg: if g.get("disabled", False) : continue if self.genome_re.match(g["name"]): self.log("Processing " + g["name"]) self.process(g) else: # self.log("Skipping " + g["name"]) pass self.log("Builder exiting.") self.logfile.close() ### ------------------------------------------------------------------ if __name__ == "__main__": MgvDataBuilder().main()
37.410112
134
0.522301
772
6,659
4.433938
0.257772
0.051417
0.05463
0.018697
0.129127
0.09816
0.084721
0.0409
0.018113
0
0
0.001136
0.33894
6,659
177
135
37.621469
0.776465
0.038895
0
0.162162
0
0.027027
0.199561
0
0
0
0
0
0
1
0.054054
false
0.006757
0.114865
0.006757
0.222973
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7a69b967fd4fe272f88237f204dac1419991d4fb
117
py
Python
server/learning/login/urls.py
kantanand/insmartapps
4ab54bb41101e43b5edaac9795509584f01c5c92
[ "MIT" ]
3
2016-05-01T18:39:08.000Z
2019-02-19T11:55:40.000Z
server/learning/login/urls.py
kantanand/insmartapps
4ab54bb41101e43b5edaac9795509584f01c5c92
[ "MIT" ]
1
2016-04-28T16:41:24.000Z
2016-06-11T19:11:14.000Z
server/learning/login/urls.py
kantanand/insmartapps
4ab54bb41101e43b5edaac9795509584f01c5c92
[ "MIT" ]
null
null
null
from django.conf.urls import url from login import views urlpatterns = [ url(r'^$', views.index, name='home'), ]
19.5
41
0.683761
17
117
4.705882
0.764706
0
0
0
0
0
0
0
0
0
0
0
0.162393
117
6
42
19.5
0.816327
0
0
0
0
0
0.050847
0
0
0
0
0
0
1
0
false
0
0.4
0
0.4
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
3
7a6c328d04079d34f9c72f504438d1ac60d7f5c9
1,225
py
Python
streamparse/__init__.py
mdemaster/mdemaster_w205_exercise2
0030d78d674840f848250bff970824a1d49587fb
[ "Apache-2.0" ]
null
null
null
streamparse/__init__.py
mdemaster/mdemaster_w205_exercise2
0030d78d674840f848250bff970824a1d49587fb
[ "Apache-2.0" ]
1
2020-06-25T07:11:18.000Z
2020-06-25T07:11:18.000Z
streamparse/__init__.py
mdemaster/mdemaster_w205_exercise2
0030d78d674840f848250bff970824a1d49587fb
[ "Apache-2.0" ]
null
null
null
''' This package makes it easier to work with Storm and Python. :organization: Parsely ''' from __future__ import absolute_import, print_function, unicode_literals import streamparse.bolt import streamparse.cmdln import streamparse.component import streamparse.contextmanagers import streamparse.debug import streamparse.decorators import streamparse.dsl import streamparse.spout import streamparse.storm from streamparse.version import __version__, VERSION __all__ = [ 'bolt', 'cmdln', 'component', 'contextmanagers', 'debug', 'decorators', 'dsl', 'spout', 'storm', '__version__', 'VERSION', ] __license__ = """ Copyright 2014-2015 Parsely, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. """
25
72
0.760816
159
1,225
5.716981
0.566038
0.168317
0.028603
0.035204
0
0
0
0
0
0
0
0.011719
0.164082
1,225
48
73
25.520833
0.875977
0.067755
0
0
0
0
0.563492
0
0
0
0
0
0
1
0
false
0
0.305556
0
0.305556
0.027778
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
1
7a6c43158e65f1ef63580d8b4dd6ab79de3bce35
10,245
py
Python
plane_waves/polarization_animation.py
raptor/ECEn360_Winter2016
65076f19c561ee51c8757720694d1ef00f829bdb
[ "MIT" ]
null
null
null
plane_waves/polarization_animation.py
raptor/ECEn360_Winter2016
65076f19c561ee51c8757720694d1ef00f829bdb
[ "MIT" ]
1
2019-03-03T00:54:52.000Z
2019-03-04T18:06:38.000Z
plane_waves/polarization_animation.py
raptor/ECEn360_Winter2016
65076f19c561ee51c8757720694d1ef00f829bdb
[ "MIT" ]
1
2019-03-02T02:50:24.000Z
2019-03-02T02:50:24.000Z
#---------------------------------------------------------------------- # # 9/25/18 - Update to use Python 3.6, PyQt5 and pyqtgraph 0.10.0 # G. Nordin #---------------------------------------------------------------------- from PyQt5 import QtGui, QtCore import pyqtgraph as pg import pyqtgraph.opengl as gl import numpy as np import sys ## Always start by initializing Qt (only once per application) app = QtGui.QApplication([]) ## Define a top-level widget to hold everything w = QtGui.QWidget() w.resize(1000,600) w.setWindowTitle('Polarization Visualization') ## Create widgets to be placed inside heading_text = QtGui.QLabel('Polarization Angles ' + u"\u03C8" + ' and ' + u"\u03B4") # Box with sliders sliderbox = QtGui.QGroupBox() hBoxLayout = QtGui.QHBoxLayout() psi_slider_layout = QtGui.QVBoxLayout() delta_slider_layout = QtGui.QVBoxLayout() # psi slider psi_label = QtGui.QLabel(u"\u03C8") psi_slider = QtGui.QSlider() psi_slider.setOrientation(QtCore.Qt.Vertical) psi_slider.setMinimum(0) psi_slider.setMaximum(90) psi_slider.setValue(0) psi_value = QtGui.QLabel(str(psi_slider.value()) + u"\u00b0") psi_slider_layout.addWidget(psi_label) psi_slider_layout.addWidget(psi_slider) psi_slider_layout.addWidget(psi_value) def set_psi_value(value): psi_value.setText(str(value) + u"\u00b0") global psi_deg psi_deg = value psi_slider.valueChanged.connect(set_psi_value) # delta slider delta_label = QtGui.QLabel(u"\u03B4") delta_slider = QtGui.QSlider() delta_slider.setOrientation(QtCore.Qt.Vertical) delta_slider.setMinimum(-180) delta_slider.setMaximum(180) delta_slider.setValue(0) delta_value = QtGui.QLabel(str(delta_slider.value()) + u"\u00b0") delta_slider_layout.addWidget(delta_label) delta_slider_layout.addWidget(delta_slider) delta_slider_layout.addWidget(delta_value) def set_delta_value(value): delta_value.setText(str(value) + u"\u00b0") global delta_deg delta_deg = value delta_slider.valueChanged.connect(set_delta_value) # Set layout of box containing sliders 
hBoxLayout.addItem(psi_slider_layout) hBoxLayout.addItem(delta_slider_layout) sliderbox.setLayout(hBoxLayout) # Box with options optionbox = QtGui.QGroupBox() vBoxLayout = QtGui.QVBoxLayout() # Options hfield_checkbox = QtGui.QCheckBox("Show H-field") # Add to layout vBoxLayout.addWidget(hfield_checkbox) # Add to box optionbox.setLayout(vBoxLayout) # Create openGL view widget & add a grid wGL = gl.GLViewWidget() wGL.setSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding) wGL.opts['distance'] = 5 g = gl.GLGridItem() wGL.addItem(g) ## Create a grid layout to manage the widgets size and position layout = QtGui.QGridLayout() w.setLayout(layout) layout.setColumnStretch (1, 2) ## Add widgets to the layout in their proper positions layout.addWidget(heading_text, 0, 0) # heading text goes in upper-left layout.addWidget(sliderbox, 1, 0) # slider box goes underneath heading text layout.addWidget(optionbox, 2, 0) # option box goes underneath slider box layout.addWidget(wGL, 0, 1, 3, 1) # wGL goes on right side, spanning 3 rows ## Display the widget as a new window w.show() ##------------ Set up polarization animation ------------## degtorad = np.pi/180.0 # Function to create new array from old where new array is formatted to prepare to # draw lines perpendicular from z-axis to curve defined by input array def preptomakelines(pts): pts2 = np.zeros(shape=(2*pts.shape[0], pts.shape[1])) for i in range(pts.shape[0]): pts2[2*i,2] = pts[i,2] pts2[2*i + 1,:] = pts[i,:] return pts2 psi_deg = float(psi_slider.value()) delta_deg = float(delta_slider.value()) # Calculate sinusoidal electric field for arbitrary polarization def efield_arbpol(t,z,amplitude,psi_rad,delta_rad): x = amplitude * np.cos(psi_rad) * np.cos(2*np.pi*(t-z)) y = amplitude * np.sin(psi_rad) * np.cos(2*np.pi*(t-z) + delta_rad) z = z return x, y, z # Prep coordinate rotations for electric & magnetic fields to go from calculation # coordinates to pyqtgraph plotting coordinates temp2Darray = [[-1, 0, 0], 
[0, 0, 1], [0, 1, 0]] rot_efield_coord = np.array(temp2Darray) # Calculate electric & magnetic field arrays. Also make arrays to define lines. amplitude = 1.0 z = np.linspace(-10, 10, 500) x, y, z = efield_arbpol(0.0,z,amplitude,psi_deg*degtorad,delta_deg*degtorad) # E-field pts_e = np.vstack([x,y,z]).transpose() pts_e_lines = preptomakelines(pts_e) pts_e = np.dot(pts_e, rot_efield_coord) pts_e_lines = np.dot(pts_e_lines, rot_efield_coord) z0 = np.zeros(len(z)) pts_e_z0 = np.vstack([x,y,z0]).transpose() pts_e_z0 = np.dot(pts_e_z0, rot_efield_coord) pts_e_arrow = np.array( [[0.0, 0.0, 0.0], pts_e_z0[int(len(pts_e_z0)/2.0)]] ) # H-field pts_h = np.vstack([-y,x,z]).transpose() # Orthogonal to E pts_h_lines = preptomakelines(pts_h) pts_h = np.dot(pts_h, rot_efield_coord) pts_h_lines = np.dot(pts_h_lines, rot_efield_coord) pts_h_z0 = np.vstack([-y,x,z0]).transpose() pts_h_z0 = np.dot(pts_h_z0, rot_efield_coord) pts_h_arrow = np.array( [[0.0, 0.0, 0.0], pts_h_z0[int(len(pts_h_z0)/2.0)]] ) # Get ready to make plots efield_color = (1, 0, 0, 1) efield_color_z0 = (1, 1, 1, 1) efield_color_arrow = (1, 0.67, 0.67, 1) hfield_color = (0, 0, 1, 1) hfield_color_z0 = (1, 1, 1, 1) hfield_color_arrow = (0.67, 0.67, 1, 1) linewidth = 4.0 linewidth2Dpol = 2.0 linewidth2Defieldvector = 10.0 # Make plots plt_e = gl.GLLinePlotItem(pos=pts_e, mode='line_strip', color=efield_color, width=linewidth, antialias=True) wGL.addItem(plt_e) #plt_e_lines = gl.GLLinePlotItem(pos=pts_e_lines, mode='lines', color=efield_color, width=linewidth, antialias=True) #wGL.addItem(plt_e_lines) plt_e_z0 = gl.GLLinePlotItem(pos=pts_e_z0, mode='line_strip', color=efield_color_z0, width=linewidth2Dpol, antialias=True) wGL.addItem(plt_e_z0) plt_e_arrow = gl.GLLinePlotItem(pos=pts_e_arrow, mode='line_strip', color=efield_color_arrow, width=linewidth2Defieldvector, antialias=True) wGL.addItem(plt_e_arrow) plt_h = gl.GLLinePlotItem(pos=pts_h, mode='line_strip', color=hfield_color, width=linewidth, antialias=True) 
wGL.addItem(plt_h) #plt_h_lines = gl.GLLinePlotItem(pos=pts_h_lines, mode='lines', color=hfield_color, width=linewidth, antialias=True) #wGL.addItem(plt_h_lines) plt_h_z0 = gl.GLLinePlotItem(pos=pts_h_z0, mode='line_strip', color=hfield_color_z0, width=linewidth2Dpol, antialias=True) wGL.addItem(plt_h_z0) plt_h_arrow = gl.GLLinePlotItem(pos=pts_h_arrow, mode='line_strip', color=hfield_color_arrow, width=linewidth2Defieldvector, antialias=True) wGL.addItem(plt_h_arrow) # Start with H-field items as invisible plt_h.setVisible(False) #plt_h_lines.setVisible(False) plt_h_z0.setVisible(False) plt_h_arrow.setVisible(False) # Add lines to visually define axes x_length = 1.1 y_length = 1.1 z_length = 10 linewidthaxis = 1.0 axis_color = (32, 32, 32, 40) ## make z-axis zaxis = np.linspace(-z_length,z_length,10) x_zaxis = np.zeros(10) y_zaxis = np.zeros(10) pts_zaxis = np.vstack([x_zaxis,zaxis,y_zaxis]).transpose() plt_zaxis = gl.GLLinePlotItem(pos=pts_zaxis, color=axis_color, width=linewidthaxis, antialias=True) #wGL.addItem(plt_zaxis) ## make y-axis yaxis = np.linspace(-y_length,y_length,10) x_yaxis = np.zeros(10) z_yaxis = np.zeros(10) pts_yaxis = np.vstack([yaxis,z_yaxis,x_yaxis]).transpose() plt_yaxis = gl.GLLinePlotItem(pos=pts_yaxis, color=axis_color, width=linewidthaxis, antialias=True) wGL.addItem(plt_yaxis) ## make x-axis xaxis = np.linspace(-x_length,x_length,10) y_xaxis = np.zeros(10) z_xaxis = np.zeros(10) pts_xaxis = np.vstack([y_xaxis,z_xaxis,xaxis]).transpose() plt_xaxis = gl.GLLinePlotItem(pos=pts_xaxis, color=axis_color, width=linewidthaxis, antialias=True) wGL.addItem(plt_xaxis) # make image for x-y plane image_shape = (2,2) uniform_values = np.ones(image_shape, dtype=np.int) * 255 print(uniform_values) uniform_image_transparent = pg.makeARGB(uniform_values)[0] uniform_image_transparent[:,:,:] = 255 uniform_image_transparent[:,:,3] = 80 print(uniform_image_transparent) v1 = gl.GLImageItem(uniform_image_transparent) v1.translate(-image_shape[0]/2., 
-image_shape[1]/2., 0) v1.rotate(90, 1,0,0) wGL.addItem(v1) # Set up some animation parameters frametime = 50 # frame refresh time in ms velocity = 1./frametime counter = 0 # Function to update scene for each frame def update(): global z, z0, velocity, counter, amplitude global plt_e, rot_efield_coord, plt_e_z0, plt_e_arrow #, plt_e_lines global plt_h, plt_h_z0, plt_h_arrow #, plt_h_lines global psi_deg, delta_deg, degtorad counter +=1 time = float(counter)/frametime % 1 x, y, z = efield_arbpol(time,z,amplitude,psi_deg*degtorad,delta_deg*degtorad) pts_e = np.vstack([x,y,z]).transpose() pts_e_lines = preptomakelines(pts_e) pts_e = np.dot(pts_e, rot_efield_coord) #pts_e_lines = np.dot(pts_e_lines, rot_efield_coord) plt_e.setData(pos=pts_e) #plt_e_lines.setData(pos=pts_e_lines) pts_e_z0 = np.vstack([x,y,z0]).transpose() pts_e_z0 = np.dot(pts_e_z0, rot_efield_coord) plt_e_z0.setData(pos=pts_e_z0) pts_e_arrow = np.array( [[0.0, 0.0, 0.0], pts_e_z0[int(len(pts_e_z0)/2.0)]] ) plt_e_arrow.setData(pos=pts_e_arrow) pts_h = np.vstack([-y,x,z]).transpose() pts_h_lines = preptomakelines(pts_h) pts_h = np.dot(pts_h, rot_efield_coord) #pts_h_lines = np.dot(pts_h_lines, rot_efield_coord) plt_h.setData(pos=pts_h) #plt_h_lines.setData(pos=pts_h_lines) pts_h_z0 = np.vstack([-y,x,z0]).transpose() pts_h_z0 = np.dot(pts_h_z0, rot_efield_coord) plt_h_z0.setData(pos=pts_h_z0) pts_h_arrow = np.array( [[0.0, 0.0, 0.0], pts_h_z0[int(len(pts_h_z0)/2.0)]] ) plt_h_arrow.setData(pos=pts_h_arrow) # Poor man's state updating if hfield_checkbox.isChecked(): plt_h.setVisible(True) #plt_h_lines.setVisible(True) plt_h_z0.setVisible(True) plt_h_arrow.setVisible(True) else: plt_h.setVisible(False) #plt_h_lines.setVisible(False) plt_h_z0.setVisible(False) plt_h_arrow.setVisible(False) # Set up timer for animation timer = QtCore.QTimer() timer.timeout.connect(update) timer.start(50) ## Start the Qt event loop app.exec_()
35.085616
140
0.730112
1,682
10,245
4.218193
0.171225
0.019168
0.007611
0.007329
0.364764
0.292319
0.261311
0.25229
0.234249
0.196476
0
0.033576
0.127867
10,245
291
141
35.206186
0.760492
0.219815
0
0.150754
0
0
0.022607
0
0
0
0
0
0
1
0.025126
false
0
0.025126
0
0.060302
0.01005
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7a6d0f5efa3463ae20d88607a8d5407e7e5f2f04
4,102
py
Python
FEMpy/tests/unit/test_assemblers.py
floydie7/FEMpy
50e11b88dc249ff7c599472b455b07b04df1afd7
[ "MIT" ]
null
null
null
FEMpy/tests/unit/test_assemblers.py
floydie7/FEMpy
50e11b88dc249ff7c599472b455b07b04df1afd7
[ "MIT" ]
null
null
null
FEMpy/tests/unit/test_assemblers.py
floydie7/FEMpy
50e11b88dc249ff7c599472b455b07b04df1afd7
[ "MIT" ]
1
2022-01-22T06:39:38.000Z
2022-01-22T06:39:38.000Z
import numpy as np from FEMpy import Mesh, FEBasis, Assemblers mesh_1D_linear = Mesh.Interval1D(0, 1, 1/2, 'linear') basis_1D_linear = FEBasis.IntervalBasis1D('linear') mesh_1D_quadratic = Mesh.Interval1D(0, 1, 1/2, 'quadratic') basis_1D_quadratic = FEBasis.IntervalBasis1D('quadratic') mesh_2D_triangular_linear = Mesh.TriangularMesh2D(0, 1, 0, 1, 1/2, 1/2, 'linear') basis_2D__triangular_linear = FEBasis.TriangularBasis2D('linear') def coefficient_or_source_function(x): return 1 def test_matrix_assembly_1d_linear(): matrix = Assemblers.assemble_matrix(coefficient_or_source_function, mesh_1D_linear, basis_1D_linear, basis_1D_linear, derivative_order_trial=1, derivative_order_test=1) assert np.allclose(matrix.toarray(), np.array([[2., -2., 0.], [-2., 4., -2.], [0., -2., 2.]])) def test_matrix_assembly_1d_quadratic(): matrix = Assemblers.assemble_matrix(coefficient_or_source_function, mesh_1D_quadratic, basis_1D_quadratic, basis_1D_quadratic, derivative_order_trial=1, derivative_order_test=1) assert np.allclose(matrix.toarray(), np.array([[4.6667, -5.3333, 0.6667, 0., 0.], [-5.3333, 10.6667, -5.3333, 0., 0.], [0.6667, -5.3333, 9.3333, -5.3333, 0.6667], [0., 0., -5.3333, 10.6667, -5.3333], [0., 0., 0.6667, -5.3333, 4.6667]]), rtol=1e-4, atol=1e-7) def test_matrix_assembly_2d_linear(): matrix = Assemblers.assemble_matrix(coefficient_or_source_function, mesh_2D_triangular_linear, basis_2D__triangular_linear, basis_2D__triangular_linear, derivative_order_trial=(1, 0), derivative_order_test=(1, 0)) assert np.allclose(matrix.toarray(), np.array([[0.5, 0., 0., -0.5, 0., 0., 0., 0., 0.], [0., 1., 0., 0., -1., 0., 0., 0., 0.], [0., 0., 0.5, 0., 0., -0.5, 0., 0., 0.], [-0.5, 0., 0., 1., 0., 0., -0.5, 0., 0.], [0., -1., 0., 0., 2., 0., 0., -1., 0.], [0., 0., -0.5, 0., 0., 1., 0., 0., -0.5], [0., 0., 0., -0.5, 0., 0., 0.5, 0., 0.], [0., 0., 0., 0., -1., 0., 0., 1., 0.], [0., 0., 0., 0., 0., -0.5, 0., 0., 0.5]])) # test_matrix_assembly_2d_quadratic omitted because the matrix is too 
large to type by hand. def test_vector_assembly_1d_linear(): vector = Assemblers.assemble_vector(coefficient_or_source_function, mesh_1D_linear, basis_1D_linear, derivative_order_test=0) assert np.allclose(vector, np.array([0.25, 0.5, 0.25])) def test_vector_assembly_1d_quadratic(): vector = Assemblers.assemble_vector(coefficient_or_source_function, mesh_1D_quadratic, basis_1D_quadratic, derivative_order_test=0) assert np.allclose(vector, np.array([0.0833, 0.3333, 0.1667, 0.3333, 0.0833]), rtol=1e-3, atol=1e-6) def test_vector_assembly_2d_linear(): vector = Assemblers.assemble_vector(coefficient_or_source_function, mesh_2D_triangular_linear, basis_2D__triangular_linear, derivative_order_test=(0,0)) assert np.allclose(vector, np.array([0.0417, 0.1250, 0.0833, 0.1250, 0.25, 0.1250, 0.0833, 0.1250, 0.0417]), rtol=1e-3, atol=1e-6) # test_vector_assembly_2d_quadratic omitted because the vector is too large to type by hand.
53.973684
112
0.515358
512
4,102
3.875
0.132813
0.058468
0.049899
0.034274
0.749496
0.682964
0.612903
0.540323
0.524698
0.497984
0
0.131401
0.352511
4,102
75
113
54.693333
0.615587
0.044125
0
0.038462
0
0
0.01072
0
0
0
0
0
0.115385
1
0.134615
false
0
0.038462
0.019231
0.192308
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7a6de493fc92b2d2c1df09e619cb6257aedb5b88
101
py
Python
predict_emotions.py
fahsan/EmotiFind
aa3071d296cb61a91eea5e0139a9cbeb82b7d14b
[ "MIT" ]
null
null
null
predict_emotions.py
fahsan/EmotiFind
aa3071d296cb61a91eea5e0139a9cbeb82b7d14b
[ "MIT" ]
null
null
null
predict_emotions.py
fahsan/EmotiFind
aa3071d296cb61a91eea5e0139a9cbeb82b7d14b
[ "MIT" ]
1
2018-05-12T22:54:49.000Z
2018-05-12T22:54:49.000Z
#Takes as input a numpy array of 3 parameters per article and a output array of emotions per text.
33.666667
99
0.772277
19
101
4.105263
0.789474
0.179487
0
0
0
0
0
0
0
0
0
0.0125
0.207921
101
2
100
50.5
0.9625
0.960396
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
7a6f7d87a2d1fb4721a02d6017c7510a78a70218
7,420
py
Python
fonts/terminus-font-4.49.1/bin/bdf.py
xfnw/yaft
c57e8f3014aa5cf743ca0855e543dbafc2e0db22
[ "MIT" ]
null
null
null
fonts/terminus-font-4.49.1/bin/bdf.py
xfnw/yaft
c57e8f3014aa5cf743ca0855e543dbafc2e0db22
[ "MIT" ]
null
null
null
fonts/terminus-font-4.49.1/bin/bdf.py
xfnw/yaft
c57e8f3014aa5cf743ca0855e543dbafc2e0db22
[ "MIT" ]
null
null
null
# # Copyright (C) 2017-2020 Dimitar Toshkov Zhekov <dimitar.zhekov@gmail.com> # # This program is free software; you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by the Free # Software Foundation; either version 2 of the License, or (at your option) # any later version. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY # or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License # for more details. # # You should have received a copy of the GNU General Public License along # with this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # import re import codecs from collections import OrderedDict from enum import IntEnum, unique import fnutil # -- Width -- DPARSE_LIMIT = 512 SPARSE_LIMIT = 32000 class Width: def __init__(self, x, y): self.x = x self.y = y @staticmethod def parse(name, value, limit): words = fnutil.split_words(name, value, 2) return Width(fnutil.parse_dec(name + '.x', words[0], -limit, limit), fnutil.parse_dec(name + '.y', words[1], -limit, limit)) @staticmethod def parse_s(name, value): return Width.parse(name, value, SPARSE_LIMIT) @staticmethod def parse_d(name, value): return Width.parse(name, value, DPARSE_LIMIT) def __str__(self): return '%d %d' % (self.x, self.y) # -- BXX -- class BBX: def __init__(self, width, height, xoff, yoff): self.width = width self.height = height self.xoff = xoff self.yoff = yoff @staticmethod def parse(name, value): words = fnutil.split_words(name, value, 4) return BBX(fnutil.parse_dec('width', words[0], 1, DPARSE_LIMIT), fnutil.parse_dec('height', words[1], 1, DPARSE_LIMIT), fnutil.parse_dec('bbxoff', words[2], -DPARSE_LIMIT, DPARSE_LIMIT), fnutil.parse_dec('bbyoff', words[3], -DPARSE_LIMIT, DPARSE_LIMIT)) def row_size(self): return (self.width + 7) >> 3 def __str__(self): return 
'%d %d %d %d' % (self.width, self.height, self.xoff, self.yoff) # -- Props -- def skip_comments(line): return None if line[:7] == b'COMMENT' else line class Props(OrderedDict): def __iter__(self): return self.items().__iter__() def read(self, input, name, callback=None): return self.parse(input.read_lines(skip_comments), name, callback) def parse(self, line, name, callback=None): if not line or not line.startswith(bytes(name, 'ascii')): raise Exception(name + ' expected') value = line[len(name):].lstrip() self[name] = value return value if callback is None else callback(name, value) def set(self, name, value): self[name] = value if isinstance(value, (bytes, bytearray)) else bytes(str(value), 'ascii') # -- Base -- class Base: def __init__(self): self.props = Props() self.bbx = None # -- Char HEX_BYTES = (48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 65, 66, 67, 68, 69, 70) class Char(Base): def __init__(self): Base.__init__(self) self.code = -1 self.swidth = None self.dwidth = None self.data = None def bitmap(self): bitmap = '' row_size = self.bbx.row_size() for index in range(0, len(self.data), row_size): bitmap += self.data[index : index + row_size].hex() + '\n' return bytes(bitmap, 'ascii').upper() def _read(self, input): # HEADER self.props.read(input, 'STARTCHAR') self.code = self.props.read(input, 'ENCODING', fnutil.parse_dec) self.swidth = self.props.read(input, 'SWIDTH', Width.parse_s) self.dwidth = self.props.read(input, 'DWIDTH', Width.parse_d) self.bbx = self.props.read(input, 'BBX', BBX.parse) line = input.read_lines(skip_comments) if line and line.startswith(b'ATTRIBUTES'): self.props.parse(line, 'ATTRIBUTES') line = input.read_lines(skip_comments) # BITMAP if self.props.parse(line, 'BITMAP'): raise Exception('BITMAP expected') row_len = self.bbx.row_size() * 2 self.data = bytearray() for _ in range(0, self.bbx.height): line = input.read_lines(skip_comments) if not line: raise Exception('bitmap data expected') if len(line) == row_len: self.data += 
codecs.decode(line, 'hex') else: raise Exception('invalid bitmap length') # FINAL if input.read_lines(skip_comments) != b'ENDCHAR': raise Exception('ENDCHAR expected') return self @staticmethod def read(input): return Char()._read(input) # pylint: disable=protected-access def write(self, output): for [name, value] in self.props: output.write_prop(name, value) output.write_line(self.bitmap() + b'ENDCHAR') # -- Font -- @unique class XLFD(IntEnum): FOUNDRY = 1 FAMILY_NAME = 2 WEIGHT_NAME = 3 SLANT = 4 SETWIDTH_NAME = 5 ADD_STYLE_NAME = 6 PIXEL_SIZE = 7 POINT_SIZE = 8 RESOLUTION_X = 9 RESOLUTION_Y = 10 SPACING = 11 AVERAGE_WIDTH = 12 CHARSET_REGISTRY = 13 CHARSET_ENCODING = 14 CHARS_MAX = 65535 class Font(Base): def __init__(self): Base.__init__(self) self.xlfd = [] self.chars = [] self.default_code = -1 @property def bold(self): return b'bold' in self.xlfd[XLFD.WEIGHT_NAME].lower() @property def italic(self): return self.xlfd[XLFD.SLANT] in [b'I', b'O'] @property def proportional(self): return self.xlfd[XLFD.SPACING] == b'P' def _read(self, input): # HEADER line = input.read_line() if self.props.parse(line, 'STARTFONT') != b'2.1': raise Exception('STARTFONT 2.1 expected') self.xlfd = self.props.read(input, 'FONT', lambda name, value: value.split(b'-', 15)) if len(self.xlfd) != 15 or self.xlfd[0] != b'': raise Exception('non-XLFD font names are not supported') self.props.read(input, 'SIZE') self.bbx = self.props.read(input, 'FONTBOUNDINGBOX', BBX.parse) line = input.read_lines(skip_comments) if line and line.startswith(b'STARTPROPERTIES'): num_props = self.props.parse(line, 'STARTPROPERTIES', fnutil.parse_dec) for _ in range(0, num_props): line = input.read_lines(skip_comments) if line is None: raise Exception('property expected') match = re.fullmatch(br'(\w+)\s+([-\d"].*)', line) if not match: raise Exception('invalid property format') name = str(match.group(1), 'ascii') value = match.group(2) if self.props.get(name) is not None: raise Exception('duplicate property') 
if name == 'DEFAULT_CHAR': self.default_code = fnutil.parse_dec(name, value) self.props[name] = value if self.props.read(input, 'ENDPROPERTIES') != b'': raise Exception('ENDPROPERTIES expected') line = input.read_lines(skip_comments) # GLYPHS num_chars = fnutil.parse_dec('CHARS', self.props.parse(line, 'CHARS'), 1, CHARS_MAX) for _ in range(0, num_chars): self.chars.append(Char.read(input)) if next((char.code for char in self.chars if char.code == self.default_code), -1) != self.default_code: raise Exception('invalid DEFAULT_CHAR') # FINAL if input.read_lines(skip_comments) != b'ENDFONT': raise Exception('ENDFONT expected') if input.read_line() is not None: raise Exception('garbage after ENDFONT') return self @staticmethod def read(input): return Font()._read(input) # pylint: disable=protected-access def write(self, output): for [name, value] in self.props: output.write_prop(name, value) for char in self.chars: char.write(output) output.write_line(b'ENDFONT')
23.935484
105
0.68814
1,101
7,420
4.514078
0.235241
0.034406
0.028169
0.032596
0.273843
0.198793
0.135614
0.099396
0.065996
0.065996
0
0.019133
0.175876
7,420
309
106
24.012945
0.793622
0.124124
0
0.182796
0
0
0.087212
0
0
0
0
0
0
1
0.145161
false
0
0.026882
0.069892
0.387097
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7a6f999b096c719861108c73f2eea05851482105
9,488
py
Python
gym_brt/envs/qube_base_env.py
zuzuba/quanser-openai-driver
c4bec08a8c7ac1c05dec26c863f899f44f15fd06
[ "MIT" ]
null
null
null
gym_brt/envs/qube_base_env.py
zuzuba/quanser-openai-driver
c4bec08a8c7ac1c05dec26c863f899f44f15fd06
[ "MIT" ]
null
null
null
gym_brt/envs/qube_base_env.py
zuzuba/quanser-openai-driver
c4bec08a8c7ac1c05dec26c863f899f44f15fd06
[ "MIT" ]
null
null
null
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division

import gym
import time
import math
import numpy as np

from gym import spaces
from gym.utils import seeding
from gym_brt.quanser import QubeServo2, QubeServo2Simulator
from gym_brt.control import QubeFlipUpControl


# theta, alpha: positions, velocities, accelerations
# Bounds for the 10-element observation vector built in _get_state().
OBSERVATION_HIGH = np.asarray([
    1, 1, 1, 1,  # angles
    np.pi / 4, np.pi / 4,  # velocities
    np.pi / 4, np.pi / 4,  # accelerations
    4100,  # tach0
    0.2,  # sense
], dtype=np.float64)
OBSERVATION_LOW = -OBSERVATION_HIGH

MAX_MOTOR_VOLTAGE = 8.0
ACTION_HIGH = np.asarray([MAX_MOTOR_VOLTAGE], dtype=np.float64)
ACTION_LOW = -ACTION_HIGH

# Names of the entries of the state vector, in _get_state() order.
STATE_KEYS = [
    'COS_THETA', 'SIN_THETA', 'COS_ALPHA', 'SIN_ALPHA', 'THETA_VELOCITY',
    'ALPHA_VELOCITY', 'THETA_ACCELERATION', 'ALPHA_ACCELERATION', 'TACH0',
    'SENSE'
]


def normalize_angle(theta):
    # Map an arbitrary angle in radians into [-pi, pi).
    return ((theta + np.pi) % (2 * np.pi)) - np.pi


class QubeBaseReward(object):
    """Base class for reward functions; subclasses must implement __call__."""

    def __init__(self):
        self.target_space = spaces.Box(
            low=ACTION_LOW, high=ACTION_HIGH, dtype=np.float32)

    def __call__(self, state, action):
        raise NotImplementedError


class QubeBaseEnv(gym.Env):
    """Gym environment wrapping a (real or simulated) Quanser Qube Servo 2.

    Opens the hardware/simulator connection in __init__ and keeps it open
    until close(); also usable as a context manager.
    """

    metadata = {
        'render.modes': ['human', 'rgb_array'],
        'video.frames_per_second' : 50
    }

    def __init__(self, frequency=1000, use_simulator=False):
        """
        Args:
            frequency: control-loop sample frequency in Hz (used both for
                the Qube driver and the velocity-filter integration below).
            use_simulator: use QubeServo2Simulator instead of real hardware.
        """
        self.observation_space = spaces.Box(
            OBSERVATION_LOW, OBSERVATION_HIGH, dtype=np.float32)
        self.action_space = spaces.Box(
            ACTION_LOW, ACTION_HIGH, dtype=np.float32)
        self.reward_fn = QubeBaseReward()

        # Internal states of the velocity high-pass filters (see _step).
        self._theta_velocity_cstate = 0
        self._alpha_velocity_cstate = 0
        self._theta_velocity = 0
        self._alpha_velocity = 0
        self._frequency = frequency

        # Open the Qube
        if use_simulator:
            self.qube = QubeServo2Simulator(
                euler_steps=1, frequency=frequency)
        else:
            self.qube = QubeServo2(frequency=frequency)
        # Entered manually here; close() calls __exit__ to release it.
        self.qube.__enter__()

        self.seed()
        self.viewer = None
        self.use_simulator = use_simulator

    def __enter__(self):
        return self

    def __exit__(self, type, value, traceback):
        self.close()

    def seed(self, seed=None):
        self.np_random, seed = seeding.np_random(seed)
        return [seed]

    def _step(self, action):
        """Apply one motor-voltage action and read/derive the new state."""
        motor_voltages = np.clip(np.array(
            [action[0]], dtype=np.float64), ACTION_LOW, ACTION_HIGH)
        currents, encoders, others = self.qube.action(motor_voltages)
        self._sense = currents[0]
        self._tach0 = others[0]

        # Calculate alpha, theta, alpha_velocity, and theta_velocity
        # 2048 encoder counts per revolution; sign flips theta's direction.
        self._theta = encoders[0] * (-2.0 * np.pi / 2048)
        alpha_un = encoders[1] * (2.0 * np.pi / 2048)  # Alpha without normalizing
        self._alpha = (alpha_un % (2.0 * np.pi)) - np.pi  # Normalized and shifted alpha

        # Discretized filtered-derivative (high-pass) velocity estimates;
        # the -2500/50 constants are the filter gains, integrated at 1/frequency.
        theta_velocity = -2500 * self._theta_velocity_cstate + 50 * self._theta
        alpha_velocity = -2500 * self._alpha_velocity_cstate + 50 * alpha_un
        self._theta_velocity_cstate += (-50 * self._theta_velocity_cstate + self._theta) / self._frequency
        self._alpha_velocity_cstate += (-50 * self._alpha_velocity_cstate + alpha_un) / self._frequency

        # TODO: update using the transfer function
        # Accelerations via backward difference of the filtered velocities.
        self._theta_acceleration = (theta_velocity - self._theta_velocity) * self._frequency
        self._alpha_acceleration = (alpha_velocity - self._alpha_velocity) * self._frequency

        self._theta_velocity = theta_velocity
        self._alpha_velocity = alpha_velocity

        return self._get_state()

    def _get_state(self):
        """Pack the cached measurements into the 10-element state vector."""
        state = np.asarray([
            np.cos(self._theta),
            np.sin(self._theta),
            np.cos(self._alpha),
            np.sin(self._alpha),
            self._theta_velocity,
            self._alpha_velocity,
            self._theta_acceleration,
            self._alpha_acceleration,
            self._tach0,
            self._sense,
        ], dtype=np.float32)
        return state

    def _flip_up(self, early_quit=False):
        """Run classic control for flip-up until the pendulum is inverted for
        a set amount of time. Assumes that initial state is stationary
        downwards.

        Args:
            early_quit: Quit if flip up doesn't succeed after set amount of
                time
        """
        # NOTE(review): early_quit is currently unused — the loop only exits
        # on success; confirm whether a timeout should be honored here.
        control = QubeFlipUpControl(env=self, sample_freq=self._frequency)
        time_hold = 1.0 * self._frequency  # Number of samples to hold upright
        sample = 0  # Samples since control system started
        samples_upright = 0  # Consecutive samples pendulum is upright
        action = self.action_space.sample()

        # Initial kick to get the pendulum moving before closed-loop control.
        state, _, _, _ = self.step([1.0])
        while True:
            action = control.action(state)
            state, _, _, _ = self.step(action)
            # Break if pendulum is inverted
            # NOTE(review): _alpha is signed; this threshold only bounds it
            # from above (10 degrees) — confirm large negative alpha cannot
            # falsely count as "upright".
            if self._alpha < (10 * np.pi / 180):
                if samples_upright > time_hold:
                    break
                samples_upright += 1
            else:
                samples_upright = 0
            sample += 1
        return state

    def _dampen_down(self, min_hold_time=0.5):
        """Apply zero voltage until the pendulum has been stationary for
        min_hold_time seconds, then return the current state."""
        action = np.zeros(
            shape=self.action_space.shape, dtype=self.action_space.dtype)
        time_hold = min_hold_time * self._frequency
        samples_downwards = 0  # Consecutive samples pendulum is stationary
        while True:
            state, _, _, _ = self.step(action)
            # Break if pendulum is stationary
            # (velocities and accelerations, state[4:8], close to zero)
            ref_state = [0., 0., 0., 0.]
            if np.allclose(state[4:8], ref_state, rtol=1e-02, atol=1e-03):
                if samples_downwards > time_hold:
                    break
                samples_downwards += 1
            else:
                samples_downwards = 0
        return self._get_state()

    def flip_up(self, early_quit=False, time_out=5, min_hold_time=1):
        # NOTE(review): time_out and min_hold_time are accepted but not
        # forwarded to _flip_up — confirm intended.
        return self._flip_up(early_quit=early_quit)

    def dampen_down(self):
        return self._dampen_down()

    def reset(self):
        # Start the pendulum stationary at the bottom (stable point)
        self.dampen_down()
        action = np.zeros(
            shape=self.action_space.shape, dtype=self.action_space.dtype)
        return self.step(action)[0]

    def step(self, action):
        """Standard gym step: returns (state, reward, done, info).
        done is always False here; episode termination is left to wrappers."""
        state = self._step(action)
        reward = self.reward_fn(state, action)
        done = False
        info = {}
        return state, reward, done, info

    def render(self, mode='human'):
        # Simple and *NOT* physically accurate rendering
        screen = screen_width = screen_height = 600
        scale = 0.5 * screen / 100.0  # Everything is scaled out of 100
        qubewidth = 10.0 * scale
        qubeheight = 10.0 * scale
        origin = (screen_width/2, screen_height/2)
        arm_len = 40 * scale
        arm_width = 1.0 * scale
        pen_len = 40 * scale
        pen_width = 2.0 * scale

        # Position of the pendulum pivot for a given arm angle.
        def pen_origin(theta, origin=origin, len=arm_len):
            x = origin[0] - len * math.sin(theta)
            y = origin[1] + len * math.cos(theta)
            return x, y

        if self.viewer is None:
            from gym.envs.classic_control import rendering
            self.viewer = rendering.Viewer(screen_width, screen_height)

            # draw qube base
            l,r,t,b = qubewidth/2, -qubewidth/2, -qubeheight/2, qubeheight/2
            qube = rendering.FilledPolygon([(l,b), (l,t), (r,t), (r,b)])
            qube.set_color(0.0, 0.0, 0.0)
            qubetrans = rendering.Transform(translation=origin)
            qube.add_attr(qubetrans)
            self.viewer.add_geom(qube)

            # draw qube arm
            l,r,t,b = arm_width/2, -arm_width/2, 0, arm_len
            arm = rendering.FilledPolygon([(l,b), (l,t), (r,t), (r,b)])
            arm.set_color(0.5, 0.5, 0.5)
            self.armtrans = rendering.Transform(translation=origin)
            arm.add_attr(self.armtrans)
            self.viewer.add_geom(arm)
            arm_trace = rendering.make_circle(radius=arm_len, filled=False)
            armtracetrans = rendering.Transform(translation=origin)
            arm_trace.set_color(0.5, 0.5, 0.5)
            arm_trace.add_attr(armtracetrans)
            self.viewer.add_geom(arm_trace)

            # draw qube pendulum
            pen_orgin = (origin[0], origin[1] + arm_len)
            l,r,t,b = pen_width/2, -pen_width/2, 0, pen_len
            pen = rendering.FilledPolygon([(l,b), (l,t), (r,t), (r,b)])
            pen.set_color(1.0, 0.0, 0.0)
            self.pentrans = rendering.Transform(
                translation=pen_orgin, rotation=math.pi/10)
            pen.add_attr(self.pentrans)
            self.viewer.add_geom(pen)

        # Update transforms every call from the latest measured angles.
        self.armtrans.set_rotation(np.pi+self._theta)
        self.pentrans.set_translation(*pen_origin(np.pi+self._theta))
        self.pentrans.set_rotation(self._alpha)

        return self.viewer.render(return_rgb_array = mode=='rgb_array')

    def close(self, type=None, value=None, traceback=None):
        # Safely close the Qube
        self.qube.__exit__(type=type, value=value, traceback=traceback)
        if self.viewer:
            self.viewer.close()
33.059233
106
0.602867
1,178
9,488
4.617997
0.202037
0.028125
0.004963
0.004412
0.194118
0.113971
0.104779
0.069853
0.039706
0.039706
0
0.027998
0.296058
9,488
286
107
33.174825
0.786495
0.096648
0
0.127358
0
0
0.02037
0.002708
0
0
0
0.003497
0
1
0.084906
false
0
0.056604
0.018868
0.216981
0.004717
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7a6fdd46e944281a1207e5491edb3ac4ac32ea1b
397
py
Python
mopidy_bamp/mopidy_bamp/base_service.py
isabella232/BossAlienMediaPlayer
c5ab5993a68c43d6b213bef2224020c9d4cc96cf
[ "MIT" ]
14
2019-09-25T18:28:32.000Z
2021-08-10T00:46:43.000Z
mopidy_bamp/mopidy_bamp/base_service.py
J87/BossAlienMediaPlayer
5af42e2fa94bd7e0fc4486b3deed2fdd6e3dcd05
[ "MIT" ]
9
2020-01-08T11:10:25.000Z
2022-03-02T05:42:50.000Z
mopidy_bamp/mopidy_bamp/base_service.py
J87/BossAlienMediaPlayer
5af42e2fa94bd7e0fc4486b3deed2fdd6e3dcd05
[ "MIT" ]
6
2019-09-25T15:04:40.000Z
2021-01-04T11:21:01.000Z
import threading

import tornado.web


class BaseService:
    """Minimal lifecycle base for HTTP-facing services: bind core/config
    once via init(), then guard request handling with check_init()."""

    # Class-level defaults; init() replaces them with per-instance values.
    initialised = False
    core = None
    config = None
    lock = None

    def init(self, core, config):
        """Attach the core and config objects and mark the service ready."""
        self.core, self.config = core, config
        self.lock = threading.Lock()
        self.initialised = True

    def check_init(self):
        """Abort with HTTP 500 when init() has not been called yet."""
        if self.initialised:
            return
        raise tornado.web.HTTPError(500)
17.26087
44
0.609572
46
397
5.23913
0.478261
0.082988
0
0
0
0
0
0
0
0
0
0.010989
0.312343
397
22
45
18.045455
0.871795
0
0
0
0
0
0
0
0
0
0
0
0
1
0.133333
false
0
0.133333
0
0.6
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
7a70d71d5291dbc0c13322848d901d168bba6bc7
3,178
py
Python
main/helpers.py
anngle/t923
078d2c566c77afa2ca1be7663d3c23c9f0ecddac
[ "BSD-3-Clause" ]
1
2021-11-28T05:46:45.000Z
2021-11-28T05:46:45.000Z
main/helpers.py
anngle/t923
078d2c566c77afa2ca1be7663d3c23c9f0ecddac
[ "BSD-3-Clause" ]
null
null
null
main/helpers.py
anngle/t923
078d2c566c77afa2ca1be7663d3c23c9f0ecddac
[ "BSD-3-Clause" ]
null
null
null
#coding=utf-8
from werkzeug import import_string, cached_property
from functools import wraps
from flask import request,render_template,session,current_app,url_for
from datetime import timedelta,datetime
# from main.extensions import redis_store
from flask_sse import sse
# from urllib.parse import urljoin
# from urllib import parse
# from urlparse import urlparse, urljoin
import time


class LazyView(object):
    """View proxy that defers importing the real view function until the
    first request, so blueprints can register routes cheaply."""

    def __init__(self, import_name):
        # Expose the target's module/name so Flask's routing sees a
        # sensible endpoint identity.
        self.__module__, self.__name__ = import_name.rsplit('.', 1)
        self.import_name = import_name

    @cached_property
    def view(self):
        # Imported once, then cached by cached_property.
        return import_string(self.import_name)

    def __call__(self, *args, **kwargs):
        return self.view(*args, **kwargs)


def url(bp,url_rule, import_name, **options):
    # Register url_rule on blueprint bp, resolving the view lazily from
    # 'main.views.<blueprint>.<import_name>'.
    view = LazyView('main.views.' + bp.name+'.'+ import_name)
    bp.add_url_rule(url_rule, view_func=view, **options)


def templated(template=None):
    # Decorator: render the wrapped view's returned dict as the template's
    # context. Template defaults to '<endpoint>.html' with dots as slashes.
    # Non-dict return values (e.g. redirects) pass through untouched.
    def decorator(f):
        @wraps(f)
        def decorated_function(*args, **kwargs):
            template_name = template
            if template_name is None:
                template_name = request.endpoint \
                    .replace('.', '/') + '.html'
            ctx = f(*args, **kwargs)
            if ctx is None:
                ctx = {}
            elif not isinstance(ctx, dict):
                return ctx
            return render_template(template_name, **ctx)
        return decorated_function
    return decorator


"""http://flask.pocoo.org/snippets/71/
Counting Online Users with Redis
"""
# NOTE(review): the redis_store import above is commented out, so
# mark_online / get_user_last_activity / get_online_users raise NameError
# if called. Restore `from main.extensions import redis_store` (or remove
# these helpers) before use.
def mark_online(user_id):
    # Record user activity in two Redis keys that expire after
    # ONLINE_LAST_MINUTES (+10s slack): a per-minute set of online users
    # and a per-user last-activity timestamp.
    now = int(time.time())
    expires = now + (current_app.config['ONLINE_LAST_MINUTES'] * 60) + 10
    all_users_key = 'online-users/%d' % (now // 60)
    user_key = 'user-activity/%s' % user_id
    p = redis_store.pipeline()
    p.sadd(all_users_key, user_id)
    p.set(user_key, now)
    p.expireat(all_users_key, expires)
    p.expireat(user_key, expires)
    p.execute()


def get_user_last_activity(user_id):
    # Return the user's last activity as a naive UTC datetime, or None
    # if no activity is recorded.
    last_active = redis_store.get('user-activity/%s' % user_id)
    if last_active is None:
        return None
    return datetime.utcfromtimestamp(int(last_active))


def get_online_users():
    # Union of the per-minute online-user sets for the last
    # ONLINE_LAST_MINUTES minutes.
    current = int(time.time()) // 60
    minutes = range(current_app.config['ONLINE_LAST_MINUTES'])
    online_count = redis_store.sunion(['online-users/%d' % (current - x)
                                       for x in minutes])
    return online_count


"""http://flask.pocoo.org/snippets/62/
Securely Redirect Back
"""
# def is_safe_url(target):
#     ref_url = parse(request.host_url)
#     test_url = parse(urljoin(request.host_url, target))
#     return test_url.scheme in ('http', 'https') and \
#         ref_url.netloc == test_url.netloc

# def get_redirect_target():
#     for target in request.values.get('next'), request.referrer:
#         if not target:
#             continue
#         if is_safe_url(target):
#             return target

# def redirect_back(endpoint, **values):
#     target = request.form['next']
#     if not target or not is_safe_url(target):
#         target = url_for(endpoint, **values)
#     return redirect(target)

"""
return redirect_back('index')
"""
28.890909
73
0.648521
414
3,178
4.746377
0.309179
0.035623
0.021374
0.022901
0.078372
0.033588
0
0
0
0
0
0.00574
0.232536
3,178
110
74
28.890909
0.799918
0.237256
0
0
0
0
0.053981
0
0
0
0
0
0
1
0.178571
false
0
0.214286
0.035714
0.571429
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7a710889018c82681c9e827f743aab1961a6e26a
1,482
py
Python
LeetCode/medium/search_a_2d_matrix_ii.py
hnc01/online-judge
d306dc32c9d8600a987affbe4e4b80809f0b0982
[ "MIT" ]
null
null
null
LeetCode/medium/search_a_2d_matrix_ii.py
hnc01/online-judge
d306dc32c9d8600a987affbe4e4b80809f0b0982
[ "MIT" ]
null
null
null
LeetCode/medium/search_a_2d_matrix_ii.py
hnc01/online-judge
d306dc32c9d8600a987affbe4e4b80809f0b0982
[ "MIT" ]
null
null
null
'''
https://leetcode.com/problems/search-a-2d-matrix-ii/
240. Search a 2D Matrix II
Write an efficient algorithm that searches for a target value in an m x n integer matrix.
The matrix has the following properties:
- Integers in each row are sorted in ascending from left to right.
- Integers in each column are sorted in ascending from top to bottom.
'''

'''
Accepted
'''


class Solution:
    def searchMatrix(self, matrix: [[int]], target: int) -> bool:
        """Return True when *target* occurs in *matrix*.

        Staircase search from the top-right corner: at matrix[row][col] a
        larger value rules out the whole column (columns ascend downward)
        and a smaller value rules out the whole row (rows ascend rightward),
        so each step discards a full row or column — O(m + n) instead of the
        previous column-by-column scan (O(m * n) worst case).

        Also returns False for an empty matrix / empty rows, which
        previously raised IndexError.
        """
        if not matrix or not matrix[0]:
            return False
        row, col = 0, len(matrix[0]) - 1
        while row < len(matrix) and col >= 0:
            value = matrix[row][col]
            if value == target:
                return True
            if value > target:
                col -= 1  # everything below in this column is even larger
            else:
                row += 1  # everything left in this row is even smaller
        return False


matrix = [[1, 4, 7, 11, 15], [2, 5, 8, 12, 19], [3, 6, 9, 16, 22], [10, 13, 14, 17, 24], [18, 21, 23, 26, 30]]
target = 30
print(Solution().searchMatrix(matrix, target))
32.933333
134
0.536437
206
1,482
3.859223
0.514563
0.067925
0.037736
0.060377
0.210063
0.072956
0
0
0
0
0
0.057082
0.361673
1,482
44
135
33.681818
0.783298
0.412281
0
0.210526
0
0
0
0
0
0
0
0
0
1
0.052632
false
0
0
0
0.263158
0.052632
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7a718a243bcf5867511bde8c48494b0dbfa75d51
7,503
py
Python
automow_ekf/src/automow_ekf/__init__.py
Auburn-Automow/au_automow_common
920be6a740aa6d738e9954417b41490e353efd04
[ "BSD-3-Clause" ]
43
2016-03-05T17:06:29.000Z
2022-03-10T08:50:46.000Z
automow_ekf/src/automow_ekf/__init__.py
qintxwd/au_automow_common
920be6a740aa6d738e9954417b41490e353efd04
[ "BSD-3-Clause" ]
2
2017-07-10T12:43:49.000Z
2019-03-13T13:57:31.000Z
automow_ekf/src/automow_ekf/__init__.py
qintxwd/au_automow_common
920be6a740aa6d738e9954417b41490e353efd04
[ "BSD-3-Clause" ]
22
2016-03-23T06:10:52.000Z
2022-03-10T08:50:49.000Z
import numpy as np
import threading


def wrapToPi(angle):
    """ Wrap a given angle in radians to the range -pi to pi.

    @param angle : The angle to be wrapped
    @param type angle : float

    @return : Wrapped angle
    @rtype : float
    """
    return np.mod(angle + np.pi, 2.0 * np.pi) - np.pi


class AutomowEKF:
    # Extended Kalman filter for the mower's pose. State vector layout, as
    # established by the getters and updates below: [0]=easting,
    # [1]=northing, [2]=yaw, [6]=yaw bias. Indices 3-5 are presumably wheel
    # radii and wheel base (they scale the wheel inputs u in the kinematics)
    # — TODO confirm against the robot model.
    __nx = 7  # Number of States in the Kalman Filter
    __ny_gps = 2  # Number of measurements from the GPS
    __ny_imu = 2  # Number of measurements from the IMU
    __nu = 2  # Number of inputs
    __prev_time = 0  # timestamp of last timeUpdate (class default 0)
    __dt = np.double  # dtype used for all filter matrices

    # GPS measures states 0 and 1 (easting/northing).
    C_gps = np.array([[1, 0, 0, 0, 0, 0, 0],
                      [0, 1, 0, 0, 0, 0, 0]], dtype=__dt)
    # AHRS measures yaw plus yaw bias (states 2 and 6).
    C_imu = np.array([0, 0, 1, 0, 0, 0, 1], dtype=__dt)

    def __init__(self, x_hat_i, P_i, Q, R_gps, R_imu):
        """ Initialize the Kalman Filter with a set of input arguments

        @param x_hat_i : The initial state of the Kalman Estimator
        @param type x_hat_i : (7, ) numpy.array, dtype=np.double
        @param P_i : The initial covariance matrix of the Kalman Estimator
        @param type P_i : (7, 7) numpy.array, dtype=np.double
        @param Q : The process noise covariance of the system
        @param type Q : (7, 7) numpy.array, dtype=np.double
        @param R_gps : The GPS measurement noise covariance
        @param type R_gps : (2, 2) numpy.array, dtype=np.double
        @param R_imu : The AHRS measurement noise covariance
        @param type R_imu : (1, 1) numpy.array, dtype=np.double
        """
        # state_lock serializes access to x_hat/P between the update
        # methods and the getters.
        self.state_lock = threading.Lock()
        with self.state_lock:
            self.x_hat = x_hat_i
            self.P = P_i
            self.Q = Q
            self.R_gps = R_gps
            self.R_imu = R_imu
            self.F = np.zeros((self.__nx, self.__nx), dtype=self.__dt)
            self.G = np.zeros((self.__nx, self.__nx), dtype=self.__dt)

    @classmethod
    def fromDefault(cls):
        """ Initialize the Kalman Filter with a set of default arguments
        """
        x_hat_i = np.array([0, 0, 0, 0.159, 0.159, 0.5461, 0],
                           dtype=cls.__dt)
        P_i = np.diag(np.array([100, 100, 100,
                                0.0001, 0.0001, 0.0001, 0.0001],
                               dtype=cls.__dt))
        Q = np.diag(np.array([0.1, 0.1, 0, 0, 0, 0, 0], dtype=cls.__dt))
        R_gps = np.eye(2, dtype=cls.__dt) * 0.02
        R_imu = np.eye(1, dtype=cls.__dt) * 0.02
        return cls(x_hat_i, P_i, Q, R_gps, R_imu)

    def updateModel(self, u, dt):
        """ Update the process and process noise matricies of the model

        @param u : The current i
        @param type u : (2, ) numpy.array, dtype=np.double
        @param dt : The time delta from the previous time update
        @param type dt : np.float
        """
        # F is the Jacobian of the process model wrt the state; only the
        # position/heading rows deviate from identity.
        self.F = np.eye(self.__nx, dtype=self.__dt)
        self.F[0, 2] = -0.5 * dt \
            * (self.x_hat[3] * u[0] + self.x_hat[4] * u[1]) \
            * np.sin(self.x_hat[2])
        self.F[0, 3] = 0.5 * dt * u[0] * np.cos(self.x_hat[2])
        self.F[0, 4] = 0.5 * dt * u[1] * np.cos(self.x_hat[2])
        self.F[1, 2] = 0.5 * dt \
            * (self.x_hat[3] * u[0] + self.x_hat[4] * u[1]) \
            * np.cos(self.x_hat[2])
        self.F[1, 3] = 0.5 * dt * u[0] * np.sin(self.x_hat[2])
        self.F[1, 4] = 0.5 * dt * u[1] * np.sin(self.x_hat[2])
        self.F[2, 3] = -1.0 * dt * u[0] / self.x_hat[5]
        self.F[2, 4] = dt * u[1] / self.x_hat[5]
        self.F[2, 5] = dt \
            * (self.x_hat[3] * u[0] - self.x_hat[4] * u[1]) \
            / np.power(self.x_hat[5], 2)

        # G maps process noise into the state (input Jacobian).
        self.G = np.zeros((self.__nx, self.__nx), dtype=self.__dt)
        self.G[0, 0] = 0.5 * dt * self.x_hat[3] * np.cos(self.x_hat[2])
        self.G[0, 1] = 0.5 * dt * self.x_hat[4] * np.cos(self.x_hat[2])
        self.G[0, 3] = 0.5 * dt * u[0] * np.cos(self.x_hat[2])
        self.G[0, 4] = 0.5 * dt * u[1] * np.cos(self.x_hat[2])
        self.G[1, 0] = 0.5 * dt * self.x_hat[3] * np.sin(self.x_hat[2])
        self.G[1, 1] = 0.5 * dt * self.x_hat[4] * np.sin(self.x_hat[2])
        # NOTE(review): rows G[1, 3] and G[1, 4] use cos() where the
        # neighbouring y-row entries use sin() — possibly a copy/paste slip;
        # verify against the derivation before changing.
        self.G[1, 3] = 0.5 * dt * u[0] * np.cos(self.x_hat[2])
        self.G[1, 4] = 0.5 * dt * u[1] * np.cos(self.x_hat[2])
        self.G[2, 0] = -1.0 * dt * self.x_hat[3] / self.x_hat[5]
        self.G[2, 1] = dt * self.x_hat[4] / self.x_hat[5]
        self.G[2, 2] = dt
        self.G[2, 3] = -1.0 * dt * self.x_hat[3] / self.x_hat[5]
        self.G[2, 4] = dt * self.x_hat[4] / self.x_hat[5]
        self.G[3, 3] = dt
        self.G[4, 4] = dt
        self.G[5, 5] = dt
        self.G[6, 6] = dt
        return

    def timeUpdate(self, u, time):
        """EKF prediction step: propagate the state with differential-drive
        kinematics and the covariance with F/G from updateModel.

        Returns (v, w): the body-frame linear and angular velocity used.
        """
        dt = time - self.__prev_time
        self.__prev_time = time
        self.updateModel(u, dt)
        v = self.x_hat[4] / 2.0 * u[1] + self.x_hat[3] / 2.0 * u[0]
        w = self.x_hat[4] / self.x_hat[5] * u[1] - \
            self.x_hat[3] / self.x_hat[5] * u[0]
        with self.state_lock:
            # Midpoint integration of the heading over the step.
            self.x_hat[0] += dt * v * np.cos(self.x_hat[2] + dt * w / 2.0)
            self.x_hat[1] += dt * v * np.sin(self.x_hat[2] + dt * w / 2.0)
            self.x_hat[2] += dt * w
            self.x_hat[2] = wrapToPi(self.x_hat[2])
            self.P = np.dot(self.F, np.dot(self.P, self.F.T)) \
                + np.dot(self.G, np.dot(self.Q, self.G.T))
        return v, w

    def measurementUpdateGPS(self, y, R):
        """EKF correction from a GPS position fix y with covariance R.

        Returns (innovation, S, K) for diagnostics.
        """
        # NOTE(review): `is` compares identity, not equality — a fresh tuple
        # literal is never `is` y.shape, and np.double is a type, not a
        # dtype instance, so BOTH branches below are effectively dead code.
        # Intended semantics were presumably `==` comparisons; confirm
        # before changing, as enabling the reshape alters innovation shape.
        if y.shape is (2, ):
            y = y.reshape((1, 2))
        if y.dtype is not np.double:
            y = y.astype(np.double)
        innovation = y - np.dot(self.C_gps, self.x_hat)
        S = np.dot(self.C_gps, np.dot(self.P, self.C_gps.T))
        S += R
        K = np.dot(self.P, np.dot(self.C_gps.conj().T, np.linalg.inv(S)))
        with self.state_lock:
            self.x_hat = self.x_hat + np.dot(K, innovation)
            self.P = np.dot((np.eye(self.__nx) - np.dot(K, self.C_gps)),
                            self.P)
        return innovation, S, K

    def measurementUpdateAHRS(self, y):
        """EKF correction from an AHRS yaw measurement y (radians).

        Returns (innovation, S, K) for diagnostics.
        """
        y = wrapToPi(y)
        # if y.dtype is not np.double:
        #     y = y.astype(np.double)
        # Scalar measurement: C_imu picks yaw + yaw bias.
        innovation = y - np.dot(self.C_imu, self.x_hat)
        innovation = wrapToPi(innovation)
        S = np.dot(self.C_imu, np.dot(self.P, self.C_imu.T))
        S += self.R_imu[0, 0]
        K = np.dot(self.P, self.C_imu.T / S)
        with self.state_lock:
            self.x_hat += K * innovation
            self.x_hat[2] = wrapToPi(self.x_hat[2])
            self.x_hat[6] = wrapToPi(self.x_hat[6])
            self.P = np.dot((np.eye(self.__nx) - \
                np.dot(K.reshape((self.__nx, 1)),
                       self.C_imu.reshape((1, self.__nx)))), self.P)
        return innovation, S, K

    def getYaw(self):
        # Current yaw estimate (state 2), lock-protected.
        with self.state_lock:
            return self.x_hat[2]

    def getNorthing(self):
        with self.state_lock:
            return self.x_hat[1]

    def getEasting(self):
        with self.state_lock:
            return self.x_hat[0]

    def getYawBias(self):
        with self.state_lock:
            return self.x_hat[6]

    def getStateString(self):
        # Comma-separated rendering of all 7 state entries (trailing ", ").
        with self.state_lock:
            string = ''
            for ii in range(7):
                string += str(self.x_hat[ii]) + ", "
            return string

    def getStateList(self):
        with self.state_lock:
            return self.x_hat.flatten().tolist()

    def getPList(self):
        # NOTE(review): unlike getStateList this returns a numpy array, not
        # a list (no .tolist()) — confirm callers expect that.
        with self.state_lock:
            return self.P.flatten()
38.086294
103
0.514461
1,280
7,503
2.867188
0.107813
0.076294
0.141689
0.053951
0.584469
0.539237
0.462398
0.39346
0.318801
0.222888
0
0.055622
0.329068
7,503
196
104
38.280612
0.673421
0.1666
0
0.136691
0
0
0.000331
0
0
0
0
0
0
1
0.100719
false
0
0.014388
0
0.273381
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7a719e5fbbaf6f206dcab2bd1b09cd9219be9533
2,028
py
Python
functions/oxford.py
codefire53/aegisbot
b4cb4f42e3cbdf8554eb234661cc6091e83f1be4
[ "MIT" ]
null
null
null
functions/oxford.py
codefire53/aegisbot
b4cb4f42e3cbdf8554eb234661cc6091e83f1be4
[ "MIT" ]
null
null
null
functions/oxford.py
codefire53/aegisbot
b4cb4f42e3cbdf8554eb234661cc6091e83f1be4
[ "MIT" ]
null
null
null
'''Importing necessary modules'''
# BUGFIX: urllib.request must be imported explicitly; the original
# `import urllib` does not load the request submodule, so
# urllib.request.urlopen raised AttributeError at call time.
import urllib.request
from urllib.parse import quote

import requests
from bs4 import BeautifulSoup


'''Function to search word/phrase on oxford dictionary'''
def define(word):
    """Look up *word* on the Oxford Dictionaries site.

    Returns a numbered, multi-line string of definitions (each prefixed
    with its part of speech), or an error message when nothing is found.
    """
    # Oxford dictionary search query url
    url = 'https://en.oxforddictionaries.com/definition/' + quote(word)

    # Fetch and parse the html page
    page = urllib.request.urlopen(url)
    soup = BeautifulSoup(page, 'html.parser')

    # Collected definition strings
    definitions = []

    # Each <section class="gramb"> holds the definitions for one part of speech
    meanings = soup.find_all('section', {'class': 'gramb'})
    for row in meanings:
        # Obtain the definition type (part of speech heading)
        types = row.find('h3', {'class': 'ps pos'})
        ulist = row.find('ul', {'class': 'semb'})
        # find the li tags which contain the list of definitions
        word_defs = ulist.find_all('li')
        for defs in word_defs:
            # If <div class="trg"> exists, fetch the main definition from its <p> child
            mean_word = defs.find('div', {'class': 'trg'})
            if mean_word is not None:
                for mw in mean_word.findChildren():
                    # class "ind" under a <p> parent marks the main definition text
                    if mw.get('class') == ['ind'] and mw.parent.name == 'p':
                        definitions.append('({}){}'.format(
                            types.get_text().strip(), mw.get_text().strip()))

    # Format the results; loop variable renamed from `define`, which
    # shadowed this function's own name in the original.
    if definitions:
        res = 'List of definitions of "{}" word/phrase:\n'.format(word)
        for num, definition in enumerate(definitions, 1):
            res += '{}. {}\n'.format(num, definition)
        return res
    # Otherwise, report that nothing was found
    else:
        return 'There\'s no "{}" word/phrase in the oxford dictionary database!'.format(word)
45.066667
113
0.61785
275
2,028
4.516364
0.421818
0.016103
0.02657
0
0
0
0
0
0
0
0
0.002011
0.2643
2,028
44
114
46.090909
0.830429
0.321499
0
0
0
0
0.140661
0
0
0
0
0
0
1
0.035714
false
0
0.142857
0
0.25
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7a72cc0496b6255bcf4c039018a93cc9e747a2d4
3,546
py
Python
checkov/common/output/baseline.py
peaudecastor/checkov
a4804b61c1b1390b7abd44ab53285fcbc3e7e80b
[ "Apache-2.0" ]
null
null
null
checkov/common/output/baseline.py
peaudecastor/checkov
a4804b61c1b1390b7abd44ab53285fcbc3e7e80b
[ "Apache-2.0" ]
null
null
null
checkov/common/output/baseline.py
peaudecastor/checkov
a4804b61c1b1390b7abd44ab53285fcbc3e7e80b
[ "Apache-2.0" ]
null
null
null
from __future__ import annotations

import json
from collections import defaultdict
from typing import Any, TYPE_CHECKING

if TYPE_CHECKING:
    from checkov.common.output.record import Record
    from checkov.common.output.report import Report
    from checkov.common.typing import _BaselineFinding, _BaselineFailedChecks


class Baseline:
    """Per-file baseline of known failed checks, used to suppress findings
    that were already accepted in an earlier scan."""

    def __init__(self) -> None:
        self.path = ""
        self.path_failed_checks_map: dict[str, list[_BaselineFinding]] = defaultdict(list)
        self.failed_checks: list[_BaselineFailedChecks] = []

    def add_findings_from_report(self, report: Report) -> None:
        """Fold a report's failed checks into the per-file findings map."""
        for check in report.failed_checks:
            findings = self.path_failed_checks_map[check.file_path]
            entry = next((f for f in findings if f["resource"] == check.resource), None)
            if entry is None:
                entry = {"resource": check.resource, "check_ids": []}
                findings.append(entry)
            entry["check_ids"].append(check.check_id)
            entry["check_ids"].sort()  # Sort the check IDs to be nicer to the eye

    def to_dict(self) -> dict[str, Any]:
        """
        The output of this class needs to be very explicit, hence the following structure of the dict:
        {
            "failed_checks": [
                {
                    "file": "path/to/file",
                    "findings: [
                        {
                            "resource": "aws_s3_bucket.this",
                            "check_ids": [
                                "CKV_AWS_1",
                                "CKV_AWS_2",
                                "CKV_AWS_3"
                            ]
                        }
                    ]
                }
            ]
        }
        """
        return {
            "failed_checks": [
                {
                    "file": file,
                    "findings": [
                        {"resource": finding["resource"], "check_ids": finding["check_ids"]}
                        for finding in findings
                    ],
                }
                for file, findings in self.path_failed_checks_map.items()
            ]
        }

    def compare_and_reduce_reports(self, scan_reports: list[Report]) -> None:
        """Drop baselined failures from each report; keep only passed/skipped
        checks that the baseline already knows about."""
        for scan_report in scan_reports:
            scan_report.passed_checks = [
                c for c in scan_report.passed_checks if self._is_check_in_baseline(c)
            ]
            scan_report.skipped_checks = [
                c for c in scan_report.skipped_checks if self._is_check_in_baseline(c)
            ]
            scan_report.failed_checks = [
                c for c in scan_report.failed_checks if not self._is_check_in_baseline(c)
            ]

    def _is_check_in_baseline(self, check: Record) -> bool:
        """True when the baseline records this check id for this resource."""
        return any(
            finding["resource"] == check.resource and check.check_id in finding["check_ids"]
            for baseline_failed_check in self.failed_checks
            for finding in baseline_failed_check["findings"]
        )

    def from_json(self, file_path: str) -> None:
        """Load a previously written baseline file."""
        self.path = file_path
        with open(file_path, "r") as f:
            baseline_raw = json.load(f)
        self.failed_checks = baseline_raw.get("failed_checks", {})
40.295455
111
0.57868
392
3,546
4.936224
0.239796
0.099225
0.028941
0.041344
0.170026
0.15814
0.130749
0.082687
0.045478
0.045478
0
0.0017
0.336435
3,546
87
112
40.758621
0.820654
0.147208
0
0
0
0
0.049232
0
0
0
0
0
0
1
0.105263
false
0.035088
0.122807
0
0.298246
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7a74674267c1f4fe6e221bdda6d7d922cfce3228
981
py
Python
Configuration/Generator/python/bJpsiX_filt_cfi.py
ckamtsikis/cmssw
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
[ "Apache-2.0" ]
852
2015-01-11T21:03:51.000Z
2022-03-25T21:14:00.000Z
Configuration/Generator/python/bJpsiX_filt_cfi.py
ckamtsikis/cmssw
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
[ "Apache-2.0" ]
30,371
2015-01-02T00:14:40.000Z
2022-03-31T23:26:05.000Z
Configuration/Generator/python/bJpsiX_filt_cfi.py
ckamtsikis/cmssw
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
[ "Apache-2.0" ]
3,240
2015-01-02T05:53:18.000Z
2022-03-31T17:24:21.000Z
import FWCore.ParameterSet.Config as cms

# Generator-level filter chain for B -> J/psi X events.
# NOTE(review): the comments below read the parameter values only; the exact
# filter semantics live in the CMSSW plugins named in each EDFilter.

# Keep events containing a b or anti-b quark (PDG id +/-5). The very wide
# eta window (+-20) and MinPt of 0 make this effectively acceptance-free.
bfilter = cms.EDFilter("MCSingleParticleFilter",
    MaxEta = cms.untracked.vdouble(20.0, 20.0),
    MinEta = cms.untracked.vdouble(-20.0, -20.0),
    MinPt = cms.untracked.vdouble(0.0, 0.0),
    ParticleID = cms.untracked.vint32(5, -5)
)

# Require a J/psi (PDG id 443) with generator status 2.
jpsifilter = cms.EDFilter("PythiaFilter",
    Status = cms.untracked.int32(2),
    MaxEta = cms.untracked.double(20.0),
    MinEta = cms.untracked.double(-20.0),
    MinPt = cms.untracked.double(0.0),
    ParticleID = cms.untracked.int32(443)
)

# Require a muon pair (PDG id 13 for both legs, ParticleCharge = -1,
# presumably meaning opposite charge — confirm in the plugin) with
# pT > 2 GeV, |eta| < 2.5 and invariant mass in the 2-4 GeV window.
mumufilter = cms.EDFilter("MCParticlePairFilter",
    Status = cms.untracked.vint32(1, 1),
    MinPt = cms.untracked.vdouble(2.0, 2.0),
    MaxEta = cms.untracked.vdouble(2.5, 2.5),
    MinEta = cms.untracked.vdouble(-2.5, -2.5),
    ParticleCharge = cms.untracked.int32(-1),
    MaxInvMass = cms.untracked.double(4.0),
    MinInvMass = cms.untracked.double(2.0),
    ParticleID1 = cms.untracked.vint32(13),
    ParticleID2 = cms.untracked.vint32(13)
)
33.827586
49
0.680938
133
981
5.022556
0.270677
0.323353
0.170659
0.08982
0.347305
0.143713
0.143713
0
0
0
0
0.083333
0.155963
981
28
50
35.035714
0.72343
0
0
0
0
0
0.055046
0.022426
0
0
0
0
0
1
0
false
0
0.04
0
0.04
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
7a7625718df7e6640a9e34803147593716f4fd9f
5,618
py
Python
tests/support/rabbitmq/__init__.py
juntossomosmais/django-stomp
65e7cb86f8f6e2336a2739df8f33f985c9b4c792
[ "MIT" ]
32
2019-06-10T13:24:11.000Z
2021-12-17T21:00:41.000Z
tests/support/rabbitmq/__init__.py
juntossomosmais/django-stomp
65e7cb86f8f6e2336a2739df8f33f985c9b4c792
[ "MIT" ]
26
2019-12-17T12:51:00.000Z
2022-02-16T16:13:14.000Z
tests/support/rabbitmq/__init__.py
juntossomosmais/django-stomp
65e7cb86f8f6e2336a2739df8f33f985c9b4c792
[ "MIT" ]
1
2021-09-11T03:55:30.000Z
2021-09-11T03:55:30.000Z
import json import logging import urllib.parse from time import sleep from typing import Generator from typing import Optional import requests from requests.adapters import HTTPAdapter from tests.support.dtos import ConsumerStatus from tests.support.dtos import CurrentDestinationStatus from tests.support.dtos import MessageStatus logger = logging.getLogger(__name__) _queues_details_request_path = "/api/queues" _specific_queue_details_request_path = _queues_details_request_path + "/%2F/{queue_name}" _bindings_from_queue_request_path = _queues_details_request_path + "/%2F/{queue_name}/bindings" _get_message_from_queue_request_path = _queues_details_request_path + "/%2F/{queue_name}/get" _channels_details_request_path = "/api/channels" _channel_details_from_channel_request_path = _channels_details_request_path + "/{channel_name}" _overview_request_path = "/api/overview" def current_queue_configuration(queue_name, host="localhost", port=15672) -> Optional[CurrentDestinationStatus]: result = _do_request(host, port, _specific_queue_details_request_path.format(queue_name=queue_name)) logger.debug("RabbitMQ request result: %s", result) if result.get("error"): return None if result.get("message_stats"): message_stats = result["message_stats"] messages_dequeued = message_stats.get("deliver_get", 0) messages_enqueued = message_stats.get("publish") else: messages_dequeued = 0 messages_enqueued = None number_of_pending_messages = result["messages"] number_of_consumers = result["consumers"] return CurrentDestinationStatus( number_of_pending_messages, number_of_consumers, messages_enqueued, messages_dequeued ) def current_topic_configuration(topic_name, host="localhost", port=15672) -> Optional[CurrentDestinationStatus]: queues = _do_request(host, port, _queues_details_request_path + "?name=&use_regex=false") for queue_details in queues: queue_name = queue_details["name"] bindings = _do_request(host, port, _bindings_from_queue_request_path.format(queue_name=queue_name)) for 
binding in bindings: if binding["source"] == "amq.topic" and binding["routing_key"] == topic_name: message_stats = queue_details["message_stats"] number_of_pending_messages = queue_details["messages"] number_of_consumers = queue_details["consumers"] messages_enqueued = message_stats["publish"] messages_dequeued = message_stats["deliver_get"] if message_stats.get("deliver_get") else 0 return CurrentDestinationStatus( number_of_pending_messages, number_of_consumers, messages_enqueued, messages_dequeued ) return None def consumers_details(connection_id, host="localhost", port=15672) -> Generator[ConsumerStatus, None, None]: channels = _do_request(host, port, _channels_details_request_path) for channel in channels: channel_name = channel["connection_details"]["name"] channel_details = _do_request( host, port, _channel_details_from_channel_request_path.format( channel_name=urllib.parse.quote(f"{channel_name} ") + "(1)" ), ) if channel_details.get("consumer_details"): for consumer in channel_details["consumer_details"]: if consumer["consumer_tag"] == f"T_{connection_id}": yield ConsumerStatus( address_to_destination_details=None, destination_name=consumer["queue"]["name"], session_id=None, enqueues=None, dequeues=None, dispatched=None, dispatched_queue=None, prefetch=consumer["prefetch_count"], max_pending=channel_details["messages_unacknowledged"], exclusive=consumer["exclusive"], retroactive=None, ) def retrieve_message_published(destination_name, host="localhost", port=15672) -> MessageStatus: body = json.dumps( { "vhost": "/", "name": destination_name, "truncate": "50000", "ackmode": "ack_requeue_false", "encoding": "auto", "count": "1", } ) message_details = _do_request( host, port, _get_message_from_queue_request_path.format(queue_name=destination_name), do_post=True, body=body ) assert len(message_details) == 1 properties = message_details[0]["properties"] details = json.loads(message_details[0]["payload"]) persistent = None correlation_id = 
properties["correlation_id"] headers = properties.pop("headers") return MessageStatus(None, details, persistent, correlation_id, {**headers, **properties}) def get_broker_version(host="localhost", port=15672) -> str: broker_overview = _do_request(host, port, _overview_request_path) return broker_overview["rabbitmq_version"] def _do_request(host, port, request_path, do_post=False, body=None): sleep(2) session = requests.Session() session.mount("http://", HTTPAdapter(max_retries=3)) address, auth = f"http://{host}:{port}{request_path}", ("guest", "guest") with session: if not do_post: data = session.get(address, auth=auth) else: data = session.post(address, auth=auth, data=body) return data.json()
41.007299
117
0.672481
611
5,618
5.826514
0.222586
0.061798
0.050562
0.038202
0.266292
0.196067
0.170225
0.105618
0.105618
0.105618
0
0.009933
0.229441
5,618
136
118
41.308824
0.812428
0
0
0.070175
0
0
0.122998
0.016376
0
0
0
0
0.008772
1
0.052632
false
0
0.096491
0
0.210526
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7a77731700f270ddcae5f715cc1cc8dec54c3bd0
964
py
Python
Searching/Ternary Search/Python/TernarySearch.py
priyanshi2808/DSA
1e907e869a55049c6b5d9469d8d52bfce2add4f0
[ "MIT" ]
8
2021-10-14T16:31:54.000Z
2022-01-05T11:56:37.000Z
Searching/Ternary Search/Python/TernarySearch.py
priyanshi2808/DSA
1e907e869a55049c6b5d9469d8d52bfce2add4f0
[ "MIT" ]
55
2021-10-15T14:53:05.000Z
2021-12-21T07:29:00.000Z
Searching/Ternary Search/Python/TernarySearch.py
priyanshi2808/DSA
1e907e869a55049c6b5d9469d8d52bfce2add4f0
[ "MIT" ]
12
2021-10-14T12:13:22.000Z
2022-02-22T13:41:42.000Z
# Here, left = 0 and right = length of array - 1 def ternarySearch(ar , key , left , right): if left < right: inter = (right - left ) // 3 rightmid = right - inter leftmid = left +inter if (ar[rightmid] == key ): print( "Element found!Index:",rightmid ) return 0; elif ( ar[leftmid] == key ): print( "Element found!Index:",leftmid ) return 0; elif ( key < ar[rightmid] and key > ar[leftmid] ) : return ternarySearch( ar , key , leftmid , rightmid) elif ( key > ar[rightmid] ) : return ternarySearch( ar , key , rightmid , right) else: return ternarySearch( a , key , left , leftmid ) print( "Key not found!" ) return 0 # Sample Input : # Ar = [12 , 90 , 67 , 19 , 18] # Key = 19 # Output: # Element found!Index: 3
22.952381
64
0.48029
102
964
4.539216
0.343137
0.097192
0.116631
0.086393
0.107991
0
0
0
0
0
0
0.033392
0.409751
964
41
65
23.512195
0.780316
0.135892
0
0.105263
0
0
0.065375
0
0
0
0
0
0
1
0.052632
false
0
0
0
0.368421
0.157895
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7a7790db65580f59c5c2b4b7513f7c447480f699
1,114
py
Python
feature_engineering/featuretools.py
bukosabino/ml-utils
3f9379e0558a3db8f4b43b924da8fa30f6d42edb
[ "MIT" ]
2
2019-03-03T15:17:51.000Z
2021-01-31T15:35:21.000Z
feature_engineering/featuretools.py
bukosabino/ml-utils
3f9379e0558a3db8f4b43b924da8fa30f6d42edb
[ "MIT" ]
null
null
null
feature_engineering/featuretools.py
bukosabino/ml-utils
3f9379e0558a3db8f4b43b924da8fa30f6d42edb
[ "MIT" ]
1
2018-10-20T16:42:54.000Z
2018-10-20T16:42:54.000Z
import featuretools as ft def merge_featuretools(df_parent, df_related, parent_column, related_column, date_column): """Automated feature engineering More info: https://www.featuretools.com https://github.com/featuretools/featuretools https://docs.featuretools.com http://www.jmaxkanter.com/static/papers/DSAA_DSM_2015.pdf """ # Create the entityset es = ft.EntitySet('parent') # Add the entities to the entityset es = es.entity_from_dataframe('parent', df_parent, index=parent_column) es = es.entity_from_dataframe('relate', df_related, make_index=True, time_index=date_column, index='related_id') # Define the relationships relationship = ft.Relationship(es['parent'][parent_column], es['relate'][related_column]) # Add the relationships es = es.add_relationships([relationship]) # Deep feature synthesis feature_matrix, feature_defs = ft.dfs(entityset=es, target_entity='parent') return feature_matrix.reset_index()
32.764706
93
0.66158
128
1,114
5.546875
0.4375
0.050704
0.039437
0.039437
0.064789
0
0
0
0
0
0
0.004723
0.239677
1,114
33
94
33.757576
0.83353
0.29623
0
0
0
0
0.061415
0
0
0
0
0
0
1
0.083333
false
0
0.083333
0
0.25
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7a788cd072bdcfa375e6dc2987b8609a8045d6fa
6,234
py
Python
recommender/colaborative/prepare_data.py
DigasNikas/PyRecommender
fb056929bba45431a5fc98691332b9bf91e730bb
[ "MIT" ]
2
2017-05-27T15:06:04.000Z
2018-11-23T06:43:25.000Z
recommender/colaborative/prepare_data.py
DigasNikas/PyRecommender
fb056929bba45431a5fc98691332b9bf91e730bb
[ "MIT" ]
null
null
null
recommender/colaborative/prepare_data.py
DigasNikas/PyRecommender
fb056929bba45431a5fc98691332b9bf91e730bb
[ "MIT" ]
null
null
null
from pyspark import SQLContext from pyspark.sql.functions import lit from datetime import datetime def prepare_data(sc, months, output_path): sqlContext = SQLContext(sc) sqlContext.setConf('spark.sql.parquet.compression.codec', 'snappy') blacklist = [] blacklist_top50 = ['({})|'.format(x) for x in get_top50()] blacklist_filters = ['(.+\.{}.*)|'.format(x) for x in get_blackList()] blacklist.extend(blacklist_top50) blacklist.extend(blacklist_filters) blacklist = list(set(blacklist)) rx = ''.join(blacklist) rx = rx[:-1] # gets all user installs from the selected number of previous months excluding the current month df = get_files_from_s3(sqlContext, months) # select only the hash and explode the list of packages df_pkg = df.select( df['hash'].alias('hash'), df['pkg'].alias('package') ).drop_duplicates().cache() # remove incoherent packages like "android" rpkg = '.+\..+' df_pkg = df_pkg.filter(df_pkg['package'].rlike(rpkg)).cache() # filter blacklist packages and top 50 df_pkg_nosystemapps = df_pkg.filter(~df_pkg['package'].rlike(rx)).cache() # connects to database and filter packages with less than 500 downloads df_pkg_nosystemapps = filter_less_500_downloads(sqlContext, df_pkg_nosystemapps).cache() def toCSVLine(data): name = data[0] id = data[1] return "{},{}".format(name, id) # mapping of hashs and ID used for recommendations rdd_hashs = df_pkg_nosystemapps.select(df_pkg_nosystemapps['hash']).distinct().rdd.zipWithUniqueId().map( lambda x: (x[0][0], x[1] + 1)).cache() df_hashs = sqlContext.createDataFrame(rdd_hashs, ['hash', 'user_id']) rdd_hashs = rdd_hashs.map(toCSVLine) rdd_hashs.repartition(1).saveAsTextFile(output_path + "/hashs") rdd_hashs.unpersist() print("user hashs saved") # mapping of packages and ID used for recommendations rdd_packages = df_pkg_nosystemapps.select(df_pkg_nosystemapps['package']).distinct().rdd.zipWithUniqueId().map( lambda x: (x[0][0], x[1]+1)).cache() df_packages = sqlContext.createDataFrame(rdd_packages, ['package', 'app_id']) 
rdd_packages = rdd_packages.map(toCSVLine) rdd_packages.repartition(1).saveAsTextFile(output_path + "/apps") print("apps ID's saved") def toCSVLine_2(data): app_id = data[0] count = data[1] quo = data[2] return "{},{},{}".format(app_id, count, quo) # final dataframe to be sent to recommend engine df_data = df_pkg_nosystemapps.join(df_hashs, 'hash', 'left_outer').select('user_id', 'package').cache() df_data = df_data.join(df_packages, 'package', 'left_outer').select('user_id', 'app_id').cache() df_data = df_data.withColumn("rating", lit(1)).cache() df_data.rdd.map(toCSVLine_2).repartition(1).saveAsTextFile(output_path + "/dataset") print("dataset saved") # save apps histogram df_hist = get_app_histogram(df_data, df_packages) df_hist.rdd.map(toCSVLine_2).repartition(1).saveAsTextFile(output_path + "/histogram") print("apps histogram saved") return df_data.rdd def get_files_from_s3(sqlContext, amount_months): year = datetime.today().year month = datetime.today().month if month - amount_months >= 0: months = range(month - amount_months, month) year_and_month = ["year={}/month={}".format(year, m) for m in months] else: previous_year_months = [x for x in range(12 - abs(month - amount_months), 13)] this_year_months = [x for x in range(1, month)] year_and_month = ["year={}/month={}".format(year - 1, m) for m in previous_year_months] year_and_month = year_and_month + ["year={}/month={}".format(year, m) for m in this_year_months] day = '*' filename = '*' version = '1' filepath = ['{}/{}/{}/{}'.format(version, pair, day, filename) for pair in year_and_month] print("reading {}".format(filepath)) return sqlContext.read.parquet(*filepath) def filter_less_500_downloads(sqlContext, df_pkg_nosystemapps): u, p = ['user', 'password'] durl = 'url' dbta = 'table' psql_df = sqlContext.read.format('jdbc').options(url=durl, user=u, password=p, dbtable=dbta, driver='org.postgresql.Driver').load() psql_df = psql_df.drop(psql_df['added_timestamp']) df_pkg_nosystemapps = 
df_pkg_nosystemapps.join(psql_df, psql_df['data'] == df_pkg_nosystemapps['package']) \ .drop(psql_df['data']) dbta = 'table' psql_df = sqlContext.read.format('jdbc').options(url=durl, user=u, password=p, dbtable=dbta, driver='org.postgresql.Driver').load() psql_df = psql_df.drop(psql_df['id']) df_pkg_nosystemapps = df_pkg_nosystemapps.join(psql_df, psql_df['app_package'] == df_pkg_nosystemapps['id']) \ .drop(psql_df['app_package']) \ .drop(df_pkg_nosystemapps['id']) df_pkg_nosystemapps = df_pkg_nosystemapps.filter( df_pkg_nosystemapps['downloads'] > 500).drop(df_pkg_nosystemapps['downloads']) df_pkg_nosystemapps = df_pkg_nosystemapps.drop_duplicates() return df_pkg_nosystemapps def get_app_histogram(df_data, df_packages): total = df_data.count() df_hist = df_data.groupBy("app_id").count() # histogram df_hist = df_hist.withColumn("total", lit(total)) df_hist = df_hist.withColumn('percentage', (df_hist['count'] / df_hist['total'])*100) df_hist = df_hist.join(df_packages, 'app_id', 'left_outer').select('package', 'count', 'percentage') return df_hist def get_blackList(): blacklist_filters = ['list'] return blacklist_filters def get_top50(): blacklist_top50 = ['list'] return blacklist_top50
38.9625
115
0.623837
774
6,234
4.788114
0.205426
0.040475
0.105505
0.025634
0.401241
0.320022
0.286023
0.21047
0.174312
0.146249
0
0.012937
0.243664
6,234
159
116
39.207547
0.773065
0.076195
0
0.142857
0
0
0.104384
0.013396
0
0
0
0
0
1
0.071429
false
0.026786
0.026786
0
0.169643
0.044643
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7a7af5fdb993c98e171a62bd93ae82a0d3f1b8bd
291
py
Python
tests/globals/documents/misc_documents.py
RelevanceAI/RelevanceAI
a0542f35153d9c842f3d2cd0955d6b07f6dfc07b
[ "Apache-2.0" ]
21
2021-11-23T13:01:36.000Z
2022-03-23T03:45:30.000Z
tests/globals/documents/misc_documents.py
RelevanceAI/RelevanceAI
a0542f35153d9c842f3d2cd0955d6b07f6dfc07b
[ "Apache-2.0" ]
217
2021-11-23T00:11:01.000Z
2022-03-30T08:11:49.000Z
tests/globals/documents/misc_documents.py
RelevanceAI/RelevanceAI
a0542f35153d9c842f3d2cd0955d6b07f6dfc07b
[ "Apache-2.0" ]
4
2022-01-04T01:48:30.000Z
2022-02-11T03:19:32.000Z
import pytest from typing import List from tests.globals.constants import NUMBER_OF_DOCUMENTS from tests.globals.document import dataclass_document @pytest.fixture(scope="session") def dataclass_documents() -> List: return [dataclass_document() for _ in range(NUMBER_OF_DOCUMENTS)]
22.384615
69
0.80756
38
291
5.973684
0.552632
0.079295
0.140969
0
0
0
0
0
0
0
0
0
0.116838
291
12
70
24.25
0.883268
0
0
0
0
0
0.024055
0
0
0
0
0
0
1
0.142857
true
0
0.571429
0.142857
0.857143
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
1
1
0
0
4
7a7b176678afd25894ed7a7d41c1b8b41f6c77eb
6,711
py
Python
social-update.py
uditvashisht/SaralGyaanTwitterRedditBot
7e6238afd57383eb6a95df4395e23c868c5e4b2c
[ "MIT" ]
null
null
null
social-update.py
uditvashisht/SaralGyaanTwitterRedditBot
7e6238afd57383eb6a95df4395e23c868c5e4b2c
[ "MIT" ]
null
null
null
social-update.py
uditvashisht/SaralGyaanTwitterRedditBot
7e6238afd57383eb6a95df4395e23c868c5e4b2c
[ "MIT" ]
null
null
null
import tweepy import praw import prawcore import time import requests import logging import os import shutil import facebook import requests # pip install python-decouple from decouple import config # Login Credentials REDDIT_CLIENT_ID = config('REDDIT_CLIENT_ID') REDDIT_CLIENT_SECRET = config('REDDIT_CLIENT_SECRET') REDDIT_USERNAME = config('REDDIT_USERNAME') REDDIT_PASSWORD = config('REDDIT_PASSWORD') TWITTER_CONSUMER_KEY = config('TWITTER_CONSUMER_KEY') TWITTER_CONSUMER_SECRET = config('TWITTER_CONSUMER_SECRET') TWITTER_ACCESS_TOKEN = config('TWITTER_ACCESS_TOKEN') TWITTER_ACCESS_TOKEN_SECRET = config('TWITTER_ACCESS_TOKEN_SECRET') USER_AGENT = 'python:saralgyaan_social_updates:v1.0.0 (by /u/uditvashisht)' FACEBOOK_PAGE_ID = config('FACEBOOK_PAGE_ID') FACEBOOK_ACCESS_TOKEN = config('FACEBOOK_ACCESS_TOKEN') # Dictionary containing subreddits and tags SUBREDDIT_DICT = {'programmerhumor': ['progammer', 'programmerhumor', 'humor'], 'programmingmemes': ['programming', 'programmingmemes', 'programmerhumor'], 'xkcd': ['xkcd', 'xkcdcomics'] } current_dir = os.getcwd() logger = logging.getLogger(__name__) logger.setLevel(logging.INFO) file_handler = logging.FileHandler(f'{os.path.join(current_dir, "social-update.log")}') fmt = logging.Formatter('%(levelname)s : %(name)s : %(asctime)s : %(message)s') file_handler.setFormatter(fmt) logger.addHandler(file_handler) def auto_post_facebook(picture, message): """A function which auto-posts the photos with hastags on facebook. Requires -------- facebook: module pip install facebook-sdk, import facebook page_id : str Page ID of the facebook page. access_token : str Access token of facebook account. Can be obtained from https://developers.facebook.com/tools. Use this tutorial https://pythoncircle.com/post/666/automating-facebook-page-posts-using-python-script/ Parameters __________ message : str title and hashtags of the photo. picture: str Complete link of the header image. 
Posts _____ A post containing photo title and hashtags """ graph = facebook.GraphAPI(FACEBOOK_ACCESS_TOKEN) facebook_page_id = FACEBOOK_PAGE_ID #IF you want to post a status update # graph.put_object(facebook_page_id, "feed", message='test message') graph.put_photo(image=open(picture, 'rb'), message=message) def login_to_reddit(): """ This function log into to the reddit account and returns the Reddit Instance by interacting with Reddit's API through PRAW Parameters: ----------- None Returns: -------- A Reddit Instance """ try: logger.info('* Logging into Reddit Account') reddit = praw.Reddit(client_id=REDDIT_CLIENT_ID, client_secret=REDDIT_CLIENT_SECRET, password=REDDIT_PASSWORD, user_agent=USER_AGENT, username=REDDIT_USERNAME) logger.info('* Login successful') return reddit except: logger.info('* Login failed') def grab_new_image(url): """ This function grabs the image from the URL of the reddit post and save it as img.jpg Parameters: ----------- url : str URL of the subreddit containing the image Returns: -------- An Image """ logger.info('* Fetching image from the Reddit') try: response = requests.get(url) with open('img.jpg', 'wb') as image: image.write(response.content) image.close() logger.info('* Image saved successfully') except: logger.info('* Something went wrong while downloading image') def post_tweet(tweet_content): """ This function post the tweet update with the image Parameters: ----------- tweet_content : str Execute: -------- Post the tweet with the image """ try: logger.info('* Logging into twitter') auth = tweepy.OAuthHandler(TWITTER_CONSUMER_KEY, TWITTER_CONSUMER_SECRET) auth.set_access_token(TWITTER_ACCESS_TOKEN, TWITTER_ACCESS_TOKEN_SECRET) api = tweepy.API(auth) logger.info('* Login successful') tweet = tweet_content image_path = 'img.jpg' logger.info('* Posting on twitter') api.update_with_media(image_path, tweet) logger.info("* Successfully posted") except: logger.info('* Something went wrong while posting tweet') def main(sub_reddit, 
tags): """ This main function check the sub reddit for images, download the images using grab_new_image() and then tweet it using post_tweet() Parameters: ----------- sub_reddit : str Name of the sub reddit to check tags : list list of hashtags to be used """ reddit = login_to_reddit() try: for submission in reddit.subreddit(sub_reddit).hot(limit=8): if submission.stickied == False: logger.info("* Fetching submission from reddit") post_url = f'redd.it/{str(submission)}' title = submission.title tweet_content = f'{title} posted by {str(submission.author)} {post_url} #{" #".join(tags)}' url = submission.url if 'jpg' in url: grab_new_image(url) post_tweet(tweet_content) auto_post_facebook('img.jpg', f'{title} #{" #".join(tags)}') time.sleep(20) elif 'png' in url: grab_new_image(url) post_tweet(tweet_content) auto_post_facebook('img.jpg', f'{title} #{" #".join(tags)}') time.sleep(20) else: logger.info("* Not an image url") # exception handling except prawcore.exceptions.ServerError as e: logger.info(e) time.sleep(20) pass # excepts errors like rate limit except praw.exceptions.APIException as e: logger.info(e) time.sleep(60) # excepts other PRAW errors except praw.exceptions.PRAWException as e: logger.info(e) time.sleep(20) # excepts network connection errors except prawcore.exceptions.RequestException: logger.info("* Please check your network connection") logger.info("* Sleeping for 1 minute") time.sleep(60) if __name__ == "__main__": for key, value in SUBREDDIT_DICT.items(): main(key, value)
30.22973
139
0.627924
779
6,711
5.223363
0.288832
0.044237
0.026542
0.017695
0.147456
0.120177
0.101008
0.055542
0.043254
0.043254
0
0.004091
0.271495
6,711
221
140
30.366516
0.828186
0.252868
0
0.226087
0
0.008696
0.224087
0.043223
0
0
0
0
0
1
0.043478
false
0.026087
0.095652
0
0.147826
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7a7c1628ddb94ea8345017d325bb02a7c5bda430
237
py
Python
board.py
MennovDijk/bilge_bot
65920e063d9201d495047073c8de50e45f7d0823
[ "MIT" ]
5
2019-05-09T11:23:42.000Z
2022-02-06T02:39:33.000Z
board.py
MennovDijk/bilge_bot
65920e063d9201d495047073c8de50e45f7d0823
[ "MIT" ]
3
2020-04-27T17:25:35.000Z
2021-01-23T22:13:02.000Z
board.py
MennovDijk/bilge_bot
65920e063d9201d495047073c8de50e45f7d0823
[ "MIT" ]
null
null
null
class Board(): def __init__(self, score=None, move=None, board=None): self.score = score self.move = move self.board = board self.previous = None def __repr__(self): return str(self.move)
23.7
58
0.586498
30
237
4.366667
0.4
0.137405
0
0
0
0
0
0
0
0
0
0
0.303797
237
10
59
23.7
0.793939
0
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0
0
0.125
0.5
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
2
7a7d6087e717f659ba3bf75a8fbf15e172cbecdc
11,214
py
Python
models.py
AndrejsinD/Hopes-Wings-Database
9646890ac7c52440327129b2370cf062b099ded9
[ "MIT" ]
null
null
null
models.py
AndrejsinD/Hopes-Wings-Database
9646890ac7c52440327129b2370cf062b099ded9
[ "MIT" ]
null
null
null
models.py
AndrejsinD/Hopes-Wings-Database
9646890ac7c52440327129b2370cf062b099ded9
[ "MIT" ]
null
null
null
from peewee import *

# NOTE(review): credentials are hard-coded; move them to environment/config
# before deploying anywhere real.
database = MySQLDatabase('hopes_wings', **{'charset': 'utf8', 'use_unicode': True, 'host': 'localhost', 'user': 'root', 'password': 'david'})


class UnknownField(object):
    """Placeholder emitted by pwiz for columns it could not introspect."""

    def __init__(self, *_, **__):
        pass


class BaseModel(Model):
    """Base class binding every model to the hopes_wings database."""

    class Meta:
        database = database


class Donation(BaseModel):
    amount = DecimalField(column_name='Amount')
    comment = CharField(column_name='Comment', null=True)
    date = DateField(column_name='Date')
    donation_id = AutoField(column_name='DonationID')
    letter_sent = CharField(column_name='LetterSent')

    class Meta:
        table_name = 'Donation'


class Cash(BaseModel):
    amount = DecimalField(column_name='Amount')
    cash_id = AutoField(column_name='CashID')
    donation = ForeignKeyField(column_name='DonationID', field='donation_id', model=Donation)

    class Meta:
        table_name = 'Cash'


class CheckTable(BaseModel):
    amount = DecimalField(column_name='Amount')
    check_id = AutoField(column_name='CheckID')
    check_number = IntegerField(column_name='CheckNumber')
    donation = ForeignKeyField(column_name='DonationID', field='donation_id', model=Donation)

    class Meta:
        table_name = 'CheckTable'


class Donors(BaseModel):
    address = CharField(column_name='Address', null=True)
    city = CharField(column_name='City', null=True)
    comment = CharField(column_name='Comment', null=True)
    donor_id = AutoField(column_name='DonorID')
    donor_name = CharField(column_name='DonorName')
    email_address = CharField(column_name='EmailAddress', null=True)
    phone_number = IntegerField(column_name='PhoneNumber', null=True)
    state = CharField(column_name='State', null=True)
    zip = IntegerField(column_name='Zip', null=True)

    class Meta:
        table_name = 'donors'


class DonorsToDonations(BaseModel):
    dtd_id = AutoField(column_name='DTD_ID')
    donation = ForeignKeyField(column_name='DonationID', field='donation_id', model=Donation)
    donor = ForeignKeyField(column_name='DonorID', field='donor_id', model=Donors)

    class Meta:
        table_name = 'DonorsToDonations'


class Eft(BaseModel):
    amount = DecimalField(column_name='Amount')
    donation = ForeignKeyField(column_name='DonationID', field='donation_id', model=Donation)
    eftid = AutoField(column_name='EFTID')

    class Meta:
        table_name = 'EFT'


class Grant(BaseModel):
    amount = DecimalField(column_name='Amount')
    donation = ForeignKeyField(column_name='DonationID', field='donation_id', model=Donation)
    grant_id = AutoField(column_name='GrantID')
    grant_name = CharField(column_name='GrantName')

    class Meta:
        table_name = 'Grant'


class InKind(BaseModel):
    amount = DecimalField(column_name='Amount')
    donation = ForeignKeyField(column_name='DonationID', field='donation_id', model=Donation)
    in_kind_id = AutoField(column_name='InKindID')
    item_given = CharField(column_name='ItemGiven')

    class Meta:
        table_name = 'In Kind'


class PayPal(BaseModel):
    amount = DecimalField(column_name='Amount')
    donation = ForeignKeyField(column_name='DonationID', field='donation_id', model=Donation)
    pay_pal_id = AutoField(column_name='PayPalID')
    receipt_number = CharField(column_name='ReceiptNumber', null=True)

    class Meta:
        table_name = 'PayPal'


# All raw queries below are parameterized (%s placeholders with a params
# tuple) instead of string concatenation, to prevent SQL injection.
# NOTE(review): several joins read "INNER JOIN x ON table.Column" with no
# comparison in the ON clause; that matches the original SQL but looks like
# it always evaluates truthy — confirm against the intended join keys.

# Practice query
def getDonorsByID(db, donorID):
    """Return every column of the donor row with the given DonorID."""
    return db.execute_sql("SELECT * FROM donors WHERE DonorID = %s", (donorID,)).fetchall()


#This query shows the amount donated in a particular year by a donor with DonorID ___
def AmountByYearByDonorID(db, donorID):
    # Year bounds are hard-coded to 2020, as in the original.
    return db.execute_sql(
        "SELECT SUM(Donation.Amount) FROM donors "
        "INNER JOIN DonorsToDonations ON donors.DonorID "
        "INNER JOIN Donation ON Donation.DonationID "
        "WHERE Donation.Date >= '2020-01-01 00:00:00' and Date < '2021-01-01 00:00:00' "
        "and donors.DonorID = %s", (donorID,)).fetchall()


#This query shows the total amount of donation made by a donor with DonorID ___ since the beginning of time
def TotalAmountbyAlltimeByDonorID(db, donorID):
    return db.execute_sql(
        "SELECT SUM(Donation.Amount) FROM donors "
        "INNER JOIN DonorsToDonations ON donors.DonorID "
        "INNER JOIN Donation ON Donation.DonationID "
        "WHERE donors.DonorID = %s", (donorID,)).fetchall()


#This query shows the number of donors for a particular year
def NumberOfDonorsByYear(db):
    return db.execute_sql(
        "SELECT DISTINCT COUNT(donors.DonorID) FROM donors "
        "INNER JOIN DonorsToDonations ON donors.DonorID "
        "INNER JOIN Donation ON Donation.DonationID "
        "WHERE Donation.Date >= '2020-01-01 00:00:00' "
        "and Date < '2021-01-01 00:00:00'").fetchall()


#This query list all donations and its information (Date, comment, amount, type, donor) for all time (from the beginning of time)
def AllDonationsByAlltime(db):
    return db.execute_sql(
        "SELECT * FROM `Donation` INNER JOIN DonorsToDonations "
        "WHERE DonorsToDonations.DonationID = Donation.DonationID").fetchall()


#This query list all donations and its information (Date, comment, amount, type, donor) for a particular year
def AllDonationsByYear(db):
    return db.execute_sql(
        "SELECT * FROM `Donation` INNER JOIN DonorsToDonations "
        "WHERE DonorsToDonations.DonationID = Donation.DonationID "
        "AND Donation.Date >= '2020-01-01 00:00:00' "
        "and Date < '2021-01-01 00:00:00'").fetchall()


#This query shows whether thank you letter is sent for a donation
def LetterSentCheck(db, donationID):
    # FIX: the original concatenated "WHERE DonationID" + id with no "=",
    # producing invalid SQL like "WHERE DonationID5".
    return db.execute_sql(
        "SELECT Donation.LetterSent FROM Donation WHERE DonationID = %s",
        (donationID,)).fetchall()


#This query shows the total amount donated since the beginning of time (excluding grants)
def TotalAmountDonatedByAlltime(db):
    return db.execute_sql("SELECT SUM(Amount) FROM `Donation`").fetchall()


#This query shows the total amount donated for a particular year (ex: 2019) (excluding grant)
def TotalAmountDonatedByYear(db):
    return db.execute_sql(
        "SELECT SUM(Amount) FROM `Donation` "
        "WHERE Date >= '2020-01-01 00:00:00' "
        "and Date < '2021-01-01 00:00:00'").fetchall()


#This query shows the total amount of grant received since the beginning of time
def TotalGrantByAlltime(db):
    return db.execute_sql("SELECT SUM(Amount) FROM `Grant`").fetchall()


#This query shows the total amount of grant received for one year
def TotalGrantByYear(db):
    # NOTE(review): as in the original, this selects rows (not SUM) from
    # Donation filtered by DonationType='Grant' — confirm that column exists
    # and whether a SUM over `Grant` was intended instead.
    return db.execute_sql(
        "SELECT * FROM `Donation` "
        "WHERE Date >= '2020-01-01 00:00:00' and Date < '2021-01-01 00:00:00' "
        "and DonationType='Grant'").fetchall()


#This query list all donors and their information (DonorName, Address, City, State, Zip, Comment, Email, Phone) for all time (from the beginning of time)
def AllDonorInformationByAlltime(db):
    return db.execute_sql(
        "SELECT DonorName,Address,City,State,Zip,Comment,EmailAddress,PhoneNumber "
        "FROM donors").fetchall()


#This query list all donors and their information (DonorName, Address, City, State, Zip, Comment, Email, Phone) for this current year only
def AllDonorInformationByYear(db):
    return db.execute_sql(
        "SELECT DISTINCT donors.DonorName,donors.Address,donors.City,donors.State,"
        "donors.Zip,donors.Comment,donors.EmailAddress,donors.PhoneNumber "
        "FROM donors "
        "INNER JOIN DonorsToDonations AS dtd ON dtd.DonorID "
        "INNER JOIN Donation ON Donation.DonationID "
        "WHERE Donation.Date >= '2020-01-01 00:00:00' "
        "and Donation.Date < '2021-01-01 00:00:00'").fetchall()


#This query shows the number of donors for all time (from the beginning of time)
def AllDonorAllTime(db):
    return db.execute_sql("SELECT COUNT(*) FROM donors").fetchall()


#This query update data for a donor - DonorName, Address, City, State, Zip, Comment, Email, Phone)
def UpdateDonor(db, donorID):
    """Set EmailAddress to a placeholder and clear the other optional contact
    fields for donor *donorID*.

    FIXES vs. original: (1) a missing '+' made the last statement a
    SyntaxError; (2) each call issued a compound "SELECT ...;UPDATE ..."
    string whose SELECT result was discarded — only the UPDATEs are kept;
    (3) the email value was quoted with MySQL identifier backticks; (4) the
    schema column is PhoneNumber, not Phone.
    """
    db.execute_sql("UPDATE donors SET EmailAddress = 'hello@gmail.com' WHERE DonorID = %s", (donorID,))
    # Column names come from this fixed whitelist, so the format() is safe.
    for column in ("PhoneNumber", "Address", "City", "State", "Zip", "Comment"):
        db.execute_sql("UPDATE donors SET {} = NULL WHERE DonorID = %s".format(column), (donorID,))


#This query delete a donor
def DeleteDonor(db, donorID):
    db.execute_sql("DELETE FROM donors WHERE DonorID = %s", (donorID,))


#This query shows the date of the last thank you letter sent for a particular donor
def LastLetterDateByDonorID(db, donorID):
    # FIX: the original signature omitted donorID (NameError) and
    # concatenated str(donorID) + "AND ..." with no separating space.
    return db.execute_sql(
        "SELECT DonorID, Donation.LetterSent, MAX(Donation.Date) AS date_last_donation "
        "FROM Donation INNER JOIN DonorsToDonations AS dtd ON dtd.DonorID "
        "WHERE DonorID = %s AND Donation.LetterSent = 'yes' "
        "GROUP BY Donation.LetterSent", (donorID,)).fetchall()


#This query shows all the donation made by the donor which thank you letter had not been sent to, as well as the date, amount, and type of each of those donations
def AllDonationsByDonorNoLetter(db):
    return db.execute_sql(
        "SELECT * ,Date,Amount,DonationType FROM `Donation` "
        "WHERE LetterSent = 'NO'").fetchall()


#This query shows the donor name, address, city, state, zip, email, and phone for a donor whose DonorID is _____
#This query shows the date, amount, and type of the donation, for the donations selected by the user to send thank you letter to, and the sum of all the donations selected
#This query list all grants and its information (Date, grant name, amount, comment) for a particular year
#This query list all grants and its information (Date, grant name, amount, funding source?, comment) for all time
#This query update data for a grant - grant name, amount)
#This query inserts a new grant - grant name, amount)
#This query list all donations and its information (Date, comment, amount, type) made by a particular donor with DonorID ___
#This query shows the 5 top donors for all time (from the beginning of time)
#This query shows the 5 top donors for this year only
#This query update data for a donation - Address, City, State, Zip, Comment, Email, Phone, type, amount, comments)
#This query inserts a new donation (Data: Date, DonorName, Address, City, State, Zip, Comment, Email, Phone, type, amount, comments) - if the donor doesn't exist, make a new donor; otherwise, if donor already exists, simply add a donation to the donor.


if __name__ == "__main__":
    # FIX: this demo originally ran at import time, *above* the definition of
    # TotalAmountbyAlltimeByDonorID, so importing the module raised NameError.
    database.connect()
    #query = getDonorsByID(database, 10)
    query = TotalAmountbyAlltimeByDonorID(database, 1)
    print(query[0])
    database.close()
55.514851
370
0.753077
1,539
11,214
5.408707
0.136452
0.046852
0.034599
0.047573
0.627583
0.592143
0.527751
0.503604
0.466482
0.430082
0
0.019415
0.145711
11,214
201
371
55.791045
0.848956
0.256376
0
0.216667
0
0.125
0.43987
0.065246
0
0
0
0
0
0
null
null
0.016667
0.008333
null
null
0.008333
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
7a7eeca58b940b9dce392dd95136944747af3a0f
2,553
py
Python
youths/management/commands/import_youth_data.py
City-of-Helsinki/youth-membership
36f5324fa7444753d49fb476e71b09cc6e842dc2
[ "MIT" ]
null
null
null
youths/management/commands/import_youth_data.py
City-of-Helsinki/youth-membership
36f5324fa7444753d49fb476e71b09cc6e842dc2
[ "MIT" ]
31
2020-07-02T11:26:39.000Z
2022-03-12T00:50:49.000Z
youths/management/commands/import_youth_data.py
City-of-Helsinki/youth-membership
36f5324fa7444753d49fb476e71b09cc6e842dc2
[ "MIT" ]
null
null
null
import json

from django.contrib.auth import get_user_model
from django.core import serializers
from django.core.management.base import BaseCommand
from django.db import transaction
from django.db.models.signals import post_save
from helusers.models import ADGroup, ADGroupMapping
from sequences.models import Sequence

from youths.models import YouthProfile
from youths.signals import generate_membership_number
from youths.utils import generate_admin_group

User = get_user_model()


class Command(BaseCommand):
    """Destructively replace local youth data with a JSON export.

    WARNING: deletes all YouthProfile rows, all non-superuser users, and all
    AD groups before importing — this is a full-replace import, not a merge.
    """

    help = "Import youth data from a JSON file created using the open-city-profile backend's export_youth_data command."

    def add_arguments(self, parser):
        # nargs="+" accepts several paths, but only the first is used below.
        parser.add_argument("filename", nargs="+", type=str)

    def handle(self, *args, **kwargs):
        filename = kwargs["filename"][0]
        with open(filename, "r") as infile:
            data = json.load(infile)
        # Stop membership numbers from being (re)generated while saving the
        # imported profiles — the export already contains them.
        post_save.disconnect(generate_membership_number, sender=YouthProfile)
        # One transaction: either the whole replace-import lands or none of it.
        with transaction.atomic():
            YouthProfile.objects.all().delete()
            User.objects.exclude(is_superuser=True).delete()
            ADGroup.objects.all().delete()
            # Monkey-patch natural-key lookups so the deserializer can resolve
            # foreign keys by uuid/name/id instead of primary key.
            User.objects.get_by_natural_key = lambda uuid: User.objects.get(uuid=uuid)
            ADGroup.objects.get_by_natural_key = lambda name: ADGroup.objects.get(
                name=name
            )
            YouthProfile.objects.get_by_natural_key = (
                lambda uuid: YouthProfile.objects.get(id=uuid)
            )
            max_membership_number = 0
            # Round-trip through json.dumps because serializers.deserialize
            # expects a JSON string, while ``data`` is already parsed.
            for obj in serializers.deserialize("json", json.dumps(data)):
                obj.save()
                if obj.object.__class__ == YouthProfile:
                    # Membership numbers are zero-padded strings; track the
                    # numeric maximum seen during the import.
                    membership_number = int(obj.object.membership_number.lstrip("0"))
                    if membership_number > max_membership_number:
                        max_membership_number = membership_number
            # Continue the membership-number sequence after the imported max.
            Sequence.objects.filter(name="membership_number").update(
                last=max_membership_number
            )
            # Imported approval tokens are stale; blank them all.
            YouthProfile.objects.update(approval_token="")
            admin_group = generate_admin_group()
            for ad_group in ADGroup.objects.all():
                ADGroupMapping.objects.create(group=admin_group, ad_group=ad_group)
        self.stdout.write(
            self.style.SUCCESS(
                # NOTE(review): this message reads "... and N from (unknown)" —
                # it looks truncated (likely meant "N youth profiles from
                # <filename>"); confirm the intended wording upstream.
                f"Successfully read {get_user_model().objects.count()} users and "
                f"{YouthProfile.objects.count()} from (unknown)"
            )
        )
37
120
0.647865
288
2,553
5.559028
0.388889
0.109931
0.04747
0.035603
0.12055
0.057464
0.039975
0
0
0
0
0.001598
0.264787
2,553
68
121
37.544118
0.851359
0
0
0
0
0.019231
0.100274
0.025069
0
0
0
0
0
1
0.038462
false
0
0.230769
0
0.307692
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7a7f11e6c6a877151c17c25eab1dd510ce82e20f
2,412
py
Python
1_screen_pipeline/03_peak_intersection/pcommon.py
weng-lab/SCREEN
e8e7203e2f9baa2de70e2f75bdad3ae24b568367
[ "MIT" ]
5
2020-07-30T02:35:20.000Z
2020-12-24T01:26:47.000Z
1_screen_pipeline/03_peak_intersection/pcommon.py
weng-lab/SCREEN
e8e7203e2f9baa2de70e2f75bdad3ae24b568367
[ "MIT" ]
6
2021-03-04T10:30:11.000Z
2022-03-16T16:47:47.000Z
1_screen_pipeline/03_peak_intersection/pcommon.py
weng-lab/SCREEN
e8e7203e2f9baa2de70e2f75bdad3ae24b568367
[ "MIT" ]
2
2020-12-08T10:05:02.000Z
2022-03-10T09:41:19.000Z
# SPDX-License-Identifier: MIT # Copyright (c) 2016-2020 Michael Purcaro, Henry Pratt, Jill Moore, Zhiping Weng from __future__ import print_function import sys import os import gzip import json sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), "../../../metadata/utils")) from utils import AddPath, Utils, Timer, printt, printWroteNumLines AddPath(__file__, '../../common/') from common import printr, printt def doIntersection(cres, others): try: return [p.rstrip().split("\t")[4] for p in Utils.runCmds([ "bedtools", "intersect", "-a", cres, "-b", others, "-wa" ])] except: print("pcommon$doIntersection: failed to intersect %s with %s" % (cres, others), file=sys.stderr) def runIntersectJob(jobargs, bedfnp): if not os.path.exists(jobargs["bed"]["fnp"]): print("pcommon$runIntersectJob: missing bed %s; cannot intersect" % jobargs["bed"]["fnp"], file=sys.stderr) return None ret = [] printr("pcommon$runIntersectJob: (exp %d of %d)" % (jobargs["i"], jobargs["total"]), "intersecting", jobargs["etype"], jobargs["label"]) accessions = doIntersection(bedfnp, jobargs["bed"]["fnp"]) if accessions is None: print("pcommon$runIntersectJob: warning: unable to intersect REs with bed %s" % jobargs["bed"]["fnp"], file=sys.stderr) else: ret.append((jobargs["etype"], jobargs["label"], jobargs["bed"]["fileID"], accessions)) return ret def processResults(results, outFnp): tfImap = {} fileJsons = [] for fileJson, accessions in results: if not accessions: continue for etype, label, fileID, accs in accessions: for acc in accs: if acc not in tfImap: tfImap[acc] = {"tf": {}, "histone": {}} if label not in tfImap[acc][etype]: tfImap[acc][etype][label] = [] tfImap[acc][etype][label].append(fileID) fileJsons += fileJson printt("completed hash merge") with gzip.open(outFnp, 'w') as f: for k, v in tfImap.iteritems(): f.write('\t'.join([k, json.dumps(v["tf"]), json.dumps(v["histone"]) ]) + '\n') printt("wrote", outFnp)
33.5
110
0.569652
271
2,412
5.02214
0.431734
0.036738
0.038207
0.024982
0.038207
0.038207
0
0
0
0
0
0.00519
0.281095
2,412
71
111
33.971831
0.7797
0.044362
0
0.054545
0
0
0.173837
0.051282
0
0
0
0
0
1
0.054545
false
0
0.127273
0
0.236364
0.163636
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7a8001c534cd737e89955f0347ad32c0a03dee61
2,610
py
Python
finliveapp/migrations/0020_auto_20211110_1019.py
FinLiveRI/FinLiveApp
c5634c1da2a4081119890df7674c1f170f106e7f
[ "MIT" ]
null
null
null
finliveapp/migrations/0020_auto_20211110_1019.py
FinLiveRI/FinLiveApp
c5634c1da2a4081119890df7674c1f170f106e7f
[ "MIT" ]
null
null
null
finliveapp/migrations/0020_auto_20211110_1019.py
FinLiveRI/FinLiveApp
c5634c1da2a4081119890df7674c1f170f106e7f
[ "MIT" ]
null
null
null
# Generated by Django 3.2.4 on 2021-11-10 10:19 from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ ('finliveapp', '0019_remove_organization_apikey'), ] operations = [ migrations.RenameField( model_name='gasmeasurement', old_name='equipmentid', new_name='equipment', ), migrations.RemoveField( model_name='weight', name='equipment_id', ), migrations.AddField( model_name='barn', name='active', field=models.BooleanField(default=True), ), migrations.AddField( model_name='breed', name='active', field=models.BooleanField(default=True), ), migrations.AddField( model_name='equipment', name='active', field=models.BooleanField(default=True), ), migrations.AddField( model_name='equipment', name='barn', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='finliveapp.barn'), ), migrations.AddField( model_name='equipment', name='uuid', field=models.UUIDField(default=None, null=True), ), migrations.AddField( model_name='laboratory', name='active', field=models.BooleanField(default=True), ), migrations.AddField( model_name='milkingsystem', name='active', field=models.BooleanField(default=True), ), migrations.AddField( model_name='organization', name='active', field=models.BooleanField(default=True), ), migrations.AddField( model_name='seedingtype', name='active', field=models.BooleanField(default=True), ), migrations.AddField( model_name='weight', name='equipment', field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='finliveapp.equipment'), ), migrations.AlterField( model_name='equipment', name='equipmentid', field=models.CharField(max_length=128), ), migrations.AlterField( model_name='equipment', name='id', field=models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID'), ), ]
31.071429
120
0.557854
230
2,610
6.213043
0.304348
0.088174
0.160952
0.188943
0.587124
0.53254
0.445766
0.445766
0.445766
0.445766
0
0.012493
0.325287
2,610
83
121
31.445783
0.798978
0.017241
0
0.61039
1
0
0.120172
0.012095
0
0
0
0
0
1
0
false
0
0.025974
0
0.064935
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
7a8034f5cd14516baa13647e9508787ee3e7837b
18,882
py
Python
nested/train_vgg.py
yingyichen-cyy/Nested-Co-teaching
8b7e3ed02d8994d93dcb2011340fe28ba6012283
[ "MIT" ]
39
2021-04-29T08:36:59.000Z
2022-02-26T03:53:48.000Z
nested/train_vgg.py
yingyichen-cyy/Nested-Co-teaching
8b7e3ed02d8994d93dcb2011340fe28ba6012283
[ "MIT" ]
1
2021-05-19T07:53:53.000Z
2021-09-24T09:00:45.000Z
nested/train_vgg.py
yingyichen-cyy/Nested-Co-teaching
8b7e3ed02d8994d93dcb2011340fe28ba6012283
[ "MIT" ]
3
2021-05-14T06:53:19.000Z
2021-08-04T13:44:24.000Z
import torch
import torch.nn as nn
import torch.optim as optim
from torch.optim.lr_scheduler import MultiStepLR
import json
import os
import argparse
import utils
from model import vgg
import itertools
import numpy as np
import random
import torchvision
import torchvision.transforms as transforms
from torch.utils.data import DataLoader
from torchvision.datasets import ImageFolder
import torch.nn.functional as F


### ------------------------------------ Dataloader -------------------------------------- ###
def get_dataloader(dataset, train_dir, val_dir, batchsize):
    """Build ImageFolder train/val loaders; return (trainloader, valloader, nb_cls)."""
    if dataset == 'Animal10N':
        nb_cls = 10
        # transformation of the training set
        transform_train = transforms.Compose([transforms.ToTensor()])
        # transformation of the validation set
        transform_test = transforms.Compose([transforms.ToTensor()])
    else:
        # Robustness fix: the original silently fell through and later
        # crashed with a NameError on `nb_cls`.
        raise ValueError('Unsupported dataset: {}'.format(dataset))

    trainloader = DataLoader(ImageFolder(train_dir, transform_train),
                             batch_size=batchsize, shuffle=True, drop_last=True,
                             num_workers=4, pin_memory=True)
    valloader = DataLoader(ImageFolder(val_dir, transform_test),
                           batch_size=batchsize, shuffle=False, drop_last=False,
                           num_workers=4, pin_memory=True)
    return trainloader, valloader, nb_cls
### --------------------------------------------------------------------------------------------


### ------------------------------------ Distribution -------------------------------------- ###
def GaussianDist(mu, std, N):
    """Discrete Gaussian over channel counts 1..N, normalized to sum to 1."""
    # Vectorized (was a Python list comprehension); same values.
    dist = np.exp(-(((np.arange(1, N + 1) - mu) / std) ** 2))
    return dist / np.sum(dist)
### ---------------------------------------------------------------------------------------------


### ------------------------ Test with Nested (iterate all possible K) --------------------- ###
def TestNested(epoch, best_acc, best_k, net_feat, net_cls, valloader, out_dir, mask_feat_dim):
    """Evaluate every nested dimension K; save the checkpoint when best acc improves.

    Returns (best_acc, acc, best_k). Caller is expected to wrap this in
    torch.no_grad().
    """
    net_feat.eval()
    net_cls.eval()

    # Per-K running count of correct predictions over the whole val set.
    true_pred = torch.zeros(len(mask_feat_dim)).cuda()
    nb_sample = 0

    for batchIdx, (inputs, targets) in enumerate(valloader):
        inputs = inputs.cuda()
        targets = targets.cuda()
        feature = net_feat(inputs)

        outputs = []
        for i in range(len(mask_feat_dim)):
            feature_mask = feature * mask_feat_dim[i]
            outputs.append(net_cls(feature_mask).unsqueeze(0))
        outputs = torch.cat(outputs, dim=0)

        _, pred = torch.max(outputs, dim=2)
        targets = targets.unsqueeze(0).expand_as(pred)
        true_pred = true_pred + torch.sum(pred == targets, dim=1).type(torch.cuda.FloatTensor)
        nb_sample += len(inputs)

    # The tiny 1e-5 * K penalty breaks accuracy ties in favor of smaller K.
    acc, k = torch.max((true_pred / nb_sample - 1e-5 * torch.arange(len(mask_feat_dim)).type_as(true_pred)), dim=0)
    acc, k = acc.item(), k.item()

    msg = '\nNested ... Epoch {:d}, Acc {:.3f} %, K {:d} (Best Acc {:.3f} %)'.format(epoch, acc * 100, k, best_acc * 100)
    print (msg)

    # save checkpoint
    if acc > best_acc:
        msg = 'Best Performance improved from {:.3f} --> {:.3f}'.format(best_acc, acc)
        print(msg)
        print ('Saving Best!!!')
        param = {'feat': net_feat.state_dict(),
                 'cls': net_cls.state_dict(),
                 }
        torch.save(param, os.path.join(out_dir, 'netBest.pth'))
        best_acc = acc
        best_k = k

    return best_acc, acc, best_k
### --------------------------------------------------------------------------------------------


### --------------- Test standard (used for model w/o nested, baseline, dropout) ------------###
def TestStandard(epoch, best_acc, best_k, net_feat, net_cls, valloader, out_dir, mask_feat_dim):
    """Plain (non-nested) evaluation; returns (best_acc, acc, len(mask_feat_dim))."""
    net_feat.eval()
    net_cls.eval()

    true_pred = torch.zeros(1).cuda()
    nb_sample = 0

    for batchIdx, (inputs, targets) in enumerate(valloader):
        inputs = inputs.cuda()
        targets = targets.cuda()
        feature = net_feat(inputs)
        outputs = net_cls(feature)
        _, pred = torch.max(outputs, dim=1)
        true_pred = true_pred + torch.sum(pred == targets).type(torch.cuda.FloatTensor)
        nb_sample += len(inputs)

    acc = true_pred / nb_sample
    acc = acc.item()

    msg = 'Standard ... Epoch {:d}, Acc {:.3f} %, (Best Acc {:.3f} %)'.format(epoch, acc * 100, best_acc * 100)
    print (msg)

    # save checkpoint
    if acc > best_acc:
        msg = 'Best Performance improved from {:.3f} --> {:.3f}'.format(best_acc * 100, acc * 100)
        print (msg)
        print ('Saving Best!!!')
        param = {'feat': net_feat.state_dict(),
                 'cls': net_cls.state_dict(),
                 }
        torch.save(param, os.path.join(out_dir, 'netBest.pth'))
        best_acc = acc

    return best_acc, acc, len(mask_feat_dim)
### --------------------------------------------------------------------------------------------


### -------------------------------------- Training --------------------------------------- ###
def Train(epoch, optimizer, net_feat, net_cls, trainloader, criterion, dist1, dist2,
          mask_feat_dim, alter_train, freeze_bn):
    """One training epoch; returns (loss avg, top1 avg, top5 avg).

    dist1/dist2 are the nested sampling distributions for the feature net
    and the classifier respectively (None disables nesting on that side).
    """
    msg = '\nEpoch: {:d}'.format(epoch)
    print (msg)
    net_feat.train(freeze_bn = freeze_bn)
    net_cls.train()

    losses = utils.AverageMeter()
    top1 = utils.AverageMeter()
    top5 = utils.AverageMeter()

    for batchIdx, (inputs, targets) in enumerate(trainloader):
        inputs = inputs.cuda()
        targets = targets.cuda()
        # Shadowing fix: loop variable renamed from `optim`, which hid the
        # `torch.optim as optim` module import.
        for opt in optimizer:
            opt.zero_grad()

        # whether to use alternative training for the nested mode
        if alter_train:
            alter = random.randint(0, 1)
        else:
            alter = None

        if dist1 is not None:
            if alter == 0 or alter is None:
                k1 = np.random.choice(range(len(mask_feat_dim)), p=dist1)
                mask1 = mask_feat_dim[k1]
            else:
                # train both nested layers
                mask1 = mask_feat_dim[-1]
        else:
            mask1 = mask_feat_dim[-1]

        feature = net_feat(inputs, mask1)

        if dist2 is not None:
            if alter == 1 or alter is None:
                k2 = np.random.choice(range(len(mask_feat_dim)), p=dist2)
                mask2 = mask_feat_dim[k2]
                feature_masked = feature * mask2
            else:
                feature_masked = feature
        else:
            feature_masked = feature

        outputs = net_cls(feature_masked)
        loss = criterion(outputs, targets)
        loss.backward()
        for opt in optimizer:
            opt.step()

        acc1, acc5 = utils.accuracy(outputs, targets, topk=(1, 5))
        losses.update(loss.item(), inputs.size()[0])
        top1.update(acc1[0].item(), inputs.size()[0])
        top5.update(acc5[0].item(), inputs.size()[0])
        msg = 'Loss: {:.3f} | Top1: {:.3f}% | Top5: {:.3f}%'.format(losses.avg, top1.avg, top5.avg)
        utils.progress_bar(batchIdx, len(trainloader), msg)

    return losses.avg, top1.avg, top5.avg
### --------------------------------------------------------------------------------------------


### ------------------------------------ Lr Warm Up --------------------------------------- ###
def LrWarmUp(warmUpIter, lr, optimizer, net_feat, net_cls, trainloader, criterion,
             dist1, dist2, mask_feat_dim, alter_train, freeze_bn):
    """Linearly ramp the learning rate from ~0 to `lr` over `warmUpIter` iterations."""
    nbIter = 0
    while nbIter < warmUpIter:
        net_feat.train(freeze_bn = freeze_bn)
        net_cls.train()
        losses = utils.AverageMeter()
        top1 = utils.AverageMeter()
        top5 = utils.AverageMeter()

        for batchIdx, (inputs, targets) in enumerate(trainloader):
            nbIter += 1
            if nbIter == warmUpIter:
                break
            lrUpdate = nbIter / float(warmUpIter) * lr
            for opt in optimizer:
                for g in opt.param_groups:
                    g['lr'] = lrUpdate

            inputs = inputs.cuda()
            targets = targets.cuda()
            for opt in optimizer:
                opt.zero_grad()

            # whether to use alternative training for the nested mode
            if alter_train:
                alter = random.randint(0, 1)
            else:
                # train both nested layers
                alter = None

            if dist1 is not None:
                if alter == 0 or alter is None:
                    k1 = np.random.choice(range(len(mask_feat_dim)), p=dist1)
                    mask1 = mask_feat_dim[k1]
                else:
                    mask1 = mask_feat_dim[-1]
            else:
                mask1 = mask_feat_dim[-1]

            feature = net_feat(inputs, mask1)

            if dist2 is not None:
                if alter == 1 or alter is None:
                    k2 = np.random.choice(range(len(mask_feat_dim)), p=dist2)
                    mask2 = mask_feat_dim[k2]
                    feature_masked = feature * mask2
                else:
                    feature_masked = feature
            else:
                feature_masked = feature

            outputs = net_cls(feature_masked)
            loss = criterion(outputs, targets)
            loss.backward()
            for opt in optimizer:
                opt.step()

            acc1, acc5 = utils.accuracy(outputs, targets, topk=(1, 5))
            losses.update(loss.item(), inputs.size()[0])
            top1.update(acc1[0].item(), inputs.size()[0])
            top5.update(acc5[0].item(), inputs.size()[0])
            msg = 'Loss: {:.3f} | Lr : {:.5f} | Top1: {:.3f}% | Top5: {:.3f}%'.format(losses.avg, lrUpdate, top1.avg, top5.avg)
            utils.progress_bar(batchIdx, len(trainloader), msg)
### --------------------------------------------------------------------------------------------


#-----------------------------------------------------------------------------------------------
#-----------------------------------------------------------------------------------------------
########################################-- MAIN FUNCTION --#####################################
#-----------------------------------------------------------------------------------------------
#-----------------------------------------------------------------------------------------------
def main(gpu, arch, vgg_dropout, out_dir, dataset, train_dir, val_dir, warmUpIter, lr,
         nbEpoch, batchsize, momentum=0.9, weightDecay = 5e-4, lrSchedule = (200, 300),
         lr_gamma=0.1, mu=0, nested1=1.0, nested2=1.0, alter_train=False, resumePth=None,
         freeze_bn=False, pretrained=False):
    """Full training entry point: warm-up, nbEpoch epochs, checkpointing.

    Note: `lrSchedule` default changed from a mutable list to an
    equivalent tuple (mutable-default-argument pitfall).
    """
    best_acc = 0  # best test accuracy
    os.environ['CUDA_VISIBLE_DEVICES'] = gpu
    trainloader, valloader, nb_cls = get_dataloader(dataset, train_dir, val_dir, batchsize)

    # feature net + classifier net (a linear layer)
    net_feat = vgg.NetFeat(arch = arch, pretrained = pretrained, dataset = dataset, vgg_dropout = vgg_dropout)
    net_cls = vgg.NetClassifier(feat_dim = net_feat.feat_dim, nb_cls = nb_cls)
    net_feat.cuda()
    net_cls.cuda()
    feat_dim = net_feat.feat_dim
    best_k = feat_dim

    # generate mask: mask_feat_dim[i] keeps the first i+1 feature channels
    mask_feat_dim = []
    for i in range(feat_dim):
        tmp = torch.cuda.FloatTensor(1, feat_dim).fill_(0)
        tmp[:, : (i + 1)] = 1
        mask_feat_dim.append(tmp)

    # distribution and test function
    dist1 = GaussianDist(mu, nested1, feat_dim) if nested1 > 0 else None
    dist2 = GaussianDist(mu, nested2, feat_dim) if nested2 > 0 else None
    Test = TestNested if (nested1 > 0) or (nested2 > 0) else TestStandard

    # load model
    if resumePth:
        param = torch.load(resumePth)
        net_feat.load_state_dict(param['feat'])
        print ('Loading feature weight from {}'.format(resumePth))
        net_cls.load_state_dict(param['cls'])
        print ('Loading classifier weight from {}'.format(resumePth))

    # output dir + loss + optimizer
    if not os.path.isdir(out_dir):
        os.mkdir(out_dir)
    criterion = nn.CrossEntropyLoss()
    # Bug fix: the original read `args.momentum` / `args.weightDecay` here,
    # i.e. it silently ignored this function's own `momentum` and
    # `weightDecay` parameters and crashed when main() was called without a
    # global `args` in scope. (itertools.chain(*[p]) was a no-op wrapper.)
    optimizer = [torch.optim.SGD(net_feat.parameters(), 1e-7, momentum=momentum, weight_decay=weightDecay),
                 torch.optim.SGD(net_cls.parameters(), 1e-7, momentum=momentum, weight_decay=weightDecay)]

    # learning rate warm up
    LrWarmUp(warmUpIter, lr, optimizer, net_feat, net_cls, trainloader, criterion,
             dist1, dist2, mask_feat_dim, alter_train, freeze_bn)
    with torch.no_grad():
        best_acc, acc, best_k = Test(0, best_acc, best_k, net_feat, net_cls, valloader, out_dir, mask_feat_dim)

    # reset "best" trackers: warm-up results don't count
    best_acc, best_k = 0, feat_dim
    for opt in optimizer:
        for g in opt.param_groups:
            g['lr'] = lr

    history = {'trainTop1':[], 'best_acc':[], 'trainTop5':[], 'valTop1':[], 'trainLoss':[], 'best_k':[]}
    lrScheduler = [MultiStepLR(opt, milestones=list(lrSchedule), gamma=lr_gamma) for opt in optimizer]

    for epoch in range(nbEpoch):
        trainLoss, trainTop1, trainTop5 = Train(epoch, optimizer, net_feat, net_cls, trainloader,
                                                criterion, dist1, dist2, mask_feat_dim, alter_train, freeze_bn)
        with torch.no_grad():
            best_acc, valTop1, best_k = Test(epoch, best_acc, best_k, net_feat, net_cls, valloader, out_dir, mask_feat_dim)

        history['trainTop1'].append(trainTop1)
        history['trainTop5'].append(trainTop5)
        history['trainLoss'].append(trainLoss)
        history['valTop1'].append(valTop1)
        history['best_acc'].append(best_acc)
        history['best_k'].append(best_k)

        with open(os.path.join(out_dir, 'history.json'), 'w') as f:
            json.dump(history, f)

        for lr_schedule in lrScheduler:
            lr_schedule.step()

    # rename the output dir to embed the final best accuracy and K
    msg = 'mv {} {}'.format(out_dir, '{}_Acc{:.3f}_K{:d}'.format(out_dir, best_acc, best_k))
    print (msg)
    os.system(msg)


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='PyTorch Classification', formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    # data
    parser.add_argument('--train-dir', type=str, default='../data/Animal10N/train/', help='train directory')
    parser.add_argument('--val-dir', type=str, default='../data/Animal10N/test/', help='val directory')
    parser.add_argument('--dataset', type=str, choices=['Animal10N'], default='Animal10N', help='which dataset?')
    # training
    parser.add_argument('--warmUpIter', type=int, default=6000, help='total iterations for learning rate warm')
    parser.add_argument('--lr', default=1e-1, type=float, help='learning rate')
    parser.add_argument('--weightDecay', default=5e-4, type=float, help='weight decay')
    parser.add_argument('--momentum', default=0.9, type=float, help='momentum')
    parser.add_argument('--batchsize', type=int, default=128, help='batch size')
    parser.add_argument('--nbEpoch', type=int, default=100, help='nb epoch')
    parser.add_argument('--lrSchedule', nargs='+', type=int, default=[50, 75], help='lr schedule')
    parser.add_argument('--lr-gamma', type=float, default=0.2, help='decrease learning rate by lr-gamma')
    parser.add_argument('--gpu', type=str, default='0', help='gpu devices')
    # model
    parser.add_argument('--arch', type=str, choices=['vgg19-bn'], default='vgg19-bn', help='which archtecture?')
    parser.add_argument('--out-dir', type=str, help='output directory')
    parser.add_argument('--mu', type=float, default=0.0, help='nested mean hyperparameter')
    parser.add_argument('--nested1', type=float, default=0.0, help='nested1 std hyperparameter')
    parser.add_argument('--nested2', type=float, default=0.0, help='nested2 std hyperparameter')
    parser.add_argument('--alter-train', action='store_true', help='whether to use alternative training for nested')
    parser.add_argument('--vgg-dropout', type=float, default=0.0, help='dropout ratio')
    parser.add_argument('--resumePth', type=str, help='resume path')
    parser.add_argument('--freeze-bn', action='store_true', help='freeze the BN layers')
    parser.add_argument('--pretrained', action='store_true', help='Start with ImageNet pretrained model (Pytorch Model Zoo)')

    args = parser.parse_args()
    print (args)

    if (args.nested1 > 0 or args.nested2 > 0) and args.vgg_dropout > 0:
        raise RuntimeError('Activating both nested1 / nested2 (eta = {:.3f} / {:.3f}) and vgg_dropout \
        (ratio = {:.3f})'.format(args.nested1, args.nested2, args.vgg_dropout))

    main(gpu = args.gpu,
         arch = args.arch,
         vgg_dropout= args.vgg_dropout,
         out_dir = args.out_dir,
         dataset = args.dataset,
         train_dir = args.train_dir,
         val_dir = args.val_dir,
         warmUpIter = args.warmUpIter,
         lr = args.lr,
         nbEpoch = args.nbEpoch,
         batchsize = args.batchsize,
         momentum = args.momentum,
         weightDecay = args.weightDecay,
         lrSchedule = args.lrSchedule,
         lr_gamma = args.lr_gamma,
         mu = args.mu,
         nested1 = args.nested1,
         nested2 = args.nested2,
         alter_train = args.alter_train,
         resumePth = args.resumePth,
         freeze_bn = args.freeze_bn,
         pretrained = args.pretrained)
37.915663
283
0.517848
2,016
18,882
4.69494
0.15129
0.027364
0.031379
0.010988
0.464237
0.439197
0.402007
0.390808
0.374326
0.365029
0
0.020141
0.300551
18,882
498
284
37.915663
0.696525
0.111588
0
0.437107
0
0.006289
0.086517
0.002828
0
0
0
0
0
1
0.022013
false
0
0.053459
0
0.091195
0.034591
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7a81537d6715e748ac6f076189e71e95d4899578
918
py
Python
src/unpacking.py
marvey11/python-playground
1a978a8b4e593f3497c48a921037b7bcf7af0762
[ "MIT" ]
null
null
null
src/unpacking.py
marvey11/python-playground
1a978a8b4e593f3497c48a921037b7bcf7af0762
[ "MIT" ]
null
null
null
src/unpacking.py
marvey11/python-playground
1a978a8b4e593f3497c48a921037b7bcf7af0762
[ "MIT" ]
null
null
null
""" Fun with unpacking stuff. """ from typing import Any, Tuple def print_integers(val01: int, val02: int) -> None: """ Prints some message for the integers. """ print(f"--> print_integers() -- VAL 1 = {val01}, VAL 2 = {val02}") def print_tuples(tup: tuple[Any, ...]) -> None: """ Prints some message for the tuple. """ print(f"--> print_tuples() -- TUPLE = {tup}") def get_tuples(val01: int, val02: int) -> Tuple[int, int]: """ Constructs a tuple out of integers. """ return (val01, val02) tup01: Tuple[int, int] = get_tuples(11, 14) # --> print_tuples() -- TUPLE = (11, 14) print_tuples(tup01) # --> print_integers() -- VAL 1 = 11, VAL 2 = 14 print_integers(*tup01) tup02 = (13, get_tuples(17, 19), 23) # --> print_tuples() -- TUPLE = (13, (17, 19), 23) print_tuples(tup02) tup03 = (13, *get_tuples(17, 19), 23) # --> print_tuples() -- TUPLE = (13, 17, 19, 23) print_tuples(tup03)
27
70
0.61329
132
918
4.143939
0.318182
0.160878
0.117002
0.080439
0.288848
0.288848
0.190128
0.190128
0.190128
0.190128
0
0.104698
0.188453
918
33
71
27.818182
0.62953
0.349673
0
0
0
0
0.15993
0
0
0
0
0
0
1
0.214286
false
0
0.071429
0
0.357143
0.571429
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
1
0
2
7a84e7aa21897fbe53bd431bdaec619accc01001
355
py
Python
codecs.py
HackersTech/black-hat_python
477246ebde4f944c15979f137cc73aa39fdc7669
[ "MIT" ]
3
2021-12-17T15:53:24.000Z
2022-02-10T14:26:36.000Z
codecs.py
HackersTech/black-hat_python
477246ebde4f944c15979f137cc73aa39fdc7669
[ "MIT" ]
null
null
null
codecs.py
HackersTech/black-hat_python
477246ebde4f944c15979f137cc73aa39fdc7669
[ "MIT" ]
null
null
null
"""Interactive helper that wraps a file's text in a self-decoding stub.

NOTE(review): this module is named ``codecs.py``; run from its own
directory it shadows the stdlib ``codecs`` module for other local
imports -- consider renaming the file.
"""
import codecs

ENCODINGS_HELP = """encodings :
# hex
# quopri
# uu
# uu_codec
# zip
# zlib
# bz2
# bz2_codec
base64"""


def encode_text(text, encoding):
    """Return *text* encoded to bytes with the given codecs encoding.

    Security/bug fix: the original built a source string from the raw
    file contents and ran it through eval(), which executes arbitrary
    file content and breaks on any text containing quotes. Calling
    codecs.encode() directly is equivalent and safe.
    """
    return codecs.encode(text.encode(), encoding)


def build_decoder_source(payload, encoding):
    """Return Python source that decodes *payload* and exec()s the result.

    Bug fixes vs. the original output: the payload bytes are embedded via
    repr() (the old f-string produced invalid nested quoting), and a
    newline separates the two statements (they used to be concatenated
    into one syntactically invalid line).
    """
    return f'a=codecs.decode({payload!r},{encoding!r})\nexec(a)'


def main():
    print(ENCODINGS_HELP)
    en = input('enter encoding -: ')
    # `with` closes the handles the original left open.
    with open(input("enter file to be encoded"), 'r') as src:
        data = src.read()
    encoded = encode_text(data, en)
    with open(input('enter output file name -: '), 'w') as out:
        out.write(build_decoder_source(encoded, en))


if __name__ == '__main__':
    main()
18.684211
47
0.619718
60
355
3.633333
0.633333
0.137615
0.12844
0
0
0
0
0
0
0
0
0.012903
0.126761
355
18
48
19.722222
0.690323
0
0
0
0
0
0.622535
0.166197
0
0
0
0
0
1
0
false
0
0.055556
0
0.055556
0.055556
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
7a85b5168659dd79cc872d19ff4716a8418facb0
2,352
py
Python
loader.py
Faylixe/pibooth
6ce63753c35ce7d38c6630e7eeb10947fdea1ffa
[ "MIT" ]
null
null
null
loader.py
Faylixe/pibooth
6ce63753c35ce7d38c6630e7eeb10947fdea1ffa
[ "MIT" ]
null
null
null
loader.py
Faylixe/pibooth
6ce63753c35ce7d38c6630e7eeb10947fdea1ffa
[ "MIT" ]
null
null
null
#!/usr/bin/python from ui import Window, Panel, Label, Image import pygame WHITE = (255, 255, 255) BLACK = (0, 0, 0) GRAY = (100, 100, 100) class Loader(object): """ Loader class. """ def __init__(self, size=(640, 480)): """ Default constructor. """ self.size = size self.window = Window(size=size, fullscreen=False, backgroundColor=WHITE) self.container = Panel(orientation='vertical') self.window.add(self.container) def welcome(self): """ Welcome screen. """ header = Label('Bienvenue', color=BLACK, size='huge') message = Label('Appuyer pour commencer', color=BLACK, size='medium') self.container.add(header) self.container.add(message) def onClick(position): """ Window click callback. """ self.container.remove(header) self.container.remove(message) self.window.onWindowClick = None self.prompt('Voulez vous configurer la connection internet ?', lambda r: self.wifi(r)) self.window.onWindowClick = onClick def prompt(self, question, callback): """ Prompt screen (Yes / No question only) """ header = Label(question, color=BLACK, size='medium') panel = Panel(orientation='horizontal', padding=20) def createPromptCallback(callback, answer): def delegate(): self.container.remove(header) self.container.remove(panel) callback(answer) return delegate yes = Label(' Oui ', color=WHITE, background=GRAY, size='medium') no = Label(' Non ', color=WHITE, background=GRAY, size='medium') yes.onClick = createPromptCallback(callback, True) no.onClick = createPromptCallback(callback, False) panel.add(yes) panel.add(no) self.container.add(header) self.container.add(panel) self.window.invalidate() def wifi(self, configure): """ WiFi configuration screen. """ if configure: # TODO : Set RPI as WiFi hotspot. # TODO : Start webserver. # TODO : Quit and go next. pass else: quit() if __name__ == '__main__': info = pygame.display.Info() loader = Loader() loader.welcome() loader.window.start()
34.086957
98
0.592687
250
2,352
5.528
0.396
0.094067
0.04631
0.028944
0.167873
0.167873
0.118669
0
0
0
0
0.0172
0.283163
2,352
69
99
34.086957
0.802491
0.103316
0
0.08
0
0
0.068599
0
0
0
0
0.014493
0
1
0.14
false
0.02
0.04
0
0.22
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
7a86fd5c7b92c28b787452e765522fe7a377370d
1,255
py
Python
app/routers/users.py
Ashishb21/propertyConnect
60bc23125e4b45cc5e0c3b2cff8d57afd57ecbb8
[ "MIT" ]
null
null
null
app/routers/users.py
Ashishb21/propertyConnect
60bc23125e4b45cc5e0c3b2cff8d57afd57ecbb8
[ "MIT" ]
null
null
null
app/routers/users.py
Ashishb21/propertyConnect
60bc23125e4b45cc5e0c3b2cff8d57afd57ecbb8
[ "MIT" ]
null
null
null
from fastapi import APIRouter, Depends
from sqlalchemy.orm import Session
from core.database import get_db
from db.services.userservices import UserService
from schemas.users import RegisterUser
from core.token import get_currentUser
from db.models.usermodels import User

# CRUD endpoints for users; mounted by the application under some prefix
# (presumably /users -- verify against the app's include_router call).
router = APIRouter()


@router.get("/")
def getAllUser(db: Session = Depends(get_db)):
    """List every user."""
    return UserService.get_allUsers(db=db)


@router.post("/")
def createUser(user: RegisterUser, db: Session = Depends(get_db)):
    """Create a user, rejecting duplicate usernames or e-mail addresses."""
    invalid = False
    if UserService.get_user_by_username(db=db, username=user.username):
        invalid = True
    if UserService.get_user_by_email(db=db, email=user.email):
        invalid = True
    if not invalid:
        return UserService.create_user(user, db)
    else:
        # NOTE(review): returns HTTP 200 with an error payload rather than
        # a 4xx HTTPException -- confirm clients rely on this shape.
        return {"error_message":"User or email already exists "}


@router.get("/me")
def getMe(current_user: User = Depends(get_currentUser)):
    """Return the authenticated user resolved from the request token."""
    return current_user


@router.put("/{userid}")
def updateUser(userid: str, user: RegisterUser, db: Session = Depends(get_db)):
    """Update the user identified by `userid`."""
    return UserService.update_user(id=userid, user=user, db=db)


@router.delete("/{userid}")
def deleteUser(userid: str, db: Session = Depends(get_db)):
    """Delete the user identified by `userid`."""
    return UserService.delete_user_by_id(id=userid, db=db)
31.375
79
0.737052
173
1,255
5.219653
0.323699
0.027685
0.070875
0.084164
0.233666
0.184939
0.184939
0
0
0
0
0
0.148207
1,255
40
80
31.375
0.844715
0
0
0.064516
0
0
0.051752
0
0
0
0
0
0
1
0.16129
false
0
0.225806
0.129032
0.580645
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
0
0
0
1
7a8b6a5f33d6240f73b55ff527bd30a0bb87df51
4,027
py
Python
awards/models.py
Sundaybrian/prodev
ed51fbd7c70941de36f7bc59c1940acfafaecf72
[ "MIT" ]
null
null
null
awards/models.py
Sundaybrian/prodev
ed51fbd7c70941de36f7bc59c1940acfafaecf72
[ "MIT" ]
7
2020-06-05T22:53:10.000Z
2022-02-10T08:29:14.000Z
awards/models.py
Sundaybrian/prodev
ed51fbd7c70941de36f7bc59c1940acfafaecf72
[ "MIT" ]
null
null
null
from django.db import models
from django.utils import timezone
from django.contrib.auth.models import User
from django.db.models import Q
from django.core.exceptions import ObjectDoesNotExist
from django.http import Http404
from users.models import Profile
from statistics import mean
# Bug fix: this import was commented out although get_absolute_url()
# calls reverse(), which raised NameError at runtime.
from django.urls import reverse

# Create your models here.


class Post(models.Model):
    '''A submitted project that judges score on five criteria.'''
    title = models.CharField(max_length=30)
    description = models.TextField()
    link = models.CharField(max_length=100)
    image = models.ImageField(upload_to='poster/', default='')
    date_posted = models.DateTimeField(auto_now_add=True)
    author = models.ForeignKey(User, on_delete=models.CASCADE)
    # Aggregate criterion scores (0 until reviews are recorded).
    design = models.IntegerField(blank=True, default=0)
    usability = models.IntegerField(blank=True, default=0)
    creativity = models.IntegerField(blank=True, default=0)
    content = models.IntegerField(blank=True, default=0)
    mobile = models.IntegerField(blank=True, default=0)

    def __str__(self):
        return f'Post{self.title}--{self.description}--{self.author.username}'

    def get_absolute_url(self):
        '''
        return full path of a url
        '''
        return reverse('post-detail', kwargs={'pk': self.pk})

    def save_post(self):
        '''
        method to save a post
        '''
        self.save()

    @classmethod
    def get_posts(cls):
        '''
        method to fetch all posts, newest first
        '''
        posts = cls.objects.order_by('-date_posted')
        return posts

    @classmethod
    def get_post_by_id(cls, id):
        '''Fetch one post by id, raising Http404 when missing.'''
        try:
            post = cls.objects.get(id=id)
        except ObjectDoesNotExist:
            # Bug fix: the original placed an unreachable `assert False`
            # after this raise; removed.
            raise Http404()
        return post

    @classmethod
    def get_posts_by_username(cls, username):
        posts = cls.objects.filter(author=username).order_by('-date_posted')
        return posts

    @classmethod
    def delete_post(cls, post_id):
        '''
        method to delete a post
        '''
        cls.objects.get(id=post_id).delete()

    @classmethod
    def search(cls, search_term):
        '''
        method that returns a post based on search query
        (matches title or author username, case-insensitively)
        '''
        posts = cls.objects.filter(
            Q(title__icontains=search_term)
            | Q(author__username__icontains=search_term)
        )
        return posts


class Review(models.Model):
    '''A single judge's scores for one post.'''
    design = models.IntegerField(blank=True, default=0)
    usability = models.IntegerField(blank=True, default=0)
    creativity = models.IntegerField(blank=True, default=0)
    content = models.IntegerField(blank=True, default=0)
    mobile = models.IntegerField(blank=True, default=0)
    post = models.ForeignKey(Post, on_delete=models.CASCADE)
    judge = models.ForeignKey(User, blank=True, null=True, on_delete=models.CASCADE)
    average_review = models.IntegerField(blank=True, default=0)

    def save_review(self):
        self.save()

    def __str__(self):
        return f'{self.post.title}:Review-{self.design}-{self.usability}-{self.creativity}-{self.content}-{self.mobile}-{self.post.id}'

    @classmethod
    def get_all_reviews(cls, post_id):
        '''Return the rounded mean of each criterion for `post_id`.

        NOTE(review): statistics.mean raises StatisticsError when the
        post has no reviews; callers must guard against that.
        '''
        # DRY fix: build the filtered queryset once instead of repeating
        # the same filter() call for every criterion.
        reviews = cls.objects.filter(post_id=post_id)
        design = round(mean(reviews.values_list('design', flat=True)))
        usability = round(mean(reviews.values_list('usability', flat=True)))
        creativity = round(mean(reviews.values_list('creativity', flat=True)))
        content = round(mean(reviews.values_list('content', flat=True)))
        mobile = round(mean(reviews.values_list('mobile', flat=True)))
        average_review = (design + usability + creativity + content + mobile) / 5
        return {
            'design': design,
            'usability': usability,
            'creativity': creativity,
            'content': content,
            'mobile': mobile,
            'average_review': average_review,
        }
26.320261
139
0.656816
494
4,027
5.220648
0.230769
0.032571
0.0981
0.115161
0.348585
0.336565
0.336565
0.320667
0.256301
0.256301
0
0.007358
0.22374
4,027
152
140
26.493421
0.817658
0.04296
0
0.3125
0
0.0125
0.084488
0.048085
0
0
0
0
0.0125
1
0.1375
false
0
0.125
0.025
0.625
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
7a8cb7844af88c35eb1b63d6733fa0006e6aff06
1,291
py
Python
src/xg_all/xg.py
tapojyotipaul/xgboost-benchmarks
789b99acbf401617a45a8c82dbae1210378527d8
[ "Apache-2.0" ]
null
null
null
src/xg_all/xg.py
tapojyotipaul/xgboost-benchmarks
789b99acbf401617a45a8c82dbae1210378527d8
[ "Apache-2.0" ]
null
null
null
src/xg_all/xg.py
tapojyotipaul/xgboost-benchmarks
789b99acbf401617a45a8c82dbae1210378527d8
[ "Apache-2.0" ]
2
2021-04-07T12:32:42.000Z
2021-04-21T16:28:42.000Z
from timeit import default_timer as timer
import xgboost as xgb
import common
import gc

# Number of timed prediction loops per batch size.
NUM_LOOPS = 100

# XGBoost training hyper-parameters (CPU histogram method).
PARAMS = {
    'objective': 'reg:squarederror',
    'alpha': 0.9,
    'max_bin': 256,
    'scale_pos_weight': 2,
    'learning_rate': 0.1,
    'subsample': 1,
    'reg_lambda': 1,
    'min_child_weight': 0,
    'max_depth': 8,
    'max_leaves': 2**8,
    'tree_method': 'hist',
    'predictor': 'cpu_predictor'
}

# Model is trained once at import time on the shared dataset from `common`.
TRAIN_DF = xgb.DMatrix(data=common.X, label=common.y)
MODEL = xgb.train(params=PARAMS, dtrain=TRAIN_DF)


def run_inference(num_observations:int = 1000):
    """Run xgboost for specified number of observations

    Times NUM_LOOPS predict() calls (including the DMatrix construction)
    over a test batch of `num_observations` rows and prints summary stats
    of the per-row inference times via common.calculate_stats.
    """
    # Load data
    test_df = common.get_test_data(num_observations)
    num_rows = len(test_df)
    # print(f"Running {NUM_LOOPS} inference loops with batch size {num_rows}...")
    run_times3 = []
    inference_times3 = []
    for _ in range(NUM_LOOPS):
        start_time = timer()
        # DMatrix construction is deliberately inside the timed region.
        data = xgb.DMatrix(test_df)
        MODEL.predict(data)
        end_time = timer()
        total_time3 = end_time - start_time
        # NOTE(review): 10e3 == 1e4 and 10e6 == 1e7 -- these scale factors
        # look like they were meant to be 1e3 (ms) and 1e6 (us); confirm
        # the intended units before comparing against other benchmarks.
        run_times3.append(total_time3*10e3)
        inference_time3 = total_time3*(10e6)/num_rows
        inference_times3.append(inference_time3)
    print(num_observations, ", ", common.calculate_stats(inference_times3))
26.346939
81
0.659954
172
1,291
4.69186
0.5
0.02974
0
0
0
0
0
0
0
0
0
0.036743
0.219985
1,291
49
82
26.346939
0.764647
0.10457
0
0
0
0
0.138261
0
0
0
0
0
0
1
0.027778
false
0
0.111111
0
0.138889
0.027778
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7a8dbcfb9f48db68d70725b0f63d6593739a5371
255
py
Python
gyak/03/duplum-torol.py
horverno/sze-academic-python
3ac8f2c62b827822f529dc600eef91713e82d551
[ "MIT" ]
4
2019-06-24T17:01:03.000Z
2021-11-09T21:48:32.000Z
gyak/03/duplum-torol.py
horverno/sze-academic-python
3ac8f2c62b827822f529dc600eef91713e82d551
[ "MIT" ]
null
null
null
gyak/03/duplum-torol.py
horverno/sze-academic-python
3ac8f2c62b827822f529dc600eef91713e82d551
[ "MIT" ]
6
2018-07-24T10:08:14.000Z
2021-09-11T20:40:47.000Z
# Remove the duplicates from a list while keeping first-seen order.
# (Original Hungarian comment: "irjunk olyan python kodot, amely kitorli
# egy listabol a duplumokat" -- write Python code that deletes the
# duplicates from a list.)

def dedupe(items):
    """Return a new list with duplicates removed, preserving order."""
    seen = set()
    unique = []
    for x in items:
        if x not in seen:
            unique.append(x)
            seen.add(x)
    return unique


a = [10, 20, 30, 20, 10, 50, 60, 40, 80, 50, 40]
uniq_items = dedupe(a)
# Bug fix: the original printed the unordered helper *set* of seen items
# instead of the deduplicated list it had just built.
print(uniq_items)
19.615385
68
0.662745
48
255
3.395833
0.604167
0.196319
0
0
0
0
0
0
0
0
0
0.11
0.215686
255
12
69
21.25
0.705
0.258824
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.125
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7a8dcc97cfb480e0204ea275bf860897bec39fc3
34
py
Python
bzoing/share.py
marado/bzoing
7d3ac9c36f23d89b692de2e2d8871c108b7d335d
[ "MIT" ]
7
2017-01-12T18:25:39.000Z
2021-06-30T11:56:07.000Z
bzoing/share.py
marado/bzoing
7d3ac9c36f23d89b692de2e2d8871c108b7d335d
[ "MIT" ]
20
2017-03-13T19:47:30.000Z
2021-01-19T04:25:35.000Z
bzoing/share.py
marado/bzoing
7d3ac9c36f23d89b692de2e2d8871c108b7d335d
[ "MIT" ]
4
2017-07-16T05:39:32.000Z
2020-09-30T23:22:43.000Z
# Module-level shared state, initialized to None and presumably assigned
# by the application at startup so other modules can import them --
# TODO confirm against the package's entry point.
tasklist = None
my_monitor = None
11.333333
17
0.764706
5
34
5
0.8
0
0
0
0
0
0
0
0
0
0
0
0.176471
34
2
18
17
0.892857
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
7a8f9fda09004030cc2c8b33de4dfe7521550783
261
py
Python
backend/config/settings/environments/__init__.py
offurface/logistic-company
1e98b1191fd9ee63fbd9d6c2eef1354822e53d14
[ "MIT" ]
null
null
null
backend/config/settings/environments/__init__.py
offurface/logistic-company
1e98b1191fd9ee63fbd9d6c2eef1354822e53d14
[ "MIT" ]
null
null
null
backend/config/settings/environments/__init__.py
offurface/logistic-company
1e98b1191fd9ee63fbd9d6c2eef1354822e53d14
[ "MIT" ]
null
null
null
# Environment-specific Django settings selector.
# `DEBUG` is expected to come from the shared settings star-import below
# (presumably defined in .common -- verify), and decides whether the
# development or production overlay is loaded.
from .common import *
from ..components import env

if DEBUG:
    # Development: a hard-coded fallback secret is acceptable only here.
    SECRET_KEY = env('SECRET_KEY', default='-qf)o7hs$jk@b8o)zidroo9wskuf^95m2$@k)5^@hl-=)349-7')
    from .development import *
else:
    # Production: SECRET_KEY must be provided by the environment;
    # env() with no default fails fast if it is missing.
    SECRET_KEY = env('SECRET_KEY')
    from .production import *
26.1
96
0.685824
38
261
4.605263
0.631579
0.205714
0.137143
0.205714
0.24
0
0
0
0
0
0
0.050459
0.164751
261
9
97
29
0.752294
0
0
0
0
0.125
0.268199
0.191571
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
7a91c1b9651492ee7c9426b0fcd2c1c972c0dcc4
7,034
py
Python
rudderthralloc/forcealloc.py
kplindegaard/pycs2
a21a9b9403b84601e889bf253b85b11659ea5896
[ "BSD-2-Clause" ]
2
2021-04-21T01:53:11.000Z
2022-03-21T10:05:23.000Z
rudderthralloc/forcealloc.py
kplindegaard/pycs2
a21a9b9403b84601e889bf253b85b11659ea5896
[ "BSD-2-Clause" ]
null
null
null
rudderthralloc/forcealloc.py
kplindegaard/pycs2
a21a9b9403b84601e889bf253b85b11659ea5896
[ "BSD-2-Clause" ]
1
2020-09-16T03:47:12.000Z
2020-09-16T03:47:12.000Z
""" forcealloc.py - Map commanded thrust to generalized, cartesian forces """ # BSD 2-Clause License # # Copyright (c) 2001-2017, Karl-Petter Lindegaard # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # * Redistributions of source code must retain the above copyright notice, this # list of conditions and the following disclaimer. # # * Redistributions in binary form must reproduce the above copyright notice, # this list of conditions and the following disclaimer in the documentation # and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" # AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE # IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
import numpy as np
from math import sin, cos
from cs2data import T1LX, T1LY, T2LX, T2LY, T3LX, T3LY


class ForceAllocation:
    """ForceAllocation - maps from tau_c to generalized forces.

    Maps a 3-DOF commanded thrust vector (surge, sway, yaw moment) onto the
    individual actuators: two main propeller/rudder pairs and one bow
    thruster.  Two candidate solutions are computed (one per "active rudder"
    configuration) and the cheaper one, measured by a quadratic cost, wins.
    """

    def __init__(self, theta1, theta2, c1, c2):
        """
        :param theta1: Port main propeller/rudder positive force angle span [rad]
        :param theta2: Starboard main propeller/rudder positive force angle span [rad]
        :param c1: Port main propeller positive thrust bias [N]
        :param c2: Starboard main propeller positive thrust bias [N]
        """
        self.theta1 = theta1
        self.theta2 = theta2
        self.c1 = c1
        self.c2 = c2
        # Cost weights for the two candidate solutions (identity: plain
        # least-norm comparison).
        self.Q1 = np.eye(4)
        self.Q2 = np.eye(4)

        # Full allocation matrix: columns are (T1 surge, T1 sway, T2 surge,
        # T2 sway, bow thruster sway); rows are surge, sway, yaw moment.
        # TxLX/TxLY are the actuator lever arms imported from cs2data.
        self.A = np.array([
            [1, 0, 1, 0, 0],
            [0, 1, 0, 1, 1],
            [-T1LY, T1LX, -T2LY, T2LX, T3LX]
        ])

        # Filters. f1 = Rudder 2 inactive, f2 = Rudder 1 inactive
        self.f1 = np.array([True, True, True, False, True])
        self.f2 = np.array([True, False, True, True, True])

        # Configure A1, n1 and A1dagger etc.
        self.A1 = self.A[:, self.f1]

        # Null-vector for A1 (direction along which actuator forces can be
        # shifted without changing the resulting generalized force).
        self.n1 = np.zeros(4)
        self.n1[0] = (T1LX - T3LX) / (T1LY - T2LY)
        self.n1[1] = 1.0
        self.n1[2] = -self.n1[0]
        self.n1[3] = -1.0

        # A1_dagger = A1'*inv(A1*A1')  (Moore-Penrose right pseudo-inverse)
        self.A1_dagger = self.A1.T.dot(np.linalg.inv(self.A1.dot(self.A1.T)))

        # Configure A2, n2 and A2dagger etc.
        self.A2 = self.A[:, self.f2]

        # Null-vector for A2
        self.n2 = np.zeros(4)
        self.n2[0] = (T3LX - T2LX) / (T1LY - T2LY)
        self.n2[1] = -self.n2[0]
        self.n2[2] = -1.0
        self.n2[3] = 1.0

        # A2_dagger = A2'*inv(A2*A2')
        self.A2_dagger = self.A2.T.dot(np.linalg.inv(self.A2.dot(self.A2.T)))

    def nullsub1(self, tauc, Adagger, n, theta, c):
        # type: (np.array, np.array, np.array, float, float) -> np.array
        """Candidate allocation with rudder 2 inactive.

        Projects the least-norm solution into the port thruster's valid
        force sector (positive-thrust cone of half-angle theta, offset by
        bias c) by sliding along the null-vector n.
        """
        # Step 0: Prepare the a-vector (sector boundary)
        a1 = cos(theta)
        a2 = -sin(theta)

        # Step 1: Find optimal solution based on pseudo-inverse
        u0 = Adagger.dot(tauc)

        # Step 2: Extract prop/rudder and translate to "the other" ref. frame
        u0m1 = u0[0] - c
        u0m2 = u0[1]

        # Step 3: Sector check
        nn1 = n[1]
        nn2 = -n[0]
        dp = nn1*u0m1 + nn2*u0m2
        insector = False
        if dp <= 0.0:
            # Traverse in x-axis (fx,0)
            b1 = 0.0
            b2 = 1.0
        else:
            # Are we in sector "1"
            if u0m2 >= 0.0:
                b1 = 0.0
                b2 = 1.0
            # Or perhaps we are already within the valid sector
            elif u0m1*a2 < u0m2*a1:
                insector = True
            # Otherwise, traverse along the nullvector until sector limit "a"
            else:
                b1 = a2
                b2 = -a1

        # Step 4: Find lambda, the distance to traverse
        gamma = 0.0
        if not insector:
            gamma = -(u0m1*b1 + u0m2*b2) / (n[0]*b1 + n[1]*b2)

        # Step 5: Adjust solution (moving along n leaves A1*u unchanged)
        u = u0 + gamma*n
        return u

    def nullsub2(self, tauc, Adagger, n, theta, c):
        # type: (np.array, np.array, np.array, float, float) -> np.array
        """Candidate allocation with rudder 1 inactive.

        Mirror image of nullsub1 for the starboard thruster: note the sign
        conventions (a2 = +sin, dp >= 0 test) are flipped relative to
        nullsub1.
        """
        # Step 0: Prepare the a-vector (sector boundary)
        a1 = cos(theta)
        a2 = sin(theta)

        # Step 1: Find optimal solution based on pseudo-inverse
        u0 = Adagger.dot(tauc)

        # Step 2: Extract prop/rudder and translate to "the other" ref. frame
        u0m1 = u0[1] - c
        u0m2 = u0[2]

        # Step 3: Sector check
        nn1 = n[2]
        nn2 = -n[1]
        dp = nn1 * u0m1 + nn2 * u0m2
        insector = False
        if dp >= 0.0:
            # Traverse in x-axis (fx,0)
            b1 = 0.0
            b2 = 1.0
        else:
            # Are we in sector "1"
            if u0m2 <= 0.0:
                b1 = 0.0
                b2 = 1.0
            # Or perhaps we are already within the valid sector
            elif u0m1 * a2 > u0m2 * a1:
                insector = True
            # Otherwise, traverse along the nullvector until sector limit "a"
            else:
                b1 = a2
                b2 = -a1

        # Step 4: Find lambda, the distance to traverse
        gamma = 0.0
        if not insector:
            gamma = -(u0m1 * b1 + u0m2 * b2) / (n[1] * b1 + n[2] * b2)

        # Step 5: Adjust solution
        u = u0 + gamma * n
        return u

    def allocate(self, tau):
        # type: (np.array) -> np.array
        """Map 3-DOF commanded thrust to generalized forces.

        First two elements of the result are surge and sway for thruster 1
        (port main prop+rudder), next two for starboard main prop+rudder,
        fifth element is the bow thruster's sway force.

        :param tau: Commanded thrust vector (surge, sway, yaw)
        :return: Generalized forces (length-5 vector)
        """
        # Call subroutines for each rudder
        x1 = self.nullsub1(tau, self.A1_dagger, self.n1, self.theta1, self.c1)
        x2 = self.nullsub2(tau, self.A2_dagger, self.n2, self.theta2, self.c2)

        # Compare results and pick the best solution J = x'*Q*x
        j1 = x1.dot(self.Q1.dot(x1))
        j2 = x2.dot(self.Q2.dot(x2))

        u = np.zeros(5)
        if j1 <= j2:
            # Use: u = [x1(0) x1(1) x1(2) 0 x1(3)];
            u[self.f1] = x1
        else:
            # u = [x2(0) 0 x2(1) x2(2) x2(3)];
            u[self.f2] = x2
        return u
33.495238
95
0.565254
996
7,034
3.980924
0.276104
0.022951
0.011349
0.017654
0.387894
0.371248
0.351576
0.351576
0.351576
0.327364
0
0.066624
0.329969
7,034
209
96
33.655502
0.774666
0.486636
0
0.371134
0
0
0
0
0
0
0
0
0
1
0.041237
false
0
0.030928
0
0.113402
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7a9363b2cf078e6aa15dc4babf04d9305d0349df
158
py
Python
pygamebook/SourceCode_PyGame/User Defined Functions/trivialfunction.py
satrapade/sofia
f8903eb48a88eb9575823b4fe9f61435b882cdd4
[ "MIT" ]
null
null
null
pygamebook/SourceCode_PyGame/User Defined Functions/trivialfunction.py
satrapade/sofia
f8903eb48a88eb9575823b4fe9f61435b882cdd4
[ "MIT" ]
9
2018-06-18T11:17:44.000Z
2018-06-19T21:00:48.000Z
pygamebook/SourceCode_PyGame/User Defined Functions/trivialfunction.py
satrapade/sofia
f8903eb48a88eb9575823b4fe9f61435b882cdd4
[ "MIT" ]
null
null
null
#!/usr/bin/python def add(num1, num2): return num1 + num2 print add(10, 5) print add('sloan ', 'kelly') print add(3.14, 1.61) print add((1,2,3), (4,5,6))
15.8
28
0.613924
32
158
3.03125
0.625
0.329897
0
0
0
0
0
0
0
0
0
0.142857
0.158228
158
10
29
15.8
0.586466
0.101266
0
0
0
0
0.077465
0
0
0
0
0
0
0
null
null
0
0
null
null
0.666667
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
1
0
3
7a958e3eba071c4fbf127852dcb80e42dd73f774
3,982
py
Python
docly/logic/logic_main.py
autosoft-dev/docly
0bd6216b8a9735e9fa76bffd4ffea6cec6cc4a01
[ "MIT" ]
29
2020-12-31T08:27:32.000Z
2022-02-15T08:48:51.000Z
docly/logic/logic_main.py
autosoft-dev/docly
0bd6216b8a9735e9fa76bffd4ffea6cec6cc4a01
[ "MIT" ]
4
2020-12-30T18:18:54.000Z
2021-08-03T14:42:35.000Z
docly/logic/logic_main.py
autosoft-dev/docly
0bd6216b8a9735e9fa76bffd4ffea6cec6cc4a01
[ "MIT" ]
2
2022-01-04T17:58:22.000Z
2022-02-05T13:04:14.000Z
import os
import sys
from io import open
import numpy as np
import torch
import torch.nn as nn
from .example import make_example, make_new_example
from .input_features import convert_examples_to_features
from torch.utils.data import DataLoader, Dataset, SequentialSampler, TensorDataset
from transformers import (WEIGHTS_NAME, AdamW, get_linear_schedule_with_warmup,
                          RobertaConfig, RobertaModel, RobertaTokenizer)

MODEL_CLASSES = {'roberta': (RobertaConfig, RobertaModel, RobertaTokenizer)}

# CodeBERT encoder checkpoint and decoding hyper-parameters shared by both
# the "old" and "new" model variants.
model_name_or_path = "microsoft/codebert-base"
beam_size = 10
max_target_length = 128
max_source_length = 256
seed = 42


def load_model(model_path, is_old=False):
    """Build a CodeBERT-based Seq2Seq docstring generator and load weights.

    :param model_path: path to a saved state dict for the Seq2Seq model.
    :param is_old: selects the legacy model class/tokenizer configuration
        (.model.Seq2Seq, strict state-dict loading) versus the new one
        (.model_new.Seq2Seq, case-preserving tokenizer, strict=False).
    :return: (model, tokenizer) tuple; the model is in eval mode.
    """
    # The two Seq2Seq implementations live in different modules; import the
    # one matching the checkpoint format.
    if is_old:
        from .model import Seq2Seq
    else:
        from .model_new import Seq2Seq
    config_class, model_class, tokenizer_class = MODEL_CLASSES['roberta']
    config = config_class.from_pretrained(model_name_or_path)
    if is_old:
        tokenizer = tokenizer_class.from_pretrained(model_name_or_path)
    else:
        # New variant keeps original casing of identifiers.
        tokenizer = tokenizer_class.from_pretrained(model_name_or_path,
                                                    do_lower_case=False)
    encoder = model_class.from_pretrained(model_name_or_path, config=config)
    decoder_layer = nn.TransformerDecoderLayer(d_model=config.hidden_size,
                                               nhead=config.num_attention_heads)
    decoder = nn.TransformerDecoder(decoder_layer, num_layers=6)
    model = Seq2Seq(encoder=encoder, decoder=decoder, config=config,
                    beam_size=beam_size, max_length=max_target_length,
                    sos_id=tokenizer.cls_token_id, eos_id=tokenizer.sep_token_id)
    # Old checkpoints match the architecture exactly; new ones are loaded
    # with strict=False so extra/missing keys are tolerated.
    if is_old:
        if not torch.cuda.is_available():
            model.load_state_dict(torch.load(model_path,
                                             map_location=torch.device('cpu')))
        else:
            model.load_state_dict(torch.load(model_path))
    else:
        if not torch.cuda.is_available():
            model.load_state_dict(torch.load(model_path,
                                             map_location=torch.device('cpu')),
                                  strict=False)
        else:
            model.load_state_dict(torch.load(model_path), strict=False)
    if not torch.cuda.is_available():
        model.to("cpu")
    model.eval()
    return model, tokenizer


def predict_docstring(model, tokenizer, code_tokens, is_old):
    """Generate a docstring for tokenized source code.

    :param model: Seq2Seq model from load_model().
    :param tokenizer: matching tokenizer.
    :param code_tokens: tokenized code to summarize.
    :param is_old: must match the flag used in load_model().
    :return: single-element list containing the generated docstring.

    NOTE(review): only p[0] (the first prediction) is post-processed and
    returned, and an empty prediction list would raise IndexError --
    presumably callers always supply at least one example; verify.
    """
    examples = make_example(code_tokens) if is_old else make_new_example(code_tokens)
    features = convert_examples_to_features(examples, tokenizer)
    if is_old:
        all_source_ids = torch.tensor([f.source_ids for f in features],
                                      dtype=torch.long)
        all_source_mask = torch.tensor([f.source_mask for f in features],
                                       dtype=torch.long)
    else:
        # New variant truncates each example to max_source_length tokens.
        all_source_ids = torch.tensor(
            [f.source_ids[: max_source_length] for f in features],
            dtype=torch.long)
        all_source_mask = torch.tensor(
            [f.source_mask[: max_source_length] for f in features],
            dtype=torch.long)
    eval_data = TensorDataset(all_source_ids, all_source_mask)
    eval_sampler = SequentialSampler(eval_data)
    # Single batch covering the whole input.
    batch_size = len(code_tokens) if is_old else len(eval_data)
    eval_dataloader = DataLoader(eval_data, sampler=eval_sampler,
                                 batch_size=batch_size)
    p = []
    for batch in eval_dataloader:
        if not torch.cuda.is_available():
            batch = tuple(t.to('cpu') for t in batch)
        else:
            batch = tuple(t for t in batch)
        source_ids, source_mask = batch
        with torch.no_grad():
            preds = model(source_ids=source_ids, source_mask=source_mask)
            for pred in preds:
                # Take the top beam; token id 0 acts as padding/terminator.
                t = pred[0].cpu().numpy()
                t = list(t)
                if 0 in t:
                    t = t[:t.index(0)]
                text = tokenizer.decode(t, clean_up_tokenization_spaces=False)
                p.append(text)
    # Normalize a trailing lone "." by attaching it to the previous word.
    px = p[0].split()
    if px[-1] == ".":
        px[-2] = px[-2].strip() + "."
        px.pop()
    return [" ".join(px)]
35.553571
112
0.659719
525
3,982
4.725714
0.270476
0.016123
0.016929
0.03023
0.322048
0.322048
0.295042
0.244256
0.217654
0.143491
0
0.00703
0.249874
3,982
111
113
35.873874
0.823569
0
0
0.170455
0
0
0.013059
0.005776
0
0
0
0
0
1
0.022727
false
0
0.136364
0
0.181818
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7a975bde69531b07d4ebbabad0e38aa1e1ed2b20
268
py
Python
app.py
zhoujiahua/TaskTool
ba8e359be0d016f0e14a5ac5671ce926945bf21e
[ "MIT" ]
null
null
null
app.py
zhoujiahua/TaskTool
ba8e359be0d016f0e14a5ac5671ce926945bf21e
[ "MIT" ]
null
null
null
app.py
zhoujiahua/TaskTool
ba8e359be0d016f0e14a5ac5671ce926945bf21e
[ "MIT" ]
null
null
null
#!/usr/bin/python3 # -*- coding: UTF-8 -*- from flask import Flask from common.BaseClass import Student app = Flask(__name__) @app.route('/') def home_index(): BQ = Student('jerry', 18) return BQ.get_user_info() if __name__ == '__main__': app.run()
14.105263
36
0.645522
37
268
4.27027
0.756757
0
0
0
0
0
0
0
0
0
0
0.018349
0.186567
268
18
37
14.888889
0.706422
0.145522
0
0
0
0
0.061674
0
0
0
0
0
0
1
0.111111
false
0
0.222222
0
0.444444
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7a9a676c31323b69b36cedbc893b0b6545cf2acc
3,228
py
Python
dae/dae/annotation/tools/frequency_annotator.py
iossifovlab/gpf
e556243d29666179dbcb72859845b4d6c011af2b
[ "MIT" ]
null
null
null
dae/dae/annotation/tools/frequency_annotator.py
iossifovlab/gpf
e556243d29666179dbcb72859845b4d6c011af2b
[ "MIT" ]
82
2019-07-22T11:44:23.000Z
2022-01-13T15:27:33.000Z
dae/dae/annotation/tools/frequency_annotator.py
iossifovlab/gpf
e556243d29666179dbcb72859845b4d6c011af2b
[ "MIT" ]
null
null
null
#!/usr/bin/env python
import logging
from os.path import basename

from dae.variants.attributes import VariantType
from dae.annotation.tools.score_annotator import VariantScoreAnnotatorBase

logger = logging.getLogger(__name__)


class FrequencyAnnotator(VariantScoreAnnotatorBase):
    """Annotates variants with DAE frequency values from a score file.

    Looks up each variant (by chromosome/CSHL position/CSHL variant string)
    in the configured score file and copies the configured score columns
    into the annotation line.
    """

    def __init__(self, config, genomes_db):
        super(FrequencyAnnotator, self).__init__(config, genomes_db)

    def _init_score_file(self):
        """Extend base initialization: cache the score filename and the
        name of the column holding the CSHL variant string."""
        super(FrequencyAnnotator, self)._init_score_file()
        self.score_filename_base = basename(self.score_file.score_filename)
        self.variant_col_name = self.score_file.config.columns.variant
        assert self.variant_col_name
        assert self.variant_col_name in self.score_file.schema.col_names, \
            "'{}' not in score file schema! Schema columns: {}".format(
                self.variant_col_name, self.score_file.schema.col_names)
        logger.debug(f"variants builder {self.variant_builder}")

    def collect_annotator_schema(self, schema):
        super(FrequencyAnnotator, self).collect_annotator_schema(schema)

    def do_annotate(self, aline, variant, liftover_variants):
        """Fill `aline` with frequency scores for `variant`.

        Writes self.score_file.no_score_value / float score values into
        aline under the configured output column names, or marks scores as
        not found (CNVs, failed liftover, no match in the score file).
        """
        # CNVs have no single CSHL position/variant string to look up.
        if VariantType.is_cnv(variant.variant_type):
            logger.info(
                f"skip trying to add frequency for CNV variant {variant}")
            self._scores_not_found(aline)
            return
        if self.liftover:
            variant = liftover_variants.get(self.liftover)
        if variant is None:
            self._scores_not_found(aline)
            return
        # NOTE(review): this re-fetches the same liftover variant already
        # assigned above; it looks redundant -- confirm before simplifying.
        if self.liftover and liftover_variants.get(self.liftover):
            variant = liftover_variants.get(self.liftover)
        chrom = variant.chromosome
        pos = variant.details.cshl_position
        logger.debug(
            f"{self.score_filename_base}: looking for DAE frequency of "
            f"{variant}; {chrom}:{pos};")
        scores = self.score_file.fetch_scores(chrom, pos, pos)
        if not scores:
            self._scores_not_found(aline)
            return
        variant_detail = variant.details.cshl_variant
        variant_occurrences = scores[self.variant_col_name] \
            .count(variant_detail)
        if variant_occurrences > 0:
            if variant_occurrences > 1:
                logger.warning(
                    f"WARNING {self.score_filename_base}: "
                    f"multiple variant occurrences of {chrom}:{pos} {variant}")
            # Use the first occurrence's row for all configured columns.
            variant_index = scores[self.variant_col_name].index(variant_detail)
            for native, output in self.config.columns.items():
                # FIXME: this conversion should come from schema
                val = scores[native][variant_index]
                try:
                    # Empty/blank cells mean "no score"; everything else is
                    # expected to parse as a float.
                    if val in set(["", " "]):
                        aline[output] = self.score_file.no_score_value
                    else:
                        aline[output] = float(val)
                    logger.debug(
                        f"DAE frequency: aline[{output}]={aline[output]}")
                except ValueError as ex:
                    logger.error(
                        f"problem with: {output}: {chrom}:{pos} - {val}")
                    logger.error(ex)
                    raise ex
38.428571
79
0.615242
356
3,228
5.348315
0.303371
0.042542
0.040966
0.056723
0.221113
0.160714
0.12395
0.091387
0.045168
0
0
0.000881
0.296468
3,228
83
80
38.891566
0.837517
0.020756
0
0.15625
0
0
0.128838
0.033871
0
0
0
0.012048
0.03125
1
0.0625
false
0
0.0625
0
0.1875
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
7a9b8a645daeb7f2e4e7bf60b47320308bca2fef
6,101
py
Python
twitcher/inotify.py
liquidgecka/twitcher
a0a12106f7210b4ee46c01847cba47cb5e4032bb
[ "Apache-2.0" ]
1
2015-03-30T07:56:12.000Z
2015-03-30T07:56:12.000Z
twitcher/inotify.py
liquidgecka/twitcher
a0a12106f7210b4ee46c01847cba47cb5e4032bb
[ "Apache-2.0" ]
null
null
null
twitcher/inotify.py
liquidgecka/twitcher
a0a12106f7210b4ee46c01847cba47cb5e4032bb
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/python26
"""Watches a list of directories for file updates.

The classes in this module will watch a list of subdirectories for file
updates. A class is passed in at object initialization time and is used
to create objects as new files are discovered. If a file is updated then
the reload() function on that class will be called. If the file is
removed the class will be deleted. It is important to verify that
__init__, __del__, and reload() are all defined properly.

A simple example of this module use looks like this:

class watcher(object):
    def __init__(self, filename):
        self._filename = filename
        print 'Init: %s' % filename

    def __del__(self):
        print 'Del: %s' % self._filename

    def reload(self):
        print 'reload: %s' % self._filename

x = inotify.InotifyWatcher(['/tmp/bar'], watcher)

Only one InotifyWatcher can be registered per process due to the way
that inotify works.

Author: Brady Catherman (brady@twitter.com)
"""

# NOTE(review): Python 2 only -- uses `except IOError, e` syntax and
# dict.iterkeys()/itervalues().  Despite the module name, the mechanism
# used is dnotify (fcntl.F_NOTIFY), not the inotify syscall family.

import fcntl
import logging
import os
import signal
import stat

# dnotify event mask: notify on modify/create/delete/rename in a watched
# directory; DN_MULTISHOT keeps the notification armed after each event.
WATCH_MASK = (fcntl.DN_MODIFY | fcntl.DN_CREATE | fcntl.DN_DELETE |
              fcntl.DN_RENAME | fcntl.DN_MULTISHOT)


class WatchClass(object):
    """Interface class to be passed into InotifyWatcher()"""

    def __init__(self, filename):
        pass

    def __del__(self):
        pass

    def reload(self):
        """Called when the file is updated on disk."""
        pass


class InotifyWatcher(object):
    """Watches a list of directories for updates to the files in them.

    This class will watch the directories in watch_directories and will
    automatically make a class of watch_class type when a new one is
    found.

    Args:
      watch_directories: An iterable list of directories to watch for
        files in.
      watch_class: The class that will be used to wrap each file.
      file_pattern: An optional function that filters filenames. The basic
        footprint takes a single parameter (the filename) and returns
        True/False if it should be watched or not. If this is not given
        then all files will be watched.
    """

    def __init__(self, watch_directories, watch_class, file_pattern=None):
        if file_pattern is None:
            file_pattern = (lambda x: True)
        self._watch_directories = watch_directories
        self._watch_class = watch_class
        self._file_pattern = file_pattern
        # dir path -> open fd used for the dnotify registration
        self._watch_fds = {}
        # file path -> [last-seen mtime (None until first check), wrapper obj]
        self._watch_files = {}
        # SIGIO is raised by dnotify; SIGHUP doubles as a manual "rescan".
        signal.signal(signal.SIGIO, self._inotify)
        signal.signal(signal.SIGHUP, self._inotify)
        self.rescan()

    def _recurse_directory(self):
        """Recurses through all self._watch_directories finding files."""
        all_files = set()
        dirs = set(self._watch_directories)
        all_dirs = set()
        while dirs:
            dir = dirs.pop()
            try:
                files = [os.path.join(dir, f) for f in os.listdir(dir)]
                all_dirs.add(dir)
                all_files.update([f for f in files
                                  if os.path.isfile(f) and
                                  self._file_pattern(f)])
                # NOTE(review): f here is the joined path, so f[0] != '.'
                # tests the path's first character rather than the entry's
                # basename -- hidden-directory skipping presumably only
                # works as intended for relative paths; verify.
                dirs.update([f for f in files
                             if os.path.isdir(f) and f[0] != '.'])
            except IOError, e:
                logging.warning('Unable to access: %s' % dir)
            except OSError, e:
                logging.warning('Unable to access: %s' % dir)
        return (all_dirs, all_files)

    def _register_inotify(self, dir):
        """Registers a watch on the given directory."""
        if dir in self._watch_fds:
            return
        logging.info('Registering a inotify watch on %s' % dir)
        try:
            # The directory must stay open for dnotify to deliver events;
            # the fd is kept in self._watch_fds.
            fd = os.open(dir, os.O_RDONLY)
            fcntl.fcntl(fd, fcntl.F_NOTIFY, WATCH_MASK)
            self._watch_fds[dir] = fd
        except IOError, e:
            logging.error('Unable to register watch on %s: %s' % (dir, e))

    def _unregister_inotify(self, dir):
        """Unregisters the directory for update notification."""
        if dir not in self._watch_fds:
            return
        logging.info('Unregistering a inotify watch on %s' % dir)
        # NOTE(review): the fd is dropped without os.close(); the
        # descriptor leaks and the kernel notification stays armed until
        # process exit -- confirm whether this is intentional.
        del self._watch_fds[dir]

    def _inotify(self, signum, frame):
        """Called when either SIGHUP or SIGIO (inotify) is received."""
        logging.info('Received SIGHUP or a file update notification.')
        # Re-install the handlers (older signal semantics may reset them).
        signal.signal(signal.SIGIO, self._inotify)
        signal.signal(signal.SIGHUP, self._inotify)
        self.rescan()

    def _mtime(self, filename):
        """Returns the mtime of the given file (in seconds)."""
        try:
            s = os.stat(filename)
            return s[stat.ST_MTIME]
        except IOError:
            # On error we just return zero..
            # FIXME[brady]: Make this work better.
            return 0

    def files(self):
        """Returns a list of all WatchFile objects we are watching.

        This will return a list of all WatchFile objects associated with
        config files in the list of directories that we are currently
        watching.

        Returns:
          A list of all WatchConfig objects we are maintaining.
        """
        # Each value is a [mtime, wrapper] pair; return the wrappers.
        return [w for _, w in self._watch_files.itervalues()]

    def rescan(self):
        """Rescans all directories looking for files inside.

        This will walk all the directories listed when this class was
        created looking for configuration files. If new config files are
        found then a object will be created using the class passed in at
        init time. If a file that used to exist was deleted then the
        config object for it will also be deleted.
        """
        new_dirs, new_files = self._recurse_directory()
        # Old directories, unregister watches.
        for dir in set(self._watch_fds.iterkeys()).difference(new_dirs):
            self._unregister_inotify(dir)
        # New directories, register watches.
        for dir in new_dirs:
            self._register_inotify(dir)
        # Walk through all files that no longer exist.
        for file in set(self._watch_files).difference(new_files):
            logging.info('File deleted (%s): Removing its object.', file)
            del self._watch_files[file]
        for file in new_files:
            if file not in self._watch_files:
                w = self._watch_class(file)
                # mtime slot starts as None so the check below always
                # triggers an initial reload() timestamp record.
                self._watch_files[file] = [None, w]
                logging.info('Found new file (%s): Making new object', file)
            t = self._watch_files[file]
            m = self._mtime(file)
            # Reload when the recorded mtime differs from the current one.
            if t and t[0] != m:
                t[0] = m
                t[1].reload()
33.521978
79
0.677758
888
6,101
4.51464
0.251126
0.042654
0.024445
0.005238
0.128212
0.122225
0.085807
0.070342
0.053879
0.040908
0
0.001498
0.234224
6,101
181
80
33.707182
0.856592
0.033273
0
0.2
0
0
0.08284
0
0
0
0
0.005525
0
0
null
null
0.033333
0.055556
null
null
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
1
7a9dbb50b13d01bd1c12413cedbd4dd221c63516
106
py
Python
tests/__init__.py
pythoncatcoder/go.py
2fe83bbeac4190770678e3cf9df0a908b61be08d
[ "MIT" ]
34
2015-05-25T05:24:17.000Z
2022-01-18T08:49:46.000Z
tests/__init__.py
pythoncatcoder/go.py
2fe83bbeac4190770678e3cf9df0a908b61be08d
[ "MIT" ]
1
2019-12-14T20:31:20.000Z
2019-12-17T02:30:53.000Z
tests/__init__.py
pythoncatcoder/go.py
2fe83bbeac4190770678e3cf9df0a908b61be08d
[ "MIT" ]
18
2015-01-15T19:14:32.000Z
2021-05-17T23:09:54.000Z
from .location_test import * from .array_test import * from .view_test import * from .board_test import *
21.2
28
0.773585
16
106
4.875
0.4375
0.512821
0.538462
0
0
0
0
0
0
0
0
0
0.150943
106
4
29
26.5
0.866667
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
7
7aa2ecacc1b4edd27824ab28d2f199ab480f960d
141
py
Python
thirvusoft/thirvusoft/doctype/ts_payroll/test_ts_payroll.py
SaraneshThirvu/Script_Report
5af8f4d3dc32ead2b124f10c55040d49a21ebb5d
[ "MIT" ]
null
null
null
thirvusoft/thirvusoft/doctype/ts_payroll/test_ts_payroll.py
SaraneshThirvu/Script_Report
5af8f4d3dc32ead2b124f10c55040d49a21ebb5d
[ "MIT" ]
null
null
null
thirvusoft/thirvusoft/doctype/ts_payroll/test_ts_payroll.py
SaraneshThirvu/Script_Report
5af8f4d3dc32ead2b124f10c55040d49a21ebb5d
[ "MIT" ]
null
null
null
# Copyright (c) 2021, TS and Contributors # See license.txt # import frappe import unittest class TestTS_Payroll(unittest.TestCase): pass
15.666667
41
0.77305
19
141
5.684211
0.894737
0
0
0
0
0
0
0
0
0
0
0.033333
0.148936
141
8
42
17.625
0.866667
0.489362
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.333333
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
6
7aa3477bd6ed541a43bdde0d7bf39d526c22e133
646
py
Python
01/116.py
shuowangphd/lcpy
18e11bf7ca77acacadeeef93bf6b7f1667eae2cd
[ "MIT" ]
null
null
null
01/116.py
shuowangphd/lcpy
18e11bf7ca77acacadeeef93bf6b7f1667eae2cd
[ "MIT" ]
null
null
null
01/116.py
shuowangphd/lcpy
18e11bf7ca77acacadeeef93bf6b7f1667eae2cd
[ "MIT" ]
null
null
null
""" # Definition for a Node. class Node: def __init__(self, val: int = 0, left: 'Node' = None, right: 'Node' = None, next: 'Node' = None): self.val = val self.left = left self.right = right self.next = next """ class Solution: def connect(self, root: 'Optional[Node]') -> 'Optional[Node]': if not root: return None nd = root while root.left: nl = root.left while root: root.left.next = root.right root.right.next = root.next.left if root.next else None root = root.next root = nl return nd
29.363636
101
0.520124
81
646
4.098765
0.320988
0.072289
0
0
0
0
0
0
0
0
0
0.002439
0.365325
646
22
102
29.363636
0.807317
0.368421
0
0
0
0
0.069825
0
0
0
0
0
0
1
0.083333
false
0
0
0
0.25
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7aa4fa934c1256120ad178cb78b93a531007672d
821
py
Python
data/signals/rel_coords.py
TYSSSY/Apb-gcn
b7c9324d3ef3baafa2fe85d57fc1f81f24e0b1e7
[ "MIT" ]
null
null
null
data/signals/rel_coords.py
TYSSSY/Apb-gcn
b7c9324d3ef3baafa2fe85d57fc1f81f24e0b1e7
[ "MIT" ]
1
2020-10-30T02:01:39.000Z
2020-10-30T02:01:39.000Z
data/signals/rel_coords.py
TYSSSY/Apb-gcn
b7c9324d3ef3baafa2fe85d57fc1f81f24e0b1e7
[ "MIT" ]
null
null
null
import numpy as np def get_relative_coordinates(sample, references=(4, 8, 12, 16)): # input: C, T, V, M c, t, v, m = sample.shape final_sample = np.zeros((4 * c, t, v, m)) valid_frames = (sample != 0).sum(axis=3).sum(axis=2).sum(axis=0) > 0 start = valid_frames.argmax() end = len(valid_frames) - valid_frames[::-1].argmax() sample = sample[:, start:end, :, :] rel_coords = [] for i in range(len(references)): ref_loc = sample[:, :, references[i], :] coords_diff = (sample.transpose((2, 0, 1, 3)) - ref_loc).transpose((1, 2, 0, 3)) rel_coords.append(coords_diff) # Shape: 4*C, t, V, M rel_coords = np.vstack(rel_coords) # Shape: C, T, V, M final_sample[:, start:end, :, :] = rel_coords return final_sample
31.576923
88
0.5676
123
821
3.642276
0.382114
0.022321
0.033482
0.044643
0.125
0
0
0
0
0
0
0.036244
0.260658
821
25
89
32.84
0.701812
0.06821
0
0
0
0
0
0
0
0
0
0
0
1
0.058824
false
0
0.058824
0
0.176471
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7aa5f484404f8770edc1499119077e82dbc3d9e3
4,774
py
Python
src/preprocess_librosa.py
Un-bias/musicnn-training
c7aa67bd1641592fcf43467c37db01b553dd4e5c
[ "0BSD" ]
41
2019-07-24T07:11:53.000Z
2022-03-28T13:42:34.000Z
src/preprocess_librosa.py
Un-bias/musicnn-training
c7aa67bd1641592fcf43467c37db01b553dd4e5c
[ "0BSD" ]
3
2020-05-04T13:17:58.000Z
2020-11-14T14:37:06.000Z
src/preprocess_librosa.py
Un-bias/musicnn-training
c7aa67bd1641592fcf43467c37db01b553dd4e5c
[ "0BSD" ]
13
2019-08-19T15:52:00.000Z
2021-03-22T02:36:33.000Z
import os
import librosa
from joblib import Parallel, delayed
import json
import config_file
import argparse
import pickle
import numpy as np
from pathlib import Path

# When True, run sequentially instead of via joblib (easier debugging).
DEBUG = False

# NOTE(review): compute_audio_repr/do_process/process_files all read the
# module-level `config` dict that is only assigned inside the __main__
# block below -- this script is not importable as a library as-is.


def compute_audio_repr(audio_file, audio_repr_file):
    """Compute and pickle the audio representation of one file.

    Loads the audio resampled to config['resample_sr'], computes either the
    raw waveform (Nx1) or a mel spectrogram (frames x n_mels) per
    config['type']/config['spectrogram_type'], stores it as float16 pickle.

    :return: number of frames (first dimension) of the representation.
    """
    audio, sr = librosa.load(audio_file, sr=config['resample_sr'])
    if config['type'] == 'waveform':
        audio_repr = audio
        audio_repr = np.expand_dims(audio_repr, axis=1)
    elif config['spectrogram_type'] == 'mel':
        audio_repr = librosa.feature.melspectrogram(
            y=audio, sr=sr, hop_length=config['hop'],
            n_fft=config['n_fft'], n_mels=config['n_mels']).T
    # Compute length
    print(audio_repr.shape)
    length = audio_repr.shape[0]
    # Transform to float16 (to save storage, and works the same)
    audio_repr = audio_repr.astype(np.float16)
    # Write results:
    with open(audio_repr_file, "wb") as f:
        pickle.dump(audio_repr, f)  # audio_repr shape: NxM
    return length


def do_process(files, index):
    """Process one (id, audio_file, audio_repr_file) entry of `files`.

    On success appends a line to this machine's index_<i>.tsv; on any
    failure logs the file and exception into errors<i>.txt instead of
    aborting the batch.
    """
    try:
        # NOTE: `id` shadows the builtin of the same name (kept as-is).
        [id, audio_file, audio_repr_file] = files[index]
        # Ensure the output directory exists.
        if not os.path.exists(audio_repr_file[:audio_repr_file.rfind('/') + 1]):
            path = Path(audio_repr_file[:audio_repr_file.rfind('/') + 1])
            path.mkdir(parents=True, exist_ok=True)
        # compute audio representation (pre-processing)
        length = compute_audio_repr(audio_file, audio_repr_file)
        # index.tsv writing (paths stored relative to DATA_FOLDER)
        fw = open(config_file.DATA_FOLDER + config['audio_representation_folder']
                  + "index_" + str(config['machine_i']) + ".tsv", "a")
        fw.write("%s\t%s\t%s\n" % (id,
                                   audio_repr_file[len(config_file.DATA_FOLDER):],
                                   audio_file[len(config_file.DATA_FOLDER):]))
        fw.close()
        print(str(index) + '/' + str(len(files)) + ' Computed: %s' % audio_file)
    except Exception as e:
        # Best-effort error log; processing of other files continues.
        ferrors = open(config_file.DATA_FOLDER + config['audio_representation_folder']
                       + "errors" + str(config['machine_i']) + ".txt", "a")
        ferrors.write(audio_file + "\n")
        ferrors.write(str(e))
        ferrors.close()
        print('Error computing audio representation: ', audio_file)
        print(str(e))


def process_files(files):
    """Process every entry of `files`, in parallel unless DEBUG."""
    if DEBUG:
        print('WARNING: Parallelization is not used!')
        for index in range(0, len(files)):
            do_process(files, index)
    else:
        Parallel(n_jobs=config['num_processing_units'])(
            delayed(do_process)(files, index) for index in range(0, len(files)))


if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('configurationID', help='ID of the configuration dictionary')
    args = parser.parse_args()
    config = config_file.config_preprocess[args.configurationID]
    # set audio representations folder
    config['audio_representation_folder'] = (
        "audio_representation/%s__%s/" % (config['identifier'], config['type']))
    if not os.path.exists(config_file.DATA_FOLDER + config['audio_representation_folder']):
        os.makedirs(config_file.DATA_FOLDER + config['audio_representation_folder'])
    else:
        print("WARNING: already exists a folder with this name!"
              "\nThis is expected if you are splitting computations into different machines.."
              "\n..because all these machines are writing to this folder. Otherwise, check your config_file!")
    # list audios to process: according to 'index_file'
    files_to_convert = []
    f = open(config_file.DATA_FOLDER + config["index_file"])
    for line in f.readlines():
        id, audio = line.strip().split("\t")
        audio_repr = audio[:audio.rfind(".")] + ".pk"  # .npy or .pk
        files_to_convert.append((id, config['audio_folder'] + audio,
                                 config_file.DATA_FOLDER
                                 + config['audio_representation_folder']
                                 + audio_repr))
    # compute audio representation: the work is split evenly across
    # config['n_machines'] machines; this machine takes slice machine_i.
    if config['machine_i'] == config['n_machines'] - 1:
        # Last machine takes everything up to the end (absorbs the rounding
        # remainder of the integer division).
        process_files(files_to_convert[
            int(len(files_to_convert) / config['n_machines']) * (config['machine_i']):])
        # we just save parameters once! In the last thread run by n_machine-1!
        json.dump(config, open(config_file.DATA_FOLDER
                               + config['audio_representation_folder']
                               + "config.json", "w"))
    else:
        first_index = int(len(files_to_convert) / config['n_machines']) * (config['machine_i'])
        second_index = int(len(files_to_convert) / config['n_machines']) * (config['machine_i'] + 1)
        assigned_files = files_to_convert[first_index:second_index]
        process_files(assigned_files)
    print("Audio representation folder: " + config_file.DATA_FOLDER
          + config['audio_representation_folder'])
42.247788
139
0.651445
610
4,774
4.855738
0.288525
0.069885
0.047265
0.067522
0.304862
0.272113
0.243754
0.227549
0.133018
0.053005
0
0.003512
0.22455
4,774
112
140
42.625
0.796596
0.076875
0
0.0375
0
0
0.205233
0.055518
0
0
0
0
0
1
0.0375
false
0
0.1125
0
0.1625
0.0875
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7aa642c92098c5cae5a9ed0435a274be4705e211
245
py
Python
Lista09/ex013.py
Guilherme-Schwann/Listas-de-Exercicios-UFV-CCF-110
f306c8dc6385ee8c9580e687afa16a49ace68f95
[ "MIT" ]
2
2021-09-05T22:29:33.000Z
2021-09-09T00:13:16.000Z
Lista09/ex013.py
Guilherme-Schwann/Listas-de-Exercicios-UFV-CCF-110
f306c8dc6385ee8c9580e687afa16a49ace68f95
[ "MIT" ]
null
null
null
Lista09/ex013.py
Guilherme-Schwann/Listas-de-Exercicios-UFV-CCF-110
f306c8dc6385ee8c9580e687afa16a49ace68f95
[ "MIT" ]
null
null
null
N = int(input('Ordem da matriz: ')) A = [[int(input()) for i in range(N)] for j in range(N)] At = [[0 for i in range(N)] for j in range(N)] for i in range(N): for j in range(N): At[i][j] = A[j][i] for i in range(N): print(At[i])
27.222222
56
0.542857
55
245
2.418182
0.272727
0.368421
0.421053
0.330827
0.661654
0.571429
0.571429
0.571429
0.571429
0.571429
0
0.005405
0.244898
245
8
57
30.625
0.713514
0
0
0.25
0
0
0.069388
0
0
0
0
0
0
1
0
false
0
0
0
0
0.125
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
7aa67f347816a860d8954caac66621040cc0046d
532
py
Python
45/45.py
cleamoon/ProjectEuler
8d51ad089e5fa21c709161fc658f8c4b533a3ac3
[ "MIT" ]
null
null
null
45/45.py
cleamoon/ProjectEuler
8d51ad089e5fa21c709161fc658f8c4b533a3ac3
[ "MIT" ]
null
null
null
45/45.py
cleamoon/ProjectEuler
8d51ad089e5fa21c709161fc658f8c4b533a3ac3
[ "MIT" ]
null
null
null
ltn = [] lpn = [] for i in range(1, 1000000): t = 143 + i p = 165 + i ltn.append(t * (2 * t - 1)) lpn.append(p * (3 * p - 1) // 2) def bst(n, b = 0, e = len(ltn)): if b >= e: if ltn[b] == n: return True else: return False else: m = (b + e)//2 if n > ltn[m]: return bst(n, m+1, e) elif n < ltn[m]: return bst(n, b, m) else: return True for p in lpn: if (bst(p)): print(p) break
18.344828
36
0.381579
84
532
2.416667
0.369048
0.059113
0.049261
0.108374
0.147783
0.147783
0
0
0
0
0
0.076923
0.462406
532
28
37
19
0.632867
0
0
0.2
0
0
0
0
0
0
0
0
0
1
0.04
false
0
0
0
0.24
0.04
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7aa6a0eddaf3a04a78ac717238e95549001e1d08
2,508
py
Python
tests/python/test_workitems.py
mdp/rpaframework
d427a3a4b9ea360780e449ece2674e275060310e
[ "Apache-2.0" ]
null
null
null
tests/python/test_workitems.py
mdp/rpaframework
d427a3a4b9ea360780e449ece2674e275060310e
[ "Apache-2.0" ]
null
null
null
tests/python/test_workitems.py
mdp/rpaframework
d427a3a4b9ea360780e449ece2674e275060310e
[ "Apache-2.0" ]
null
null
null
import copy import pytest from RPA.Robocloud.Items import BaseAdapter, Items VALID_DATABASE = { ("test-ws", "test-item"): {"username": "testguy", "address": "guy@company.com"}, ("test-ws", "second-item"): {"username": "another", "address": "dude@company.com"}, } class MockAdapter(BaseAdapter): DATABASE = {} @classmethod def validate(cls, item, key, val): data = cls.DATABASE.get((item.workspace_id, item.item_id)) assert data is not None assert data[key] == val def save(self, workspace_id, item_id, data): self.DATABASE[(workspace_id, item_id)] = data def load(self, workspace_id, item_id): return self.DATABASE.get((workspace_id, item_id), {}) @pytest.fixture def valid_adapter(monkeypatch): monkeypatch.setenv("RC_WORKSPACE_ID", "test-ws") monkeypatch.setenv("RC_WORKITEM_ID", "test-item") MockAdapter.DATABASE = copy.deepcopy(VALID_DATABASE) yield MockAdapter MockAdapter.DATABASE = {} def test_no_env(monkeypatch): monkeypatch.delenv("RC_WORKSPACE_ID", raising=False) monkeypatch.delenv("RC_WORKITEM_ID", raising=False) lib = Items(default_adapter=MockAdapter) assert lib.current is None def test_load_env(valid_adapter): lib = Items(default_adapter=valid_adapter) # Called by Robot Framework listener lib._start_suite(None, None) # Work item loaded using env variables env = lib.current assert env is not None assert env.data["username"] == "testguy" def test_load_env_disable(valid_adapter): lib = Items(load_env=False, default_adapter=valid_adapter) # Called by Robot Framework listener lib._start_suite(None, None) assert lib.current is None def test_keyword_load_item(valid_adapter): lib = Items(default_adapter=valid_adapter) item = lib.load_work_item("test-ws", "second-item") assert item.data["username"] == "another" assert item == lib.current def test_keyword_save_item(valid_adapter): lib = Items(default_adapter=valid_adapter) item = lib.load_work_item("test-ws", "second-item") MockAdapter.validate(item, "username", "another") item.data["username"] = "changed" 
lib.save_work_item() MockAdapter.validate(item, "username", "changed") def test_keyword_no_active_item(): lib = Items(default_adapter=MockAdapter) assert lib.current is None with pytest.raises(AssertionError) as err: lib.save_work_item() assert str(err.value) == "No active work item"
27.56044
87
0.702153
329
2,508
5.155015
0.243161
0.063679
0.044222
0.064858
0.367335
0.288915
0.288915
0.267689
0.255896
0.255896
0
0
0.176236
2,508
90
88
27.866667
0.82091
0.042265
0
0.241379
0
0
0.130525
0
0
0
0
0
0.189655
1
0.172414
false
0
0.051724
0.017241
0.275862
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7aa74729651c5f6bb13c86b7a22664c4c8570ee6
7,418
py
Python
seq2seq/models/conv_seq2seq.py
gyy8426/TF_concaption
7b3face47c96c885b2715605122328b7b6bef609
[ "Apache-2.0" ]
342
2017-06-23T12:47:32.000Z
2021-12-06T06:56:15.000Z
seq2seq/models/conv_seq2seq.py
gyy8426/TF_concaption
7b3face47c96c885b2715605122328b7b6bef609
[ "Apache-2.0" ]
26
2017-07-25T01:39:39.000Z
2020-06-08T09:59:17.000Z
seq2seq/models/conv_seq2seq.py
gyy8426/TF_concaption
7b3face47c96c885b2715605122328b7b6bef609
[ "Apache-2.0" ]
123
2017-06-25T16:02:37.000Z
2020-07-08T08:14:11.000Z
# Copyright 2017 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ Definition of a basic seq2seq model """ from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from pydoc import locate import tensorflow as tf from seq2seq.contrib.seq2seq import helper as tf_decode_helper from seq2seq.models.seq2seq_model import Seq2SeqModel from seq2seq.graph_utils import templatemethod from seq2seq.models import bridges from seq2seq.inference import beam_search class ConvSeq2Seq(Seq2SeqModel): """Basic Sequence2Sequence model with a unidirectional encoder and decoder. The last encoder state is used to initialize the decoder and thus both must share the same type of RNN cell. 
Args: source_vocab_info: An instance of `VocabInfo` for the source vocabulary target_vocab_info: An instance of `VocabInfo` for the target vocabulary params: A dictionary of hyperparameters """ def __init__(self, params, mode, name="conv_seq2seq"): super(ConvSeq2Seq, self).__init__(params, mode, name) self.encoder_class = locate(self.params["encoder.class"]) self.decoder_class = locate(self.params["decoder.class"]) @staticmethod def default_params(): params = Seq2SeqModel.default_params().copy() params.update({ "encoder.class": "seq2seq.encoders.ConvEncoderFairseq", "encoder.params": {}, # Arbitrary parameters for the encoder "decoder.class": "seq2seq.decoders.ConvDecoder", "decoder.params": {}, # Arbitrary parameters for the decoder "source.max_seq_len": 50, "source.reverse": False, "target.max_seq_len": 50, "embedding.dim": 256, "embedding.init_scale": 0.04, "embedding.share": False, "position_embeddings.num_positions": 100, "inference.beam_search.beam_width": 0, "inference.beam_search.length_penalty_weight": 1.0, "inference.beam_search.choose_successors_fn": "choose_top_k", "vocab_source": "", "vocab_target": "", "optimizer.name": "Momentum", "optimizer.learning_rate": 0.25, "optimizer.params": {"momentum": 0.99, "use_nesterov": True}, # Arbitrary parameters for the optimizer #"optimizer.params": { "epsilon": 0.0000008}, # Arbitrary parameters for the optimizer "optimizer.lr_decay_type": "exponential_decay", "optimizer.lr_decay_steps": 5000, # one epoch steps "optimizer.lr_decay_rate": 0.9, "optimizer.lr_start_decay_at": 0, # start annealing epoch 0 "optimizer.lr_stop_decay_at": tf.int32.max, "optimizer.lr_min_learning_rate": 1e-5, "optimizer.lr_staircase": True, "optimizer.clip_gradients": 0.1, "optimizer.clip_embed_gradients": 5, "optimizer.sync_replicas": 0, "optimizer.sync_replicas_to_aggregate": 0, }) return params def source_embedding_fairseq(self): """Returns the embedding used for the source sequence. 
""" return tf.get_variable( name="W", shape=[self.source_vocab_info.total_size, self.params["embedding.dim"]], initializer=tf.random_normal_initializer( mean=0.0, stddev=0.1)) def target_embedding_fairseq(self): """Returns the embedding used for the target sequence. """ if self.params["embedding.share"]: return self.source_embedding_fairseq() return tf.get_variable( name="W", shape=[self.target_vocab_info.total_size, self.params["embedding.dim"]], initializer=tf.random_normal_initializer( mean=0.0, stddev=0.1)) def source_pos_embedding_fairseq(self): return tf.get_variable( name="pos", shape=[self.params["position_embeddings.num_positions"], self.params["embedding.dim"]], initializer=tf.random_normal_initializer( mean=0.0, stddev=0.1)) def target_pos_embedding_fairseq(self): return tf.get_variable( name="pos", shape=[self.params["position_embeddings.num_positions"], self.params["embedding.dim"]], initializer=tf.random_normal_initializer( mean=0.0, stddev=0.1)) def _create_decoder(self, encoder_output, features, _labels): config = beam_search.BeamSearchConfig( beam_width=self.params["inference.beam_search.beam_width"], vocab_size=self.target_vocab_info.total_size, eos_token=self.target_vocab_info.special_vocab.SEQUENCE_END, length_penalty_weight=self.params[ "inference.beam_search.length_penalty_weight"], choose_successors_fn=getattr( beam_search, self.params["inference.beam_search.choose_successors_fn"])) return self.decoder_class( params=self.params["decoder.params"], mode=self.mode, vocab_size=self.target_vocab_info.total_size, config=config, target_embedding=self.target_embedding_fairseq(), pos_embedding=self.target_pos_embedding_fairseq(), start_tokens=self.target_vocab_info.special_vocab.SEQUENCE_END) def _decode_train(self, decoder, _encoder_output, _features, labels): """Runs decoding in training mode""" target_embedded = tf.nn.embedding_lookup(decoder.target_embedding, labels["target_ids"]) return decoder(_encoder_output, labels=target_embedded[:,:-1], 
sequence_length=labels["target_len"]-1) def _decode_infer(self, decoder, _encoder_output, features, labels): """Runs decoding in inference mode""" return decoder(_encoder_output, labels) @templatemethod("encode") def encode(self, features, labels): features["source_ids"] = tf.reverse_sequence(features["source_ids"], features["source_len"], batch_dim=0, seq_dim=1) # [[1,2,3,4,PAD,PAD,PAD],[2,3,PAD,PAD,PAD,PAD,PAD]] [4,2] features["source_ids"] = tf.reverse(features["source_ids"],[1]) # --> [[4,3,2,1,PAD,PAD,PAD],[3,2,PAD,PAD,PAD,PAD,PAD]] --> [[PAD,PAD,PAD,1,2,3,4],[PAD,PAD,PAD,PAD,PAD,2,3]] source_embedded = tf.nn.embedding_lookup(self.source_embedding_fairseq(), features["source_ids"]) encoder_fn = self.encoder_class(self.params["encoder.params"], self.mode, self.source_pos_embedding_fairseq()) return encoder_fn(source_embedded, features["source_len"]) @templatemethod("decode") def decode(self, encoder_output, features, labels): decoder = self._create_decoder(encoder_output, features, labels) if self.mode == tf.contrib.learn.ModeKeys.INFER: return self._decode_infer(decoder, encoder_output, features, labels) else: return self._decode_train(decoder, encoder_output, features, labels)
40.758242
181
0.685495
929
7,418
5.241119
0.259419
0.023413
0.025878
0.022181
0.376463
0.272335
0.222633
0.212364
0.151571
0.110084
0
0.01914
0.204098
7,418
181
182
40.983425
0.805556
0.209356
0
0.211382
0
0
0.209427
0.122065
0
0
0
0
0
1
0.089431
false
0
0.089431
0.01626
0.284553
0.00813
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7aa7505271b59ca60c424c74d2860fd22e8cc684
159
py
Python
system/imports/validador_de_email.py
ryanprogrammer/Sistema-de-cadastro
de1f1e2332650e7ba1dc43eb7daeafe2e5753b75
[ "MIT" ]
4
2021-12-23T22:56:42.000Z
2022-01-01T06:00:38.000Z
system/imports/validador_de_email.py
ryanprogrammer/registration-system
de1f1e2332650e7ba1dc43eb7daeafe2e5753b75
[ "MIT" ]
null
null
null
system/imports/validador_de_email.py
ryanprogrammer/registration-system
de1f1e2332650e7ba1dc43eb7daeafe2e5753b75
[ "MIT" ]
null
null
null
def emailValida(email): if '@gmail.com' in email or '@hotmail.com' in email or '@outlook.com' in email: return True else: return False
26.5
83
0.622642
23
159
4.304348
0.608696
0.151515
0.30303
0.242424
0
0
0
0
0
0
0
0
0.27044
159
5
84
31.8
0.853448
0
0
0
0
0
0.213836
0
0
0
0
0
0
1
0.2
false
0
0
0
0.6
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
5
7aa756bb5b3c685c48a483ea6ad71265f7bd863f
6,090
py
Python
tools/hacking.py
bopopescu/deb-openstack-nova
d80d110fa1a7d781a2ed23138b037bcc3db7a441
[ "Apache-2.0" ]
1
2015-07-15T08:51:16.000Z
2015-07-15T08:51:16.000Z
tools/hacking.py
eneabio/nova
535e8b9c3c5354187ff6cba1bacdc52b949eff8c
[ "Apache-2.0" ]
null
null
null
tools/hacking.py
eneabio/nova
535e8b9c3c5354187ff6cba1bacdc52b949eff8c
[ "Apache-2.0" ]
2
2019-06-12T00:52:15.000Z
2020-07-24T10:35:29.000Z
#!/usr/bin/env python # vim: tabstop=4 shiftwidth=4 softtabstop=4 # Copyright (c) 2012, Cloudscaling # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """nova HACKING file compliance testing built on top of pep8.py """ import inspect import os import re import sys import pep8 #N1xx comments #N2xx except #N3xx imports #N4xx docstrings #N5xx dictionaries/lists #N6xx Calling methods def nova_todo_format(physical_line): """ nova HACKING guide recommendation for TODO: Include your name with TODOs as in "#TODO(termie)" N101 """ pos = physical_line.find('TODO') pos1 = physical_line.find('TODO(') pos2 = physical_line.find('#') # make sure its a comment if (pos != pos1 and pos2 >= 0 and pos2 < pos): return pos, "NOVA N101: Use TODO(NAME)" def nova_except_format(logical_line): """ nova HACKING guide recommends not using except: Do not write "except:", use "except Exception:" at the very least N201 """ if logical_line.startswith("except:"): return 6, "NOVA N201: no 'except:' at least use 'except Exception:'" def nova_except_format(logical_line): """ nova HACKING guide recommends not using assertRaises(Exception...): Do not use overly broad Exception type N202 """ if logical_line.startswith("self.assertRaises(Exception"): return 1, "NOVA N202: assertRaises Exception too broad" def nova_one_import_per_line(logical_line): """ nova HACKING guide recommends one import per line: Do not import more than one module per line Examples: BAD: from nova.rpc.common import RemoteError, 
LOG BAD: from sqlalchemy import MetaData, Table N301 """ pos = logical_line.find(',') if (pos > -1 and (logical_line.startswith("import ") or (logical_line.startswith("from ") and logical_line.split()[2] == "import"))): return pos, "NOVA N301: one import per line" def nova_import_module_only(logical_line): """ nova HACKING guide recommends importing only modules: Do not import objects, only modules N302 import only modules N303 Invalid Import N304 Relative Import """ def importModuleCheck(mod, parent=None, added=False): """ If can't find module on first try, recursively check for relative imports """ current_path = os.path.dirname(pep8.current_file) try: valid = True if parent: parent_mod = __import__(parent, globals(), locals(), [mod], -1) valid = inspect.ismodule(getattr(parent_mod, mod)) else: __import__(mod, globals(), locals(), [], -1) valid = inspect.ismodule(sys.modules[mod]) if not valid: if added: sys.path.pop() added = False return logical_line.find(mod), ("NOVA N304: No relative " "imports. '%s' is a relative import" % logical_line) return logical_line.find(mod), ("NOVA N302: import only " "modules. 
'%s' does not import a module" % logical_line) except (ImportError, NameError) as exc: if not added: added = True sys.path.append(current_path) return importModuleCheck(mod, parent, added) else: print >> sys.stderr, ("ERROR: import '%s' failed: %s" % (logical_line, exc)) added = False sys.path.pop() return except AttributeError: # Invalid import return logical_line.find(mod), ("NOVA N303: Invalid import, " "AttributeError raised") split_line = logical_line.split() # handle "import x" # handle "import x as y" if (logical_line.startswith("import ") and "," not in logical_line and (len(split_line) == 2 or (len(split_line) == 4 and split_line[2] == "as"))): mod = split_line[1] return importModuleCheck(mod) # handle "from x import y" # handle "from x import y as z" elif (logical_line.startswith("from ") and "," not in logical_line and split_line[2] == "import" and split_line[3] != "*" and split_line[1] != "__future__" and (len(split_line) == 4 or (len(split_line) == 6 and split_line[4] == "as"))): mod = split_line[3] return importModuleCheck(mod, split_line[1]) # TODO(jogo) handle "from x import *" #TODO(jogo) Dict and list objects current_file = "" def readlines(filename): """ record the current file being tested """ pep8.current_file = filename return open(filename).readlines() def add_nova(): """ Look for functions that start with nova_ and have arguments and add them to pep8 module Assumes you know how to write pep8.py checks """ for name, function in globals().items(): if not inspect.isfunction(function): continue args = inspect.getargspec(function)[0] if args and name.startswith("nova"): exec("pep8.%s = %s" % (name, name)) if __name__ == "__main__": #include nova path sys.path.append(os.getcwd()) #NOVA error codes start with an N pep8.ERRORCODE_REGEX = re.compile(r'[EWN]\d{3}') add_nova() pep8.current_file = current_file pep8.readlines = readlines pep8._main()
31.071429
79
0.612972
767
6,090
4.756193
0.325945
0.063322
0.034539
0.027412
0.114857
0.090461
0.035088
0.035088
0.035088
0.035088
0
0.022079
0.286043
6,090
195
80
31.230769
0.816927
0.330213
0
0.091954
0
0
0.125358
0.007037
0
0
0
0.015385
0.022989
1
0.091954
false
0
0.275862
0
0.505747
0.011494
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
1
0
0
2
7aac8956f007f10c83a2012e4e3af7ab31bb2c4c
12,374
py
Python
src/approach/finetuning.py
tunglamlqddb/DFKD
924220a81d2a08bf07e50d86028e04899248d17b
[ "MIT" ]
null
null
null
src/approach/finetuning.py
tunglamlqddb/DFKD
924220a81d2a08bf07e50d86028e04899248d17b
[ "MIT" ]
null
null
null
src/approach/finetuning.py
tunglamlqddb/DFKD
924220a81d2a08bf07e50d86028e04899248d17b
[ "MIT" ]
null
null
null
import torch, warnings import torch.nn.functional as F import torch.nn as nn import numpy as np from argparse import ArgumentParser from .incremental_learning import Inc_Learning_Appr from datasets.exemplars_dataset import ExemplarsDataset class Appr(Inc_Learning_Appr): """Class implementing the finetuning baseline""" def __init__(self, model, device, nepochs=100, lr=0.05, lr_min=1e-4, lr_factor=3, lr_patience=5, clipgrad=10000, momentum=0, wd=0, multi_softmax=False, wu_nepochs=0, wu_lr_factor=1, fix_bn=False, eval_on_train=False, logger=None, exemplars_dataset=None, all_outputs=False, CE=True, OPL=False, gamma=0.5, opl_weight=1.0): super(Appr, self).__init__(model, device, nepochs, lr, lr_min, lr_factor, lr_patience, clipgrad, momentum, wd, multi_softmax, wu_nepochs, wu_lr_factor, fix_bn, eval_on_train, logger, exemplars_dataset) self.all_out = all_outputs self.CE = CE self.OPL = OPL self.gamma = gamma self.opl_weight = opl_weight self.means = [] self.covs = [] self.class_labels = [] @staticmethod def exemplars_dataset_class(): return ExemplarsDataset @staticmethod def extra_parser(args): """Returns a parser containing the approach specific parameters""" parser = ArgumentParser() parser.add_argument('--all-outputs', action='store_true', required=False, help='Allow all weights related to all outputs to be modified (default=%(default)s)') parser.add_argument('--CE', action='store_false', required=False, help='CE loss (default=%(default)s)') parser.add_argument('--OPL', action='store_true', required=False, help='OPL loss (default=%(default)s)') parser.add_argument('--gamma', default=0.5, type=float, required=False, help='Gamma for neg pair in OPL (default=%(default)s)') parser.add_argument('--opl_weight', default=1, type=float, required=False, help='Weight for OPL loss (default=%(default)s)') return parser.parse_known_args(args) def _get_optimizer(self): """Returns the optimizer""" if len(self.exemplars_dataset) == 0 and len(self.model.heads) > 1 and not 
self.all_out: # if there are no exemplars, previous heads are not modified params = list(self.model.model.parameters()) + list(self.model.heads[-1].parameters()) else: params = self.model.parameters() return torch.optim.SGD(params, lr=self.lr, weight_decay=self.wd, momentum=self.momentum) def save_protype(self, trained_model, loader): trained_model.eval() features = [] labels = [] with torch.no_grad(): for images, targets in loader: output, feature = trained_model(images.to(self.device), return_features=True) labels.append(targets.numpy()) features.append(feature.cpu().numpy()) labels = np.hstack(labels) labels_set = np.unique(labels) features = np.concatenate(features, 0) feature_dim = features.shape[1] for item in labels_set: index = np.where(item==labels)[0] feature_classwise = features[index] self.class_labels.append(item) self.means.append(torch.from_numpy(np.mean(feature_classwise, axis=0))) self.covs.append(torch.from_numpy(np.cov(feature_classwise.T))) def pre_train_process(self, t, trn_loader): """Runs before training all epochs of the task (before the train session)""" if t == 0: # Sec. 
4.1: "the ReLU in the penultimate layer is removed to allow the features to take both positive and # negative values" if self.model.model.__class__.__name__ == 'ResNet': old_block = self.model.model.layer3[-1] self.model.model.layer3[-1] = BasicBlockNoRelu(old_block.conv1, old_block.bn1, old_block.relu, old_block.conv2, old_block.bn2, old_block.downsample) elif self.model.model.__class__.__name__ == 'SmallCNN': self.model.model.last_relu = False else: warnings.warn("Warning: ReLU not removed from last block.") super().pre_train_process(t, trn_loader) def train_epoch(self, t, trn_loader): """Runs a single epoch""" self.model.train() if self.fix_bn and t > 0: self.model.freeze_bn() for images, targets in trn_loader: # Forward current model if not self.OPL: features = None outputs = self.model(images.to(self.device)) else: outputs, features = self.model(images.to(self.device), return_features=True) loss = self.criterion(t, outputs, targets.to(self.device), features) # Backward self.optimizer.zero_grad() loss.backward() torch.nn.utils.clip_grad_norm_(self.model.parameters(), self.clipgrad) self.optimizer.step() def train_loop(self, t, trn_loader, val_loader): """Contains the epochs loop""" # add exemplars to train_loader if len(self.exemplars_dataset) > 0 and t > 0: trn_loader = torch.utils.data.DataLoader(trn_loader.dataset + self.exemplars_dataset, batch_size=trn_loader.batch_size, shuffle=True, num_workers=trn_loader.num_workers, pin_memory=trn_loader.pin_memory) # FINETUNING TRAINING -- contains the epochs loop super().train_loop(t, trn_loader, val_loader) # EXEMPLAR MANAGEMENT -- select training subset self.exemplars_dataset.collect_exemplars(self.model, trn_loader, val_loader.dataset.transform) self.save_protype(self.model, trn_loader) def classify(self, task, features, targets): # expand means to all batch images # bs*256*num_classes means = torch.stack(self.means) means = torch.stack([means]*features.shape[0]) means = means.transpose(1,2) # expand all features 
to all classes features = features.unsqueeze(2) features = features.expand_as(means) # get cosine-similarities for all images to all prototypes # note: features and means do not need normalize cos_sim = torch.nn.functional.cosine_similarity(features, means.to(self.device), dim=1, eps=1e-08) # bs*num_classes pred = cos_sim.argmax(1) hits_tag = (pred == targets.to(self.device)).float() return hits_tag, hits_tag def eval_ncm(self, t, val_loader): with torch.no_grad(): total_loss, total_acc_taw, total_acc_tag, total_num = 0, 0, 0, 0 self.model.eval() for images, targets in val_loader: # Forward old model old_features = None if t > 0: old_outputs, old_features = self.model_old(images.to(self.device), return_features=True) # Forward current model outputs, feats = self.model(images.to(self.device), return_features=True) loss = self.criterion(t, outputs, targets.to(self.device), feats) # during training, the usual accuracy is not computed if t > len(self.means)-1: print('No means created yet!') hits_taw, hits_tag = torch.zeros(targets.shape[0]).float(), torch.zeros(targets.shape[0]).float() else: hits_taw, hits_tag = self.classify(t, feats, targets) # Log total_loss += loss.item() * len(targets) total_acc_taw += hits_taw.sum().item() total_acc_tag += hits_tag.sum().item() total_num += len(targets) return total_loss / total_num, total_acc_taw / total_num, total_acc_tag / total_num def eval(self, t, val_loader): """Contains the evaluation code""" with torch.no_grad(): total_loss, total_acc_taw, total_acc_tag, total_num = 0, 0, 0, 0 self.model.eval() for images, targets in val_loader: # Forward current model if self.OPL: outputs, features = self.model(images.to(self.device), return_features=True) else: outputs = self.model(images.to(self.device)) features = None loss = self.criterion(t, outputs, targets.to(self.device), features) hits_taw, hits_tag = self.calculate_metrics(outputs, targets) # Log total_loss += loss.item() * len(targets) total_acc_taw += hits_taw.sum().item() 
total_acc_tag += hits_tag.sum().item() total_num += len(targets) return total_loss / total_num, total_acc_taw / total_num, total_acc_tag / total_num def criterion(self, t, outputs, targets, features=None): """Returns the loss value""" if self.all_out or len(self.exemplars_dataset) > 0: if self.CE and not self.OPL: return torch.nn.functional.cross_entropy(torch.cat(outputs, dim=1), targets) if self.CE and self.OPl: return torch.nn.functional.cross_entropy(torch.cat(outputs, dim=1), targets) + self.opl_weight*OrthogonalProjectionLoss(self.gamma)(features, targets, normalize=True) if not self.CE and self.OPL: return OrthogonalProjectionLoss(self.gamma)(features, targets, normalize=True) else: if self.CE and not self.OPL: return torch.nn.functional.cross_entropy(outputs[t], targets - self.model.task_offset[t]) if self.CE and self.OPL: return torch.nn.functional.cross_entropy(outputs[t], targets - self.model.task_offset[t]) + self.opl_weight*OrthogonalProjectionLoss(self.gamma)(features, targets - self.model.task_offset[t], normalize=True) if not self.CE and self.OPL: return OrthogonalProjectionLoss(self.gamma)(features, targets, normalize=True) class OrthogonalProjectionLoss(nn.Module): def __init__(self, gamma=0.5): super(OrthogonalProjectionLoss, self).__init__() self.gamma = gamma def forward(self, features, labels=None, normalize=True): device = (torch.device('cuda') if features.is_cuda else torch.device('cpu')) # features are normalized if normalize: features = F.normalize(features, p=2, dim=1) labels = labels[:, None] # extend dim mask = torch.eq(labels, labels.t()).bool().to(device) eye = torch.eye(mask.shape[0], mask.shape[1]).bool().to(device) mask_pos = mask.masked_fill(eye, 0).float() mask_neg = (~mask).float() dot_prod = torch.matmul(features, features.t()) pos_pairs_mean = (mask_pos * dot_prod).sum() / (mask_pos.sum() + 1e-6) neg_pairs_mean = (mask_neg * dot_prod).sum() / (mask_neg.sum() + 1e-6) # TODO: removed abs loss = (1.0 - pos_pairs_mean) + 
self.gamma * neg_pairs_mean return loss # This class implements a ResNet Basic Block without the final ReLu in the forward class BasicBlockNoRelu(nn.Module): expansion = 1 def __init__(self, conv1, bn1, relu, conv2, bn2, downsample): super(BasicBlockNoRelu, self).__init__() self.conv1 = conv1 self.bn1 = bn1 self.relu = relu self.conv2 = conv2 self.bn2 = bn2 self.downsample = downsample def forward(self, x): residual = x out = self.relu(self.bn1(self.conv1(x))) out = self.bn2(self.conv2(out)) if self.downsample is not None: residual = self.downsample(x) out += residual # Removed final ReLU return out
47.409962
223
0.601422
1,530
12,374
4.692157
0.191503
0.032595
0.020059
0.017551
0.333473
0.279983
0.256025
0.218136
0.195431
0.195431
0
0.011894
0.293357
12,374
261
224
47.409962
0.809126
0.091401
0
0.217172
0
0
0.033992
0.009393
0
0
0
0.003831
0
1
0.080808
false
0
0.035354
0.005051
0.207071
0.005051
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7aaca2e7fcf2fade3456711cd8963f5e6c2b9c4d
234
py
Python
src/create_flask_app/api/app/some_blueprint/routes.py
isakal/create_flask_app
6ff35f6d1d670b20a4fa7d3aa2f441125b7aecde
[ "MIT" ]
17
2019-12-18T12:41:35.000Z
2022-03-17T22:38:12.000Z
src/create_flask_app/api/app/some_blueprint/routes.py
isakal/create_flask_app
6ff35f6d1d670b20a4fa7d3aa2f441125b7aecde
[ "MIT" ]
2
2020-03-15T11:17:05.000Z
2021-01-26T23:07:46.000Z
src/create_flask_app/api/app/some_blueprint/routes.py
isakal/create_flask_app
6ff35f6d1d670b20a4fa7d3aa2f441125b7aecde
[ "MIT" ]
6
2020-06-09T14:36:54.000Z
2022-03-17T22:13:53.000Z
from flask import Blueprint, jsonify api = Blueprint('api', __name__) @api.route("/") def home(): return {"home": "page"} @api.route("/<string:variable>") def greeting(variable): return {"hello": variable}
15.6
37
0.611111
26
234
5.346154
0.615385
0.115108
0
0
0
0
0
0
0
0
0
0
0.209402
234
14
38
16.714286
0.751351
0
0
0
0
0
0.159091
0
0
0
0
0
0
1
0.25
false
0
0.125
0.25
0.625
0.25
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
3
7aacb536ae303f634a3016d2c8edb89930e0c942
11,609
py
Python
SMA/lab_simfleet/simfleet/mystrategy.py
jiwidi/MIARFID
979eda45fc18a4816ef65d33b1423a6d63176c04
[ "MIT" ]
null
null
null
SMA/lab_simfleet/simfleet/mystrategy.py
jiwidi/MIARFID
979eda45fc18a4816ef65d33b1423a6d63176c04
[ "MIT" ]
null
null
null
SMA/lab_simfleet/simfleet/mystrategy.py
jiwidi/MIARFID
979eda45fc18a4816ef65d33b1423a6d63176c04
[ "MIT" ]
null
null
null
import json
import random

from loguru import logger

from simfleet.customer import CustomerStrategyBehaviour
from simfleet.fleetmanager import FleetManagerStrategyBehaviour
from simfleet.helpers import PathRequestException, distance_in_meters
from simfleet.protocol import (
    REQUEST_PERFORMATIVE,
    ACCEPT_PERFORMATIVE,
    REFUSE_PERFORMATIVE,
    PROPOSE_PERFORMATIVE,
    CANCEL_PERFORMATIVE,
    INFORM_PERFORMATIVE,
    QUERY_PROTOCOL,
    REQUEST_PROTOCOL,
)
from simfleet.transport import TransportStrategyBehaviour
from simfleet.utils import (
    TRANSPORT_WAITING,
    TRANSPORT_WAITING_FOR_APPROVAL,
    CUSTOMER_WAITING,
    TRANSPORT_MOVING_TO_CUSTOMER,
    CUSTOMER_ASSIGNED,
    TRANSPORT_WAITING_FOR_STATION_APPROVAL,
    TRANSPORT_MOVING_TO_STATION,
    TRANSPORT_CHARGING,
    TRANSPORT_CHARGED,
    TRANSPORT_NEEDS_CHARGING,
)


################################################################
#                                                              #
#                     FleetManager Strategy                    #
#                                                              #
################################################################
class MyFleetManagerStrategy(FleetManagerStrategyBehaviour):
    """FleetManager strategy that forwards each customer request to the
    single transport closest to the customer's origin (instead of
    broadcasting to all transports, as the default strategy does).
    """

    async def run(self):
        # Register with the platform once, before serving any requests.
        if not self.agent.registration:
            await self.send_registration()

        msg = await self.receive(timeout=5)
        logger.debug("Manager received message: {}".format(msg))
        if msg:
            content = json.loads(msg.body)
            customer = content["customer_id"]
            position = content["origin"]
            # NOTE(review): `customer` and `destination` are extracted but never
            # used below.
            destination = content["dest"]
            # Linear scan for the transport nearest to the customer's origin.
            # 10e99 acts as an "infinity" sentinel for the running minimum.
            best_transport = None
            min_distance = 10e99
            for transport in self.get_transport_agents().values():
                # NOTE(review): the three warnings below look like leftover
                # debug output -- consider removing.
                logger.warning("EEeeeee")
                logger.warning(type(transport))
                logger.warning((transport))
                dst = distance_in_meters(transport.get_position(), position)
                if dst < min_distance:
                    min_distance = dst
                    best_transport = transport
            # NOTE(review): if no transports are registered, best_transport is
            # still None here and the subscription below raises TypeError.
            # Also, `transport` is called as an object (get_position()) but
            # indexed like a dict ("jid"/"name") -- confirm the element type
            # returned by get_transport_agents().
            msg.to = str(best_transport["jid"])
            logger.debug(
                "Manager sent request to transport {}".format(best_transport["name"])
            )
            await self.send(msg)


################################################################
#                                                              #
#                     Transport Strategy                       #
#                                                              #
################################################################
class MyTransportStrategy(TransportStrategyBehaviour):
    """Transport strategy: accepts every customer request it receives while
    available, and handles the charging-station negotiation when the vehicle
    needs to recharge.
    """

    async def run(self):
        # Charging phase: either discover stations or reserve a random one.
        if self.agent.needs_charging():
            if self.agent.stations is None or len(self.agent.stations) < 1:
                logger.warning(
                    "Transport {} looking for a station.".format(self.agent.name)
                )
                await self.send_get_stations()
            else:
                # Pick an arbitrary known station and propose to reserve it.
                station = random.choice(list(self.agent.stations.keys()))
                logger.info(
                    "Transport {} reserving station {}.".format(
                        self.agent.name, station
                    )
                )
                await self.send_proposal(station)
                self.agent.status = TRANSPORT_WAITING_FOR_STATION_APPROVAL

        msg = await self.receive(timeout=5)
        if not msg:
            return
        logger.debug("Transport received message: {}".format(msg))
        try:
            content = json.loads(msg.body)
        except TypeError:
            # Messages without a JSON body (e.g. body is None) get an empty
            # content dict so the handlers below can still run.
            content = {}
        performative = msg.get_metadata("performative")
        protocol = msg.get_metadata("protocol")

        if protocol == QUERY_PROTOCOL:
            # Reply to our earlier stations query.
            if performative == INFORM_PERFORMATIVE:
                self.agent.stations = content
                logger.info(
                    "Got list of current stations: {}".format(
                        list(self.agent.stations.keys())
                    )
                )
            elif performative == CANCEL_PERFORMATIVE:
                logger.info("Cancellation of request for stations information.")
        elif protocol == REQUEST_PROTOCOL:
            logger.debug(
                "Transport {} received request protocol from customer/station.".format(
                    self.agent.name
                )
            )
            if performative == REQUEST_PERFORMATIVE:
                # New customer request: propose if we have enough autonomy,
                # otherwise refuse and switch to the charging flow.
                if self.agent.status == TRANSPORT_WAITING:
                    if not self.has_enough_autonomy(content["origin"], content["dest"]):
                        await self.cancel_proposal(content["customer_id"])
                        self.agent.status = TRANSPORT_NEEDS_CHARGING
                    else:
                        await self.send_proposal(content["customer_id"], {})
                        self.agent.status = TRANSPORT_WAITING_FOR_APPROVAL
            elif performative == ACCEPT_PERFORMATIVE:
                # Customer accepted our proposal: start driving to them.
                if self.agent.status == TRANSPORT_WAITING_FOR_APPROVAL:
                    logger.debug(
                        "Transport {} got accept from {}".format(
                            self.agent.name, content["customer_id"]
                        )
                    )
                    try:
                        self.agent.status = TRANSPORT_MOVING_TO_CUSTOMER
                        await self.pick_up_customer(
                            content["customer_id"], content["origin"], content["dest"]
                        )
                    except PathRequestException:
                        logger.error(
                            "Transport {} could not get a path to customer {}. Cancelling...".format(
                                self.agent.name, content["customer_id"]
                            )
                        )
                        self.agent.status = TRANSPORT_WAITING
                        await self.cancel_proposal(content["customer_id"])
                    except Exception as e:
                        logger.error(
                            "Unexpected error in transport {}: {}".format(
                                self.agent.name, e
                            )
                        )
                        await self.cancel_proposal(content["customer_id"])
                        self.agent.status = TRANSPORT_WAITING
                else:
                    # Accept arrived while we are no longer waiting: decline.
                    await self.cancel_proposal(content["customer_id"])
            elif performative == REFUSE_PERFORMATIVE:
                logger.debug(
                    "Transport {} got refusal from customer/station".format(
                        self.agent.name
                    )
                )
                self.agent.status = TRANSPORT_WAITING
            elif performative == INFORM_PERFORMATIVE:
                # Station accepted our reservation: drive there and charge.
                if self.agent.status == TRANSPORT_WAITING_FOR_STATION_APPROVAL:
                    logger.info(
                        "Transport {} got accept from station {}".format(
                            self.agent.name, content["station_id"]
                        )
                    )
                    try:
                        self.agent.status = TRANSPORT_MOVING_TO_STATION
                        await self.send_confirmation_travel(content["station_id"])
                        await self.go_to_the_station(
                            content["station_id"], content["dest"]
                        )
                    except PathRequestException:
                        logger.error(
                            "Transport {} could not get a path to station {}. Cancelling...".format(
                                self.agent.name, content["station_id"]
                            )
                        )
                        self.agent.status = TRANSPORT_WAITING
                        await self.cancel_proposal(content["station_id"])
                    except Exception as e:
                        logger.error(
                            "Unexpected error in transport {}: {}".format(
                                self.agent.name, e
                            )
                        )
                        await self.cancel_proposal(content["station_id"])
                        self.agent.status = TRANSPORT_WAITING
                elif self.agent.status == TRANSPORT_CHARGING:
                    # Station tells us charging finished: release the slot.
                    if content["status"] == TRANSPORT_CHARGED:
                        self.agent.transport_charged()
                        await self.agent.drop_station()
            elif performative == CANCEL_PERFORMATIVE:
                logger.info(
                    "Cancellation of request for {} information".format(
                        self.agent.fleet_type
                    )
                )


################################################################
#                                                              #
#                       Customer Strategy                      #
#                                                              #
################################################################
class MyCustomerStrategy(CustomerStrategyBehaviour):
    """Customer strategy: discovers fleet managers, requests a transport, and
    accepts the first proposal it receives.
    """

    async def run(self):
        # Bootstrap: ask the directory for fleet managers until we have them.
        if self.agent.fleetmanagers is None:
            await self.send_get_managers(self.agent.fleet_type)
            msg = await self.receive(timeout=5)
            if msg:
                performative = msg.get_metadata("performative")
                if performative == INFORM_PERFORMATIVE:
                    self.agent.fleetmanagers = json.loads(msg.body)
                    return
                elif performative == CANCEL_PERFORMATIVE:
                    logger.info(
                        "Cancellation of request for {} information".format(
                            self.agent.type_service
                        )
                    )
                    return

        # While unassigned, (re-)issue a transport request each cycle.
        if self.agent.status == CUSTOMER_WAITING:
            await self.send_request(content={})

        msg = await self.receive(timeout=5)
        if msg:
            performative = msg.get_metadata("performative")
            transport_id = msg.sender
            if performative == PROPOSE_PERFORMATIVE:
                # First proposal wins; any later one is refused.
                if self.agent.status == CUSTOMER_WAITING:
                    logger.debug(
                        "Customer {} received proposal from transport {}".format(
                            self.agent.name, transport_id
                        )
                    )
                    await self.accept_transport(transport_id)
                    self.agent.status = CUSTOMER_ASSIGNED
                else:
                    await self.refuse_transport(transport_id)
            elif performative == CANCEL_PERFORMATIVE:
                # Only react to a CANCEL coming from our assigned transport.
                if self.agent.transport_assigned == str(transport_id):
                    logger.warning(
                        "Customer {} received a CANCEL from Transport {}.".format(
                            self.agent.name, transport_id
                        )
                    )
                    self.agent.status = CUSTOMER_WAITING
41.909747
111
0.484193
917
11,609
5.970556
0.164667
0.073973
0.049315
0.06137
0.454247
0.384658
0.331872
0.262283
0.191233
0.179726
0
0.001321
0.413214
11,609
276
112
42.061594
0.802408
0.073564
0
0.348214
0
0
0.103131
0
0
0
0
0
0
1
0
false
0
0.040179
0
0.066964
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7aae2bd347ba3adf533cbcf9523c93ef01c1feee
540
py
Python
git_plan/util/unix.py
synek/git-plan
4cf5429348a71fb5ea8110272fb89d20bfa38c38
[ "MIT" ]
163
2021-03-06T12:01:06.000Z
2022-03-01T22:52:36.000Z
git_plan/util/unix.py
synek/git-plan
4cf5429348a71fb5ea8110272fb89d20bfa38c38
[ "MIT" ]
61
2021-03-06T07:00:39.000Z
2021-04-13T10:25:58.000Z
git_plan/util/unix.py
synek/git-plan
4cf5429348a71fb5ea8110272fb89d20bfa38c38
[ "MIT" ]
9
2021-03-07T17:52:57.000Z
2021-10-18T21:35:23.000Z
"""Unix utilities Author Rory Byrne <rory@rory.bio> """ from shutil import which import subprocess from typing import List, Optional def is_installed(name: str) -> bool: """Check whether `name` is on PATH and marked as executable.""" return which(name) is not None def run_command(cmd: List[str], capture_output: bool = True) -> Optional[str]: """Run a shell command""" result = subprocess.run(cmd, capture_output=capture_output, check=True) if result.stdout: return result.stdout.decode() return None
24.545455
78
0.701852
76
540
4.921053
0.565789
0.104278
0
0
0
0
0
0
0
0
0
0
0.185185
540
21
79
25.714286
0.85
0.235185
0
0
0
0
0
0
0
0
0
0
0
1
0.2
false
0
0.3
0
0.8
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
7aae61521064121971fd3e6425db3ee10c5c8792
655
py
Python
Excel Export API/validateUser.py
N4N0CH1P/Proyecto-Android
d2e09fe1d8766ff4369154038fe08d28d2c472b5
[ "Apache-2.0" ]
null
null
null
Excel Export API/validateUser.py
N4N0CH1P/Proyecto-Android
d2e09fe1d8766ff4369154038fe08d28d2c472b5
[ "Apache-2.0" ]
18
2019-04-23T16:00:34.000Z
2019-05-20T12:35:16.000Z
Excel Export API/validateUser.py
N4N0CH1P/Proyecto-Android
d2e09fe1d8766ff4369154038fe08d28d2c472b5
[ "Apache-2.0" ]
2
2019-04-29T16:22:01.000Z
2020-11-22T06:08:38.000Z
import mysql_config as mysql


# Validate a user's credentials against the database.
def validateUser(userID, userPassword):
    """Check userID/userPassword against the `usuario` table.

    Returns {"success": "yes"} on a match; otherwise returns the value of
    mysql.sendErrorMssg(...).
    """
    print("Validando usuario con la base de datos....")
    # SECURITY: the query is built by string concatenation, which is open to
    # SQL injection via userID. fetchDataFromDatabase only accepts a raw
    # query string, so this is flagged rather than parameterized here -- the
    # helper should be extended to accept bound parameters.
    data = mysql.fetchDataFromDatabase(
        "SELECT password FROM usuario WHERE userID='" + userID + "'"
    )
    row = mysql.getFirstElement(data)
    # Reject when the stored password does not match the supplied one.
    if row[0] != userPassword:
        print("Las claves no son validas para el usuario " + userID)
        return mysql.sendErrorMssg("Error, las claves no son correctas")
    # Success path. BUG FIX: the original built a *set* {"success", "yes"};
    # a dict is the intended JSON-like payload.
    print("Usuario validado con exito!")
    returnJson = {"success": "yes"}
    return returnJson
38.529412
94
0.716031
82
655
5.707317
0.634146
0.057692
0.047009
0.059829
0
0
0
0
0
0
0
0.001876
0.18626
655
16
95
40.9375
0.876173
0.167939
0
0
0
0
0.367837
0
0
0
0
0
0
1
0.090909
false
0.272727
0.090909
0
0.363636
0.272727
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
7ab152d88bdfe1c7ddd598bc9ed02543b5951c4d
1,582
py
Python
Chapter07/cell_counting.py
giulic3/DeepLearningLifeSciences
20b4b2eeff421d331722637899845e4c9a4a52a6
[ "MIT" ]
1
2020-04-06T04:17:27.000Z
2020-04-06T04:17:27.000Z
Chapter07/cell_counting.py
joe-nano/DeepLearningLifeSciences
258066f904159a7c1c81aba16e74ae4e6b4263b5
[ "MIT" ]
null
null
null
Chapter07/cell_counting.py
joe-nano/DeepLearningLifeSciences
258066f904159a7c1c81aba16e74ae4e6b4263b5
[ "MIT" ]
1
2020-02-16T23:43:16.000Z
2020-02-16T23:43:16.000Z
import deepchem as dc
import deepchem.models.tensorgraph.layers as layers
import numpy as np
import os
import re

# When False, a previously trained model is restored instead of retrained.
RETRAIN = False

# Load the datasets.
# Each BBBC005 filename encodes the cell count after "_C"; that number is the
# regression label.
image_dir = 'BBBC005_v1_images'
files = []
labels = []
for f in os.listdir(image_dir):
    if f.endswith('.TIF'):
        files.append(os.path.join(image_dir, f))
        labels.append(int(re.findall('_C(.*?)_', f)[0]))
loader = dc.data.ImageLoader()
dataset = loader.featurize(files, np.array(labels))
splitter = dc.splits.RandomSplitter()
train_dataset, valid_dataset, test_dataset = splitter.train_valid_test_split(dataset, seed=123)

# Create the model.
# Exponentially decaying learning rate: start 0.001, decay 0.9 every 250 steps.
learning_rate = dc.models.optimizers.ExponentialDecay(0.001, 0.9, 250)
model = dc.models.TensorGraph(learning_rate=learning_rate, model_dir='models/model')
features = layers.Feature(shape=(None, 520, 696))
# NOTE(review): `labels` here rebinds the Python list built above to a graph
# Label layer -- intentional reuse, but easy to misread.
labels = layers.Label(shape=(None,))
prev_layer = features
# Five strided conv layers halve the spatial size each step while widening
# the channel count, then a single dense unit regresses the cell count.
for num_outputs in [16, 32, 64, 128, 256]:
    prev_layer = layers.Conv2D(num_outputs, kernel_size=5, stride=2, in_layers=prev_layer)
output = layers.Dense(1, in_layers=layers.Flatten(prev_layer))
model.add_output(output)
loss = layers.ReduceSum(layers.L2Loss(in_layers=(output, labels)))
model.set_loss(loss)
# Ensure the checkpoint directory exists before restore/save.
if not os.path.exists('./models'):
    os.mkdir('models')
if not os.path.exists('./models/model'):
    os.mkdir('models/model')
if not RETRAIN:
    model.restore()

# Train it and evaluate performance on the test set.
if RETRAIN:
    print("About to fit model for 50 epochs")
    model.fit(train_dataset, nb_epoch=50)
# Report RMSE of predicted vs. true cell counts on the held-out test set.
y_pred = model.predict(test_dataset).flatten()
print(np.sqrt(np.mean((y_pred-test_dataset.y)**2)))
32.285714
95
0.747155
248
1,582
4.625
0.459677
0.031386
0.012206
0.01918
0.040105
0.040105
0
0
0
0
0
0.031892
0.108091
1,582
48
96
32.958333
0.781006
0.054994
0
0
0
0
0.075788
0
0
0
0
0
0
1
0
false
0
0.128205
0
0.128205
0.051282
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7ab161405dab5a9d19625915f984dc3130fe9f36
2,428
py
Python
fn_playbook_utils/fn_playbook_utils/components/funct_pb_get_playbook_data.py
nickpartner-goahead/resilient-community-apps
097c0dbefddbd221b31149d82af9809420498134
[ "MIT" ]
65
2017-12-04T13:58:32.000Z
2022-03-24T18:33:17.000Z
fn_playbook_utils/fn_playbook_utils/components/funct_pb_get_playbook_data.py
nickpartner-goahead/resilient-community-apps
097c0dbefddbd221b31149d82af9809420498134
[ "MIT" ]
48
2018-03-02T19:17:14.000Z
2022-03-09T22:00:38.000Z
fn_playbook_utils/fn_playbook_utils/components/funct_pb_get_playbook_data.py
nickpartner-goahead/resilient-community-apps
097c0dbefddbd221b31149d82af9809420498134
[ "MIT" ]
95
2018-01-11T16:23:39.000Z
2022-03-21T11:34:29.000Z
# -*- coding: utf-8 -*-
#(c) Copyright IBM Corp. 2010, 2021. All Rights Reserved.
#pragma pylint: disable=unused-argument, no-self-use, line-too-long
"""AppFunction implementation"""

from cachetools import cached, TTLCache
from resilient_circuits import AppFunctionComponent, app_function, FunctionResult
from fn_playbook_utils.lib.common import get_playbooks_by_incident_id, parse_inputs

PACKAGE_NAME = "fn_playbook_utils"
FN_NAME = "pb_get_playbook_data"


class FunctionComponent(AppFunctionComponent):
    """Component that implements function 'pb_get_playbook_data'"""

    def __init__(self, opts):
        super(FunctionComponent, self).__init__(opts, PACKAGE_NAME)
        # REST client used for all SOAR API calls made by this component.
        self.restclient = self.rest_client()

    @app_function(FN_NAME)
    def _app_function(self, fn_inputs):
        """
        Function: Get information on workflows run for this incident or for a range of incidents
        Inputs:
            -   fn_inputs.pb_min_incident_id
            -   fn_inputs.pb_max_incident_id
            -   fn_inputs.pb_min_incident_date
            -   fn_inputs.pb_max_incident_date
            -   fn_inputs.pb_object_name
            -   fn_inputs.pb_object_type
        """
        yield self.status_message("Starting App Function: '{0}'".format(FN_NAME))

        # parse_inputs resolves the id/date inputs into a concrete id range.
        min_id, max_id = parse_inputs(self.restclient, fn_inputs)
        yield self.status_message("Using min_incident: {} max_incident: {}".format(min_id, max_id))

        result_data = self.get_all_incident_playbooks(min_id, max_id)

        yield self.status_message("Finished running App Function: '{0}'".format(FN_NAME))

        yield FunctionResult(result_data)

    # NOTE(review): cachetools' @cached on an instance method keys on `self`
    # too and keeps a reference to each instance for up to the TTL -- see the
    # flake8-bugbear B019 caveat for the analogous lru_cache case. Confirm
    # this component is effectively a singleton before relying on it.
    @cached(cache=TTLCache(maxsize=30, ttl=60))
    def get_all_incident_playbooks(self, min_id, max_id):
        # get all the incident data to return
        # result_dict maps incident_id -> list of playbook records.
        result_dict = {}
        result_data = {
            "org_id" : self.restclient.org_id,
            "min_id": min_id,
            "max_id": max_id,
            "playbook_content": result_dict
        }

        # don't continue if no values
        if bool(min_id and max_id):
            search_results = get_playbooks_by_incident_id(self.restclient, min_id, max_id)
            # Group returned playbooks by their incident id.
            for pb in search_results.get('data', []):
                if pb['incident_id'] in result_dict:
                    result_dict[pb['incident_id']].append(pb)
                else:
                    result_dict[pb['incident_id']] = [pb]

        return result_data
36.787879
99
0.66598
315
2,428
4.790476
0.35873
0.042412
0.032472
0.039761
0.163022
0.031809
0
0
0
0
0
0.008104
0.237644
2,428
65
100
37.353846
0.807131
0.250412
0
0
0
0
0.122248
0
0
0
0
0
0
1
0.088235
false
0
0.088235
0
0.235294
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7ab3b78ef740ec202ef067979278dcabfb3ae5f2
1,274
py
Python
ezdisteach/lib/imscc/imsccfilebuilder.py
call-learning/ez-disteach
7636dfdbfb709769824266800ebba18be764ecc3
[ "MIT" ]
null
null
null
ezdisteach/lib/imscc/imsccfilebuilder.py
call-learning/ez-disteach
7636dfdbfb709769824266800ebba18be764ecc3
[ "MIT" ]
null
null
null
ezdisteach/lib/imscc/imsccfilebuilder.py
call-learning/ez-disteach
7636dfdbfb709769824266800ebba18be764ecc3
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*-
"""
IMSCC File Builder

Produce an lxml entity ready to convert to text
"""
import io


def course_output_file(model, ioopen=io.open) -> None:
    """Emit the output files for every child item of a course."""
    for child in model:
        file_output(child, ioopen)


def section_output_file(model, ioopen=io.open) -> None:
    """Emit the output files for every child item of a section."""
    for child in model:
        file_output(child, ioopen)


def label_output_file(model, ioopen=io.open) -> None:
    """Labels produce no file of their own."""
    pass


def assessment_output_file(model, ioopen=io.open) -> None:
    """Assessments currently produce no file."""
    pass


def discussion_output_file(model, ioopen=io.open) -> None:
    # Output the discussion.xml file
    pass


def binaryfile_output_file(model, ioopen=io.open) -> None:
    """Write the model's exported payload to the file named by model.name."""
    with ioopen(model.name, 'w') as out:
        out.write(model.export())


def image_output_file(model, ioopen=io.open) -> None:
    """Write the image model's exported payload to the file named by model.name."""
    with ioopen(model.name, 'w') as out:
        out.write(model.export())


def file_output(model, ioopen=io.open):
    """Dispatch `model` to the builder named `<classname>_output_file`.

    The builder is looked up in this module's globals by the lowercased class
    name, so new model kinds only need a matching function here.
    """
    modelclassname = model.__class__.__name__
    builderfunctname = '%s_output_file' % modelclassname.lower()
    builderfunct = globals().get(builderfunctname, None)
    if builderfunct is None:
        raise NotImplementedError(
            'cannot find file output builder function {} for {}'.format(builderfunctname, modelclassname))
    return builderfunct(model, ioopen)
24.5
106
0.681319
167
1,274
5.035928
0.353293
0.117717
0.123662
0.161712
0.464923
0.464923
0.464923
0.428062
0.428062
0.337693
0
0.000984
0.202512
1,274
51
107
24.980392
0.826772
0.094192
0
0.392857
0
0
0.058047
0
0
0
0
0
0
1
0.285714
false
0.107143
0.035714
0
0.357143
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
3
7ab7e1f1b7a93fea789add2b69980d7060a7ac0c
648
py
Python
tests/integration/test_multipart.py
yyolk/uplink
c0733e3577cc7539be8ca39d3783c87ebe6410b7
[ "MIT" ]
918
2017-10-20T10:47:40.000Z
2022-03-27T19:10:21.000Z
tests/integration/test_multipart.py
yyolk/uplink
c0733e3577cc7539be8ca39d3783c87ebe6410b7
[ "MIT" ]
248
2017-10-20T03:58:20.000Z
2022-03-13T18:39:16.000Z
tests/integration/test_multipart.py
yyolk/uplink
c0733e3577cc7539be8ca39d3783c87ebe6410b7
[ "MIT" ]
66
2017-10-21T02:56:34.000Z
2022-02-15T08:27:50.000Z
# Local imports from uplink import Consumer, PartMap, post, multipart # Constants BASE_URL = "https://example.com/" def test_without_converter(mock_response, mock_client): class Calendar(Consumer): @multipart @post("/attachments") def upload_attachments(self, **files: PartMap): pass mock_client.with_response(mock_response) calendar = Calendar(base_url=BASE_URL, client=mock_client) file = object() # Run calendar.upload_attachments(file=file) # Assertion: should not convert if converter is None request = mock_client.history[0] assert request.files == {"file": file}
25.92
62
0.694444
77
648
5.662338
0.571429
0.091743
0
0
0
0
0
0
0
0
0
0.001942
0.205247
648
24
63
27
0.84466
0.12037
0
0
0
0
0.063717
0
0
0
0
0
0.071429
1
0.142857
false
0.071429
0.071429
0
0.285714
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
7ab7f304cf92455bd030e34fb9b57d32e2f4394e
1,864
py
Python
Amazone/Android_Phone_Price.py
anivalogy/Web_Scraping
9431ee434e9c19adcf45d185065625608755acc4
[ "Apache-2.0" ]
1
2020-11-10T11:30:07.000Z
2020-11-10T11:30:07.000Z
Amazone/Android_Phone_Price.py
anivalogy/Web_Scraping
9431ee434e9c19adcf45d185065625608755acc4
[ "Apache-2.0" ]
null
null
null
Amazone/Android_Phone_Price.py
anivalogy/Web_Scraping
9431ee434e9c19adcf45d185065625608755acc4
[ "Apache-2.0" ]
1
2020-12-24T12:25:40.000Z
2020-12-24T12:25:40.000Z
import csv from bs4 import BeautifulSoup from selenium import webdriver import csv from bs4 import BeautifulSoup from selenium import webdriver from selenium.webdriver import Chrome def get_url(search_item): template="https://www.amazon.in/s?k={}&crid=1GNY6Q6AHOOKS&sprefix=and%2Caps%2C524&ref=nb_sb_ss_ts-oa-p_1_3" search_item=search_item.replace(' ','+') #add query tool url =template.format(search_item) url+='&page{}' return url def extract_record(item): #description Url and heading atag=item.h2.a description=atag.text.strip() url="https://www.amazon.in/" +atag.get('href') try: #price price_present=item.find('span','a-price') price=price_present.find('span' ,'a-offscreen').text except AttributeError: return try: #rating and review rating=item.i.text review_count = item.find('span',{'class':'a-size-base','dir':'auto'}).text except AttributeError: rating='' review_count results=(description,price,rating,review_count,url) return results def main(search_item): record=[] url=get_url(search_item) for page in range(1,21): driver.get(url.format(page)) soup=BeautifulSoup(driver.page_source,'html.parser') results =soup.find_all('div',{"data-component-type":"s-search-result"}) for item in results: record =extract_record(item) if record: records.append(record) driver.close() #save data as csv file with open('results.csv','w',newline='',encoding='utf-8')as f: writer=csv.writer(f) writer.writerow(['Description','Price','Rating','ReviewCount','url']) writer.writerows(records) print(main('android phone'))
24.207792
111
0.624464
235
1,864
4.859574
0.455319
0.052539
0.022767
0.028021
0.108581
0.108581
0.108581
0.108581
0.108581
0.108581
0
0.012057
0.243562
1,864
76
112
24.526316
0.797872
0.045064
0
0.212766
0
0.021277
0.168362
0
0
0
0
0
0
1
0.06383
false
0
0.148936
0
0.276596
0.021277
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7ab8408954f6215140e2f66ce3a023038a830391
25,995
py
Python
Assembly Files/edd project modules/extra/world/original - Copy/unknown.py
Pranesh6767/E_tax_with_MySQL
104c7f57c569938439c35f42706edbd575181d34
[ "MIT" ]
1
2020-10-15T09:09:24.000Z
2020-10-15T09:09:24.000Z
Assembly Files/edd project modules/extra/world/original - Copy/unknown.py
Pranesh6767/E_tax_with_MySQL
104c7f57c569938439c35f42706edbd575181d34
[ "MIT" ]
null
null
null
Assembly Files/edd project modules/extra/world/original - Copy/unknown.py
Pranesh6767/E_tax_with_MySQL
104c7f57c569938439c35f42706edbd575181d34
[ "MIT" ]
2
2020-10-01T20:56:05.000Z
2021-09-22T06:38:36.000Z
#! /usr/bin/env python # -*- coding: utf-8 -*- # # GUI module generated by PAGE version 4.21 # in conjunction with Tcl version 8.6 # Apr 22, 2019 01:28:38 AM +0530 platform: Windows NT import sys try: import Tkinter as tk except ImportError: import tkinter as tk try: import ttk py3 = False except ImportError: import tkinter.ttk as ttk py3 = True import unknown_support import os.path def vp_start_gui(): '''Starting point when module is the main routine.''' global val, w, root global prog_location prog_call = sys.argv[0] print ('prog_call = {}'.format(prog_call)) prog_location = os.path.split(prog_call)[0] print ('prog_location = {}'.format(prog_location)) sys.stdout.flush() root = tk.Tk() top = Toplevel1 (root) unknown_support.init(root, top) root.mainloop() w = None def create_Toplevel1(root, *args, **kwargs): '''Starting point when module is imported by another program.''' global w, w_win, rt global prog_location prog_call = sys.argv[0] print ('prog_call = {}'.format(prog_call)) prog_location = os.path.split(prog_call)[0] print ('prog_location = {}'.format(prog_location)) rt = root w = tk.Toplevel (root) top = Toplevel1 (w) unknown_support.init(w, top, *args, **kwargs) return (w, top) def destroy_Toplevel1(): global w w.destroy() w = None class Toplevel1: def __init__(self, top=None): '''This class configures and populates the toplevel window. 
top is the toplevel containing window.''' _bgcolor = '#d9d9d9' # X11 color: 'gray85' _fgcolor = '#000000' # X11 color: 'black' _compcolor = '#d9d9d9' # X11 color: 'gray85' _ana1color = '#d9d9d9' # X11 color: 'gray85' _ana2color = '#ececec' # Closest X11 color: 'gray92' font17 = "-family {Berlin Sans FB} -size 15" self.style = ttk.Style() if sys.platform == "win32": self.style.theme_use('winnative') self.style.configure('.',background=_bgcolor) self.style.configure('.',foreground=_fgcolor) self.style.configure('.',font="TkDefaultFont") self.style.map('.',background= [('selected', _compcolor), ('active',_ana2color)]) top.geometry("1600x837+6+194") top.title("New Toplevel") top.configure(background="#ffff24") top.configure(highlightbackground="#d9d9d9") top.configure(highlightcolor="black") self.Label1 = tk.Label(top) self.Label1.place(relx=0.013, rely=0.024, height=81, width=156) self.Label1.configure(activebackground="#f9f9f9") self.Label1.configure(activeforeground="black") self.Label1.configure(background="#ffff24") self.Label1.configure(disabledforeground="#a3a3a3") self.Label1.configure(font="-family {Britannic Bold} -size 48 -weight bold") self.Label1.configure(foreground="#ff250d") self.Label1.configure(highlightbackground="#d9d9d9") self.Label1.configure(highlightcolor="black") self.Label1.configure(text='''eTAX''') self.Label1_1 = tk.Label(top) self.Label1_1.place(relx=0.113, rely=0.024, height=81, width=156) self.Label1_1.configure(activebackground="#f9f9f9") self.Label1_1.configure(activeforeground="black") self.Label1_1.configure(background="#ffff24") self.Label1_1.configure(disabledforeground="#a3a3a3") self.Label1_1.configure(font="-family {Britannic Bold} -size 48 -weight bold") self.Label1_1.configure(foreground="#2212ff") self.Label1_1.configure(highlightbackground="#d9d9d9") self.Label1_1.configure(highlightcolor="black") self.Label1_1.configure(text='''2019''') self.Label2 = tk.Label(top) self.Label2.place(relx=0.069, rely=0.108, height=31, 
width=141) self.Label2.configure(activebackground="#f9f9f9") self.Label2.configure(activeforeground="black") self.Label2.configure(background="#ffff24") self.Label2.configure(disabledforeground="#a3a3a3") self.Label2.configure(font="-family {Segoe Script} -size 12 -slant italic") self.Label2.configure(foreground="#13c12a") self.Label2.configure(highlightbackground="#d9d9d9") self.Label2.configure(highlightcolor="black") self.Label2.configure(text='''working for you''') self.backbutton = tk.Button(top) self.backbutton.place(relx=0.013, rely=0.167, height=44, width=97) self.backbutton.configure(activebackground="#ececec") self.backbutton.configure(activeforeground="#000000") self.backbutton.configure(background="#120bd8") self.backbutton.configure(disabledforeground="#a3a3a3") self.backbutton.configure(font="-family {Rockwell Extra Bold} -size 12 -weight bold") self.backbutton.configure(foreground="#fcffff") self.backbutton.configure(highlightbackground="#d9d9d9") self.backbutton.configure(highlightcolor="black") self.backbutton.configure(pady="0") self.backbutton.configure(text='''Back''') self.exit = tk.Button(top) self.exit.place(relx=0.1, rely=0.167, height=44, width=97) self.exit.configure(activebackground="#ececec") self.exit.configure(activeforeground="#000000") self.exit.configure(background="#120bd8") self.exit.configure(disabledforeground="#a3a3a3") self.exit.configure(font="-family {Rockwell Extra Bold} -size 12 -weight bold") self.exit.configure(foreground="#fcffff") self.exit.configure(highlightbackground="#d9d9d9") self.exit.configure(highlightcolor="black") self.exit.configure(pady="0") self.exit.configure(text='''Exit''') self.Label3 = tk.Label(top) self.Label3.place(relx=0.013, rely=0.944, height=21, width=56) self.Label3.configure(activebackground="#f9f9f9") self.Label3.configure(activeforeground="black") self.Label3.configure(background="#ffff24") self.Label3.configure(disabledforeground="#a3a3a3") self.Label3.configure(foreground="#000000") 
self.Label3.configure(highlightbackground="#d9d9d9") self.Label3.configure(highlightcolor="black") self.Label3.configure(text='''etax-2019''') self.Label3_3 = tk.Label(top) self.Label3_3.place(relx=0.013, rely=0.968, height=21, width=34) self.Label3_3.configure(activebackground="#f9f9f9") self.Label3_3.configure(activeforeground="black") self.Label3_3.configure(background="#ffff24") self.Label3_3.configure(disabledforeground="#a3a3a3") self.Label3_3.configure(foreground="#000000") self.Label3_3.configure(highlightbackground="#d9d9d9") self.Label3_3.configure(highlightcolor="black") self.Label3_3.configure(text='''v 1.0.2''') self.Label3_4 = tk.Label(top) self.Label3_4.place(relx=0.006, rely=1.016, height=21, width=134) self.Label3_4.configure(activebackground="#f9f9f9") self.Label3_4.configure(activeforeground="black") self.Label3_4.configure(background="#ffff24") self.Label3_4.configure(disabledforeground="#a3a3a3") self.Label3_4.configure(foreground="#000000") self.Label3_4.configure(highlightbackground="#d9d9d9") self.Label3_4.configure(highlightcolor="black") self.Label3_4.configure(text='''Working On Windows''') self.Label3_1 = tk.Label(top) self.Label3_1.place(relx=0.013, rely=0.992, height=21, width=164) self.Label3_1.configure(activebackground="#f9f9f9") self.Label3_1.configure(activeforeground="black") self.Label3_1.configure(background="#ffff24") self.Label3_1.configure(disabledforeground="#a3a3a3") self.Label3_1.configure(foreground="#000000") self.Label3_1.configure(highlightbackground="#d9d9d9") self.Label3_1.configure(highlightcolor="black") self.Label3_1.configure(text='''Connected to MySQL server 8.0''') self.Label4 = tk.Label(top) self.Label4.place(relx=0.375, rely=0.024, height=68, width=361) self.Label4.configure(activebackground="#f9f9f9") self.Label4.configure(activeforeground="black") self.Label4.configure(background="#ffff24") self.Label4.configure(disabledforeground="#36911a") self.Label4.configure(font="-family {Rockwell Extra Bold} -size 
40 -weight bold") self.Label4.configure(foreground="#36911a") self.Label4.configure(highlightbackground="#d9d9d9") self.Label4.configure(highlightcolor="black") self.Label4.configure(text='''Workspace''') self.Label5 = tk.Label(top) self.Label5.place(relx=0.763, rely=0.036, height=28, width=192) self.Label5.configure(activebackground="#f9f9f9") self.Label5.configure(activeforeground="black") self.Label5.configure(background="#ffff24") self.Label5.configure(disabledforeground="#a3a3a3") self.Label5.configure(font="-family {Rockwell} -size 15") self.Label5.configure(foreground="#000000") self.Label5.configure(highlightbackground="#d9d9d9") self.Label5.configure(highlightcolor="black") self.Label5.configure(text='''Village : Kalamwadi''') self.Label5_2 = tk.Label(top) self.Label5_2.place(relx=0.781, rely=0.072, height=28, width=172) self.Label5_2.configure(activebackground="#f9f9f9") self.Label5_2.configure(activeforeground="black") self.Label5_2.configure(background="#ffff24") self.Label5_2.configure(disabledforeground="#a3a3a3") self.Label5_2.configure(font="-family {Rockwell} -size 15") self.Label5_2.configure(foreground="#000000") self.Label5_2.configure(highlightbackground="#d9d9d9") self.Label5_2.configure(highlightcolor="black") self.Label5_2.configure(text='''District : Sangli''') self.Label5_3 = tk.Label(top) self.Label5_3.place(relx=0.863, rely=0.968, height=28, width=172) self.Label5_3.configure(activebackground="#f9f9f9") self.Label5_3.configure(activeforeground="black") self.Label5_3.configure(background="#ffff24") self.Label5_3.configure(disabledforeground="#a3a3a3") self.Label5_3.configure(font="-family {Rockwell} -size 9") self.Label5_3.configure(foreground="#000000") self.Label5_3.configure(highlightbackground="#d9d9d9") self.Label5_3.configure(highlightcolor="black") self.Label5_3.configure(text='''Server Status : Online''') self.Label5_4 = tk.Label(top) self.Label5_4.place(relx=0.869, rely=0.992, height=28, width=172) 
self.Label5_4.configure(activebackground="#f9f9f9") self.Label5_4.configure(activeforeground="black") self.Label5_4.configure(background="#ffff24") self.Label5_4.configure(disabledforeground="#a3a3a3") self.Label5_4.configure(font="-family {Rockwell} -size 9") self.Label5_4.configure(foreground="#000000") self.Label5_4.configure(highlightbackground="#d9d9d9") self.Label5_4.configure(highlightcolor="black") self.Label5_4.configure(text='''Host : localhost''') self.Label5_5 = tk.Label(top) self.Label5_5.place(relx=0.869, rely=1.016, height=28, width=172) self.Label5_5.configure(activebackground="#f9f9f9") self.Label5_5.configure(activeforeground="black") self.Label5_5.configure(background="#ffff24") self.Label5_5.configure(disabledforeground="#a3a3a3") self.Label5_5.configure(font="-family {Rockwell} -size 9") self.Label5_5.configure(foreground="#000000") self.Label5_5.configure(highlightbackground="#d9d9d9") self.Label5_5.configure(highlightcolor="black") self.Label5_5.configure(text='''Port : 3306''') self.Label5_1 = tk.Label(top) self.Label5_1.place(relx=0.875, rely=0.096, height=28, width=172) self.Label5_1.configure(activebackground="#f9f9f9") self.Label5_1.configure(activeforeground="black") self.Label5_1.configure(background="#ffff24") self.Label5_1.configure(disabledforeground="#a3a3a3") self.Label5_1.configure(font="-family {Rockwell} -size 12") self.Label5_1.configure(foreground="#000000") self.Label5_1.configure(highlightbackground="#d9d9d9") self.Label5_1.configure(highlightcolor="black") self.Label5_1.configure(text='''User : user''') self.box1o1 = ScrolledListBox(top) self.box1o1.place(relx=0.388, rely=0.287, relheight=0.639 , relwidth=0.238) self.box1o1.configure(background="white") self.box1o1.configure(disabledforeground="#a3a3a3") self.box1o1.configure(font="TkFixedFont") self.box1o1.configure(foreground="black") self.box1o1.configure(highlightbackground="#d9d9d9") self.box1o1.configure(highlightcolor="#d9d9d9") 
self.box1o1.configure(selectbackground="#c4c4c4") self.box1o1.configure(selectforeground="black") self.box1o1.configure(width=10) self.box2o1 = ScrolledListBox(top) self.box2o1.place(relx=0.625, rely=0.287, relheight=0.639 , relwidth=0.326) self.box2o1.configure(background="white") self.box2o1.configure(disabledforeground="#a3a3a3") self.box2o1.configure(font="TkFixedFont") self.box2o1.configure(foreground="black") self.box2o1.configure(highlightbackground="#d9d9d9") self.box2o1.configure(highlightcolor="#d9d9d9") self.box2o1.configure(selectbackground="#c4c4c4") self.box2o1.configure(selectforeground="black") self.box2o1.configure(width=10) self.TSeparator1 = ttk.Separator(top) self.TSeparator1.place(relx=0.888, rely=0.012, relheight=0.119) self.TSeparator1.configure(orient="vertical") self.TSeparator2 = ttk.Separator(top) self.TSeparator2.place(relx=0.013, rely=0.143, relwidth=0.194) self.TSeparator3 = ttk.Separator(top) self.TSeparator3.place(relx=0.013, rely=0.239, relwidth=0.938) self.TSeparator3_6 = ttk.Separator(top) self.TSeparator3_6.place(relx=0.013, rely=0.938, relwidth=0.938) self.viewbutton = tk.Button(top) self.viewbutton.place(relx=0.425, rely=0.98, height=33, width=148) self.viewbutton.configure(activebackground="#ececec") self.viewbutton.configure(activeforeground="#000000") self.viewbutton.configure(background="#2020d8") self.viewbutton.configure(disabledforeground="#a3a3a3") self.viewbutton.configure(font="-family {Rockwell} -size 13 -weight bold") self.viewbutton.configure(foreground="#ffffff") self.viewbutton.configure(highlightbackground="#d9d9d9") self.viewbutton.configure(highlightcolor="black") self.viewbutton.configure(pady="0") self.viewbutton.configure(takefocus="0") self.viewbutton.configure(text='''View all Names''') self.viewbutton_8 = tk.Button(top) self.viewbutton_8.place(relx=0.581, rely=0.98, height=33, width=148) self.viewbutton_8.configure(activebackground="#ececec") self.viewbutton_8.configure(activeforeground="#000000") 
self.viewbutton_8.configure(background="#2020d8") self.viewbutton_8.configure(disabledforeground="#a3a3a3") self.viewbutton_8.configure(font="-family {Rockwell} -size 13 -weight bold") self.viewbutton_8.configure(foreground="#ffffff") self.viewbutton_8.configure(highlightbackground="#d9d9d9") self.viewbutton_8.configure(highlightcolor="black") self.viewbutton_8.configure(pady="0") self.viewbutton_8.configure(takefocus="0") self.viewbutton_8.configure(text='''View all Data''') self.viewbutton_9 = tk.Button(top) self.viewbutton_9.place(relx=0.744, rely=0.98, height=33, width=108) self.viewbutton_9.configure(activebackground="#ececec") self.viewbutton_9.configure(activeforeground="#000000") self.viewbutton_9.configure(background="#2020d8") self.viewbutton_9.configure(disabledforeground="#a3a3a3") self.viewbutton_9.configure(font="-family {Rockwell} -size 13 -weight bold") self.viewbutton_9.configure(foreground="#ffffff") self.viewbutton_9.configure(highlightbackground="#d9d9d9") self.viewbutton_9.configure(highlightcolor="black") self.viewbutton_9.configure(pady="0") self.viewbutton_9.configure(takefocus="0") self.viewbutton_9.configure(text='''Clear all''') self.Label6 = tk.Label(top) self.Label6.place(relx=0.913, rely=0.036, height=44, width=44) self.Label6.configure(activebackground="#f9f9f9") self.Label6.configure(activeforeground="black") self.Label6.configure(background="#d9d9d9") self.Label6.configure(disabledforeground="#a3a3a3") self.Label6.configure(foreground="#000000") self.Label6.configure(highlightbackground="#d9d9d9") self.Label6.configure(highlightcolor="black") photo_location = os.path.join(prog_location,"../../../view database/original/login3.png") self._img0 = tk.PhotoImage(file=photo_location) self.Label6.configure(image=self._img0) self.Label6.configure(text='''Label''') self.Frame1 = tk.Frame(top) self.Frame1.place(relx=0.013, rely=0.251, relheight=0.675 , relwidth=0.197) self.Frame1.configure(relief='groove') 
self.Frame1.configure(borderwidth="2") self.Frame1.configure(relief='groove') self.Frame1.configure(background="#d9d9d9") self.Frame1.configure(highlightbackground="#d9d9d9") self.Frame1.configure(highlightcolor="black") self.Frame1.configure(width=315) self.villagename = tk.Entry(top) self.villagename.place(relx=0.375, rely=0.179, height=20, relwidth=0.153) self.villagename.configure(background="white") self.villagename.configure(disabledforeground="#a3a3a3") self.villagename.configure(font="TkFixedFont") self.villagename.configure(foreground="#1b1391") self.villagename.configure(insertbackground="black") self.villagename.configure(width=244) self.Entry2 = tk.Entry(top) self.Entry2.place(relx=0.675, rely=0.179,height=20, relwidth=0.146) self.Entry2.configure(background="white") self.Entry2.configure(disabledforeground="#a3a3a3") self.Entry2.configure(font="TkFixedFont") self.Entry2.configure(foreground="#000000") self.Entry2.configure(insertbackground="black") self.Entry2.configure(width=234) self.Label7 = tk.Label(top) self.Label7.place(relx=0.269, rely=0.179, height=21, width=154) self.Label7.configure(background="#ffff24") self.Label7.configure(disabledforeground="#a3a3a3") self.Label7.configure(font=font17) self.Label7.configure(foreground="#000000") self.Label7.configure(text='''Village Name :''') self.Label7.configure(width=154) self.Label7_1 = tk.Label(top) self.Label7_1.place(relx=0.575, rely=0.179, height=21, width=154) self.Label7_1.configure(activebackground="#f9f9f9") self.Label7_1.configure(activeforeground="black") self.Label7_1.configure(background="#ffff24") self.Label7_1.configure(disabledforeground="#a3a3a3") self.Label7_1.configure(font="-family {Berlin Sans FB} -size 15") self.Label7_1.configure(foreground="#000000") self.Label7_1.configure(highlightbackground="#d9d9d9") self.Label7_1.configure(highlightcolor="black") self.Label7_1.configure(text='''UID Number :''') self.Label7_1.configure(width=154) self.btn_find = tk.Button(top) 
self.btn_find.place(relx=0.856, rely=0.167, height=34, width=97) self.btn_find.configure(activebackground="#ececec") self.btn_find.configure(activeforeground="#000000") self.btn_find.configure(background="#ff330a") self.btn_find.configure(disabledforeground="#a3a3a3") self.btn_find.configure(font="-family {Rockwell Extra Bold} -size 12 -weight bold") self.btn_find.configure(foreground="#fcffff") self.btn_find.configure(highlightbackground="#d9d9d9") self.btn_find.configure(highlightcolor="black") self.btn_find.configure(pady="0") self.btn_find.configure(text='''FIND''') self.btn_find.configure(width=97) # The following code is added to facilitate the Scrolled widgets you specified. class AutoScroll(object): '''Configure the scrollbars for a widget.''' def __init__(self, master): # Rozen. Added the try-except clauses so that this class # could be used for scrolled entry widget for which vertical # scrolling is not supported. 5/7/14. try: vsb = ttk.Scrollbar(master, orient='vertical', command=self.yview) except: pass hsb = ttk.Scrollbar(master, orient='horizontal', command=self.xview) #self.configure(yscrollcommand=_autoscroll(vsb), # xscrollcommand=_autoscroll(hsb)) try: self.configure(yscrollcommand=self._autoscroll(vsb)) except: pass self.configure(xscrollcommand=self._autoscroll(hsb)) self.grid(column=0, row=0, sticky='nsew') try: vsb.grid(column=1, row=0, sticky='ns') except: pass hsb.grid(column=0, row=1, sticky='ew') master.grid_columnconfigure(0, weight=1) master.grid_rowconfigure(0, weight=1) # Copy geometry methods of master (taken from ScrolledText.py) if py3: methods = tk.Pack.__dict__.keys() | tk.Grid.__dict__.keys() \ | tk.Place.__dict__.keys() else: methods = tk.Pack.__dict__.keys() + tk.Grid.__dict__.keys() \ + tk.Place.__dict__.keys() for meth in methods: if meth[0] != '_' and meth not in ('config', 'configure'): setattr(self, meth, getattr(master, meth)) @staticmethod def _autoscroll(sbar): '''Hide and show scrollbar as needed.''' def wrapped(first, 
last): first, last = float(first), float(last) if first <= 0 and last >= 1: sbar.grid_remove() else: sbar.grid() sbar.set(first, last) return wrapped def __str__(self): return str(self.master) def _create_container(func): '''Creates a ttk Frame with a given master, and use this new frame to place the scrollbars and the widget.''' def wrapped(cls, master, **kw): container = ttk.Frame(master) container.bind('<Enter>', lambda e: _bound_to_mousewheel(e, container)) container.bind('<Leave>', lambda e: _unbound_to_mousewheel(e, container)) return func(cls, container, **kw) return wrapped class ScrolledListBox(AutoScroll, tk.Listbox): '''A standard Tkinter Text widget with scrollbars that will automatically show/hide as needed.''' @_create_container def __init__(self, master, **kw): tk.Listbox.__init__(self, master, **kw) AutoScroll.__init__(self, master) import platform def _bound_to_mousewheel(event, widget): child = widget.winfo_children()[0] if platform.system() == 'Windows' or platform.system() == 'Darwin': child.bind_all('<MouseWheel>', lambda e: _on_mousewheel(e, child)) child.bind_all('<Shift-MouseWheel>', lambda e: _on_shiftmouse(e, child)) else: child.bind_all('<Button-4>', lambda e: _on_mousewheel(e, child)) child.bind_all('<Button-5>', lambda e: _on_mousewheel(e, child)) child.bind_all('<Shift-Button-4>', lambda e: _on_shiftmouse(e, child)) child.bind_all('<Shift-Button-5>', lambda e: _on_shiftmouse(e, child)) def _unbound_to_mousewheel(event, widget): if platform.system() == 'Windows' or platform.system() == 'Darwin': widget.unbind_all('<MouseWheel>') widget.unbind_all('<Shift-MouseWheel>') else: widget.unbind_all('<Button-4>') widget.unbind_all('<Button-5>') widget.unbind_all('<Shift-Button-4>') widget.unbind_all('<Shift-Button-5>') def _on_mousewheel(event, widget): if platform.system() == 'Windows': widget.yview_scroll(-1*int(event.delta/120),'units') elif platform.system() == 'Darwin': widget.yview_scroll(-1*int(event.delta),'units') else: if event.num == 
4: widget.yview_scroll(-1, 'units') elif event.num == 5: widget.yview_scroll(1, 'units') def _on_shiftmouse(event, widget): if platform.system() == 'Windows': widget.xview_scroll(-1*int(event.delta/120), 'units') elif platform.system() == 'Darwin': widget.xview_scroll(-1*int(event.delta), 'units') else: if event.num == 4: widget.xview_scroll(-1, 'units') elif event.num == 5: widget.xview_scroll(1, 'units') if __name__ == '__main__': vp_start_gui()
46.172291
98
0.64874
2,990
25,995
5.537124
0.145151
0.039865
0.019328
0.058106
0.499336
0.15958
0.135298
0.116453
0.081541
0.066622
0
0.070469
0.21281
25,995
562
99
46.254448
0.738601
0.043662
0
0.094538
1
0
0.113909
0.001156
0
0
0
0
0
1
0.031513
false
0.006303
0.021008
0.002101
0.069328
0.008403
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
7ab8479693143f8c93f4e4b1bf7ece0d1eb036d2
658
py
Python
construct-binary-search-tree-from-preorder-traversal/construct-binary-search-tree-from-preorder-traversal.py
Atri10/Leet-code---Atri_Patel
49fc59b9147a44ab04a66128fbb2ef259b5f7b7c
[ "MIT" ]
1
2021-10-10T20:21:18.000Z
2021-10-10T20:21:18.000Z
construct-binary-search-tree-from-preorder-traversal/construct-binary-search-tree-from-preorder-traversal.py
Atri10/Leet-code---Atri_Patel
49fc59b9147a44ab04a66128fbb2ef259b5f7b7c
[ "MIT" ]
null
null
null
construct-binary-search-tree-from-preorder-traversal/construct-binary-search-tree-from-preorder-traversal.py
Atri10/Leet-code---Atri_Patel
49fc59b9147a44ab04a66128fbb2ef259b5f7b7c
[ "MIT" ]
null
null
null
# Definition for a binary tree node. class TreeNode: def __init__(self, val=0, left=None, right=None): self.val = val self.left = left self.right = right class Solution: def bstFromPreorder(self, preorder: List[int]) -> Optional[TreeNode]: def subtree(lo, hi): if lo >= hi: return None rootval = preorder[lo] root = TreeNode( rootval ) mid = bisect.bisect_left(preorder, rootval, lo+1, hi) root.left = subtree(lo+1, mid) root.right = subtree(mid, hi) return root return subtree( 0, len(preorder) )
31.333333
73
0.550152
78
658
4.576923
0.423077
0.061625
0
0
0
0
0
0
0
0
0
0.009324
0.348024
658
21
74
31.333333
0.822844
0.051672
0
0
0
0
0
0
0
0
0
0
0
1
0.1875
false
0
0
0
0.4375
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7ab93f465042b5cca276e73a401edc8f8be4aec0
4,113
py
Python
aimsprop/xyz.py
mtzgroup/aimsprop
464d88ad7a817da73027fd2ab7b12476bf59f83d
[ "MIT" ]
1
2022-03-28T13:11:56.000Z
2022-03-28T13:11:56.000Z
aimsprop/xyz.py
mtzgroup/aimsprop
464d88ad7a817da73027fd2ab7b12476bf59f83d
[ "MIT" ]
11
2021-03-17T17:53:58.000Z
2021-07-17T17:59:25.000Z
aimsprop/xyz.py
mtzgroup/aimsprop
464d88ad7a817da73027fd2ab7b12476bf59f83d
[ "MIT" ]
2
2021-04-05T08:36:35.000Z
2021-05-20T22:12:12.000Z
import os import re import numpy as np from . import atom_data, bundle # TODO: Maybe should be in atom data _N_table = {val: key for key, val in list(atom_data.atom_symbol_table.items())} def parse_xyz( filename: str, label=1, w=1.0, I=1, t0=0.0, dt=20.0, ts=None, N_table=None, ) -> bundle.Bundle: """Parse an XYZ adiabatic bundle file directly into a Bundle. filename (str): the absolute or relative path to the xyz file. label (hashable): the label of this bundle w (float): the weight of this bundle I (int): electronic state label t0 (float): the initial time in au dt (float): the timestep in au ts (list of float): an explicit list of times in au, overrides t0 and dt N_table (dict of str : int): an optional dictionary mapping atomic symbol to atomic number, used for non-standard atom names. Returns: bundle (Bundle): the Bundle object. """ lines = open(filename).readlines() natom = int(lines[0]) # This should always work if len(lines) % (natom + 2): raise ValueError("Invalid number of lines in xyz file") nframe = len(lines) / (natom + 2) xyzs = [] Zs = [] for frame in range(nframe): lines2 = lines[frame * (natom + 2) + 2 : (frame + 1) * (natom + 2)] Z = [] xyz = [] for line in lines2: mobj = re.match(r"^\s*(\S+)\s+(\S+)\s+(\S+)\s+(\S+)\s*$", line) Z.append(mobj.group(1)) xyz.append([float(mobj.group(x)) for x in (2, 3, 4)]) xyz = np.array(xyz) xyzs.append(xyz) Zs.append(Z) # User symbol table or default? 
N_table2 = N_table if N_table else _N_table frames2 = [] for ind, xyz in enumerate(xyzs): Z = Zs[ind] Ns = [N_table2[key] for key in Z] widths = atom_data.from_Ns_to_widths(Ns) frame2 = bundle.Frame( label=label, t=dt * ind + t0 if ts is None else ts[ind], w=w, I=I, N=Ns, xyz=xyz, widths=widths, ) frames2.append(frame2) parsed_bundle = bundle.Bundle(frames2) return parsed_bundle def write_xyzs( bundle: bundle.Bundle, dirname: str, atom_format_str: str = "%-3s %24.16E %24.16E %24.16E\n", ): """Write a directory of xyz files to represent a Bundle, with one xyz file containing all frames for each label Params: bundle: Bundle to write xyz file representation of dirname: the directory to place the xyz files in (created if does not exist) atom_format_str: the format string for each atom line in the xyz file (useful to change precision). Result: xyz files are written for each label in bundle. Each xyz file contains all frames for the label, in time-order """ # Make sure directoy exists if not os.path.exists(dirname): os.makedirs(dirname) # Write xyz files for label in bundle.labels: bundle2 = bundle.subset_by_label(label) xyzfilename = str(label) # Munging with filename label xyzfilename = xyzfilename.replace(" ", "") xyzfilename = xyzfilename.replace("(", "") xyzfilename = xyzfilename.replace(")", "") xyzfilename = xyzfilename.replace(",", "-") fh = open("%s/%s.xyz" % (dirname, xyzfilename), "w") for frame in bundle2.frames: fh.write("%d\n" % frame.xyz.shape[0]) fh.write( "t = %24.16E, w = %24.16E, I = %d\n" % ( frame.t, frame.w, frame.I, ) ) for A in range(frame.xyz.shape[0]): fh.write( atom_format_str % ( atom_data.atom_symbol_table[frame.N[A]], frame.xyz[A, 0], frame.xyz[A, 1], frame.xyz[A, 2], ) )
30.021898
84
0.540968
548
4,113
4
0.306569
0.008212
0.00958
0.010949
0.097172
0.076186
0.057026
0.057026
0.05292
0
0
0.022787
0.349137
4,113
136
85
30.242647
0.79604
0.302942
0
0.046512
0
0
0.056139
0.013401
0
0
0
0.007353
0
1
0.023256
false
0
0.046512
0
0.081395
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7ab9500cd5c1495d990ceec333363832a2cb723e
745
py
Python
src/wafec/fi/hypothesis/models/test_parameter.py
wafec/wafec-fi-hypothesis
e74fea0eb5da39e8f26973fa577dc4515317150c
[ "MIT" ]
null
null
null
src/wafec/fi/hypothesis/models/test_parameter.py
wafec/wafec-fi-hypothesis
e74fea0eb5da39e8f26973fa577dc4515317150c
[ "MIT" ]
null
null
null
src/wafec/fi/hypothesis/models/test_parameter.py
wafec/wafec-fi-hypothesis
e74fea0eb5da39e8f26973fa577dc4515317150c
[ "MIT" ]
null
null
null
from sqlalchemy import * from sqlalchemy.orm import relationship from . import Base class FITestParameter(Base): __tablename__ = 'test_parameter' id = Column(Integer, primary_key=True) test_id = Column(Integer, ForeignKey('test.id')) test = relationship('FITest') name = Column(String(255), index=True) test_parameter_service_id = Column(Integer, ForeignKey('test_parameter_service.id')) test_parameter_service = relationship('FITestParameterService') test_parameter_context_id = Column(Integer, ForeignKey('test_parameter_context.id')) test_parameter_context = relationship('FITestParameterContext') created_at = Column(DateTime) updated_at = Column(DateTime) updated_count = Column(Integer)
35.47619
88
0.759732
83
745
6.53012
0.385542
0.167897
0.110701
0.138376
0.193727
0.140221
0
0
0
0
0
0.004702
0.143624
745
20
89
37.25
0.844828
0
0
0
0
0
0.162416
0.126175
0
0
0
0
0
1
0
false
0
0.1875
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7abd3d620eef1318489c51e350759a7e9fd53d7a
1,481
py
Python
hack/merge_cluster_roles.py
philips/cluster-monitoring-operator
e3d89785ebd70b369e8b9b4a1d8cfe93d3354731
[ "Apache-2.0" ]
null
null
null
hack/merge_cluster_roles.py
philips/cluster-monitoring-operator
e3d89785ebd70b369e8b9b4a1d8cfe93d3354731
[ "Apache-2.0" ]
2
2018-08-13T11:46:13.000Z
2018-08-13T12:47:12.000Z
hack/merge_cluster_roles.py
philips/cluster-monitoring-operator
e3d89785ebd70b369e8b9b4a1d8cfe93d3354731
[ "Apache-2.0" ]
2
2018-09-09T19:03:40.000Z
2020-01-08T22:24:43.000Z
#!/usr/bin/python """ merge_cluster_roles.py - merge OpenShift cluster roles into one """ # Copyright (c) 2018 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import unicode_literals, print_function import os.path import sys import yaml def main(): base_role = {} sources = [os.path.relpath(sys.argv[1])] with open(sys.argv[1], 'r') as f: base_role = yaml.load(f) manifests = sys.argv[2:] for manifest in manifests: sources.append(os.path.relpath(manifest)) with open(manifest, 'r') as f: rules = yaml.load(f)['rules'] if rules not in base_role['rules']: base_role['rules'] += rules print("---") print("# This is a generated file. DO NOT EDIT") print("# Run `make merge-cluster-roles` to generate.") print("# Sources: ") for source in sources: print("# \t" + source) print(yaml.dump(base_role)) if __name__ == "__main__": main()
30.854167
74
0.667792
215
1,481
4.502326
0.525581
0.061983
0.035124
0.033058
0
0
0
0
0
0
0
0.009475
0.21607
1,481
47
75
31.510638
0.824289
0.427414
0
0
0
0
0.153382
0
0
0
0
0
0
1
0.04
false
0
0.16
0
0.2
0.28
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7abe0503f9560abe292475ee88102fdcd3ba34ab
7,005
py
Python
internationalflavor/timezone/data.py
MounirMesselmeni/django-internationalflavor
30ea407fc9972243ea45f45c1ce09d5d17961730
[ "BSD-3-Clause" ]
22
2015-02-09T11:01:56.000Z
2021-07-02T23:34:56.000Z
internationalflavor/timezone/data.py
MounirMesselmeni/django-internationalflavor
30ea407fc9972243ea45f45c1ce09d5d17961730
[ "BSD-3-Clause" ]
19
2015-02-09T11:04:09.000Z
2021-11-26T08:07:44.000Z
internationalflavor/timezone/data.py
MounirMesselmeni/django-internationalflavor
30ea407fc9972243ea45f45c1ce09d5d17961730
[ "BSD-3-Clause" ]
18
2015-04-16T08:39:38.000Z
2021-07-08T08:07:41.000Z
import datetime from django.core.exceptions import ImproperlyConfigured from django.utils.encoding import force_text from django.utils.functional import lazy from django.utils.translation import gettext_lazy as _, gettext import itertools from internationalflavor.timezone._cldr_data import TIMEZONE_NAMES, METAZONE_NAMES, METAZONE_MAPPING_FROM_TZ, \ METAZONE_MAPPING_TO_TZ, TZ_REGION_FORMAT, TZ_GMT_FORMAT, TZ_HOUR_FORMAT from internationalflavor._helpers import orig_str, string_format try: from pytz import common_timezones as COMMON_TIMEZONES except ImportError: COMMON_TIMEZONES = [x for x in TIMEZONE_NAMES if not x.startswith("Etc")] CURRENT_METAZONES = [x for x in set(METAZONE_MAPPING_FROM_TZ.values()) if x is not None] def get_timezones_cities(timezones=None, exclude=None): """Returns a list of choices with (timezone code, exemplar city)-pairs, grouped by their territory. Only timezones present in the timezones argument, and not present in the exclude argument, are returned. """ # We require sorting for the groupby timezones = COMMON_TIMEZONES if timezones is None else timezones exclude = exclude if exclude else [] values = sorted(TIMEZONE_NAMES.items(), key=lambda item: orig_str(item[1][0])) result = [] for territory, zones in itertools.groupby(values, lambda item: item[1][0]): items = [(k, v[1]) for k, v in zones if k in timezones and k not in exclude] if items: result.append((territory, items)) return result get_timezones_cities_lazy = lazy(get_timezones_cities, list) def _get_metazone_cities(metazone, limit=5): zones = [tz for mz, tz in METAZONE_MAPPING_TO_TZ.items() if mz[0] == metazone] cities = sorted([territory[1] for tz, territory in TIMEZONE_NAMES.items() if tz in zones]) if len(cities) > limit: return ", ".join(map(force_text, cities[:limit])) + ", ..." 
else: return ", ".join(map(force_text, cities)) _get_metazone_cities_lazy = lazy(_get_metazone_cities, str) def _get_metazone_offset(metazone, correct_dst=True): try: import pytz except ImportError: raise ImproperlyConfigured("You can not use this display format without pytz") # We need to ensure that we do utcoffset - dst to get the normal offset for this timezone try: tzinfo = pytz.timezone(get_timezone_by_metazone(metazone)) offset = tzinfo.utcoffset(datetime.datetime.now(), is_dst=False) if correct_dst: offset -= tzinfo.dst(datetime.datetime.now(), is_dst=False) except pytz.UnknownTimeZoneError: offset = datetime.timedelta(0) return offset def _get_metazone_offset_str(metazone, correct_dst=True, include_gmt=True): offset = _get_metazone_offset(metazone, correct_dst=correct_dst) # Format the timezone if offset >= datetime.timedelta(0): offset_str = force_text(TZ_HOUR_FORMAT).split(';')[0] else: offset = -offset offset_str = force_text(TZ_HOUR_FORMAT).split(';')[1] offset_str = offset_str.replace('HH', "%02d" % (offset.total_seconds() // 3600)) offset_str = offset_str.replace('mm', "%02d" % ((offset.total_seconds() % 3600) // 60)) if include_gmt: return force_text(TZ_GMT_FORMAT) % offset_str else: return offset_str _get_metazone_offset_str_lazy = lazy(_get_metazone_offset_str, str) def get_metazone_name(metazone, display_format='name'): """Returns the name of a metazone, given a display_format. Available formats: *name* -- The name of the metazone, e.g. Central European Time *name_cities* -- The above two options combined, e.g. Central European Time (Abidjan, Accra, Bamako, Banjul, Conakry, ...) *offset_name* -- The offset and the name, e.g. GMT+01:00 Central European Time *offset_name_cities* -- The offset and the name, e.g. GMT+01:00 Central European Time (Abidjan, Accra, Bamako, Banjul, Conakry, ...) 
Everything else is string formatted using traditional Python string formatting, with the following arguments available: * tzname * cities * offset * gmt_offset -- The offset including the GMT string * dst_offset -- The offset with current DST applied * gmt_dst_offset - The above two combined """ if display_format == 'name': display_format = gettext("%(tzname)s") elif display_format == 'name_cities': display_format = gettext("%(tzname)s (%(cities)s)") elif display_format == 'offset_name': display_format = gettext("%(gmt_offset)s %(tzname)s") elif display_format == 'offset_name_cities': display_format = gettext("%(gmt_offset)s %(tzname)s (%(cities)s)") name = force_text(METAZONE_NAMES.get(metazone, string_format(TZ_REGION_FORMAT, _(metazone)))) result = display_format % { 'tzname': name, 'cities': _get_metazone_cities_lazy(metazone), 'offset': _get_metazone_offset_str_lazy(metazone, True, False), 'gmt_offset': _get_metazone_offset_str_lazy(metazone, True, True), 'dst_offset': _get_metazone_offset_str_lazy(metazone, False, False), 'gmt_dst_offset': _get_metazone_offset_str_lazy(metazone, False, True) } return result get_metazone_name_lazy = lazy(get_metazone_name, str) def get_metazones(metazones=None, exclude=None, display_format='name'): """Returns a list of metazones. By default, returns all current metazones. If the metazones argument defines metazones, they are returned. Values in exclude are never returned. """ metazones = CURRENT_METAZONES if metazones is None else metazones exclude = exclude if exclude else [] return [(k, get_metazone_name_lazy(k, display_format)) for k in metazones if k not in exclude] get_metazones_lazy = lazy(get_metazones, list) def get_timezone_by_metazone(metazone, territories=None, fallback='001'): """Returns the timezone name from the metazone name. 
It takes three arguments: :param metazone: Name of the metazone :param territories: String of a single territory or a list of territories in order of preference for retrieving the correct timezone. This is used when a metazone has multiple base timezones. It is optional as there is always a fallback to the default 'World' territory (001). Use case: you could use it to fill in the country of the user. :param fallback: The territory to use when no other territory could be found. This should always be 001 (=world) """ if territories is None: territories = [] elif isinstance(territories, str): territories = [territories] for ter in territories: if (metazone, ter) in METAZONE_MAPPING_TO_TZ: return METAZONE_MAPPING_TO_TZ[(metazone, ter)] return METAZONE_MAPPING_TO_TZ[(metazone, fallback)]
39.801136
118
0.698644
940
7,005
5
0.218085
0.042128
0.032553
0.029787
0.262979
0.170213
0.105319
0.105319
0.039574
0.02
0
0.007788
0.211849
7,005
175
119
40.028571
0.843507
0.287937
0
0.12766
0
0
0.057172
0
0
0
0
0
0
1
0.074468
false
0
0.12766
0
0.308511
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
7abe131a7498f3f282bb4ace852b76791ac312e6
10,520
py
Python
kinto/core/resource/schema.py
hafeez3000/kinto
2b741cf3f34dbb532b595caa4dbb9de2a991938d
[ "Apache-2.0" ]
null
null
null
kinto/core/resource/schema.py
hafeez3000/kinto
2b741cf3f34dbb532b595caa4dbb9de2a991938d
[ "Apache-2.0" ]
null
null
null
kinto/core/resource/schema.py
hafeez3000/kinto
2b741cf3f34dbb532b595caa4dbb9de2a991938d
[ "Apache-2.0" ]
null
null
null
import six import colander from colander import SchemaNode, String from kinto.core.utils import strip_whitespace, msec_time, decode_header, native_value class ResourceSchema(colander.MappingSchema): """Base resource schema, with *Cliquet* specific built-in options.""" class Options: """ Resource schema options. This is meant to be overriden for changing values: .. code-block:: python class Product(ResourceSchema): reference = colander.SchemaNode(colander.String()) class Options: readonly_fields = ('reference',) """ readonly_fields = tuple() """Fields that cannot be updated. Values for fields will have to be provided either during record creation, through default values using ``missing`` attribute or implementing a custom logic in :meth:`kinto.core.resource.UserResource.process_record`. """ preserve_unknown = True """Define if unknown fields should be preserved or not. The resource is schema-less by default. In other words, any field name will be accepted on records. Set this to ``False`` in order to limit the accepted fields to the ones defined in the schema. """ @classmethod def get_option(cls, attr): default_value = getattr(ResourceSchema.Options, attr) return getattr(cls.Options, attr, default_value) @classmethod def is_readonly(cls, field): """Return True if specified field name is read-only. :param str field: the field name in the schema :returns: ``True`` if the specified field is read-only, ``False`` otherwise. :rtype: bool """ return field in cls.get_option("readonly_fields") def schema_type(self): if self.get_option("preserve_unknown") is True: unknown = 'preserve' else: unknown = 'ignore' return colander.Mapping(unknown=unknown) class PermissionsSchema(colander.SchemaNode): """A permission mapping defines ACEs. It has permission names as keys and principals as values. 
:: { "write": ["fxa:af3e077eb9f5444a949ad65aa86e82ff"], "groups:create": ["fxa:70a9335eecfe440fa445ba752a750f3d"] } """ def __init__(self, *args, **kwargs): self.known_perms = kwargs.pop('permissions', tuple()) super(PermissionsSchema, self).__init__(*args, **kwargs) @staticmethod def schema_type(): return colander.Mapping(unknown='preserve') def deserialize(self, cstruct=colander.null): # Start by deserializing a simple mapping. permissions = super(PermissionsSchema, self).deserialize(cstruct) # In case it is optional in parent schema. if permissions in (colander.null, colander.drop): return permissions # Remove potential extra children from previous deserialization. self.children = [] for perm in permissions.keys(): # If know permissions is limited, then validate inline. if self.known_perms: colander.OneOf(choices=self.known_perms)(self, perm) # Add a String list child node with the name of ``perm``. self.add(self._get_node_principals(perm)) # End up by deserializing a mapping whose keys are now known. return super(PermissionsSchema, self).deserialize(permissions) def _get_node_principals(self, perm): principal = colander.SchemaNode(colander.String()) return colander.SchemaNode(colander.Sequence(), principal, name=perm, missing=colander.drop) class TimeStamp(colander.SchemaNode): """Basic integer schema field that can be set to current server timestamp in milliseconds if no value is provided. .. 
code-block:: python class Book(ResourceSchema): added_on = TimeStamp() read_on = TimeStamp(auto_now=False, missing=-1) """ schema_type = colander.Integer title = 'Epoch timestamp' """Default field title.""" auto_now = True """Set to current server timestamp (*milliseconds*) if not provided.""" missing = None """Default field value if not provided in record.""" def deserialize(self, cstruct=colander.null): if cstruct is colander.null and self.auto_now: cstruct = msec_time() return super(TimeStamp, self).deserialize(cstruct) class URL(SchemaNode): """String field representing a URL, with max length of 2048. This is basically a shortcut for string field with `~colander:colander.url`. .. code-block:: python class BookmarkSchema(ResourceSchema): url = URL() """ schema_type = String validator = colander.All(colander.url, colander.Length(min=1, max=2048)) def preparer(self, appstruct): return strip_whitespace(appstruct) class Any(colander.SchemaType): """Colander type agnostic field.""" def deserialize(self, node, cstruct): return cstruct class HeaderField(colander.SchemaNode): """Basic header field SchemaNode.""" missing = colander.drop def deserialize(self, cstruct=colander.null): if isinstance(cstruct, six.binary_type): try: cstruct = decode_header(cstruct) except UnicodeDecodeError: raise colander.Invalid(self, msg='Headers should be UTF-8 encoded') return super(HeaderField, self).deserialize(cstruct) class QueryField(colander.SchemaNode): """Basic querystring field SchemaNode.""" missing = colander.drop def deserialize(self, cstruct=colander.null): if isinstance(cstruct, six.string_types): cstruct = native_value(cstruct) return super(QueryField, self).deserialize(cstruct) class FieldList(QueryField): """String field representing a list of attributes.""" schema_type = colander.Sequence error_message = "The value should be a list of comma separated attributes" missing = colander.drop fields = colander.SchemaNode(colander.String(), missing=colander.drop) def 
deserialize(self, cstruct=colander.null): if isinstance(cstruct, six.string_types): cstruct = cstruct.split(',') return super(FieldList, self).deserialize(cstruct) class HeaderQuotedInteger(HeaderField): """Integer between "" used in precondition headers.""" schema_type = colander.String error_message = "The value should be integer between double quotes" validator = colander.Any(colander.Regex('^"([0-9]+?)"$', msg=error_message), colander.Regex('\*')) def deserialize(self, cstruct=colander.null): param = super(HeaderQuotedInteger, self).deserialize(cstruct) if param is colander.drop or param == '*': return param return int(param[1:-1]) class HeaderSchema(colander.MappingSchema): """Schema used for validating and deserializing request headers. """ def response_behavior_validator(): return colander.OneOf(['full', 'light', 'diff']) if_match = HeaderQuotedInteger(name='If-Match') if_none_match = HeaderQuotedInteger(name='If-None-Match') response_behaviour = HeaderField(colander.String(), name='Response-Behavior', validator=response_behavior_validator()) @staticmethod def schema_type(): return colander.Mapping(unknown='preserve') class QuerySchema(colander.MappingSchema): """ Schema used for validating and deserializing querystrings. It will include and try to guess the type of unknown fields (field filters) on deserialization. """ _limit = QueryField(colander.Integer()) _fields = FieldList() _sort = FieldList() _token = QueryField(colander.String()) _since = QueryField(colander.Integer()) _to = QueryField(colander.Integer()) _before = QueryField(colander.Integer()) last_modified = QueryField(colander.Integer()) @staticmethod def schema_type(): return colander.Mapping(unknown='ignore') def deserialize(self, cstruct=colander.null): """ Deserialize and validate the QuerySchema fields and try to deserialize and get the native value of additional filds (field filters) that may be present on the cstruct. 
e.g:: ?exclude_id=a,b&deleted=true -> {'exclude_id': ['a', 'b'], deleted: True} """ values = {} schema_values = super(QuerySchema, self).deserialize(cstruct) if schema_values is colander.drop: return schema_values # Deserialize querystring field filters (see docstring e.g) for k, v in cstruct.items(): # Deserialize lists used on in_ and exclude_ filters if k.startswith('in_') or k.startswith('exclude_'): as_list = FieldList().deserialize(v) if isinstance(as_list, list): values[k] = [native_value(v) for v in as_list] else: values[k] = native_value(v) values.update(schema_values) return values class JsonPatchOperationSchema(colander.MappingSchema): """Single JSON Patch Operation.""" def op_validator(): op_values = ['test', 'add', 'remove', 'replace', 'move', 'copy'] return colander.OneOf(op_values) def path_validator(): return colander.Regex('(/\w*)+') op = colander.SchemaNode(colander.String(), validator=op_validator()) path = colander.SchemaNode(colander.String(), validator=path_validator()) from_ = colander.SchemaNode(colander.String(), name='from', validator=path_validator(), missing=colander.drop) value = colander.SchemaNode(Any(), missing=colander.drop) @staticmethod def schema_type(): return colander.Mapping(unknown='raise') class JsonPatchBodySchema(colander.SequenceSchema): """Body used with JSON Patch (application/json-patch+json) as in RFC 6902.""" operations = JsonPatchOperationSchema(missing=colander.drop) class RequestSchema(colander.MappingSchema): """Baseline schema for kinto requests.""" header = HeaderSchema(missing=colander.drop) querystring = QuerySchema(missing=colander.drop) class JsonPatchRequestSchema(RequestSchema): body = JsonPatchBodySchema()
33.18612
87
0.656084
1,152
10,520
5.907986
0.263021
0.022921
0.027917
0.025713
0.157508
0.131942
0.10094
0.09521
0.062004
0.044079
0
0.007038
0.243631
10,520
316
88
33.291139
0.84831
0.233555
0
0.173333
0
0
0.050243
0
0
0
0
0
0
1
0.14
false
0
0.026667
0.053333
0.653333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
7abefe6d36851192eaf9f176b148fc64e01f2ed3
156
py
Python
nets/__init__.py
zhuofalin/Pytorch_Mask_R-CNN
c940fb0f238cf75dca9e90c3f4433adee42650af
[ "Apache-2.0" ]
2
2022-01-11T16:18:26.000Z
2022-01-23T05:56:42.000Z
utils/__init__.py
zhuofalin/Pytorch_Mask_R-CNN
c940fb0f238cf75dca9e90c3f4433adee42650af
[ "Apache-2.0" ]
null
null
null
utils/__init__.py
zhuofalin/Pytorch_Mask_R-CNN
c940fb0f238cf75dca9e90c3f4433adee42650af
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python # -*- coding: UTF-8 -*- ''' @Project :mask_rcnn_pytorch @File :__init__.py.py @Author :zhuofalin @Date :2021/11/24 21:18 '''
17.333333
28
0.615385
23
156
3.913043
0.956522
0
0
0
0
0
0
0
0
0
0
0.100775
0.173077
156
8
29
19.5
0.596899
0.923077
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
7ac2453ef253d20470030f8d8112f23a1f877440
1,366
py
Python
sloth-scratch.py
xer0-1ne/sloth-scratch
1d81ffe65ff7d72fcc766a6ba025366a3319a838
[ "MIT" ]
null
null
null
sloth-scratch.py
xer0-1ne/sloth-scratch
1d81ffe65ff7d72fcc766a6ba025366a3319a838
[ "MIT" ]
null
null
null
sloth-scratch.py
xer0-1ne/sloth-scratch
1d81ffe65ff7d72fcc766a6ba025366a3319a838
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 import json from assets.lib.bottle import route, run, static_file, response, request, redirect #get default routes for files/paths @route('/<filepath:path>') def server_static(filepath): return static_file(filepath, root="./") #route index as default page @route('/') def index(): filename='index.html' return static_file(filename, root="./") #return json object @route('/commands') def getCommands(): objFile = 'commands.json' response.content_type = 'application/json' with open(objFile, "r") as file: data = json.load(file) return json.dumps(data) @route('/addcommand', method="POST") def addCommands(): command = request.forms.get('newCommand') commandName = request.forms.get('newCommandName') commandOS = request.forms.get('newCommandOS') commandDescription = request.forms.get('newCommandDescription') commandObj = { "Command":command, "Name":commandName, "OS":commandOS, "Description":commandDescription } objFile = 'commands.json' response.content_type = 'application/json' with open(objFile, "r") as file: data = json.load(file) data.append(commandObj) with open(objFile, "w") as file: json.dump(data, file) redirect('/') run(host='localhost', port=8080, debug=True, reloader=True)
25.296296
82
0.666911
157
1,366
5.764331
0.471338
0.053039
0.066298
0.059669
0.201105
0.201105
0.201105
0.201105
0.201105
0.201105
0
0.004521
0.190337
1,366
53
83
25.773585
0.813743
0.073206
0
0.216216
0
0
0.164025
0.01664
0
0
0
0
0
1
0.108108
false
0
0.054054
0.027027
0.243243
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
8f816a322098d1a1e68d37250e949dfedc9e5d44
2,569
py
Python
neighbour/views.py
Eccie-K/neighbour-hood
f874f9468160aa34dee294d685374e4c5e2eec4d
[ "MIT" ]
null
null
null
neighbour/views.py
Eccie-K/neighbour-hood
f874f9468160aa34dee294d685374e4c5e2eec4d
[ "MIT" ]
4
2020-06-05T23:21:40.000Z
2021-06-10T21:57:32.000Z
neighbour/views.py
Eccie-K/neighbour-hood
f874f9468160aa34dee294d685374e4c5e2eec4d
[ "MIT" ]
null
null
null
from django.shortcuts import render, redirect from django.http import HttpResponse from django.contrib.auth import login, authenticate from django.contrib.auth.decorators import login_required from .forms import SignupForm, HoodForm, UserProfileUpdateForm, UserUpdateForm from .models import * from django.core.urlresolvers import reverse from django.http import HttpResponseRedirect from django.contrib.auth.models import User from django.contrib import messages # Create your views here. @login_required(login_url="/accounts/login/") def index(request): hoods = Hood.objects.all() return render(request,"index.html",locals()) def home (request): return render(request, "index.html", locals()) def signup(request): if request.method == 'POST': form = SignupForm(request.POST) if form.is_valid(): user = form.save(commit=False) user.is_active = True user.save() return render(request, 'index.html') else: form = SignupForm() return render(request, 'signup.html', {'form': form}) def new_hood(request): current_user = request.user if request.method == "POST": form = HoodForm(request.POST, request.FILES) if form.is_valid(): hood = form.save(commit=False) hood.user = current_user hood.save() return redirect("index") else: form = HoodForm() return render(request, "new_hood.html", {"form": form}) @login_required def profile(request): """Display user profile information.""" user = request.user return render(request, 'profile.html', {'user': user}) @login_required def update_profile(request): """Edit user profile information.""" user = request.user form1 = UserUpdateForm(instance=user) form2 = UserProfileUpdateForm(instance=user.profile) if request.method == 'POST': form1 = UserUpdateForm(instance=user, data=request.POST) form2 = UserProfileUpdateForm( instance=user, data=request.POST, files=request.FILES ) if form1.is_valid() and form2.is_valid(): form1.save() form2.save() messages.success(request, "Your profile has been updated!") return HttpResponseRedirect(reverse('profile')) return 
render(request, 'update_profile.html', {'form1': form1, 'form2': form2}) def details(request, hood_id): hood = Hood.objects.get(id=hood_id) return render(request, "details.html", locals())
30.223529
79
0.657844
293
2,569
5.706485
0.259386
0.047847
0.090909
0.037679
0.165072
0.088517
0.044258
0
0
0
0
0.006051
0.228104
2,569
84
80
30.583333
0.837115
0.034644
0
0.184615
0
0
0.07658
0
0
0
0
0
0
1
0.107692
false
0
0.153846
0.015385
0.415385
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
8f81ac36d9e586a2803a67107baad029f84be150
2,636
py
Python
isitfit/dotMan.py
autofitcloud/isitf
6ffc0c67c00140120f5d5ad8dfe11c8f0f7dacc1
[ "Apache-2.0" ]
82
2019-09-04T17:39:10.000Z
2021-08-10T14:59:18.000Z
isitfit/dotMan.py
autofitcloud/isitf
6ffc0c67c00140120f5d5ad8dfe11c8f0f7dacc1
[ "Apache-2.0" ]
11
2019-09-10T03:54:19.000Z
2020-02-21T22:58:44.000Z
isitfit/dotMan.py
autofitcloud/isitf
6ffc0c67c00140120f5d5ad8dfe11c8f0f7dacc1
[ "Apache-2.0" ]
9
2019-09-13T15:57:42.000Z
2021-02-13T15:56:40.000Z
import os class DotMan: def get_dotisitfit(self): # get home import pathlib p1_home = str(pathlib.Path.home()) # check dot folder p2_dot = os.path.join(p1_home, ".isitfit") if not os.path.exists(p2_dot): pathlib.Path(p2_dot).mkdir(exist_ok=True) return p2_dot def get_myuid(self, is_reentry=False): """ Create a UUID for each installation of isitfit This also creates a .isitfit folder in the user's home directory and caches the generated UUID in a txt file for re-use is_reentry - internally used flag to identify that this is a case when UUID is identified as invalid and needs to be set again """ p2_dot = self.get_dotisitfit() # check uid file within dot folder p3_uidtxt = os.path.join(p2_dot, "uid.txt") uuid_val = None if not os.path.exists(p3_uidtxt): import uuid uuid_val = uuid.uuid4().hex with open(p3_uidtxt, 'w') as fh: fh.write(uuid_val) # if not created above, read from file if uuid_val is None: with open(p3_uidtxt, 'r') as fh: uuid_val = fh.read() uuid_val = uuid_val.strip() # strip the new-line or spaces if any # if re-entry due to invalid ID or not if is_reentry: # any further processing of this would be an overkill pass else: # verify that the UUID is valid (in case of accidental overwrite) if len(uuid_val)!=32: # drop the uid.txt file and overwrite it os.remove(p3_uidtxt) uuid_val = self.get_myuid(True) # return return uuid_val def tempdir(self): import os import tempfile isitfit_tmpdir = os.path.join(tempfile.gettempdir(), 'isitfit') os.makedirs(isitfit_tmpdir, exist_ok=True) return isitfit_tmpdir import os class DotFile: """ Base class to set/get files in ~/.isitfit like ~/.isitfit/last_email.txt """ filename = None def __init__(self): self._init_fn() def _init_fn(self): if self.filename is None: raise Exception("Derived classes should set filename member") from isitfit.dotMan import DotMan dm = DotMan() fold = dm.get_dotisitfit() self.fn = os.path.join(fold, self.filename) def get(self): if not os.path.exists(self.fn): return None with open(self.fn, 
'r') as fh: val = fh.read() val = val.strip() if val=='': return None return val def set(self, val): with open(self.fn, 'w') as fh: fh.write(val) class DotLastEmail(DotFile): filename = "last_email.txt" class DotLastProfile(DotFile): filename = "last_profile.txt"
24.407407
74
0.646434
400
2,636
4.1425
0.34
0.042245
0.02414
0.019916
0.045263
0
0
0
0
0
0
0.008193
0.259105
2,636
107
75
24.635514
0.840246
0.270106
0
0.079365
0
0
0.05235
0
0
0
0
0
0
1
0.111111
false
0.015873
0.111111
0
0.428571
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
8f865163bfbe4b0bc8d900996f2290d3bae441c0
437
py
Python
workflows/base/interaction_views.py
xflows/clowdflows
697b36ebc976d1ba4ab726bda2fc4593422af080
[ "MIT" ]
38
2015-11-21T08:16:14.000Z
2021-06-22T16:14:12.000Z
workflows/base/interaction_views.py
chimeng089/clowdflows
e19bf57906e893d8f0be93329168b76eae758384
[ "MIT" ]
21
2017-04-05T08:03:54.000Z
2022-03-11T23:16:03.000Z
workflows/base/interaction_views.py
chimeng089/clowdflows
e19bf57906e893d8f0be93329168b76eae758384
[ "MIT" ]
26
2016-01-11T17:51:07.000Z
2022-02-24T11:49:40.000Z
import json from django.shortcuts import render def base_js_snippet(request, input_dict, output_dict, widget): try: inputs = json.dumps(input_dict['in']) except: raise Exception("Problem serializing the inputs. Only JSON-serializable objects can be used.") return render(request, 'interactions/base_js_snippet.html', {'widget': widget, 'snippet': input_dict['snippet'], 'inputs': inputs})
36.416667
102
0.695652
54
437
5.481481
0.62963
0.091216
0.087838
0
0
0
0
0
0
0
0
0
0.19222
437
11
103
39.727273
0.838527
0
0
0
0
0
0.311213
0.075515
0
0
0
0
0
1
0.111111
false
0
0.222222
0
0.444444
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
8f8686c5c30243fe1085ef5f1ba45de06bb5c4cd
928
py
Python
jira_devops/release_notes/settings.py
clutcher/jira_devops
61360f3fa9bd9b402d752dde84b3cf486245879e
[ "MIT" ]
null
null
null
jira_devops/release_notes/settings.py
clutcher/jira_devops
61360f3fa9bd9b402d752dde84b3cf486245879e
[ "MIT" ]
null
null
null
jira_devops/release_notes/settings.py
clutcher/jira_devops
61360f3fa9bd9b402d752dde84b3cf486245879e
[ "MIT" ]
null
null
null
import os from django.apps import AppConfig class ReleaseNotesAppConfig(AppConfig): name = 'jira_devops.release_notes' verbose_name = 'Release Notes' DEFAULT_JIRA_RELEASE_FIELD_MAP = { "hac_update": "customfield_13359", "need_impex": "customfield_13360", "need_manual": "customfield_13361", "special_notes": "customfield_13362", "responsible_person": "customfield_12200", } def ready(self): from django.conf import settings settings = settings._wrapped.__dict__ settings.setdefault('JIRA_RELEASE_FIELD_MAP', self.DEFAULT_JIRA_RELEASE_FIELD_MAP) settings.setdefault('FILE_CLEAN_UP_PREFIX', self.get_env_variable("FILE_CLEAN_UP_PREFIX", "hybris/bin/custom")) @staticmethod def get_env_variable(variable, default=""): value = os.getenv(variable) if not value: return default return value
29.935484
119
0.690733
103
928
5.84466
0.533981
0.054817
0.079734
0.094684
0.086379
0
0
0
0
0
0
0.034341
0.215517
928
30
120
30.933333
0.792582
0
0
0
0
0
0.284483
0.050647
0
0
0
0
0
1
0.086957
false
0
0.130435
0
0.478261
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
8f87de0150456c3cf0ea3b4c1fd2d4de2903f22e
1,888
py
Python
app/auth.py
alexisthethe/quickrun-fastapi
d2c8c2502d5eedf0e396dd9b78df0820b26a24b0
[ "MIT" ]
1
2022-01-31T14:59:37.000Z
2022-01-31T14:59:37.000Z
app/auth.py
alexisthethe/quickrun-fastapi
d2c8c2502d5eedf0e396dd9b78df0820b26a24b0
[ "MIT" ]
null
null
null
app/auth.py
alexisthethe/quickrun-fastapi
d2c8c2502d5eedf0e396dd9b78df0820b26a24b0
[ "MIT" ]
null
null
null
import logging from datetime import datetime, timedelta from typing import Optional from fastapi import Depends from fastapi.security import OAuth2PasswordBearer from jose import JWTError, jwt from passlib.context import CryptContext from app import schemas from app.config import settings oauth2_scheme = OAuth2PasswordBearer(tokenUrl="login") pwd_ctx = CryptContext(schemes=["bcrypt"], deprecated="auto") class NotAuthorized(Exception): """Exception when a request does not have the right login""" def __init__(self, username: str = ""): msg = f"Not authorized (user: {username})" logging.exception(f"Exception {self.__class__.__name__}: {msg}") super().__init__(msg) class Hash: @staticmethod def bcrypt(password: str): return pwd_ctx.hash(password) @staticmethod def verify(plain_pass: str, hashed_pass: str): return pwd_ctx.verify(plain_pass, hashed_pass) def create_access_token(data: dict, expires_delta: Optional[timedelta] = None): to_encode = data.copy() if expires_delta: expire = datetime.utcnow() + expires_delta else: expire = datetime.utcnow() + timedelta( minutes=settings.ACCESS_TOKEN_EXPIRE_MINUTES ) to_encode.update({"exp": expire}) encoded_jwt = jwt.encode( to_encode, settings.SECRET_KEY, algorithm=settings.ALGORITHM ) return encoded_jwt def verify_token(token: str): try: payload = jwt.decode( token, settings.SECRET_KEY, algorithms=[settings.ALGORITHM] ) email: str = payload.get("sub") if email is None: raise NotAuthorized(email) token_data = schemas.TokenData(email=email) except JWTError: raise NotAuthorized(email) return token_data def get_current_user(token: str = Depends(oauth2_scheme)): return verify_token(token)
28.179104
79
0.695975
224
1,888
5.660714
0.424107
0.014196
0.018927
0.023659
0
0
0
0
0
0
0
0.002688
0.211864
1,888
66
80
28.606061
0.849462
0.028602
0
0.08
0
0
0.052516
0.014223
0
0
0
0
0
1
0.12
false
0.14
0.18
0.06
0.44
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
1
8f88225e95ec2579d2321422fdd57413d254306d
3,581
py
Python
azad/exp/alternatives/optuna_dqn2.py
CoAxLab/azad
d1498069dd8856e93ae077b34dd7c9f1c7ce80e6
[ "MIT" ]
6
2018-09-11T21:06:12.000Z
2022-01-28T17:36:52.000Z
azad/exp/alternatives/optuna_dqn2.py
CoAxLab/azad
d1498069dd8856e93ae077b34dd7c9f1c7ce80e6
[ "MIT" ]
null
null
null
azad/exp/alternatives/optuna_dqn2.py
CoAxLab/azad
d1498069dd8856e93ae077b34dd7c9f1c7ce80e6
[ "MIT" ]
2
2018-09-12T00:40:52.000Z
2018-10-29T15:45:54.000Z
"""Tune the dqn2 model of wythoff's using the opotune lib""" import optuna import fire import torch import torch.nn as nn import torch.nn.functional as F import torch.optim as optim import torch.utils.data from torchvision import datasets from torchvision import transforms from azad.exp.alternatives import wythoff_dqn2 from copy import deepcopy def _build(trial): """Build a nn.Module MLP model""" # Sample hidden layers and features in_features = 4 # Initial n_layers = trial.suggest_int('n_layers', 2, 6) layers = [] for l in range(n_layers): out_features = trial.suggest_int(f'{l}', in_features, MAX_FEATURES) layers.append(nn.Linear(in_features, out_features)) layers.append(nn.ReLU()) in_features = deepcopy(out_features) # Output layer topo is fixed layers.append(nn.Linear(in_features, 1)) # Define the nn class Model(nn.Module): def __init__(self): super(Model, self).__init__() self.layers = nn.Sequential(*layers) def forward(self, x): return self.layers(x) return Model def _objective(trial): """Runs a single HP trial""" # Build a new Model Model = _build(trial) # Sample new HP learning_rate = trial.suggest_float("learning_rate", 0.005, 0.5) gamma = trial.suggest_float("gamma", 0.01, 0.5) epsilon = trial.suggest_float("epsilon", 0.1, 0.9) # Run wythoff_dqn2 result = wythoff_dqn2(epsilon=epsilon, gamma=gamma, learning_rate=learning_rate, num_episodes=NUM_EPISODES, batch_size=20, memory_capacity=1000, game=GAME, network=Model, anneal=True, tensorboard=None, update_every=1, double=False, double_update=10, save=False, save_model=False, monitor=None, return_none=False, debug=False, device=DEVICE, clip_grad=True, progress=False, zero=False, seed=SEED) return result["score"] # the final def optuna_dqn2(save=None, num_trials=100, num_episodes=100, max_features=20, game='Wythoff15x15', num_jobs=1, device="cpu", debug=True, seed=None): # Set globals used in _objective. A lazy bad soln. 
global DEVICE global SEED global GAME global NUM_EPISODES global MAX_FEATURES DEVICE = device SEED = seed GAME = game NUM_EPISODES = num_episodes MAX_FEATURES = max_features # Run the study study = optuna.create_study(direction="maximize") study.optimize(_objective, n_trials=num_trials, n_jobs=num_jobs) trial = study.best_trial if debug: print(f">>> Saving to {save}") print(f">>> Number of finished trials: {study.trials}") print(f">>> Best trial {trial}") print(f">>> score: {trial.value}") print(f">>> params:\n") for k, v in trial.params.items(): print(f"\t{k}: {v}") # Save? if save is not None: torch.save(study, save) return study
29.113821
75
0.543144
411
3,581
4.579075
0.364964
0.035069
0.022317
0.023379
0.031881
0.031881
0
0
0
0
0
0.020282
0.366657
3,581
122
76
29.352459
0.809524
0.088523
0
0
0
0
0.061149
0
0
0
0
0
0
1
0.055556
false
0
0.122222
0.011111
0.233333
0.066667
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
8f8a00c79c4bc75d5fe1451c7619bf46e7178afd
54,850
py
Python
tests/sql/build_test.py
DataJunction/datajunction
d2293255bb7df0e5144c7e448a0ca2b590b6c20f
[ "MIT" ]
null
null
null
tests/sql/build_test.py
DataJunction/datajunction
d2293255bb7df0e5144c7e448a0ca2b590b6c20f
[ "MIT" ]
null
null
null
tests/sql/build_test.py
DataJunction/datajunction
d2293255bb7df0e5144c7e448a0ca2b590b6c20f
[ "MIT" ]
null
null
null
""" Tests for ``datajunction.sql.build``. """ # pylint: disable=invalid-name, too-many-lines, line-too-long import datetime import pytest from pytest_mock import MockerFixture from sqlalchemy.engine import create_engine from sqlmodel import Session from datajunction.models.column import Column from datajunction.models.database import Database from datajunction.models.node import Node, NodeType from datajunction.models.table import Table from datajunction.sql.build import ( find_on_clause, get_dimensions_from_filters, get_filter, get_join_columns, get_query_for_node, get_query_for_sql, ) from datajunction.typing import ColumnType def test_get_query_for_node(mocker: MockerFixture) -> None: """ Test ``get_query_for_node``. """ database = Database(id=1, name="slow", URI="sqlite://", cost=1.0) parent = Node(name="A") child = Node( name="B", tables=[ Table( database=database, table="B", columns=[Column(name="cnt", type=ColumnType.INT)], ), ], type=NodeType.METRIC, expression="SELECT COUNT(*) AS cnt FROM A", parents=[parent], ) engine = create_engine(database.URI) connection = engine.connect() connection.execute("CREATE TABLE B (cnt INTEGER)") mocker.patch("datajunction.sql.transpile.create_engine", return_value=engine) session = mocker.MagicMock() create_query = get_query_for_node(session, child, [], []) assert create_query.database_id == 1 assert create_query.submitted_query == 'SELECT "B".cnt \nFROM "B"' def test_get_query_for_node_with_groupbys(mocker: MockerFixture) -> None: """ Test ``get_query_for_node`` with group bys. 
""" database = Database(id=1, name="slow", URI="sqlite://", cost=1.0) parent = Node( name="A", tables=[ Table( database=database, table="A", columns=[ Column(name="user_id", type=ColumnType.INT), Column(name="comment", type=ColumnType.STR), ], ), ], columns=[ Column(name="user_id", type=ColumnType.INT), Column(name="comment", type=ColumnType.STR), ], ) child = Node( name="B", type=NodeType.METRIC, expression="SELECT COUNT(*) AS cnt FROM A", parents=[parent], ) engine = create_engine(database.URI) connection = engine.connect() connection.execute("CREATE TABLE A (user_id INTEGER, comment TEXT)") mocker.patch("datajunction.sql.transpile.create_engine", return_value=engine) session = mocker.MagicMock() create_query = get_query_for_node(session, child, ["A.user_id"], []) space = " " assert create_query.database_id == 1 assert ( create_query.submitted_query == f"""SELECT count('*') AS cnt, "A".user_id{space} FROM (SELECT "A".user_id AS user_id, "A".comment AS comment{space} FROM "A") AS "A" GROUP BY "A".user_id""" ) def test_get_query_for_node_specify_database(mocker: MockerFixture) -> None: """ Test ``get_query_for_node`` when a database is specified. 
""" database = Database(id=1, name="slow", URI="sqlite://", cost=1.0) parent = Node(name="A") child = Node( name="B", tables=[ Table( database=database, table="B", columns=[Column(name="cnt", type=ColumnType.INT)], ), ], type=NodeType.METRIC, expression="SELECT COUNT(*) AS cnt FROM A", parents=[parent], columns=[Column(name="cnt", type=ColumnType.INT)], ) engine = create_engine(database.URI) connection = engine.connect() connection.execute("CREATE TABLE B (cnt INTEGER)") mocker.patch("datajunction.sql.transpile.create_engine", return_value=engine) session = mocker.MagicMock() session.exec().one.return_value = database create_query = get_query_for_node(session, child, [], [], 1) assert create_query.database_id == 1 assert create_query.submitted_query == 'SELECT "B".cnt \nFROM "B"' with pytest.raises(Exception) as excinfo: get_query_for_node(session, child, [], [], 2) assert str(excinfo.value) == "Database ID 2 is not valid" def test_get_query_for_node_no_databases(mocker: MockerFixture) -> None: """ Test ``get_query_for_node``. """ database = Database(id=1, name="slow", URI="sqlite://", cost=1.0) parent = Node(name="A") child = Node( name="B", tables=[ Table( database=database, table="B", columns=[Column(name="one", type=ColumnType.STR)], ), ], type=NodeType.METRIC, expression="SELECT COUNT(*) AS cnt FROM A", parents=[parent], columns=[Column(name="one", type=ColumnType.STR)], ) mocker.patch("datajunction.sql.dag.get_computable_databases", return_value=set()) session = mocker.MagicMock() with pytest.raises(Exception) as excinfo: get_query_for_node(session, child, [], []) assert str(excinfo.value) == "No valid database was found" def test_get_query_for_node_with_dimensions(mocker: MockerFixture) -> None: """ Test ``get_query_for_node`` when filtering/grouping by a dimension. 
""" database = Database(id=1, name="one", URI="sqlite://") dimension = Node( name="core.users", type=NodeType.DIMENSION, tables=[ Table( database=database, table="dim_users", columns=[ Column(name="id", type=ColumnType.INT), Column(name="age", type=ColumnType.INT), Column(name="gender", type=ColumnType.STR), ], ), ], columns=[ Column(name="id", type=ColumnType.INT), Column(name="age", type=ColumnType.INT), Column(name="gender", type=ColumnType.STR), ], ) parent = Node( name="core.comments", tables=[ Table( database=database, table="comments", columns=[ Column(name="ds", type=ColumnType.STR), Column(name="user_id", type=ColumnType.INT), Column(name="text", type=ColumnType.STR), ], ), ], columns=[ Column(name="ds", type=ColumnType.STR), Column(name="user_id", type=ColumnType.INT, dimension=dimension), Column(name="text", type=ColumnType.STR), ], ) child = Node( name="core.num_comments", type=NodeType.METRIC, expression="SELECT COUNT(*) FROM core.comments", parents=[parent], ) engine = create_engine(database.URI) connection = engine.connect() connection.execute("CREATE TABLE dim_users (id INTEGER, age INTEGER, gender TEXT)") connection.execute("CREATE TABLE comments (ds TEXT, user_id INTEGER, text TEXT)") mocker.patch("datajunction.sql.transpile.create_engine", return_value=engine) session = mocker.MagicMock() session.exec().one.return_value = dimension create_query = get_query_for_node( session, child, ["core.users.gender"], ["core.users.age>25"], ) space = " " assert create_query.database_id == 1 assert ( create_query.submitted_query == f"""SELECT count('*') AS count_1, "core.users".gender{space} FROM (SELECT comments.ds AS ds, comments.user_id AS user_id, comments.text AS text{space} FROM comments) AS "core.comments" JOIN (SELECT dim_users.id AS id, dim_users.age AS age, dim_users.gender AS gender{space} FROM dim_users) AS "core.users" ON "core.comments".user_id = "core.users".id{space} WHERE "core.users".age > 25 GROUP BY "core.users".gender""" ) with 
pytest.raises(Exception) as excinfo: get_query_for_node(session, child, ["aaaa"], []) assert str(excinfo.value) == "Invalid dimension: aaaa" with pytest.raises(Exception) as excinfo: get_query_for_node(session, child, ["aaaa", "bbbb"], []) assert str(excinfo.value) == "Invalid dimensions: aaaa, bbbb" def test_get_query_for_node_with_multiple_dimensions(mocker: MockerFixture) -> None: """ Test ``get_query_for_node`` when filtering/grouping by a dimension. """ database = Database(id=1, name="one", URI="sqlite://") dimension_1 = Node( name="core.users", type=NodeType.DIMENSION, tables=[ Table( database=database, table="dim_users", columns=[ Column(name="id", type=ColumnType.INT), Column(name="age", type=ColumnType.INT), Column(name="gender", type=ColumnType.STR), ], ), ], columns=[ Column(name="id", type=ColumnType.INT), Column(name="age", type=ColumnType.INT), Column(name="gender", type=ColumnType.STR), ], ) dimension_2 = Node( name="core.bands", type=NodeType.DIMENSION, tables=[ Table( database=database, table="dim_bands", columns=[ Column(name="uuid", type=ColumnType.INT), Column(name="name", type=ColumnType.STR), Column(name="genre", type=ColumnType.STR), ], ), ], columns=[ Column(name="uuid", type=ColumnType.INT), Column(name="name", type=ColumnType.STR), Column(name="genre", type=ColumnType.STR), ], ) parent = Node( name="core.comments", tables=[ Table( database=database, table="comments", columns=[ Column(name="ds", type=ColumnType.STR), Column(name="user_id", type=ColumnType.INT), Column(name="band_id", type=ColumnType.INT), Column(name="text", type=ColumnType.STR), ], ), ], columns=[ Column(name="ds", type=ColumnType.STR), Column(name="user_id", type=ColumnType.INT, dimension=dimension_1), Column( name="band_id", type=ColumnType.INT, dimension=dimension_2, dimension_column="uuid", ), Column(name="text", type=ColumnType.STR), ], ) child = Node( name="core.num_comments", type=NodeType.METRIC, expression="SELECT COUNT(*) FROM core.comments", parents=[parent], ) 
engine = create_engine(database.URI) connection = engine.connect() connection.execute("CREATE TABLE dim_users (id INTEGER, age INTEGER, gender TEXT)") connection.execute("CREATE TABLE dim_bands (uuid INTEGER, name TEXT, genre TEXT)") connection.execute( "CREATE TABLE comments (ds TEXT, user_id INTEGER, band_id INTEGER, text TEXT)", ) mocker.patch("datajunction.sql.transpile.create_engine", return_value=engine) session = mocker.MagicMock() session.exec().one.side_effect = [dimension_1, dimension_2] create_query = get_query_for_node( session, child, ["core.users.gender"], ["core.bands.genre='rock'"], ) space = " " assert create_query.database_id == 1 assert ( create_query.submitted_query == f"""SELECT count('*') AS count_1, "core.users".gender{space} FROM (SELECT comments.ds AS ds, comments.user_id AS user_id, comments.band_id AS band_id, comments.text AS text{space} FROM comments) AS "core.comments" JOIN (SELECT dim_users.id AS id, dim_users.age AS age, dim_users.gender AS gender{space} FROM dim_users) AS "core.users" ON "core.comments".user_id = "core.users".id, (SELECT comments.ds AS ds, comments.user_id AS user_id, comments.band_id AS band_id, comments.text AS text{space} FROM comments) AS "core.comments" JOIN (SELECT dim_bands.uuid AS uuid, dim_bands.name AS name, dim_bands.genre AS genre{space} FROM dim_bands) AS "core.bands" ON "core.comments".band_id = "core.bands".uuid{space} WHERE "core.bands".genre = 'rock' GROUP BY "core.users".gender""" ) def test_get_filter(mocker: MockerFixture) -> None: """ Test ``get_filter``. 
""" greater_than = mocker.MagicMock() less_than = mocker.MagicMock() equals = mocker.MagicMock() mocker.patch( "datajunction.sql.build.COMPARISONS", new={ ">": greater_than, "<": less_than, "=": equals, }, ) column_a = mocker.MagicMock() column_date = mocker.MagicMock() column_date.type.python_type = datetime.date column_dt = mocker.MagicMock() column_dt.type.python_type = datetime.datetime columns = {"a": column_a, "day": column_date, "dt": column_dt} # basic get_filter(columns, "a>0") greater_than.assert_called_with(column_a, 0) # date get_filter(columns, "day=2020-01-01") equals.assert_called_with(column_date, "2020-01-01 00:00:00") get_filter(columns, "day<20200202") less_than.assert_called_with(column_date, "2020-02-02 00:00:00") get_filter(columns, "day=3/3/2020") equals.assert_called_with(column_date, "2020-03-03 00:00:00") # datetime get_filter(columns, "dt=2012-01-19 17:21:00") equals.assert_called_with(column_dt, "2012-01-19 17:21:00") with pytest.raises(Exception) as excinfo: get_filter(columns, "dt>foo/bar-baz") assert str(excinfo.value) == "Invalid date or datetime value: foo/bar-baz" # exceptions with pytest.raises(Exception) as excinfo: get_filter(columns, "invalid") assert ( str(excinfo.value) == """The filter "invalid" is invalid The following error happened: - The filter "invalid" is not a valid filter. Filters should consist of a dimension name, follow by a valid operator (<=|<|>=|>|!=|=), followed by a value. If the value is a string or date/time it should be enclosed in single quotes. (error code: 100)""" ) with pytest.raises(Exception) as excinfo: get_filter(columns, "b>0") assert str(excinfo.value) == "Invalid column name: b" with pytest.raises(Exception) as excinfo: get_filter(columns, "a>open('/etc/passwd').read()") assert str(excinfo.value) == "Invalid value: open('/etc/passwd').read()" def test_get_query_for_sql(mocker: MockerFixture, session: Session) -> None: """ Test ``get_query_for_sql``. 
""" get_session = mocker.patch("datajunction.sql.build.get_session") get_session().__next__.return_value = session database = Database(id=1, name="slow", URI="sqlite://", cost=1.0) A = Node( name="A", tables=[ Table( database=database, table="A", columns=[ Column(name="one", type=ColumnType.STR), Column(name="two", type=ColumnType.STR), ], ), ], ) engine = create_engine(database.URI) connection = engine.connect() connection.execute("CREATE TABLE A (one TEXT, two TEXT)") mocker.patch("datajunction.sql.transpile.create_engine", return_value=engine) B = Node( name="B", type=NodeType.METRIC, expression="SELECT COUNT(*) AS cnt FROM A", parents=[A], ) session.add(B) session.commit() sql = "SELECT B FROM metrics" create_query = get_query_for_sql(sql) assert create_query.database_id == 1 space = " " assert ( create_query.submitted_query == f'''SELECT count('*') AS "B"{space} FROM (SELECT "A".one AS one, "A".two AS two{space} FROM "A") AS "A"''' ) def test_get_query_for_sql_no_metrics(mocker: MockerFixture, session: Session) -> None: """ Test ``get_query_for_sql`` when no metrics are requested. 
""" get_session = mocker.patch("datajunction.sql.build.get_session") get_session().__next__.return_value = session database = Database(id=1, name="db", URI="sqlite://") dimension = Node( name="core.users", type=NodeType.DIMENSION, tables=[ Table( database=database, table="dim_users", columns=[ Column(name="id", type=ColumnType.INT), Column(name="age", type=ColumnType.INT), Column(name="gender", type=ColumnType.STR), ], ), ], columns=[ Column(name="id", type=ColumnType.INT), Column(name="age", type=ColumnType.INT), Column(name="gender", type=ColumnType.STR), ], ) engine = create_engine(database.URI) connection = engine.connect() connection.execute("CREATE TABLE dim_users (id INTEGER, age INTEGER, gender TEXT)") mocker.patch("datajunction.sql.transpile.create_engine", return_value=engine) session.add(dimension) session.commit() sql = 'SELECT "core.users.gender", "core.users.age" FROM metrics' create_query = get_query_for_sql(sql) assert create_query.database_id == 1 space = " " assert ( create_query.submitted_query == f'''SELECT "core.users".gender, "core.users".age{space} FROM (SELECT dim_users.id AS id, dim_users.age AS age, dim_users.gender AS gender{space} FROM dim_users) AS "core.users"''' ) other_dimension = Node( name="core.other_dim", type=NodeType.DIMENSION, columns=[ Column(name="full_name", type=ColumnType.STR), ], ) session.add(other_dimension) session.commit() sql = 'SELECT "core.users.gender", "core.other_dim.full_name" FROM metrics' with pytest.raises(Exception) as excinfo: get_query_for_sql(sql) assert ( str(excinfo.value) == "Cannot query from multiple dimensions when no metric is specified" ) def test_get_query_for_sql_no_tables(mocker: MockerFixture, session: Session) -> None: """ Test ``get_query_for_sql`` when no tables are involved. 
""" get_session = mocker.patch("datajunction.sql.build.get_session") get_session().__next__.return_value = session database = Database(id=1, name="memory", URI="sqlite://") session.add(database) session.commit() sql = "SELECT 1" create_query = get_query_for_sql(sql) assert create_query.database_id == 1 assert create_query.submitted_query == "SELECT 1" def test_get_query_for_sql_having(mocker: MockerFixture, session: Session) -> None: """ Test ``get_query_for_sql``. """ get_session = mocker.patch("datajunction.sql.build.get_session") get_session().__next__.return_value = session database = Database(id=1, name="slow", URI="sqlite://", cost=1.0) A = Node( name="A", tables=[ Table( database=database, table="A", columns=[ Column(name="one", type=ColumnType.STR), Column(name="two", type=ColumnType.STR), ], ), ], ) engine = create_engine(database.URI) connection = engine.connect() connection.execute("CREATE TABLE A (one TEXT, two TEXT)") mocker.patch("datajunction.sql.transpile.create_engine", return_value=engine) B = Node( name="B", type=NodeType.METRIC, expression="SELECT COUNT(*) AS cnt FROM A", parents=[A], ) session.add(B) session.commit() sql = "SELECT B FROM metrics HAVING B > 10" create_query = get_query_for_sql(sql) assert create_query.database_id == 1 space = " " assert ( create_query.submitted_query == f"""SELECT count('*') AS "B"{space} FROM (SELECT "A".one AS one, "A".two AS two{space} FROM "A") AS "A"{space} HAVING count('*') > 10""" ) sql = "SELECT B FROM metrics HAVING C > 10" with pytest.raises(Exception) as excinfo: get_query_for_sql(sql) assert str(excinfo.value) == "Invalid dimension: C" def test_get_query_for_sql_with_dimensions( mocker: MockerFixture, session: Session, ) -> None: """ Test ``get_query_for_sql`` with dimensions in the query. 
""" get_session = mocker.patch("datajunction.sql.build.get_session") get_session().__next__.return_value = session database = Database(id=1, name="slow", URI="sqlite://", cost=1.0) dimension = Node( name="core.users", type=NodeType.DIMENSION, tables=[ Table( database=database, table="dim_users", columns=[ Column(name="id", type=ColumnType.INT), Column(name="age", type=ColumnType.INT), Column(name="gender", type=ColumnType.STR), ], ), ], columns=[ Column(name="id", type=ColumnType.INT), Column(name="age", type=ColumnType.INT), Column(name="gender", type=ColumnType.STR), ], ) parent = Node( name="core.comments", tables=[ Table( database=database, table="comments", columns=[ Column(name="ds", type=ColumnType.STR), Column(name="user_id", type=ColumnType.INT), Column(name="text", type=ColumnType.STR), ], ), ], columns=[ Column(name="ds", type=ColumnType.STR), Column(name="user_id", type=ColumnType.INT, dimension=dimension), Column(name="text", type=ColumnType.STR), ], ) child = Node( name="core.num_comments", type=NodeType.METRIC, expression="SELECT COUNT(*) FROM core.comments", parents=[parent], ) engine = create_engine(database.URI) connection = engine.connect() connection.execute("CREATE TABLE dim_users (id INTEGER, age INTEGER, gender TEXT)") connection.execute("CREATE TABLE comments (ds TEXT, user_id INTEGER, text TEXT)") mocker.patch("datajunction.sql.transpile.create_engine", return_value=engine) session.add(child) session.add(dimension) session.commit() sql = """ SELECT "core.users.gender", "core.num_comments" FROM metrics WHERE "core.users.age" > 25 GROUP BY "core.users.gender" """ create_query = get_query_for_sql(sql) assert create_query.database_id == 1 space = " " assert ( create_query.submitted_query == f"""SELECT "core.users".gender, count('*') AS "core.num_comments"{space} FROM (SELECT comments.ds AS ds, comments.user_id AS user_id, comments.text AS text{space} FROM comments) AS "core.comments" JOIN (SELECT dim_users.id AS id, dim_users.age AS age, 
dim_users.gender AS gender{space} FROM dim_users) AS "core.users" ON "core.comments".user_id = "core.users".id{space} WHERE "core.users".age > 25 GROUP BY "core.users".gender""" ) sql = """ SELECT "core.users.invalid", "core.num_comments" FROM metrics WHERE "core.users.age" > 25 GROUP BY "core.users.invalid" """ with pytest.raises(Exception) as excinfo: get_query_for_sql(sql) assert str(excinfo.value) == "Invalid dimension: core.users.invalid" def test_get_query_for_sql_with_dimensions_order_by( mocker: MockerFixture, session: Session, ) -> None: """ Test ``get_query_for_sql`` with dimensions in the query and ``ORDER BY``. """ get_session = mocker.patch("datajunction.sql.build.get_session") get_session().__next__.return_value = session database = Database(id=1, name="slow", URI="sqlite://", cost=1.0) dimension = Node( name="core.users", type=NodeType.DIMENSION, tables=[ Table( database=database, table="dim_users", columns=[ Column(name="id", type=ColumnType.INT), Column(name="age", type=ColumnType.INT), Column(name="gender", type=ColumnType.STR), ], ), ], columns=[ Column(name="id", type=ColumnType.INT), Column(name="age", type=ColumnType.INT), Column(name="gender", type=ColumnType.STR), ], ) parent = Node( name="core.comments", tables=[ Table( database=database, table="comments", columns=[ Column(name="ds", type=ColumnType.STR), Column(name="user_id", type=ColumnType.INT), Column(name="text", type=ColumnType.STR), ], ), ], columns=[ Column(name="ds", type=ColumnType.STR), Column(name="user_id", type=ColumnType.INT, dimension=dimension), Column(name="text", type=ColumnType.STR), ], ) child = Node( name="core.num_comments", type=NodeType.METRIC, expression="SELECT COUNT(*) FROM core.comments", parents=[parent], ) engine = create_engine(database.URI) connection = engine.connect() connection.execute("CREATE TABLE dim_users (id INTEGER, age INTEGER, gender TEXT)") connection.execute("CREATE TABLE comments (ds TEXT, user_id INTEGER, text TEXT)") 
mocker.patch("datajunction.sql.transpile.create_engine", return_value=engine) session.add(child) session.add(dimension) session.commit() sql = """ SELECT "core.users.gender" AS "core.users.gender", "core.num_comments" AS "core.num_comments" FROM main.metrics GROUP BY "core.users.gender" ORDER BY "core.num_comments" DESC LIMIT 100; """ create_query = get_query_for_sql(sql) space = " " assert create_query.database_id == 1 assert ( create_query.submitted_query == f"""SELECT "core.users".gender AS "core.users.gender", count('*') AS "core.num_comments"{space} FROM (SELECT comments.ds AS ds, comments.user_id AS user_id, comments.text AS text{space} FROM comments) AS "core.comments" JOIN (SELECT dim_users.id AS id, dim_users.age AS age, dim_users.gender AS gender{space} FROM dim_users) AS "core.users" ON "core.comments".user_id = "core.users".id GROUP BY "core.users".gender ORDER BY count('*') DESC LIMIT 100 OFFSET 0""" ) sql = """ SELECT "core.users.gender" AS "core.users.gender", "core.num_comments" AS "core.num_comments" FROM main.metrics GROUP BY "core.users.gender" ORDER BY "core.num_comments" ASC LIMIT 100; """ create_query = get_query_for_sql(sql) assert ( create_query.submitted_query == f"""SELECT "core.users".gender AS "core.users.gender", count('*') AS "core.num_comments"{space} FROM (SELECT comments.ds AS ds, comments.user_id AS user_id, comments.text AS text{space} FROM comments) AS "core.comments" JOIN (SELECT dim_users.id AS id, dim_users.age AS age, dim_users.gender AS gender{space} FROM dim_users) AS "core.users" ON "core.comments".user_id = "core.users".id GROUP BY "core.users".gender ORDER BY count('*') LIMIT 100 OFFSET 0""" ) sql = """ SELECT "core.users.gender" AS "core.users.gender", "core.num_comments" AS "core.num_comments" FROM main.metrics GROUP BY "core.users.gender" ORDER BY "core.num_comments" ASC LIMIT 100; """ create_query = get_query_for_sql(sql) assert ( create_query.submitted_query == f"""SELECT "core.users".gender AS "core.users.gender", 
count('*') AS "core.num_comments"{space} FROM (SELECT comments.ds AS ds, comments.user_id AS user_id, comments.text AS text{space} FROM comments) AS "core.comments" JOIN (SELECT dim_users.id AS id, dim_users.age AS age, dim_users.gender AS gender{space} FROM dim_users) AS "core.users" ON "core.comments".user_id = "core.users".id GROUP BY "core.users".gender ORDER BY count('*') LIMIT 100 OFFSET 0""" ) sql = """ SELECT "core.users.gender" AS "core.users.gender", "core.num_comments" AS "core.num_comments" FROM main.metrics GROUP BY "core.users.gender" ORDER BY "core.users.gender" ASC LIMIT 100; """ create_query = get_query_for_sql(sql) assert ( create_query.submitted_query == f"""SELECT "core.users".gender AS "core.users.gender", count('*') AS "core.num_comments"{space} FROM (SELECT comments.ds AS ds, comments.user_id AS user_id, comments.text AS text{space} FROM comments) AS "core.comments" JOIN (SELECT dim_users.id AS id, dim_users.age AS age, dim_users.gender AS gender{space} FROM dim_users) AS "core.users" ON "core.comments".user_id = "core.users".id GROUP BY "core.users".gender ORDER BY "core.users".gender LIMIT 100 OFFSET 0""" ) sql = """ SELECT "core.users.gender" AS "core.users.gender", "core.num_comments" AS "core.num_comments" FROM main.metrics GROUP BY "core.users.gender" ORDER BY invalid ASC LIMIT 100; """ with pytest.raises(Exception) as excinfo: get_query_for_sql(sql) assert str(excinfo.value) == "Invalid identifier: invalid" def test_get_query_for_sql_compound_names( mocker: MockerFixture, session: Session, ) -> None: """ Test ``get_query_for_sql`` with nodes with compound names. 
""" get_session = mocker.patch("datajunction.sql.build.get_session") get_session().__next__.return_value = session database = Database(id=1, name="slow", URI="sqlite://", cost=1.0) A = Node( name="core.A", tables=[ Table( database=database, table="A", columns=[ Column(name="one", type=ColumnType.STR), Column(name="two", type=ColumnType.STR), ], ), ], ) engine = create_engine(database.URI) connection = engine.connect() connection.execute("CREATE TABLE A (one TEXT, two TEXT)") mocker.patch("datajunction.sql.transpile.create_engine", return_value=engine) B = Node( name="core.B", type=NodeType.METRIC, expression="SELECT COUNT(*) AS cnt FROM core.A", parents=[A], ) session.add(B) session.commit() sql = "SELECT core.B FROM metrics" create_query = get_query_for_sql(sql) assert create_query.database_id == 1 space = " " assert ( create_query.submitted_query == f'''SELECT count('*') AS "core.B"{space} FROM (SELECT "A".one AS one, "A".two AS two{space} FROM "A") AS "core.A"''' ) def test_get_query_for_sql_multiple_databases( mocker: MockerFixture, session: Session, ) -> None: """ Test ``get_query_for_sql`` when the parents are in multiple databases. 
""" get_session = mocker.patch("datajunction.sql.build.get_session") get_session().__next__.return_value = session database_1 = Database(id=1, name="slow", URI="sqlite://", cost=10.0) database_2 = Database(id=2, name="fast", URI="sqlite://", cost=1.0) A = Node( name="A", tables=[ Table( database=database_1, table="A", columns=[ Column(name="one", type=ColumnType.STR), Column(name="two", type=ColumnType.STR), ], ), Table( database=database_2, table="A", columns=[ Column(name="one", type=ColumnType.STR), ], ), ], columns=[ Column(name="one", type=ColumnType.STR), Column(name="two", type=ColumnType.STR), ], ) engine = create_engine(database_1.URI) connection = engine.connect() connection.execute("CREATE TABLE A (one TEXT, two TEXT)") mocker.patch("datajunction.sql.transpile.create_engine", return_value=engine) B = Node( name="B", type=NodeType.METRIC, expression="SELECT COUNT(*) AS cnt FROM A", parents=[A], ) session.add(B) session.commit() sql = "SELECT B FROM metrics" create_query = get_query_for_sql(sql) assert create_query.database_id == 2 # fast B.expression = "SELECT COUNT(two) AS cnt FROM A" session.add(B) session.commit() sql = "SELECT B FROM metrics" create_query = get_query_for_sql(sql) assert create_query.database_id == 1 # slow def test_get_query_for_sql_multiple_metrics( mocker: MockerFixture, session: Session, ) -> None: """ Test ``get_query_for_sql`` with multiple metrics. 
""" get_session = mocker.patch("datajunction.sql.build.get_session") get_session().__next__.return_value = session database = Database(id=1, name="slow", URI="sqlite://", cost=1.0) A = Node( name="A", tables=[ Table( database=database, table="A", columns=[ Column(name="one", type=ColumnType.STR), Column(name="two", type=ColumnType.STR), ], ), ], columns=[ Column(name="one", type=ColumnType.STR), Column(name="two", type=ColumnType.STR), ], ) engine = create_engine(database.URI) connection = engine.connect() connection.execute("CREATE TABLE A (one TEXT, two TEXT)") mocker.patch("datajunction.sql.transpile.create_engine", return_value=engine) B = Node( name="B", type=NodeType.METRIC, expression="SELECT COUNT(*) AS cnt FROM A", parents=[A], ) session.add(B) C = Node( name="C", type=NodeType.METRIC, expression="SELECT MAX(one) AS max_one FROM A", parents=[A], ) session.add(C) session.commit() sql = "SELECT B, C FROM metrics" create_query = get_query_for_sql(sql) assert create_query.database_id == 1 space = " " assert ( create_query.submitted_query == f'''SELECT count('*') AS "B", max("A".one) AS "C"{space} FROM (SELECT "A".one AS one, "A".two AS two{space} FROM "A") AS "A"''' ) def test_get_query_for_sql_non_identifiers( mocker: MockerFixture, session: Session, ) -> None: """ Test ``get_query_for_sql`` with metrics and non-identifiers in the ``SELECT``. 
""" get_session = mocker.patch("datajunction.sql.build.get_session") get_session().__next__.return_value = session database = Database(id=1, name="slow", URI="sqlite://", cost=1.0) A = Node( name="A", tables=[ Table( database=database, table="A", columns=[ Column(name="one", type=ColumnType.STR), Column(name="two", type=ColumnType.STR), ], ), ], columns=[ Column(name="one", type=ColumnType.STR), Column(name="two", type=ColumnType.STR), ], ) engine = create_engine(database.URI) connection = engine.connect() connection.execute("CREATE TABLE A (one TEXT, two TEXT)") mocker.patch("datajunction.sql.transpile.create_engine", return_value=engine) B = Node( name="B", type=NodeType.METRIC, expression="SELECT COUNT(*) AS cnt FROM A", parents=[A], ) session.add(B) C = Node( name="C", type=NodeType.METRIC, expression="SELECT MAX(one) AS max_one FROM A", parents=[A], ) session.add(C) session.commit() sql = "SELECT B, C, 'test' FROM metrics" create_query = get_query_for_sql(sql) assert create_query.database_id == 1 space = " " assert ( create_query.submitted_query == f'''SELECT count('*') AS "B", max("A".one) AS "C", test{space} FROM (SELECT "A".one AS one, "A".two AS two{space} FROM "A") AS "A"''' ) def test_get_query_for_sql_different_parents( mocker: MockerFixture, session: Session, ) -> None: """ Test ``get_query_for_sql`` with metrics with different parents. 
""" get_session = mocker.patch("datajunction.sql.build.get_session") get_session().__next__.return_value = session database = Database(id=1, name="slow", URI="sqlite://", cost=1.0) A = Node( name="A", tables=[ Table( database=database, table="A", columns=[ Column(name="one", type=ColumnType.STR), Column(name="two", type=ColumnType.STR), ], ), ], ) B = Node( name="B", tables=[ Table( database=database, table="B", columns=[ Column(name="one", type=ColumnType.STR), Column(name="two", type=ColumnType.STR), ], ), ], ) C = Node( name="C", type=NodeType.METRIC, expression="SELECT COUNT(*) AS cnt FROM A", parents=[A], ) session.add(C) D = Node( name="D", type=NodeType.METRIC, expression="SELECT MAX(one) AS max_one FROM A", parents=[B], ) session.add(D) session.commit() sql = "SELECT C, D FROM metrics" with pytest.raises(Exception) as excinfo: get_query_for_sql(sql) assert str(excinfo.value) == "Metrics C and D have non-shared parents" def test_get_query_for_sql_not_metric(mocker: MockerFixture, session: Session) -> None: """ Test ``get_query_for_sql`` when the projection is not a metric node. """ get_session = mocker.patch("datajunction.sql.build.get_session") get_session().__next__.return_value = session database = Database(id=1, name="slow", URI="sqlite://", cost=1.0) A = Node( name="A", tables=[ Table( database=database, table="A", columns=[ Column(name="one", type=ColumnType.STR), Column(name="two", type=ColumnType.STR), ], ), ], ) B = Node( name="B", expression="SELECT one FROM A", parents=[A], ) session.add(B) session.commit() sql = "SELECT B FROM metrics" with pytest.raises(Exception) as excinfo: get_query_for_sql(sql) assert str(excinfo.value) == "Invalid dimension: B" def test_get_query_for_sql_no_databases( mocker: MockerFixture, session: Session, ) -> None: """ Test ``get_query_for_sql`` when no common databases are found. 
""" get_session = mocker.patch("datajunction.sql.build.get_session") get_session().__next__.return_value = session A = Node( name="A", tables=[], ) B = Node( name="B", type=NodeType.METRIC, expression="SELECT COUNT(*) AS cnt FROM A", parents=[A], ) session.add(B) session.commit() sql = "SELECT B FROM metrics" with pytest.raises(Exception) as excinfo: get_query_for_sql(sql) assert str(excinfo.value) == "No valid database was found" def test_get_query_for_sql_alias(mocker: MockerFixture, session: Session) -> None: """ Test ``get_query_for_sql`` with aliases. """ get_session = mocker.patch("datajunction.sql.build.get_session") get_session().__next__.return_value = session database = Database(id=1, name="slow", URI="sqlite://", cost=1.0) A = Node( name="A", tables=[ Table( database=database, table="A", columns=[ Column(name="one", type=ColumnType.STR), Column(name="two", type=ColumnType.STR), ], ), ], ) engine = create_engine(database.URI) connection = engine.connect() connection.execute("CREATE TABLE A (one TEXT, two TEXT)") mocker.patch("datajunction.sql.transpile.create_engine", return_value=engine) B = Node( name="B", type=NodeType.METRIC, expression="SELECT COUNT(*) AS cnt FROM A", parents=[A], ) session.add(B) session.commit() sql = "SELECT B AS my_metric FROM metrics" create_query = get_query_for_sql(sql) assert create_query.database_id == 1 space = " " assert ( create_query.submitted_query == f'''SELECT count('*') AS my_metric{space} FROM (SELECT "A".one AS one, "A".two AS two{space} FROM "A") AS "A"''' ) def test_get_query_for_sql_where_groupby( mocker: MockerFixture, session: Session, ) -> None: """ Test ``get_query_for_sql`` with a where and a group by. 
""" get_session = mocker.patch("datajunction.sql.build.get_session") get_session().__next__.return_value = session database = Database(id=1, name="slow", URI="sqlite://", cost=1.0) comments = Node( name="core.comments", tables=[ Table( database=database, table="comments", columns=[ Column(name="user_id", type=ColumnType.INT), Column(name="comment", type=ColumnType.STR), ], ), ], ) engine = create_engine(database.URI) connection = engine.connect() connection.execute("CREATE TABLE comments (user_id INT, comment TEXT)") mocker.patch("datajunction.sql.transpile.create_engine", return_value=engine) num_comments = Node( name="core.num_comments", type=NodeType.METRIC, expression="SELECT COUNT(*) FROM core.comments", parents=[comments], ) session.add(num_comments) session.commit() sql = """ SELECT "core.num_comments", "core.comments.user_id" FROM metrics WHERE "core.comments.user_id" > 1 GROUP BY "core.comments.user_id" """ create_query = get_query_for_sql(sql) assert create_query.database_id == 1 space = " " assert ( create_query.submitted_query == f"""SELECT count('*') AS "core.num_comments", "core.comments".user_id{space} FROM (SELECT comments.user_id AS user_id, comments.comment AS comment{space} FROM comments) AS "core.comments"{space} WHERE "core.comments".user_id > 1 GROUP BY "core.comments".user_id""" ) def test_get_query_for_sql_date_trunc( mocker: MockerFixture, session: Session, ) -> None: """ Test ``get_query_for_sql`` with a call to ``DATE_TRUNC``. 
""" get_session = mocker.patch("datajunction.sql.build.get_session") get_session().__next__.return_value = session database = Database(id=1, name="db", URI="sqlite://") comments = Node( name="core.comments", tables=[ Table( database=database, table="comments", columns=[ Column(name="user_id", type=ColumnType.INT), Column(name="timestamp", type=ColumnType.DATETIME), ], ), ], ) engine = create_engine(database.URI) connection = engine.connect() connection.execute("CREATE TABLE comments (user_id INT, timestamp DATETIME)") mocker.patch("datajunction.sql.transpile.create_engine", return_value=engine) num_comments = Node( name="core.num_comments", type=NodeType.METRIC, expression="SELECT COUNT(*) FROM core.comments", parents=[comments], ) session.add(num_comments) session.commit() sql = """ SELECT DATE_TRUNC('day', "core.comments.timestamp") AS "__timestamp", "core.num_comments" FROM metrics GROUP BY DATE_TRUNC('day', "core.comments.timestamp") """ create_query = get_query_for_sql(sql) assert create_query.database_id == 1 space = " " assert ( create_query.submitted_query == f"""SELECT datetime("core.comments".timestamp, 'start of day') AS __timestamp, count('*') AS "core.num_comments"{space} FROM (SELECT comments.user_id AS user_id, comments.timestamp AS timestamp{space} FROM comments) AS "core.comments" GROUP BY datetime("core.comments".timestamp, 'start of day')""" ) def test_get_query_for_sql_invalid_column( mocker: MockerFixture, session: Session, ) -> None: """ Test ``get_query_for_sql`` with an invalid column. 
""" get_session = mocker.patch("datajunction.sql.build.get_session") get_session().__next__.return_value = session database = Database(id=1, name="slow", URI="sqlite://", cost=1.0) comments = Node( name="core.comments", tables=[ Table( database=database, table="comments", columns=[ Column(name="user_id", type=ColumnType.INT), Column(name="comment", type=ColumnType.STR), ], ), ], ) engine = create_engine(database.URI) connection = engine.connect() connection.execute("CREATE TABLE comments (user_id INT, comment TEXT)") mocker.patch("datajunction.sql.transpile.create_engine", return_value=engine) num_comments = Node( name="core.num_comments", type=NodeType.METRIC, expression="SELECT COUNT(*) FROM core.comments", parents=[comments], ) session.add(num_comments) session.commit() sql = """ SELECT "core.num_comments" FROM metrics WHERE "core.some_other_parent.user_id" > 1 """ with pytest.raises(Exception) as excinfo: get_query_for_sql(sql) assert str(excinfo.value) == "Invalid dimension: core.some_other_parent.user_id" def test_get_dimensions_from_filters() -> None: """ Test ``get_dimensions_from_filters``. """ assert get_dimensions_from_filters(["a>1", "b=10"]) == {"a", "b"} with pytest.raises(Exception) as excinfo: get_dimensions_from_filters(["aaaa"]) assert ( str(excinfo.value) == """The filter "aaaa" is invalid The following error happened: - The filter "aaaa" is not a valid filter. Filters should consist of a dimension name, follow by a valid operator (<=|<|>=|>|!=|=), followed by a value. If the value is a string or date/time it should be enclosed in single quotes. (error code: 100)""" ) def test_find_on_clause(mocker: MockerFixture) -> None: """ Test ``find_on_clause``. 
""" database = Database(id=1, name="one", URI="sqlite://") dimension = Node( name="core.users", type=NodeType.DIMENSION, tables=[ Table( database=database, table="dim_users", columns=[ Column(name="id", type=ColumnType.INT), Column(name="age", type=ColumnType.INT), Column(name="gender", type=ColumnType.STR), ], ), ], columns=[ Column(name="id", type=ColumnType.INT), Column(name="age", type=ColumnType.INT), Column(name="gender", type=ColumnType.STR), ], ) parent = Node( name="core.comments", tables=[ Table( database=database, table="comments", columns=[ Column(name="ds", type=ColumnType.STR), Column(name="user_id", type=ColumnType.INT, dimension=dimension), Column(name="text", type=ColumnType.STR), ], ), ], columns=[ Column(name="ds", type=ColumnType.STR), Column(name="user_id", type=ColumnType.INT, dimension=dimension), Column(name="text", type=ColumnType.STR), ], ) child = Node(name="core.num_comments", parents=[parent]) node_select = mocker.MagicMock() subquery = mocker.MagicMock() find_on_clause(child, node_select, dimension, subquery) assert node_select.columns.__getitem__.called_with("user_id") assert subquery.columns.__getitem__.called_with("id") def test_find_on_clause_parent_no_columns(mocker: MockerFixture) -> None: """ Test ``find_on_clause`` when a parent has no columns. I think we expect all nodes to have at least one column, so this test is just for completeness. 
""" database = Database(id=1, name="one", URI="sqlite://") dimension = Node( name="core.users", type=NodeType.DIMENSION, tables=[ Table( database=database, table="dim_users", columns=[ Column(name="id", type=ColumnType.INT), Column(name="age", type=ColumnType.INT), Column(name="gender", type=ColumnType.STR), ], ), ], columns=[ Column(name="id", type=ColumnType.INT), Column(name="age", type=ColumnType.INT), Column(name="gender", type=ColumnType.STR), ], ) parent_1 = Node( name="core.comments", tables=[ Table( database=database, table="comments", columns=[ Column(name="ds", type=ColumnType.STR), Column(name="user_id", type=ColumnType.INT, dimension=dimension), Column(name="text", type=ColumnType.STR), ], ), ], columns=[ Column(name="ds", type=ColumnType.STR), Column(name="user_id", type=ColumnType.INT, dimension=dimension), Column(name="text", type=ColumnType.STR), ], ) parent_2 = Node( name="a_weird_node", tables=[ Table( database=database, table="empty", columns=[], ), ], columns=[], ) child = Node(name="core.num_comments", parents=[parent_2, parent_1]) node_select = mocker.MagicMock() subquery = mocker.MagicMock() find_on_clause(child, node_select, dimension, subquery) assert node_select.columns.__getitem__.called_with("user_id") def test_find_on_clause_parent_invalid_reference(mocker: MockerFixture) -> None: """ Test ``find_on_clause`` when a parent has no columns. The compiler should check that the dimension is valid, but the table could change. 
""" database = Database(id=1, name="one", URI="sqlite://") dimension = Node( name="core.users", type=NodeType.DIMENSION, tables=[ Table( database=database, table="dim_users", columns=[ Column(name="id", type=ColumnType.INT), Column(name="age", type=ColumnType.INT), Column(name="gender", type=ColumnType.STR), ], ), ], columns=[ Column(name="id", type=ColumnType.INT), Column(name="age", type=ColumnType.INT), Column(name="gender", type=ColumnType.STR), ], ) parent = Node( name="core.comments", tables=[ Table( database=database, table="comments", columns=[ Column(name="ds", type=ColumnType.STR), Column(name="user_id", type=ColumnType.INT), Column(name="text", type=ColumnType.STR), ], ), ], columns=[ Column(name="ds", type=ColumnType.STR), Column(name="user_id", type=ColumnType.INT), Column(name="text", type=ColumnType.STR), ], ) child = Node(name="core.num_comments", parents=[parent]) node_select = mocker.MagicMock() subquery = mocker.MagicMock() with pytest.raises(Exception) as excinfo: find_on_clause(child, node_select, dimension, subquery) assert ( str(excinfo.value) == "Node core.num_comments has no columns with dimension core.users" ) def test_get_join_columns() -> None: """ Test ``get_join_columns``. 
""" database = Database(id=1, name="one", URI="sqlite://") dimension = Node( name="core.users", type=NodeType.DIMENSION, tables=[ Table( database=database, table="dim_users", columns=[ Column(name="id", type=ColumnType.INT), Column(name="age", type=ColumnType.INT), Column(name="gender", type=ColumnType.STR), ], ), ], columns=[ Column(name="id", type=ColumnType.INT), Column(name="age", type=ColumnType.INT), Column(name="gender", type=ColumnType.STR), ], ) orphan = Node(name="orphan") with pytest.raises(Exception) as excinfo: get_join_columns(orphan, dimension) assert str(excinfo.value) == "Node orphan has no columns with dimension core.users" parent_without_columns = Node(name="parent_without_columns") broken = Node(name="broken", parents=[parent_without_columns]) with pytest.raises(Exception) as excinfo: get_join_columns(broken, dimension) assert str(excinfo.value) == "Node broken has no columns with dimension core.users" parent = Node( name="parent", tables=[ Table( database=database, table="comments", columns=[ Column(name="ds", type=ColumnType.STR), Column(name="user_id", type=ColumnType.INT), Column(name="text", type=ColumnType.STR), ], ), ], columns=[ Column(name="ds", type=ColumnType.STR), Column(name="user_id", type=ColumnType.INT, dimension=dimension), Column(name="text", type=ColumnType.STR), ], ) child = Node(name="child", parents=[parent_without_columns, parent]) parent_name, column_name, dimension_column = get_join_columns(child, dimension) assert parent_name == "parent" assert column_name == "user_id" assert dimension_column == "id"
31.023756
254
0.586472
6,378
54,850
4.892286
0.040765
0.049995
0.047944
0.02692
0.891549
0.875044
0.856713
0.832067
0.816684
0.797712
0
0.006569
0.281167
54,850
1,767
255
31.041313
0.784823
0.032762
0
0.783462
0
0.023125
0.260002
0.044817
0
0
0
0
0.049755
1
0.020322
false
0.001402
0.007708
0
0.028031
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
7
8f8b794a3c57821e35ab3c5efd51b3bc2789f860
2,671
py
Python
plugins/tff_backend/models/nodes.py
threefoldfoundation/app_backend
b3cea2a3ff9e10efcc90d3d6e5e8e46b9e84312a
[ "Apache-2.0" ]
null
null
null
plugins/tff_backend/models/nodes.py
threefoldfoundation/app_backend
b3cea2a3ff9e10efcc90d3d6e5e8e46b9e84312a
[ "Apache-2.0" ]
178
2017-08-02T12:58:06.000Z
2017-12-20T15:01:12.000Z
plugins/tff_backend/models/nodes.py
threefoldfoundation/app_backend
b3cea2a3ff9e10efcc90d3d6e5e8e46b9e84312a
[ "Apache-2.0" ]
2
2018-01-10T10:43:12.000Z
2018-03-18T10:42:23.000Z
# -*- coding: utf-8 -*-
# Copyright 2018 GIG Technology NV
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# @@license_version:1.4@@
from google.appengine.ext import ndb
from framework.models.common import NdbModel
from plugins.rogerthat_api.plugin_utils import Enum
from plugins.tff_backend.plugin_consts import NAMESPACE


class NodeStatus(Enum):
    # Possible values for Node.status
    HALTED = 'halted'
    RUNNING = 'running'


class WalletStatus(Enum):
    # Possible values for NodeChainStatus.wallet_status
    ERROR = 'error'
    LOCKED = 'locked'
    UNLOCKED = 'unlocked'


class NodeChainStatus(NdbModel):
    """Blockchain-related status embedded in a Node entity."""
    wallet_status = ndb.StringProperty(choices=WalletStatus.all())
    block_height = ndb.IntegerProperty(default=0)
    active_blockstakes = ndb.IntegerProperty(default=0)
    network = ndb.StringProperty(default='standard', choices=['devnet', 'testnet', 'standard'])
    confirmed_balance = ndb.IntegerProperty(default=0)
    connected_peers = ndb.IntegerProperty(default=0)
    address = ndb.StringProperty()


class Node(NdbModel):
    """A hardware node owned by a user; keyed by node id in NAMESPACE."""
    NAMESPACE = NAMESPACE
    serial_number = ndb.StringProperty()
    last_update = ndb.DateTimeProperty()
    username = ndb.StringProperty()
    status = ndb.StringProperty(default=NodeStatus.HALTED)
    status_date = ndb.DateTimeProperty()
    info = ndb.JsonProperty()
    chain_status = ndb.StructuredProperty(NodeChainStatus)

    @property
    def id(self):
        """Return the entity's string key id as unicode (Python 2 str.decode)."""
        return self.key.string_id().decode('utf-8')

    @classmethod
    def create_key(cls, node_id):
        # type: (unicode) -> ndb.Key
        """Build the canonical key for a node id."""
        return ndb.Key(cls, node_id, namespace=NAMESPACE)

    @classmethod
    def list_by_user(cls, username):
        """Query all nodes belonging to one user."""
        return cls.query().filter(cls.username == username)

    @classmethod
    def list_by_property(cls, property_name, ascending):
        """Query all nodes ordered by a (possibly dotted, e.g. nested) property.

        A dotted name such as 'chain_status.block_height' is resolved one
        attribute at a time starting from the model class.
        """
        resolved = None
        # For a plain name, split('.') yields a single part, so this loop
        # also covers the non-dotted case with getattr(cls, property_name).
        for part in property_name.split('.'):
            resolved = getattr(resolved if resolved else cls, part)
        return cls.query().order(resolved if ascending else -resolved)

    @classmethod
    def list_running_by_last_update(cls, date):
        """Query nodes marked RUNNING whose last update predates `date`."""
        return cls.query().filter(cls.last_update < date).filter(cls.status == NodeStatus.RUNNING)
32.975309
98
0.706477
333
2,671
5.573574
0.471471
0.032328
0.053879
0.056034
0.024784
0
0
0
0
0
0
0.007414
0.192063
2,671
80
99
33.3875
0.852641
0.234744
0
0.081633
0
0
0.033564
0
0
0
0
0
0
1
0.102041
false
0
0.081633
0.081633
0.77551
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
8f909cdae723994e554369978e6cc97f4a98602c
172
py
Python
BOJ/13000~13999/13800~13899/13877.py
shinkeonkim/today-ps
f3e5e38c5215f19579bb0422f303a9c18c626afa
[ "Apache-2.0" ]
2
2020-01-29T06:54:41.000Z
2021-11-07T13:23:27.000Z
BOJ/13000~13999/13800~13899/13877.py
shinkeonkim/Today_PS
bb0cda0ee1b9c57e1cfa38355e29d0f1c6167a44
[ "Apache-2.0" ]
null
null
null
BOJ/13000~13999/13800~13899/13877.py
shinkeonkim/Today_PS
bb0cda0ee1b9c57e1cfa38355e29d0f1c6167a44
[ "Apache-2.0" ]
null
null
null
# BOJ 13877: for each test case read "<name> <digits>" and print the value of
# <digits> interpreted as octal, decimal and hexadecimal. If the digit string
# is not valid octal (contains 8 or 9), print 0 for the octal column.
for _ in range(int(input())):
    name, digits = input().split()
    try:
        octal = int(digits, 8)
    except ValueError:
        # e.g. digits containing 8 or 9 are not octal
        octal = 0
    print(name, octal, int(digits), int(digits, 16))
24.571429
42
0.505814
32
172
2.71875
0.53125
0.229885
0.16092
0.183908
0.229885
0
0
0
0
0
0
0.047244
0.261628
172
7
43
24.571429
0.637795
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.285714
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
3
8f90f912716a6632b2bcc2ac457c7c623545be22
869
py
Python
gsplines/services/gsplinesjson.py
rafaelrojasmiliani/gsplines
663b10f6d53b498a1e892d9eb32a345153de36d2
[ "MIT" ]
3
2021-08-28T01:42:40.000Z
2021-12-02T22:39:45.000Z
gsplines/services/gsplinesjson.py
rafaelrojasmiliani/gsplines
663b10f6d53b498a1e892d9eb32a345153de36d2
[ "MIT" ]
null
null
null
gsplines/services/gsplinesjson.py
rafaelrojasmiliani/gsplines
663b10f6d53b498a1e892d9eb32a345153de36d2
[ "MIT" ]
null
null
null
from ..piecewisefunction.piecewisefunction import cPiecewiseFunction
import json
import numpy as np
import gsplines.basis


def piecewise2json(_pw):
    """Serialize a piecewise function to a JSON string.

    The payload is ``[tau, y, dim, [basis_class_name, basis_params]]`` where
    ``basis_params`` is None for parameterless bases.
    """
    basis = _pw.basis_
    params = basis.params_ if hasattr(basis, 'params_') else None
    payload = [
        _pw.tau_.tolist(),
        _pw.y_.tolist(),
        _pw.dim_,
        [type(basis).__name__, params],
    ]
    return json.dumps(payload)


def json2piecewise(_data):
    """Rebuild a cPiecewiseFunction from the JSON produced by piecewise2json."""
    fields = json.loads(_data)
    # Last entry describes the basis: [class name, optional params].
    name, params = fields[-1]
    basis_cls = getattr(gsplines.basis, name)
    basis = basis_cls() if params is None else basis_cls(params)
    # All entries except dim and the basis descriptor are numeric arrays.
    tau = np.array(fields[0])
    y = np.array(fields[1])
    return cPiecewiseFunction(tau, y, fields[2], basis)
22.868421
68
0.665132
109
869
4.963303
0.385321
0.101664
0.048059
0
0
0
0
0
0
0
0
0.011852
0.223245
869
37
69
23.486486
0.78963
0
0
0.076923
0
0
0.008055
0
0
0
0
0
0
1
0.076923
false
0
0.153846
0
0.307692
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
8f926ec70f17a27fd3a3870a6289f1a7f3c638c1
3,085
py
Python
ovos_utils/waiting_for_mycroft/settings_gui_generator.py
forslund/ovos_utils
bfca2d9175b72b0d157385af07627aefcd280177
[ "Apache-2.0" ]
3
2021-11-10T11:46:05.000Z
2022-03-06T01:59:51.000Z
ovos_utils/waiting_for_mycroft/settings_gui_generator.py
forslund/ovos_utils
bfca2d9175b72b0d157385af07627aefcd280177
[ "Apache-2.0" ]
5
2021-08-10T17:26:49.000Z
2022-03-03T14:43:55.000Z
ovos_utils/waiting_for_mycroft/settings_gui_generator.py
forslund/ovos_utils
bfca2d9175b72b0d157385af07627aefcd280177
[ "Apache-2.0" ]
1
2021-11-19T09:31:07.000Z
2021-11-19T09:31:07.000Z
# Copyright 2020 Mycroft AI Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import json
import pathlib

import yaml


class SettingsGuiGenerator:
    """Skill Settings Generator For GUI.

    Reads a skill's settingsmeta (.json or .yaml) file and maintains the
    list of GUI-displayable settings sections, merged with the values from
    the skill's current settings.json.
    """

    def __init__(self):
        """Create an empty settings list."""
        self.settings_list = []

    def populate(self, skill_id, settings_file, settings_dict):
        """Populate the settings list for the current skill.

        Arguments:
            skill_id: ID of target skill (kept for interface compatibility).
            settings_file: Settings meta file from skill folder
                (.json or .yaml; other suffixes are silently ignored).
            settings_dict: Dictionary of current settings.json file, or None.
        """
        # Pick the loader by file suffix instead of duplicating the whole
        # read-and-append branch per format.
        suffix = pathlib.Path(settings_file).suffix
        loader = {".json": json.load, ".yaml": yaml.safe_load}.get(suffix)
        if loader is not None:
            with open(settings_file, 'r') as f:
                settingsmeta_dict = loader(f)
            for section in settingsmeta_dict.get('skillMetadata').get('sections'):
                self.settings_list.append(section)
        if settings_dict is not None:
            # The merge logic is identical to update(); delegate instead of
            # keeping a second copy in sync.
            self.update(settings_dict)

    def fetch(self):
        """Return Settings List."""
        return self.settings_list

    def clear(self):
        """Clear Settings List."""
        self.settings_list.clear()

    def update(self, settings_dict):
        """Merge changed settings values into the list.

        Arguments:
            settings_dict: Dictionary of current settings.json file.
        """
        updated = []
        for section in self.settings_list:
            for field in section['fields']:
                if "name" in field and field["name"] in settings_dict:
                    field["value"] = settings_dict[field["name"]]
            # NOTE(review): appended once per section — the collapsed source
            # hides the original indentation; confirm against upstream.
            updated.append(section)
        # clear() empties the old list object (visible to prior fetch()
        # callers) before the attribute is rebound to the merged list.
        self.clear()
        self.settings_list = updated
33.901099
75
0.60778
351
3,085
5.162393
0.339031
0.07947
0.07947
0.01766
0.449227
0.449227
0.449227
0.449227
0.397351
0.353201
0
0.00373
0.3047
3,085
91
76
33.901099
0.841026
0.316045
0
0.590909
0
0
0.050226
0
0
0
0
0
0
1
0.113636
false
0
0.068182
0
0.227273
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
8f931f9651c47e4b3678e1660beb74247dbc04c6
25,483
py
Python
pysnmp/FD-SYSTEM-MIB.py
agustinhenze/mibs.snmplabs.com
1fc5c07860542b89212f4c8ab807057d9a9206c7
[ "Apache-2.0" ]
11
2021-02-02T16:27:16.000Z
2021-08-31T06:22:49.000Z
pysnmp/FD-SYSTEM-MIB.py
agustinhenze/mibs.snmplabs.com
1fc5c07860542b89212f4c8ab807057d9a9206c7
[ "Apache-2.0" ]
75
2021-02-24T17:30:31.000Z
2021-12-08T00:01:18.000Z
pysnmp/FD-SYSTEM-MIB.py
agustinhenze/mibs.snmplabs.com
1fc5c07860542b89212f4c8ab807057d9a9206c7
[ "Apache-2.0" ]
10
2019-04-30T05:51:36.000Z
2022-02-16T03:33:41.000Z
# # PySNMP MIB module FD-SYSTEM-MIB (http://snmplabs.com/pysmi) # ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/FD-SYSTEM-MIB # Produced by pysmi-0.3.4 at Mon Apr 29 18:59:07 2019 # On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4 # Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15) # OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer") NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues") SingleValueConstraint, ValueSizeConstraint, ValueRangeConstraint, ConstraintsIntersection, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ValueSizeConstraint", "ValueRangeConstraint", "ConstraintsIntersection", "ConstraintsUnion") epon, DeviceOperation, DeviceType, LedStatus, DeviceStatus = mibBuilder.importSymbols("EPON-EOC-MIB", "epon", "DeviceOperation", "DeviceType", "LedStatus", "DeviceStatus") ObjectGroup, ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ObjectGroup", "ModuleCompliance", "NotificationGroup") Counter32, MibIdentifier, iso, ModuleIdentity, Integer32, TimeTicks, Bits, Gauge32, Unsigned32, MibScalar, MibTable, MibTableRow, MibTableColumn, IpAddress, Counter64, ObjectIdentity, NotificationType = mibBuilder.importSymbols("SNMPv2-SMI", "Counter32", "MibIdentifier", "iso", "ModuleIdentity", "Integer32", "TimeTicks", "Bits", "Gauge32", "Unsigned32", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "IpAddress", "Counter64", "ObjectIdentity", "NotificationType") RowStatus, TextualConvention, MacAddress, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "RowStatus", "TextualConvention", "MacAddress", "DisplayString") systemInfo = ModuleIdentity((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1)) if mibBuilder.loadTexts: systemInfo.setLastUpdated('201005271056Z') if mibBuilder.loadTexts: systemInfo.setOrganization('epon eoc factory.') sysBaseInfo = MibIdentifier((1, 
3, 6, 1, 4, 1, 34592, 1, 3, 1, 1)) sysModel = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 1, 1), DeviceType()).setMaxAccess("readonly") if mibBuilder.loadTexts: sysModel.setStatus('current') sysDesc = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 1, 2), DisplayString()).setMaxAccess("readwrite") if mibBuilder.loadTexts: sysDesc.setStatus('current') sysLocation = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 1, 3), DisplayString()).setMaxAccess("readwrite") if mibBuilder.loadTexts: sysLocation.setStatus('current') sysContact = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 1, 4), DisplayString()).setMaxAccess("readwrite") if mibBuilder.loadTexts: sysContact.setStatus('current') sysMajAlarmLed = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 1, 5), LedStatus()).setMaxAccess("readonly") if mibBuilder.loadTexts: sysMajAlarmLed.setStatus('current') sysCriAlarmLed = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 1, 6), LedStatus()).setMaxAccess("readonly") if mibBuilder.loadTexts: sysCriAlarmLed.setStatus('current') sysAlarmDesc = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 1, 7), DisplayString()).setMaxAccess("readonly") if mibBuilder.loadTexts: sysAlarmDesc.setStatus('current') sysConfig = MibIdentifier((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 2)) consolePortSpd = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 2, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("bps2400", 1), ("bps4800", 2), ("bps9600", 3), ("bps19200", 4), ("bps38400", 5), ("bps57600", 6), ("bps115200", 7)))).setMaxAccess("readonly") if mibBuilder.loadTexts: consolePortSpd.setStatus('current') manageIpAddr = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 2, 2), IpAddress()).setMaxAccess("readwrite") if mibBuilder.loadTexts: manageIpAddr.setStatus('current') manageNetMask = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 2, 3), IpAddress()).setMaxAccess("readwrite") if mibBuilder.loadTexts: manageNetMask.setStatus('current') manageGateway = 
MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 2, 4), IpAddress()).setMaxAccess("readwrite") if mibBuilder.loadTexts: manageGateway.setStatus('current') snmpReadCommunity = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 2, 5), DisplayString()).setMaxAccess("readwrite") if mibBuilder.loadTexts: snmpReadCommunity.setStatus('current') snmpRWCommunity = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 2, 6), DisplayString()).setMaxAccess("readwrite") if mibBuilder.loadTexts: snmpRWCommunity.setStatus('current') trapDstIpAddr1 = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 2, 8), IpAddress()).setMaxAccess("readwrite") if mibBuilder.loadTexts: trapDstIpAddr1.setStatus('current') trapDstIpAddr2 = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 2, 9), IpAddress()).setMaxAccess("readwrite") if mibBuilder.loadTexts: trapDstIpAddr2.setStatus('current') trapDstIpAddr3 = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 2, 10), IpAddress()).setMaxAccess("readwrite") if mibBuilder.loadTexts: trapDstIpAddr3.setStatus('current') trapDstIpAddr4 = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 2, 11), IpAddress()).setMaxAccess("readwrite") if mibBuilder.loadTexts: trapDstIpAddr4.setStatus('current') sysOperate = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 2, 12), DeviceOperation()).setMaxAccess("readwrite") if mibBuilder.loadTexts: sysOperate.setStatus('current') chassisInfo = MibIdentifier((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 3)) chassisType = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 3, 1), DeviceType()).setMaxAccess("readonly") if mibBuilder.loadTexts: chassisType.setStatus('current') chassisFactorySerial = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 3, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 30))).setMaxAccess("readonly") if mibBuilder.loadTexts: chassisFactorySerial.setStatus('current') chassisRevision = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 3, 3), DisplayString()).setMaxAccess("readonly") if mibBuilder.loadTexts: chassisRevision.setStatus('current') 
chassisTemperature = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 3, 4), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: chassisTemperature.setStatus('current') powerStatusBit = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 3, 5), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: powerStatusBit.setStatus('current') fanStatusBit = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 3, 6), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: fanStatusBit.setStatus('current') cardModule = MibIdentifier((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 5)) mainCard = MibIdentifier((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 5, 1)) mainCardType = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 5, 1, 1), DeviceType()).setMaxAccess("readonly") if mibBuilder.loadTexts: mainCardType.setStatus('current') mainCardFactorySerial = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 5, 1, 2), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 30))).setMaxAccess("readonly") if mibBuilder.loadTexts: mainCardFactorySerial.setStatus('current') mainCardHWRevision = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 5, 1, 3), DisplayString()).setMaxAccess("readonly") if mibBuilder.loadTexts: mainCardHWRevision.setStatus('current') mainCardSWVersion = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 5, 1, 4), DisplayString()).setMaxAccess("readonly") if mibBuilder.loadTexts: mainCardSWVersion.setStatus('current') mainCardRunningStatus = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 5, 1, 5), DeviceStatus()).setMaxAccess("readonly") if mibBuilder.loadTexts: mainCardRunningStatus.setStatus('current') mainCardRunningTime = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 5, 1, 6), TimeTicks()).setMaxAccess("readonly") if mibBuilder.loadTexts: mainCardRunningTime.setStatus('current') mainCardOperate = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 5, 1, 7), DeviceOperation()).setMaxAccess("readwrite") if mibBuilder.loadTexts: mainCardOperate.setStatus('current') ponCard = MibIdentifier((1, 3, 6, 1, 4, 
1, 34592, 1, 3, 1, 5, 2)) ponCardTable = MibTable((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 5, 2, 1), ) if mibBuilder.loadTexts: ponCardTable.setStatus('current') ponCardEntry = MibTableRow((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 5, 2, 1, 1), ).setIndexNames((0, "FD-SYSTEM-MIB", "ponCardSlotId")) if mibBuilder.loadTexts: ponCardEntry.setStatus('current') ponCardSlotId = MibTableColumn((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 5, 2, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 4))) if mibBuilder.loadTexts: ponCardSlotId.setStatus('current') ponCardType = MibTableColumn((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 5, 2, 1, 1, 2), DeviceType()).setMaxAccess("readonly") if mibBuilder.loadTexts: ponCardType.setStatus('current') ponCardFactorySerial = MibTableColumn((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 5, 2, 1, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 30))).setMaxAccess("readonly") if mibBuilder.loadTexts: ponCardFactorySerial.setStatus('current') ponCardHwRev = MibTableColumn((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 5, 2, 1, 1, 4), DisplayString()).setMaxAccess("readonly") if mibBuilder.loadTexts: ponCardHwRev.setStatus('current') ponCardFwVer = MibTableColumn((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 5, 2, 1, 1, 5), DisplayString()).setMaxAccess("readonly") if mibBuilder.loadTexts: ponCardFwVer.setStatus('current') ponCardRunningStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 5, 2, 1, 1, 7), DeviceStatus()).setMaxAccess("readonly") if mibBuilder.loadTexts: ponCardRunningStatus.setStatus('current') ponCardRuningTime = MibTableColumn((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 5, 2, 1, 1, 8), TimeTicks()).setMaxAccess("readonly") if mibBuilder.loadTexts: ponCardRuningTime.setStatus('current') ponCardOperate = MibTableColumn((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 5, 2, 1, 1, 9), DeviceOperation()).setMaxAccess("readwrite") if mibBuilder.loadTexts: ponCardOperate.setStatus('current') ponCardUpgradeStat = MibTableColumn((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 5, 2, 1, 1, 10), 
Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))).clone(namedValues=NamedValues(("booting", 1), ("normalRun", 2), ("rcvFileIng", 3), ("rcvFileOk", 4), ("rcvFileErr", 5), ("upgrading", 6), ("upgradeOk", 7), ("upgradeErr", 8), ("upgradeOlt", 9), ("upgradeOnu", 10)))).setMaxAccess("readonly") if mibBuilder.loadTexts: ponCardUpgradeStat.setStatus('current') onuAuth = MibIdentifier((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 6)) authMethod = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 6, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("blackList", 1), ("whiteList", 2), ("none", 3)))).setMaxAccess("readwrite") if mibBuilder.loadTexts: authMethod.setStatus('current') nonAuthOper = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 6, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(2))).clone(namedValues=NamedValues(("clearNonAuthMacList", 2)))).setMaxAccess("readwrite") if mibBuilder.loadTexts: nonAuthOper.setStatus('current') onuAuthMacCfgTable = MibTable((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 6, 3), ) if mibBuilder.loadTexts: onuAuthMacCfgTable.setStatus('current') onuAuthMacCfgEntry = MibTableRow((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 6, 3, 1), ).setIndexNames((0, "FD-SYSTEM-MIB", "authMacEntryId")) if mibBuilder.loadTexts: onuAuthMacCfgEntry.setStatus('current') authMacEntryId = MibTableColumn((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 6, 3, 1, 1), Unsigned32()) if mibBuilder.loadTexts: authMacEntryId.setStatus('current') beginMacAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 6, 3, 1, 2), MacAddress()).setMaxAccess("readcreate") if mibBuilder.loadTexts: beginMacAddr.setStatus('current') endMacAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 6, 3, 1, 3), MacAddress()).setMaxAccess("readcreate") if mibBuilder.loadTexts: endMacAddr.setStatus('current') macAttr = MibTableColumn((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 6, 3, 1, 4), 
Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("blackMac", 1), ("whiteMac", 2), ("obsolete", 3)))).setMaxAccess("readcreate") if mibBuilder.loadTexts: macAttr.setStatus('current') onuAuthMacRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 6, 3, 1, 5), RowStatus()).setMaxAccess("readcreate") if mibBuilder.loadTexts: onuAuthMacRowStatus.setStatus('current') nonAuthOnuListTable = MibTable((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 6, 4), ) if mibBuilder.loadTexts: nonAuthOnuListTable.setStatus('current') nonAuthOnuListEntry = MibTableRow((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 6, 4, 1), ).setIndexNames((0, "FD-SYSTEM-MIB", "nonAuthOnuMacIndex")) if mibBuilder.loadTexts: nonAuthOnuListEntry.setStatus('current') nonAuthOnuMacIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 6, 4, 1, 1), Unsigned32()) if mibBuilder.loadTexts: nonAuthOnuMacIndex.setStatus('current') nonAuthOnuMac = MibTableColumn((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 6, 4, 1, 2), MacAddress()).setMaxAccess("readonly") if mibBuilder.loadTexts: nonAuthOnuMac.setStatus('current') nonAuthOnuTries = MibTableColumn((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 6, 4, 1, 3), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: nonAuthOnuTries.setStatus('current') userManage = MibIdentifier((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 7)) userManageTable = MibTable((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 7, 1), ) if mibBuilder.loadTexts: userManageTable.setStatus('current') userManageEntry = MibTableRow((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 7, 1, 1), ).setIndexNames((0, "FD-SYSTEM-MIB", "userId")) if mibBuilder.loadTexts: userManageEntry.setStatus('current') userId = MibTableColumn((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 7, 1, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 10))) if mibBuilder.loadTexts: userId.setStatus('current') userName = MibTableColumn((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 7, 1, 1, 2), DisplayString()).setMaxAccess("readcreate") if 
mibBuilder.loadTexts: userName.setStatus('current') userPassword = MibTableColumn((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 7, 1, 1, 3), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readcreate") if mibBuilder.loadTexts: userPassword.setStatus('current') userPermission = MibTableColumn((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 7, 1, 1, 4), Unsigned32()).setMaxAccess("readonly") if mibBuilder.loadTexts: userPermission.setStatus('current') userAccessDeviceMap = MibTableColumn((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 7, 1, 1, 5), Unsigned32()).setMaxAccess("readcreate") if mibBuilder.loadTexts: userAccessDeviceMap.setStatus('current') loginTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 7, 1, 1, 6), Unsigned32().clone(300)).setMaxAccess("readonly") if mibBuilder.loadTexts: loginTimeout.setStatus('current') userEntryRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 7, 1, 1, 7), RowStatus()).setMaxAccess("readcreate") if mibBuilder.loadTexts: userEntryRowStatus.setStatus('current') upgrade = MibIdentifier((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 8)) ftpServerIp = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 8, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 63))).setMaxAccess("readwrite") if mibBuilder.loadTexts: ftpServerIp.setStatus('current') ftpServerUserName = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 8, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 63))).setMaxAccess("readwrite") if mibBuilder.loadTexts: ftpServerUserName.setStatus('current') ftpServerUserPasswd = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 8, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 63))).setMaxAccess("readwrite") if mibBuilder.loadTexts: ftpServerUserPasswd.setStatus('current') ftpOperFileName = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 8, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 63))).setMaxAccess("readwrite") if mibBuilder.loadTexts: ftpOperFileName.setStatus('current') ftpOperTarget 
= MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 8, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9))).clone(namedValues=NamedValues(("ctrlCardImage", 1), ("ponCardImage", 2), ("oltApp", 3), ("oltPers", 4), ("oltBoot", 5), ("onuApp", 6), ("onuPers", 7), ("onuBoot", 8), ("otherSpecifiedFile", 9)))).setMaxAccess("readwrite") if mibBuilder.loadTexts: ftpOperTarget.setStatus('current') dwLoadFileCrcCheck = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 8, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("checkCrc", 1), ("dontCheckCrc", 2)))).setMaxAccess("readwrite") if mibBuilder.loadTexts: dwLoadFileCrcCheck.setStatus('current') dwLoadFileCrcValue = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 8, 8), Unsigned32()).setMaxAccess("readwrite") if mibBuilder.loadTexts: dwLoadFileCrcValue.setStatus('current') operDeviceMap = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 8, 9), OctetString().subtype(subtypeSpec=ValueSizeConstraint(10, 10)).setFixedLength(10)).setMaxAccess("readwrite") if mibBuilder.loadTexts: operDeviceMap.setStatus('current') upgradeStatus = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 8, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))).clone(namedValues=NamedValues(("paraErr", 1), ("initFtpErr", 2), ("transmitting", 3), ("transmitErr", 4), ("transmitOk", 5), ("upgrading", 6), ("upgradeErr", 7), ("upgradeOk", 8), ("uploading", 9), ("uploadErr", 10), ("uploadOk", 11)))).setMaxAccess("readonly") if mibBuilder.loadTexts: upgradeStatus.setStatus('current') upgradeOperation = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 8, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("downloadFile", 1), ("upgrade", 2), ("reboot", 3), ("uploadFile", 4)))).setMaxAccess("readwrite") if mibBuilder.loadTexts: 
upgradeOperation.setStatus('current') ftpProgress = MibScalar((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 8, 12), Integer32()).setUnits('percent').setMaxAccess("readonly") if mibBuilder.loadTexts: ftpProgress.setStatus('current') fdSysConformance = MibIdentifier((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 13)) fdSystemGroups = MibIdentifier((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 13, 1)) sysBaseManageGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 13, 1, 1)).setObjects(("FD-SYSTEM-MIB", "sysModel"), ("FD-SYSTEM-MIB", "sysDesc"), ("FD-SYSTEM-MIB", "sysLocation"), ("FD-SYSTEM-MIB", "sysContact"), ("FD-SYSTEM-MIB", "sysMajAlarmLed"), ("FD-SYSTEM-MIB", "sysCriAlarmLed"), ("FD-SYSTEM-MIB", "sysAlarmDesc"), ("FD-SYSTEM-MIB", "consolePortSpd"), ("FD-SYSTEM-MIB", "manageIpAddr"), ("FD-SYSTEM-MIB", "manageNetMask"), ("FD-SYSTEM-MIB", "manageGateway"), ("FD-SYSTEM-MIB", "snmpReadCommunity"), ("FD-SYSTEM-MIB", "snmpRWCommunity"), ("FD-SYSTEM-MIB", "trapDstIpAddr1"), ("FD-SYSTEM-MIB", "trapDstIpAddr2"), ("FD-SYSTEM-MIB", "trapDstIpAddr3"), ("FD-SYSTEM-MIB", "trapDstIpAddr4"), ("FD-SYSTEM-MIB", "sysOperate")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): sysBaseManageGroup = sysBaseManageGroup.setStatus('current') chassisInfoGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 13, 1, 2)).setObjects(("FD-SYSTEM-MIB", "chassisType"), ("FD-SYSTEM-MIB", "chassisFactorySerial"), ("FD-SYSTEM-MIB", "chassisRevision"), ("FD-SYSTEM-MIB", "chassisTemperature"), ("FD-SYSTEM-MIB", "powerStatusBit"), ("FD-SYSTEM-MIB", "fanStatusBit")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): chassisInfoGroup = chassisInfoGroup.setStatus('current') cardModuleGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 13, 1, 3)).setObjects(("FD-SYSTEM-MIB", "mainCardType"), ("FD-SYSTEM-MIB", "mainCardFactorySerial"), ("FD-SYSTEM-MIB", "mainCardHWRevision"), ("FD-SYSTEM-MIB", "mainCardSWVersion"), ("FD-SYSTEM-MIB", "mainCardRunningStatus"), ("FD-SYSTEM-MIB", "mainCardRunningTime"), ("FD-SYSTEM-MIB", 
"mainCardOperate"), ("FD-SYSTEM-MIB", "ponCardType"), ("FD-SYSTEM-MIB", "ponCardFactorySerial"), ("FD-SYSTEM-MIB", "ponCardHwRev"), ("FD-SYSTEM-MIB", "ponCardFwVer"), ("FD-SYSTEM-MIB", "ponCardRunningStatus"), ("FD-SYSTEM-MIB", "ponCardRuningTime"), ("FD-SYSTEM-MIB", "ponCardOperate"), ("FD-SYSTEM-MIB", "ponCardUpgradeStat")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): cardModuleGroup = cardModuleGroup.setStatus('current') onuAuthGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 13, 1, 4)).setObjects(("FD-SYSTEM-MIB", "authMethod"), ("FD-SYSTEM-MIB", "nonAuthOper"), ("FD-SYSTEM-MIB", "beginMacAddr"), ("FD-SYSTEM-MIB", "endMacAddr"), ("FD-SYSTEM-MIB", "macAttr"), ("FD-SYSTEM-MIB", "onuAuthMacRowStatus"), ("FD-SYSTEM-MIB", "nonAuthOnuMac"), ("FD-SYSTEM-MIB", "nonAuthOnuTries")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): onuAuthGroup = onuAuthGroup.setStatus('current') userManageGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 13, 1, 5)).setObjects(("FD-SYSTEM-MIB", "userName"), ("FD-SYSTEM-MIB", "userPassword"), ("FD-SYSTEM-MIB", "userPermission"), ("FD-SYSTEM-MIB", "userAccessDeviceMap"), ("FD-SYSTEM-MIB", "loginTimeout"), ("FD-SYSTEM-MIB", "userEntryRowStatus")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): userManageGroup = userManageGroup.setStatus('current') systemUpgradeGroup = ObjectGroup((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 13, 1, 6)).setObjects(("FD-SYSTEM-MIB", "ftpServerIp"), ("FD-SYSTEM-MIB", "ftpServerUserName"), ("FD-SYSTEM-MIB", "ftpServerUserPasswd"), ("FD-SYSTEM-MIB", "ftpOperFileName"), ("FD-SYSTEM-MIB", "dwLoadFileCrcCheck"), ("FD-SYSTEM-MIB", "dwLoadFileCrcValue"), ("FD-SYSTEM-MIB", "operDeviceMap"), ("FD-SYSTEM-MIB", "upgradeStatus"), ("FD-SYSTEM-MIB", "ftpProgress"), ("FD-SYSTEM-MIB", "upgradeOperation"), ("FD-SYSTEM-MIB", "ftpOperTarget")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): systemUpgradeGroup = systemUpgradeGroup.setStatus('current') fdSystemCompliances = MibIdentifier((1, 
3, 6, 1, 4, 1, 34592, 1, 3, 1, 13, 2)) fdSystemCompliance = ModuleCompliance((1, 3, 6, 1, 4, 1, 34592, 1, 3, 1, 13, 2, 1)).setObjects(("FD-SYSTEM-MIB", "sysBaseManageGroup"), ("FD-SYSTEM-MIB", "chassisInfoGroup"), ("FD-SYSTEM-MIB", "cardModuleGroup"), ("FD-SYSTEM-MIB", "onuAuthGroup"), ("FD-SYSTEM-MIB", "userManageGroup"), ("FD-SYSTEM-MIB", "systemUpgradeGroup")) if getattr(mibBuilder, 'version', (0, 0, 0)) > (4, 4, 0): fdSystemCompliance = fdSystemCompliance.setStatus('current') mibBuilder.exportSymbols("FD-SYSTEM-MIB", ponCardType=ponCardType, userManageTable=userManageTable, dwLoadFileCrcCheck=dwLoadFileCrcCheck, ftpServerIp=ftpServerIp, userEntryRowStatus=userEntryRowStatus, sysModel=sysModel, userManageEntry=userManageEntry, manageIpAddr=manageIpAddr, PYSNMP_MODULE_ID=systemInfo, onuAuthMacCfgTable=onuAuthMacCfgTable, sysCriAlarmLed=sysCriAlarmLed, ponCardUpgradeStat=ponCardUpgradeStat, userManage=userManage, authMacEntryId=authMacEntryId, chassisRevision=chassisRevision, ponCardFwVer=ponCardFwVer, fdSystemGroups=fdSystemGroups, ftpOperFileName=ftpOperFileName, ponCardFactorySerial=ponCardFactorySerial, manageGateway=manageGateway, mainCardRunningStatus=mainCardRunningStatus, ftpServerUserPasswd=ftpServerUserPasswd, sysContact=sysContact, chassisType=chassisType, userId=userId, snmpRWCommunity=snmpRWCommunity, nonAuthOnuMac=nonAuthOnuMac, manageNetMask=manageNetMask, nonAuthOper=nonAuthOper, userName=userName, mainCardType=mainCardType, upgradeStatus=upgradeStatus, ponCardSlotId=ponCardSlotId, userPassword=userPassword, nonAuthOnuMacIndex=nonAuthOnuMacIndex, macAttr=macAttr, ponCardOperate=ponCardOperate, sysOperate=sysOperate, nonAuthOnuListEntry=nonAuthOnuListEntry, fdSysConformance=fdSysConformance, powerStatusBit=powerStatusBit, ponCard=ponCard, ftpOperTarget=ftpOperTarget, fdSystemCompliance=fdSystemCompliance, onuAuthGroup=onuAuthGroup, sysLocation=sysLocation, sysConfig=sysConfig, sysBaseManageGroup=sysBaseManageGroup, sysDesc=sysDesc, 
systemUpgradeGroup=systemUpgradeGroup, fanStatusBit=fanStatusBit, nonAuthOnuTries=nonAuthOnuTries, mainCardRunningTime=mainCardRunningTime, chassisInfo=chassisInfo, mainCardOperate=mainCardOperate, trapDstIpAddr2=trapDstIpAddr2, mainCard=mainCard, sysAlarmDesc=sysAlarmDesc, loginTimeout=loginTimeout, operDeviceMap=operDeviceMap, userAccessDeviceMap=userAccessDeviceMap, upgrade=upgrade, onuAuthMacRowStatus=onuAuthMacRowStatus, ftpProgress=ftpProgress, chassisInfoGroup=chassisInfoGroup, onuAuthMacCfgEntry=onuAuthMacCfgEntry, snmpReadCommunity=snmpReadCommunity, sysBaseInfo=sysBaseInfo, sysMajAlarmLed=sysMajAlarmLed, trapDstIpAddr1=trapDstIpAddr1, ftpServerUserName=ftpServerUserName, upgradeOperation=upgradeOperation, trapDstIpAddr4=trapDstIpAddr4, mainCardSWVersion=mainCardSWVersion, ponCardRunningStatus=ponCardRunningStatus, systemInfo=systemInfo, trapDstIpAddr3=trapDstIpAddr3, mainCardFactorySerial=mainCardFactorySerial, ponCardEntry=ponCardEntry, ponCardTable=ponCardTable, mainCardHWRevision=mainCardHWRevision, endMacAddr=endMacAddr, consolePortSpd=consolePortSpd, userManageGroup=userManageGroup, cardModule=cardModule, onuAuth=onuAuth, dwLoadFileCrcValue=dwLoadFileCrcValue, ponCardRuningTime=ponCardRuningTime, fdSystemCompliances=fdSystemCompliances, beginMacAddr=beginMacAddr, nonAuthOnuListTable=nonAuthOnuListTable, chassisFactorySerial=chassisFactorySerial, cardModuleGroup=cardModuleGroup, ponCardHwRev=ponCardHwRev, userPermission=userPermission, chassisTemperature=chassisTemperature, authMethod=authMethod)
124.307317
3,032
0.727269
3,130
25,483
5.920447
0.08754
0.022233
0.015703
0.020722
0.429982
0.37332
0.274513
0.253899
0.23431
0.230533
0
0.085971
0.092572
25,483
204
3,033
124.916667
0.715404
0.012557
0
0.035714
0
0
0.168323
0.003419
0
0
0
0
0
1
0
false
0.035714
0.035714
0
0.035714
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
2
8f949563ac0e205ebc380e370afe4937e75dab84
2,690
py
Python
blender/.blender/scripts/textplugin_templates.py
visnz/sketchfab_download
976f667d5c2c2864b2bad65aceac0dab5ce51b74
[ "Apache-2.0" ]
41
2021-02-18T05:56:26.000Z
2021-12-06T07:58:15.000Z
blender/.blender/scripts/textplugin_templates.py
visnz/sketchfab_download
976f667d5c2c2864b2bad65aceac0dab5ce51b74
[ "Apache-2.0" ]
19
2021-02-18T05:59:03.000Z
2022-01-13T01:00:52.000Z
blender/.blender/scripts/textplugin_templates.py
visnz/sketchfab_download
976f667d5c2c2864b2bad65aceac0dab5ce51b74
[ "Apache-2.0" ]
18
2021-02-22T13:32:56.000Z
2022-01-22T12:38:29.000Z
#!BPY
"""
Name: 'Template Completion | Tab'
Blender: 246
Group: 'TextPlugin'
Shortcut: 'Tab'
Tooltip: 'Completes templates based on the text preceding the cursor'
"""

# Only run if we have the required modules
try:
	import bpy
	from BPyTextPlugin import *
	from Blender import Text
except ImportError:
	OK = False
else:
	OK = True

# Trigger word -> template text. ${N} / ${N:default} mark insertion points;
# points sharing the same N receive the same text.
templates = {
	'ie':
		'if ${1:cond}:\n'
		'\t${2}\n'
		'else:\n'
		'\t${3}\n',
	'iei':
		'if ${1:cond}:\n'
		'\t${2}\n'
		'elif:\n'
		'\t${3}\n'
		'else:\n'
		'\t${4}\n',
	'def':
		'def ${1:name}(${2:params}):\n'
		'\t"""(${2}) - ${3:comment}"""\n'
		'\t${4}',
	'cls':
		'class ${1:name}(${2:parent}):\n'
		'\t"""${3:docs}"""\n'
		'\t\n'
		'\tdef __init__(self, ${4:params}):\n'
		'\t\t"""Creates a new ${1}"""\n'
		'\t\t${5}',
	'class':
		'class ${1:name}(${2:parent}):\n'
		'\t"""${3:docs}"""\n'
		'\t\n'
		'\tdef __init__(self, ${4:params}):\n'
		'\t\t"""Creates a new ${1}"""\n'
		'\t\t${5}'
}

def main():
	"""Expand the template trigger word preceding the cursor in the active text.

	Deletes the trigger, inserts the matching template (re-indented to the
	current line's leading whitespace), places temporary edit markers on every
	insertion point, and selects the first one.  Does nothing when there is no
	active text, the cursor is not in a normal (non-string/comment) context,
	or the word before the cursor is not a known trigger.
	"""
	txt = bpy.data.texts.active
	if not txt:
		return

	row, c = txt.getCursorPos()
	line = txt.asLines(row, row + 1)[0]

	# Measure the current line's leading indentation so inserted newlines
	# can reproduce it.
	indent = 0
	while indent < c and (line[indent] == ' ' or line[indent] == '\t'):
		indent += 1

	# Check we are in a normal context
	if get_context(txt) != CTX_NORMAL:
		return

	targets = get_targets(line, c - 1)
	if len(targets) != 1:
		return

	color = (0, 192, 32)

	# Dict lookup instead of scanning templates.items(); also avoids the
	# removed-in-Python-3 dict.has_key() below by using the `in` operator.
	trigger = targets[0]
	template = templates.get(trigger)
	if template is None:
		return

	inserts = {}  # point id -> (text, [(x, y), ...])
	txt.delete(-len(trigger) - 1)
	y, x = txt.getCursorPos()
	first = None  # (text, x, y) of the first insertion point encountered

	# Insert template text and parse for insertion points
	count = len(template)
	i = 0
	while i < count:
		if i < count - 1 and template[i] == '$' and template[i + 1] == '{':
			i += 2
			e = template.find('}', i)
			item = template[i:e].split(':')
			if len(item) < 2:
				item.append('')
			if item[0] not in inserts:
				inserts[item[0]] = (item[1], [(x, y)])
			else:
				# Repeated id: record the point and reuse the first text.
				inserts[item[0]][1].append((x, y))
				item[1] = inserts[item[0]][0]
			if not first:
				first = (item[1], x, y)
			txt.insert(item[1])
			x += len(item[1])
			i = e  # leave i on '}'; the i += 1 below steps past it
		else:
			txt.insert(template[i])
			if template[i] == '\n':
				# Reproduce the original line's indentation after a newline.
				txt.insert(line[:indent])
				y += 1
				x = indent
			else:
				x += 1
		i += 1

	# Insert markers at insertion points
	for key, (text, points) in inserts.items():
		for x, y in points:
			txt.setCursorPos(y, x)
			txt.setSelectPos(y, x + len(text))
			txt.markSelection((hash(text) + int(key)) & 0xFFFF, color,
				Text.TMARK_TEMP | Text.TMARK_EDITALL)

	# Select the first insertion point so the user can start typing.
	if first:
		text, x, y = first
		txt.setCursorPos(y, x)
		txt.setSelectPos(y, x + len(text))

# Check we are running as a script and not imported as a module
if __name__ == "__main__" and OK:
	main()
21.693548
69
0.568773
427
2,690
3.531616
0.30445
0.019894
0.007958
0.01061
0.155172
0.155172
0.155172
0.140584
0.140584
0.140584
0
0.02891
0.215613
2,690
123
70
21.869919
0.685782
0.141264
0
0.265306
0
0
0.194166
0.032651
0
0
0.002612
0
0
1
0.010204
false
0
0.040816
0
0.071429
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
8f94b300e2b34ff76f0d2873be32657ff67630be
1,939
py
Python
kive/file_access_utils.py
cfe-lab/Kive
e46b9eb40f085d579c12f47b6b5696d5ee93a9d3
[ "BSD-3-Clause" ]
2
2016-10-02T18:24:53.000Z
2019-01-19T09:37:56.000Z
kive/file_access_utils.py
cfe-lab/Kive
e46b9eb40f085d579c12f47b6b5696d5ee93a9d3
[ "BSD-3-Clause" ]
1,190
2015-07-10T22:57:23.000Z
2022-03-30T05:10:14.000Z
kive/file_access_utils.py
cfe-lab/Kive
e46b9eb40f085d579c12f47b6b5696d5ee93a9d3
[ "BSD-3-Clause" ]
2
2019-07-16T00:25:25.000Z
2019-11-25T16:32:58.000Z
"""
Basic file-checking functionality used by Kive.
"""
import hashlib
import mimetypes
import os
from contextlib import contextmanager

from django.http import FileResponse


def build_download_response(field_file):
    """Build a streaming download response for a Django FieldFile.

    The file is opened in binary mode and handed to FileResponse, which
    streams it and closes it when streaming finishes, so it is deliberately
    not closed here.
    """
    # Intentionally leave this open for streaming response.
    # FileResponse will close it when streaming finishes.
    field_file.open('rb')
    guessed_type = mimetypes.guess_type(field_file.name)[0]
    response = FileResponse(field_file, content_type=guessed_type)
    response['Content-Length'] = field_file.size
    base_name = os.path.basename(field_file.name)
    response['Content-Disposition'] = 'attachment; filename="{}"'.format(base_name)
    return response


def compute_md5(file_to_checksum, chunk_size=1024*64):
    """Computes MD5 checksum of specified file.

    file_to_checksum should be an open, readable, file handle, with its
    position at the beginning, i.e. so that .read() gets the entire
    contents of the file.

    NOTE: under python3, the file should have been open in binary mode
    ("rb") so that bytes (not strings) are returned when iterating over
    the file.
    """
    digest = hashlib.md5()
    while True:
        chunk = file_to_checksum.read(chunk_size)
        if not chunk:
            break
        digest.update(chunk)
    return digest.hexdigest()


@contextmanager
def use_field_file(field_file, mode='rb'):
    """ Context manager for FieldFile objects.

    Tries to leave a file object in the same state it was in when the context
    manager started.

    It's hard to tell when to close a FieldFile object. It opens implicitly
    when you first read from it. Sometimes, it's an in-memory file object,
    and it can't be reopened.
    """
    originally_closed = field_file.closed
    field_file.open(mode)
    initial_position = field_file.tell()
    try:
        yield field_file
    finally:
        # Restore the pre-entry state: close only if it started closed,
        # otherwise just rewind to where the caller left it.
        if originally_closed:
            field_file.close()
        else:
            field_file.seek(initial_position)
30.777778
78
0.700877
268
1,939
4.951493
0.488806
0.094951
0.03165
0.027129
0
0
0
0
0
0
0
0.009253
0.219701
1,939
62
79
31.274194
0.867812
0.410005
0
0
0
0
0.057728
0
0
0
0
0
0
1
0.09375
false
0
0.15625
0
0.3125
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0
8f958f12563b74a6555a910c0edcd3c3aff2c51a
16,170
py
Python
gem/quaternion.py
AlexMarinescu/pyGameMath
5257291431bb45db0274dc48edf24694ecfe2e2d
[ "BSD-2-Clause-FreeBSD" ]
8
2020-04-15T22:30:52.000Z
2022-01-18T01:05:45.000Z
gem/quaternion.py
SmithSamuelM/gem
d05dfa50739aa0b3dcd5e9cd2eb7d147fb4c0d63
[ "BSD-2-Clause-FreeBSD" ]
null
null
null
gem/quaternion.py
SmithSamuelM/gem
d05dfa50739aa0b3dcd5e9cd2eb7d147fb4c0d63
[ "BSD-2-Clause-FreeBSD" ]
1
2021-11-12T00:41:21.000Z
2021-11-12T00:41:21.000Z
import math

import six.moves as sm

from gem import vector
from gem import matrix


def quat_identity():
    ''' Returns the quaternion identity [w, x, y, z] = [1, 0, 0, 0]. '''
    return [1.0, 0.0, 0.0, 0.0]


def quat_add(quat, quat1):
    ''' Add two quaternions component-wise. '''
    return [quat[0] + quat1[0], quat[1] + quat1[1],
            quat[2] + quat1[2], quat[3] + quat1[3]]


def quat_sub(quat, quat1):
    ''' Subtract two quaternions component-wise. '''
    return [quat[0] - quat1[0], quat[1] - quat1[1],
            quat[2] - quat1[2], quat[3] - quat1[3]]


def quat_mul_quat(quat, quat1):
    ''' Hamilton product of two quaternions stored as [w, x, y, z]. '''
    w = quat[0] * quat1[0] - quat[1] * quat1[1] - quat[2] * quat1[2] - quat[3] * quat1[3]
    x = quat[0] * quat1[1] + quat[1] * quat1[0] + quat[2] * quat1[3] - quat[3] * quat1[2]
    y = quat[0] * quat1[2] + quat[2] * quat1[0] + quat[3] * quat1[1] - quat[1] * quat1[3]
    z = quat[0] * quat1[3] + quat[3] * quat1[0] + quat[1] * quat1[2] - quat[2] * quat1[1]
    return [w, x, y, z]


def quat_mul_vect(quat, vect):
    ''' Multiply a quaternion with a 3D vector (treated as a pure quaternion). '''
    w = -quat[1] * vect[0] - quat[2] * vect[1] - quat[3] * vect[2]
    x = quat[0] * vect[0] + quat[2] * vect[2] - quat[3] * vect[1]
    y = quat[0] * vect[1] + quat[3] * vect[0] - quat[1] * vect[2]
    z = quat[0] * vect[2] + quat[1] * vect[1] - quat[2] * vect[0]
    return [w, x, y, z]


def quat_mul_float(quat, scalar):
    ''' Multiply a quaternion with a scalar (float). '''
    return [quat[0] * scalar, quat[1] * scalar,
            quat[2] * scalar, quat[3] * scalar]


def quat_div_float(quat, scalar):
    ''' Divide a quaternion with a scalar (float). '''
    return [quat[0] / scalar, quat[1] / scalar,
            quat[2] / scalar, quat[3] / scalar]


def quat_neg(quat):
    ''' Negate the elements of a quaternion. '''
    return [-quat[0], -quat[1], -quat[2], -quat[3]]


def quat_dot(quat1, quat2):
    ''' Dot product between two quaternions. Returns a scalar. '''
    rdp = 0
    for i in sm.range(4):
        rdp += quat1[i] * quat2[i]
    return rdp


def quat_magnitude(quat):
    ''' Compute magnitude (Euclidean norm) of a quaternion. Returns a scalar. '''
    rmg = 0
    for i in sm.range(4):
        rmg += quat[i] * quat[i]
    return math.sqrt(rmg)


def quat_normalize(quat):
    ''' Returns a normalized quaternion (identity if zero-length). '''
    length = quat_magnitude(quat)
    oquat = quat_identity()
    # BUGFIX: was `length is not 0` -- identity comparison on a number is
    # implementation-dependent and never matches a float 0.0; use !=.
    if length != 0:
        for i in sm.range(4):
            oquat[i] = quat[i] / length
    return oquat


def quat_conjugate(quat):
    ''' Returns the conjugate [w, -x, -y, -z] of a quaternion. '''
    idquat = quat_identity()
    for i in sm.range(4):
        idquat[i] = -quat[i]
    idquat[0] = -idquat[0]  # restore w: only the vector part is negated
    return idquat


def quat_inverse(quat):
    ''' Returns the inverse of a quaternion: conjugate / squared magnitude. '''
    lengthSquared = (quat[0] * quat[0] + quat[1] * quat[1] +
                     quat[2] * quat[2] + quat[3] * quat[3])
    # BUGFIX: the inverse is the *conjugate* over |q|^2. The original divided
    # the quaternion itself, which is only correct when the vector part is 0.
    return [quat[0] / lengthSquared,
            -quat[1] / lengthSquared,
            -quat[2] / lengthSquared,
            -quat[3] / lengthSquared]


def _axis_angle_to_list(axis, theta):
    ''' Build the [w, x, y, z] list for a rotation of theta degrees about
    axis. Accepts a vector.Vector (normalized in place, as before) or a
    plain 3-element list. Returns NotImplemented for other types. '''
    thetaOver2 = theta * 0.5
    sto2 = math.sin(math.radians(thetaOver2))
    cto2 = math.cos(math.radians(thetaOver2))
    if isinstance(axis, vector.Vector):
        axis.i_normalize()
        return [cto2, axis.vector[0] * sto2, axis.vector[1] * sto2,
                axis.vector[2] * sto2]
    if isinstance(axis, list):
        # BUGFIX: plain lists have no normalize() method (the original
        # called axis.normalize() and raised AttributeError); do it by hand.
        length = math.sqrt(axis[0] * axis[0] + axis[1] * axis[1] +
                           axis[2] * axis[2])
        return [cto2, axis[0] / length * sto2, axis[1] / length * sto2,
                axis[2] / length * sto2]
    return NotImplemented


def quat_from_axis_angle(axis, theta):
    ''' Returns a quaternion from a given axis and an angle (degrees). '''
    quat1List = _axis_angle_to_list(axis, theta)
    if quat1List is NotImplemented:
        return NotImplemented
    return Quaternion(data=quat1List)


def quat_rotate(origin, axis, theta):
    ''' Returns the vector `origin` rotated theta degrees around `axis`. '''
    thetaOver2 = theta * 0.5
    sinThetaOver2 = math.sin(math.radians(thetaOver2))
    cosThetaOver2 = math.cos(math.radians(thetaOver2))
    quat = Quaternion(data=[cosThetaOver2,
                            axis[0] * sinThetaOver2,
                            axis[1] * sinThetaOver2,
                            axis[2] * sinThetaOver2])
    # q * v * conj(q) -- the vector part of the result is the rotated vector.
    rotation = (quat * origin) * quat.conjugate()
    return vector.Vector(3, data=[rotation.data[1], rotation.data[2],
                                  rotation.data[3]])


def quat_rotate_x_from_angle(theta):
    ''' Creates a quaternion that rotates around the X axis given an angle.
    NOTE(review): theta appears to be in radians here, unlike the
    axis-angle helpers which convert from degrees -- confirm with callers. '''
    thetaOver2 = theta * 0.5
    cto2 = math.cos(thetaOver2)
    sto2 = math.sin(thetaOver2)
    return [cto2, sto2, 0.0, 0.0]


def quat_rotate_y_from_angle(theta):
    ''' Creates a quaternion that rotates around the Y axis given an angle
    (radians, see quat_rotate_x_from_angle). '''
    thetaOver2 = theta * 0.5
    cto2 = math.cos(thetaOver2)
    sto2 = math.sin(thetaOver2)
    return [cto2, 0.0, sto2, 0.0]


def quat_rotate_z_from_angle(theta):
    ''' Creates a quaternion that rotates around the Z axis given an angle
    (radians, see quat_rotate_x_from_angle). '''
    thetaOver2 = theta * 0.5
    cto2 = math.cos(thetaOver2)
    sto2 = math.sin(thetaOver2)
    return [cto2, 0.0, 0.0, sto2]


def quat_rotate_from_axis_angle(axis, theta):
    ''' Creates a quaternion rotating theta degrees around an arbitrary
    axis and applies it to that axis. '''
    quat1List = _axis_angle_to_list(axis, theta)
    if quat1List is NotImplemented:
        return NotImplemented
    quat1 = Quaternion(data=quat1List)
    rotation = (quat1 * axis) * quat1.conjugate()
    return rotation


def quat_rotate_vector(quat, vec):
    ''' Rotates a vector by a quaternion, returns a vector. '''
    outQuat = (quat * vec) * quat.conjugate()
    return vector.Vector(3, data=[outQuat.data[1], outQuat.data[2],
                                  outQuat.data[3]])


def quat_pow(quat, exp):
    ''' Returns a quaternion raised to the power of `exp`. '''
    quatExp = Quaternion()
    # BUGFIX: was `quat.data[0] is not 0.0`, an identity comparison that is
    # effectively always True for floats; compare with != instead.
    if quat.data[0] != 0.0:
        angle = math.acos(quat.data[0])
        newAngle = angle * exp
        quatExp.data[0] = math.cos(newAngle)
        divAngle = math.sin(newAngle) / math.sin(angle)
        # BUGFIX: scale the *source* quaternion's vector part. The original
        # did `quatExp.data[i] *= divAngle` on the identity quaternion,
        # whose vector components are zero, so the result was always zero.
        quatExp.data[1] = quat.data[1] * divAngle
        quatExp.data[2] = quat.data[2] * divAngle
        quatExp.data[3] = quat.data[3] * divAngle
    return quatExp


def quat_log(quat):
    ''' Returns the logarithm of a quaternion (as a [w, x, y, z] list). '''
    alpha = math.acos(quat.data[0])
    sinAlpha = math.sin(alpha)
    outList = [1.0, 0.0, 0.0, 0.0]
    if sinAlpha > 0.0:
        outList[1] = quat.data[1] * alpha / sinAlpha
        outList[2] = quat.data[2] * alpha / sinAlpha
        outList[3] = quat.data[3] * alpha / sinAlpha
    else:
        outList = quat.data
    return outList


def quat_lerp(quat0, quat1, t):
    ''' Linear interpolation between two quaternions. '''
    k0 = 1.0 - t
    k1 = t
    return (quat0 * k0) + (quat1 * k1)


def quat_slerp(quat0, quat1, t):
    ''' Spherical interpolation between two quaternions. '''
    cosTheta = quat0.dot(quat1)
    # Take the shorter arc: negate one endpoint when the dot is negative.
    if cosTheta < 0.0:
        quat1Neg = quat1.negate()
        cosTheta = -cosTheta
    else:
        quat1Neg = quat1
    if cosTheta > 0.999:
        # Nearly parallel: fall back to linear weights to avoid dividing
        # by a vanishing sin(theta).
        k0 = 1.0 - t
        k1 = t
    else:
        theta = math.acos(cosTheta)
        oneOverSinTheta = 1.0 / math.sin(theta)
        k0 = math.sin((1.0 - t) * theta) * oneOverSinTheta
        k1 = math.sin(t * theta) * oneOverSinTheta
    return (quat0 * k0) + (quat1Neg * k1)


def quat_slerp_no_invert(quat0, quat1, t):
    ''' Spherical interpolation between two quaternions, it does not check
    for theta > 90. Used by SQUAD. '''
    dotP = quat0.dot(quat1)
    if (dotP > -0.95) and (dotP < 0.95):
        angle = math.acos(dotP)
        k0 = math.sin(angle * (1.0 - t)) / math.sin(angle)
        k1 = math.sin(t * angle) / math.sin(angle)
        output = (quat0 * k0) + (quat1 * k1)
    else:
        output = quat_lerp(quat0, quat1, t)
    return output


def quat_squad(quat0, quat1, quat2, t):
    ''' Quaternion splines (SQUAD). '''
    # BUGFIX: was `2 * t(1 - t)`, which calls the float t as a function and
    # raises TypeError; the intended weight is 2*t*(1-t).
    return quat_slerp_no_invert(quat_slerp_no_invert(quat0, quat2, t),
                                quat_slerp_no_invert(quat0, quat1, t),
                                2 * t * (1 - t))


def quat_to_matrix(quat):
    ''' Converts a quaternion to a rotational 4x4 matrix.
    NOTE(review): row 3 is left at whatever matrix.Matrix(4) initializes
    (presumably identity) -- confirm against the Matrix constructor. '''
    x2 = quat.data[1] * quat.data[1]
    y2 = quat.data[2] * quat.data[2]
    z2 = quat.data[3] * quat.data[3]
    xy = quat.data[1] * quat.data[2]
    xz = quat.data[1] * quat.data[3]
    yz = quat.data[2] * quat.data[3]
    wx = quat.data[0] * quat.data[1]
    wy = quat.data[0] * quat.data[2]
    wz = quat.data[0] * quat.data[3]
    outputMatrix = matrix.Matrix(4)
    outputMatrix.matrix[0][0] = 1.0 - 2.0 * y2 - 2.0 * z2
    outputMatrix.matrix[0][1] = 2.0 * xy + 2.0 * wz
    outputMatrix.matrix[0][2] = 2.0 * xz - 2.0 * wy
    outputMatrix.matrix[0][3] = 0.0
    outputMatrix.matrix[1][0] = 2.0 * xy - 2.0 * wz
    outputMatrix.matrix[1][1] = 1.0 - 2.0 * x2 - 2.0 * z2
    outputMatrix.matrix[1][2] = 2.0 * yz + 2.0 * wx
    outputMatrix.matrix[1][3] = 0.0
    outputMatrix.matrix[2][0] = 2.0 * xz + 2.0 * wy
    outputMatrix.matrix[2][1] = 2.0 * yz - 2.0 * wx
    outputMatrix.matrix[2][2] = 1.0 - 2.0 * x2 - 2.0 * y2
    outputMatrix.matrix[2][3] = 0.0
    return outputMatrix


class Quaternion(object):
    ''' A quaternion stored as a 4-element list `data` = [w, x, y, z]. '''

    def __init__(self, data=None):
        if data is None:
            self.data = quat_identity()
        else:
            self.data = data

    def __add__(self, other):
        if isinstance(other, Quaternion):
            return Quaternion(quat_add(self.data, other.data))
        else:
            return NotImplemented

    def __iadd__(self, other):
        if isinstance(other, Quaternion):
            self.data = quat_add(self.data, other.data)
            return self
        else:
            return NotImplemented

    def __sub__(self, other):
        if isinstance(other, Quaternion):
            return Quaternion(quat_sub(self.data, other.data))
        else:
            return NotImplemented

    def __isub__(self, other):
        if isinstance(other, Quaternion):
            self.data = quat_sub(self.data, other.data)
            return self
        else:
            return NotImplemented

    def __mul__(self, other):
        if isinstance(other, Quaternion):
            return Quaternion(quat_mul_quat(self.data, other.data))
        elif isinstance(other, vector.Vector):
            return Quaternion(quat_mul_vect(self.data, other.vector))
        elif isinstance(other, float):
            return Quaternion(quat_mul_float(self.data, other))
        else:
            return NotImplemented

    def __imul__(self, other):
        if isinstance(other, Quaternion):
            self.data = quat_mul_quat(self.data, other.data)
            return self
        elif isinstance(other, vector.Vector):
            # BUGFIX: Vector stores its components in .vector (see __mul__);
            # the original passed other.data, which Vector does not have.
            self.data = quat_mul_vect(self.data, other.vector)
            return self
        elif isinstance(other, float):
            self.data = quat_mul_float(self.data, other)
            return self
        else:
            return NotImplemented

    def __div__(self, other):
        if isinstance(other, float):
            return Quaternion(quat_div_float(self.data, other))
        else:
            return NotImplemented

    def __idiv__(self, other):
        if isinstance(other, float):
            self.data = quat_div_float(self.data, other)
            return self
        else:
            return NotImplemented

    # Python 3 routes the / operator through __truediv__/__itruediv__;
    # alias them so division works under both interpreters (file uses six).
    __truediv__ = __div__
    __itruediv__ = __idiv__

    def i_negate(self):
        ''' Negate in place; returns self. '''
        self.data = quat_neg(self.data)
        return self

    def negate(self):
        ''' Return a negated copy. '''
        return Quaternion(quat_neg(self.data))

    def i_identity(self):
        ''' Reset to identity in place; returns self. '''
        self.data = quat_identity()
        return self

    def identity(self):
        ''' Return a new identity quaternion. '''
        return Quaternion(quat_identity())

    def magnitude(self):
        return quat_magnitude(self.data)

    def dot(self, quat2):
        if isinstance(quat2, Quaternion):
            return quat_dot(self.data, quat2.data)
        else:
            return NotImplemented

    def i_normalize(self):
        ''' Normalize in place; returns self. '''
        self.data = quat_normalize(self.data)
        return self

    def normalize(self):
        ''' Return a normalized copy. '''
        return Quaternion(quat_normalize(self.data))

    def i_conjugate(self):
        ''' Conjugate in place; returns self. '''
        self.data = quat_conjugate(self.data)
        return self

    def conjugate(self):
        ''' Return the conjugate as a new quaternion. '''
        return Quaternion(quat_conjugate(self.data))

    def inverse(self):
        ''' Return the inverse as a new quaternion. '''
        return Quaternion(quat_inverse(self.data))

    def pow(self, e):
        return quat_pow(self, e)

    def log(self):
        return quat_log(self)

    def lerp(self, quat1, time):
        return quat_lerp(self, quat1, time)

    def slerp(self, quat1, time):
        return quat_slerp(self, quat1, time)

    def slerp_no_invert(self, quat1, time):
        return quat_slerp_no_invert(self, quat1, time)

    def squad(self, quat1, quat2, time):
        return quat_squad(self, quat1, quat2, time)

    def toMatrix(self):
        return quat_to_matrix(self)

    # The following are used for orientation and motion
    def getForward(self):
        ''' Returns the forward vector. '''
        return quat_rotate_vector(self, vector.Vector(3, data=[0.0, 0.0, 1.0]))

    def getBack(self):
        ''' Returns the backwards vector. '''
        return quat_rotate_vector(self, vector.Vector(3, data=[0.0, 0.0, -1.0]))

    def getLeft(self):
        ''' Returns the left vector. '''
        return quat_rotate_vector(self, vector.Vector(3, data=[-1.0, 0.0, 0.0]))

    def getRight(self):
        ''' Returns the right vector. '''
        return quat_rotate_vector(self, vector.Vector(3, data=[1.0, 0.0, 0.0]))

    def getUp(self):
        ''' Returns the up vector. '''
        return quat_rotate_vector(self, vector.Vector(3, data=[0.0, 1.0, 0.0]))

    def getDown(self):
        ''' Returns the down vector. '''
        return quat_rotate_vector(self, vector.Vector(3, data=[0.0, -1.0, 0.0]))


def quat_from_matrix(matrix):
    ''' Converts a 4x4 rotational matrix to a quaternion.

    Picks the largest of w/x/y/z (via 4c^2-1 terms) for numerical
    stability, then recovers the other three components from the
    off-diagonal sums/differences. '''
    fourXSquaredMinus1 = matrix.matrix[0][0] - matrix.matrix[1][1] - matrix.matrix[2][2]
    fourYSquaredMinus1 = matrix.matrix[1][1] - matrix.matrix[0][0] - matrix.matrix[2][2]
    fourZSquaredMinus1 = matrix.matrix[2][2] - matrix.matrix[0][0] - matrix.matrix[1][1]
    fourWSquaredMinus1 = matrix.matrix[0][0] + matrix.matrix[1][1] + matrix.matrix[2][2]
    biggestIndex = 0
    fourBiggestSquaredMinus1 = fourWSquaredMinus1
    if (fourXSquaredMinus1 > fourBiggestSquaredMinus1):
        biggestIndex = 1
    elif (fourYSquaredMinus1 > fourBiggestSquaredMinus1):
        biggestIndex = 2
    elif (fourZSquaredMinus1 > fourBiggestSquaredMinus1):
        biggestIndex = 3
    biggestVal = math.sqrt(fourBiggestSquaredMinus1 + 1) * 0.5
    mult = 0.25 / biggestVal
    rquat = Quaternion()
    # BUGFIX: comparisons below were `is`, which works for small ints only
    # by CPython interning accident; use == for value comparison.
    if biggestIndex == 0:
        rquat.data[0] = biggestVal
        rquat.data[1] = (matrix.matrix[1][2] - matrix.matrix[2][1]) * mult
        rquat.data[2] = (matrix.matrix[2][0] - matrix.matrix[0][2]) * mult
        rquat.data[3] = (matrix.matrix[0][1] - matrix.matrix[1][0]) * mult
        return rquat
    if biggestIndex == 1:
        rquat.data[0] = (matrix.matrix[1][2] - matrix.matrix[2][1]) * mult
        rquat.data[1] = biggestVal
        rquat.data[2] = (matrix.matrix[0][1] + matrix.matrix[1][0]) * mult
        rquat.data[3] = (matrix.matrix[2][0] + matrix.matrix[0][2]) * mult
        return rquat
    if biggestIndex == 2:
        rquat.data[0] = (matrix.matrix[2][0] - matrix.matrix[0][2]) * mult
        rquat.data[1] = (matrix.matrix[0][1] + matrix.matrix[1][0]) * mult
        rquat.data[2] = biggestVal
        rquat.data[3] = (matrix.matrix[1][2] + matrix.matrix[2][1]) * mult
        return rquat
    if biggestIndex == 3:
        rquat.data[0] = (matrix.matrix[0][1] - matrix.matrix[1][0]) * mult
        rquat.data[1] = (matrix.matrix[2][0] + matrix.matrix[0][2]) * mult
        rquat.data[2] = (matrix.matrix[1][2] + matrix.matrix[2][1]) * mult
        rquat.data[3] = biggestVal
        return rquat
32.865854
123
0.60402
2,229
16,170
4.309107
0.08838
0.011036
0.008121
0.006663
0.570953
0.450703
0.389485
0.356065
0.300156
0.232795
0
0.052518
0.254607
16,170
491
124
32.93279
0.744379
0.095857
0
0.300836
0
0
0
0
0
0
0
0
0
1
0.169916
false
0
0.011142
0.019499
0.401114
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
1
0