column  dtype
hexsha  string
size  int64
ext  string
lang  string
max_stars_repo_path  string
max_stars_repo_name  string
max_stars_repo_head_hexsha  string
max_stars_repo_licenses  list
max_stars_count  int64
max_stars_repo_stars_event_min_datetime  string
max_stars_repo_stars_event_max_datetime  string
max_issues_repo_path  string
max_issues_repo_name  string
max_issues_repo_head_hexsha  string
max_issues_repo_licenses  list
max_issues_count  int64
max_issues_repo_issues_event_min_datetime  string
max_issues_repo_issues_event_max_datetime  string
max_forks_repo_path  string
max_forks_repo_name  string
max_forks_repo_head_hexsha  string
max_forks_repo_licenses  list
max_forks_count  int64
max_forks_repo_forks_event_min_datetime  string
max_forks_repo_forks_event_max_datetime  string
content  string
avg_line_length  float64
max_line_length  int64
alphanum_fraction  float64
qsc_code_num_words_quality_signal  int64
qsc_code_num_chars_quality_signal  float64
qsc_code_mean_word_length_quality_signal  float64
qsc_code_frac_words_unique_quality_signal  float64
qsc_code_frac_chars_top_2grams_quality_signal  float64
qsc_code_frac_chars_top_3grams_quality_signal  float64
qsc_code_frac_chars_top_4grams_quality_signal  float64
qsc_code_frac_chars_dupe_5grams_quality_signal  float64
qsc_code_frac_chars_dupe_6grams_quality_signal  float64
qsc_code_frac_chars_dupe_7grams_quality_signal  float64
qsc_code_frac_chars_dupe_8grams_quality_signal  float64
qsc_code_frac_chars_dupe_9grams_quality_signal  float64
qsc_code_frac_chars_dupe_10grams_quality_signal  float64
qsc_code_frac_chars_replacement_symbols_quality_signal  float64
qsc_code_frac_chars_digital_quality_signal  float64
qsc_code_frac_chars_whitespace_quality_signal  float64
qsc_code_size_file_byte_quality_signal  float64
qsc_code_num_lines_quality_signal  float64
qsc_code_num_chars_line_max_quality_signal  float64
qsc_code_num_chars_line_mean_quality_signal  float64
qsc_code_frac_chars_alphabet_quality_signal  float64
qsc_code_frac_chars_comments_quality_signal  float64
qsc_code_cate_xml_start_quality_signal  float64
qsc_code_frac_lines_dupe_lines_quality_signal  float64
qsc_code_cate_autogen_quality_signal  float64
qsc_code_frac_lines_long_string_quality_signal  float64
qsc_code_frac_chars_string_length_quality_signal  float64
qsc_code_frac_chars_long_word_length_quality_signal  float64
qsc_code_frac_lines_string_concat_quality_signal  float64
qsc_code_cate_encoded_data_quality_signal  float64
qsc_code_frac_chars_hex_words_quality_signal  float64
qsc_code_frac_lines_prompt_comments_quality_signal  float64
qsc_code_frac_lines_assert_quality_signal  float64
qsc_codepython_cate_ast_quality_signal  float64
qsc_codepython_frac_lines_func_ratio_quality_signal  float64
qsc_codepython_cate_var_zero_quality_signal  bool
qsc_codepython_frac_lines_pass_quality_signal  float64
qsc_codepython_frac_lines_import_quality_signal  float64
qsc_codepython_frac_lines_simplefunc_quality_signal  float64
qsc_codepython_score_lines_no_logic_quality_signal  float64
qsc_codepython_frac_lines_print_quality_signal  float64
qsc_code_num_words  int64
qsc_code_num_chars  int64
qsc_code_mean_word_length  int64
qsc_code_frac_words_unique  null
qsc_code_frac_chars_top_2grams  int64
qsc_code_frac_chars_top_3grams  int64
qsc_code_frac_chars_top_4grams  int64
qsc_code_frac_chars_dupe_5grams  int64
qsc_code_frac_chars_dupe_6grams  int64
qsc_code_frac_chars_dupe_7grams  int64
qsc_code_frac_chars_dupe_8grams  int64
qsc_code_frac_chars_dupe_9grams  int64
qsc_code_frac_chars_dupe_10grams  int64
qsc_code_frac_chars_replacement_symbols  int64
qsc_code_frac_chars_digital  int64
qsc_code_frac_chars_whitespace  int64
qsc_code_size_file_byte  int64
qsc_code_num_lines  int64
qsc_code_num_chars_line_max  int64
qsc_code_num_chars_line_mean  int64
qsc_code_frac_chars_alphabet  int64
qsc_code_frac_chars_comments  int64
qsc_code_cate_xml_start  int64
qsc_code_frac_lines_dupe_lines  int64
qsc_code_cate_autogen  int64
qsc_code_frac_lines_long_string  int64
qsc_code_frac_chars_string_length  int64
qsc_code_frac_chars_long_word_length  int64
qsc_code_frac_lines_string_concat  null
qsc_code_cate_encoded_data  int64
qsc_code_frac_chars_hex_words  int64
qsc_code_frac_lines_prompt_comments  int64
qsc_code_frac_lines_assert  int64
qsc_codepython_cate_ast  int64
qsc_codepython_frac_lines_func_ratio  int64
qsc_codepython_cate_var_zero  int64
qsc_codepython_frac_lines_pass  int64
qsc_codepython_frac_lines_import  int64
qsc_codepython_frac_lines_simplefunc  int64
qsc_codepython_score_lines_no_logic  int64
qsc_codepython_frac_lines_print  int64
effective  string
hits  int64
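A minimal sketch of how records with this schema could be loaded and filtered on a few of the quality-signal columns. It assumes the rows above are stored in a local Parquet file named quality_signals.parquet; that file name and the filter thresholds are illustrative assumptions, not part of the original dump. Uses pandas only.

import pandas as pd

# File name is an assumption for illustration; point this at the actual dump.
df = pd.read_parquet("quality_signals.parquet")

# Inspect the schema: column names and dtypes, mirroring the listing above.
print(df.dtypes)

# Example filter: keep Python files that are not flagged as autogenerated,
# have a reasonable fraction of alphabetic characters, and show little
# 10-gram duplication (thresholds are illustrative, not from the source).
mask = (
    (df["lang"] == "Python")
    & (df["qsc_code_cate_autogen_quality_signal"] == 0)
    & (df["qsc_code_frac_chars_alphabet_quality_signal"] > 0.5)
    & (df["qsc_code_frac_chars_dupe_10grams_quality_signal"] < 0.5)
)
print(df.loc[mask, ["max_stars_repo_name", "max_stars_repo_path", "size"]])

The row values below follow the column order of the schema listing, one value per line, with the content field holding the full source text of each file.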
8cffe9ff90a679a7841ffbb78e4f62598ca76361
7,071
py
Python
tests/rdfsim_tests.py
sandroacoelho/rdfsim
ba4fb142e14f60ac449004049e03d37daca0b37a
[ "Apache-2.0" ]
3
2016-01-30T05:37:06.000Z
2016-02-07T19:00:40.000Z
tests/rdfsim_tests.py
sandroacoelho/rdfsim
ba4fb142e14f60ac449004049e03d37daca0b37a
[ "Apache-2.0" ]
null
null
null
tests/rdfsim_tests.py
sandroacoelho/rdfsim
ba4fb142e14f60ac449004049e03d37daca0b37a
[ "Apache-2.0" ]
null
null
null
# Copyright (c) 2011 British Broadcasting Corporation # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from nose.tools import * import numpy as np from scipy.sparse import lil_matrix from rdfsim.space import Space Space.decay = 0.9 Space.depth = 5 def test_init(): space = Space('tests/example.n3') assert_equal(space._path_to_rdf, 'file:tests/example.n3') assert_equal(space._format, 'ntriples') assert_equal(space._property, 'http://www.w3.org/2004/02/skos/core#broader') assert_equal(space._direct_parents, { 'http://dbpedia.org/resource/Category:Categories_named_after_television_series': ['http://dbpedia.org/resource/Category:Foo'], 'http://dbpedia.org/resource/Category:Star_Trek': [ 'http://dbpedia.org/resource/Category:Categories_named_after_television_series', ], 'http://dbpedia.org/resource/Category:Futurama': [ 'http://dbpedia.org/resource/Category:Categories_named_after_television_series', 'http://dbpedia.org/resource/Category:New_York_City_in_fiction', ], }) assert_equal(space._index, { 'http://dbpedia.org/resource/Category:Categories_named_after_television_series': 0, 'http://dbpedia.org/resource/Category:New_York_City_in_fiction': 1, 'http://dbpedia.org/resource/Category:Foo': 2, }) assert_equal(space._size, 3) def test_parents(): space = Space('tests/example.n3') assert_equal(space.parents('http://dbpedia.org/resource/Category:Futurama'), [ ('http://dbpedia.org/resource/Category:Categories_named_after_television_series', 1), ('http://dbpedia.org/resource/Category:New_York_City_in_fiction', 1), ('http://dbpedia.org/resource/Category:Foo', 0.9), ]) assert_equal(space.parents('http://dbpedia.org/resource/Category:Star_Trek'), [ ('http://dbpedia.org/resource/Category:Categories_named_after_television_series', 1), ('http://dbpedia.org/resource/Category:Foo', 0.9), ]) assert_equal(space.parents('http://dbpedia.org/resource/Category:Foo'), []) space = Space('tests/london.n3') Space.max_depth = 0 assert_equal(space.parents('http://dbpedia.org/resource/Category:London'), [ ('http://dbpedia.org/resource/Category:NUTS_1_statistical_regions_of_England', 1), ('http://dbpedia.org/resource/Category:M4_corridor', 1) ]) Space.max_depth = 1 print space.parents('http://dbpedia.org/resource/Category:London') assert_equal(space.parents('http://dbpedia.org/resource/Category:London'), [ ('http://dbpedia.org/resource/Category:Regional_planning_in_England', 0.9), ('http://dbpedia.org/resource/Category:Regions_of_England', 0.9), ('http://dbpedia.org/resource/Category:NUTS_1_statistical_regions_of_the_European_Union', 0.9), ('http://dbpedia.org/resource/Category:England', 0.9), ('http://dbpedia.org/resource/Category:NUTS_1_statistical_regions_of_the_United_Kingdom', 0.9), ('http://dbpedia.org/resource/Category:Regions_of_Wales', 0.9), ('http://dbpedia.org/resource/Category:NUTS_1_statistical_regions_of_England', 1), ('http://dbpedia.org/resource/Category:Local_government_in_England', 0.9), ('http://dbpedia.org/resource/Category:M4_corridor', 1) ]) Space.max_depth = 10 def test_to_vector(): space = Space('tests/example.n3') 
np.testing.assert_array_equal(space.to_vector('http://dbpedia.org/resource/Category:Futurama').todense(), [[1/np.sqrt(2 + 0.9**2), 1/np.sqrt(2 + 0.9**2), 0.9/np.sqrt(2 + 0.9**2)]]) np.testing.assert_array_equal(space.to_vector('http://dbpedia.org/resource/Category:Star_Trek').todense(), [[1/np.sqrt(1 + 0.9**2), 0, 0.9/np.sqrt(1 + 0.9**2)]]) assert space._uri_to_vector.has_key('http://dbpedia.org/resource/Category:Futurama') # Checking that we cached the vectors when generating them assert space._uri_to_vector.has_key('http://dbpedia.org/resource/Category:Star_Trek') assert_equal(space._uri_to_vector['http://dbpedia.org/resource/Category:Futurama'], space.to_vector('http://dbpedia.org/resource/Category:Futurama')) assert_equal(space._uri_to_vector['http://dbpedia.org/resource/Category:Star_Trek'], space.to_vector('http://dbpedia.org/resource/Category:Star_Trek')) def test_cache_vectors(): space = Space('tests/example.n3') space.cache_vectors() assert space._uri_to_vector.has_key('http://dbpedia.org/resource/Category:Futurama') # Checking that we cached the vectors assert space._uri_to_vector.has_key('http://dbpedia.org/resource/Category:Star_Trek') assert_equal(space._uri_to_vector['http://dbpedia.org/resource/Category:Futurama'], space.to_vector('http://dbpedia.org/resource/Category:Futurama')) assert_equal(space._uri_to_vector['http://dbpedia.org/resource/Category:Star_Trek'], space.to_vector('http://dbpedia.org/resource/Category:Star_Trek')) def test_similarity_uri(): space = Space('tests/example.n3') np.testing.assert_allclose(space.similarity_uri('http://dbpedia.org/resource/Category:Futurama', 'http://dbpedia.org/resource/Category:Star_Trek'), (1 + 0.9 * 0.9) / (np.sqrt(2 + 0.9**2) * np.sqrt(1 + 0.9**2))) def test_similarity_all(): space = Space('tests/example.n3') m = lil_matrix((2, 3)) m[0,0] = 1 / np.sqrt(1 + 2*2 + 3*3) m[0,1] = 2 / np.sqrt(1 + 2*2 + 3*3) m[0,2] = 3 / np.sqrt(1 + 2*2 + 3*3) m[1,0] = 4 / np.sqrt(4*4 + 5*5 + 6*6) m[1,1] = 5 / np.sqrt(4*4 + 5*5 + 6*6) m[1,2] = 6 / np.sqrt(4*4 + 5*5 + 6*6) v = m[0,:] m = m.tocsr() similarities = space.similarity_all(m, v) assert_equal(similarities[0], 1) assert_equal(similarities[1], ((1*4 + 2*5 + 3*6)/(np.sqrt(1 + 2*2 + 3*3)*np.sqrt(4*4 + 5*5 + 6*6)))) def test_centroid_weighted_uris(): space = Space('tests/example.n3') centroid = space.centroid_weighted_uris([('http://dbpedia.org/resource/Category:Futurama', 2), ('http://dbpedia.org/resource/Category:Star_Trek', 1)]) np.testing.assert_allclose(np.asarray(centroid.todense()), [[(2/np.sqrt(2 + 0.9**2) + 1/np.sqrt(1 + 0.9**2))/2, (1/np.sqrt(2 + 0.9**2)), (2*0.9/np.sqrt(2 + 0.9**2) + 0.9/np.sqrt(1 + 0.9**2))/2]]) def test_sum_weighted_uris(): space = Space('tests/example.n3') s = space.sum_weighted_uris([('http://dbpedia.org/resource/Category:Futurama', 2), ('http://dbpedia.org/resource/Category:Star_Trek', 1)]) np.testing.assert_allclose(np.asarray(s.todense()), [[2/np.sqrt(2 + 0.9**2) + 1/np.sqrt(1 + 0.9**2), 2/np.sqrt(2 + 0.9**2), 2*0.9/np.sqrt(2 + 0.9**2) + 0.9/np.sqrt(1 + 0.9**2)]])
57.024194
214
0.693113
1,082
7,071
4.36414
0.159889
0.121135
0.154172
0.24227
0.751377
0.741211
0.730411
0.703304
0.65396
0.614147
0
0.039681
0.130392
7,071
123
215
57.487805
0.728249
0.093763
0
0.363636
0
0
0.465582
0.003285
0
0
0
0
0.262626
0
null
null
0
0.040404
null
null
0.010101
0
0
0
null
0
0
1
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
6
5087199248ec2e46ef992e3d5711cc3eb5d885c2
6,918
py
Python
rankers/loss/loss_functions.py
rubencart/LIIR-TextGraphs-14
272849e74ef16f1499249048a0502e6e2236756d
[ "MIT" ]
1
2021-03-17T12:36:11.000Z
2021-03-17T12:36:11.000Z
rankers/loss/loss_functions.py
rubencart/LIIR-TextGraphs-14
272849e74ef16f1499249048a0502e6e2236756d
[ "MIT" ]
null
null
null
rankers/loss/loss_functions.py
rubencart/LIIR-TextGraphs-14
272849e74ef16f1499249048a0502e6e2236756d
[ "MIT" ]
1
2021-03-23T02:31:09.000Z
2021-03-23T02:31:09.000Z
""" Code: RankNet & LambdaRank: https://github.com/haowei01/pytorch-examples Idem + more: https://github.com/allegro/allRank """ import logging import math from itertools import product import torch from torch import nn from torch.nn import MarginRankingLoss, BCEWithLogitsLoss, CrossEntropyLoss logger = logging.getLogger(__name__) class BCEWithLogitsLossWrapper(nn.Module): def __init__(self): super().__init__() self.loss = BCEWithLogitsLoss(reduction='mean') def forward(self, logits, labels): return self.loss(logits.squeeze(-1), labels) class MarginRankingLossWrapper(nn.Module): def __init__(self, margin=1.0): super().__init__() self.loss = MarginRankingLoss(margin=margin) self.margin = margin def forward(self, logits, labels): """ logits: bs x 2, scores (higher = pos) labels: bs, 0 (neg) or 1 (pos) """ preds = logits.clone() pos_idxs = (labels > 0.5).nonzero().squeeze(-1).tolist() neg_idxs = (labels < 0.5).nonzero().squeeze(-1).tolist() if not len(pos_idxs) > 0 or not len(neg_idxs) > 0: return torch.tensor(0.0) preds = torch.sub(preds[:, 1], preds[:, 0]).unsqueeze(-1) # subtract neg from pos pairs_idxs = list(product(pos_idxs, neg_idxs)) # cartesian product pred_pairs = preds[pairs_idxs, :] # shape len(pairs_idxs), 2, 1 # pred_diffs = pred_pairs[:, 0] - pred_pairs[:, 1] # shape len(pairs_idxs), 1 return self.loss(pred_pairs[:, 0], pred_pairs[:, 1], torch.ones_like(pred_pairs[:, 0])) class RankNetLoss(nn.Module): def __init__(self): super().__init__() self.weight = None self.loss = BCEWithLogitsLoss(weight=self.weight, reduction='mean') def forward(self, logits, labels): """ logits: bs x 2, scores (higher = pos) labels: bs, 0 (neg) or 1 (pos) """ preds = logits.clone() pos_idxs = (labels > 0.5).nonzero().squeeze(-1).tolist() neg_idxs = (labels < 0.5).nonzero().squeeze(-1).tolist() if not len(pos_idxs) > 0 or not len(neg_idxs) > 0: return torch.tensor(0.0) preds = torch.sub(preds[:, 1], preds[:, 0]).unsqueeze(-1) # subtract neg from pos pairs_idxs = list(product(pos_idxs, neg_idxs)) # cartesian product pred_pairs = preds[pairs_idxs, :] # shape len(pairs_idxs), 2, 1 pred_diffs = pred_pairs[:, 0] - pred_pairs[:, 1] # shape len(pairs_idxs), 1 return self.loss(pred_diffs, torch.ones_like(pred_diffs)) class BinaryNCELoss(nn.Module): def __init__(self): super().__init__() self.weight = None self.bce_with_logits_loss = BCEWithLogitsLoss(weight=self.weight, reduction='mean') def forward(self, logits_model, labels, logprobs_noise=None, logits_noise=None): """ Based on https://github.com/Stonesjtu/Pytorch-NCE/blob/master/nce/nce_loss.py . logits_model: logits (before log regr sigmoid) of all samples (pos and neg) computed by model --> probs that samples are from data distribution shape: bs x 1 labels: 0 (neg, from p_noise) or 1 (pos, from p_data) shape: bs logprobs_noise: logprobs of all samples (the same samples, both pos and neg) from noise distribution --> probs that samples are from noise distribution E.g. 
logprobs computed by model in previous iteration of SCE or logprobs of shape: bs x 1 """ if logprobs_noise is None: assert logits_noise is not None logprobs_noise = logits_noise # logits_noise = torch.log(torch.exp(logprobs_noise) / (1 - torch.exp(logprobs_noise))) if logits_model.shape[1] == 2: logits_model = torch.sub(logits_model[:, 1], logits_model[:, 0]).unsqueeze(-1) # subtract neg from pos if len(logprobs_noise.shape) < 2: logprobs_noise = logprobs_noise.unsqueeze(-1) pos_idxs = (labels > 0.5).nonzero().squeeze(-1).tolist() neg_idxs = (labels < 0.5).nonzero().squeeze(-1).tolist() if not len(pos_idxs) > 0 or not len(neg_idxs) > 0: logger.error('No positives or no negatives') return torch.tensor(0.0) noise_ratio = math.ceil(len(neg_idxs) / len(pos_idxs)) logits = logits_model - logprobs_noise - math.log(noise_ratio) # todo - gamma? see Ma & Collins , or word embeddings neg sampling papers using NCE return self.bce_with_logits_loss(logits.squeeze(-1), labels.float()) class RankingNCELoss(nn.Module): def __init__(self): super().__init__() self.weight = None self.xent_loss = CrossEntropyLoss(weight=self.weight, reduction='mean') def forward(self, logits_model, labels, logprobs_noise=None, logits_noise=None): """ Based on https://github.com/Stonesjtu/Pytorch-NCE/blob/master/nce/nce_loss.py . logits_model: logits (before log regr sigmoid) of all samples (pos and neg) computed by model --> probs that samples are from data distribution shape: bs x 1 labels: 0 (neg, from p_noise) or 1 (pos, from p_data) shape: bs logprobs_noise: logprobs of all samples (the same samples, both pos and neg) from noise distribution --> probs that samples are from noise distribution E.g. logprobs computed by model in previous iteration of SCE or logprobs of shape: bs x 1 """ # see https://arxiv.org/pdf/1809.01812.pdf , ranking version if logprobs_noise is None: assert logits_noise is not None logprobs_noise = logits_noise # logits_noise = torch.log(torch.exp(logprobs_noise) / (1 - torch.exp(logprobs_noise))) if logits_model.shape[1] == 2: logits_model = torch.sub(logits_model[:, 1], logits_model[:, 0]).unsqueeze(-1) # subtract neg from pos if len(logprobs_noise.shape) < 2: logprobs_noise = logprobs_noise.unsqueeze(-1) pos_idxs = (labels > 0.5).nonzero().squeeze(-1) # .tolist() neg_idxs = (labels < 0.5).nonzero().squeeze(-1) # .tolist() if not len(pos_idxs) > 0 or not len(neg_idxs) > 0: logger.error('No positives or no negatives') return torch.tensor(0.0) assert len(pos_idxs) == 1 # noise_ratio = math.ceil(len(neg_idxs) / len(pos_idxs)) logits = logits_model - logprobs_noise # - math.log(noise_ratio) # print(logits.unsqueeze(0).squeeze(-1), torch.where(labels > 0)[0]) return self.xent_loss(logits.unsqueeze(0).squeeze(-1), torch.where(labels > 0)[0])
40.694118
115
0.613328
910
6,918
4.495604
0.161538
0.063554
0.021511
0.023466
0.826937
0.803227
0.799316
0.789782
0.78196
0.78196
0
0.022323
0.268286
6,918
169
116
40.934911
0.785855
0.331165
0
0.639535
0
0
0.016621
0
0
0
0
0.005917
0.034884
1
0.116279
false
0
0.069767
0.011628
0.348837
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
508e2db538bffa75a2a75d1ec2711f7b7e3797b9
96
py
Python
venv/lib/python3.8/site-packages/pip/_vendor/colorama/ansitowin32.py
Retraces/UkraineBot
3d5d7f8aaa58fa0cb8b98733b8808e5dfbdb8b71
[ "MIT" ]
2
2022-03-13T01:58:52.000Z
2022-03-31T06:07:54.000Z
venv/lib/python3.8/site-packages/pip/_vendor/colorama/ansitowin32.py
DesmoSearch/Desmobot
b70b45df3485351f471080deb5c785c4bc5c4beb
[ "MIT" ]
19
2021-11-20T04:09:18.000Z
2022-03-23T15:05:55.000Z
venv/lib/python3.8/site-packages/pip/_vendor/colorama/ansitowin32.py
DesmoSearch/Desmobot
b70b45df3485351f471080deb5c785c4bc5c4beb
[ "MIT" ]
null
null
null
/home/runner/.cache/pip/pool/c9/5e/c2/12609bd7d3239c928e0d9104bcc1ff7e76c98709e9ce8e2cc59b865e60
96
96
0.895833
9
96
9.555556
1
0
0
0
0
0
0
0
0
0
0
0.416667
0
96
1
96
96
0.479167
0
0
0
0
0
0
0
0
1
0
0
0
0
null
null
0
0
null
null
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
1
0
0
0
0
0
0
0
null
1
0
0
0
1
0
0
0
0
0
0
0
0
6
50a4731a80f77edd7d5d7b9c23080936e606cbd1
26
py
Python
database/__init__.py
Nixest-Inc/Nixest
c882ae931aa826ba274e6adb98e78e115e5eb7ec
[ "MIT" ]
1
2020-05-31T18:19:54.000Z
2020-05-31T18:19:54.000Z
database/__init__.py
Nixest-Inc/Nixest
c882ae931aa826ba274e6adb98e78e115e5eb7ec
[ "MIT" ]
null
null
null
database/__init__.py
Nixest-Inc/Nixest
c882ae931aa826ba274e6adb98e78e115e5eb7ec
[ "MIT" ]
1
2020-06-03T20:11:55.000Z
2020-06-03T20:11:55.000Z
from .base import Database
26
26
0.846154
4
26
5.5
1
0
0
0
0
0
0
0
0
0
0
0
0.115385
26
1
26
26
0.956522
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
0f9b7466663921591d11f87da1afea72124e8b09
92
py
Python
markups/markups.py
caprize/jdbizParser
6db6fe045d45a5a465a52155807a61c3571fc88e
[ "Unlicense" ]
1
2020-01-10T05:07:53.000Z
2020-01-10T05:07:53.000Z
markups/markups.py
caprize/jdbizParser
6db6fe045d45a5a465a52155807a61c3571fc88e
[ "Unlicense" ]
null
null
null
markups/markups.py
caprize/jdbizParser
6db6fe045d45a5a465a52155807a61c3571fc88e
[ "Unlicense" ]
null
null
null
from telebot.types import InlineKeyboardMarkup, InlineKeyboardButton, ReplyKeyboardMarkup
46
90
0.880435
7
92
11.571429
1
0
0
0
0
0
0
0
0
0
0
0
0.086957
92
2
91
46
0.964286
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
0fab0e098b3933acc85b9b20a7af07960e7176e3
105
py
Python
starred/views.py
melvinchia3636/notesdb
fb4ec1742713501be13cac0965242da1421228bd
[ "MIT" ]
null
null
null
starred/views.py
melvinchia3636/notesdb
fb4ec1742713501be13cac0965242da1421228bd
[ "MIT" ]
null
null
null
starred/views.py
melvinchia3636/notesdb
fb4ec1742713501be13cac0965242da1421228bd
[ "MIT" ]
null
null
null
from django.shortcuts import render def HomeView(request): return render(request, 'starred/index.html')
26.25
45
0.8
14
105
6
0.857143
0
0
0
0
0
0
0
0
0
0
0
0.095238
105
4
45
26.25
0.884211
0
0
0
0
0
0.169811
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0.333333
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
1
0
0
6
0fc4a06c15ac9ef32bfd41fdab7f7c553c35175b
2,109
py
Python
test/test_api.py
MauCassabC/FinalProject_WebDev
4cd6f17c2f4b1767870e6aa249c80f8cce3d8f3d
[ "MIT" ]
null
null
null
test/test_api.py
MauCassabC/FinalProject_WebDev
4cd6f17c2f4b1767870e6aa249c80f8cce3d8f3d
[ "MIT" ]
35
2021-08-03T18:54:45.000Z
2021-08-20T04:34:54.000Z
test/test_api.py
MauCassabC/FinalProject_WebDev
4cd6f17c2f4b1767870e6aa249c80f8cce3d8f3d
[ "MIT" ]
1
2021-11-24T20:49:55.000Z
2021-11-24T20:49:55.000Z
import unittest from flask import request from werkzeug.wrappers import response from app import app class TestApi(unittest.TestCase): def test_pt_endpoint_health_returns_200(self): with app.test_client() as client: response = client.get("/health") assert response._status_code == 200 def test_pt_endpoint_login_get_returns_200(self): with app.test_client() as client: response = client.get("/login") assert response._status_code == 200 def test_pt_endpoint_login_post_returns_418(self): with app.test_client() as client: response = client.post("/login") assert response._status_code == 418 def test_pt_endpoint_register_get_returns_200(self): with app.test_client() as client: response = client.get("/register") assert response._status_code == 200 def test_pt_endpoint_register_post_returns_418(self): with app.test_client() as client: response = client.post("/register") assert response._status_code == 418 def test_pt_endpoint_dashHome_get_returns_302(self): with app.test_client() as client: response = client.get("/dash/home") assert response._status_code == 302 def test_pt_endpoint_dashTyper_get_returns_302(self): with app.test_client() as client: response = client.get("/dash/typer") assert response._status_code == 302 def test_pt_endpoint_dashSettings_get_returns_302(self): with app.test_client() as client: response = client.get("/dash/settings") assert response._status_code == 302 def test_pt_endpoint_dashSettingsEdit_get_returns_302(self): with app.test_client() as client: response = client.get("/dash/settings/edit") assert response._status_code == 302 def test_pt_endpoint_dashSignout_get_returns_302(self): with app.test_client() as client: response = client.get("/dash/signout") assert response._status_code == 302
36.362069
64
0.668563
264
2,109
5.003788
0.162879
0.05299
0.06813
0.12869
0.834217
0.781983
0.781983
0.781983
0.781983
0.548827
0
0.037712
0.245614
2,109
57
65
37
0.792583
0
0
0.444444
0
0
0.049312
0
0
0
0
0
0.222222
1
0.222222
false
0
0.088889
0
0.333333
0
0
0
0
null
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
6
0fe1d2e2f5235820436f365872a4ff90864c742f
32
py
Python
automatminer/featurization/__init__.py
ADA110/automatminer
53a4a90d55e9d0ef7f5262f2168e125b2032d857
[ "BSD-3-Clause-LBNL" ]
1
2019-05-16T20:34:54.000Z
2019-05-16T20:34:54.000Z
automatminer/featurization/__init__.py
kmu/automatminer
f39894a157dcc35a6fe94b1f747c1f06ffea9824
[ "BSD-3-Clause-LBNL" ]
null
null
null
automatminer/featurization/__init__.py
kmu/automatminer
f39894a157dcc35a6fe94b1f747c1f06ffea9824
[ "BSD-3-Clause-LBNL" ]
null
null
null
from .core import AutoFeaturizer
32
32
0.875
4
32
7
1
0
0
0
0
0
0
0
0
0
0
0
0.09375
32
1
32
32
0.965517
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
e8733d23450027ab1e93e458eef475b501d979d6
31
py
Python
utils/__init__.py
rocksat/jsis3d
d067cbe9f0141a22ff5f0b7b85946629a5b13c64
[ "MIT" ]
180
2019-04-17T02:53:07.000Z
2022-03-05T21:42:50.000Z
utils/__init__.py
PCLC7Z2/jsis3d
927251dc39f9995a0a89a130c71823e9992617cd
[ "MIT" ]
34
2019-04-26T03:12:46.000Z
2022-03-16T03:48:59.000Z
utils/__init__.py
PCLC7Z2/jsis3d
927251dc39f9995a0a89a130c71823e9992617cd
[ "MIT" ]
32
2019-04-23T02:05:09.000Z
2022-03-05T21:42:35.000Z
from .merge import block_merge
15.5
30
0.83871
5
31
5
0.8
0
0
0
0
0
0
0
0
0
0
0
0.129032
31
1
31
31
0.925926
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
e8b4574aed3cc5d12498b5b3a45c13f247c98122
29
py
Python
segmentation_rt/rs2mask/__init__.py
BrouBoni/segmentation_RT
e44f4fafe23652f3122a5e65bd8515283dcfdbe0
[ "MIT" ]
6
2021-02-11T15:59:56.000Z
2021-12-17T20:15:35.000Z
segmentation_rt/rs2mask/__init__.py
liuhd073/segmentation_RT
e44f4fafe23652f3122a5e65bd8515283dcfdbe0
[ "MIT" ]
null
null
null
segmentation_rt/rs2mask/__init__.py
liuhd073/segmentation_RT
e44f4fafe23652f3122a5e65bd8515283dcfdbe0
[ "MIT" ]
3
2021-04-09T17:08:02.000Z
2021-08-03T07:20:20.000Z
from .rs2mask import Dataset
14.5
28
0.827586
4
29
6
1
0
0
0
0
0
0
0
0
0
0
0.04
0.137931
29
1
29
29
0.92
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
2ce81627392e7ce756f62a3481bb86822893f982
187
py
Python
project_flask_todolist_01/run.py
enzo2605/HotWheels-Logistics-todoList
2cb075dbd44d071fd66b4ba83c951d4d85bfc5a2
[ "Apache-2.0" ]
null
null
null
project_flask_todolist_01/run.py
enzo2605/HotWheels-Logistics-todoList
2cb075dbd44d071fd66b4ba83c951d4d85bfc5a2
[ "Apache-2.0" ]
null
null
null
project_flask_todolist_01/run.py
enzo2605/HotWheels-Logistics-todoList
2cb075dbd44d071fd66b4ba83c951d4d85bfc5a2
[ "Apache-2.0" ]
2
2022-02-12T15:33:59.000Z
2022-02-14T15:36:31.000Z
from todolist import app, db from todolist.models import User, Task # flask shell @app.shell_context_processor def make_shell_context(): return {'db': db, 'User': User, 'Task': Task}
26.714286
49
0.737968
28
187
4.785714
0.535714
0.179104
0
0
0
0
0
0
0
0
0
0
0.144385
187
7
49
26.714286
0.8375
0.058824
0
0
0
0
0.057143
0
0
0
0
0
0
1
0.2
true
0
0.4
0.2
0.8
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
1
1
0
0
6
d707cd5349eaedeef169963862ef275d9704d0e3
96
py
Python
venv/lib/python3.8/site-packages/setuptools/extern/__init__.py
GiulianaPola/select_repeats
17a0d053d4f874e42cf654dd142168c2ec8fbd11
[ "MIT" ]
2
2022-03-13T01:58:52.000Z
2022-03-31T06:07:54.000Z
venv/lib/python3.8/site-packages/setuptools/extern/__init__.py
DesmoSearch/Desmobot
b70b45df3485351f471080deb5c785c4bc5c4beb
[ "MIT" ]
19
2021-11-20T04:09:18.000Z
2022-03-23T15:05:55.000Z
venv/lib/python3.8/site-packages/setuptools/extern/__init__.py
DesmoSearch/Desmobot
b70b45df3485351f471080deb5c785c4bc5c4beb
[ "MIT" ]
null
null
null
/home/runner/.cache/pip/pool/1e/17/fd/5bbdd6022b70f5375125f0c86fa6058e62b9e8217ad5a7ddb35320d076
96
96
0.895833
9
96
9.555556
1
0
0
0
0
0
0
0
0
0
0
0.427083
0
96
1
96
96
0.46875
0
0
0
0
0
0
0
0
1
0
0
0
0
null
null
0
0
null
null
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
1
0
0
0
0
0
0
0
null
1
0
0
0
1
0
0
0
0
0
0
0
0
6
d71913bb6bbe5c789f6a8b1526ae0cf6e7fdd15c
138
py
Python
pyspedas/mms/tests/setup_tests.py
ergsc-devel/pyspedas
43d985cbcd23c54205453b06e08f8e51d29ab435
[ "MIT" ]
75
2019-02-22T12:59:33.000Z
2022-02-26T15:33:20.000Z
pyspedas/mms/tests/setup_tests.py
ergsc-devel/pyspedas
43d985cbcd23c54205453b06e08f8e51d29ab435
[ "MIT" ]
40
2019-07-02T07:46:34.000Z
2022-02-23T21:48:50.000Z
pyspedas/mms/tests/setup_tests.py
ergsc-devel/pyspedas
43d985cbcd23c54205453b06e08f8e51d29ab435
[ "MIT" ]
43
2019-02-22T13:03:41.000Z
2022-01-24T19:26:59.000Z
import os import pickle pickle.dump({'user': '', 'passwd': ''}, open(os.sep.join([os.path.expanduser('~'), 'mms_auth_info.pkl']), 'wb'))
27.6
112
0.630435
20
138
4.25
0.8
0
0
0
0
0
0
0
0
0
0
0
0.086957
138
4
113
34.5
0.674603
0
0
0
0
0
0.217391
0
0
0
0
0
0
1
0
true
0.333333
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
6
d73295958a5618d72a891df1e4ace60e4f82cf17
1,754
py
Python
contrib/tornado/test/httputil_test.py
loggly/alertbirds-community-edition
b35f0ffbe80049dfa74d79e9e45b4cce4cdbf47a
[ "Apache-2.0" ]
2
2015-10-28T23:14:47.000Z
2015-11-27T18:00:12.000Z
tornado/test/httputil_test.py
joetyson/tornado
02ce53b1fd8b4acc4721e6616b73d11bf6c6a4fb
[ "Apache-2.0" ]
null
null
null
tornado/test/httputil_test.py
joetyson/tornado
02ce53b1fd8b4acc4721e6616b73d11bf6c6a4fb
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python from tornado.httputil import url_concat import unittest class TestUrlConcat(unittest.TestCase): def test_url_concat_no_query_params(self): url = url_concat( "https://localhost/path", {'y':'y', 'z':'z'}, ) self.assertEqual(url, "https://localhost/path?y=y&z=z") def test_url_concat_encode_args(self): url = url_concat( "https://localhost/path", {'y':'/y', 'z':'z'}, ) self.assertEqual(url, "https://localhost/path?y=%2Fy&z=z") def test_url_concat_trailing_q(self): url = url_concat( "https://localhost/path?", {'y':'y', 'z':'z'}, ) self.assertEqual(url, "https://localhost/path?y=y&z=z") def test_url_concat_q_with_no_trailing_amp(self): url = url_concat( "https://localhost/path?x", {'y':'y', 'z':'z'}, ) self.assertEqual(url, "https://localhost/path?x&y=y&z=z") def test_url_concat_trailing_amp(self): url = url_concat( "https://localhost/path?x&", {'y':'y', 'z':'z'}, ) self.assertEqual(url, "https://localhost/path?x&y=y&z=z") def test_url_concat_mult_params(self): url = url_concat( "https://localhost/path?a=1&b=2", {'y':'y', 'z':'z'}, ) self.assertEqual(url, "https://localhost/path?a=1&b=2&y=y&z=z") def test_url_concat_no_params(self): url = url_concat( "https://localhost/path?r=1&t=2", {}, ) self.assertEqual(url, "https://localhost/path?r=1&t=2")
30.77193
71
0.506271
221
1,754
3.837104
0.18552
0.159198
0.29717
0.051887
0.850236
0.826651
0.805425
0.744104
0.645047
0.645047
0
0.007538
0.31927
1,754
56
72
31.321429
0.70268
0.011403
0
0.4
0
0.022222
0.245817
0
0
0
0
0
0.155556
1
0.155556
false
0
0.044444
0
0.222222
0
0
0
0
null
0
1
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
d75ab2ba7d907267ab6752c9a6c300ec215c01f8
328
py
Python
Programming-101-v3/week7/1-Scan-Bg-Web/histogram_class.py
pepincho/Python101-and-Algo1-Courses
7cf38d26d5be5ffc1a37477ae6375a99906df9e2
[ "MIT" ]
2
2016-10-11T14:09:05.000Z
2017-01-20T19:30:34.000Z
Programming-101-v3/week7/1-Scan-Bg-Web/histogram_class.py
pepincho/HackBulgaria
7cf38d26d5be5ffc1a37477ae6375a99906df9e2
[ "MIT" ]
null
null
null
Programming-101-v3/week7/1-Scan-Bg-Web/histogram_class.py
pepincho/HackBulgaria
7cf38d26d5be5ffc1a37477ae6375a99906df9e2
[ "MIT" ]
null
null
null
class Histogram: def __init__(self): self.dict = {} def add(self, server): if server in self.dict.keys(): self.dict[server] += 1 else: self.dict[server] = 1 def count(self, server): return self.dict[server] def get_dict(self): return self.dict
19.294118
38
0.536585
41
328
4.170732
0.390244
0.280702
0.245614
0.175439
0
0
0
0
0
0
0
0.009346
0.347561
328
16
39
20.5
0.78972
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0
0.166667
0.583333
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
6
ad48249189f0ea25ff8f925d616bea8dca31a7cd
32
py
Python
my-work/deepbi/parse_test_image.py
ZexuanTHU/zexuan-awesome-ml
4f56f4d76aa6fe30b0a4d57a749289b28595aff5
[ "MIT" ]
1
2018-04-11T09:25:16.000Z
2018-04-11T09:25:16.000Z
my-work/deepbi/parse_test_image.py
ZexuanTHU/zexuan-awesome-ml
4f56f4d76aa6fe30b0a4d57a749289b28595aff5
[ "MIT" ]
null
null
null
my-work/deepbi/parse_test_image.py
ZexuanTHU/zexuan-awesome-ml
4f56f4d76aa6fe30b0a4d57a749289b28595aff5
[ "MIT" ]
null
null
null
import torch import torchvision
10.666667
18
0.875
4
32
7
0.75
0
0
0
0
0
0
0
0
0
0
0
0.125
32
2
19
16
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
ad6bbc5d1ffa16dda8d42f4b09da154ff4c0f426
302
py
Python
src/masonite/orm/commands/__init__.py
vaibhavmule/orm
8eb7b4667dc97870df46ef7a6724b21d5fb58fdb
[ "MIT" ]
null
null
null
src/masonite/orm/commands/__init__.py
vaibhavmule/orm
8eb7b4667dc97870df46ef7a6724b21d5fb58fdb
[ "MIT" ]
null
null
null
src/masonite/orm/commands/__init__.py
vaibhavmule/orm
8eb7b4667dc97870df46ef7a6724b21d5fb58fdb
[ "MIT" ]
null
null
null
from .MigrateCommand import MigrateCommand from .MigrateRollbackCommand import MigrateRollbackCommand from .MigrateRefreshCommand import MigrateRefreshCommand from .MakeMigrationCommand import MakeMigrationCommand from .MakeSeedCommand import MakeSeedCommand from .SeedRunCommand import SeedRunCommand
43.142857
58
0.900662
24
302
11.333333
0.333333
0
0
0
0
0
0
0
0
0
0
0
0.07947
302
6
59
50.333333
0.978417
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
ad7927b5f473231935ff382d3850aea2736a8a92
217
py
Python
djangobench/benchmarks/query_select_related/models.py
Bouke/djangobench
94fc28d99f95c65d26d0fad8af44e46c49282220
[ "BSD-3-Clause" ]
3
2016-11-27T22:25:34.000Z
2018-12-12T20:06:40.000Z
djangobench/benchmarks/query_select_related/models.py
Bouke/djangobench
94fc28d99f95c65d26d0fad8af44e46c49282220
[ "BSD-3-Clause" ]
null
null
null
djangobench/benchmarks/query_select_related/models.py
Bouke/djangobench
94fc28d99f95c65d26d0fad8af44e46c49282220
[ "BSD-3-Clause" ]
null
null
null
from django.db import models class Book(models.Model): title = models.CharField(max_length=100) author = models.ForeignKey('Author') class Author(models.Model): author = models.CharField(max_length=100)
24.111111
45
0.741935
29
217
5.482759
0.517241
0.226415
0.226415
0.301887
0.339623
0
0
0
0
0
0
0.032258
0.142857
217
8
46
27.125
0.822581
0
0
0
0
0
0.02765
0
0
0
0
0
0
1
0
false
0
0.166667
0
1
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
6
ad83f33d1bef168f3406e660ecd42aaa4c69d605
39
py
Python
Introduction/Our first program/hello_world.py
warlinx/Introduction_to_Python
4f10607d879667292a7553eeeff322207c06054b
[ "MIT" ]
null
null
null
Introduction/Our first program/hello_world.py
warlinx/Introduction_to_Python
4f10607d879667292a7553eeeff322207c06054b
[ "MIT" ]
null
null
null
Introduction/Our first program/hello_world.py
warlinx/Introduction_to_Python
4f10607d879667292a7553eeeff322207c06054b
[ "MIT" ]
null
null
null
print("Hello, world! My name is war1")
19.5
38
0.692308
7
39
3.857143
1
0
0
0
0
0
0
0
0
0
0
0.030303
0.153846
39
1
39
39
0.787879
0
0
0
0
0
0.74359
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
6
d118ac11d6bf34cc8d17bbe7e1f1c39641281013
93
py
Python
app/models.py
walteroliveira95/devops-sample-vestibulares
7475162514b3226e833c21526f864169d716bdb1
[ "MIT" ]
null
null
null
app/models.py
walteroliveira95/devops-sample-vestibulares
7475162514b3226e833c21526f864169d716bdb1
[ "MIT" ]
null
null
null
app/models.py
walteroliveira95/devops-sample-vestibulares
7475162514b3226e833c21526f864169d716bdb1
[ "MIT" ]
null
null
null
""" Definition of models. """ from django.db import models class mod(models.Model): pass
10.333333
28
0.698925
13
93
5
0.846154
0
0
0
0
0
0
0
0
0
0
0
0.172043
93
8
29
11.625
0.844156
0.225806
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.333333
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
6
d14faf467fd5ade2e316aae0516ca2be597a435b
207
py
Python
coffeelist/admin.py
knedlsepp/coffeemachine
ce82e3d95fbc3112680dbc63961b96834381804a
[ "MIT" ]
null
null
null
coffeelist/admin.py
knedlsepp/coffeemachine
ce82e3d95fbc3112680dbc63961b96834381804a
[ "MIT" ]
null
null
null
coffeelist/admin.py
knedlsepp/coffeemachine
ce82e3d95fbc3112680dbc63961b96834381804a
[ "MIT" ]
null
null
null
from django.contrib import admin # Register your models here. from coffeelist.models import * admin.site.register(Tag) admin.site.register(Purchase) admin.site.register(Deposit) admin.site.register(Price)
20.7
32
0.806763
29
207
5.758621
0.517241
0.215569
0.407186
0
0
0
0
0
0
0
0
0
0.091787
207
9
33
23
0.888298
0.125604
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
6
66f4cc45ced9d1006e358a45f1d456fa79eb07fd
214
py
Python
ros_build_assistant/parser/abstract_file_description.py
arjo129/ros_build_assistant
9c8420230713a43f73390d75d19bd7a7a751474d
[ "MIT" ]
null
null
null
ros_build_assistant/parser/abstract_file_description.py
arjo129/ros_build_assistant
9c8420230713a43f73390d75d19bd7a7a751474d
[ "MIT" ]
null
null
null
ros_build_assistant/parser/abstract_file_description.py
arjo129/ros_build_assistant
9c8420230713a43f73390d75d19bd7a7a751474d
[ "MIT" ]
null
null
null
class AbstractFileDescription: def __init__(self): self.backend = None def get_dependencies(self): pass def set_backend(self, backend): self.backend = backend
19.454545
35
0.598131
21
214
5.809524
0.52381
0.270492
0.295082
0
0
0
0
0
0
0
0
0
0.331776
214
11
36
19.454545
0.853147
0
0
0
0
0
0
0
0
0
0
0
0
1
0.428571
false
0.142857
0
0
0.571429
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
6
66fe485f26d374bfa104425f3029d0b2d10a6fb1
34
py
Python
dask_ml/joblib.py
laprej/dask-ml
78b1d942eae14db442a744f8812c3e94a8f31272
[ "BSD-3-Clause" ]
3
2017-06-13T22:36:45.000Z
2017-09-20T16:08:47.000Z
dask_ml/joblib.py
laprej/dask-ml
78b1d942eae14db442a744f8812c3e94a8f31272
[ "BSD-3-Clause" ]
null
null
null
dask_ml/joblib.py
laprej/dask-ml
78b1d942eae14db442a744f8812c3e94a8f31272
[ "BSD-3-Clause" ]
1
2019-12-03T13:23:52.000Z
2019-12-03T13:23:52.000Z
import distributed.joblib # noqa
17
33
0.794118
4
34
6.75
1
0
0
0
0
0
0
0
0
0
0
0
0.147059
34
1
34
34
0.931034
0.117647
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
0f0863cc3c719893a2a10980e30c6214ff232022
112
py
Python
azbankintro/exceptions/__init__.py
mavenium/az-iranian-bank-intro
66b7043c1f7b6c5f119b58d3a4c9bb2ccabf7e13
[ "MIT" ]
15
2021-02-03T06:17:33.000Z
2021-12-17T15:42:40.000Z
azbankintro/exceptions/__init__.py
mavenium/az-iranian-bank-intro
66b7043c1f7b6c5f119b58d3a4c9bb2ccabf7e13
[ "MIT" ]
null
null
null
azbankintro/exceptions/__init__.py
mavenium/az-iranian-bank-intro
66b7043c1f7b6c5f119b58d3a4c9bb2ccabf7e13
[ "MIT" ]
4
2021-06-30T18:09:05.000Z
2022-01-24T05:14:49.000Z
from .cards import CardValidationException from .iban import IBANValidationException, BankDoesNotExistException
37.333333
68
0.892857
9
112
11.111111
0.777778
0
0
0
0
0
0
0
0
0
0
0
0.080357
112
2
69
56
0.970874
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
0f862a8c8c7b4efb75b582caef0c507fc8f410a4
1,919
py
Python
Engine/Source/Program/AnrealBuildTool/AnrealConfigMapper.py
zxwnstn/AnrealEngine
bd751b07fbb3301a6aa4f4a651141441a25f2f34
[ "Apache-2.0" ]
1
2021-06-15T14:12:32.000Z
2021-06-15T14:12:32.000Z
Engine/Source/Program/AnrealBuildTool/AnrealConfigMapper.py
zxwnstn/AnrealEngine
bd751b07fbb3301a6aa4f4a651141441a25f2f34
[ "Apache-2.0" ]
4
2021-06-15T15:42:14.000Z
2021-06-29T16:23:23.000Z
Engine/Source/Program/AnrealBuildTool/AnrealConfigMapper.py
zxwnstn/AnrealEngine
bd751b07fbb3301a6aa4f4a651141441a25f2f34
[ "Apache-2.0" ]
1
2021-06-15T15:30:51.000Z
2021-06-15T15:30:51.000Z
import os import sys import Anreal import json class MSVC2017ConfigMapper(Anreal.ConfigMapper) : def __init__(self) : Anreal.ConfigMapper.__init__(self) self.Compiler = "vs2017" def MapNativeBuildOpt(self) : with open(Anreal.ConfigPath + "/Build/Platform/MSVC.json") as MSVCConfigJson : MSVCConfigs = json.load(MSVCConfigJson) self.NativeBuildOptions["CL"] = [] self.NativeBuildOptions["Link"] = [] CLOpts = MSVCConfigs[self.Compiler]["CL"] LinkOpts = MSVCConfigs[self.Compiler]["Link"] for AbstractedOption in self.AbstractedOptions : if AbstractedOption in CLOpts : self.NativeBuildOptions["CL"].append(CLOpts[AbstractedOption]) elif AbstractedOption in LinkOpts : self.NativeBuildOptions["Link"].append(LinkOpts[AbstractedOption]) class MSVC2019ConfigMapper(Anreal.ConfigMapper) : def __init__(self) : Anreal.ConfigMapper.__init__(self) self.Compiler = "vs2019" def MapNativeBuildOpt(self) : with open(Anreal.ConfigPath + "/Build/Platform/MSVC.json") as MSVCConfigJson : MSVCConfigs = json.load(MSVCConfigJson) self.NativeBuildOptions["CL"] = [] self.NativeBuildOptions["Link"] = [] CLOpts = MSVCConfigs[self.Compiler]["CL"] LinkOpts = MSVCConfigs[self.Compiler]["Link"] for AbstractedOption in self.AbstractedOptions : if AbstractedOption in CLOpts : self.NativeBuildOptions["CL"].append(CLOpts[AbstractedOption]) elif AbstractedOption in LinkOpts : self.NativeBuildOptions["Link"].append(LinkOpts[AbstractedOption]) def GetConfigMapper(Compiler) : if Compiler == "vs2017" : return MSVC2017ConfigMapper() if Compiler == "vs2019" : return MSVC2019ConfigMapper()
36.207547
86
0.646691
165
1,919
7.424242
0.242424
0.143673
0.078367
0.040816
0.827755
0.827755
0.827755
0.827755
0.827755
0.827755
0
0.022284
0.251694
1,919
53
87
36.207547
0.83078
0
0
0.682927
0
0
0.057292
0.026042
0
0
0
0
0
1
0.121951
false
0
0.097561
0
0.317073
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
7e1bf54127ec7c31e5408b968be3621e7a727261
529
py
Python
python/exercise/print.py
nullne/million
264ba32e2ba70400bc5971e04baaf5681e4aa839
[ "MIT" ]
1
2015-10-21T01:36:51.000Z
2015-10-21T01:36:51.000Z
python/exercise/print.py
nullne/million
264ba32e2ba70400bc5971e04baaf5681e4aa839
[ "MIT" ]
null
null
null
python/exercise/print.py
nullne/million
264ba32e2ba70400bc5971e04baaf5681e4aa839
[ "MIT" ]
null
null
null
print("ID:\t%-15sHostname:\t%-20sIP:\t%-20sSN:\t%-30s" % ("4490274", "CNC-XD-d-3W6", "221.204.22.7", "2102310VTP10E2000451")), print("Bond0Mac:\t%-20s" % "AC:85:3D:9A:9B:1A") print("ID:\t%-15sHostname:\t%-20sIP:\t%-20sSN:\t%-30s" % ("4274", "CNC--3W6", "221.22.7", "2102310E2000451")), print("Bond0Mac:\t%-20s" % "AC:85:3D:9:1A") print("ID:\t%-15sHostname:\t%-20sIP:\t%-20sSN:\t%-30s" % ("42322374", "CNC--3asdfasW6", "221343434.22.7", "213223232302310E2000451")), print("Bond0Mac:\t%-20s" % "AC:85:3D:9:1A")
44.083333
83
0.599244
82
529
3.865854
0.390244
0.066246
0.07571
0.179811
0.574132
0.574132
0.574132
0.501577
0.501577
0.343849
0
0.317427
0.088847
529
11
84
48.090909
0.340249
0
0
0.555556
0
0
0.706994
0.304348
0
0
0
0
0
1
0
true
0
0
0
0
0.666667
0
0
0
null
0
0
1
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
1
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
6
7e505f87bf9b60df10562671e894236046dc957b
2,061
py
Python
F29.BioEntity/WebAPI/hpo_api.py
foundation29org/F29.BioEntity
531947fb85465f363e63e268b9e3ca17283d76dd
[ "MIT" ]
null
null
null
F29.BioEntity/WebAPI/hpo_api.py
foundation29org/F29.BioEntity
531947fb85465f363e63e268b9e3ca17283d76dd
[ "MIT" ]
null
null
null
F29.BioEntity/WebAPI/hpo_api.py
foundation29org/F29.BioEntity
531947fb85465f363e63e268b9e3ca17283d76dd
[ "MIT" ]
null
null
null
from flask import current_app, request, make_response, jsonify from flask_restplus import Resource from ._api import * ''' Phenotype Successors/Predecessors ''' @API.route('/phenotype/successors/<string:ids>') @API.param('ids', 'Phenotype IDs') class phenotypes_successors(Resource): def get(self, ids): ids = [id.strip() for id in ids.split(',')] depth = int(request.args.get('depth') or 1) bio = get_bio_phens('en') res = bio.Phens.successors(ids, depth) return jsonify(res) @API.route('/phenotype/successors') class phenotypes_post(Resource): def post(self): ids = json.loads(request.data) depth = int(request.args.get('depth') or 1) bio = get_bio_phens('en') res = bio.Phens.successors(ids, depth) return jsonify(res) @API.route('/phenotype/predecessors/<string:ids>') @API.param('ids', 'Phenotype IDs') class phenotypes_predecessors(Resource): def get(self, ids): ids = [id.strip() for id in ids.split(',')] depth = int(request.args.get('depth') or 1) bio = get_bio_phens('en') res = bio.Phens.predecessors(ids, depth) return jsonify(res) @API.route('/phenotype/predecessors') class phenotypes_predecessors_post(Resource): def post(self): ids = json.loads(request.data) depth = int(request.args.get('depth') or 1) bio = get_bio_phens('en') res = bio.Phens.predecessors(ids, depth) return jsonify(res) ''' Validation ''' @API.route('/phenotype/validation/<string:ids>') @API.param('ids', 'Phenotype IDs') class phenotype_validation(Resource): def get(self, ids): ids = [id.strip() for id in ids.split(',')] bio = get_bio_phens('en') res = bio.Phens.validate_terms(ids) return jsonify(res) @API.route('/phenotype/validation') class phenotype_validation_post(Resource): def post(self): ids = json.loads(request.data) bio = get_bio_phens('en') res = bio.Phens.validate_terms(ids) return jsonify(res)
32.714286
62
0.644347
268
2,061
4.858209
0.171642
0.073733
0.078341
0.064516
0.756528
0.756528
0.743472
0.743472
0.715054
0.611367
0
0.002446
0.206696
2,061
62
63
33.241935
0.793884
0
0
0.711538
0
0
0.126379
0.084754
0
0
0
0
0
1
0.115385
false
0
0.057692
0
0.403846
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
7e8258548dccd8d767f4f933a15a016baa763f36
2,734
py
Python
tests/test_GpOptimiser.py
DMGREENHOUSE/inference-tools
4b007cdcb6ae31dad6a5edf6cb50b6a9120c27e7
[ "MIT" ]
12
2019-07-05T07:46:35.000Z
2022-02-08T12:23:06.000Z
tests/test_GpOptimiser.py
DMGREENHOUSE/inference-tools
4b007cdcb6ae31dad6a5edf6cb50b6a9120c27e7
[ "MIT" ]
6
2020-01-22T15:54:59.000Z
2021-11-05T11:02:51.000Z
tests/test_GpOptimiser.py
DMGREENHOUSE/inference-tools
4b007cdcb6ae31dad6a5edf6cb50b6a9120c27e7
[ "MIT" ]
2
2020-03-17T15:17:39.000Z
2022-02-10T15:31:51.000Z
import numpy as np from inference.gp import ( GpOptimiser, ExpectedImprovement, UpperConfidenceBound, MaxVariance, ) import pytest def search_function_1d(x): return np.sin(0.5 * x) + 3 / (1 + (x - 1) ** 2) def search_function_2d(v): x, y = v z = ((x - 1) / 2) ** 2 + ((y + 3) / 1.5) ** 2 return np.sin(0.5 * x) + np.cos(0.4 * y) + 5 / (1 + z) @pytest.mark.parametrize( "acq_func", [ExpectedImprovement, UpperConfidenceBound, MaxVariance] ) def test_bfgs_1d(acq_func): x = [-8, -6, 8] y = [search_function_1d(k) for k in x] GP = GpOptimiser(x, y, bounds=[(-8.0, 8.0)], acquisition=acq_func, optimizer="bfgs") for i in range(3): new_x = GP.propose_evaluation() new_y = search_function_1d(new_x) GP.add_evaluation(new_x, new_y) x_array = np.array(GP.x) assert len(GP.y) == len(x) + 3 assert all((x_array >= -8) & (x_array <= 8)) @pytest.mark.parametrize( "acq_func", [ExpectedImprovement, UpperConfidenceBound, MaxVariance] ) def test_diffev_1d(acq_func): x = [-8, -6, 8] y = [search_function_1d(k) for k in x] GP = GpOptimiser( x, y, bounds=[(-8.0, 8.0)], acquisition=acq_func, optimizer="diffev" ) for i in range(3): new_x = GP.propose_evaluation() new_y = search_function_1d(new_x) GP.add_evaluation(new_x, new_y) x_array = np.array(GP.x) assert len(GP.y) == len(x) + 3 assert all((x_array >= -8) & (x_array <= 8)) @pytest.mark.parametrize( "acq_func", [ExpectedImprovement, UpperConfidenceBound, MaxVariance] ) def test_bfgs_2d(acq_func): x = [(-8, -8), (8, -8), (-8, 8), (8, 8), (0, 0)] y = [search_function_2d(k) for k in x] GP = GpOptimiser( x, y, bounds=[(-8, 8), (-8, 8)], acquisition=acq_func, optimizer="bfgs" ) for i in range(3): new_x = GP.propose_evaluation() new_y = search_function_2d(new_x) GP.add_evaluation(new_x, new_y) x_array = np.array(GP.x) assert len(GP.y) == len(x) + 3 assert all((x_array[:, 0] >= -8) & (x_array[:, 0] <= 8)) @pytest.mark.parametrize( "acq_func", [ExpectedImprovement, UpperConfidenceBound, MaxVariance] ) def test_diffev_2d(acq_func): x = [(-8, -8), (8, -8), (-8, 8), (8, 8), (0, 0)] y = [search_function_2d(k) for k in x] GP = GpOptimiser( x, y, bounds=[(-8, 8), (-8, 8)], acquisition=acq_func, optimizer="diffev", ) for i in range(3): new_x = GP.propose_evaluation() new_y = search_function_2d(new_x) GP.add_evaluation(new_x, new_y) x_array = np.array(GP.x) assert len(GP.y) == len(x) + 3 assert all((x_array[:, 0] >= -8) & (x_array[:, 0] <= 8))
27.069307
88
0.58376
424
2,734
3.575472
0.129717
0.026385
0.031662
0.031662
0.882586
0.882586
0.864116
0.864116
0.864116
0.864116
0
0.045983
0.244331
2,734
100
89
27.34
0.687803
0
0
0.5875
0
0
0.01902
0
0
0
0
0
0.1
1
0.075
false
0
0.0375
0.0125
0.1375
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
7e8f93abfcfafe6497a935cd9efad51541432329
135
py
Python
sqlserverport/__init__.py
gordthompson/sqlserverport
c7829dfa3e1327353b3b4f6078b0bc2a00166ca7
[ "Apache-2.0" ]
16
2017-04-19T07:47:57.000Z
2021-11-27T23:54:49.000Z
sqlserverport/__init__.py
gordthompson/sqlserverport
c7829dfa3e1327353b3b4f6078b0bc2a00166ca7
[ "Apache-2.0" ]
1
2020-05-21T13:09:21.000Z
2020-05-21T13:41:56.000Z
sqlserverport/__init__.py
gordthompson/sqlserverport
c7829dfa3e1327353b3b4f6078b0bc2a00166ca7
[ "Apache-2.0" ]
7
2018-07-26T05:42:58.000Z
2020-09-02T09:31:19.000Z
from .sqlserverport import lookup from .sqlserverport import BrowserError from .sqlserverport import NoTcpError __version__ = "1.0.1"
22.5
39
0.822222
16
135
6.6875
0.5625
0.476636
0.64486
0
0
0
0
0
0
0
0
0.02521
0.118519
135
5
40
27
0.87395
0
0
0
0
0
0.037037
0
0
0
0
0
0
1
0
false
0
0.75
0
0.75
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
6
7ea63ebc8ed06d91964f1f681d83667a2db560b5
19
py
Python
src/infstat/test.py
BALAJI24092001/vizinfstat
ba6f7c1278c5e82e300329a212594f0a72354c29
[ "BSD-3-Clause" ]
null
null
null
src/infstat/test.py
BALAJI24092001/vizinfstat
ba6f7c1278c5e82e300329a212594f0a72354c29
[ "BSD-3-Clause" ]
null
null
null
src/infstat/test.py
BALAJI24092001/vizinfstat
ba6f7c1278c5e82e300329a212594f0a72354c29
[ "BSD-3-Clause" ]
null
null
null
import anova as av
9.5
18
0.789474
4
19
3.75
1
0
0
0
0
0
0
0
0
0
0
0
0.210526
19
1
19
19
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
0e29bd72d948b4e47dc44c437f583b5f2df853fd
174
py
Python
pyavd/Models/Components/__init__.py
AVD-2021/PyAVD
a3a45d97199e80bf98560b08bcd09708c1a0513a
[ "Apache-2.0" ]
null
null
null
pyavd/Models/Components/__init__.py
AVD-2021/PyAVD
a3a45d97199e80bf98560b08bcd09708c1a0513a
[ "Apache-2.0" ]
null
null
null
pyavd/Models/Components/__init__.py
AVD-2021/PyAVD
a3a45d97199e80bf98560b08bcd09708c1a0513a
[ "Apache-2.0" ]
null
null
null
from .Aircraft import * from .Empennage import * from .Engine import * from .Fuselage import * from .Wing import * from .UC import *
24.857143
28
0.557471
18
174
5.388889
0.444444
0.515464
0
0
0
0
0
0
0
0
0
0
0.373563
174
6
29
29
0.889908
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
0e3aa8dae125abf3fe205943ee8384a995c388b0
16,202
py
Python
tests/devices/standards/trafic/chassis/test_autoload_structure.py
QualiSystems/cloudshell-networking-devices
f316cefca174975424ec21854b672335feaf8f87
[ "Apache-2.0" ]
null
null
null
tests/devices/standards/trafic/chassis/test_autoload_structure.py
QualiSystems/cloudshell-networking-devices
f316cefca174975424ec21854b672335feaf8f87
[ "Apache-2.0" ]
34
2016-11-28T10:52:44.000Z
2019-10-01T08:52:59.000Z
tests/devices/standards/trafic/chassis/test_autoload_structure.py
QualiSystems/cloudshell-networking-devices
f316cefca174975424ec21854b672335feaf8f87
[ "Apache-2.0" ]
1
2017-05-23T08:46:45.000Z
2017-05-23T08:46:45.000Z
import unittest from cloudshell.devices.standards.traffic.chassis.autoload_structure import TrafficGeneratorChassis, \ AVAILABLE_SHELL_TYPES, GenericTrafficGeneratorModule, GenericTrafficGeneratorPort, GenericPowerPort class TestTrafficGeneratorChassis(unittest.TestCase): def setUp(self): self.shell_name = "test shell name" self.name = "test name" self.unique_id = "test unique id" self.shell_type = AVAILABLE_SHELL_TYPES[-1] self.resource = TrafficGeneratorChassis(shell_name=self.shell_name, name=self.name, unique_id=self.unique_id, shell_type=self.shell_type) def test_generic_resource_no_shell_name(self): name = "test name" unique_id = "test unique id" shell_type = "" resource = TrafficGeneratorChassis(shell_name="", name=name, unique_id=unique_id, shell_type=shell_type) self.assertEqual(resource.shell_name, "") self.assertEqual(resource.shell_type, "") def test_model_name_getter(self): expected_val = "test value" self.resource.attributes = { "{}{}".format(self.resource.shell_type, "Model Name"): expected_val } # act result = self.resource.model_name # verify self.assertEqual(result, expected_val) def test_model_name_setter(self): attr_value = "test value" # act self.resource.model_name = attr_value # verify attr_key = "{}{}".format(self.resource.shell_type, "Model Name") self.assertIn(attr_key, self.resource.attributes) self.assertEqual(attr_value, self.resource.attributes[attr_key]) def test_serial_number_getter(self): """Check that property will return needed attribute value from the internal attributes dictionary""" expected_val = "test value" self.resource.attributes = { "{}{}".format(self.resource.shell_type, "Serial Number"): expected_val } # act result = self.resource.serial_number # verify self.assertEqual(result, expected_val) def test_serial_number_setter(self): """Check that property setter will correctly add attribute value into the internal attributes dictionary""" attr_value = "test value" # act self.resource.serial_number = attr_value # verify attr_key = "{}{}".format(self.resource.shell_type, "Serial Number") self.assertIn(attr_key, self.resource.attributes) self.assertEqual(attr_value, self.resource.attributes[attr_key]) def test_server_description_getter(self): """Check that property will return needed attribute value from the internal attributes dictionary""" expected_val = "test value" self.resource.attributes = { "{}{}".format(self.resource.shell_type, "Server Description"): expected_val } # act result = self.resource.server_description # verify self.assertEqual(result, expected_val) def test_server_description_setter(self): """Check that property setter will correctly add attribute value into the internal attributes dictionary""" attr_value = "test value" # act self.resource.server_description = attr_value # verify attr_key = "{}{}".format(self.resource.shell_type, "Server Description") self.assertIn(attr_key, self.resource.attributes) self.assertEqual(attr_value, self.resource.attributes[attr_key]) def test_vendor_getter(self): """Check that property will return needed attribute value from the internal attributes dictionary""" expected_val = "test value" self.resource.attributes = { "{}{}".format(self.resource.shell_type, "Vendor"): expected_val } # act result = self.resource.vendor # verify self.assertEqual(result, expected_val) def test_vendor_setter(self): """Check that property setter will correctly add attribute value into the internal attributes dictionary""" attr_value = "test value" # act self.resource.vendor = attr_value # verify attr_key = 
"{}{}".format(self.resource.shell_type, "Vendor") self.assertIn(attr_key, self.resource.attributes) self.assertEqual(attr_value, self.resource.attributes[attr_key]) def test_version_getter(self): """Check that property will return needed attribute value from the internal attributes dictionary""" expected_val = "test value" self.resource.attributes = { "{}{}".format(self.resource.shell_type, "Version"): expected_val } # act result = self.resource.version # verify self.assertEqual(result, expected_val) def test_version_setter(self): """Check that property setter will correctly add attribute value into the internal attributes dictionary""" attr_value = "test value" # act self.resource.version = attr_value # verify attr_key = "{}{}".format(self.resource.shell_type, "Version") self.assertIn(attr_key, self.resource.attributes) self.assertEqual(attr_value, self.resource.attributes[attr_key]) def test_raise_exception_if_unavailable_shell_type(self): shell_type = 'unavailable_shell_type' self.assertRaisesRegexp( Exception, 'Unavailable shell type', TrafficGeneratorChassis, self.shell_name, self.name, self.unique_id, shell_type, ) class TestGenericTrafficGeneratorModule(unittest.TestCase): def setUp(self): self.shell_name = "test shell name" self.name = "test name" self.unique_id = "test unique id" self.resource = GenericTrafficGeneratorModule(shell_name=self.shell_name, name=self.name, unique_id=self.unique_id) def test_model_name_getter(self): """Check that property will return needed attribute value from the internal attributes dictionary""" expected_val = "test value" self.resource.attributes = { "{}{}".format(self.resource.namespace, "Model Name"): expected_val } # act result = self.resource.model_name # verify self.assertEqual(result, expected_val) def test_model_name_setter(self): """Check that property setter will correctly add attribute value into the internal attributes dictionary""" attr_value = "test value" # act self.resource.model_name = attr_value # verify attr_key = "{}{}".format(self.resource.namespace, "Model Name") self.assertIn(attr_key, self.resource.attributes) self.assertEqual(attr_value, self.resource.attributes[attr_key]) def test_version_getter(self): """Check that property will return needed attribute value from the internal attributes dictionary""" expected_val = "test value" self.resource.attributes = { "{}{}".format(self.resource.namespace, "Version"): expected_val } # act result = self.resource.version # verify self.assertEqual(result, expected_val) def test_version_setter(self): """Check that property setter will correctly add attribute value into the internal attributes dictionary""" attr_value = "test value" # act self.resource.version = attr_value # verify attr_key = "{}{}".format(self.resource.namespace, "Version") self.assertIn(attr_key, self.resource.attributes) self.assertEqual(attr_value, self.resource.attributes[attr_key]) def test_serial_number_getter(self): """Check that property will return needed attribute value from the internal attributes dictionary""" expected_val = "test value" self.resource.attributes = { "{}{}".format(self.resource.namespace, "Serial Number"): expected_val } # act result = self.resource.serial_number # verify self.assertEqual(result, expected_val) def test_serial_number_setter(self): """Check that property setter will correctly add attribute value into the internal attributes dictionary""" attr_value = "test value" # act self.resource.serial_number = attr_value # verify attr_key = "{}{}".format(self.resource.namespace, "Serial 
Number") self.assertIn(attr_key, self.resource.attributes) self.assertEqual(attr_value, self.resource.attributes[attr_key]) class TestGenericTrafficGeneratorPort(unittest.TestCase): def setUp(self): self.shell_name = "test shell name" self.name = "test name" self.unique_id = "test unique id" self.resource = GenericTrafficGeneratorPort(shell_name=self.shell_name, name=self.name, unique_id=self.unique_id) def test_media_type_getter(self): """Check that property will return needed attribute value from the internal attributes dictionary""" expected_val = "test value" self.resource.attributes = { "{}{}".format(self.resource.namespace, "Media Type"): expected_val } # act result = self.resource.media_type # verify self.assertEqual(result, expected_val) def test_media_type_setter(self): """Check that property setter will correctly add attribute value into the internal attributes dictionary""" attr_value = "test value" # act self.resource.media_type = attr_value # verify attr_key = "{}{}".format(self.resource.namespace, "Media Type") self.assertIn(attr_key, self.resource.attributes) self.assertEqual(attr_value, self.resource.attributes[attr_key]) def test_configured_controllers_getter(self): """Check that property will return needed attribute value from the internal attributes dictionary""" expected_val = "test value" self.resource.attributes = { "{}{}".format(self.resource.namespace, "Configured Controllers"): expected_val } # act result = self.resource.configured_controllers # verify self.assertEqual(result, expected_val) def test_configured_controllers_setter(self): """Check that property setter will correctly add attribute value into the internal attributes dictionary""" attr_value = "test value" # act self.resource.configured_controllers = attr_value # verify attr_key = "{}{}".format(self.resource.namespace, "Configured Controllers") self.assertIn(attr_key, self.resource.attributes) self.assertEqual(attr_value, self.resource.attributes[attr_key]) class TestGenericPowerPort(unittest.TestCase): def setUp(self): self.shell_name = "test shell name" self.name = "test name" self.unique_id = "test unique id" self.resource = GenericPowerPort(shell_name=self.shell_name, name=self.name, unique_id=self.unique_id) def test_model_getter(self): """Check that property will return needed attribute value from the internal attributes dictionary""" expected_val = "test value" self.resource.attributes = { "{}{}".format(self.resource.namespace, "Model"): expected_val } # act result = self.resource.model # verify self.assertEqual(result, expected_val) def test_model_setter(self): """Check that property setter will correctly add attribute value into the internal attributes dictionary""" attr_value = "test value" # act self.resource.model = attr_value # verify attr_key = "{}{}".format(self.resource.namespace, "Model") self.assertIn(attr_key, self.resource.attributes) self.assertEqual(attr_value, self.resource.attributes[attr_key]) def test_model_name_getter(self): """Check that property will return needed attribute value from the internal attributes dictionary""" expected_val = "test value" self.resource.attributes = { "{}{}".format(self.resource.namespace, "Model Name"): expected_val } # act result = self.resource.model_name # verify self.assertEqual(result, expected_val) def test_model_name_setter(self): """Check that property setter will correctly add attribute value into the internal attributes dictionary""" attr_value = "test value" # act self.resource.model_name = attr_value # verify attr_key = 
"{}{}".format(self.resource.namespace, "Model Name") self.assertIn(attr_key, self.resource.attributes) self.assertEqual(attr_value, self.resource.attributes[attr_key]) def test_serial_number_getter(self): """Check that property will return needed attribute value from the internal attributes dictionary""" expected_val = "test value" self.resource.attributes = { "{}{}".format(self.resource.namespace, "Serial Number"): expected_val } # act result = self.resource.serial_number # verify self.assertEqual(result, expected_val) def test_serial_number_setter(self): """Check that property setter will correctly add attribute value into the internal attributes dictionary""" attr_value = "test value" # act self.resource.serial_number = attr_value # verify attr_key = "{}{}".format(self.resource.namespace, "Serial Number") self.assertIn(attr_key, self.resource.attributes) self.assertEqual(attr_value, self.resource.attributes[attr_key]) def test_version_getter(self): """Check that property will return needed attribute value from the internal attributes dictionary""" expected_val = "test value" self.resource.attributes = { "{}{}".format(self.resource.namespace, "Version"): expected_val } # act result = self.resource.version # verify self.assertEqual(result, expected_val) def test_version_setter(self): """Check that property setter will correctly add attribute value into the internal attributes dictionary""" attr_value = "test value" # act self.resource.version = attr_value # verify attr_key = "{}{}".format(self.resource.namespace, "Version") self.assertIn(attr_key, self.resource.attributes) self.assertEqual(attr_value, self.resource.attributes[attr_key]) def test_port_description_getter(self): """Check that property will return needed attribute value from the internal attributes dictionary""" attr_value = "test value" self.resource.attributes = { "{}{}".format(self.resource.namespace, "Port Description"): attr_value } # act result = self.resource.port_description # verify self.assertEqual(result, attr_value) def test_port_description_setter(self): """Check that property setter will correctly add attribute value into the internal attributes dictionary""" attr_value = "test value" # act self.resource.port_description = attr_value # verify attr_key = "{}{}".format(self.resource.namespace, "Port Description") self.assertIn(attr_key, self.resource.attributes) self.assertEqual(attr_value, self.resource.attributes[attr_key])
42.302872
115
0.641032
1,742
16,202
5.788175
0.04535
0.129723
0.098185
0.080333
0.900526
0.887236
0.860458
0.854508
0.835763
0.808291
0
0.000084
0.266757
16,202
382
116
42.413613
0.848653
0.190594
0
0.632411
0
0
0.075345
0.001704
0
0
0
0
0.189723
1
0.142292
false
0
0.007905
0
0.166008
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
0e61a198a314706f9aab80b96e692e3081ef6aaf
32
py
Python
read_structure_step/formats/smi/__init__.py
paulsaxe/read_structure_step
335c4eb39ad8556070e769fa9491ec5de22ee455
[ "BSD-3-Clause" ]
null
null
null
read_structure_step/formats/smi/__init__.py
paulsaxe/read_structure_step
335c4eb39ad8556070e769fa9491ec5de22ee455
[ "BSD-3-Clause" ]
9
2020-01-19T01:14:43.000Z
2022-01-29T14:25:05.000Z
read_structure_step/formats/smi/__init__.py
paulsaxe/read_structure_step
335c4eb39ad8556070e769fa9491ec5de22ee455
[ "BSD-3-Clause" ]
1
2022-01-14T21:50:37.000Z
2022-01-14T21:50:37.000Z
from . import smi # noqa: F401
16
31
0.65625
5
32
4.2
1
0
0
0
0
0
0
0
0
0
0
0.125
0.25
32
1
32
32
0.75
0.3125
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
0e9804593660557ba64b85ffb810d684008edd53
6,478
py
Python
tests/integration/test_engine_migrations.py
twil/asyncpg-migrate
dd91237f49d91acdc21c642254ddcfb77e548e54
[ "Apache-2.0" ]
17
2019-07-23T12:34:29.000Z
2021-09-07T19:49:09.000Z
tests/integration/test_engine_migrations.py
twil/asyncpg-migrate
dd91237f49d91acdc21c642254ddcfb77e548e54
[ "Apache-2.0" ]
227
2019-06-27T21:51:34.000Z
2022-03-01T04:00:47.000Z
tests/integration/test_engine_migrations.py
twil/asyncpg-migrate
dd91237f49d91acdc21c642254ddcfb77e548e54
[ "Apache-2.0" ]
3
2019-07-11T08:55:34.000Z
2022-01-03T10:09:11.000Z
import typing as t

import asyncpg
import pytest
import pytest_mock as ptm

from asyncpg_migrate import constants
from asyncpg_migrate import model
from asyncpg_migrate.engine import downgrade
from asyncpg_migrate.engine import migration
from asyncpg_migrate.engine import upgrade


@pytest.mark.asyncio
@pytest.mark.parametrize(
    'table_schema,table_name',
    [
        (constants.MIGRATIONS_SCHEMA, constants.MIGRATIONS_TABLE),
        (constants.MIGRATIONS_SCHEMA, '_foo_'),
        (constants.MIGRATIONS_SCHEMA, 'ordinary'),
    ],
)
async def test_get_revision_no_migrations_table(
    db_connection: asyncpg.Connection,
    table_schema: str,
    table_name: str,
) -> None:
    with pytest.raises(migration.MigrationTableMissing):
        await migration.latest_revision(
            connection=db_connection,
            table_schema=table_schema,
            table_name=table_name,
        )


@pytest.mark.asyncio
@pytest.mark.parametrize(
    'table_schema,table_name',
    [
        (constants.MIGRATIONS_SCHEMA, constants.MIGRATIONS_TABLE),
        (constants.MIGRATIONS_SCHEMA, '_foo_'),
        (constants.MIGRATIONS_SCHEMA, 'ordinary'),
    ],
)
async def test_get_revision_migration_table_exists_no_entries(
    db_connection: asyncpg.Connection,
    table_schema: str,
    table_name: str,
) -> None:
    await migration.create_table(
        connection=db_connection,
        table_schema=table_schema,
        table_name=table_name,
    )
    assert (
        await migration.latest_revision(
            connection=db_connection,
            table_schema=table_schema,
            table_name=table_name,
        )
    ) is None


@pytest.mark.asyncio
@pytest.mark.parametrize(
    'table_schema,table_name',
    [
        (constants.MIGRATIONS_SCHEMA, constants.MIGRATIONS_TABLE),
        (constants.MIGRATIONS_SCHEMA, '_foo_'),
        (constants.MIGRATIONS_SCHEMA, 'ordinary'),
    ],
)
async def test_get_revision_migration_table_exists_with_entries(
    db_connection: asyncpg.Connection,
    table_schema: str,
    table_name: str,
    mocker: ptm.MockFixture,
) -> None:
    max_revisions = 10
    await migration.create_table(
        connection=db_connection,
        table_schema=table_schema,
        table_name=table_name,
    )
    for i in range(1, max_revisions + 1):
        await migration.save(
            connection=db_connection,
            migration=model.Migration(
                revision=model.Revision(i),
                label=__name__,
                path=mocker.stub(),
                upgrade=mocker.stub(),
                downgrade=mocker.stub(),
            ),
            direction=model.MigrationDir.UP,
            table_schema=table_schema,
            table_name=table_name,
        )
    assert (
        await migration.latest_revision(
            connection=db_connection,
            table_schema=table_schema,
            table_name=table_name,
        )
    ) == max_revisions


@pytest.mark.asyncio
@pytest.mark.parametrize(
    'table_schema,table_name',
    [
        (constants.MIGRATIONS_SCHEMA, constants.MIGRATIONS_TABLE),
        (constants.MIGRATIONS_SCHEMA, '_foo_'),
        (constants.MIGRATIONS_SCHEMA, 'ordinary'),
    ],
)
async def test_ensure_create_table(
    db_connection: asyncpg.Connection,
    table_schema: str,
    table_name: str,
    mocker: ptm.MockFixture,
) -> None:
    await migration.create_table(
        connection=db_connection,
        table_schema=table_schema,
        table_name=table_name,
    )
    table_name_in_db = await db_connection.fetchval(
        """
        select to_regclass('{schema}.{table}')
        """.format(
            schema=table_schema,
            table=table_name,
        ),
    )
    assert table_name_in_db == table_name


@pytest.mark.asyncio
async def test_migration_history_no_table(db_connection: asyncpg.Connection) -> None:
    with pytest.raises(migration.MigrationTableMissing):
        await migration.list(db_connection)


@pytest.mark.asyncio
async def test_migration_history_no_revision(db_connection: asyncpg.Connection) -> None:
    await migration.create_table(db_connection)
    assert not (await migration.list(db_connection))


@pytest.mark.asyncio
async def test_migration_history_up_head(
    migration_config: t.Tuple[model.Config, int],
    db_connection: asyncpg.Connection,
) -> None:
    config, migrations_count = migration_config
    if migrations_count:
        await upgrade.run(
            config,
            'HEAD',
            db_connection,
        )

        history = await migration.list(db_connection)
        db_rev = await migration.latest_revision(db_connection)

        assert history is not None
        assert len(history) == migrations_count

        latest_rev = history[-1]
        assert latest_rev.revision == db_rev
        assert latest_rev.direction == model.MigrationDir.UP


@pytest.mark.asyncio
async def test_migration_history_up_head_down_base(
    migration_config: t.Tuple[model.Config, int],
    db_connection: asyncpg.Connection,
) -> None:
    config, migrations_count = migration_config
    if migrations_count:
        await upgrade.run(
            config,
            'HEAD',
            db_connection,
        )
        await downgrade.run(
            config,
            'BASE',
            db_connection,
        )

        history = await migration.list(db_connection)
        db_rev = await migration.latest_revision(db_connection)

        assert history is not None
        assert len(history) == 2 * migrations_count

        latest_rev = history[-1]
        assert latest_rev.revision == db_rev
        assert latest_rev.direction == model.MigrationDir.DOWN


@pytest.mark.asyncio
async def test_migration_history_up_head_down_1(
    migration_config: t.Tuple[model.Config, int],
    db_connection: asyncpg.Connection,
) -> None:
    config, migrations_count = migration_config
    if migrations_count:
        await upgrade.run(
            config,
            'HEAD',
            db_connection,
        )
        await downgrade.run(
            config,
            -1,
            db_connection,
        )

        history = await migration.list(db_connection)
        db_rev = await migration.latest_revision(db_connection)

        assert history is not None
        assert len(history) == migrations_count + 1

        latest_rev = history[-1]
        assert latest_rev.revision == db_rev
        assert latest_rev.direction == model.MigrationDir.DOWN
28.04329
88
0.659463
703
6,478
5.780939
0.118065
0.091535
0.074803
0.054134
0.859498
0.8125
0.8125
0.8125
0.788878
0.776821
0
0.002282
0.255789
6,478
230
89
28.165217
0.840697
0
0
0.673367
0
0
0.024938
0.014339
0
0
0
0
0.080402
1
0
false
0
0.045226
0
0.045226
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
0ea6c8ba68c22efe83ae9aabdcc0445b4f391c27
105
py
Python
home/views.py
byteknacker/eulerapps
5bebf00b4c77b84ceee8bbd73226db60e7fec03f
[ "BSD-3-Clause" ]
null
null
null
home/views.py
byteknacker/eulerapps
5bebf00b4c77b84ceee8bbd73226db60e7fec03f
[ "BSD-3-Clause" ]
null
null
null
home/views.py
byteknacker/eulerapps
5bebf00b4c77b84ceee8bbd73226db60e7fec03f
[ "BSD-3-Clause" ]
null
null
null
from django.shortcuts import render


def display(request):
    return render(request, 'home/apps.html')
17.5
44
0.752381
14
105
5.642857
0.857143
0
0
0
0
0
0
0
0
0
0
0
0.142857
105
5
45
21
0.877778
0
0
0
0
0
0.133333
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0.333333
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
1
0
0
6
7ed40d1702ef17ebed70ba45da4d5778b5cf16d0
32
py
Python
vnpy/gateway/ctp/__init__.py
jubal/vnpy
f50f2535ed39dd33272e0985ed40c7078e4c19f6
[ "MIT" ]
5
2020-05-19T07:32:39.000Z
2022-03-14T09:09:48.000Z
vnpy/gateway/ctp/__init__.py
jubal/vnpy
f50f2535ed39dd33272e0985ed40c7078e4c19f6
[ "MIT" ]
null
null
null
vnpy/gateway/ctp/__init__.py
jubal/vnpy
f50f2535ed39dd33272e0985ed40c7078e4c19f6
[ "MIT" ]
3
2020-04-02T08:30:17.000Z
2020-05-03T12:12:05.000Z
from vnpy_ctp import CtpGateway
16
31
0.875
5
32
5.4
1
0
0
0
0
0
0
0
0
0
0
0
0.125
32
1
32
32
0.964286
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
7edb3b3e33594a100c39b713860e80c45b4d1f45
35
py
Python
petrarch2/__init__.py
Sayeedsalam/political-actor-recommender
20dbc37ac419e4ecd5436d4e5b9685846639b2bc
[ "MIT" ]
1
2018-03-15T09:48:28.000Z
2018-03-15T09:48:28.000Z
petrarch2/__init__.py
Sayeedsalam/political-actor-recommender
20dbc37ac419e4ecd5436d4e5b9685846639b2bc
[ "MIT" ]
null
null
null
petrarch2/__init__.py
Sayeedsalam/political-actor-recommender
20dbc37ac419e4ecd5436d4e5b9685846639b2bc
[ "MIT" ]
null
null
null
from EventCoder import EventCoder
11.666667
33
0.857143
4
35
7.5
0.75
0
0
0
0
0
0
0
0
0
0
0
0.142857
35
2
34
17.5
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
7d15b9b24d90f98dcb53be20dbae5a396fc453a5
2,771
py
Python
dragon/func/jd_web_hook/__init__.py
InfernalAzazel/dragon
464056feb8ecaac55eabedb0a083ea9f609a5753
[ "Apache-2.0" ]
null
null
null
dragon/func/jd_web_hook/__init__.py
InfernalAzazel/dragon
464056feb8ecaac55eabedb0a083ea9f609a5753
[ "Apache-2.0" ]
null
null
null
dragon/func/jd_web_hook/__init__.py
InfernalAzazel/dragon
464056feb8ecaac55eabedb0a083ea9f609a5753
[ "Apache-2.0" ]
null
null
null
from func.jd_web_hook.models import WebHookItem
from func.jd_web_hook import a_d_reward_over_table
from func.jd_web_hook import activity_delay_apply
from func.jd_web_hook import activity_expense_issue_doc
from func.jd_web_hook import add_or_modify_logistics_note
from func.jd_web_hook import award_cover_delivery_order
from func.jd_web_hook import b_r_net_growth_reward
from func.jd_web_hook import brand_director_wages
from func.jd_web_hook import c_r_salary_application
from func.jd_web_hook import card_scraping_delivery_order
from func.jd_web_hook import complimentary_material_issue_doc
from func.jd_web_hook import customer_error_activation
from func.jd_web_hook import dead_material_issue_doc
from func.jd_web_hook import debit_issue_doc
from func.jd_web_hook import entry_application_verify_field
from func.jd_web_hook import free_contract_batch_modify
from func.jd_web_hook import insufficient_month_quit_wages
from func.jd_web_hook import leave_apply
from func.jd_web_hook import modify_work_date
from func.jd_web_hook import order_fee
from func.jd_web_hook import outside_b_r_wages
from func.jd_web_hook import outside_b_r_wages_new
from func.jd_web_hook import outside_trade_wages
from func.jd_web_hook import outside_trade_wages_new
from func.jd_web_hook import personnel_maintain
from func.jd_web_hook import picking_material_issue_doc
from func.jd_web_hook import picking_sample_issue_doc
from func.jd_web_hook import product_change_application
from func.jd_web_hook import promoter_wages
from func.jd_web_hook import province_trade_wages
from func.jd_web_hook import purchase_return_issue_doc
from func.jd_web_hook import quality_monitor_plan_launch
from func.jd_web_hook import quality_monitor_plan_launch2
from func.jd_web_hook import r_a_d_reward_apply
from func.jd_web_hook import r_r_application_for_violations
from func.jd_web_hook import salary_deduction_approval
from func.jd_web_hook import sales_material_issue_doc
from func.jd_web_hook import sales_product_issue_doc
from func.jd_web_hook import sales_volume_wages
from func.jd_web_hook import special_application
from func.jd_web_hook import m_a_s_l_m_industry_agent
from func.jd_web_hook import m_p_payment_deduction_date
from func.jd_web_hook import b_o_borrowing_expenses_apply
from func.jd_web_hook import customer_actual_sales_checklist
from func.jd_web_hook import c_r_amount_transition_table
from func.jd_web_hook import repair_customer_c_d_apply
from func.jd_web_hook import customer_business_accounting_apply
from func.jd_web_hook import induction_apply2
from func.jd_web_hook import modify_work_date2
from func.jd_web_hook import quality_monitor_plan_launch3
from func.jd_web_hook import customer_actual_sales_checklist2
from func.jd_web_hook import customer_actual_sales_checklist3
52.283019
63
0.90581
508
2,771
4.454724
0.212598
0.183827
0.229783
0.298719
0.751657
0.744145
0.673
0.438356
0.265135
0.0327
0
0.002341
0.075063
2,771
52
64
53.288462
0.880609
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
bc35fc2922cda0565957130972a29e96525713da
16,554
py
Python
pybind/nos/v7_1_0/rbridge_id/route_map/content/set_/dampening/__init__.py
shivharis/pybind
4e1c6d54b9fd722ccec25546ba2413d79ce337e6
[ "Apache-2.0" ]
null
null
null
pybind/nos/v7_1_0/rbridge_id/route_map/content/set_/dampening/__init__.py
shivharis/pybind
4e1c6d54b9fd722ccec25546ba2413d79ce337e6
[ "Apache-2.0" ]
null
null
null
pybind/nos/v7_1_0/rbridge_id/route_map/content/set_/dampening/__init__.py
shivharis/pybind
4e1c6d54b9fd722ccec25546ba2413d79ce337e6
[ "Apache-2.0" ]
1
2021-11-05T22:15:42.000Z
2021-11-05T22:15:42.000Z
from operator import attrgetter import pyangbind.lib.xpathhelper as xpathhelper from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType from pyangbind.lib.base import PybindBase from decimal import Decimal from bitarray import bitarray import __builtin__ class dampening(PybindBase): """ This class was auto-generated by the PythonClass plugin for PYANG from YANG module brocade-rbridge - based on the path /rbridge-id/route-map/content/set/dampening. Each member element of the container is represented as a class variable - with a specific YANG type. YANG Description: BGP route flap damping """ __slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__half_life','__reuse','__suppress','__max_suppress_time',) _yang_name = 'dampening' _rest_name = 'dampening' _pybind_generated_by = 'container' def __init__(self, *args, **kwargs): path_helper_ = kwargs.pop("path_helper", None) if path_helper_ is False: self._path_helper = False elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper): self._path_helper = path_helper_ elif hasattr(self, "_parent"): path_helper_ = getattr(self._parent, "_path_helper", False) self._path_helper = path_helper_ else: self._path_helper = False extmethods = kwargs.pop("extmethods", None) if extmethods is False: self._extmethods = False elif extmethods is not None and isinstance(extmethods, dict): self._extmethods = extmethods elif hasattr(self, "_parent"): extmethods = getattr(self._parent, "_extmethods", None) self._extmethods = extmethods else: self._extmethods = False self.__half_life = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1 .. 45']}), is_leaf=True, yang_name="half-life", rest_name="half-life", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='uint32', is_config=True) self.__reuse = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1 .. 20000']}), is_leaf=True, yang_name="reuse", rest_name="reuse", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='uint32', is_config=True) self.__max_suppress_time = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1 .. 
255']}), is_leaf=True, yang_name="max-suppress-time", rest_name="max-suppress-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='uint32', is_config=True) self.__suppress = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1 .. 20000']}), is_leaf=True, yang_name="suppress", rest_name="suppress", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='uint32', is_config=True) load = kwargs.pop("load", None) if args: if len(args) > 1: raise TypeError("cannot create a YANG container with >1 argument") all_attr = True for e in self._pyangbind_elements: if not hasattr(args[0], e): all_attr = False break if not all_attr: raise ValueError("Supplied object did not have the correct attributes") for e in self._pyangbind_elements: nobj = getattr(args[0], e) if nobj._changed() is False: continue setmethod = getattr(self, "_set_%s" % e) if load is None: setmethod(getattr(args[0], e)) else: setmethod(getattr(args[0], e), load=load) def _path(self): if hasattr(self, "_parent"): return self._parent._path()+[self._yang_name] else: return [u'rbridge-id', u'route-map', u'content', u'set', u'dampening'] def _rest_path(self): if hasattr(self, "_parent"): if self._rest_name: return self._parent._rest_path()+[self._rest_name] else: return self._parent._rest_path() else: return [u'rbridge-id', u'route-map', u'set', u'dampening'] def _get_half_life(self): """ Getter method for half_life, mapped from YANG variable /rbridge_id/route_map/content/set/dampening/half_life (uint32) """ return self.__half_life def _set_half_life(self, v, load=False): """ Setter method for half_life, mapped from YANG variable /rbridge_id/route_map/content/set/dampening/half_life (uint32) If this variable is read-only (config: false) in the source YANG file, then _set_half_life is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_half_life() directly. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1 .. 45']}), is_leaf=True, yang_name="half-life", rest_name="half-life", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='uint32', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """half_life must be of a type compatible with uint32""", 'defined-type': "uint32", 'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1 .. 
45']}), is_leaf=True, yang_name="half-life", rest_name="half-life", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='uint32', is_config=True)""", }) self.__half_life = t if hasattr(self, '_set'): self._set() def _unset_half_life(self): self.__half_life = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1 .. 45']}), is_leaf=True, yang_name="half-life", rest_name="half-life", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='uint32', is_config=True) def _get_reuse(self): """ Getter method for reuse, mapped from YANG variable /rbridge_id/route_map/content/set/dampening/reuse (uint32) """ return self.__reuse def _set_reuse(self, v, load=False): """ Setter method for reuse, mapped from YANG variable /rbridge_id/route_map/content/set/dampening/reuse (uint32) If this variable is read-only (config: false) in the source YANG file, then _set_reuse is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_reuse() directly. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1 .. 20000']}), is_leaf=True, yang_name="reuse", rest_name="reuse", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='uint32', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """reuse must be of a type compatible with uint32""", 'defined-type': "uint32", 'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1 .. 20000']}), is_leaf=True, yang_name="reuse", rest_name="reuse", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='uint32', is_config=True)""", }) self.__reuse = t if hasattr(self, '_set'): self._set() def _unset_reuse(self): self.__reuse = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1 .. 
20000']}), is_leaf=True, yang_name="reuse", rest_name="reuse", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='uint32', is_config=True) def _get_suppress(self): """ Getter method for suppress, mapped from YANG variable /rbridge_id/route_map/content/set/dampening/suppress (uint32) """ return self.__suppress def _set_suppress(self, v, load=False): """ Setter method for suppress, mapped from YANG variable /rbridge_id/route_map/content/set/dampening/suppress (uint32) If this variable is read-only (config: false) in the source YANG file, then _set_suppress is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_suppress() directly. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1 .. 20000']}), is_leaf=True, yang_name="suppress", rest_name="suppress", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='uint32', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """suppress must be of a type compatible with uint32""", 'defined-type': "uint32", 'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1 .. 20000']}), is_leaf=True, yang_name="suppress", rest_name="suppress", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='uint32', is_config=True)""", }) self.__suppress = t if hasattr(self, '_set'): self._set() def _unset_suppress(self): self.__suppress = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1 .. 20000']}), is_leaf=True, yang_name="suppress", rest_name="suppress", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None, u'cli-incomplete-command': None}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='uint32', is_config=True) def _get_max_suppress_time(self): """ Getter method for max_suppress_time, mapped from YANG variable /rbridge_id/route_map/content/set/dampening/max_suppress_time (uint32) """ return self.__max_suppress_time def _set_max_suppress_time(self, v, load=False): """ Setter method for max_suppress_time, mapped from YANG variable /rbridge_id/route_map/content/set/dampening/max_suppress_time (uint32) If this variable is read-only (config: false) in the source YANG file, then _set_max_suppress_time is considered as a private method. 
Backends looking to populate this variable should do so via calling thisObj._set_max_suppress_time() directly. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1 .. 255']}), is_leaf=True, yang_name="max-suppress-time", rest_name="max-suppress-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='uint32', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """max_suppress_time must be of a type compatible with uint32""", 'defined-type': "uint32", 'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1 .. 255']}), is_leaf=True, yang_name="max-suppress-time", rest_name="max-suppress-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='uint32', is_config=True)""", }) self.__max_suppress_time = t if hasattr(self, '_set'): self._set() def _unset_max_suppress_time(self): self.__max_suppress_time = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1 .. 255']}), is_leaf=True, yang_name="max-suppress-time", rest_name="max-suppress-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-ip-policy', defining_module='brocade-ip-policy', yang_type='uint32', is_config=True) half_life = __builtin__.property(_get_half_life, _set_half_life) reuse = __builtin__.property(_get_reuse, _set_reuse) suppress = __builtin__.property(_get_suppress, _set_suppress) max_suppress_time = __builtin__.property(_get_max_suppress_time, _set_max_suppress_time) _pyangbind_elements = {'half_life': half_life, 'reuse': reuse, 'suppress': suppress, 'max_suppress_time': max_suppress_time, }
71.973913
596
0.729552
2,264
16,554
5.089664
0.083039
0.03992
0.043739
0.043044
0.812028
0.787816
0.778443
0.768376
0.768376
0.752061
0
0.024448
0.125287
16,554
229
597
72.28821
0.77134
0.131932
0
0.432432
0
0.027027
0.343924
0.14898
0
0
0
0
0
1
0.101351
false
0
0.054054
0
0.283784
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
70e8975fbd6337412a9defb160bbc036a3b53418
20,459
py
Python
fuel_cell/run_reac.py
seamuss1/Thorium_Molten_Salt_Reactor
222338f773dd2186cc25a06a0f8ee89a684efd2b
[ "MIT" ]
null
null
null
fuel_cell/run_reac.py
seamuss1/Thorium_Molten_Salt_Reactor
222338f773dd2186cc25a06a0f8ee89a684efd2b
[ "MIT" ]
null
null
null
fuel_cell/run_reac.py
seamuss1/Thorium_Molten_Salt_Reactor
222338f773dd2186cc25a06a0f8ee89a684efd2b
[ "MIT" ]
null
null
null
import matplotlib import numpy as np import openmc ##Materials LiF = openmc.Material(5,'LiF') LiF.add_nuclide('Li7',1.0) LiF.add_element('F',1.0) BeF = openmc.Material(6,'BeF2') BeF.add_element('Be',1.0) BeF.add_element('F',2.0) ZrF = openmc.Material(7,'ZrF4') ZrF.add_element('Zr',1.0) ZrF.add_element('F',4.0) UF = openmc.Material(8,'UF4') UF.add_nuclide('U233',0.05) UF.add_nuclide('U238',0.95) UF.add_element('F',4.0) fuel_salt = openmc.Material.mix_materials([LiF,BeF,ZrF,UF],[0.65,0.29,0.05,0.01], 'ao') graphite = openmc.Material(2,'graphite') graphite.add_element("C",1.0) pipe = openmc.Material(3,'pipe') pipe.add_element('Pb',1.0) pipe.set_density('g/cm3', 17.0) #water = openmc.Material(name="h2o") #water.add_nuclide('H1', 2.0) #water.add_nuclide('O16', 1.0) #water.set_density('g/cm3', 1.0) #water.add_s_alpha_beta('c_H_in_H2O') air = openmc.Material(4,"air") air.add_element('He',1.0) #uo2.set_density('g/cm3', 0.10) materials = openmc.Materials([fuel_salt,pipe,air,graphite]) materials.export_to_xml() ##Geometry core = openmc.ZCylinder(r=50.8) R = [0+f*9.03 for f in range(6)] N = [1,6,12,18,24,30] fuel_dic = dict() pipe_inner_dic = dict() pipe_outer_dic = dict() gap_outer_dic = dict() bodies = [] for n,r in zip(R,N): t = np.linspace(0,2*np.pi,int(n), endpoint=False) X = r*np.cos(t) Y=r*np.sin(t) if X.size==0: (X,Y) = ([0], [0]) for x,y in zip(X,Y): key = str(x)+'_'+str(y) fs_outer_radius = openmc.ZCylinder(x0=x, y0=y, r=7.6703, boundary_type='transmission') pipe_inner_radius = openmc.ZCylinder(x0=x, y0=y, r=3.67665) pipe_outer_radius = openmc.ZCylinder(x0=x, y0=y, r=380365) # gap_outer_radius = openmc.ZCylinder(x0=x, y0=y, r=3.805) # bodies.append(fs_outer_radius) # bodies.append(pipe_inner_radius) # bodies.append(pipe_outer_radius) fuel_dic[key] = fs_outer_radius pipe_inner_dic[key] = pipe_inner_radius pipe_outer_dic[key] = pipe_outer_radius # gap_outer_dic[key] = gap_outer_radius regions = [] for key,value in fuel_dic.items(): # gap_region = +fuel_dic[key] & -pipe_inner_dic[key] # gap = openmc.Cell(name='air gap') # gap.region = gap_region # gap_region2 = -gap_outer_dic[key] & +pipe_inner_dic[key] # gap2 = openmc.Cell(name='air gap') # gap.region = gap_region fuel_region = -fuel_dic[key] pipe_region = +pipe_inner_dic[key] & -pipe_outer_dic[key] fuel = openmc.Cell(name='fuel') fuel.fill = fuel_salt fuel.region = fuel_region piping = openmc.Cell(name='piping') piping.fill = pipe piping.region = pipe_region # regions.append(gap_region) regions.append(fuel_region) regions.append(pipe_region) # bodies.append(gap) bodies.append(fuel) bodies.append(piping) pitch = 203.2 left = openmc.XPlane(x0=-pitch/2, boundary_type='reflective') right = openmc.XPlane(x0=pitch/2, boundary_type='reflective') bottom = openmc.YPlane(y0=-pitch/2, boundary_type='reflective') top = openmc.YPlane(y0=pitch/2, boundary_type='reflective') s='-core & ' #for key,value in gap_outer_dic.items(): # s+=f'+gap_outer_dic[\'{key}\'] & ' #print(s) #core_region = [-core & +pipe_outer_dic[f] for f in pipe_outer_dic] #core_region=-core & +gap_outer_dic['6.0_0.0'] & +gap_outer_dic['4.596266658713868_3.8567256581192355'] & +gap_outer_dic['1.0418890660015825_5.908846518073248'] & +gap_outer_dic['-2.9999999999999987_5.196152422706632'] & +gap_outer_dic['-5.63815572471545_2.0521208599540133'] & +gap_outer_dic['-5.638155724715451_-2.052120859954012'] & +gap_outer_dic['-3.0000000000000027_-5.19615242270663'] & +gap_outer_dic['1.0418890660015798_-5.908846518073249'] & +gap_outer_dic['4.5962666587138665_-3.8567256581192373'] & 
+gap_outer_dic['12.0_0.0'] & +gap_outer_dic['11.276311449430901_4.104241719908025'] & +gap_outer_dic['9.192533317427737_7.713451316238471'] & +gap_outer_dic['6.000000000000002_10.392304845413264'] & +gap_outer_dic['2.083778132003165_11.817693036146496'] & +gap_outer_dic['-2.0837781320031636_11.817693036146496'] & +gap_outer_dic['-5.999999999999997_10.392304845413264'] & +gap_outer_dic['-9.192533317427735_7.713451316238474'] & +gap_outer_dic['-11.2763114494309_4.1042417199080266'] & +gap_outer_dic['-12.0_1.4695761589768238e-15'] & +gap_outer_dic['-11.276311449430901_-4.104241719908024'] & +gap_outer_dic['-9.192533317427737_-7.713451316238471'] & +gap_outer_dic['-6.000000000000005_-10.39230484541326'] & +gap_outer_dic['-2.083778132003164_-11.817693036146496'] & +gap_outer_dic['2.0837781320031596_-11.817693036146498'] & +gap_outer_dic['6.000000000000002_-10.392304845413264'] & +gap_outer_dic['9.192533317427733_-7.713451316238475'] & +gap_outer_dic['11.276311449430898_-4.104241719908034'] & +gap_outer_dic['18.0_0.0'] & +gap_outer_dic['17.514807670436827_4.151085673363923'] & +gap_outer_dic['16.08538752582142_8.078385243608318'] & +gap_outer_dic['13.788799976141604_11.570176974357707'] & +gap_outer_dic['10.748854650650152_14.438217469590787'] & +gap_outer_dic['7.129435788704824_16.52788992384493'] & +gap_outer_dic['3.1256671980047477_17.726539554219745'] & +gap_outer_dic['-1.046606920388564_17.96954684888283'] & +gap_outer_dic['-5.162458188799624_17.2438112216788'] & +gap_outer_dic['-8.999999999999996_15.588457268119896'] & +gap_outer_dic['-12.352349481637203_13.09272554831488'] & +gap_outer_dic['-15.038780605432853_9.891161605274512'] & +gap_outer_dic['-16.91446717414635_6.15636257986204'] & +gap_outer_dic['-17.878290439354974_2.0896724542541465'] & +gap_outer_dic['-17.878290439354974_-2.089672454254142'] & +gap_outer_dic['-16.914467174146353_-6.156362579862035'] & +gap_outer_dic['-15.038780605432857_-9.891161605274508'] & +gap_outer_dic['-12.352349481637205_-13.092725548314876'] & +gap_outer_dic['-9.000000000000007_-15.58845726811989'] & +gap_outer_dic['-5.162458188799632_-17.243811221678797'] & +gap_outer_dic['-1.0466069203885724_-17.96954684888283'] & +gap_outer_dic['3.1256671980047397_-17.726539554219748'] & +gap_outer_dic['7.129435788704817_-16.527889923844935'] & +gap_outer_dic['10.748854650650145_-14.438217469590791'] & +gap_outer_dic['13.7887999761416_-11.570176974357713'] & +gap_outer_dic['16.085387525821417_-8.078385243608324'] & +gap_outer_dic['17.514807670436827_-4.151085673363928'] & +gap_outer_dic['24.0_0.0'] & +gap_outer_dic['23.63538607229299_4.167556264006328'] & +gap_outer_dic['22.552622898861802_8.20848343981605'] & +gap_outer_dic['20.784609690826528_11.999999999999998'] & +gap_outer_dic['18.385066634855473_15.426902632476942'] & +gap_outer_dic['15.426902632476946_18.385066634855473'] & +gap_outer_dic['12.000000000000004_20.784609690826528'] & +gap_outer_dic['8.208483439816051_22.5526228988618'] & +gap_outer_dic['4.16755626400633_23.63538607229299'] & +gap_outer_dic['1.4695761589768238e-15_24.0'] & +gap_outer_dic['-4.167556264006327_23.63538607229299'] & +gap_outer_dic['-8.20848343981605_22.552622898861802'] & +gap_outer_dic['-11.999999999999995_20.784609690826528'] & +gap_outer_dic['-15.426902632476946_18.385066634855473'] & +gap_outer_dic['-18.38506663485547_15.426902632476947'] & +gap_outer_dic['-20.784609690826528_11.999999999999998'] & +gap_outer_dic['-22.5526228988618_8.208483439816053'] & +gap_outer_dic['-23.63538607229299_4.167556264006336'] & 
+gap_outer_dic['-24.0_2.9391523179536475e-15'] & +gap_outer_dic['-23.635386072292995_-4.16755626400632'] & +gap_outer_dic['-22.552622898861802_-8.208483439816048'] & +gap_outer_dic['-20.78460969082653_-11.999999999999993'] & +gap_outer_dic['-18.385066634855473_-15.426902632476942'] & +gap_outer_dic['-15.426902632476947_-18.38506663485547'] & +gap_outer_dic['-12.00000000000001_-20.78460969082652'] & +gap_outer_dic['-8.208483439816066_-22.5526228988618'] & +gap_outer_dic['-4.167556264006328_-23.63538607229299'] & +gap_outer_dic['-4.408728476930472e-15_-24.0'] & +gap_outer_dic['4.167556264006319_-23.635386072292995'] & +gap_outer_dic['8.208483439816035_-22.552622898861806'] & +gap_outer_dic['12.000000000000004_-20.784609690826528'] & +gap_outer_dic['15.426902632476942_-18.385066634855477'] & +gap_outer_dic['18.385066634855466_-15.42690263247695'] & +gap_outer_dic['20.78460969082652_-12.00000000000001'] & +gap_outer_dic['22.552622898861795_-8.208483439816067'] & +gap_outer_dic['23.63538607229299_-4.167556264006329'] & +gap_outer_dic['30.0_0.0'] & +gap_outer_dic['29.70804206224711_4.175193028801963'] & +gap_outer_dic['28.837850878149567_8.269120674509974'] & +gap_outer_dic['27.406363729278027_12.202099292274006'] & +gap_outer_dic['25.44144288469278_15.897577926996147'] & +gap_outer_dic['22.98133329356934_19.283628290596177'] & +gap_outer_dic['20.073918190765745_22.294344764321828'] & +gap_outer_dic['16.775787104122404_24.871127176651253'] & +gap_outer_dic['13.151134403672323_26.96382138897501'] & +gap_outer_dic['9.270509831248424_28.531695488854606'] & +gap_outer_dic['5.209445330007912_29.544232590366242'] & +gap_outer_dic['1.0469849010750325_29.981724810572874'] & +gap_outer_dic['-3.1358538980296067_29.8356568610482'] & +gap_outer_dic['-7.2576568679900335_29.108871788279895'] & +gap_outer_dic['-11.238197802477362_27.81551563700362'] & +gap_outer_dic['-14.999999999999993_25.98076211353316'] & +gap_outer_dic['-18.469844259769747_23.64032260820166'] & +gap_outer_dic['-21.580194010159534_20.839751113769914'] & +gap_outer_dic['-24.27050983124842_17.633557568774197'] & +gap_outer_dic['-26.48842778576781_14.084146883576722'] & +gap_outer_dic['-28.19077862357725_10.260604299770065'] & +gap_outer_dic['-29.34442802201417_6.23735072453278'] & +gap_outer_dic['-29.926921507794727_2.0926942123237655'] & +gap_outer_dic['-29.926921507794727_-2.0926942123237584'] & +gap_outer_dic['-29.344428022014167_-6.237350724532785'] & +gap_outer_dic['-28.190778623577252_-10.26060429977006'] & +gap_outer_dic['-26.488427785767808_-14.084146883576725'] & +gap_outer_dic['-24.270509831248425_-17.63355756877419'] & +gap_outer_dic['-21.58019401015953_-20.83975111376992'] & +gap_outer_dic['-18.469844259769744_-23.640322608201664'] & +gap_outer_dic['-15.000000000000014_-25.980762113533153'] & +gap_outer_dic['-11.23819780247737_-27.81551563700362'] & +gap_outer_dic['-7.2576568679900335_-29.108871788279895'] & +gap_outer_dic['-3.135853898029601_-29.835656861048204'] & +gap_outer_dic['1.0469849010750385_-29.981724810572874'] & +gap_outer_dic['5.209445330007899_-29.544232590366242'] & +gap_outer_dic['9.270509831248416_-28.53169548885461'] & +gap_outer_dic['13.151134403672323_-26.96382138897501'] & +gap_outer_dic['16.77578710412241_-24.87112717665125'] & +gap_outer_dic['20.073918190765735_-22.29434476432184'] & +gap_outer_dic['22.981333293569335_-19.283628290596187'] & +gap_outer_dic['25.44144288469278_-15.897577926996151'] & +gap_outer_dic['27.40636372927803_-12.202099292274005'] & 
+gap_outer_dic['28.837850878149567_-8.269120674509969'] & +gap_outer_dic['29.708042062247106_-4.175193028801976'] #core_region = -core & +pipe_outer_dic['6.0_0.0'] & +pipe_outer_dic['4.596266658713868_3.8567256581192355'] & +pipe_outer_dic['1.0418890660015825_5.908846518073248'] & +pipe_outer_dic['-2.9999999999999987_5.196152422706632'] & +pipe_outer_dic['-5.63815572471545_2.0521208599540133'] & +pipe_outer_dic['-5.638155724715451_-2.052120859954012'] & +pipe_outer_dic['-3.0000000000000027_-5.19615242270663'] & +pipe_outer_dic['1.0418890660015798_-5.908846518073249'] & +pipe_outer_dic['4.5962666587138665_-3.8567256581192373'] & +pipe_outer_dic['12.0_0.0'] & +pipe_outer_dic['11.276311449430901_4.104241719908025'] & +pipe_outer_dic['9.192533317427737_7.713451316238471'] & +pipe_outer_dic['6.000000000000002_10.392304845413264'] & +pipe_outer_dic['2.083778132003165_11.817693036146496'] & +pipe_outer_dic['-2.0837781320031636_11.817693036146496'] & +pipe_outer_dic['-5.999999999999997_10.392304845413264'] & +pipe_outer_dic['-9.192533317427735_7.713451316238474'] & +pipe_outer_dic['-11.2763114494309_4.1042417199080266'] & +pipe_outer_dic['-12.0_1.4695761589768238e-15'] & +pipe_outer_dic['-11.276311449430901_-4.104241719908024'] & +pipe_outer_dic['-9.192533317427737_-7.713451316238471'] & +pipe_outer_dic['-6.000000000000005_-10.39230484541326'] & +pipe_outer_dic['-2.083778132003164_-11.817693036146496'] & +pipe_outer_dic['2.0837781320031596_-11.817693036146498'] & +pipe_outer_dic['6.000000000000002_-10.392304845413264'] & +pipe_outer_dic['9.192533317427733_-7.713451316238475'] & +pipe_outer_dic['11.276311449430898_-4.104241719908034'] & +pipe_outer_dic['18.0_0.0'] & +pipe_outer_dic['17.514807670436827_4.151085673363923'] & +pipe_outer_dic['16.08538752582142_8.078385243608318'] & +pipe_outer_dic['13.788799976141604_11.570176974357707'] & +pipe_outer_dic['10.748854650650152_14.438217469590787'] & +pipe_outer_dic['7.129435788704824_16.52788992384493'] & +pipe_outer_dic['3.1256671980047477_17.726539554219745'] & +pipe_outer_dic['-1.046606920388564_17.96954684888283'] & +pipe_outer_dic['-5.162458188799624_17.2438112216788'] & +pipe_outer_dic['-8.999999999999996_15.588457268119896'] & +pipe_outer_dic['-12.352349481637203_13.09272554831488'] & +pipe_outer_dic['-15.038780605432853_9.891161605274512'] & +pipe_outer_dic['-16.91446717414635_6.15636257986204'] & +pipe_outer_dic['-17.878290439354974_2.0896724542541465'] & +pipe_outer_dic['-17.878290439354974_-2.089672454254142'] & +pipe_outer_dic['-16.914467174146353_-6.156362579862035'] & +pipe_outer_dic['-15.038780605432857_-9.891161605274508'] & +pipe_outer_dic['-12.352349481637205_-13.092725548314876'] & +pipe_outer_dic['-9.000000000000007_-15.58845726811989'] & +pipe_outer_dic['-5.162458188799632_-17.243811221678797'] & +pipe_outer_dic['-1.0466069203885724_-17.96954684888283'] & +pipe_outer_dic['3.1256671980047397_-17.726539554219748'] & +pipe_outer_dic['7.129435788704817_-16.527889923844935'] & +pipe_outer_dic['10.748854650650145_-14.438217469590791'] & +pipe_outer_dic['13.7887999761416_-11.570176974357713'] & +pipe_outer_dic['16.085387525821417_-8.078385243608324'] & +pipe_outer_dic['17.514807670436827_-4.151085673363928'] & +pipe_outer_dic['24.0_0.0'] & +pipe_outer_dic['23.63538607229299_4.167556264006328'] & +pipe_outer_dic['22.552622898861802_8.20848343981605'] & +pipe_outer_dic['20.784609690826528_11.999999999999998'] & +pipe_outer_dic['18.385066634855473_15.426902632476942'] & +pipe_outer_dic['15.426902632476946_18.385066634855473'] & 
+pipe_outer_dic['12.000000000000004_20.784609690826528'] & +pipe_outer_dic['8.208483439816051_22.5526228988618'] & +pipe_outer_dic['4.16755626400633_23.63538607229299'] & +pipe_outer_dic['1.4695761589768238e-15_24.0'] & +pipe_outer_dic['-4.167556264006327_23.63538607229299'] & +pipe_outer_dic['-8.20848343981605_22.552622898861802'] & +pipe_outer_dic['-11.999999999999995_20.784609690826528'] & +pipe_outer_dic['-15.426902632476946_18.385066634855473'] & +pipe_outer_dic['-18.38506663485547_15.426902632476947'] & +pipe_outer_dic['-20.784609690826528_11.999999999999998'] & +pipe_outer_dic['-22.5526228988618_8.208483439816053'] & +pipe_outer_dic['-23.63538607229299_4.167556264006336'] & +pipe_outer_dic['-24.0_2.9391523179536475e-15'] & +pipe_outer_dic['-23.635386072292995_-4.16755626400632'] & +pipe_outer_dic['-22.552622898861802_-8.208483439816048'] & +pipe_outer_dic['-20.78460969082653_-11.999999999999993'] & +pipe_outer_dic['-18.385066634855473_-15.426902632476942'] & +pipe_outer_dic['-15.426902632476947_-18.38506663485547'] & +pipe_outer_dic['-12.00000000000001_-20.78460969082652'] & +pipe_outer_dic['-8.208483439816066_-22.5526228988618'] & +pipe_outer_dic['-4.167556264006328_-23.63538607229299'] & +pipe_outer_dic['-4.408728476930472e-15_-24.0'] & +pipe_outer_dic['4.167556264006319_-23.635386072292995'] & +pipe_outer_dic['8.208483439816035_-22.552622898861806'] & +pipe_outer_dic['12.000000000000004_-20.784609690826528'] & +pipe_outer_dic['15.426902632476942_-18.385066634855477'] & +pipe_outer_dic['18.385066634855466_-15.42690263247695'] & +pipe_outer_dic['20.78460969082652_-12.00000000000001'] & +pipe_outer_dic['22.552622898861795_-8.208483439816067'] & +pipe_outer_dic['23.63538607229299_-4.167556264006329'] & +pipe_outer_dic['30.0_0.0'] & +pipe_outer_dic['29.70804206224711_4.175193028801963'] & +pipe_outer_dic['28.837850878149567_8.269120674509974'] & +pipe_outer_dic['27.406363729278027_12.202099292274006'] & +pipe_outer_dic['25.44144288469278_15.897577926996147'] & +pipe_outer_dic['22.98133329356934_19.283628290596177'] & +pipe_outer_dic['20.073918190765745_22.294344764321828'] & +pipe_outer_dic['16.775787104122404_24.871127176651253'] & +pipe_outer_dic['13.151134403672323_26.96382138897501'] & +pipe_outer_dic['9.270509831248424_28.531695488854606'] & +pipe_outer_dic['5.209445330007912_29.544232590366242'] & +pipe_outer_dic['1.0469849010750325_29.981724810572874'] & +pipe_outer_dic['-3.1358538980296067_29.8356568610482'] & +pipe_outer_dic['-7.2576568679900335_29.108871788279895'] & +pipe_outer_dic['-11.238197802477362_27.81551563700362'] & +pipe_outer_dic['-14.999999999999993_25.98076211353316'] & +pipe_outer_dic['-18.469844259769747_23.64032260820166'] & +pipe_outer_dic['-21.580194010159534_20.839751113769914'] & +pipe_outer_dic['-24.27050983124842_17.633557568774197'] & +pipe_outer_dic['-26.48842778576781_14.084146883576722'] & +pipe_outer_dic['-28.19077862357725_10.260604299770065'] & +pipe_outer_dic['-29.34442802201417_6.23735072453278'] & +pipe_outer_dic['-29.926921507794727_2.0926942123237655'] & +pipe_outer_dic['-29.926921507794727_-2.0926942123237584'] & +pipe_outer_dic['-29.344428022014167_-6.237350724532785'] & +pipe_outer_dic['-28.190778623577252_-10.26060429977006'] & +pipe_outer_dic['-26.488427785767808_-14.084146883576725'] & +pipe_outer_dic['-24.270509831248425_-17.63355756877419'] & +pipe_outer_dic['-21.58019401015953_-20.83975111376992'] & +pipe_outer_dic['-18.469844259769744_-23.640322608201664'] & +pipe_outer_dic['-15.000000000000014_-25.980762113533153'] & 
+pipe_outer_dic['-11.23819780247737_-27.81551563700362'] & +pipe_outer_dic['-7.2576568679900335_-29.108871788279895'] & +pipe_outer_dic['-3.135853898029601_-29.835656861048204'] & +pipe_outer_dic['1.0469849010750385_-29.981724810572874'] & +pipe_outer_dic['5.209445330007899_-29.544232590366242'] & +pipe_outer_dic['9.270509831248416_-28.53169548885461'] & +pipe_outer_dic['13.151134403672323_-26.96382138897501'] & +pipe_outer_dic['16.77578710412241_-24.87112717665125'] & +pipe_outer_dic['20.073918190765735_-22.29434476432184'] & +pipe_outer_dic['22.981333293569335_-19.283628290596187'] & +pipe_outer_dic['25.44144288469278_-15.897577926996151'] & +pipe_outer_dic['27.40636372927803_-12.202099292274005'] & +pipe_outer_dic['28.837850878149567_-8.269120674509969'] & +pipe_outer_dic['29.708042062247106_-4.175193028801976'] core_region = -core print('Initial',core_region) for name in fuel_dic: core_region = core_region & +pipe_outer_dic[name] & +pipe_inner_dic[name] & +fuel_dic[name] void_region = +left & -right & +bottom & -top & +core #print(core_region) Core = openmc.Cell(name='Core') Core.fill = graphite Core.region = core_region void = openmc.Cell(name='void') void.fill = air void.region = void_region root_universe = openmc.Universe(cells=(Core,void)) for i in bodies: root_universe.add_cell(i) print((root_universe)) geometry = openmc.Geometry() geometry.root_universe = root_universe geometry.export_to_xml() ##Settings point = openmc.stats.Point((0, 0, 0)) source = openmc.Source(space=point) settings = openmc.Settings() settings.source = source settings.batches = 11 settings.inactive = 2 settings.particles = 100000 settings.export_to_xml() ##Tallies cell_filter = openmc.CellFilter(fuel) tally = openmc.Tally(1) tally.filters = [cell_filter] tally.nuclides = ['U233'] tally.scores = ['total', 'fission', 'absorption', '(n,gamma)'] cell_filter2 = openmc.CellFilter(Core) tally2 = openmc.Tally(2) tally2.filters=[cell_filter2] tally2.scores = ['flux'] tallies = openmc.Tallies([tally,tally2]) tallies.export_to_xml() #cell = openmc.Cell() #cell.region = void_region #universe = openmc.Universe() #universe.add_cell(cell) #plot = universe.plot(width=(2.0, 2.0)) #plot.write_png('plot.png') #print(dir(plot)) #print(type(universe)) #rect = [4,5,6,7] #fig = matplotlib.figure.Figure() #ax = matplotlib.axes.Axes(fig,rect) #ax.add_image(plot) #fig.savefig('plot.png') openmc.run() plot = openmc.Plot() plot.filename = 'reacplot' plot.width = (100, 100) plot.pixels = (800, 800) #plot.from_geometry(geometry) plot.color_by = 'material' plot.colors = {fuel_salt: 'yellow', graphite: 'black', pipe: 'blue',air: 'white'} overlap_color = 'red' plots = openmc.Plots([plot]) plots.export_to_xml() openmc.plot_geometry()
104.382653
7,793
0.784838
2,734
20,459
5.523043
0.150329
0.148874
0.112053
0.005563
0.777219
0.76404
0.165629
0.146026
0.134901
0.11404
0
0.448979
0.052544
20,459
195
7,794
104.917949
0.330014
0.813921
0
0
0
0
0.059638
0
0
0
0
0
0
1
0
false
0
0.025862
0
0.025862
0.017241
0
0
0
null
0
0
0
0
1
0
0
0
0
0
1
0
0
0
1
1
1
1
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
cb21c4ba8598f7562c8e68f7c72b877825a9f402
57
py
Python
spikeextractors/extractors/cedextractors/__init__.py
zekearneodo/spikeextractors
d30aa85e69d0331fffdb58a03a2bb628f93b405e
[ "MIT" ]
145
2018-12-06T23:12:54.000Z
2022-02-10T22:57:35.000Z
spikeextractors/extractors/cedextractors/__init__.py
zekearneodo/spikeextractors
d30aa85e69d0331fffdb58a03a2bb628f93b405e
[ "MIT" ]
396
2018-11-26T11:46:30.000Z
2022-01-04T07:27:47.000Z
spikeextractors/extractors/cedextractors/__init__.py
zekearneodo/spikeextractors
d30aa85e69d0331fffdb58a03a2bb628f93b405e
[ "MIT" ]
67
2018-11-19T12:38:01.000Z
2021-09-25T03:18:22.000Z
from .cedrecordingextractor import CEDRecordingExtractor
28.5
56
0.912281
4
57
13
0.75
0
0
0
0
0
0
0
0
0
0
0
0.070175
57
1
57
57
0.981132
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
cb4719668d6e0c639306885606133b5ff2819520
28
py
Python
src/wrappers/common/__init__.py
j0hnBlk/VTIL-Python
f3599e8d0f12f84b401c151b5b0a516adacc108a
[ "BSD-3-Clause" ]
20
2020-06-29T13:55:25.000Z
2022-02-02T08:48:19.000Z
src/wrappers/common/__init__.py
j0hnBlk/VTIL-Python
f3599e8d0f12f84b401c151b5b0a516adacc108a
[ "BSD-3-Clause" ]
2
2020-07-14T20:46:27.000Z
2020-07-14T20:58:01.000Z
src/wrappers/common/__init__.py
j0hnBlk/VTIL-Python
f3599e8d0f12f84b401c151b5b0a516adacc108a
[ "BSD-3-Clause" ]
6
2020-07-04T13:14:45.000Z
2022-01-17T22:48:15.000Z
from ..vtil.common import *
14
27
0.714286
4
28
5
1
0
0
0
0
0
0
0
0
0
0
0
0.142857
28
1
28
28
0.833333
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
cbc1a6097f1ae8aad9c7592ea9c35047b703ded7
3,776
py
Python
tests/charts-out/test_graphics_charts_axes_sample7c.py
debragail/reportlab-mirror
1e5814e1313ed50d5abb65487b207711cb4f7595
[ "BSD-3-Clause" ]
1
2020-05-21T23:34:55.000Z
2020-05-21T23:34:55.000Z
tests/charts-out/test_graphics_charts_axes_sample7c.py
debragail/reportlab-mirror
1e5814e1313ed50d5abb65487b207711cb4f7595
[ "BSD-3-Clause" ]
null
null
null
tests/charts-out/test_graphics_charts_axes_sample7c.py
debragail/reportlab-mirror
1e5814e1313ed50d5abb65487b207711cb4f7595
[ "BSD-3-Clause" ]
null
null
null
#Autogenerated by ReportLab guiedit do not edit from reportlab.graphics.shapes import _DrawingEditorMixin, Drawing, Group, Line, String from reportlab.lib.colors import Color, CMYKColor, PCMYKColor class ExplodedDrawing_Drawing(_DrawingEditorMixin,Drawing): def __init__(self,width=400,height=200,*args,**kw): Drawing.__init__(self,width,height,*args,**kw) self.transform = (1,0,0,1,0,0) self.add(Line(50,50,350,50,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None)) self.add(Line(50,50,50,45,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=10,strokeDashArray=None,strokeOpacity=None)) self.add(Line(143.75,50,143.75,45,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=10,strokeDashArray=None,strokeOpacity=None)) self.add(Line(237.5,50,237.5,45,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=10,strokeDashArray=None,strokeOpacity=None)) self.add(Line(331.25,50,331.25,45,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=10,strokeDashArray=None,strokeOpacity=None)) v0=self._nn(Group()) v0.transform = (1,0,0,1,50,45) v0.add(String(-5,-10,'10',textAnchor='start',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1))) v0=self._nn(Group()) v0.transform = (1,0,0,1,143.75,45) v0.add(String(-5,-10,'20',textAnchor='start',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1))) v0=self._nn(Group()) v0.transform = (1,0,0,1,237.5,45) v0.add(String(-5,-10,'30',textAnchor='start',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1))) v0=self._nn(Group()) v0.transform = (1,0,0,1,331.25,45) v0.add(String(-5,-10,'40',textAnchor='start',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1))) self.add(Line(237.5,50,237.5,175,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=0,strokeDashArray=None,strokeOpacity=None)) self.add(Line(237.5,50,232.5,50,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=10,strokeDashArray=None,strokeOpacity=None)) self.add(Line(237.5,81.25,232.5,81.25,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=10,strokeDashArray=None,strokeOpacity=None)) self.add(Line(237.5,112.5,232.5,112.5,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=10,strokeDashArray=None,strokeOpacity=None)) self.add(Line(237.5,143.75,232.5,143.75,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=10,strokeDashArray=None,strokeOpacity=None)) self.add(Line(237.5,175,232.5,175,strokeColor=Color(0,0,0,1),strokeWidth=1,strokeLineCap=0,strokeLineJoin=0,strokeMiterLimit=10,strokeDashArray=None,strokeOpacity=None)) v0=self._nn(Group()) v0.transform = (1,0,0,1,232.5,65.625) v0.add(String(-18.88,-4,'Beer',textAnchor='start',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1))) v0=self._nn(Group()) v0.transform = (1,0,0,1,232.5,96.875) v0.add(String(-21.66,-4,'Wine',textAnchor='start',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1))) v0=self._nn(Group()) v0.transform = (1,0,0,1,232.5,128.125) v0.add(String(-20.55,-4,'Meat',textAnchor='start',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1))) v0=self._nn(Group()) v0.transform = (1,0,0,1,232.5,159.375) 
v0.add(String(-43.89,-4,'Cannelloni',textAnchor='start',fontName='Times-Roman',fontSize=10,fillColor=Color(0,0,0,1)))

if __name__=="__main__": #NORUNTESTS
    ExplodedDrawing_Drawing().save(formats=['pdf'],outDir='.',fnRoot=None)
78.666667
177
0.756356
628
3,776
4.503185
0.157643
0.033946
0.029703
0.053748
0.798444
0.787129
0.764498
0.764498
0.756011
0.756011
0
0.127595
0.043167
3,776
47
178
80.340426
0.655134
0.014831
0
0.186047
1
0
0.045724
0
0
0
0
0
0
1
0.023256
false
0
0.046512
0
0.093023
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
cbc9993d02a08141581fd86ab1da7c0fa3aaf2cc
100
py
Python
data_helper/__init__.py
acmi-lab/PU_learning
a9174bda92c7411906056c789011cfa41749ee5f
[ "Apache-2.0" ]
18
2021-11-04T02:26:47.000Z
2022-03-15T04:41:18.000Z
data_helper/__init__.py
acmi-lab/PU_learning
a9174bda92c7411906056c789011cfa41749ee5f
[ "Apache-2.0" ]
null
null
null
data_helper/__init__.py
acmi-lab/PU_learning
a9174bda92c7411906056c789011cfa41749ee5f
[ "Apache-2.0" ]
1
2022-01-14T03:22:37.000Z
2022-01-14T03:22:37.000Z
from .CIFAR import *
from .MNIST import *
from .IMDb import *
from .toy import *
from .uci import *
20
21
0.7
15
100
4.666667
0.466667
0.571429
0
0
0
0
0
0
0
0
0
0
0.2
100
5
22
20
0.875
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
3844f91e4752950b5129c945ac658881d0f6ea4d
113
py
Python
simple_http/__init__.py
lyx003288/python_test
f6927b8182a1e8e608b3277a3fe033b856a2c47a
[ "MIT" ]
null
null
null
simple_http/__init__.py
lyx003288/python_test
f6927b8182a1e8e608b3277a3fe033b856a2c47a
[ "MIT" ]
null
null
null
simple_http/__init__.py
lyx003288/python_test
f6927b8182a1e8e608b3277a3fe033b856a2c47a
[ "MIT" ]
null
null
null
from simple_http_client import *
from simple_http_server import *

__all__ = ["request", "start_http_server"]
18.833333
44
0.769912
15
113
5.133333
0.6
0.25974
0.363636
0
0
0
0
0
0
0
0
0
0.141593
113
5
45
22.6
0.793814
0
0
0
0
0
0.214286
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
6
6983660e65f6088d8fe94f0653a1cf4d8d128425
96
py
Python
kink_module/kink_performer.py
SolemnMonk/KR
07eb9541993960ed401818bb41aa52cc296181ca
[ "MIT" ]
null
null
null
kink_module/kink_performer.py
SolemnMonk/KR
07eb9541993960ed401818bb41aa52cc296181ca
[ "MIT" ]
null
null
null
kink_module/kink_performer.py
SolemnMonk/KR
07eb9541993960ed401818bb41aa52cc296181ca
[ "MIT" ]
null
null
null
from . import kink_shoot


def rip(performer):
    print("kink_performer.rip(" + performer + ")")
24
50
0.6875
12
96
5.333333
0.666667
0.375
0
0
0
0
0
0
0
0
0
0
0.15625
96
4
50
24
0.790123
0
0
0
0
0
0.206186
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0
0.666667
0.333333
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
0
1
0
0
6
69885055604ecc86ceded185cf384c34ab9419c9
94,679
py
Python
migration/versions/19d5fbd43af3_.py
twocucao/tifa
f703fd27f54000e7d51f06d2456d09cc79e0ab72
[ "MIT" ]
71
2020-04-16T04:28:45.000Z
2022-03-31T22:45:11.000Z
migration/versions/19d5fbd43af3_.py
twocucao/tifa
f703fd27f54000e7d51f06d2456d09cc79e0ab72
[ "MIT" ]
6
2021-05-13T06:32:38.000Z
2022-03-04T01:18:34.000Z
migration/versions/19d5fbd43af3_.py
twocucao/tifa
f703fd27f54000e7d51f06d2456d09cc79e0ab72
[ "MIT" ]
12
2021-05-01T08:43:11.000Z
2022-03-29T00:58:54.000Z
"""empty message Revision ID: 19d5fbd43af3 Revises: Create Date: 2021-07-26 20:58:49.610386 """ from alembic import op import sqlalchemy as sa from sqlalchemy.dialects import postgresql # revision identifiers, used by Alembic. revision = '19d5fbd43af3' down_revision = None branch_labels = None depends_on = None def upgrade(): # ### commands auto generated by Alembic - please adjust! ### op.create_table('address', sa.Column('id', sa.Integer(), nullable=False), sa.Column('first_name', sa.String(length=256), nullable=False), sa.Column('last_name', sa.String(length=256), nullable=False), sa.Column('company_name', sa.String(length=256), nullable=False), sa.Column('street_address_1', sa.String(length=256), nullable=False), sa.Column('street_address_2', sa.String(length=256), nullable=False), sa.Column('city', sa.String(length=256), nullable=False), sa.Column('postal_code', sa.String(length=20), nullable=False), sa.Column('country', sa.String(length=2), nullable=False), sa.Column('country_area', sa.String(length=128), nullable=False), sa.Column('phone', sa.String(length=128), nullable=False), sa.Column('city_area', sa.String(length=128), nullable=False), sa.PrimaryKeyConstraint('id') ) op.create_table('app', sa.Column('id', sa.Integer(), nullable=False), sa.Column('metadata_private', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('metadata_public', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('name', sa.String(length=60), nullable=False), sa.Column('created', sa.DateTime(), nullable=False), sa.Column('is_active', sa.Boolean(), nullable=False), sa.Column('about_app', sa.Text(), nullable=True), sa.Column('app_url', sa.String(length=200), nullable=True), sa.Column('configuration_url', sa.String(length=200), nullable=True), sa.Column('data_privacy', sa.Text(), nullable=True), sa.Column('data_privacy_url', sa.String(length=200), nullable=True), sa.Column('homepage_url', sa.String(length=200), nullable=True), sa.Column('identifier', sa.String(length=256), nullable=True), sa.Column('support_url', sa.String(length=200), nullable=True), sa.Column('type', sa.String(length=60), nullable=False), sa.Column('version', sa.String(length=60), nullable=True), sa.PrimaryKeyConstraint('id') ) op.create_index(op.f('ix_app_metadata_private'), 'app', ['metadata_private'], unique=False) op.create_index(op.f('ix_app_metadata_public'), 'app', ['metadata_public'], unique=False) op.create_table('app_installation', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('id', sa.Integer(), nullable=False), sa.Column('status', sa.String(length=50), nullable=False), sa.Column('message', sa.String(length=255), nullable=True), sa.Column('app_name', sa.String(length=60), nullable=False), sa.Column('manifest_url', sa.String(length=200), nullable=False), sa.PrimaryKeyConstraint('id') ) op.create_table('attribute', sa.Column('id', sa.Integer(), nullable=False), sa.Column('slug', sa.String(length=250), nullable=False), sa.Column('name', sa.String(length=255), nullable=False), sa.Column('metadata_public', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('metadata_private', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('input_type', sa.String(length=50), nullable=False), sa.Column('available_in_grid', sa.Boolean(), nullable=False), sa.Column('visible_in_storefront', sa.Boolean(), nullable=False), sa.Column('filterable_in_dashboard', sa.Boolean(), nullable=False), 
sa.Column('filterable_in_storefront', sa.Boolean(), nullable=False), sa.Column('value_required', sa.Boolean(), nullable=False), sa.Column('storefront_search_position', sa.Integer(), nullable=False), sa.Column('is_variant_only', sa.Boolean(), nullable=False), sa.Column('type', sa.String(length=50), nullable=False), sa.Column('entity_type', sa.String(length=50), nullable=True), sa.Column('unit', sa.String(length=100), nullable=True), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('slug') ) op.create_index(op.f('ix_attribute_metadata_private'), 'attribute', ['metadata_private'], unique=False) op.create_index(op.f('ix_attribute_metadata_public'), 'attribute', ['metadata_public'], unique=False) op.create_table('channel', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('id', sa.Integer(), nullable=False), sa.Column('name', sa.String(length=250), nullable=False), sa.Column('slug', sa.String(length=255), nullable=False), sa.Column('is_active', sa.Boolean(), nullable=False), sa.Column('currency_code', sa.String(length=3), nullable=False), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('slug') ) op.create_table('discount_sale', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('id', sa.Integer(), nullable=False), sa.Column('name', sa.String(length=255), nullable=False), sa.Column('type', sa.String(length=10), nullable=False), sa.Column('end_date', sa.DateTime(), nullable=True), sa.Column('start_date', sa.DateTime(), nullable=False), sa.PrimaryKeyConstraint('id') ) op.create_table('discount_voucher', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('id', sa.Integer(), nullable=False), sa.Column('type', sa.String(length=20), nullable=False), sa.Column('name', sa.String(length=255), nullable=True), sa.Column('code', sa.String(length=12), nullable=False), sa.Column('usage_limit', sa.Integer(), nullable=True), sa.Column('used', sa.Integer(), nullable=False), sa.Column('start_date', sa.DateTime(), nullable=False), sa.Column('end_date', sa.DateTime(), nullable=True), sa.Column('discount_value_type', sa.String(length=10), nullable=False), sa.Column('apply_once_per_order', sa.Boolean(), nullable=False), sa.Column('countries', sa.String(length=749), nullable=False), sa.Column('min_checkout_items_quantity', sa.Integer(), nullable=True), sa.Column('apply_once_per_customer', sa.Boolean(), nullable=False), sa.Column('only_for_staff', sa.Boolean(), nullable=False), sa.CheckConstraint('min_checkout_items_quantity >= 0'), sa.CheckConstraint('usage_limit >= 0'), sa.CheckConstraint('used >= 0'), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('code') ) op.create_table('django_prices_openexchangerates_conversionrate', sa.Column('id', sa.Integer(), nullable=False), sa.Column('to_currency', sa.String(length=3), nullable=False), sa.Column('rate', sa.Numeric(precision=20, scale=12), nullable=False), sa.Column('modified_at', sa.DateTime(), nullable=False), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('to_currency') ) op.create_table('django_prices_vatlayer_ratetypes', sa.Column('id', sa.Integer(), nullable=False), sa.Column('types', sa.Text(), nullable=False), sa.PrimaryKeyConstraint('id') ) op.create_table('django_prices_vatlayer_vat', sa.Column('id', sa.Integer(), nullable=False), sa.Column('country_code', sa.String(length=2), nullable=False), sa.Column('data', sa.Text(), nullable=False), 
sa.PrimaryKeyConstraint('id') ) op.create_index(op.f('ix_django_prices_vatlayer_vat_country_code'), 'django_prices_vatlayer_vat', ['country_code'], unique=False) op.create_table('menu', sa.Column('id', sa.Integer(), nullable=False), sa.Column('name', sa.String(length=250), nullable=False), sa.Column('slug', sa.String(length=255), nullable=False), sa.Column('metadata_public', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('metadata_private', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('slug') ) op.create_index(op.f('ix_menu_metadata_private'), 'menu', ['metadata_private'], unique=False) op.create_index(op.f('ix_menu_metadata_public'), 'menu', ['metadata_public'], unique=False) op.create_table('page_type', sa.Column('id', sa.Integer(), nullable=False), sa.Column('metadata_public', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('metadata_private', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('name', sa.String(length=250), nullable=False), sa.Column('slug', sa.String(length=255), nullable=False), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('slug') ) op.create_index(op.f('ix_page_type_metadata_private'), 'page_type', ['metadata_private'], unique=False) op.create_index(op.f('ix_page_type_metadata_public'), 'page_type', ['metadata_public'], unique=False) op.create_index('page_type_name_slug', 'page_type', ['name', 'slug'], unique=False) op.create_table('permission', sa.Column('id', sa.Integer(), nullable=False), sa.PrimaryKeyConstraint('id') ) op.create_table('product_category', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('id', sa.Integer(), nullable=False), sa.Column('parent_id', sa.Integer(), nullable=True), sa.Column('name', sa.String(length=250), nullable=False), sa.Column('slug', sa.String(length=255), nullable=False), sa.Column('description', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('lft', sa.Integer(), nullable=False), sa.Column('rght', sa.Integer(), nullable=False), sa.Column('tree_id', sa.Integer(), nullable=False), sa.Column('level', sa.Integer(), nullable=False), sa.Column('background_image', sa.String(length=100), nullable=True), sa.Column('seo_description', sa.String(length=300), nullable=True), sa.Column('seo_title', sa.String(length=70), nullable=True), sa.Column('background_image_alt', sa.String(length=128), nullable=False), sa.Column('metadata_public', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('metadata_private', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.CheckConstraint('level >= 0'), sa.CheckConstraint('lft >= 0'), sa.CheckConstraint('rght >= 0'), sa.CheckConstraint('tree_id >= 0'), sa.ForeignKeyConstraint(['parent_id'], ['product_category.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('slug') ) op.create_index(op.f('ix_product_category_metadata_private'), 'product_category', ['metadata_private'], unique=False) op.create_index(op.f('ix_product_category_metadata_public'), 'product_category', ['metadata_public'], unique=False) op.create_index(op.f('ix_product_category_tree_id'), 'product_category', ['tree_id'], unique=False) op.create_table('product_collection', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('id', sa.Integer(), nullable=False), sa.Column('name', sa.String(length=250), nullable=False), sa.Column('slug', 
sa.String(length=255), nullable=False), sa.Column('background_image', sa.String(length=100), nullable=True), sa.Column('seo_description', sa.String(length=300), nullable=True), sa.Column('seo_title', sa.String(length=70), nullable=True), sa.Column('description', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('background_image_alt', sa.String(length=128), nullable=False), sa.Column('metadata_public', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('metadata_private', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('name'), sa.UniqueConstraint('slug') ) op.create_index(op.f('ix_product_collection_metadata_private'), 'product_collection', ['metadata_private'], unique=False) op.create_index(op.f('ix_product_collection_metadata_public'), 'product_collection', ['metadata_public'], unique=False) op.create_table('product_type', sa.Column('id', sa.Integer(), nullable=False), sa.Column('name', sa.String(length=250), nullable=False), sa.Column('has_variants', sa.Boolean(), nullable=False), sa.Column('is_shipping_required', sa.Boolean(), nullable=False), sa.Column('weight', sa.Float(precision=53), nullable=False), sa.Column('is_digital', sa.Boolean(), nullable=False), sa.Column('metadata_public', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('metadata_private', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('slug', sa.String(length=255), nullable=False), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('slug') ) op.create_index(op.f('ix_product_type_metadata_private'), 'product_type', ['metadata_private'], unique=False) op.create_index(op.f('ix_product_type_metadata_public'), 'product_type', ['metadata_public'], unique=False) op.create_table('shipping_zone', sa.Column('id', sa.Integer(), nullable=False), sa.Column('name', sa.String(length=100), nullable=False), sa.Column('countries', sa.String(length=749), nullable=False), sa.Column('default', sa.Boolean(), nullable=False), sa.Column('metadata_public', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('metadata_private', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('description', sa.Text(), nullable=False), sa.PrimaryKeyConstraint('id') ) op.create_index(op.f('ix_shipping_zone_metadata_private'), 'shipping_zone', ['metadata_private'], unique=False) op.create_index(op.f('ix_shipping_zone_metadata_public'), 'shipping_zone', ['metadata_public'], unique=False) op.create_table('staff', sa.Column('id', sa.Integer(), nullable=False), sa.Column('name', sa.String(length=255), nullable=True), sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('name') ) op.create_table('app_installation_permissions', sa.Column('id', sa.Integer(), nullable=False), sa.Column('app_installation_id', sa.Integer(), nullable=False), sa.Column('permission_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['app_installation_id'], ['app_installation.id'], ), sa.ForeignKeyConstraint(['permission_id'], ['permission.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('app_installation_id', 'permission_id') ) op.create_index(op.f('ix_app_installation_permissions_app_installation_id'), 'app_installation_permissions', ['app_installation_id'], unique=False) op.create_table('app_permission', sa.Column('id', sa.Integer(), nullable=False), sa.Column('app_id', sa.Integer(), nullable=False), 
sa.Column('permission_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['app_id'], ['app.id'], ), sa.ForeignKeyConstraint(['permission_id'], ['permission.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('app_id', 'permission_id') ) op.create_table('app_token', sa.Column('id', sa.Integer(), nullable=False), sa.Column('name', sa.String(length=128), nullable=False), sa.Column('auth_token', sa.String(length=30), nullable=False), sa.Column('app_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['app_id'], ['app.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('auth_token') ) op.create_table('attribute_page', sa.Column('id', sa.Integer(), nullable=False), sa.Column('sort_order', sa.Integer(), nullable=True), sa.Column('attribute_id', sa.Integer(), nullable=False), sa.Column('page_type_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['attribute_id'], ['attribute.id'], ), sa.ForeignKeyConstraint(['page_type_id'], ['page_type.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('attribute_id', 'page_type_id') ) op.create_index(op.f('ix_attribute_page_sort_order'), 'attribute_page', ['sort_order'], unique=False) op.create_table('attribute_product', sa.Column('id', sa.Integer(), nullable=False), sa.Column('attribute_id', sa.Integer(), nullable=False), sa.Column('product_type_id', sa.Integer(), nullable=False), sa.Column('sort_order', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['attribute_id'], ['attribute.id'], ), sa.ForeignKeyConstraint(['product_type_id'], ['product_type.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('attribute_id', 'product_type_id') ) op.create_index(op.f('ix_attribute_product_sort_order'), 'attribute_product', ['sort_order'], unique=False) op.create_table('attribute_translation', sa.Column('id', sa.Integer(), nullable=False), sa.Column('language_code', sa.String(length=10), nullable=False), sa.Column('name', sa.String(length=100), nullable=False), sa.Column('attribute_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['attribute_id'], ['attribute.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('language_code', 'attribute_id') ) op.create_index(op.f('ix_attribute_translation_attribute_id'), 'attribute_translation', ['attribute_id'], unique=False) op.create_table('attribute_value', sa.Column('id', sa.Integer(), nullable=False), sa.Column('attribute_id', sa.Integer(), nullable=False), sa.Column('name', sa.String(length=250), nullable=False), sa.Column('slug', sa.String(length=255), nullable=False), sa.Column('sort_order', sa.Integer(), nullable=True), sa.Column('value', sa.String(length=100), nullable=False), sa.Column('content_type', sa.String(length=50), nullable=True), sa.Column('file_url', sa.String(length=200), nullable=True), sa.Column('rich_text', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('boolean', sa.Boolean(), nullable=True), sa.ForeignKeyConstraint(['attribute_id'], ['attribute.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('slug', 'attribute_id') ) op.create_index('idx_attribute_value_name_slug', 'attribute_value', ['name', 'slug'], unique=False) op.create_index(op.f('ix_attribute_value_slug'), 'attribute_value', ['slug'], unique=False) op.create_index(op.f('ix_attribute_value_sort_order'), 'attribute_value', ['sort_order'], unique=False) op.create_table('attribute_variant', sa.Column('id', sa.Integer(), nullable=False), sa.Column('attribute_id', sa.Integer(), nullable=False), sa.Column('product_type_id', sa.Integer(), 
nullable=False), sa.Column('sort_order', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['attribute_id'], ['attribute.id'], ), sa.ForeignKeyConstraint(['product_type_id'], ['product_type.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('attribute_id', 'product_type_id') ) op.create_index(op.f('ix_attribute_variant_sort_order'), 'attribute_variant', ['sort_order'], unique=False) op.create_table('discount_sale_category', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('id', sa.Integer(), nullable=False), sa.Column('sale_id', sa.Integer(), nullable=False), sa.Column('category_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['category_id'], ['product_category.id'], ), sa.ForeignKeyConstraint(['sale_id'], ['discount_sale.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('sale_id', 'category_id') ) op.create_table('discount_sale_channel_listing', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('id', sa.Integer(), nullable=False), sa.Column('discount_value', sa.Numeric(precision=12, scale=3), nullable=False), sa.Column('currency', sa.String(length=3), nullable=False), sa.Column('channel_id', sa.Integer(), nullable=False), sa.Column('sale_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['channel_id'], ['channel.id'], ), sa.ForeignKeyConstraint(['sale_id'], ['discount_sale.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('sale_id', 'channel_id') ) op.create_index(op.f('ix_discount_sale_channel_listing_channel_id'), 'discount_sale_channel_listing', ['channel_id'], unique=False) op.create_index(op.f('ix_discount_sale_channel_listing_sale_id'), 'discount_sale_channel_listing', ['sale_id'], unique=False) op.create_table('discount_sale_collection', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('id', sa.Integer(), nullable=False), sa.Column('sale_id', sa.Integer(), nullable=False), sa.Column('collection_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['collection_id'], ['product_collection.id'], ), sa.ForeignKeyConstraint(['sale_id'], ['discount_sale.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('sale_id', 'collection_id') ) op.create_table('discount_sale_translation', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('id', sa.Integer(), nullable=False), sa.Column('language_code', sa.String(length=10), nullable=False), sa.Column('name', sa.String(length=255), nullable=True), sa.Column('sale_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['sale_id'], ['discount_sale.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('language_code', 'sale_id') ) op.create_index(op.f('ix_discount_sale_translation_sale_id'), 'discount_sale_translation', ['sale_id'], unique=False) op.create_table('discount_voucher_category', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('id', sa.Integer(), nullable=False), sa.Column('voucher_id', sa.Integer(), nullable=False), sa.Column('category_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['category_id'], ['product_category.id'], ), sa.ForeignKeyConstraint(['voucher_id'], ['discount_voucher.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('voucher_id', 'category_id') ) 
op.create_table('discount_voucher_channel_listing', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('id', sa.Integer(), nullable=False), sa.Column('discount_value', sa.Numeric(precision=12, scale=3), nullable=False), sa.Column('currency', sa.String(length=3), nullable=False), sa.Column('min_spent_amount', sa.Numeric(precision=12, scale=3), nullable=True), sa.Column('channel_id', sa.Integer(), nullable=False), sa.Column('voucher_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['channel_id'], ['channel.id'], ), sa.ForeignKeyConstraint(['voucher_id'], ['discount_voucher.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('voucher_id', 'channel_id') ) op.create_table('discount_voucher_collection', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('id', sa.Integer(), nullable=False), sa.Column('voucher_id', sa.Integer(), nullable=False), sa.Column('collection_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['collection_id'], ['product_collection.id'], ), sa.ForeignKeyConstraint(['voucher_id'], ['discount_voucher.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('voucher_id', 'collection_id') ) op.create_table('discount_voucher_customer', sa.Column('id', sa.Integer(), nullable=False), sa.Column('customer_email', sa.String(length=254), nullable=False), sa.Column('voucher_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['voucher_id'], ['discount_voucher.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('voucher_id', 'customer_email') ) op.create_table('discount_voucher_translation', sa.Column('id', sa.Integer(), nullable=False), sa.Column('language_code', sa.String(length=10), nullable=False), sa.Column('name', sa.String(length=255), nullable=True), sa.Column('voucher_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['voucher_id'], ['discount_voucher.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('language_code', 'voucher_id') ) op.create_table('page', sa.Column('id', sa.Integer(), nullable=False), sa.Column('slug', sa.String(length=255), nullable=False), sa.Column('title', sa.String(length=250), nullable=False), sa.Column('content', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('created', sa.DateTime(), nullable=False), sa.Column('is_published', sa.Boolean(), nullable=False), sa.Column('publication_date', sa.Date(), nullable=True), sa.Column('seo_description', sa.String(length=300), nullable=True), sa.Column('seo_title', sa.String(length=70), nullable=True), sa.Column('metadata_public', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('metadata_private', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('page_type_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['page_type_id'], ['page_type.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('slug') ) op.create_index(op.f('ix_page_metadata_private'), 'page', ['metadata_private'], unique=False) op.create_index(op.f('ix_page_metadata_public'), 'page', ['metadata_public'], unique=False) op.create_index('page_title_slug', 'page', ['title', 'slug'], unique=False) op.create_table('plugin_configuration', sa.Column('id', sa.Integer(), nullable=False), sa.Column('name', sa.String(length=128), nullable=False), sa.Column('description', sa.Text(), nullable=False), sa.Column('active', sa.Boolean(), nullable=False), sa.Column('configuration', 
postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('identifier', sa.String(length=128), nullable=False), sa.Column('channel_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['channel_id'], ['channel.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('identifier', 'channel_id') ) op.create_table('product', sa.Column('id', sa.Integer(), nullable=False), sa.Column('name', sa.String(length=250), nullable=False), sa.Column('description', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('product_type_id', sa.Integer(), nullable=False), sa.Column('category_id', sa.Integer(), nullable=True), sa.Column('seo_description', sa.String(length=300), nullable=True), sa.Column('seo_title', sa.String(length=70), nullable=True), sa.Column('charge_taxes', sa.Boolean(), nullable=False), sa.Column('weight', sa.Float(precision=53), nullable=True), sa.Column('metadata_public', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('metadata_private', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('slug', sa.String(length=255), nullable=False), sa.Column('default_variant_id', sa.Integer(), nullable=True), sa.Column('description_plaintext', sa.Text(), nullable=False), sa.Column('search_vector', postgresql.TSVECTOR(), nullable=True), sa.Column('rating', sa.Float(precision=53), nullable=True), sa.ForeignKeyConstraint(['category_id'], ['product_category.id'], ), sa.ForeignKeyConstraint(['default_variant_id'], ['product_variant.id'], name='fk_product_default_variant_id', use_alter=True), sa.ForeignKeyConstraint(['product_type_id'], ['product_type.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('default_variant_id'), sa.UniqueConstraint('slug') ) op.create_index(op.f('ix_product_metadata_private'), 'product', ['metadata_private'], unique=False) op.create_index(op.f('ix_product_metadata_public'), 'product', ['metadata_public'], unique=False) op.create_index(op.f('ix_product_search_vector'), 'product', ['search_vector'], unique=False) op.create_table('product_category_translation', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('id', sa.Integer(), nullable=False), sa.Column('seo_title', sa.String(length=70), nullable=True), sa.Column('seo_description', sa.String(length=300), nullable=True), sa.Column('language_code', sa.String(length=10), nullable=False), sa.Column('name', sa.String(length=128), nullable=True), sa.Column('description', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('category_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['category_id'], ['product_category.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('language_code', 'category_id') ) op.create_table('product_collection_channel_listing', sa.Column('id', sa.Integer(), nullable=False), sa.Column('publication_date', sa.Date(), nullable=True), sa.Column('is_published', sa.Boolean(), nullable=False), sa.Column('channel_id', sa.Integer(), nullable=False), sa.Column('collection_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['channel_id'], ['channel.id'], ), sa.ForeignKeyConstraint(['collection_id'], ['product_collection.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('collection_id', 'channel_id') ) op.create_table('product_collection_translation', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), 
sa.Column('id', sa.Integer(), nullable=False), sa.Column('seo_title', sa.String(length=70), nullable=True), sa.Column('seo_description', sa.String(length=300), nullable=True), sa.Column('language_code', sa.String(length=10), nullable=False), sa.Column('name', sa.String(length=128), nullable=True), sa.Column('collection_id', sa.Integer(), nullable=False), sa.Column('description', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.ForeignKeyConstraint(['collection_id'], ['product_collection.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('language_code', 'collection_id') ) op.create_table('shipping_method', sa.Column('id', sa.Integer(), nullable=False), sa.Column('name', sa.String(length=100), nullable=False), sa.Column('maximum_order_weight', sa.Float(precision=53), nullable=True), sa.Column('minimum_order_weight', sa.Float(precision=53), nullable=True), sa.Column('type', sa.String(length=30), nullable=False), sa.Column('shipping_zone_id', sa.Integer(), nullable=False), sa.Column('metadata_public', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('metadata_private', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('maximum_delivery_days', sa.Integer(), nullable=True), sa.Column('minimum_delivery_days', sa.Integer(), nullable=True), sa.Column('description', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.CheckConstraint('maximum_delivery_days >= 0'), sa.CheckConstraint('minimum_delivery_days >= 0'), sa.ForeignKeyConstraint(['shipping_zone_id'], ['shipping_zone.id'], ), sa.PrimaryKeyConstraint('id') ) op.create_index(op.f('ix_shipping_method_metadata_private'), 'shipping_method', ['metadata_private'], unique=False) op.create_index(op.f('ix_shipping_method_metadata_public'), 'shipping_method', ['metadata_public'], unique=False) op.create_table('shipping_zone_channel', sa.Column('id', sa.Integer(), nullable=False), sa.Column('shipping_zone_id', sa.Integer(), nullable=False), sa.Column('channel_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['channel_id'], ['channel.id'], ), sa.ForeignKeyConstraint(['shipping_zone_id'], ['shipping_zone.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('shipping_zone_id', 'channel_id') ) op.create_table('site_setting', sa.Column('id', sa.Integer(), nullable=False), sa.Column('header_text', sa.String(length=200), nullable=False), sa.Column('description', sa.String(length=500), nullable=False), sa.Column('bottom_menu_id', sa.Integer(), nullable=True), sa.Column('top_menu_id', sa.Integer(), nullable=True), sa.Column('display_gross_prices', sa.Boolean(), nullable=False), sa.Column('include_taxes_in_prices', sa.Boolean(), nullable=False), sa.Column('charge_taxes_on_shipping', sa.Boolean(), nullable=False), sa.Column('track_inventory_by_default', sa.Boolean(), nullable=False), sa.Column('default_weight_unit', sa.String(length=30), nullable=False), sa.Column('automatic_fulfillment_digital_products', sa.Boolean(), nullable=False), sa.Column('default_digital_max_downloads', sa.Integer(), nullable=True), sa.Column('default_digital_url_valid_days', sa.Integer(), nullable=True), sa.Column('company_address_id', sa.Integer(), nullable=True), sa.Column('default_mail_sender_address', sa.String(length=254), nullable=True), sa.Column('default_mail_sender_name', sa.String(length=78), nullable=False), sa.Column('customer_set_password_url', sa.String(length=255), nullable=True), sa.Column('automatically_confirm_all_new_orders', sa.Boolean(), nullable=False), 
sa.ForeignKeyConstraint(['bottom_menu_id'], ['menu.id'], ), sa.ForeignKeyConstraint(['company_address_id'], ['address.id'], ), sa.ForeignKeyConstraint(['top_menu_id'], ['menu.id'], ), sa.PrimaryKeyConstraint('id') ) op.create_index(op.f('ix_site_setting_bottom_menu_id'), 'site_setting', ['bottom_menu_id'], unique=False) op.create_index(op.f('ix_site_setting_top_menu_id'), 'site_setting', ['top_menu_id'], unique=False) op.create_table('staff_notification_recipient', sa.Column('id', sa.Integer(), nullable=False), sa.Column('is_active', sa.Boolean(), nullable=False), sa.Column('staff_email', sa.String(length=254), nullable=True), sa.Column('staff_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['staff_id'], ['staff.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('staff_email'), sa.UniqueConstraint('staff_id') ) op.create_table('user', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('id', sa.Integer(), nullable=False), sa.Column('email', sa.String(length=254), nullable=False), sa.Column('is_active', sa.Boolean(), nullable=True), sa.Column('password', sa.String(length=128), nullable=False), sa.Column('last_login_at', sa.DateTime(), nullable=True), sa.Column('default_billing_address_id', sa.Integer(), nullable=True), sa.Column('default_shipping_address_id', sa.Integer(), nullable=True), sa.Column('note', sa.Text(), nullable=True), sa.Column('first_name', sa.String(length=256), nullable=False), sa.Column('last_name', sa.String(length=256), nullable=False), sa.Column('avatar', sa.String(length=100), nullable=True), sa.Column('metadata_private', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('metadata_public', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('jwt_token_key', sa.String(length=12), nullable=False), sa.Column('language_code', sa.String(length=35), nullable=False), sa.ForeignKeyConstraint(['default_billing_address_id'], ['address.id'], ), sa.ForeignKeyConstraint(['default_shipping_address_id'], ['address.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('email') ) op.create_index('account_username_email', 'user', ['email', 'first_name', 'last_name'], unique=False) op.create_index(op.f('ix_user_metadata_private'), 'user', ['metadata_private'], unique=False) op.create_index(op.f('ix_user_metadata_public'), 'user', ['metadata_public'], unique=False) op.create_table('warehouse', sa.Column('id', postgresql.UUID(), nullable=False), sa.Column('name', sa.String(length=250), nullable=False), sa.Column('email', sa.String(length=254), nullable=False), sa.Column('address_id', sa.Integer(), nullable=False), sa.Column('slug', sa.String(length=255), nullable=False), sa.Column('metadata_public', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('metadata_private', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.ForeignKeyConstraint(['address_id'], ['address.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('slug') ) op.create_index(op.f('ix_warehouse_metadata_private'), 'warehouse', ['metadata_private'], unique=False) op.create_index(op.f('ix_warehouse_metadata_public'), 'warehouse', ['metadata_public'], unique=False) op.create_table('webhook', sa.Column('id', sa.Integer(), nullable=False), sa.Column('target_url', sa.String(length=255), nullable=False), sa.Column('is_active', sa.Boolean(), nullable=False), sa.Column('secret_key', sa.String(length=255), nullable=True), sa.Column('app_id', sa.Integer(), nullable=False), 
sa.Column('name', sa.String(length=255), nullable=True), sa.ForeignKeyConstraint(['app_id'], ['app.id'], ), sa.PrimaryKeyConstraint('id') ) op.create_table('assigned_page_attribute', sa.Column('id', sa.Integer(), nullable=False), sa.Column('assignment_id', sa.Integer(), nullable=False), sa.Column('page_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['assignment_id'], ['attribute_page.id'], ), sa.ForeignKeyConstraint(['page_id'], ['page.id'], ), sa.PrimaryKeyConstraint('id') ) op.create_table('assigned_product_attribute', sa.Column('id', sa.Integer(), nullable=False), sa.Column('product_id', sa.Integer(), nullable=False), sa.Column('assignment_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['assignment_id'], ['attribute_product.id'], ), sa.ForeignKeyConstraint(['product_id'], ['product.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('product_id', 'assignment_id') ) op.create_table('attribute_value_translation', sa.Column('id', sa.Integer(), nullable=False), sa.Column('language_code', sa.String(length=10), nullable=False), sa.Column('name', sa.String(length=100), nullable=False), sa.Column('attribute_value_id', sa.Integer(), nullable=False), sa.Column('rich_text', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.ForeignKeyConstraint(['attribute_value_id'], ['attribute_value.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('language_code', 'attribute_value_id') ) op.create_table('checkout', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('id', postgresql.UUID(), nullable=False), sa.Column('created', sa.DateTime(), nullable=False), sa.Column('last_change', sa.DateTime(), nullable=False), sa.Column('email', sa.String(length=254), nullable=False), sa.Column('user_id', sa.Integer(), nullable=True), sa.Column('billing_address_id', sa.Integer(), nullable=True), sa.Column('channel_id', sa.Integer(), nullable=False), sa.Column('shipping_method_id', sa.Integer(), nullable=True), sa.Column('discount_amount', sa.Numeric(precision=12, scale=3), nullable=False), sa.Column('discount_name', sa.String(length=255), nullable=True), sa.Column('note', sa.Text(), nullable=False), sa.Column('shipping_address_id', sa.Integer(), nullable=True), sa.Column('voucher_code', sa.String(length=12), nullable=True), sa.Column('translated_discount_name', sa.String(length=255), nullable=True), sa.Column('metadata_public', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('metadata_private', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('currency', sa.String(length=3), nullable=False), sa.Column('country', sa.String(length=2), nullable=False), sa.Column('redirect_url', sa.String(length=200), nullable=True), sa.Column('tracking_code', sa.String(length=255), nullable=True), sa.Column('language_code', sa.String(length=35), nullable=False), sa.ForeignKeyConstraint(['billing_address_id'], ['address.id'], ), sa.ForeignKeyConstraint(['channel_id'], ['channel.id'], ), sa.ForeignKeyConstraint(['shipping_address_id'], ['address.id'], ), sa.ForeignKeyConstraint(['shipping_method_id'], ['shipping_method.id'], ), sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), sa.PrimaryKeyConstraint('id') ) op.create_index(op.f('ix_checkout_metadata_private'), 'checkout', ['metadata_private'], unique=False) op.create_index(op.f('ix_checkout_metadata_public'), 'checkout', ['metadata_public'], unique=False) op.create_table('collection_product', sa.Column('id', sa.Integer(), 
nullable=False), sa.Column('collection_id', sa.Integer(), nullable=False), sa.Column('product_id', sa.Integer(), nullable=False), sa.Column('sort_order', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['collection_id'], ['product_collection.id'], ), sa.ForeignKeyConstraint(['product_id'], ['product.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('collection_id', 'product_id') ) op.create_index(op.f('ix_collection_product_sort_order'), 'collection_product', ['sort_order'], unique=False) op.create_table('csv_export_file', sa.Column('id', sa.Integer(), nullable=False), sa.Column('status', sa.String(length=50), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('content_file', sa.String(length=100), nullable=True), sa.Column('app_id', sa.Integer(), nullable=True), sa.Column('user_id', sa.Integer(), nullable=True), sa.Column('message', sa.String(length=255), nullable=True), sa.ForeignKeyConstraint(['app_id'], ['app.id'], ), sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), sa.PrimaryKeyConstraint('id') ) op.create_index(op.f('ix_csv_export_file_app_id'), 'csv_export_file', ['app_id'], unique=False) op.create_table('customer_note', sa.Column('id', sa.Integer(), nullable=False), sa.Column('date', sa.DateTime(), nullable=False), sa.Column('content', sa.Text(), nullable=False), sa.Column('is_public', sa.Boolean(), nullable=False), sa.Column('user_id', sa.Integer(), nullable=False), sa.Column('staff_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['staff_id'], ['staff.id'], ), sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), sa.PrimaryKeyConstraint('id') ) op.create_index(op.f('ix_customer_note_date'), 'customer_note', ['date'], unique=False) op.create_table('discount_sale_product', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('id', sa.Integer(), nullable=False), sa.Column('sale_id', sa.Integer(), nullable=False), sa.Column('product_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['product_id'], ['product.id'], ), sa.ForeignKeyConstraint(['sale_id'], ['discount_sale.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('sale_id', 'product_id') ) op.create_table('discount_voucher_product', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('id', sa.Integer(), nullable=False), sa.Column('voucher_id', sa.Integer(), nullable=False), sa.Column('product_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['product_id'], ['product.id'], ), sa.ForeignKeyConstraint(['voucher_id'], ['discount_voucher.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('voucher_id', 'product_id') ) op.create_index(op.f('ix_discount_voucher_product_product_id'), 'discount_voucher_product', ['product_id'], unique=False) op.create_index(op.f('ix_discount_voucher_product_voucher_id'), 'discount_voucher_product', ['voucher_id'], unique=False) op.create_table('gift_card', sa.Column('id', sa.Integer(), nullable=False), sa.Column('code', sa.String(length=16), nullable=False), sa.Column('created', sa.DateTime(), nullable=False), sa.Column('start_date', sa.Date(), nullable=False), sa.Column('end_date', sa.Date(), nullable=True), sa.Column('last_used_on', sa.DateTime(), nullable=True), sa.Column('is_active', sa.Boolean(), nullable=False), sa.Column('initial_balance_amount', sa.Numeric(precision=12, scale=3), nullable=False), 
sa.Column('current_balance_amount', sa.Numeric(precision=12, scale=3), nullable=False), sa.Column('user_id', sa.Integer(), nullable=True), sa.Column('currency', sa.String(length=3), nullable=False), sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('code') ) op.create_table('menu_item', sa.Column('id', sa.Integer(), nullable=False), sa.Column('name', sa.String(length=128), nullable=False), sa.Column('sort_order', sa.Integer(), nullable=True), sa.Column('url', sa.String(length=256), nullable=True), sa.Column('lft', sa.Integer(), nullable=False), sa.Column('rght', sa.Integer(), nullable=False), sa.Column('tree_id', sa.Integer(), nullable=False), sa.Column('level', sa.Integer(), nullable=False), sa.Column('category_id', sa.Integer(), nullable=True), sa.Column('collection_id', sa.Integer(), nullable=True), sa.Column('menu_id', sa.Integer(), nullable=False), sa.Column('page_id', sa.Integer(), nullable=True), sa.Column('parent_id', sa.Integer(), nullable=True), sa.Column('metadata_public', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('metadata_private', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.CheckConstraint('level >= 0'), sa.CheckConstraint('lft >= 0'), sa.CheckConstraint('rght >= 0'), sa.CheckConstraint('tree_id >= 0'), sa.ForeignKeyConstraint(['category_id'], ['product_category.id'], ), sa.ForeignKeyConstraint(['collection_id'], ['product_collection.id'], ), sa.ForeignKeyConstraint(['menu_id'], ['menu.id'], ), sa.ForeignKeyConstraint(['page_id'], ['page.id'], ), sa.ForeignKeyConstraint(['parent_id'], ['menu_item.id'], ), sa.PrimaryKeyConstraint('id') ) op.create_index(op.f('ix_menu_item_metadata_private'), 'menu_item', ['metadata_private'], unique=False) op.create_index(op.f('ix_menu_item_metadata_public'), 'menu_item', ['metadata_public'], unique=False) op.create_index(op.f('ix_menu_item_sort_order'), 'menu_item', ['sort_order'], unique=False) op.create_index(op.f('ix_menu_item_tree_id'), 'menu_item', ['tree_id'], unique=False) op.create_table('order', sa.Column('id', sa.Integer(), nullable=False), sa.Column('created', sa.DateTime(), nullable=False), sa.Column('tracking_client_id', sa.String(length=36), nullable=False), sa.Column('user_email', sa.String(length=254), nullable=False), sa.Column('token', sa.String(length=36), nullable=False), sa.Column('billing_address_id', sa.Integer(), nullable=True), sa.Column('shipping_address_id', sa.Integer(), nullable=True), sa.Column('user_id', sa.Integer(), nullable=True), sa.Column('total_net_amount', sa.Numeric(precision=12, scale=3), nullable=False), sa.Column('voucher_id', sa.Integer(), nullable=True), sa.Column('language_code', sa.String(length=35), nullable=False), sa.Column('shipping_price_gross_amount', sa.Numeric(precision=12, scale=3), nullable=False), sa.Column('total_gross_amount', sa.Numeric(precision=12, scale=3), nullable=False), sa.Column('shipping_price_net_amount', sa.Numeric(precision=12, scale=3), nullable=False), sa.Column('status', sa.String(length=32), nullable=False), sa.Column('shipping_method_name', sa.String(length=255), nullable=True), sa.Column('shipping_method_id', sa.Integer(), nullable=True), sa.Column('display_gross_prices', sa.Boolean(), nullable=False), sa.Column('customer_note', sa.Text(), nullable=False), sa.Column('weight', sa.Float(precision=53), nullable=False), sa.Column('checkout_token', sa.String(length=36), nullable=False), sa.Column('currency', sa.String(length=3), nullable=False), sa.Column('metadata_public', 
postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('metadata_private', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('channel_id', sa.Integer(), nullable=False), sa.Column('redirect_url', sa.String(length=200), nullable=True), sa.Column('shipping_tax_rate', sa.Numeric(precision=5, scale=4), nullable=False), sa.Column('undiscounted_total_gross_amount', sa.Numeric(precision=12, scale=3), nullable=False), sa.Column('undiscounted_total_net_amount', sa.Numeric(precision=12, scale=3), nullable=False), sa.Column('total_paid_amount', sa.Numeric(precision=12, scale=3), nullable=False), sa.Column('origin', sa.String(length=32), nullable=False), sa.Column('original_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['billing_address_id'], ['address.id'], ), sa.ForeignKeyConstraint(['channel_id'], ['channel.id'], ), sa.ForeignKeyConstraint(['original_id'], ['order.id'], ), sa.ForeignKeyConstraint(['shipping_address_id'], ['address.id'], ), sa.ForeignKeyConstraint(['shipping_method_id'], ['shipping_method.id'], ), sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), sa.ForeignKeyConstraint(['voucher_id'], ['discount_voucher.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('token') ) op.create_index(op.f('ix_order_metadata_private'), 'order', ['metadata_private'], unique=False) op.create_index(op.f('ix_order_metadata_public'), 'order', ['metadata_public'], unique=False) op.create_index(op.f('ix_order_user_email'), 'order', ['user_email'], unique=False) op.create_table('page_translation', sa.Column('id', sa.Integer(), nullable=False), sa.Column('seo_title', sa.String(length=70), nullable=True), sa.Column('seo_description', sa.String(length=300), nullable=True), sa.Column('language_code', sa.String(length=10), nullable=False), sa.Column('title', sa.String(length=255), nullable=True), sa.Column('content', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('page_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['page_id'], ['page.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('language_code', 'page_id') ) op.create_table('product_channel_listing', sa.Column('id', sa.Integer(), nullable=False), sa.Column('publication_date', sa.Date(), nullable=True), sa.Column('is_published', sa.Boolean(), nullable=False), sa.Column('channel_id', sa.Integer(), nullable=False), sa.Column('product_id', sa.Integer(), nullable=False), sa.Column('discounted_price_amount', sa.Numeric(precision=12, scale=3), nullable=True), sa.Column('currency', sa.String(length=3), nullable=False), sa.Column('visible_in_listings', sa.Boolean(), nullable=False), sa.Column('available_for_purchase', sa.Date(), nullable=True), sa.ForeignKeyConstraint(['channel_id'], ['channel.id'], ), sa.ForeignKeyConstraint(['product_id'], ['product.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('product_id', 'channel_id') ) op.create_index(op.f('ix_product_channel_listing_publication_date'), 'product_channel_listing', ['publication_date'], unique=False) op.create_table('product_media', sa.Column('id', sa.Integer(), nullable=False), sa.Column('sort_order', sa.Integer(), nullable=True), sa.Column('image', sa.String(length=100), nullable=True), sa.Column('ppoi', sa.String(length=20), nullable=False), sa.Column('alt', sa.String(length=128), nullable=False), sa.Column('type', sa.String(length=32), nullable=False), sa.Column('external_url', sa.String(length=256), nullable=True), sa.Column('oembed_data', postgresql.JSONB(astext_type=sa.Text()), nullable=False), 
sa.Column('product_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['product_id'], ['product.id'], ), sa.PrimaryKeyConstraint('id') ) op.create_index(op.f('ix_product_media_sort_order'), 'product_media', ['sort_order'], unique=False) op.create_table('product_translation', sa.Column('id', sa.Integer(), nullable=False), sa.Column('seo_title', sa.String(length=70), nullable=True), sa.Column('seo_description', sa.String(length=300), nullable=True), sa.Column('language_code', sa.String(length=10), nullable=False), sa.Column('name', sa.String(length=250), nullable=True), sa.Column('description', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('product_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['product_id'], ['product.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('language_code', 'product_id') ) op.create_table('product_variant', sa.Column('id', sa.Integer(), nullable=False), sa.Column('sku', sa.String(length=255), nullable=False), sa.Column('name', sa.String(length=255), nullable=False), sa.Column('product_id', sa.Integer(), nullable=False), sa.Column('track_inventory', sa.Boolean(), nullable=False), sa.Column('weight', sa.Float(precision=53), nullable=True), sa.Column('metadata_public', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('metadata_private', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('sort_order', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['product_id'], ['product.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('sku') ) op.create_index(op.f('ix_product_variant_metadata_private'), 'product_variant', ['metadata_private'], unique=False) op.create_index(op.f('ix_product_variant_metadata_public'), 'product_variant', ['metadata_public'], unique=False) op.create_index(op.f('ix_product_variant_sort_order'), 'product_variant', ['sort_order'], unique=False) op.create_table('shipping_method_channel_listing', sa.Column('id', sa.Integer(), nullable=False), sa.Column('minimum_order_price_amount', sa.Numeric(precision=12, scale=3), nullable=True), sa.Column('currency', sa.String(length=3), nullable=False), sa.Column('maximum_order_price_amount', sa.Numeric(precision=12, scale=3), nullable=True), sa.Column('price_amount', sa.Numeric(precision=12, scale=3), nullable=False), sa.Column('channel_id', sa.Integer(), nullable=False), sa.Column('shipping_method_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['channel_id'], ['channel.id'], ), sa.ForeignKeyConstraint(['shipping_method_id'], ['shipping_method.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('shipping_method_id', 'channel_id') ) op.create_table('shipping_method_excluded_product', sa.Column('id', sa.Integer(), nullable=False), sa.Column('shipping_method_id', sa.Integer(), nullable=False), sa.Column('product_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['product_id'], ['product.id'], ), sa.ForeignKeyConstraint(['shipping_method_id'], ['shipping_method.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('shipping_method_id', 'product_id') ) op.create_table('shipping_method_postal_code_rule', sa.Column('id', sa.Integer(), nullable=False), sa.Column('start', sa.String(length=32), nullable=False), sa.Column('end', sa.String(length=32), nullable=True), sa.Column('shipping_method_id', sa.Integer(), nullable=False), sa.Column('inclusion_type', sa.String(length=32), nullable=False), sa.ForeignKeyConstraint(['shipping_method_id'], ['shipping_method.id'], ), 
sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('shipping_method_id', 'start', 'end') ) op.create_table('shipping_method_translation', sa.Column('id', sa.Integer(), nullable=False), sa.Column('language_code', sa.String(length=10), nullable=False), sa.Column('name', sa.String(length=255), nullable=True), sa.Column('shipping_method_id', sa.Integer(), nullable=False), sa.Column('description', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.ForeignKeyConstraint(['shipping_method_id'], ['shipping_method.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('language_code', 'shipping_method_id') ) op.create_table('site_setting_translation', sa.Column('id', sa.Integer(), nullable=False), sa.Column('language_code', sa.String(length=10), nullable=False), sa.Column('header_text', sa.String(length=200), nullable=False), sa.Column('description', sa.String(length=500), nullable=False), sa.Column('site_settings_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['site_settings_id'], ['site_setting.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('language_code', 'site_settings_id') ) op.create_table('user_address_map', sa.Column('id', sa.Integer(), nullable=False), sa.Column('user_id', sa.Integer(), nullable=False), sa.Column('address_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['address_id'], ['address.id'], ), sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('user_id', 'address_id') ) op.create_table('warehouse_shipping_zone', sa.Column('id', sa.Integer(), nullable=False), sa.Column('warehouse_id', postgresql.UUID(), nullable=False), sa.Column('shipping_zone_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['shipping_zone_id'], ['shipping_zone.id'], ), sa.ForeignKeyConstraint(['warehouse_id'], ['warehouse.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('warehouse_id', 'shipping_zone_id') ) op.create_table('webhook_event', sa.Column('id', sa.Integer(), nullable=False), sa.Column('event_type', sa.String(length=128), nullable=False), sa.Column('webhook_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['webhook_id'], ['webhook.id'], ), sa.PrimaryKeyConstraint('id') ) op.create_index(op.f('ix_webhook_event_event_type'), 'webhook_event', ['event_type'], unique=False) op.create_table('wishlist', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('id', sa.Integer(), nullable=False), sa.Column('token', postgresql.UUID(), nullable=False), sa.Column('user_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('token'), sa.UniqueConstraint('user_id') ) op.create_table('assigned_page_attribute_value', sa.Column('id', sa.Integer(), nullable=False), sa.Column('sort_order', sa.Integer(), nullable=True), sa.Column('assignment_id', sa.Integer(), nullable=False), sa.Column('value_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['assignment_id'], ['assigned_page_attribute.id'], ), sa.ForeignKeyConstraint(['value_id'], ['attribute_value.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('value_id', 'assignment_id') ) op.create_index(op.f('ix_assigned_page_attribute_value_sort_order'), 'assigned_page_attribute_value', ['sort_order'], unique=False) op.create_table('assigned_product_attribute_value', sa.Column('id', sa.Integer(), nullable=False), sa.Column('sort_order', sa.Integer(), nullable=True), 
sa.Column('assignment_id', sa.Integer(), nullable=False), sa.Column('value_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['assignment_id'], ['assigned_product_attribute.id'], ), sa.ForeignKeyConstraint(['value_id'], ['attribute_value.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('value_id', 'assignment_id') ) op.create_index(op.f('ix_assigned_product_attribute_value_sort_order'), 'assigned_product_attribute_value', ['sort_order'], unique=False) op.create_table('assigned_variant_attribute', sa.Column('id', sa.Integer(), nullable=False), sa.Column('variant_id', sa.Integer(), nullable=False), sa.Column('assignment_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['assignment_id'], ['attribute_variant.id'], ), sa.ForeignKeyConstraint(['variant_id'], ['product_variant.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('variant_id', 'assignment_id') ) op.create_table('checkout_gift_card', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('id', sa.Integer(), nullable=False), sa.Column('checkout_id', postgresql.UUID(), nullable=False), sa.Column('gift_card_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['checkout_id'], ['checkout.id'], ), sa.ForeignKeyConstraint(['gift_card_id'], ['gift_card.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('checkout_id', 'gift_card_id') ) op.create_table('checkout_line', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('id', sa.Integer(), nullable=False), sa.Column('quantity', sa.Integer(), nullable=False), sa.Column('checkout_id', postgresql.UUID(), nullable=False), sa.Column('variant_id', sa.Integer(), nullable=False), sa.CheckConstraint('quantity >= 0'), sa.ForeignKeyConstraint(['checkout_id'], ['checkout.id'], ), sa.ForeignKeyConstraint(['variant_id'], ['product_variant.id'], ), sa.PrimaryKeyConstraint('id') ) op.create_table('csv_export_event', sa.Column('id', sa.Integer(), nullable=False), sa.Column('date', sa.DateTime(), nullable=False), sa.Column('type', sa.String(length=255), nullable=False), sa.Column('parameters', postgresql.JSONB(astext_type=sa.Text()), nullable=False), sa.Column('app_id', sa.Integer(), nullable=True), sa.Column('export_file_id', sa.Integer(), nullable=False), sa.Column('user_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['app_id'], ['app.id'], ), sa.ForeignKeyConstraint(['export_file_id'], ['csv_export_file.id'], ), sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), sa.PrimaryKeyConstraint('id') ) op.create_table('customer_event', sa.Column('id', sa.Integer(), nullable=False), sa.Column('date', sa.DateTime(), nullable=False), sa.Column('type', sa.String(length=255), nullable=False), sa.Column('parameters', postgresql.JSONB(astext_type=sa.Text()), nullable=False), sa.Column('order_id', sa.Integer(), nullable=True), sa.Column('user_id', sa.Integer(), nullable=True), sa.Column('app_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['app_id'], ['app.id'], ), sa.ForeignKeyConstraint(['order_id'], ['order.id'], ), sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), sa.PrimaryKeyConstraint('id') ) op.create_table('invoice', sa.Column('id', sa.Integer(), nullable=False), sa.Column('order_id', sa.Integer(), nullable=True), sa.Column('metadata_private', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('metadata_public', postgresql.JSONB(astext_type=sa.Text()), nullable=True), 
sa.Column('status', sa.String(length=50), nullable=False), sa.Column('created_at', sa.DateTime(), nullable=False), sa.Column('updated_at', sa.DateTime(), nullable=False), sa.Column('number', sa.String(length=255), nullable=True), sa.Column('created', sa.DateTime(), nullable=True), sa.Column('external_url', sa.String(length=2048), nullable=True), sa.Column('invoice_file', sa.String(length=100), nullable=False), sa.Column('message', sa.String(length=255), nullable=True), sa.ForeignKeyConstraint(['order_id'], ['order.id'], ), sa.PrimaryKeyConstraint('id') ) op.create_index(op.f('ix_invoice_metadata_private'), 'invoice', ['metadata_private'], unique=False) op.create_index(op.f('ix_invoice_metadata_public'), 'invoice', ['metadata_public'], unique=False) op.create_table('menu_item_translation', sa.Column('id', sa.Integer(), nullable=False), sa.Column('language_code', sa.String(length=10), nullable=False), sa.Column('name', sa.String(length=128), nullable=False), sa.Column('menu_item_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['menu_item_id'], ['menu_item.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('language_code', 'menu_item_id') ) op.create_index(op.f('ix_menu_item_translation_menu_item_id'), 'menu_item_translation', ['menu_item_id'], unique=False) op.create_table('order_discount', sa.Column('id', sa.Integer(), nullable=False), sa.Column('type', sa.String(length=10), nullable=False), sa.Column('value_type', sa.String(length=10), nullable=False), sa.Column('value', sa.Numeric(precision=12, scale=3), nullable=False), sa.Column('amount_value', sa.Numeric(precision=12, scale=3), nullable=False), sa.Column('currency', sa.String(length=3), nullable=False), sa.Column('name', sa.String(length=255), nullable=True), sa.Column('translated_name', sa.String(length=255), nullable=True), sa.Column('reason', sa.Text(), nullable=True), sa.Column('order_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['order_id'], ['order.id'], ), sa.PrimaryKeyConstraint('id') ) op.create_index('discount_or_name_d16858_gin', 'order_discount', ['name', 'translated_name'], unique=False) op.create_table('order_event', sa.Column('id', sa.Integer(), nullable=False), sa.Column('date', sa.DateTime(), nullable=False), sa.Column('type', sa.String(length=255), nullable=False), sa.Column('order_id', sa.Integer(), nullable=False), sa.Column('user_id', sa.Integer(), nullable=True), sa.Column('parameters', postgresql.JSONB(astext_type=sa.Text()), nullable=False), sa.Column('app_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['app_id'], ['app.id'], ), sa.ForeignKeyConstraint(['order_id'], ['order.id'], ), sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), sa.PrimaryKeyConstraint('id') ) op.create_index(op.f('ix_order_event_app_id'), 'order_event', ['app_id'], unique=False) op.create_table('order_fulfillment', sa.Column('id', sa.Integer(), nullable=False), sa.Column('tracking_number', sa.String(length=255), nullable=False), sa.Column('created', sa.DateTime(), nullable=False), sa.Column('order_id', sa.Integer(), nullable=False), sa.Column('fulfillment_order', sa.Integer(), nullable=False), sa.Column('status', sa.String(length=32), nullable=False), sa.Column('metadata_public', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('metadata_private', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('shipping_refund_amount', sa.Numeric(precision=12, scale=3), nullable=True), sa.Column('total_refund_amount', sa.Numeric(precision=12, scale=3), nullable=True), 
sa.CheckConstraint('fulfillment_order >= 0'), sa.ForeignKeyConstraint(['order_id'], ['order.id'], ), sa.PrimaryKeyConstraint('id') ) op.create_index(op.f('ix_order_fulfillment_metadata_private'), 'order_fulfillment', ['metadata_private'], unique=False) op.create_index(op.f('ix_order_fulfillment_metadata_public'), 'order_fulfillment', ['metadata_public'], unique=False) op.create_index(op.f('ix_order_fulfillment_order_id'), 'order_fulfillment', ['order_id'], unique=False) op.create_table('order_gift_card', sa.Column('id', sa.Integer(), nullable=False), sa.Column('order_id', sa.Integer(), nullable=False), sa.Column('gift_card_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['gift_card_id'], ['gift_card.id'], ), sa.ForeignKeyConstraint(['order_id'], ['order.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('order_id', 'gift_card_id') ) op.create_table('order_line', sa.Column('id', sa.Integer(), nullable=False), sa.Column('product_name', sa.String(length=386), nullable=False), sa.Column('product_sku', sa.String(length=255), nullable=False), sa.Column('quantity', sa.Integer(), nullable=False), sa.Column('unit_price_net_amount', sa.Numeric(precision=12, scale=3), nullable=False), sa.Column('unit_price_gross_amount', sa.Numeric(precision=12, scale=3), nullable=False), sa.Column('is_shipping_required', sa.Boolean(), nullable=False), sa.Column('order_id', sa.Integer(), nullable=False), sa.Column('quantity_fulfilled', sa.Integer(), nullable=False), sa.Column('variant_id', sa.Integer(), nullable=True), sa.Column('tax_rate', sa.Numeric(precision=5, scale=4), nullable=False), sa.Column('translated_product_name', sa.String(length=386), nullable=False), sa.Column('currency', sa.String(length=3), nullable=False), sa.Column('translated_variant_name', sa.String(length=255), nullable=False), sa.Column('variant_name', sa.String(length=255), nullable=False), sa.Column('total_price_gross_amount', sa.Numeric(precision=12, scale=3), nullable=False), sa.Column('total_price_net_amount', sa.Numeric(precision=12, scale=3), nullable=False), sa.Column('unit_discount_amount', sa.Numeric(precision=12, scale=3), nullable=False), sa.Column('unit_discount_value', sa.Numeric(precision=12, scale=3), nullable=False), sa.Column('unit_discount_reason', sa.Text(), nullable=True), sa.Column('unit_discount_type', sa.String(length=10), nullable=False), sa.Column('undiscounted_total_price_gross_amount', sa.Numeric(precision=12, scale=3), nullable=False), sa.Column('undiscounted_total_price_net_amount', sa.Numeric(precision=12, scale=3), nullable=False), sa.Column('undiscounted_unit_price_gross_amount', sa.Numeric(precision=12, scale=3), nullable=False), sa.Column('undiscounted_unit_price_net_amount', sa.Numeric(precision=12, scale=3), nullable=False), sa.ForeignKeyConstraint(['order_id'], ['order.id'], ), sa.ForeignKeyConstraint(['variant_id'], ['product_variant.id'], ), sa.PrimaryKeyConstraint('id') ) op.create_table('payment', sa.Column('id', sa.Integer(), nullable=False), sa.Column('gateway', sa.String(length=255), nullable=False), sa.Column('is_active', sa.Boolean(), nullable=False), sa.Column('created', sa.DateTime(), nullable=False), sa.Column('modified', sa.DateTime(), nullable=False), sa.Column('charge_status', sa.String(length=20), nullable=False), sa.Column('billing_first_name', sa.String(length=256), nullable=False), sa.Column('billing_last_name', sa.String(length=256), nullable=False), sa.Column('billing_company_name', sa.String(length=256), nullable=False), sa.Column('billing_address_1', 
sa.String(length=256), nullable=False), sa.Column('billing_address_2', sa.String(length=256), nullable=False), sa.Column('billing_city', sa.String(length=256), nullable=False), sa.Column('billing_city_area', sa.String(length=128), nullable=False), sa.Column('billing_postal_code', sa.String(length=256), nullable=False), sa.Column('billing_country_code', sa.String(length=2), nullable=False), sa.Column('billing_country_area', sa.String(length=256), nullable=False), sa.Column('billing_email', sa.String(length=254), nullable=False), sa.Column('customer_ip_address', postgresql.INET(), nullable=True), sa.Column('cc_brand', sa.String(length=40), nullable=False), sa.Column('cc_exp_month', sa.Integer(), nullable=True), sa.Column('cc_exp_year', sa.Integer(), nullable=True), sa.Column('cc_first_digits', sa.String(length=6), nullable=False), sa.Column('cc_last_digits', sa.String(length=4), nullable=False), sa.Column('extra_data', sa.Text(), nullable=False), sa.Column('token', sa.String(length=512), nullable=False), sa.Column('currency', sa.String(length=3), nullable=False), sa.Column('total', sa.Numeric(precision=12, scale=3), nullable=False), sa.Column('captured_amount', sa.Numeric(precision=12, scale=3), nullable=False), sa.Column('checkout_id', postgresql.UUID(), nullable=True), sa.Column('order_id', sa.Integer(), nullable=True), sa.Column('to_confirm', sa.Boolean(), nullable=False), sa.Column('payment_method_type', sa.String(length=256), nullable=False), sa.Column('return_url', sa.String(length=200), nullable=True), sa.Column('psp_reference', sa.String(length=512), nullable=True), sa.CheckConstraint('cc_exp_month >= 0'), sa.CheckConstraint('cc_exp_year >= 0'), sa.ForeignKeyConstraint(['checkout_id'], ['checkout.id'], ), sa.ForeignKeyConstraint(['order_id'], ['order.id'], ), sa.PrimaryKeyConstraint('id') ) op.create_index(op.f('ix_payment_psp_reference'), 'payment', ['psp_reference'], unique=False) op.create_index('payment_pay_order_i_f22aa2_gin', 'payment', ['order_id', 'is_active', 'charge_status'], unique=False) op.create_table('product_digital_content', sa.Column('id', sa.Integer(), nullable=False), sa.Column('use_default_settings', sa.Boolean(), nullable=False), sa.Column('automatic_fulfillment', sa.Boolean(), nullable=False), sa.Column('content_type', sa.String(length=128), nullable=False), sa.Column('content_file', sa.String(length=100), nullable=False), sa.Column('max_downloads', sa.Integer(), nullable=True), sa.Column('url_valid_days', sa.Integer(), nullable=True), sa.Column('product_variant_id', sa.Integer(), nullable=False), sa.Column('metadata_public', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.Column('metadata_private', postgresql.JSONB(astext_type=sa.Text()), nullable=True), sa.ForeignKeyConstraint(['product_variant_id'], ['product_variant.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('product_variant_id') ) op.create_index(op.f('ix_product_digital_content_metadata_private'), 'product_digital_content', ['metadata_private'], unique=False) op.create_index(op.f('ix_product_digital_content_metadata_public'), 'product_digital_content', ['metadata_public'], unique=False) op.create_table('product_variant_channel_listing', sa.Column('id', sa.Integer(), nullable=False), sa.Column('currency', sa.String(length=3), nullable=False), sa.Column('price_amount', sa.Numeric(precision=12, scale=3), nullable=True), sa.Column('channel_id', sa.Integer(), nullable=False), sa.Column('variant_id', sa.Integer(), nullable=False), sa.Column('cost_price_amount', 
sa.Numeric(precision=12, scale=3), nullable=True), sa.ForeignKeyConstraint(['channel_id'], ['channel.id'], ), sa.ForeignKeyConstraint(['variant_id'], ['product_variant.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('variant_id', 'channel_id') ) op.create_table('product_variant_media', sa.Column('id', sa.Integer(), nullable=False), sa.Column('media_id', sa.Integer(), nullable=False), sa.Column('variant_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['media_id'], ['product_media.id'], ), sa.ForeignKeyConstraint(['variant_id'], ['product_variant.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('variant_id', 'media_id') ) op.create_table('product_variant_translation', sa.Column('id', sa.Integer(), nullable=False), sa.Column('language_code', sa.String(length=10), nullable=False), sa.Column('name', sa.String(length=255), nullable=False), sa.Column('product_variant_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['product_variant_id'], ['product_variant.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('language_code', 'product_variant_id') ) op.create_table('warehouse_stock', sa.Column('id', sa.Integer(), nullable=False), sa.Column('quantity', sa.Integer(), nullable=False), sa.Column('product_variant_id', sa.Integer(), nullable=False), sa.Column('warehouse_id', postgresql.UUID(), nullable=False), sa.CheckConstraint('quantity >= 0'), sa.ForeignKeyConstraint(['product_variant_id'], ['product_variant.id'], ), sa.ForeignKeyConstraint(['warehouse_id'], ['warehouse.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('warehouse_id', 'product_variant_id') ) op.create_table('wishlist_item', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('id', sa.Integer(), nullable=False), sa.Column('product_id', sa.Integer(), nullable=False), sa.Column('wishlist_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['product_id'], ['product.id'], ), sa.ForeignKeyConstraint(['wishlist_id'], ['wishlist.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('wishlist_id', 'product_id') ) op.create_table('assigned_variant_attribute_value', sa.Column('id', sa.Integer(), nullable=False), sa.Column('sort_order', sa.Integer(), nullable=True), sa.Column('assignment_id', sa.Integer(), nullable=False), sa.Column('value_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['assignment_id'], ['assigned_variant_attribute.id'], ), sa.ForeignKeyConstraint(['value_id'], ['attribute_value.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('value_id', 'assignment_id') ) op.create_index(op.f('ix_assigned_variant_attribute_value_sort_order'), 'assigned_variant_attribute_value', ['sort_order'], unique=False) op.create_table('invoice_event', sa.Column('id', sa.Integer(), nullable=False), sa.Column('date', sa.DateTime(), nullable=False), sa.Column('type', sa.String(length=255), nullable=False), sa.Column('parameters', postgresql.JSONB(astext_type=sa.Text()), nullable=False), sa.Column('invoice_id', sa.Integer(), nullable=True), sa.Column('order_id', sa.Integer(), nullable=True), sa.Column('user_id', sa.Integer(), nullable=True), sa.Column('app_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['app_id'], ['app.id'], ), sa.ForeignKeyConstraint(['invoice_id'], ['invoice.id'], ), sa.ForeignKeyConstraint(['order_id'], ['order.id'], ), sa.ForeignKeyConstraint(['user_id'], ['user.id'], ), sa.PrimaryKeyConstraint('id') ) op.create_table('order_fulfillment_line', sa.Column('id', 
sa.Integer(), nullable=False), sa.Column('order_line_id', sa.Integer(), nullable=False), sa.Column('quantity', sa.Integer(), nullable=False), sa.Column('fulfillment_id', sa.Integer(), nullable=False), sa.Column('stock_id', sa.Integer(), nullable=True), sa.CheckConstraint('quantity >= 0'), sa.ForeignKeyConstraint(['fulfillment_id'], ['order_fulfillment.id'], ), sa.ForeignKeyConstraint(['order_line_id'], ['order_line.id'], ), sa.ForeignKeyConstraint(['stock_id'], ['warehouse_stock.id'], ), sa.PrimaryKeyConstraint('id') ) op.create_table('payment_transaction', sa.Column('id', sa.Integer(), nullable=False), sa.Column('created', sa.DateTime(), nullable=False), sa.Column('token', sa.String(length=512), nullable=False), sa.Column('kind', sa.String(length=25), nullable=False), sa.Column('is_success', sa.Boolean(), nullable=False), sa.Column('error', sa.String(length=256), nullable=True), sa.Column('currency', sa.String(length=3), nullable=False), sa.Column('amount', sa.Numeric(precision=12, scale=3), nullable=False), sa.Column('gateway_response', postgresql.JSONB(astext_type=sa.Text()), nullable=False), sa.Column('payment_id', sa.Integer(), nullable=False), sa.Column('customer_id', sa.String(length=256), nullable=True), sa.Column('action_required', sa.Boolean(), nullable=False), sa.Column('action_required_data', postgresql.JSONB(astext_type=sa.Text()), nullable=False), sa.Column('already_processed', sa.Boolean(), nullable=False), sa.ForeignKeyConstraint(['payment_id'], ['payment.id'], ), sa.PrimaryKeyConstraint('id') ) op.create_table('product_digital_content_url', sa.Column('id', sa.Integer(), nullable=False), sa.Column('token', postgresql.UUID(), nullable=False), sa.Column('created', sa.DateTime(), nullable=False), sa.Column('download_num', sa.Integer(), nullable=False), sa.Column('content_id', sa.Integer(), nullable=False), sa.Column('line_id', sa.Integer(), nullable=True), sa.ForeignKeyConstraint(['content_id'], ['product_digital_content.id'], ), sa.ForeignKeyConstraint(['line_id'], ['order_line.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('line_id'), sa.UniqueConstraint('token') ) op.create_index(op.f('ix_product_digital_content_url_content_id'), 'product_digital_content_url', ['content_id'], unique=False) op.create_table('warehouse_allocation', sa.Column('id', sa.Integer(), nullable=False), sa.Column('quantity_allocated', sa.Integer(), nullable=False), sa.Column('order_line_id', sa.Integer(), nullable=False), sa.Column('stock_id', sa.Integer(), nullable=False), sa.CheckConstraint('quantity_allocated >= 0'), sa.ForeignKeyConstraint(['order_line_id'], ['order_line.id'], ), sa.ForeignKeyConstraint(['stock_id'], ['warehouse_stock.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('order_line_id', 'stock_id') ) op.create_table('wishlist_item_variant', sa.Column('created_at', sa.DateTime(), nullable=True), sa.Column('updated_at', sa.DateTime(), nullable=True), sa.Column('id', sa.Integer(), nullable=False), sa.Column('wishlist_item_id', sa.Integer(), nullable=False), sa.Column('product_variant_id', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['product_variant_id'], ['product_variant.id'], ), sa.ForeignKeyConstraint(['wishlist_item_id'], ['wishlist_item.id'], ), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('wishlist_item_id', 'product_variant_id') ) # ### end Alembic commands ### def downgrade(): # ### commands auto generated by Alembic - please adjust! 
### op.drop_table('wishlist_item_variant') op.drop_table('warehouse_allocation') op.drop_index(op.f('ix_product_digital_content_url_content_id'), table_name='product_digital_content_url') op.drop_table('product_digital_content_url') op.drop_table('payment_transaction') op.drop_table('order_fulfillment_line') op.drop_table('invoice_event') op.drop_index(op.f('ix_assigned_variant_attribute_value_sort_order'), table_name='assigned_variant_attribute_value') op.drop_table('assigned_variant_attribute_value') op.drop_table('wishlist_item') op.drop_table('warehouse_stock') op.drop_table('product_variant_translation') op.drop_table('product_variant_media') op.drop_table('product_variant_channel_listing') op.drop_index(op.f('ix_product_digital_content_metadata_public'), table_name='product_digital_content') op.drop_index(op.f('ix_product_digital_content_metadata_private'), table_name='product_digital_content') op.drop_table('product_digital_content') op.drop_index('payment_pay_order_i_f22aa2_gin', table_name='payment') op.drop_index(op.f('ix_payment_psp_reference'), table_name='payment') op.drop_table('payment') op.drop_table('order_line') op.drop_table('order_gift_card') op.drop_index(op.f('ix_order_fulfillment_order_id'), table_name='order_fulfillment') op.drop_index(op.f('ix_order_fulfillment_metadata_public'), table_name='order_fulfillment') op.drop_index(op.f('ix_order_fulfillment_metadata_private'), table_name='order_fulfillment') op.drop_table('order_fulfillment') op.drop_index(op.f('ix_order_event_app_id'), table_name='order_event') op.drop_table('order_event') op.drop_index('discount_or_name_d16858_gin', table_name='order_discount') op.drop_table('order_discount') op.drop_index(op.f('ix_menu_item_translation_menu_item_id'), table_name='menu_item_translation') op.drop_table('menu_item_translation') op.drop_index(op.f('ix_invoice_metadata_public'), table_name='invoice') op.drop_index(op.f('ix_invoice_metadata_private'), table_name='invoice') op.drop_table('invoice') op.drop_table('customer_event') op.drop_table('csv_export_event') op.drop_table('checkout_line') op.drop_table('checkout_gift_card') op.drop_table('assigned_variant_attribute') op.drop_index(op.f('ix_assigned_product_attribute_value_sort_order'), table_name='assigned_product_attribute_value') op.drop_table('assigned_product_attribute_value') op.drop_index(op.f('ix_assigned_page_attribute_value_sort_order'), table_name='assigned_page_attribute_value') op.drop_table('assigned_page_attribute_value') op.drop_table('wishlist') op.drop_index(op.f('ix_webhook_event_event_type'), table_name='webhook_event') op.drop_table('webhook_event') op.drop_table('warehouse_shipping_zone') op.drop_table('user_address_map') op.drop_table('site_setting_translation') op.drop_table('shipping_method_translation') op.drop_table('shipping_method_postal_code_rule') op.drop_table('shipping_method_excluded_product') op.drop_table('shipping_method_channel_listing') op.drop_index(op.f('ix_product_variant_sort_order'), table_name='product_variant') op.drop_index(op.f('ix_product_variant_metadata_public'), table_name='product_variant') op.drop_index(op.f('ix_product_variant_metadata_private'), table_name='product_variant') op.drop_table('product_variant') op.drop_table('product_translation') op.drop_index(op.f('ix_product_media_sort_order'), table_name='product_media') op.drop_table('product_media') op.drop_index(op.f('ix_product_channel_listing_publication_date'), table_name='product_channel_listing') op.drop_table('product_channel_listing') 
op.drop_table('page_translation') op.drop_index(op.f('ix_order_user_email'), table_name='order') op.drop_index(op.f('ix_order_metadata_public'), table_name='order') op.drop_index(op.f('ix_order_metadata_private'), table_name='order') op.drop_table('order') op.drop_index(op.f('ix_menu_item_tree_id'), table_name='menu_item') op.drop_index(op.f('ix_menu_item_sort_order'), table_name='menu_item') op.drop_index(op.f('ix_menu_item_metadata_public'), table_name='menu_item') op.drop_index(op.f('ix_menu_item_metadata_private'), table_name='menu_item') op.drop_table('menu_item') op.drop_table('gift_card') op.drop_index(op.f('ix_discount_voucher_product_voucher_id'), table_name='discount_voucher_product') op.drop_index(op.f('ix_discount_voucher_product_product_id'), table_name='discount_voucher_product') op.drop_table('discount_voucher_product') op.drop_table('discount_sale_product') op.drop_index(op.f('ix_customer_note_date'), table_name='customer_note') op.drop_table('customer_note') op.drop_index(op.f('ix_csv_export_file_app_id'), table_name='csv_export_file') op.drop_table('csv_export_file') op.drop_index(op.f('ix_collection_product_sort_order'), table_name='collection_product') op.drop_table('collection_product') op.drop_index(op.f('ix_checkout_metadata_public'), table_name='checkout') op.drop_index(op.f('ix_checkout_metadata_private'), table_name='checkout') op.drop_table('checkout') op.drop_table('attribute_value_translation') op.drop_table('assigned_product_attribute') op.drop_table('assigned_page_attribute') op.drop_table('webhook') op.drop_index(op.f('ix_warehouse_metadata_public'), table_name='warehouse') op.drop_index(op.f('ix_warehouse_metadata_private'), table_name='warehouse') op.drop_table('warehouse') op.drop_index(op.f('ix_user_metadata_public'), table_name='user') op.drop_index(op.f('ix_user_metadata_private'), table_name='user') op.drop_index('account_username_email', table_name='user') op.drop_table('user') op.drop_table('staff_notification_recipient') op.drop_index(op.f('ix_site_setting_top_menu_id'), table_name='site_setting') op.drop_index(op.f('ix_site_setting_bottom_menu_id'), table_name='site_setting') op.drop_table('site_setting') op.drop_table('shipping_zone_channel') op.drop_index(op.f('ix_shipping_method_metadata_public'), table_name='shipping_method') op.drop_index(op.f('ix_shipping_method_metadata_private'), table_name='shipping_method') op.drop_table('shipping_method') op.drop_table('product_collection_translation') op.drop_table('product_collection_channel_listing') op.drop_table('product_category_translation') op.drop_index(op.f('ix_product_search_vector'), table_name='product') op.drop_index(op.f('ix_product_metadata_public'), table_name='product') op.drop_index(op.f('ix_product_metadata_private'), table_name='product') op.drop_table('product') op.drop_table('plugin_configuration') op.drop_index('page_title_slug', table_name='page') op.drop_index(op.f('ix_page_metadata_public'), table_name='page') op.drop_index(op.f('ix_page_metadata_private'), table_name='page') op.drop_table('page') op.drop_table('discount_voucher_translation') op.drop_table('discount_voucher_customer') op.drop_table('discount_voucher_collection') op.drop_table('discount_voucher_channel_listing') op.drop_table('discount_voucher_category') op.drop_index(op.f('ix_discount_sale_translation_sale_id'), table_name='discount_sale_translation') op.drop_table('discount_sale_translation') op.drop_table('discount_sale_collection') op.drop_index(op.f('ix_discount_sale_channel_listing_sale_id'), 
table_name='discount_sale_channel_listing') op.drop_index(op.f('ix_discount_sale_channel_listing_channel_id'), table_name='discount_sale_channel_listing') op.drop_table('discount_sale_channel_listing') op.drop_table('discount_sale_category') op.drop_index(op.f('ix_attribute_variant_sort_order'), table_name='attribute_variant') op.drop_table('attribute_variant') op.drop_index(op.f('ix_attribute_value_sort_order'), table_name='attribute_value') op.drop_index(op.f('ix_attribute_value_slug'), table_name='attribute_value') op.drop_index('idx_attribute_value_name_slug', table_name='attribute_value') op.drop_table('attribute_value') op.drop_index(op.f('ix_attribute_translation_attribute_id'), table_name='attribute_translation') op.drop_table('attribute_translation') op.drop_index(op.f('ix_attribute_product_sort_order'), table_name='attribute_product') op.drop_table('attribute_product') op.drop_index(op.f('ix_attribute_page_sort_order'), table_name='attribute_page') op.drop_table('attribute_page') op.drop_table('app_token') op.drop_table('app_permission') op.drop_index(op.f('ix_app_installation_permissions_app_installation_id'), table_name='app_installation_permissions') op.drop_table('app_installation_permissions') op.drop_table('staff') op.drop_index(op.f('ix_shipping_zone_metadata_public'), table_name='shipping_zone') op.drop_index(op.f('ix_shipping_zone_metadata_private'), table_name='shipping_zone') op.drop_table('shipping_zone') op.drop_index(op.f('ix_product_type_metadata_public'), table_name='product_type') op.drop_index(op.f('ix_product_type_metadata_private'), table_name='product_type') op.drop_table('product_type') op.drop_index(op.f('ix_product_collection_metadata_public'), table_name='product_collection') op.drop_index(op.f('ix_product_collection_metadata_private'), table_name='product_collection') op.drop_table('product_collection') op.drop_index(op.f('ix_product_category_tree_id'), table_name='product_category') op.drop_index(op.f('ix_product_category_metadata_public'), table_name='product_category') op.drop_index(op.f('ix_product_category_metadata_private'), table_name='product_category') op.drop_table('product_category') op.drop_table('permission') op.drop_index('page_type_name_slug', table_name='page_type') op.drop_index(op.f('ix_page_type_metadata_public'), table_name='page_type') op.drop_index(op.f('ix_page_type_metadata_private'), table_name='page_type') op.drop_table('page_type') op.drop_index(op.f('ix_menu_metadata_public'), table_name='menu') op.drop_index(op.f('ix_menu_metadata_private'), table_name='menu') op.drop_table('menu') op.drop_index(op.f('ix_django_prices_vatlayer_vat_country_code'), table_name='django_prices_vatlayer_vat') op.drop_table('django_prices_vatlayer_vat') op.drop_table('django_prices_vatlayer_ratetypes') op.drop_table('django_prices_openexchangerates_conversionrate') op.drop_table('discount_voucher') op.drop_table('discount_sale') op.drop_table('channel') op.drop_index(op.f('ix_attribute_metadata_public'), table_name='attribute') op.drop_index(op.f('ix_attribute_metadata_private'), table_name='attribute') op.drop_table('attribute') op.drop_table('app_installation') op.drop_index(op.f('ix_app_metadata_public'), table_name='app') op.drop_index(op.f('ix_app_metadata_private'), table_name='app') op.drop_table('app') op.drop_table('address') # ### end Alembic commands ###
58.156634
151
0.708489
12,439
94,679
5.169467
0.029022
0.096792
0.118268
0.144674
0.933969
0.888434
0.832278
0.779031
0.708039
0.628182
0
0.009342
0.110257
94,679
1,627
152
58.192379
0.753989
0.002989
0
0.486957
0
0
0.27159
0.100181
0
0
0
0
0
1
0.001242
false
0.001242
0.001863
0
0.003106
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
69aa7ed0a1b09e0889f3b5ffa43168c5f4e650b0
566
py
Python
extensions/.stubs/clrclasses/System/Security/Authentication/__init__.py
vicwjb/Pycad
7391cd694b7a91ad9f9964ec95833c1081bc1f84
[ "MIT" ]
1
2020-03-25T03:27:24.000Z
2020-03-25T03:27:24.000Z
extensions/.stubs/clrclasses/System/Security/Authentication/__init__.py
vicwjb/Pycad
7391cd694b7a91ad9f9964ec95833c1081bc1f84
[ "MIT" ]
null
null
null
extensions/.stubs/clrclasses/System/Security/Authentication/__init__.py
vicwjb/Pycad
7391cd694b7a91ad9f9964ec95833c1081bc1f84
[ "MIT" ]
null
null
null
import __clrclasses__.System.Security.Authentication.ExtendedProtection as ExtendedProtection
from __clrclasses__.System.Security.Authentication import AuthenticationException
from __clrclasses__.System.Security.Authentication import CipherAlgorithmType
from __clrclasses__.System.Security.Authentication import ExchangeAlgorithmType
from __clrclasses__.System.Security.Authentication import HashAlgorithmType
from __clrclasses__.System.Security.Authentication import InvalidCredentialException
from __clrclasses__.System.Security.Authentication import SslProtocols
70.75
93
0.911661
50
566
9.76
0.28
0.229508
0.344262
0.545082
0.590164
0.590164
0
0
0
0
0
0
0.04947
566
7
94
80.857143
0.907063
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
6
69b77b06dd4fa077da605fa42a9ba85398ac3042
281
py
Python
src/attrbench/metrics/infidelity/__init__.py
zoeparman/benchmark
96331b7fa0db84f5f422b52cae2211b41bbd15ce
[ "MIT" ]
null
null
null
src/attrbench/metrics/infidelity/__init__.py
zoeparman/benchmark
96331b7fa0db84f5f422b52cae2211b41bbd15ce
[ "MIT" ]
7
2020-03-02T13:03:50.000Z
2022-03-12T00:16:20.000Z
src/attrbench/metrics/infidelity/__init__.py
zoeparman/benchmark
96331b7fa0db84f5f422b52cae2211b41bbd15ce
[ "MIT" ]
null
null
null
from .infidelity import Infidelity, infidelity
from .perturbation_generator import PerturbationGenerator, NoisyBaselinePerturbationGenerator, \
    SegmentRemovalPerturbationGenerator, SquarePerturbationGenerator, GaussianPerturbationGenerator
from .result import InfidelityResult
56.2
99
0.88968
18
281
13.833333
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.078292
281
4
100
70.25
0.96139
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.75
0
0.75
0
1
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
69c595b8dce737beac3cac7668741281ca62bfb4
5,610
py
Python
inverse_covariance/profiling/tests/metrics_test.py
aldanor/skggm
d2e29d692d1654285653ab07fd24534628fcb076
[ "MIT" ]
199
2016-10-21T14:36:02.000Z
2022-03-29T20:59:08.000Z
inverse_covariance/profiling/tests/metrics_test.py
aldanor/skggm
d2e29d692d1654285653ab07fd24534628fcb076
[ "MIT" ]
66
2016-10-17T01:47:28.000Z
2022-03-06T11:02:56.000Z
inverse_covariance/profiling/tests/metrics_test.py
aldanor/skggm
d2e29d692d1654285653ab07fd24534628fcb076
[ "MIT" ]
36
2016-10-15T23:42:10.000Z
2022-03-06T00:03:13.000Z
import numpy as np import pytest from inverse_covariance.profiling import metrics class TestMetrics(object): @pytest.mark.parametrize( "m, m_hat, expected", [ ( np.array([[1, 2, 3], [1, 2, 3], [1, 2, 3]]), np.array([[1, 2, 3], [1, 2, 3], [1, 2, 3]]), (6, 6, 6), ), ( np.array([[2, 1, 0], [1, 2, 3], [0, 5, 6]]), np.array([[1, 1, 0], [1, 2, 0], [0, 0, 3]]), (4, 2, 2), ), ( np.array([[0, 1, 0], [1, 0, 3], [0, 5, 0]]), np.array([[0, 1, 0], [1, 0, 0], [0, 0, 0]]), (4, 2, 2), ), ], ) def test__nonzero_intersection(self, m, m_hat, expected): result = metrics._nonzero_intersection(m, m_hat) print(result) assert result == expected @pytest.mark.parametrize( "m, m_hat, expected", [ ( np.array([[1, 2, 3], [1, 2, 3], [1, 2, 3]]), np.array([[1, 2, 3], [1, 2, 3], [1, 2, 3]]), 0, ), ( np.array([[2, 1, 0], [1, 2, 3], [0, 5, 6]]), np.array([[1, 1, 0], [1, 2, 0], [0, 0, 3]]), 0, ), ( np.array([[0, 1, 0], [1, 0, 3], [0, 5, 0]]), np.array([[0, 1, 1], [1, 0, 0], [1, 0, 0]]), 1, ), ], ) def test_support_false_positive_count(self, m, m_hat, expected): result = metrics.support_false_positive_count(m, m_hat) print(result) assert result == expected @pytest.mark.parametrize( "m, m_hat, expected", [ ( np.array([[1, 2, 3], [1, 2, 3], [1, 2, 3]]), np.array([[1, 2, 3], [1, 2, 3], [1, 2, 3]]), 0, ), ( np.array([[2, 1, 0], [1, 2, 3], [0, 5, 6]]), np.array([[1, 1, 0], [1, 2, 0], [0, 0, 3]]), 1, ), ( np.array([[0, 1, 0], [1, 0, 3], [0, 5, 0]]), np.array([[0, 1, 1], [1, 0, 1], [1, 1, 0]]), 0, ), ], ) def test_support_false_negative_count(self, m, m_hat, expected): result = metrics.support_false_negative_count(m, m_hat) print(result) assert result == expected @pytest.mark.parametrize( "m, m_hat, expected", [ ( np.array([[1, 2, 3], [1, 2, 3], [1, 2, 3]]), np.array([[1, 2, 3], [1, 2, 3], [1, 2, 3]]), 0, ), ( np.array([[2, 1, 0], [1, 2, 3], [0, 5, 6]]), np.array([[1, 1, 0], [1, 2, 0], [0, 0, 3]]), 1, ), ( np.array([[0, 1, 0], [1, 0, 3], [0, 5, 0]]), np.array([[0, 1, 1], [1, 0, 0], [1, 0, 0]]), 2, ), ], ) def test_support_difference_count(self, m, m_hat, expected): result = metrics.support_difference_count(m, m_hat) print(result) assert result == expected @pytest.mark.parametrize( "m, m_hat, expected", [ ( np.array([[1, 2, 3], [1, 2, 3], [1, 2, 3]]), np.array([[1, 2, 3], [1, 2, 3], [1, 2, 3]]), 1, ), ( np.array([[2, 1, 0], [1, 2, 3], [0, 5, 6]]), np.array([[1, 1, 0], [1, 2, 0], [0, 0, 3]]), 0, ), ( np.array([[0, 1, 0], [1, 0, 3], [0, 5, 0]]), np.array([[0, 1, 1], [1, 0, 0], [1, 0, 0]]), 0, ), ], ) def test_has_exact_support(self, m, m_hat, expected): result = metrics.has_exact_support(m, m_hat) print(result) assert result == expected @pytest.mark.parametrize( "m, m_hat, expected", [ ( np.array([[1, 2, 3], [1, 2, 3], [1, 2, 3]]), np.array([[1, 2, 3], [1, 2, 3], [1, 2, 3]]), 1, ), ( np.array([[2, 1, 0], [1, 2, 3], [0, 5, 6]]), np.array([[1, 1, 0], [1, 2, 0], [0, 0, 3]]), 1, ), ( np.array([[0, 1, 0], [1, 0, 3], [0, 5, 0]]), np.array([[0, 1, 1], [1, 0, 0], [1, 0, 0]]), 0, ), ], ) def test_has_approx_support(self, m, m_hat, expected): result = metrics.has_approx_support(m, m_hat, 0.5) print(m, m_hat, result) assert result == expected @pytest.mark.parametrize( "m, m_hat, expected", [ ( np.array([[1, 2, 3], [1, 2, 3], [1, 2, 3]]), np.array([[1, 2, 3], [1, 2, 3], [1, 2, 3]]), 0, ), ( np.array([[2, 1, 0], [1, 2, 3], [0, 5, 6]]), np.array([[1, 1, 0], [1, 2, 0], [0, 0, 3]]), 3.0, ), ( np.array([[0, 1, 0], [1, 0, 3], [0, 5, 0]]), np.array([[0, 1, 1], [1, 0, 0], [1, 0, 0]]), 3.16227766017, ), ], ) def test_error_fro(self, m, m_hat, expected): 
result = metrics.error_fro(m, m_hat) print(m, m_hat, result) np.testing.assert_array_almost_equal(result, expected)
30.824176
68
0.340998
729
5,610
2.536351
0.067215
0.060573
0.079502
0.0649
0.793402
0.778259
0.775554
0.743104
0.741482
0.6755
0
0.136928
0.454545
5,610
181
69
30.994475
0.46732
0
0
0.651163
0
0
0.02246
0
0
0
0
0
0.040698
1
0.040698
false
0
0.017442
0
0.063953
0.040698
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
69e9affa97c1097b5ffa68c7426f38ced0b25149
73
py
Python
src/smugapi/handlers/__init__.py
threatsimple/smugapi
0e2e5ecb3c3076b9b8dd8342371de21fa9c4a8c4
[ "MIT" ]
null
null
null
src/smugapi/handlers/__init__.py
threatsimple/smugapi
0e2e5ecb3c3076b9b8dd8342371de21fa9c4a8c4
[ "MIT" ]
null
null
null
src/smugapi/handlers/__init__.py
threatsimple/smugapi
0e2e5ecb3c3076b9b8dd8342371de21fa9c4a8c4
[ "MIT" ]
null
null
null
from . import index
from . import stockquotes
from . import weatherbit
12.166667
25
0.767123
9
73
6.222222
0.555556
0.535714
0
0
0
0
0
0
0
0
0
0
0.191781
73
5
26
14.6
0.949153
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
38a8c30eb09199ab5dcf21422291eb966363888f
6,537
py
Python
tests/test_fit_screens.py
ska-telescope/ska-sdp-screen-fitting
213c471ec09b31f2482924d96f5f530d5c40d9f0
[ "BSD-3-Clause" ]
null
null
null
tests/test_fit_screens.py
ska-telescope/ska-sdp-screen-fitting
213c471ec09b31f2482924d96f5f530d5c40d9f0
[ "BSD-3-Clause" ]
null
null
null
tests/test_fit_screens.py
ska-telescope/ska-sdp-screen-fitting
213c471ec09b31f2482924d96f5f530d5c40d9f0
[ "BSD-3-Clause" ]
null
null
null
""" test_fit_screens.py: Test screen fitting functionality SPDX-License-Identifier: BSD-3-Clause """ import os import shutil import uuid import h5py import numpy as np import pytest from astropy import wcs from astropy.io import fits from ska_sdp_screen_fitting.make_aterm_images import make_aterm_image from ska_sdp_screen_fitting.utils import processing_utils CWD = os.getcwd() SOLFILE = "solutions.h5" SKYMODEL = "skymodel.txt" @pytest.fixture(autouse=True) def source_env(): """Create temporary folder for test""" os.chdir(CWD) tmpdir = str(uuid.uuid4()) os.mkdir(tmpdir) os.chdir(tmpdir) shutil.copyfile(f"../resources/{SOLFILE}", SOLFILE) shutil.copyfile(f"../resources/{SKYMODEL}", SKYMODEL) # Tests are executed here yield # Post-test: clean up os.chdir(CWD) shutil.rmtree(tmpdir) def test_fit_voronoi_screens(): """ Tests Voronoi screens generation """ method = "tessellated" soltab = "phase000" make_aterm_image( SOLFILE, soltabname=soltab, screen_type=method, outroot=method, bounds_deg=[124.565, 66.165, 127.895, 62.835], bounds_mid_deg=[126.23, 64.50], skymodel=SKYMODEL, solsetname="sol000", padding_fraction=0, cellsize_deg=0.2, smooth_deg=0.1, ncpu=0, ) # Assert that solution files are generated assert os.path.isfile(f"{method}_0.fits") assert os.path.isfile(f"{method}_template.fits") assert os.path.isfile(f"{method}.txt") # Load h5 solutions and image cube and calculate the error at the # patch coordinates # 1 - Get the pixel coordinate of the patches # 2 - Open the calibration solution and correct for the phase reference h5_file = h5py.File(SOLFILE, "r") radec_coord = processing_utils.read_patch_list(SKYMODEL, h5_file, soltab) filename = f"{method}_0.fits" hdu = fits.open(filename) wcs_obj = wcs.WCS(hdu[0].header) [coord_x, coord_y] = processing_utils.get_patch_coordinates( radec_coord, wcs_obj ) screen_cube = hdu[0].data im_size = screen_cube.shape[4] phase = h5_file["sol000/phase000/val"] # re-arrange axes to allow correct broadcasting ref_antenna = 0 phase_corrected = np.zeros( ( screen_cube.shape[0], screen_cube.shape[1], screen_cube.shape[2], len(radec_coord), ) ) phase_corrected = ( np.transpose(phase, (2, 0, 1, 3)) - phase[:, :, ref_antenna, :] ) phase_corrected = np.transpose(phase_corrected, (1, 2, 0, 3)) # Assert that the error at the position of the patch is smaller # than the threshold threshold = 1e-4 for i in enumerate(coord_x): y_coord = int(np.round(coord_x[i[0]])) x_coord = int(np.round(coord_y[i[0]])) if 0 <= x_coord < im_size: if 0 <= y_coord < im_size: assert ( screen_cube[:, :, :, 0, x_coord, y_coord] - np.cos(phase_corrected[:, :, :, i[0]]) < threshold ).all() assert ( screen_cube[:, :, :, 1, x_coord, y_coord] - np.sin(phase_corrected[:, :, :, i[0]]) < threshold ).all() assert ( screen_cube[:, :, :, 2, x_coord, y_coord] - np.cos(phase_corrected[:, :, :, i[0]]) < threshold ).all() assert ( screen_cube[:, :, :, 3, x_coord, y_coord] - np.sin(phase_corrected[:, :, :, i[0]]) < threshold ).all() def test_fit_kl_screens(): """ Tests kl screens generation """ soltab = "phase000" method = "kl" make_aterm_image( SOLFILE, soltabname=soltab, screen_type=method, outroot=method, bounds_deg=[124.565, 66.165, 127.895, 62.835], bounds_mid_deg=[126.23, 64.50], skymodel=SKYMODEL, solsetname="sol000", padding_fraction=0, cellsize_deg=0.2, smooth_deg=0.1, ncpu=0, ) # Assert that solution files are generated assert os.path.isfile(f"{method}_0.fits") assert os.path.isfile(f"{method}.txt") # Load h5 solutions and image cube and calculate the error at the # patch coordinates # 1 - Get the 
pixel coordinate of the patches # 2 - Open the calibration solution and correct for the phase reference h5_file = h5py.File(SOLFILE, "r") radec_coord = processing_utils.read_patch_list(SKYMODEL, h5_file, soltab) filename = f"{method}_0.fits" hdu = fits.open(filename) wcs_obj = wcs.WCS(hdu[0].header) [coord_x, coord_y] = processing_utils.get_patch_coordinates( radec_coord, wcs_obj ) screen_cube = hdu[0].data im_size = screen_cube.shape[4] phase = h5_file["sol000/phase000/val"] phase_corrected = np.zeros( ( screen_cube.shape[0], screen_cube.shape[1], screen_cube.shape[2], len(radec_coord), ) ) ref_antenna = 0 phase_corrected = ( np.transpose(phase, (2, 0, 1, 3)) - phase[:, :, ref_antenna, :] ) phase_corrected = np.transpose(phase_corrected, (1, 2, 0, 3)) # Assert that the error at the position of the patch is smaller # than the threshold threshold = 1e-1 for i in enumerate(coord_x): y_coord = int(np.round(coord_x[i[0]])) x_coord = int(np.round(coord_y[i[0]])) if 0 <= x_coord < im_size: if 0 <= y_coord < im_size: assert ( screen_cube[:, :, :, 0, x_coord, y_coord] - np.cos(phase_corrected[:, :, :, i[0]]) < threshold ).all() assert ( screen_cube[:, :, :, 1, x_coord, y_coord] - np.sin(phase_corrected[:, :, :, i[0]]) < threshold ).all() assert ( screen_cube[:, :, :, 2, x_coord, y_coord] - np.cos(phase_corrected[:, :, :, i[0]]) < threshold ).all() assert ( screen_cube[:, :, :, 3, x_coord, y_coord] - np.sin(phase_corrected[:, :, :, i[0]]) < threshold ).all()
30.263889
77
0.556218
808
6,537
4.315594
0.217822
0.05162
0.020075
0.027531
0.782621
0.769429
0.76312
0.76312
0.76312
0.76312
0
0.040217
0.322931
6,537
215
78
30.404651
0.747628
0.139667
0
0.748466
0
0
0.046002
0.01204
0
0
0
0
0.079755
1
0.018405
false
0
0.06135
0
0.079755
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
38acc049899b77e5268c9ccf9641f51488eb686c
70
py
Python
ArithmeticMean/arithmeticMean.py
pauloantiquera/starters-exercises
0fe68ba13fa204b28627b3097cdd96072648e602
[ "Unlicense" ]
null
null
null
ArithmeticMean/arithmeticMean.py
pauloantiquera/starters-exercises
0fe68ba13fa204b28627b3097cdd96072648e602
[ "Unlicense" ]
null
null
null
ArithmeticMean/arithmeticMean.py
pauloantiquera/starters-exercises
0fe68ba13fa204b28627b3097cdd96072648e602
[ "Unlicense" ]
1
2018-03-24T02:04:05.000Z
2018-03-24T02:04:05.000Z
def arithmeticMean(number1, number2):
    return (number1 + number2) / 2
23.333333
37
0.742857
8
70
6.5
0.75
0.538462
0
0
0
0
0
0
0
0
0
0.083333
0.142857
70
2
38
35
0.783333
0
0
0
0
0
0
0
0
0
0
0
0
1
0.5
false
0
0
0.5
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
6
38b94f8a2709e202c357a4db2be6f4a6fb5c7f1f
32
py
Python
lang/Python/system-time.py
ethansaxenian/RosettaDecode
8ea1a42a5f792280b50193ad47545d14ee371fb7
[ "MIT" ]
null
null
null
lang/Python/system-time.py
ethansaxenian/RosettaDecode
8ea1a42a5f792280b50193ad47545d14ee371fb7
[ "MIT" ]
null
null
null
lang/Python/system-time.py
ethansaxenian/RosettaDecode
8ea1a42a5f792280b50193ad47545d14ee371fb7
[ "MIT" ]
null
null
null
import time
print(time.ctime())
10.666667
19
0.75
5
32
4.8
0.8
0
0
0
0
0
0
0
0
0
0
0
0.09375
32
2
20
16
0.827586
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0.5
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
1
0
6
2a0e0203fcbb348b0597b0c070293aadf795c40d
5,394
py
Python
tests/validators/date_time_range_test.py
binary-butterfly/wtfjson
551ad07c895ce3c94ac3015b6b5ecc2102599b56
[ "MIT" ]
null
null
null
tests/validators/date_time_range_test.py
binary-butterfly/wtfjson
551ad07c895ce3c94ac3015b6b5ecc2102599b56
[ "MIT" ]
1
2021-10-11T08:55:45.000Z
2021-10-11T08:55:45.000Z
tests/validators/date_time_range_test.py
binary-butterfly/wtfjson
551ad07c895ce3c94ac3015b6b5ecc2102599b56
[ "MIT" ]
null
null
null
# encoding: utf-8

"""
binary butterfly validator
Copyright (c) 2021, binary butterfly GmbH
Use of this source code is governed by an MIT-style license that can be found in the LICENSE.txt.
"""

from time import sleep
from unittest import TestCase
from datetime import datetime, timedelta

from wtfjson import DictInput
from wtfjson.fields import DateTimeField
from wtfjson.validators import DateTimeRange


class DateTimeRangeFixedInput(DictInput):
    test_field = DateTimeField(
        accept_utc=True,
        validators=[
            DateTimeRange(
                minus=timedelta(minutes=-5),
                plus=timedelta(minutes=5),
                orientation=datetime(2020, 1, 1, 0, 0, 0)
            )
        ]
    )


class DateTimeRangeNowInput(DictInput):
    test_field = DateTimeField(
        validators=[
            DateTimeRange(
                minus=timedelta(minutes=-5),
                plus=timedelta(minutes=5)
            )
        ]
    )


class DateTimeRangeFunctionInput(DictInput):
    test_field = DateTimeField(
        validators=[
            DateTimeRange(
                minus=timedelta(seconds=-1),
                plus=timedelta(seconds=1),
                orientation=lambda: datetime.utcnow().replace(microsecond=0) + timedelta(minutes=10)
            )
        ]
    )


class DateTimeRangeTest(TestCase):
    def test_invalid_type(self):
        form = DateTimeRangeFixedInput(data={'test_field': 12})
        assert form.validate() is False
        assert form.has_errors is True
        assert form.errors == {'test_field': ['invalid type']}

    def test_success_fixed(self):
        form = DateTimeRangeFixedInput(data={'test_field': '2020-01-01T00:00:00'})
        assert form.validate() is True
        assert form.has_errors is False
        assert form.errors == {}
        assert form.out == {'test_field': datetime(2020, 1, 1, 0, 0, 0)}

    def test_success_fixed_with_z(self):
        form = DateTimeRangeFixedInput(data={'test_field': '2020-01-01T00:00:00Z'})
        assert form.validate() is True
        assert form.has_errors is False
        assert form.errors == {}
        assert form.out == {'test_field': datetime(2020, 1, 1, 0, 0, 0)}

    def test_invalid_value_min_fixed(self):
        # value below the lower bound of the fixed range
        form = DateTimeRangeFixedInput(data={'test_field': '2019-12-31T23:50:00'})
        assert form.validate() is False
        assert form.has_errors is True
        assert form.errors == {'test_field': ['datetime out of range']}

    def test_invalid_value_max_fixed(self):
        form = DateTimeRangeFixedInput(data={'test_field': '2020-01-01T00:10:00'})
        assert form.validate() is False
        assert form.has_errors is True
        assert form.errors == {'test_field': ['datetime out of range']}

    def test_success_now(self):
        now = datetime.utcnow().replace(microsecond=0)
        form = DateTimeRangeNowInput(data={'test_field': now.strftime('%Y-%m-%dT%H:%M:%S')})
        assert form.validate() is True
        assert form.has_errors is False
        assert form.errors == {}
        assert form.out == {'test_field': now}

    def test_invalid_value_min_now(self):
        now = datetime.utcnow()
        form = DateTimeRangeNowInput(data={'test_field': (now + timedelta(minutes=-10)).strftime('%Y-%m-%dT%H:%M:%S')})
        assert form.validate() is False
        assert form.has_errors is True
        assert form.errors == {'test_field': ['datetime out of range']}

    def test_invalid_value_max_now(self):
        now = datetime.utcnow()
        form = DateTimeRangeNowInput(data={'test_field': (now + timedelta(minutes=10)).strftime('%Y-%m-%dT%H:%M:%S')})
        assert form.validate() is False
        assert form.has_errors is True
        assert form.errors == {'test_field': ['datetime out of range']}

    def test_success_function(self):
        now = datetime.utcnow().replace(microsecond=0) + timedelta(minutes=10)
        form = DateTimeRangeFunctionInput(data={'test_field': now.strftime('%Y-%m-%dT%H:%M:%S')})
        assert form.validate() is True
        assert form.has_errors is False
        assert form.errors == {}
        assert form.out == {'test_field': now}

    def test_success_function_wait(self):
        # sleep longer than the +/- 1 second window to ensure the orientation
        # callable is re-evaluated at validation time rather than at class definition
        sleep(1.5)
        now = datetime.utcnow().replace(microsecond=0) + timedelta(minutes=10)
        form = DateTimeRangeFunctionInput(data={'test_field': now.strftime('%Y-%m-%dT%H:%M:%S')})
        assert form.validate() is True
        assert form.has_errors is False
        assert form.errors == {}
        assert form.out == {'test_field': now}

    def test_invalid_value_min_function(self):
        now = datetime.utcnow().replace(microsecond=0) + timedelta(minutes=10)
        form = DateTimeRangeFunctionInput(data={'test_field': (now + timedelta(minutes=-10)).strftime('%Y-%m-%dT%H:%M:%S')})
        assert form.validate() is False
        assert form.has_errors is True
        assert form.errors == {'test_field': ['datetime out of range']}

    def test_invalid_value_max_function(self):
        now = datetime.utcnow().replace(microsecond=0) + timedelta(minutes=10)
        form = DateTimeRangeFunctionInput(data={'test_field': (now + timedelta(minutes=10)).strftime('%Y-%m-%dT%H:%M:%S')})
        assert form.validate() is False
        assert form.has_errors is True
        assert form.errors == {'test_field': ['datetime out of range']}
38.528571
124
0.637189
658
5,394
5.103343
0.156535
0.122096
0.046456
0.071471
0.80673
0.800774
0.780226
0.763252
0.718582
0.718582
0
0.029583
0.235447
5,394
139
125
38.805755
0.784675
0.036151
0
0.554545
0
0
0.110597
0
0
0
0
0.007194
0.372727
1
0.109091
false
0
0.054545
0
0.227273
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
2a5de40062ee70f030b49eaa1ac24ac5e2f3bb91
91
py
Python
telemetry/third_party/modulegraph/modulegraph_tests/testpkg-regr5/script.py
tingshao/catapult
a8fe19e0c492472a8ed5710be9077e24cc517c5c
[ "BSD-3-Clause" ]
2,151
2020-04-18T07:31:17.000Z
2022-03-31T08:39:18.000Z
telemetry/third_party/modulegraph/modulegraph_tests/testpkg-regr5/script.py
tingshao/catapult
a8fe19e0c492472a8ed5710be9077e24cc517c5c
[ "BSD-3-Clause" ]
395
2020-04-18T08:22:18.000Z
2021-12-08T13:04:49.000Z
telemetry/third_party/modulegraph/modulegraph_tests/testpkg-regr5/script.py
tingshao/catapult
a8fe19e0c492472a8ed5710be9077e24cc517c5c
[ "BSD-3-Clause" ]
338
2020-04-18T08:03:10.000Z
2022-03-29T12:33:22.000Z
import __init__

from modulegraph.find_modules import find_needed_modules
import distutils
18.2
56
0.89011
12
91
6.166667
0.666667
0.351351
0
0
0
0
0
0
0
0
0
0
0.098901
91
4
57
22.75
0.902439
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
2a638477f160c0cfdcf11842b88b117a837e846c
44
py
Python
splunkapi3/search/__init__.py
swimlane/splunkapi3
9b12f58f17ea97a1fe8c6ff41e4da466b5e13e32
[ "MIT" ]
null
null
null
splunkapi3/search/__init__.py
swimlane/splunkapi3
9b12f58f17ea97a1fe8c6ff41e4da466b5e13e32
[ "MIT" ]
null
null
null
splunkapi3/search/__init__.py
swimlane/splunkapi3
9b12f58f17ea97a1fe8c6ff41e4da466b5e13e32
[ "MIT" ]
3
2019-05-31T02:20:05.000Z
2021-02-22T00:45:53.000Z
from splunkapi3.search.search import Search
22
43
0.863636
6
44
6.333333
0.666667
0
0
0
0
0
0
0
0
0
0
0.025
0.090909
44
1
44
44
0.925
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
2a833d98f924147246df7cd81461c19c7ffdbbfc
25
py
Python
python_module/sirius_minimal/__init__.py
mtaillefumier/SIRIUS
50ec1c202c019113c5660f1966b170dec9dfd4d4
[ "BSD-2-Clause" ]
77
2016-03-18T08:38:30.000Z
2022-03-11T14:06:25.000Z
python_module/sirius_minimal/__init__.py
simonpintarelli/SIRIUS
f4b5c4810af2a3ea1e67992d65750535227da84b
[ "BSD-2-Clause" ]
240
2016-04-12T16:39:11.000Z
2022-03-31T08:46:12.000Z
python_module/sirius_minimal/__init__.py
simonpintarelli/SIRIUS
f4b5c4810af2a3ea1e67992d65750535227da84b
[ "BSD-2-Clause" ]
43
2016-03-18T17:45:07.000Z
2022-02-28T05:27:59.000Z
from .py_sirius import *
12.5
24
0.76
4
25
4.5
1
0
0
0
0
0
0
0
0
0
0
0
0.16
25
1
25
25
0.857143
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
aa987ae88c4e67786311c7351ed6d749e478caeb
71
py
Python
front/resources/__init__.py
Levis0045/Techniques-web-INALCO-2020
b805a0af3a78dca3cb25d38dfdcf8ff8a182728d
[ "CC0-1.0" ]
null
null
null
front/resources/__init__.py
Levis0045/Techniques-web-INALCO-2020
b805a0af3a78dca3cb25d38dfdcf8ff8a182728d
[ "CC0-1.0" ]
null
null
null
front/resources/__init__.py
Levis0045/Techniques-web-INALCO-2020
b805a0af3a78dca3cb25d38dfdcf8ff8a182728d
[ "CC0-1.0" ]
1
2020-06-02T09:57:42.000Z
2020-06-02T09:57:42.000Z
from .auth import *
from .clients import *
from .contributions import *
23.666667
28
0.760563
9
71
6
0.555556
0.37037
0
0
0
0
0
0
0
0
0
0
0.15493
71
3
28
23.666667
0.9
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
aabddcd521011255d7f04806f536540b8bf025b7
66
py
Python
dask_cuda/explicit_comms/__init__.py
necaris/dask-cuda
381195162564be133339d82b033f58949e400941
[ "Apache-2.0" ]
null
null
null
dask_cuda/explicit_comms/__init__.py
necaris/dask-cuda
381195162564be133339d82b033f58949e400941
[ "Apache-2.0" ]
null
null
null
dask_cuda/explicit_comms/__init__.py
necaris/dask-cuda
381195162564be133339d82b033f58949e400941
[ "Apache-2.0" ]
null
null
null
from .comms import *
from .dataframe_merge import dataframe_merge
22
44
0.833333
9
66
5.888889
0.555556
0.528302
0
0
0
0
0
0
0
0
0
0
0.121212
66
2
45
33
0.913793
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
aac60b49129caba2919fe740a0addff658fe5894
64
py
Python
medgeconv/tf_ops/__init__.py
StefReck/MEdgeConv
0174a992a11ac9bd3536ab31679677de07a2e8d5
[ "MIT" ]
3
2020-07-23T07:39:36.000Z
2021-02-03T16:16:14.000Z
medgeconv/tf_ops/__init__.py
StefReck/MEdgeConv
0174a992a11ac9bd3536ab31679677de07a2e8d5
[ "MIT" ]
2
2020-09-02T17:11:04.000Z
2021-10-08T12:58:22.000Z
medgeconv/tf_ops/__init__.py
StefReck/MEdgeConv
0174a992a11ac9bd3536ab31679677de07a2e8d5
[ "MIT" ]
1
2021-11-29T15:38:03.000Z
2021-11-29T15:38:03.000Z
from medgeconv.tf_ops.python.ops.knn_graph_ops import knn_graph
32
63
0.875
12
64
4.333333
0.666667
0.307692
0
0
0
0
0
0
0
0
0
0
0.0625
64
1
64
64
0.866667
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
2d57b288d478b7a6c584c6f8dbb7a763b8b9d569
136
py
Python
src/ufdl/jobcontracts/error/__init__.py
waikato-ufdl/ufdl-job-contracts
4d414fc79e110de044e2b8377556d3134c0b5dcc
[ "Apache-2.0" ]
null
null
null
src/ufdl/jobcontracts/error/__init__.py
waikato-ufdl/ufdl-job-contracts
4d414fc79e110de044e2b8377556d3134c0b5dcc
[ "Apache-2.0" ]
null
null
null
src/ufdl/jobcontracts/error/__init__.py
waikato-ufdl/ufdl-job-contracts
4d414fc79e110de044e2b8377556d3134c0b5dcc
[ "Apache-2.0" ]
null
null
null
from ._ContractParsingException import ContractParsingException
from ._UnknownContractNameException import UnknownContractNameException
45.333333
71
0.926471
8
136
15.5
0.5
0
0
0
0
0
0
0
0
0
0
0
0.058824
136
2
72
68
0.96875
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
2db1d8f4375505323ff297961f43da3a2ddae4bc
29
py
Python
school/clients/__init__.py
quintenroets/school
df8b104a8d311ba16ffc8301adb8700bf8bab553
[ "MIT" ]
1
2022-01-26T17:40:59.000Z
2022-01-26T17:40:59.000Z
school/clients/__init__.py
quintenroets/school
df8b104a8d311ba16ffc8301adb8700bf8bab553
[ "MIT" ]
null
null
null
school/clients/__init__.py
quintenroets/school
df8b104a8d311ba16ffc8301adb8700bf8bab553
[ "MIT" ]
null
null
null
from .session import session
14.5
28
0.827586
4
29
6
0.75
0
0
0
0
0
0
0
0
0
0
0
0.137931
29
1
29
29
0.96
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
2dd1c0c53fc2f4490fd6cdd76a28c865119963bd
1,557
py
Python
Check.py
vipenl26/connect4
193634cad4b183a262c2b90c4cd39b0a3f2b5402
[ "MIT" ]
null
null
null
Check.py
vipenl26/connect4
193634cad4b183a262c2b90c4cd39b0a3f2b5402
[ "MIT" ]
null
null
null
Check.py
vipenl26/connect4
193634cad4b183a262c2b90c4cd39b0a3f2b5402
[ "MIT" ]
1
2020-12-20T18:01:58.000Z
2020-12-20T18:01:58.000Z
def check(board):
    # straight (horizontal) combination
    for i in range(6):
        for j in range(4):
            if(board[i][j]=="R" and board[i][j+1]=="R" and board[i][j+2]=="R" and board[i][j+3]=="R"):
                return True
            if(board[i][j]=="Y" and board[i][j+1]=="Y" and board[i][j+2]=="Y" and board[i][j+3]=="Y"):
                return True

    # vertical combination
    for i in range(3):
        for j in range(7):
            if(board[i][j]=="R" and board[i+1][j]=="R" and board[i+2][j]=="R" and board[i+3][j]=="R"):
                return True
            if(board[i][j]=="Y" and board[i+1][j]=="Y" and board[i+2][j]=="Y" and board[i+3][j]=="Y"):
                return True

    # 1st diagonal combination
    for i in range(3):
        for j in range(4):
            if(board[i][j]=="R" and board[i+1][j+1]=="R" and board[i+2][j+2]=="R" and board[i+3][j+3]=="R"):
                return True
            if(board[i][j]=="Y" and board[i+1][j+1]=="Y" and board[i+2][j+2]=="Y" and board[i+3][j+3]=="Y"):
                return True

    # 2nd diagonal combination
    for i in range(3):
        for j in range(3, 7):
            if(board[i][j]=="R" and board[i+1][j-1]=="R" and board[i+2][j-2]=="R" and board[i+3][j-3]=="R"):
                return True
            if(board[i][j]=="Y" and board[i+1][j-1]=="Y" and board[i+2][j-2]=="Y" and board[i+3][j-3]=="Y"):
                return True

    return False


def isDraw(board):
    cnt = 0
    for i in range(7):
        if board[0][i] != "0":
            cnt += 1
    return cnt == 7
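A minimal usage sketch for check() and isDraw(), assuming the 6-row by 7-column list-of-lists board of "R"/"Y"/"0" strings that the functions above expect; the import path is hypothetical.

from Check import check, isDraw  # hypothetical import path for the module above

# 6 rows x 7 columns, "0" marks an empty cell
board = [["0"] * 7 for _ in range(6)]
board[5][0] = board[5][1] = board[5][2] = board[5][3] = "R"  # red has four in a row

print(check(board))   # True  - horizontal four-in-a-row found on the bottom row
print(isDraw(board))  # False - the top row still contains empty cells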
33.12766
108
0.460501
288
1,557
2.489583
0.104167
0.267782
0.301255
0.167364
0.863319
0.793584
0.659693
0.659693
0.659693
0.659693
0
0.048848
0.303147
1,557
47
109
33.12766
0.611982
0.056519
0
0.40625
0
0
0.022526
0
0
0
0
0
0
1
0.0625
false
0
0
0
0.375
0
0
0
0
null
1
1
1
1
1
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
93175aedf325d092a1f7a9b7415a70f501ba731d
35,114
py
Python
unet_models.py
chicm/salt
33b00624c9d10e75445105d0b172e726ade39529
[ "Apache-2.0" ]
null
null
null
unet_models.py
chicm/salt
33b00624c9d10e75445105d0b172e726ade39529
[ "Apache-2.0" ]
null
null
null
unet_models.py
chicm/salt
33b00624c9d10e75445105d0b172e726ade39529
[ "Apache-2.0" ]
null
null
null
from torch import nn from torch.nn import functional as F import torch from torchvision import models from torchvision.models import resnet34, resnet101, resnet50, resnet152 import torchvision import pdb def conv3x3(in_, out): return nn.Conv2d(in_, out, 3, padding=1) class ConvRelu(nn.Module): def __init__(self, in_, out): super().__init__() self.conv = conv3x3(in_, out) self.activation = nn.ReLU(inplace=True) def forward(self, x): x = self.conv(x) x = self.activation(x) return x class NoOperation(nn.Module): def forward(self, x): return x class DecoderBlock(nn.Module): def __init__(self, in_channels, middle_channels, out_channels): super().__init__() self.block = nn.Sequential( ConvRelu(in_channels, middle_channels), nn.ConvTranspose2d(middle_channels, out_channels, kernel_size=3, stride=2, padding=1, output_padding=1), nn.ReLU(inplace=True) ) def forward(self, x): return self.block(x) class UNet11(nn.Module): def __init__(self, num_classes=1, num_filters=32, pretrained=False): """ :param num_classes: :param num_filters: :param pretrained: False - no pre-trained network is used True - encoder is pre-trained with VGG11 """ super().__init__() self.pool = nn.MaxPool2d(2, 2) self.encoder = models.vgg11(pretrained=pretrained).features self.relu = self.encoder[1] self.conv1 = self.encoder[0] self.conv2 = self.encoder[3] self.conv3s = self.encoder[6] self.conv3 = self.encoder[8] self.conv4s = self.encoder[11] self.conv4 = self.encoder[13] self.conv5s = self.encoder[16] self.conv5 = self.encoder[18] self.center = DecoderBlock(num_filters * 8 * 2, num_filters * 8 * 2, num_filters * 8) self.dec5 = DecoderBlock(num_filters * (16 + 8), num_filters * 8 * 2, num_filters * 8) self.dec4 = DecoderBlock(num_filters * (16 + 8), num_filters * 8 * 2, num_filters * 4) self.dec3 = DecoderBlock(num_filters * (8 + 4), num_filters * 4 * 2, num_filters * 2) self.dec2 = DecoderBlock(num_filters * (4 + 2), num_filters * 2 * 2, num_filters) self.dec1 = ConvRelu(num_filters * (2 + 1), num_filters) self.final = nn.Conv2d(num_filters, num_classes, kernel_size=1) def forward(self, x): conv1 = self.relu(self.conv1(x)) conv2 = self.relu(self.conv2(self.pool(conv1))) conv3s = self.relu(self.conv3s(self.pool(conv2))) conv3 = self.relu(self.conv3(conv3s)) conv4s = self.relu(self.conv4s(self.pool(conv3))) conv4 = self.relu(self.conv4(conv4s)) conv5s = self.relu(self.conv5s(self.pool(conv4))) conv5 = self.relu(self.conv5(conv5s)) center = self.center(self.pool(conv5)) dec5 = self.dec5(torch.cat([center, conv5], 1)) dec4 = self.dec4(torch.cat([dec5, conv4], 1)) dec3 = self.dec3(torch.cat([dec4, conv3], 1)) dec2 = self.dec2(torch.cat([dec3, conv2], 1)) dec1 = self.dec1(torch.cat([dec2, conv1], 1)) return self.final(dec1) def unet11(pretrained=False, **kwargs): """ pretrained: False - no pre-trained network is used True - encoder is pre-trained with VGG11 carvana - all weights are pre-trained on Kaggle: Carvana dataset https://www.kaggle.com/c/carvana-image-masking-challenge """ model = UNet11(pretrained=pretrained, **kwargs) if pretrained == 'carvana': state = torch.load('TernausNet.pt') model.load_state_dict(state['model']) return model class DecoderBlockV2(nn.Module): def __init__(self, in_channels, middle_channels, out_channels, is_deconv=True): super(DecoderBlockV2, self).__init__() self.in_channels = in_channels if is_deconv: """ Paramaters for Deconvolution were chosen to avoid artifacts, following link https://distill.pub/2016/deconv-checkerboard/ """ self.block = nn.Sequential( ConvRelu(in_channels, middle_channels), 
nn.ConvTranspose2d(middle_channels, out_channels, kernel_size=4, stride=2, padding=1), nn.ReLU(inplace=True) ) else: self.block = nn.Sequential( nn.Upsample(scale_factor=2, mode='bilinear'), ConvRelu(in_channels, middle_channels), ConvRelu(middle_channels, out_channels), ) def forward(self, x): return self.block(x) class AlbuNet(nn.Module): """ UNet (https://arxiv.org/abs/1505.04597) with Resnet34(https://arxiv.org/abs/1512.03385) encoder Proposed by Alexander Buslaev: https://www.linkedin.com/in/al-buslaev/ """ def __init__(self, num_classes=1, num_filters=32, pretrained=False, is_deconv=False): """ :param num_classes: :param num_filters: :param pretrained: False - no pre-trained network is used True - encoder is pre-trained with resnet34 :is_deconv: False: bilinear interpolation is used in decoder True: deconvolution is used in decoder """ super().__init__() self.num_classes = num_classes self.pool = nn.MaxPool2d(2, 2) self.encoder = torchvision.models.resnet34(pretrained=pretrained) self.relu = nn.ReLU(inplace=True) self.conv1 = nn.Sequential(self.encoder.conv1, self.encoder.bn1, self.encoder.relu, self.pool) self.conv2 = self.encoder.layer1 self.conv3 = self.encoder.layer2 self.conv4 = self.encoder.layer3 self.conv5 = self.encoder.layer4 self.center = DecoderBlockV2(512, num_filters * 8 * 2, num_filters * 8, is_deconv) self.dec5 = DecoderBlockV2(512 + num_filters * 8, num_filters * 8 * 2, num_filters * 8, is_deconv) self.dec4 = DecoderBlockV2(256 + num_filters * 8, num_filters * 8 * 2, num_filters * 8, is_deconv) self.dec3 = DecoderBlockV2(128 + num_filters * 8, num_filters * 4 * 2, num_filters * 2, is_deconv) self.dec2 = DecoderBlockV2(64 + num_filters * 2, num_filters * 2 * 2, num_filters * 2 * 2, is_deconv) self.dec1 = DecoderBlockV2(num_filters * 2 * 2, num_filters * 2 * 2, num_filters, is_deconv) self.dec0 = ConvRelu(num_filters, num_filters) self.final = nn.Conv2d(num_filters, num_classes, kernel_size=1) def forward(self, x): conv1 = self.conv1(x) conv2 = self.conv2(conv1) conv3 = self.conv3(conv2) conv4 = self.conv4(conv3) conv5 = self.conv5(conv4) center = self.center(self.pool(conv5)) dec5 = self.dec5(torch.cat([center, conv5], 1)) dec4 = self.dec4(torch.cat([dec5, conv4], 1)) dec3 = self.dec3(torch.cat([dec4, conv3], 1)) dec2 = self.dec2(torch.cat([dec3, conv2], 1)) dec1 = self.dec1(dec2) dec0 = self.dec0(dec1) return self.final(dec0) class UNetVGG16(nn.Module): """PyTorch U-Net model using VGG16 encoder. UNet: https://arxiv.org/abs/1505.04597 VGG: https://arxiv.org/abs/1409.1556 Proposed by Vladimir Iglovikov and Alexey Shvets: https://github.com/ternaus/TernausNet Args: num_classes (int): Number of output classes. num_filters (int, optional): Number of filters in the last layer of decoder. Defaults to 32. dropout_2d (float, optional): Probability factor of dropout layer before output layer. Defaults to 0.2. pretrained (bool, optional): False - no pre-trained weights are being used. True - VGG encoder is pre-trained on ImageNet. Defaults to False. is_deconv (bool, optional): False: bilinear interpolation is used in decoder. True: deconvolution is used in decoder. Defaults to False. 
""" def __init__(self, num_classes=1, num_filters=32, dropout_2d=0.2, pretrained=False, is_deconv=False): super().__init__() self.num_classes = num_classes self.dropout_2d = dropout_2d self.pool = nn.MaxPool2d(2, 2) self.encoder = torchvision.models.vgg16(pretrained=pretrained).features self.relu = nn.ReLU(inplace=True) self.conv1 = nn.Sequential(self.encoder[0], self.relu, self.encoder[2], self.relu) self.conv2 = nn.Sequential(self.encoder[5], self.relu, self.encoder[7], self.relu) self.conv3 = nn.Sequential(self.encoder[10], self.relu, self.encoder[12], self.relu, self.encoder[14], self.relu) self.conv4 = nn.Sequential(self.encoder[17], self.relu, self.encoder[19], self.relu, self.encoder[21], self.relu) self.conv5 = nn.Sequential(self.encoder[24], self.relu, self.encoder[26], self.relu, self.encoder[28], self.relu) self.center = DecoderBlockV2(512, num_filters * 8 * 2, num_filters * 8, is_deconv) self.dec5 = DecoderBlockV2(512 + num_filters * 8, num_filters * 8 * 2, num_filters * 8, is_deconv) self.dec4 = DecoderBlockV2(512 + num_filters * 8, num_filters * 8 * 2, num_filters * 8, is_deconv) self.dec3 = DecoderBlockV2(256 + num_filters * 8, num_filters * 4 * 2, num_filters * 2, is_deconv) self.dec2 = DecoderBlockV2(128 + num_filters * 2, num_filters * 2 * 2, num_filters, is_deconv) self.dec1 = ConvRelu(64 + num_filters, num_filters) self.final = nn.Conv2d(num_filters, num_classes, kernel_size=1) def forward(self, x): conv1 = self.conv1(x) conv2 = self.conv2(self.pool(conv1)) conv3 = self.conv3(self.pool(conv2)) conv4 = self.conv4(self.pool(conv3)) conv5 = self.conv5(self.pool(conv4)) center = self.center(self.pool(conv5)) dec5 = self.dec5(torch.cat([center, conv5], 1)) dec4 = self.dec4(torch.cat([dec5, conv4], 1)) dec3 = self.dec3(torch.cat([dec4, conv3], 1)) dec2 = self.dec2(torch.cat([dec3, conv2], 1)) dec1 = self.dec1(torch.cat([dec2, conv1], 1)) return self.final(F.dropout2d(dec1, p=self.dropout_2d)) class UNetResNet(nn.Module): """PyTorch U-Net model using ResNet(34, 101 or 152) encoder. UNet: https://arxiv.org/abs/1505.04597 ResNet: https://arxiv.org/abs/1512.03385 Proposed by Alexander Buslaev: https://www.linkedin.com/in/al-buslaev/ Args: encoder_depth (int): Depth of a ResNet encoder (34, 101 or 152). num_classes (int): Number of output classes. num_filters (int, optional): Number of filters in the last layer of decoder. Defaults to 32. dropout_2d (float, optional): Probability factor of dropout layer before output layer. Defaults to 0.2. pretrained (bool, optional): False - no pre-trained weights are being used. True - ResNet encoder is pre-trained on ImageNet. Defaults to False. is_deconv (bool, optional): False: bilinear interpolation is used in decoder. True: deconvolution is used in decoder. Defaults to False. 
""" def __init__(self, encoder_depth, num_classes=1, num_filters=32, dropout_2d=0.2, pretrained=True, is_deconv=True): super().__init__() #pdb.set_trace() self.name = 'UNetResNet_'+str(encoder_depth) self.num_classes = num_classes self.dropout_2d = dropout_2d if encoder_depth == 34: self.encoder = torchvision.models.resnet34(pretrained=pretrained) bottom_channel_nr = 512 elif encoder_depth == 50: self.encoder = torchvision.models.resnet50(pretrained=pretrained) bottom_channel_nr = 2048 elif encoder_depth == 101: self.encoder = torchvision.models.resnet101(pretrained=pretrained) bottom_channel_nr = 2048 elif encoder_depth == 152: self.encoder = torchvision.models.resnet152(pretrained=pretrained) bottom_channel_nr = 2048 else: raise NotImplementedError('only 34, 101, 152 version of Resnet are implemented') self.pool = nn.MaxPool2d(2, 2) self.relu = nn.ReLU(inplace=True) self.conv1 = nn.Sequential(self.encoder.conv1, self.encoder.bn1, self.encoder.relu) #self.pool) self.conv2 = self.encoder.layer1 self.conv3 = self.encoder.layer2 self.conv4 = self.encoder.layer3 self.conv5 = self.encoder.layer4 self.center = DecoderBlockV2(bottom_channel_nr, num_filters * 8 * 2, num_filters * 8, is_deconv) self.dec5 = DecoderBlockV2(bottom_channel_nr + num_filters * 8, num_filters * 8 * 2, num_filters * 8, is_deconv) self.dec4 = DecoderBlockV2(bottom_channel_nr // 2 + num_filters * 8, num_filters * 8 * 2, num_filters * 8, is_deconv) self.dec3 = DecoderBlockV2(bottom_channel_nr // 4 + num_filters * 8, num_filters * 4 * 2, num_filters * 2, is_deconv) self.dec2 = DecoderBlockV2(bottom_channel_nr // 8 + num_filters * 2, num_filters * 2 * 2, num_filters * 2 * 2, is_deconv) self.dec1 = DecoderBlockV2(num_filters * 2 * 2, num_filters * 2 * 2, num_filters, is_deconv) self.dec0 = ConvRelu(num_filters, num_filters) self.final = nn.Conv2d(num_filters, num_classes, kernel_size=1) self.classifier = nn.Linear(num_filters * 256 * 256, 1) def forward(self, x): conv1 = self.conv1(x) conv2 = self.conv2(conv1) conv3 = self.conv3(conv2) conv4 = self.conv4(conv3) conv5 = self.conv5(conv4) pool = self.pool(conv5) center = self.center(pool) dec5 = self.dec5(torch.cat([center, conv5], 1)) dec4 = self.dec4(torch.cat([dec5, conv4], 1)) dec3 = self.dec3(torch.cat([dec4, conv3], 1)) dec2 = self.dec2(torch.cat([dec3, conv2], 1)) dec1 = self.dec1(dec2) dec0 = self.dec0(dec1) out = self.pool(dec0) cls_out = self.classifier(F.dropout(dec0.view(dec0.size(0), -1), p=0.25)) return self.final(F.dropout2d(out, p=self.dropout_2d)), cls_out def freeze_bn(self): '''Freeze BatchNorm layers.''' for layer in self.modules(): if isinstance(layer, nn.BatchNorm2d): layer.eval() def get_params(self, base_lr): group1 = [self.conv1, self.conv2, self.conv3, self.conv4, self.conv5] group2 = [self.dec0, self.dec1, self.dec2, self.dec3, self.dec4, self.dec5, self.center] group3 = [self.classifier, self.final] params1 = [] for x in group1: for p in x.parameters(): params1.append(p) param_group1 = {'params': params1, 'lr': base_lr / 100} params2 = [] for x in group2: for p in x.parameters(): params2.append(p) param_group2 = {'params': params2, 'lr': base_lr / 10} params3 = [] for x in group3: for p in x.parameters(): params3.append(p) param_group3 = {'params': params3, 'lr': base_lr} return [param_group1, param_group2, param_group3] class ConvBn2d(nn.Module): def __init__(self, in_channels, out_channels, kernel_size=(3,3), stride=(1,1), padding=(1,1)): super(ConvBn2d, self).__init__() self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=kernel_size, 
stride=stride, padding=padding, bias=False) self.bn = nn.BatchNorm2d(out_channels) def forward(self, x): x = self.conv(x) x = self.bn(x) return x class ChannelAttentionGate(nn.Module): def __init__(self, channel, reduction=16): super(ChannelAttentionGate, self).__init__() self.fc1 = nn.Conv2d(channel, reduction, kernel_size=1, padding=0) self.fc2 = nn.Conv2d(reduction, channel, kernel_size=1, padding=0) def forward(self, x): x = F.adaptive_avg_pool2d(x,1) x = self.fc1(x) x = F.relu(x, inplace=True) x = self.fc2(x) x = F.sigmoid(x) return x class SpatialAttentionGate(nn.Module): def __init__(self, channel, reduction=16): super(SpatialAttentionGate, self).__init__() self.fc1 = nn.Conv2d(channel, reduction, kernel_size=1, padding=0) self.fc2 = nn.Conv2d(reduction, 1, kernel_size=1, padding=0) def forward(self, x): x = self.fc1(x) x = F.relu(x, inplace=True) x = self.fc2(x) x = F.sigmoid(x) #print(x.size()) return x class DecoderV3(nn.Module): def __init__(self, in_channels, middle_channels, out_channels, is_deconv=True): super(DecoderV3, self).__init__() self.conv1 = ConvBn2d(in_channels, middle_channels) self.conv2 = ConvBn2d(middle_channels, out_channels) self.spatial_gate = SpatialAttentionGate(out_channels) self.channel_gate = ChannelAttentionGate(out_channels) def forward(self, x, e=None): x = F.upsample(x, scale_factor=2, mode='bilinear', align_corners=True) if e is not None: x = torch.cat([x,e], 1) x = F.relu(self.conv1(x), inplace=True) x = F.relu(self.conv2(x), inplace=True) g1 = self.spatial_gate(x) g2 = self.channel_gate(x) x = x*g1 + x*g2 return x class DecoderAtt(nn.Module): def __init__(self, in_channels, middle_channels, out_channels): super(DecoderAtt, self).__init__() self.conv1 = ConvBn2d(in_channels, middle_channels) self.deconv = nn.ConvTranspose2d(middle_channels, out_channels, kernel_size=4, stride=2, padding=1) self.bn = nn.BatchNorm2d(out_channels) self.spatial_gate = SpatialAttentionGate(out_channels) self.channel_gate = ChannelAttentionGate(out_channels) def forward(self, x): x = F.relu(self.conv1(x), inplace=True) x = self.deconv(x) x = self.bn(x) x = F.relu(x, inplace=True) g1 = self.spatial_gate(x) g2 = self.channel_gate(x) x = x*g1 + x*g2 return x class EncoderAttention(nn.Module): def __init__(self, channels): super(EncoderAttention, self).__init__() self.spatial_gate = SpatialAttentionGate(channels) self.channel_gate = ChannelAttentionGate(channels) def forward(self, x): g1 = self.spatial_gate(x) g2 = self.channel_gate(x) x = x*g1 + x*g2 return x class UNetResNetAtt(nn.Module): ''' only + decoder attention on UNetResNet ''' def __init__(self, encoder_depth, num_classes=1, num_filters=32, dropout_2d=0.2, pretrained=True, is_deconv=True): super(UNetResNetAtt, self).__init__() #pdb.set_trace() self.name = 'UNetResNetAtt_'+str(encoder_depth) self.num_classes = num_classes self.dropout_2d = dropout_2d if encoder_depth == 34: self.encoder = torchvision.models.resnet34(pretrained=pretrained) bottom_channel_nr = 512 elif encoder_depth == 50: self.encoder = torchvision.models.resnet50(pretrained=pretrained) bottom_channel_nr = 2048 elif encoder_depth == 101: self.encoder = torchvision.models.resnet101(pretrained=pretrained) bottom_channel_nr = 2048 elif encoder_depth == 152: self.encoder = torchvision.models.resnet152(pretrained=pretrained) bottom_channel_nr = 2048 else: raise NotImplementedError('only 34, 101, 152 version of Resnet are implemented') self.pool = nn.MaxPool2d(2, 2) self.relu = nn.ReLU(inplace=True) self.conv1 = nn.Sequential(self.encoder.conv1, 
self.encoder.bn1, self.encoder.relu) #self.pool) self.conv2 = self.encoder.layer1 self.conv3 = self.encoder.layer2 self.conv4 = self.encoder.layer3 self.conv5 = self.encoder.layer4 self.center = DecoderAtt(bottom_channel_nr, num_filters * 8 * 2, num_filters * 8) self.dec5 = DecoderAtt(bottom_channel_nr + num_filters * 8, num_filters * 8 * 2, num_filters * 8) self.dec4 = DecoderAtt(bottom_channel_nr // 2 + num_filters * 8, num_filters * 8 * 2, num_filters * 8) self.dec3 = DecoderAtt(bottom_channel_nr // 4 + num_filters * 8, num_filters * 4 * 2, num_filters * 2) self.dec2 = DecoderAtt(bottom_channel_nr // 8 + num_filters * 2, num_filters * 2 * 2, num_filters * 2 * 2) self.dec1 = DecoderAtt(num_filters * 2 * 2, num_filters * 2 * 2, num_filters) self.dec0 = ConvRelu(num_filters, num_filters) self.final = nn.Conv2d(num_filters, num_classes, kernel_size=1) #self.classifier = nn.Linear(num_filters * 256 * 256, 1) def forward(self, x): conv1 = self.conv1(x) conv2 = self.conv2(conv1) conv3 = self.conv3(conv2) conv4 = self.conv4(conv3) conv5 = self.conv5(conv4) pool = self.pool(conv5) center = self.center(pool) dec5 = self.dec5(torch.cat([center, conv5], 1)) dec4 = self.dec4(torch.cat([dec5, conv4], 1)) dec3 = self.dec3(torch.cat([dec4, conv3], 1)) dec2 = self.dec2(torch.cat([dec3, conv2], 1)) dec1 = self.dec1(dec2) dec0 = self.dec0(dec1) out = self.pool(dec0) return self.final(F.dropout2d(out, p=self.dropout_2d)), None def freeze_bn(self): '''Freeze BatchNorm layers.''' for layer in self.modules(): if isinstance(layer, nn.BatchNorm2d): layer.eval() def get_params(self, base_lr): group1 = [self.conv1, self.conv2, self.conv3, self.conv4, self.conv5] group2 = [self.dec0, self.dec1, self.dec2, self.dec3, self.dec4, self.dec5, self.center] group3 = [self.final] params1 = [] for x in group1: for p in x.parameters(): params1.append(p) param_group1 = {'params': params1, 'lr': base_lr / 100} params2 = [] for x in group2: for p in x.parameters(): params2.append(p) param_group2 = {'params': params2, 'lr': base_lr / 10} params3 = [] for x in group3: for p in x.parameters(): params3.append(p) param_group3 = {'params': params3, 'lr': base_lr} return [param_group1, param_group2, param_group3] class UNetResNetV3(nn.Module): def __init__(self, encoder_depth, num_classes=1, num_filters=32, dropout_2d=0.2, pretrained=True, is_deconv=True): super(UNetResNetV3, self).__init__() #pdb.set_trace() self.name = 'UNetResNetV3_'+str(encoder_depth) self.num_classes = num_classes self.dropout_2d = dropout_2d if encoder_depth == 34: self.encoder = torchvision.models.resnet34(pretrained=pretrained) bottom_channel_nr = 512 elif encoder_depth == 50: self.encoder = torchvision.models.resnet50(pretrained=pretrained) bottom_channel_nr = 2048 elif encoder_depth == 101: self.encoder = torchvision.models.resnet101(pretrained=pretrained) bottom_channel_nr = 2048 elif encoder_depth == 152: self.encoder = torchvision.models.resnet152(pretrained=pretrained) bottom_channel_nr = 2048 else: raise NotImplementedError('only 34, 101, 152 version of Resnet are implemented') self.pool = nn.MaxPool2d(2, 2) self.relu = nn.ReLU(inplace=True) self.conv1 = nn.Sequential(self.encoder.conv1, self.encoder.bn1, self.encoder.relu, self.pool) self.conv2 = self.encoder.layer1 self.conv3 = self.encoder.layer2 self.conv4 = self.encoder.layer3 self.conv5 = self.encoder.layer4 self.center = DecoderAtt(bottom_channel_nr, num_filters * 8 * 2, num_filters * 8) self.dec5 = DecoderAtt(bottom_channel_nr + num_filters * 8, num_filters * 8 * 2, num_filters * 8) self.dec4 = 
DecoderAtt(bottom_channel_nr // 2 + num_filters * 8, num_filters * 8 * 2, num_filters * 8) self.dec3 = DecoderAtt(bottom_channel_nr // 4 + num_filters * 8, num_filters * 4 * 2, num_filters * 2) self.dec2 = DecoderAtt(bottom_channel_nr // 8 + num_filters * 2, num_filters * 2 * 2, num_filters * 2 * 2) self.dec1 = DecoderAtt(num_filters * 2 * 2, num_filters * 2 * 2, num_filters) #self.dec0 = ConvRelu(num_filters, num_filters) #self.final = nn.Conv2d(num_filters, num_classes, kernel_size=1) self.logit = nn.Sequential( ConvBn2d(736, 64, kernel_size=3, padding=1), nn.ReLU(inplace=True), nn.Conv2d(64, 1, kernel_size=1, padding=0) ) #self.logit = nn.Sequential( # nn.Conv2d(num_filters, num_filters, kernel_size=3, padding=1), # EncoderAttention(num_filters), # nn.ReLU(inplace=True), # nn.Conv2d(num_filters, 1, kernel_size=1, padding=0) #) def forward(self, x): conv1 = self.conv1(x) #;print('conv1:', conv1.size()) conv2 = self.conv2(conv1) #;print('conv2:', conv2.size()) conv3 = self.conv3(conv2) #;print('conv3:', conv3.size()) conv4 = self.conv4(conv3) #;print('conv4:', conv4.size()) conv5 = self.conv5(conv4) #;print('conv5:', conv5.size()) pool = self.pool(conv5) center = self.center(pool) dec5 = self.dec5(torch.cat([center, conv5], 1)) dec4 = self.dec4(torch.cat([dec5, conv4], 1)) dec3 = self.dec3(torch.cat([dec4, conv3], 1)) dec2 = self.dec2(torch.cat([dec3, conv2], 1)) #print('dec2:', dec2.size()) dec1 = self.dec1(dec2) #; print('dec1:', dec1.size()) #dec0 = self.dec0(dec1); print('dec0:', dec0.size()) f = torch.cat([ dec1, F.upsample(dec2, scale_factor=2, mode='bilinear', align_corners=False), F.upsample(dec3, scale_factor=4, mode='bilinear', align_corners=False), F.upsample(dec4, scale_factor=8, mode='bilinear', align_corners=False), F.upsample(dec5, scale_factor=16, mode='bilinear', align_corners=False), ], 1) f = F.dropout2d(f, p=self.dropout_2d) #out = self.pool(dec0) return self.logit(f), None def freeze_bn(self): '''Freeze BatchNorm layers.''' for layer in self.modules(): if isinstance(layer, nn.BatchNorm2d): layer.eval() def get_params(self, base_lr): group1 = [self.conv1, self.conv2, self.conv3, self.conv4, self.conv5] group2 = [self.dec1, self.dec2, self.dec3, self.dec4, self.dec5, self.center] group3 = [self.logit] params1 = [] for x in group1: for p in x.parameters(): params1.append(p) param_group1 = {'params': params1, 'lr': base_lr / 10} params2 = [] for x in group2: for p in x.parameters(): params2.append(p) param_group2 = {'params': params2, 'lr': base_lr / 2} params3 = [] for x in group3: for p in x.parameters(): params3.append(p) param_group3 = {'params': params3, 'lr': base_lr} return [param_group1, param_group2, param_group3] class UNetResNetV4(nn.Module): def __init__(self, encoder_depth, num_classes=1, num_filters=32, dropout_2d=0.2, pretrained=True, is_deconv=True): super(UNetResNetV4, self).__init__() #pdb.set_trace() self.name = 'UNetResNetV4_'+str(encoder_depth) self.num_classes = num_classes self.dropout_2d = dropout_2d if encoder_depth == 34: self.encoder = torchvision.models.resnet34(pretrained=pretrained) bottom_channel_nr = 512 elif encoder_depth == 50: self.encoder = torchvision.models.resnet50(pretrained=pretrained) bottom_channel_nr = 2048 elif encoder_depth == 101: self.encoder = torchvision.models.resnet101(pretrained=pretrained) bottom_channel_nr = 2048 elif encoder_depth == 152: self.encoder = torchvision.models.resnet152(pretrained=pretrained) bottom_channel_nr = 2048 else: raise NotImplementedError('only 34, 101, 152 version of Resnet are implemented') 
self.pool = nn.MaxPool2d(2, 2) self.relu = nn.ReLU(inplace=True) self.conv1 = nn.Sequential(self.encoder.conv1, self.encoder.bn1, self.encoder.relu, self.pool) self.att1 = EncoderAttention(num_filters*2) self.conv2 = self.encoder.layer1 self.att2 = EncoderAttention(num_filters*8) self.conv3 = self.encoder.layer2 self.att3 = EncoderAttention(num_filters*16) self.conv4 = self.encoder.layer3 self.att4 = EncoderAttention(num_filters*32) self.conv5 = self.encoder.layer4 self.att5 = EncoderAttention(num_filters*64) self.center = DecoderV3(bottom_channel_nr, num_filters * 8 * 2, num_filters * 8, is_deconv) self.dec5 = DecoderV3(bottom_channel_nr + num_filters * 8, num_filters * 8 * 2, num_filters * 8, is_deconv) self.dec4 = DecoderV3(bottom_channel_nr // 2 + num_filters * 8, num_filters * 8 * 2, num_filters * 8, is_deconv) self.dec3 = DecoderV3(bottom_channel_nr // 4 + num_filters * 8, num_filters * 4 * 2, num_filters * 2, is_deconv) self.dec2 = DecoderV3(bottom_channel_nr // 8 + num_filters * 2, num_filters * 2 * 2, num_filters * 2 * 2, is_deconv) self.dec1 = DecoderV3(num_filters * 2 * 2, num_filters * 2 * 2, num_filters, is_deconv) #self.dec0 = ConvRelu(num_filters, num_filters) #self.final = nn.Conv2d(num_filters, num_classes, kernel_size=1) self.logit = nn.Sequential( EncoderAttention(736), nn.Conv2d(736, 64, kernel_size=3, padding=1), EncoderAttention(64), nn.ReLU(inplace=True), nn.Conv2d(64, 1, kernel_size=1, padding=0) ) def forward(self, x): conv1 = self.conv1(x) #;print('conv1:', conv1.size()) att1 = self.att1(conv1) #; print('att1:', att1.size()) conv2 = self.conv2(att1) #;print('conv2:', conv2.size()) att2 = self.att2(conv2) #; print('att2:', att2.size()) conv3 = self.conv3(att2) #;print('conv3:', conv3.size()) att3 = self.att3(conv3) #; print('att3:', att3.size()) conv4 = self.conv4(att3) #;print('conv4:', conv4.size()) att4 = self.att4(conv4) #; print('att4:', att4.size()) conv5 = self.conv5(att4) #;print('conv5:', conv5.size()) att5 = self.att5(conv5) #; print('att5:', att5.size()) pool = self.pool(att5) center = self.center(pool) dec5 = self.dec5(torch.cat([center, att5], 1)) dec4 = self.dec4(torch.cat([dec5, att4], 1)) dec3 = self.dec3(torch.cat([dec4, att3], 1)) dec2 = self.dec2(torch.cat([dec3, att2], 1)); #print('dec2:', dec2.size()) dec1 = self.dec1(dec2); #print('dec1:', dec1.size()) #dec0 = self.dec0(dec1); print('dec0:', dec0.size()) f = torch.cat([ dec1, F.upsample(dec2, scale_factor=2, mode='bilinear', align_corners=False), F.upsample(dec3, scale_factor=4, mode='bilinear', align_corners=False), F.upsample(dec4, scale_factor=8, mode='bilinear', align_corners=False), F.upsample(dec5, scale_factor=16, mode='bilinear', align_corners=False), ], 1) f = F.dropout2d(f, p=self.dropout_2d) #out = self.pool(dec0) return self.logit(f), None def freeze_bn(self): '''Freeze BatchNorm layers.''' for layer in self.modules(): if isinstance(layer, nn.BatchNorm2d): layer.eval() def get_params(self, base_lr): group1 = [self.conv1, self.conv2, self.conv3, self.conv4, self.conv5] group2 = [self.dec1, self.dec2, self.dec3, self.dec4, self.dec5, self.center] group3 = [self.att1, self.att2, self.att3, self.att4, self.att5,] group4 = [self.logit] params1 = [] for x in group1: for p in x.parameters(): params1.append(p) param_group1 = {'params': params1, 'lr': base_lr / 100} params2 = [] for x in group2: for p in x.parameters(): params2.append(p) param_group2 = {'params': params2, 'lr': base_lr / 10} params3 = [] for x in group3: for p in x.parameters(): params3.append(p) param_group3 = {'params': 
params3, 'lr': base_lr / 20} params4 = [] for x in group4: for p in x.parameters(): params4.append(p) param_group4 = {'params': params4, 'lr': base_lr} return [param_group1, param_group2, param_group3, param_group4] def test(): model = UNetResNetV3(34).cuda() model.freeze_bn() inputs = torch.randn(2,3,128,128).cuda() out, _ = model(inputs) #print(model) print(out.size()) #, cls_taret.size()) #print(out) if __name__ == '__main__': test()
38.417943
125
0.585521
4,336
35,114
4.592251
0.069649
0.078847
0.034251
0.015066
0.806298
0.787364
0.766121
0.743572
0.733176
0.718913
0
0.059407
0.296748
35,114
914
126
38.417943
0.746943
0.115509
0
0.691693
0
0
0.015494
0
0
0
0
0
0
1
0.070288
false
0
0.011182
0.00639
0.145367
0.001597
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
931a4800163647921420a6a40439104da5037e62
18
py
Python
minikerberos/crypto/RC4/__init__.py
fuckup1337/minikerberos
4c7d6a9d791b6a7b05a211a5bccb6c4e6c37187e
[ "MIT" ]
146
2018-06-11T06:07:00.000Z
2022-03-21T06:46:45.000Z
minikerberos/crypto/RC4/__init__.py
fuckup1337/minikerberos
4c7d6a9d791b6a7b05a211a5bccb6c4e6c37187e
[ "MIT" ]
19
2018-10-08T18:49:35.000Z
2022-03-31T06:45:37.000Z
minikerberos/crypto/RC4/__init__.py
fuckup1337/minikerberos
4c7d6a9d791b6a7b05a211a5bccb6c4e6c37187e
[ "MIT" ]
35
2018-06-10T23:20:14.000Z
2022-01-24T08:34:39.000Z
from .RC4 import *
18
18
0.722222
3
18
4.333333
1
0
0
0
0
0
0
0
0
0
0
0.066667
0.166667
18
1
18
18
0.8
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
932cbf34a607879b161d23367771504f113b2abc
187
py
Python
users/managers.py
moshthepitt/probsc
9b8cab206bb1c41238e36bd77f5e0573df4d8e2d
[ "MIT" ]
null
null
null
users/managers.py
moshthepitt/probsc
9b8cab206bb1c41238e36bd77f5e0573df4d8e2d
[ "MIT" ]
null
null
null
users/managers.py
moshthepitt/probsc
9b8cab206bb1c41238e36bd77f5e0573df4d8e2d
[ "MIT" ]
null
null
null
from core.managers import CoreManager


class UserProfileManager(CoreManager):
    pass


class DepartmentManager(CoreManager):
    pass


class PositionManager(CoreManager):
    pass
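An illustrative sketch (not part of the original file) of how one of these managers would typically be attached to a Django model; the UserProfile model and the relative import path are assumptions.

from django.db import models

from .managers import UserProfileManager  # assumed relative import within the users app


class UserProfile(models.Model):  # hypothetical model for illustration
    name = models.CharField(max_length=255)

    # replaces the default manager so the CoreManager queryset behaviour applies
    objects = UserProfileManager()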
11
38
0.764706
17
187
8.411765
0.588235
0.314685
0.27972
0
0
0
0
0
0
0
0
0
0.181818
187
16
39
11.6875
0.934641
0
0
0.428571
0
0
0
0
0
0
0
0
0
1
0
true
0.428571
0.142857
0
0.571429
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
1
0
0
6
fa798adf598ea9f0965ea6337a2a9a1c206cd558
18,818
py
Python
msc/contr_var_2_log.py
rtagirov/python_scr_pc_imperial
423204964ddbc9c117bd2b3bb4397ee98b89a56d
[ "MIT" ]
null
null
null
msc/contr_var_2_log.py
rtagirov/python_scr_pc_imperial
423204964ddbc9c117bd2b3bb4397ee98b89a56d
[ "MIT" ]
null
null
null
msc/contr_var_2_log.py
rtagirov/python_scr_pc_imperial
423204964ddbc9c117bd2b3bb4397ee98b89a56d
[ "MIT" ]
null
null
null
import numpy as np import matplotlib.pyplot as plt from matplotlib.ticker import AutoMinorLocator from matplotlib.ticker import MultipleLocator from matplotlib.ticker import LogLocator import importlib import math import sys if not '../aux/' in sys.path: sys.path.append('../aux/') import paths; importlib.reload(paths) import spec; importlib.reload(spec) import phys; importlib.reload(phys) import nessy; importlib.reload(nessy) import auxsys; importlib.reload(auxsys) import auxplt; importlib.reload(auxplt) prefix0 = paths.it0f prefix1 = paths.it1f #1 - NESSY LTE #2 - NESSY NLTE #3 - ATLAS #4 - NESSY LTE FAL #5 - NESSY NLTE FAL waf = np.loadtxt(paths.atlruns + 'var_m/Q/spec.out', skiprows = 2, usecols = [1]) Q3f = np.loadtxt(paths.atlruns + 'var_m/Q/spec.out', skiprows = 2, usecols = [3]) F3f = np.loadtxt(paths.atlruns + 'var_m/F/spec.out', skiprows = 2, usecols = [3]) U3f = np.loadtxt(paths.atlruns + 'var_m/U/spec.out', skiprows = 2, usecols = [3]) P3f = np.loadtxt(paths.atlruns + 'var_m/P/spec.out', skiprows = 2, usecols = [3]) Q3f = Q3f * phys.c / (waf * 1.0e-7)**2.0 * 1.0e-7 * (phys.r_sun / phys.au)**2.0 * 1.0e-3 * math.pi F3f = F3f * phys.c / (waf * 1.0e-7)**2.0 * 1.0e-7 * (phys.r_sun / phys.au)**2.0 * 1.0e-3 * math.pi P3f = P3f * phys.c / (waf * 1.0e-7)**2.0 * 1.0e-7 * (phys.r_sun / phys.au)**2.0 * 1.0e-3 * math.pi U3f = U3f * phys.c / (waf * 1.0e-7)**2.0 * 1.0e-7 * (phys.r_sun / phys.au)**2.0 * 1.0e-3 * math.pi idx = np.where((waf >= 100.5) & (waf <= 1100.0)) wa = waf[idx] Q3 = Q3f[idx] F3 = F3f[idx] U3 = U3f[idx] P3 = P3f[idx] #wn, Q1h = nessy.read_spec(prefix0 + 'var_od/Q/kur/', wvl1 = 1005, wvl2 = 11000) #wn, F1h = nessy.read_spec(prefix0 + 'var_od/F/kur/', wvl1 = 1005, wvl2 = 11000) #wn, U1h = nessy.read_spec(prefix0 + 'var_od/U/kur/', wvl1 = 1005, wvl2 = 11000) #wn, P1h = nessy.read_spec(prefix0 + 'var_od/P/kur/', wvl1 = 1005, wvl2 = 11000) # #wn, Q2h = nessy.read_spec(prefix1 + 'var_od/Q/kur/', wvl1 = 1005, wvl2 = 11000) #wn, F2h = nessy.read_spec(prefix1 + 'var_od/F/kur/', wvl1 = 1005, wvl2 = 11000) #wn, U2h = nessy.read_spec(prefix1 + 'var_od/U/kur/', wvl1 = 1005, wvl2 = 11000) #wn, P2h = nessy.read_spec(prefix1 + 'var_od/P/kur/', wvl1 = 1005, wvl2 = 11000) # #wn, Q5h = nessy.read_spec(prefix1 + 'var_od/Q/fal/', wvl1 = 1005, wvl2 = 11000) #wn, F5h = nessy.read_spec(prefix1 + 'var_od/F/fal/', wvl1 = 1005, wvl2 = 11000) # #wn = wn / 10.0 # #Q1 = spec.mean_over_grid(Q1h, wn, wa) #F1 = spec.mean_over_grid(F1h, wn, wa) #U1 = spec.mean_over_grid(U1h, wn, wa) #P1 = spec.mean_over_grid(P1h, wn, wa) # #Q2 = spec.mean_over_grid(Q2h, wn, wa) #F2 = spec.mean_over_grid(F2h, wn, wa) #U2 = spec.mean_over_grid(U2h, wn, wa) #P2 = spec.mean_over_grid(P2h, wn, wa) # #Q5 = spec.mean_over_grid(Q5h, wn, wa) #F5 = spec.mean_over_grid(F5h, wn, wa) # #np.savez(paths.npz + 'contr_var', w = wa, # q1 = Q1,\ # f1 = F1,\ # u1 = U1,\ # p1 = P1,\ # q2 = Q2,\ # f2 = F2,\ # u2 = U2,\ # p2 = P2,\ # q5 = Q5,\ # f5 = F5,) contr = np.load(paths.npz + 'contr_var.npz') w = contr['w'] Q1 = contr['q1'] F1 = contr['f1'] U1 = contr['u1'] P1 = contr['p1'] Q2 = contr['q2'] F2 = contr['f2'] U2 = contr['u2'] P2 = contr['p2'] Q5 = contr['q5'] F5 = contr['f5'] FQ1 = (F1 - Q1) / Q1 FQ2 = (F2 - Q2) / Q2 FQ3 = (F3 - Q3) / Q3 FQ5 = (F5 - Q5) / Q5 UQ1 = (U1 - Q1) / Q1 UQ2 = (U2 - Q2) / Q2 UQ3 = (U3 - Q3) / Q3 PQ1 = (P1 - Q1) / Q1 PQ2 = (P2 - Q2) / Q2 PQ3 = (P3 - Q3) / Q3 #RDQ13 = (Q1 - Q3) * 100.0 / Q3 #RDQ23 = (Q2 - Q3) * 100.0 / Q3 #RDF13 = (F1 - F3) * 100.0 / F3 #RDF23 = (F2 - F3) * 100.0 / F3 #RDP13 = (P1 - P3) * 100.0 / P3 
#RDP23 = (P2 - P3) * 100.0 / P3 #RDU13 = (U1 - U3) * 100.0 / U3 #RDU23 = (U2 - U3) * 100.0 / U3 #RDFQ13 = (FQ1 - FQ3) * 100.0 / FQ3 #RDFQ23 = (FQ2 - FQ3) * 100.0 / FQ3 #RDFQ43 = (FQ4 - FQ3) * 100.0 / FQ3 #RDFQ53 = (FQ5 - FQ3) * 100.0 / FQ3 #RDUQ13 = (UQ1 - UQ3) * 100.0 / UQ3 #RDUQ23 = (UQ2 - UQ3) * 100.0 / UQ3 #RDPQ13 = (PQ1 - PQ3) * 100.0 / PQ3 #RDPQ23 = (PQ2 - PQ3) * 100.0 / PQ3 plt.close('all') fig, ax = plt.subplots(nrows = 2, ncols = 1, figsize = (6.0, 6.75)) bbox = dict(boxstyle = 'round', ec = (1.0, 0.5, 0.5), fc = (1.0, 0.8, 0.8),) auxplt.figpar(3, 3, 15) fig.tight_layout() plt.subplots_adjust(hspace = 0.15) ls = ':' lw = 1.0 ax[0].axvline(x = 210, linestyle = '--', color = 'k') ax[0].axvline(x = 450, linestyle = '--', color = 'k') ax[0].plot(w, FQ3, color = 'k', linewidth = lw * 1.5, label = 'ATLAS9 (LTE, U99)') ax[0].plot(w, FQ1, color = 'm', linewidth = lw, label = 'NESSY (LTE, U99)') ax[0].plot(w, FQ2, color = 'g', linewidth = lw, label = 'NESSY (NLTE, U99)') ax[0].plot(w, FQ5, color = 'r', linewidth = lw, label = 'NESSY (NLTE, FAL99)') #ax[0].text(140, 4.7e-3, 'Facula', bbox = bbox) ax[0].text(140, 5e-2, 'Facula', bbox = bbox) ax[1].plot(w, PQ3, color = 'k', linewidth = lw * 1.5, label = 'ATLAS9 (LTE, U99)') ax[1].plot(w, PQ1, color = 'm', linewidth = lw, label = 'NESSY (LTE, U99)') ax[1].plot(w, PQ2, color = 'g', linewidth = lw, label = 'NESSY (NLTE, U99)') ax[1].plot(w, UQ3, color = 'k', linewidth = lw * 1.5) ax[1].plot(w, UQ1, color = 'm', linewidth = lw) ax[1].plot(w, UQ2, color = 'g', linewidth = lw) ax[1].text(200, -0.3, 'Penumbra', bbox = bbox) ax[1].text(600, -0.55, 'Umbra', bbox = bbox) ax[0].set_yscale('log') #ax[0].set_ylim(1e-3, 1e+3) #ax[0].set_ylim(1e-2, 1e+3) ax[0].set_ylim(1e-2, 5e+2) ax[1].set_ylim(-1.0, -0.1) ax[1].yaxis.set_minor_locator(AutoMinorLocator(4)) ax[1].set_xlabel('Wavelength, [nm]', fontsize = 15) for i in range(0, 2): ax[i].set_xlim(100.0, 1100) ax[i].xaxis.set_major_locator(MultipleLocator(200)) ax[i].xaxis.set_minor_locator(AutoMinorLocator(4)) #ax[0].tick_params(labelbottom = 'off') ax[0].set_ylabel(r'$(S_f - S_q) / S_q$', fontsize = 15) ax[1].set_ylabel(r'$(S_{\{p,\ u\}} - S_q) / S_q$', fontsize = 15) leg0 = ax[0].legend(framealpha = 1, loc = 1, handletextpad = 1, prop = {'size': 12.0}) leg1 = ax[1].legend(framealpha = 1, loc = 4, handletextpad = 1, prop = {'size': 12.0}) for obj in leg0.legendHandles: obj.set_linewidth(3.0) for obj in leg1.legendHandles: obj.set_linewidth(3.0) auxplt.savepdf('var/contr_log') sys.exit() fig, ax = plt.subplots(nrows = 2, ncols = 1, figsize = (12.0, 10.0)) #pltaux.figpar() fig.tight_layout() fig.suptitle('Facular contrast, Kurucz models: NESSY vs. 
ATLAS', y = 1.01) ax[0].plot(w, np.zeros(len(FQ1)), 'k--') ax[0].set_xlim(110.5, 1000) ls = ':'; lw = 1.5 ax[0].plot(w, FQ1, color = 'b', linewidth = lw, label = 'NESSY (LTE)') ax[0].plot(w, FQ2, color = 'r', linewidth = lw, label = 'NESSY (NLTE)') ax[0].plot(w, FQ3, color = 'g', linewidth = lw, label = 'ATLAS') ax[1].plot(w, abs(RDFQ13), color = 'b', label = 'NESSY (LTE) vs ATLAS') ax[1].plot(w, abs(RDFQ23), color = 'r', linewidth = lw, label = 'NESSY (NLTE) vs ATLAS') ax[1].plot(w, np.ones(len(RDFQ13)), 'k') ax[1].set_yscale('log') ax[1].set_xlim(110.5, 1000) ax[1].set_ylim(1e-2, 1e+3) for i in range(0, 2): ax[i].xaxis.set_major_locator(MultipleLocator(100)) ax[i].xaxis.set_minor_locator(AutoMinorLocator(10)) ax[0].yaxis.set_minor_locator(AutoMinorLocator(5)) ax[1].yaxis.set_major_locator(LogLocator(10)) ax[1].yaxis.set_minor_locator(LogLocator(base = 10.0, subs = (2, 3, 4, 5, 6, 7, 8, 9))) #ax[1].yaxis.tick_right() ax[1].yaxis.set_ticks_position('both') ax[0].set_ylabel('Facular Contrast, [W / m$^2$ / nm]', fontsize = 12.5) ax[1].set_ylabel('(NESSY - ATLAS) / ATLAS, [%]', fontsize = 12.5) ax[1].set_xlabel('Wavelength, [nm]', fontsize = 12.5) leg0 = ax[0].legend(framealpha = 1, loc = 1, handletextpad = 1, prop = {'size': 20.5}) leg1 = ax[1].legend(framealpha = 1, loc = 4, handletextpad = 1, prop = {'size': 20.5}) for obj in leg0.legendHandles: obj.set_linewidth(3.0) for obj in leg1.legendHandles: obj.set_linewidth(3.0) auxplt.savepdf('var/fcontr_kur_nesatl') fig, ax = plt.subplots(nrows = 2, ncols = 1, figsize = (12.0, 10.0)) #pltaux.figpar() fig.tight_layout() fig.suptitle('Facular contrast: NESSY (using FAL99 models) vs. ATLAS (using Kurucz models)', y = 1.01) ax[0].plot(w, np.zeros(len(FQ4)), 'k--') ax[0].set_xlim(110.5, 1000) ls = ':'; lw = 1.5 #ax[0].plot(w, FQ4, color = 'b', linewidth = lw, label = 'NESSY (LTE)') #ax[0].plot(w, FQ5, color = 'r', linewidth = lw, label = 'NESSY (NLTE)') ax[0].plot(w, FQ5, color = 'r', linewidth = lw, label = 'NESSY') ax[0].plot(w, FQ3, color = 'g', linewidth = lw, label = 'ATLAS') #ax[1].plot(w, abs(RDFQ43), color = 'b', label = 'NESSY (LTE) vs ATLAS') #ax[1].plot(w, abs(RDFQ53), color = 'r', linewidth = lw, label = 'NESSY (NLTE) vs ATLAS') ax[1].plot(w, abs(RDFQ53), color = 'r', linewidth = lw, label = 'NESSY vs ATLAS') ax[1].plot(w, np.ones(len(RDFQ43)), 'k') ax[1].set_yscale('log') ax[1].set_xlim(110.5, 1000) ax[1].set_ylim(1e-2, 1e+3) for i in range(0, 2): ax[i].xaxis.set_major_locator(MultipleLocator(100)) ax[i].xaxis.set_minor_locator(AutoMinorLocator(10)) ax[0].yaxis.set_minor_locator(AutoMinorLocator(5)) ax[1].yaxis.set_major_locator(LogLocator(10)) ax[1].yaxis.set_minor_locator(LogLocator(base = 10.0, subs = (2, 3, 4, 5, 6, 7, 8, 9))) #ax[1].yaxis.tick_right() ax[1].yaxis.set_ticks_position('both') ax[0].set_ylabel('Facular Contrast, [W / m$^2$ / nm]', fontsize = 12.5) ax[1].set_ylabel('|(NESSY - ATLAS) / ATLAS|, [%]', fontsize = 12.5) ax[1].set_xlabel('Wavelength, [nm]', fontsize = 12.5) leg0 = ax[0].legend(framealpha = 1, loc = 1, handletextpad = 1, prop = {'size': 20.5}) #leg1 = ax[1].legend(framealpha = 1, loc = 4, handletextpad = 1, prop = {'size': 20.5}) for obj in leg0.legendHandles: obj.set_linewidth(3.0) #for obj in leg1.legendHandles: obj.set_linewidth(3.0) auxplt.savepdf('var/fcontr_fal_nesatl') fig, ax = plt.subplots(nrows = 2, ncols = 1, figsize = (12.0, 10.0)) #pltaux.figpar() fig.tight_layout() fig.suptitle('Umbral contrast: NESSY (using Kurucz models) vs. 
ATLAS (using Kurucz models)', y = 1.01) ax[0].plot(w, np.zeros(len(UQ1)), 'k--') ax[0].set_xlim(110.5, 1000) ls = ':'; lw = 1.5 #ax[0].plot(w, UQ1, color = 'b', linewidth = lw, label = 'NESSY (LTE)') #ax[0].plot(w, UQ2, color = 'r', linewidth = lw, label = 'NESSY (NLTE)') ax[0].plot(w, UQ2, color = 'r', linewidth = lw, label = 'NESSY') ax[0].plot(w, UQ3, color = 'g', linewidth = lw, label = 'ATLAS') #ax[1].plot(w, abs(RDUQ13), color = 'b', label = 'NESSY (LTE) vs ATLAS') #ax[1].plot(w, abs(RDUQ23), color = 'r', linewidth = lw, label = 'NESSY (NLTE) vs ATLAS') ax[1].plot(w, abs(RDUQ23), color = 'r', linewidth = lw, label = 'NESSY vs ATLAS') ax[1].plot(w, np.ones(len(RDUQ13)), 'k') ax[1].set_yscale('log') ax[1].set_xlim(110.5, 1000) ax[1].set_ylim(1e-2, 1e+3) for i in range(0, 2): ax[i].xaxis.set_major_locator(MultipleLocator(100)) ax[i].xaxis.set_minor_locator(AutoMinorLocator(10)) ax[0].yaxis.set_minor_locator(AutoMinorLocator(5)) ax[1].yaxis.set_major_locator(LogLocator(10)) ax[1].yaxis.set_minor_locator(LogLocator(base = 10.0, subs = (2, 3, 4, 5, 6, 7, 8, 9))) ax[1].yaxis.set_ticks_position('both') ax[0].set_ylabel('Umbral Contrast, [W / m$^2$ / nm]', fontsize = 12.5) ax[1].set_ylabel('|(NESSY - ATLAS) / ATLAS|, [%]', fontsize = 12.5) ax[1].set_xlabel('Wavelength, [nm]', fontsize = 12.5) leg0 = ax[0].legend(framealpha = 1, loc = 4, handletextpad = 1, prop = {'size': 20.5}) #leg1 = ax[1].legend(framealpha = 1, loc = 1, handletextpad = 1, prop = {'size': 20.5}) for obj in leg0.legendHandles: obj.set_linewidth(3.0) #for obj in leg1.legendHandles: obj.set_linewidth(3.0) auxplt.savepdf('var/ucontr_kur_nesatl') fig, ax = plt.subplots(nrows = 2, ncols = 1, figsize = (12.0, 10.0)) #pltaux.figpar() fig.tight_layout() fig.suptitle('Penumbral contrast: NESSY (using Kurucz models) vs. 
ATLAS (using Kurucz models)', y = 1.01) ax[0].plot(w, np.zeros(len(PQ1)), 'k--') ax[0].set_xlim(110.5, 1000) ls = ':'; lw = 1.5 #ax[0].plot(w, PQ1, color = 'b', linewidth = lw, label = 'NESSY (LTE)') #ax[0].plot(w, PQ2, color = 'r', linewidth = lw, label = 'NESSY (NLTE)') ax[0].plot(w, PQ2, color = 'r', linewidth = lw, label = 'NESSY') ax[0].plot(w, PQ3, color = 'g', linewidth = lw, label = 'ATLAS') #ax[1].plot(w, abs(RDPQ13), color = 'b', label = 'NESSY (LTE) vs ATLAS') #ax[1].plot(w, abs(RDPQ23), color = 'r', linewidth = lw, label = 'NESSY (NLTE) vs ATLAS') ax[1].plot(w, abs(RDPQ23), color = 'r', linewidth = lw, label = 'NESSY vs ATLAS') ax[1].plot(w, np.ones(len(RDPQ13)), 'k') ax[1].set_yscale('log') ax[1].set_xlim(110.5, 1000) ax[1].set_ylim(1e-2, 1e+3) for i in range(0, 2): ax[i].xaxis.set_major_locator(MultipleLocator(100)) ax[i].xaxis.set_minor_locator(AutoMinorLocator(10)) ax[0].yaxis.set_minor_locator(AutoMinorLocator(5)) ax[1].yaxis.set_major_locator(LogLocator(10)) ax[1].yaxis.set_minor_locator(LogLocator(base = 10.0, subs = (2, 3, 4, 5, 6, 7, 8, 9))) ax[1].yaxis.set_ticks_position('both') ax[0].set_ylabel('Penumbral Contrast, [W / m$^2$ / nm]', fontsize = 12.5) ax[1].set_ylabel('|(NESSY - ATLAS) / ATLAS|, [%]', fontsize = 12.5) ax[1].set_xlabel('Wavelength, [nm]', fontsize = 12.5) leg0 = ax[0].legend(framealpha = 1, loc = 4, handletextpad = 1, prop = {'size': 20.5}) #leg1 = ax[1].legend(framealpha = 1, loc = 1, handletextpad = 1, prop = {'size': 20.5}) for obj in leg0.legendHandles: obj.set_linewidth(3.0) #for obj in leg1.legendHandles: obj.set_linewidth(3.0) auxplt.savepdf('var/pcontr_kur_nesatl') fig, ax = plt.subplots(nrows = 2, ncols = 1, figsize = (12.0, 10.0)) #pltaux.figpar() fig.tight_layout() fig.suptitle('Kurucz quiet sun model: NESSY vs. ATLAS', y = 1.01) ax[0].plot(w, np.zeros(len(Q1)), 'k--') ax[0].set_xlim(110.5, 1000) ls = ':'; lw = 1.5 ax[0].plot(w, Q1, color = 'b', linewidth = lw, label = 'NESSY (LTE)') ax[0].plot(w, Q2, color = 'r', linewidth = lw, label = 'NESSY (NLTE)') ax[0].plot(w, Q3, color = 'g', linewidth = lw, label = 'ATLAS') ax[1].plot(w, abs(RDQ13), color = 'b', label = 'NESSY (LTE) vs ATLAS') ax[1].plot(w, abs(RDQ23), color = 'r', linewidth = lw, label = 'NESSY (NLTE) vs ATLAS') ax[1].plot(w, np.ones(len(RDQ13)), 'k') ax[1].set_yscale('log') ax[1].set_xlim(110.5, 1000) ax[1].set_ylim(1e-2, 1e+3) for i in range(0, 2): ax[i].xaxis.set_major_locator(MultipleLocator(100)) ax[i].xaxis.set_minor_locator(AutoMinorLocator(10)) ax[0].yaxis.set_minor_locator(AutoMinorLocator(5)) ax[1].yaxis.set_major_locator(LogLocator(10)) ax[1].yaxis.set_minor_locator(LogLocator(base = 10.0, subs = (2, 3, 4, 5, 6, 7, 8, 9))) #ax[1].yaxis.tick_right() ax[1].yaxis.set_ticks_position('both') ax[0].set_ylabel('Flux, [W / m$^2$ / nm]', fontsize = 12.5) ax[1].set_ylabel('(NESSY - ATLAS) / ATLAS, [%]', fontsize = 12.5) ax[1].set_xlabel('Wavelength, [nm]', fontsize = 12.5) leg0 = ax[0].legend(framealpha = 1, loc = 1, handletextpad = 1, prop = {'size': 20.5}) leg1 = ax[1].legend(framealpha = 1, loc = 1, handletextpad = 1, prop = {'size': 20.5}) for obj in leg0.legendHandles: obj.set_linewidth(3.0) for obj in leg1.legendHandles: obj.set_linewidth(3.0) auxplt.savepdf('var/Q_kur_nesatl') fig, ax = plt.subplots(nrows = 2, ncols = 1, figsize = (12.0, 10.0)) #pltaux.figpar() fig.tight_layout() fig.suptitle('Kurucz facula model: NESSY vs. 
ATLAS', y = 1.01) ax[0].plot(w, np.zeros(len(F1)), 'k--') ax[0].set_xlim(110.5, 1000) ls = ':'; lw = 1.5 ax[0].plot(w, F1, color = 'b', linewidth = lw, label = 'NESSY (LTE)') ax[0].plot(w, F2, color = 'r', linewidth = lw, label = 'NESSY (NLTE)') ax[0].plot(w, F3, color = 'g', linewidth = lw, label = 'ATLAS') ax[1].plot(w, abs(RDF13), color = 'b', label = 'NESSY (LTE) vs ATLAS') ax[1].plot(w, abs(RDF23), color = 'r', linewidth = lw, label = 'NESSY (NLTE) vs ATLAS') ax[1].plot(w, np.ones(len(RDF13)), 'k') ax[1].set_yscale('log') ax[1].set_xlim(110.5, 1000) ax[1].set_ylim(1e-2, 1e+3) for i in range(0, 2): ax[i].xaxis.set_major_locator(MultipleLocator(100)) ax[i].xaxis.set_minor_locator(AutoMinorLocator(10)) ax[0].yaxis.set_minor_locator(AutoMinorLocator(5)) ax[1].yaxis.set_major_locator(LogLocator(10)) ax[1].yaxis.set_minor_locator(LogLocator(base = 10.0, subs = (2, 3, 4, 5, 6, 7, 8, 9))) ax[1].yaxis.set_ticks_position('both') ax[0].set_ylabel('Flux, [W / m$^2$ / nm]', fontsize = 12.5) ax[1].set_ylabel('(NESSY - ATLAS) / ATLAS, [%]', fontsize = 12.5) ax[1].set_xlabel('Wavelength, [nm]', fontsize = 12.5) leg0 = ax[0].legend(framealpha = 1, loc = 1, handletextpad = 1, prop = {'size': 20.5}) leg1 = ax[1].legend(framealpha = 1, loc = 1, handletextpad = 1, prop = {'size': 20.5}) for obj in leg0.legendHandles: obj.set_linewidth(3.0) for obj in leg1.legendHandles: obj.set_linewidth(3.0) auxplt.savepdf('var/F_kur_nesatl') fig, ax = plt.subplots(nrows = 2, ncols = 1, figsize = (12.0, 10.0)) #pltaux.figpar() fig.tight_layout() fig.suptitle('Kurucz penumbra model: NESSY vs. ATLAS', y = 1.01) ax[0].plot(w, np.zeros(len(P1)), 'k--') ax[0].set_xlim(110.5, 1000) ls = ':'; lw = 1.5 ax[0].plot(w, P1, color = 'b', linewidth = lw, label = 'NESSY (LTE)') ax[0].plot(w, P2, color = 'r', linewidth = lw, label = 'NESSY (NLTE)') ax[0].plot(w, P3, color = 'g', linewidth = lw, label = 'ATLAS') ax[1].plot(w, abs(RDP13), color = 'b', label = 'NESSY (LTE) vs ATLAS') ax[1].plot(w, abs(RDP23), color = 'r', linewidth = lw, label = 'NESSY (NLTE) vs ATLAS') ax[1].plot(w, np.ones(len(RDP13)), 'k') ax[1].set_yscale('log') ax[1].set_xlim(110.5, 1000) ax[1].set_ylim(1e-2, 1e+3) for i in range(0, 2): ax[i].xaxis.set_major_locator(MultipleLocator(100)) ax[i].xaxis.set_minor_locator(AutoMinorLocator(10)) ax[0].yaxis.set_minor_locator(AutoMinorLocator(5)) ax[1].yaxis.set_major_locator(LogLocator(10)) ax[1].yaxis.set_minor_locator(LogLocator(base = 10.0, subs = (2, 3, 4, 5, 6, 7, 8, 9))) ax[1].yaxis.set_ticks_position('both') ax[0].set_ylabel('Flux, [W / m$^2$ / nm]', fontsize = 12.5) ax[1].set_ylabel('(NESSY - ATLAS) / ATLAS, [%]', fontsize = 12.5) ax[1].set_xlabel('Wavelength, [nm]', fontsize = 12.5) leg0 = ax[0].legend(framealpha = 1, loc = 1, handletextpad = 1, prop = {'size': 20.5}) leg1 = ax[1].legend(framealpha = 1, loc = 1, handletextpad = 1, prop = {'size': 20.5}) for obj in leg0.legendHandles: obj.set_linewidth(3.0) for obj in leg1.legendHandles: obj.set_linewidth(3.0) auxplt.savepdf('var/P_kur_nesatl')
32.112628
105
0.61117
3,255
18,818
3.456836
0.079263
0.027462
0.055457
0.024884
0.80839
0.803857
0.76342
0.745556
0.74129
0.70583
0
0.094649
0.175789
18,818
585
106
32.167521
0.630819
0.20778
0
0.493151
0
0
0.130197
0.00567
0
0
0
0
0
1
0
false
0
0.047945
0
0.047945
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
4f0862c0ed61e66995e0b74687cb27fd4c9fbf38
205
py
Python
jobsapp/graphql/mutations.py
sks-sys/djangocicd
c5b1c5b11b38ebd1be1cb2f138ca21e976282ab8
[ "MIT" ]
1
2022-02-13T06:13:47.000Z
2022-02-13T06:13:47.000Z
jobsapp/graphql/mutations.py
sks-sys/djangocicd
c5b1c5b11b38ebd1be1cb2f138ca21e976282ab8
[ "MIT" ]
null
null
null
jobsapp/graphql/mutations.py
sks-sys/djangocicd
c5b1c5b11b38ebd1be1cb2f138ca21e976282ab8
[ "MIT" ]
null
null
null
import graphene

from . import sub_mutations as job_mutations


class JobMutation(graphene.ObjectType):
    create_job = job_mutations.CreateNewJob.Field()
    update_job = job_mutations.UpdateJob.Field()
22.777778
51
0.795122
25
205
6.28
0.6
0.229299
0.191083
0
0
0
0
0
0
0
0
0
0.131707
205
8
52
25.625
0.882022
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.4
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
6
8795b3bea64863f2be9b1187baa3fc3356f64ae1
32
py
Python
account_financial_amount/models/__init__.py
odoo-mastercore/odoo-argentina
58cdfe8610bae42f69ddb9d652a28eb3245f6a04
[ "MIT" ]
1
2021-01-25T15:57:58.000Z
2021-01-25T15:57:58.000Z
account_financial_amount/models/__init__.py
odoo-mastercore/odoo-argentina
58cdfe8610bae42f69ddb9d652a28eb3245f6a04
[ "MIT" ]
null
null
null
account_financial_amount/models/__init__.py
odoo-mastercore/odoo-argentina
58cdfe8610bae42f69ddb9d652a28eb3245f6a04
[ "MIT" ]
2
2020-10-17T16:36:02.000Z
2021-01-24T10:20:05.000Z
from . import account_move_line
16
31
0.84375
5
32
5
1
0
0
0
0
0
0
0
0
0
0
0
0.125
32
1
32
32
0.892857
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
879dcd09281b88687a4f59bced831fe08d55acee
49
py
Python
src/main/resources/docs/tests/W1300.py
h314to/codacy-pylint
9d31567db6188e1b31ce0e1567998f64946502df
[ "Apache-2.0" ]
null
null
null
src/main/resources/docs/tests/W1300.py
h314to/codacy-pylint
9d31567db6188e1b31ce0e1567998f64946502df
[ "Apache-2.0" ]
null
null
null
src/main/resources/docs/tests/W1300.py
h314to/codacy-pylint
9d31567db6188e1b31ce0e1567998f64946502df
[ "Apache-2.0" ]
null
null
null
##Patterns: W1300
##Err: W1300
"a %(a)s" % {1: 2}
16.333333
18
0.530612
9
49
2.888889
0.777778
0
0
0
0
0
0
0
0
0
0
0.243902
0.163265
49
3
18
16.333333
0.390244
0.510204
0
0
0
0
0.333333
0
0
0
0
0
0
1
0
true
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
6
87a1cf6e4801a9b2d4713b7e2db01aeb472d397a
29
py
Python
ds/models/glm/__init__.py
jordanparker6/datascience-starter
3eef1640a45d19431e9fb26adf5e089d3708dab1
[ "MIT" ]
4
2020-10-01T23:20:29.000Z
2021-06-24T08:34:41.000Z
ds/models/glm/__init__.py
jordanparker6/datascience-starter
3eef1640a45d19431e9fb26adf5e089d3708dab1
[ "MIT" ]
null
null
null
ds/models/glm/__init__.py
jordanparker6/datascience-starter
3eef1640a45d19431e9fb26adf5e089d3708dab1
[ "MIT" ]
null
null
null
from .bayesian_glm import GLM
29
29
0.862069
5
29
4.8
0.8
0
0
0
0
0
0
0
0
0
0
0
0.103448
29
1
29
29
0.923077
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
87c644dd537d8e36e9bfb48d5b3677be1e723e87
243
py
Python
SeleniumWrapper_JE/selenium_wrapper/selenium_webdrive_wrapper/get_webdrivers.py
JE-Chen/je_old_repo
a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5
[ "MIT" ]
null
null
null
SeleniumWrapper_JE/selenium_wrapper/selenium_webdrive_wrapper/get_webdrivers.py
JE-Chen/je_old_repo
a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5
[ "MIT" ]
null
null
null
SeleniumWrapper_JE/selenium_wrapper/selenium_webdrive_wrapper/get_webdrivers.py
JE-Chen/je_old_repo
a8b2f1ac2eec25758bd15b71c64b59b27e0bcda5
[ "MIT" ]
null
null
null
from selenium_wrapper.selenium_webdrive_wrapper.webdriver_wrapper import WebdriverWrapper


def get_webdriver(webdriver_name: str = "chrome", opera_path: str = None, **kwargs):
    return WebdriverWrapper(webdriver_name, opera_path, **kwargs)
40.5
89
0.814815
29
243
6.517241
0.586207
0.137566
0
0
0
0
0
0
0
0
0
0
0.098765
243
5
90
48.6
0.863014
0
0
0
0
0
0.024691
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0.333333
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
1
0
0
6
358b06138a7ef074e3f289e0a66a4990fa0392db
342
py
Python
configs/deepim/ycbvPbrSO/FlowNet512_1.5AugCosyAAEGray_NoiseRandom_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_ycbvPbr_SO/FlowNet512_1.5AugCosyAAEGray_NoiseRandom_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_Pbr_21_61FoamBrick.py
THU-DA-6D-Pose-Group/self6dpp
c267cfa55e440e212136a5e9940598720fa21d16
[ "Apache-2.0" ]
33
2021-12-15T07:11:47.000Z
2022-03-29T08:58:32.000Z
configs/deepim/ycbvPbrSO/FlowNet512_1.5AugCosyAAEGray_NoiseRandom_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_ycbvPbr_SO/FlowNet512_1.5AugCosyAAEGray_NoiseRandom_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_Pbr_21_61FoamBrick.py
THU-DA-6D-Pose-Group/self6dpp
c267cfa55e440e212136a5e9940598720fa21d16
[ "Apache-2.0" ]
3
2021-12-15T11:39:54.000Z
2022-03-29T07:24:23.000Z
configs/deepim/ycbvPbrSO/FlowNet512_1.5AugCosyAAEGray_NoiseRandom_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_ycbvPbr_SO/FlowNet512_1.5AugCosyAAEGray_NoiseRandom_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_Pbr_21_61FoamBrick.py
THU-DA-6D-Pose-Group/self6dpp
c267cfa55e440e212136a5e9940598720fa21d16
[ "Apache-2.0" ]
null
null
null
_base_ = "./FlowNet512_1.5AugCosyAAEGray_NoiseRandom_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_Pbr_01_02MasterChefCan.py" OUTPUT_DIR = "output/deepim/ycbvPbrSO/FlowNet512_1.5AugCosyAAEGray_NoiseRandom_AggressiveR_ClipGrad_fxfy1_Dtw01_LogDz_PM10_Flat_ycbvPbr_SO/21_61FoamBrick" DATASETS = dict(TRAIN=("ycbv_061_foam_brick_train_pbr",))
85.5
154
0.903509
45
342
6.155556
0.688889
0.079422
0.187726
0.267148
0.570397
0.570397
0.570397
0.570397
0.570397
0.570397
0
0.093093
0.026316
342
3
155
114
0.738739
0
0
0
0
0
0.833333
0.833333
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
1
0
0
0
0
0
0
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
35c0f98926cac21b4cf820d3bd8371e25a8b5837
40
py
Python
pymeritrade/__init__.py
sshh12/pymeritrade
0bb73922c8c08207cf55b934867cf780559d9871
[ "MIT" ]
1
2020-12-04T20:46:24.000Z
2020-12-04T20:46:24.000Z
pymeritrade/__init__.py
sshh12/pymeritrade
0bb73922c8c08207cf55b934867cf780559d9871
[ "MIT" ]
null
null
null
pymeritrade/__init__.py
sshh12/pymeritrade
0bb73922c8c08207cf55b934867cf780559d9871
[ "MIT" ]
null
null
null
from pymeritrade.client import TDAClient
40
40
0.9
5
40
7.2
1
0
0
0
0
0
0
0
0
0
0
0
0.075
40
1
40
40
0.972973
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
35e1bf61aa8dbfb91b9de556600c92495e299ada
148
py
Python
tests/functional/modules/pyi_import_pyqt_uic_port/PyQt5/uic/port_v2/__init__.py
hawkhai/pyinstaller
016a24479b34de161792c72dde455a81ad4c78ae
[ "Apache-2.0" ]
9,267
2015-01-01T04:08:45.000Z
2022-03-31T11:42:38.000Z
tests/functional/modules/pyi_import_pyqt_uic_port/PyQt5/uic/port_v2/__init__.py
hawkhai/pyinstaller
016a24479b34de161792c72dde455a81ad4c78ae
[ "Apache-2.0" ]
5,150
2015-01-01T12:09:56.000Z
2022-03-31T18:06:12.000Z
tests/functional/modules/pyi_import_pyqt_uic_port/PyQt5/uic/port_v2/__init__.py
hawkhai/pyinstaller
016a24479b34de161792c72dde455a81ad4c78ae
[ "Apache-2.0" ]
2,101
2015-01-03T10:25:27.000Z
2022-03-30T11:04:42.000Z
__pyinstaller_fake_module_marker__ = '__pyinstaller_fake_module_marker__'
print('this is PyQtx.uic.port_v3')
from . import test # noqa: F401, E402
37
73
0.810811
21
148
5
0.809524
0.285714
0.4
0.514286
0
0
0
0
0
0
0
0.052632
0.101351
148
3
74
49.333333
0.736842
0.108108
0
0
0
0
0.453846
0.261538
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0.333333
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
6
ea3441b64fa1bc1d71ca086dbb3a01665a59a694
1,407
py
Python
InvenTree/label/migrations/0006_auto_20210222_1535.py
inmys/InvenTree
a0d1622926ba9a13839adfe64a8fe21c073692f2
[ "MIT" ]
656
2017-03-29T22:06:14.000Z
2022-03-30T11:23:52.000Z
InvenTree/label/migrations/0006_auto_20210222_1535.py
inmys/InvenTree
a0d1622926ba9a13839adfe64a8fe21c073692f2
[ "MIT" ]
1,545
2017-04-10T23:26:04.000Z
2022-03-31T18:32:10.000Z
InvenTree/label/migrations/0006_auto_20210222_1535.py
fablabbcn/InvenTree
1d7ea7716cc96c6ffd151c822b01cd1fb5dcfecd
[ "MIT" ]
196
2017-03-28T03:06:21.000Z
2022-03-28T11:53:29.000Z
# Generated by Django 3.0.7 on 2021-02-22 04:35

import django.core.validators
from django.db import migrations, models


class Migration(migrations.Migration):

    dependencies = [
        ('label', '0005_auto_20210113_2302'),
    ]

    operations = [
        migrations.AddField(
            model_name='stockitemlabel',
            name='height',
            field=models.FloatField(default=20, help_text='Label height, specified in mm', validators=[django.core.validators.MinValueValidator(2)], verbose_name='Height [mm]'),
        ),
        migrations.AddField(
            model_name='stockitemlabel',
            name='width',
            field=models.FloatField(default=50, help_text='Label width, specified in mm', validators=[django.core.validators.MinValueValidator(2)], verbose_name='Width [mm]'),
        ),
        migrations.AddField(
            model_name='stocklocationlabel',
            name='height',
            field=models.FloatField(default=20, help_text='Label height, specified in mm', validators=[django.core.validators.MinValueValidator(2)], verbose_name='Height [mm]'),
        ),
        migrations.AddField(
            model_name='stocklocationlabel',
            name='width',
            field=models.FloatField(default=50, help_text='Label width, specified in mm', validators=[django.core.validators.MinValueValidator(2)], verbose_name='Width [mm]'),
        ),
    ]
40.2
177
0.647477
149
1,407
6.013423
0.322148
0.055804
0.111607
0.120536
0.805804
0.805804
0.74442
0.674107
0.674107
0.674107
0
0.039413
0.224591
1,407
34
178
41.382353
0.781852
0.031983
0
0.714286
1
0
0.198529
0.016912
0
0
0
0
0
1
0
false
0
0.071429
0
0.178571
0
0
0
0
null
0
0
0
1
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
ea6ecd47d91d68956fbce6dfdf3cf2dd07b1da36
68
py
Python
endaq/ide/__init__.py
MideTechnology/endaq-python-ide
c70027b748afcf05b02b5b5dbfc21fb628dd7396
[ "MIT" ]
5
2021-12-02T04:41:52.000Z
2022-02-01T19:44:41.000Z
endaq/ide/__init__.py
MideTechnology/endaq-python
a878efdd65f718c1324d92d467b19fd3b4142cd0
[ "MIT" ]
136
2021-09-28T17:45:20.000Z
2022-03-30T11:35:15.000Z
endaq/ide/__init__.py
MideTechnology/endaq-python-ide
c70027b748afcf05b02b5b5dbfc21fb628dd7396
[ "MIT" ]
2
2021-11-08T19:22:17.000Z
2021-12-15T20:25:04.000Z
from .files import *
from .info import *
from .measurement import *
17
26
0.735294
9
68
5.555556
0.555556
0.4
0
0
0
0
0
0
0
0
0
0
0.176471
68
3
27
22.666667
0.892857
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
578fdb0a033e6cdfe84fbd4f0f85a02d5f5fc5b9
46
py
Python
src/mock_api/main.py
AlTosterino/MockAPI
baa86d311c4f4c1b516077ef236b1d4c84e5785e
[ "MIT" ]
null
null
null
src/mock_api/main.py
AlTosterino/MockAPI
baa86d311c4f4c1b516077ef236b1d4c84e5785e
[ "MIT" ]
null
null
null
src/mock_api/main.py
AlTosterino/MockAPI
baa86d311c4f4c1b516077ef236b1d4c84e5785e
[ "MIT" ]
null
null
null
def main() -> None:
    print("Main invoked")
15.333333
25
0.586957
6
46
4.5
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.217391
46
2
26
23
0.75
0
0
0
0
0
0.26087
0
0
0
0
0
0
1
0.5
true
0
0
0
0.5
0.5
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
0
0
1
0
6
579dbec0f803f087d1bb6be7bb6f3659e9e55cc7
35
py
Python
scout/parse/variant/__init__.py
gmc-norr/scout
ea8eaaa079c63e4033af6216ec08da4a314f9b5c
[ "BSD-3-Clause" ]
111
2015-01-15T11:53:20.000Z
2022-03-26T19:55:24.000Z
scout/parse/variant/__init__.py
gmc-norr/scout
ea8eaaa079c63e4033af6216ec08da4a314f9b5c
[ "BSD-3-Clause" ]
2,995
2015-01-15T16:14:20.000Z
2022-03-31T13:36:32.000Z
scout/parse/variant/__init__.py
gmc-norr/scout
ea8eaaa079c63e4033af6216ec08da4a314f9b5c
[ "BSD-3-Clause" ]
55
2015-05-31T19:09:49.000Z
2021-11-01T10:50:31.000Z
from .variant import parse_variant
17.5
34
0.857143
5
35
5.8
0.8
0
0
0
0
0
0
0
0
0
0
0
0.114286
35
1
35
35
0.935484
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
57ad1d099f9796ec1666c48fe744e73cd8e34507
549
py
Python
spec/fixtures/country-codes/scripts/reorder_columns.py
mode/data_package
c8f7247e820e34e0583856bf0ce35afe40586786
[ "MIT" ]
null
null
null
spec/fixtures/country-codes/scripts/reorder_columns.py
mode/data_package
c8f7247e820e34e0583856bf0ce35afe40586786
[ "MIT" ]
null
null
null
spec/fixtures/country-codes/scripts/reorder_columns.py
mode/data_package
c8f7247e820e34e0583856bf0ce35afe40586786
[ "MIT" ]
null
null
null
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# vim: ai ts=4 sts=4 et sw=4

import subprocess

subprocess.call('csvcut -c "name","name_fr","ISO3166-1-Alpha-2","ISO3166-1-Alpha-3","ISO3166-1-numeric","ITU","MARC","WMO","DS","Dial","FIFA","FIPS","GAUL","IOC","currency_alphabetic_code","currency_country_name","currency_minor_unit","currency_name","currency_numeric_code","is_independent" data/country-codes.csv > data/country-codes-reordered.csv', shell=True)
subprocess.call('mv data/country-codes-reordered.csv data/country-codes.csv', shell=True)
61
363
0.735883
86
549
4.569767
0.593023
0.111959
0.16285
0.096692
0.142494
0
0
0
0
0
0
0.040385
0.052823
549
8
364
68.625
0.715385
0.125683
0
0
0
0.333333
0.81761
0.779874
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
1
1
1
null
0
0
0
0
0
0
1
0
1
0
0
0
0
6
57bb563d0da08bac16d19f343a02825332dc95b8
127
py
Python
projects/py/projects/__init__.py
gdhungana/project_mis
dfd9612a05fb07237387d98597f73ba6014bf9d5
[ "MIT" ]
null
null
null
projects/py/projects/__init__.py
gdhungana/project_mis
dfd9612a05fb07237387d98597f73ba6014bf9d5
[ "MIT" ]
null
null
null
projects/py/projects/__init__.py
gdhungana/project_mis
dfd9612a05fb07237387d98597f73ba6014bf9d5
[ "MIT" ]
null
null
null
# help with 2to3 support.
from __future__ import absolute_import, division, print_function

#from ._version import __version__
25.4
64
0.826772
16
127
5.875
0.75
0
0
0
0
0
0
0
0
0
0
0.018018
0.125984
127
4
65
31.75
0.828829
0.440945
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
1
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
1
0
6
57d296651d23c73ae1525406d07cb7945a54129f
171
py
Python
bert/huggingface_konlpy/huggingface_konlpy/__init__.py
ejpark78/codelab
c2e533f9b8988ecb7f9ace3d7305d252a6b5a0d9
[ "Apache-2.0" ]
1
2022-02-03T04:22:36.000Z
2022-02-03T04:22:36.000Z
bert/huggingface_konlpy/huggingface_konlpy/__init__.py
ejpark78/codelab
c2e533f9b8988ecb7f9ace3d7305d252a6b5a0d9
[ "Apache-2.0" ]
null
null
null
bert/huggingface_konlpy/huggingface_konlpy/__init__.py
ejpark78/codelab
c2e533f9b8988ecb7f9ace3d7305d252a6b5a0d9
[ "Apache-2.0" ]
null
null
null
from .about import __author__
from .about import __version__

from . import tokenizers_konlpy
from . import transformers_konlpy

from .utils import compose, get_tokenizer
21.375
41
0.830409
22
171
5.954545
0.545455
0.137405
0.229008
0
0
0
0
0
0
0
0
0
0.134503
171
7
42
24.428571
0.885135
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
57e57fdf8b53bf3324eed8fedfaa2f740db630a5
147
py
Python
sciencer/utils/__init__.py
SciencerIO/sciencer-toolkit
f17c4a5dfb6cc5dbabefe03b13eb1e5345f7b1b9
[ "MIT" ]
2
2022-03-28T17:27:21.000Z
2022-03-29T22:27:15.000Z
sciencer/utils/__init__.py
SciencerIO/sciencer-toolkit
f17c4a5dfb6cc5dbabefe03b13eb1e5345f7b1b9
[ "MIT" ]
null
null
null
sciencer/utils/__init__.py
SciencerIO/sciencer-toolkit
f17c4a5dfb6cc5dbabefe03b13eb1e5345f7b1b9
[ "MIT" ]
1
2022-03-28T14:47:53.000Z
2022-03-28T14:47:53.000Z
"""Utilities for Sciencer toolkit """ from .csv_callback import WriteToCSVCallbacks from .history_callback import HistoryCallbacks, HistoryLog
29.4
59
0.816327
15
147
7.866667
0.8
0.237288
0
0
0
0
0
0
0
0
0
0
0.122449
147
4
60
36.75
0.914729
0.204082
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
17ae4db2fd4d991ade941b3b93c4f6cb0c3ef2c1
6,221
py
Python
pfhedge/stochastic/brownian.py
YieldLabs/pfhedge
a5ba9d054a8418cb8b27bb67d81a8fc8fb83ef57
[ "MIT" ]
null
null
null
pfhedge/stochastic/brownian.py
YieldLabs/pfhedge
a5ba9d054a8418cb8b27bb67d81a8fc8fb83ef57
[ "MIT" ]
null
null
null
pfhedge/stochastic/brownian.py
YieldLabs/pfhedge
a5ba9d054a8418cb8b27bb67d81a8fc8fb83ef57
[ "MIT" ]
null
null
null
from typing import Callable
from typing import Optional
from typing import Tuple
from typing import Union
from typing import cast

import torch
from torch import Tensor

from pfhedge._utils.typing import TensorOrScalar

from ._utils import cast_state


def generate_brownian(
    n_paths: int,
    n_steps: int,
    init_state: Union[Tuple[TensorOrScalar, ...], TensorOrScalar] = (0.0,),
    sigma: float = 0.2,
    mu: float = 0.0,
    dt: float = 1 / 250,
    dtype: Optional[torch.dtype] = None,
    device: Optional[torch.device] = None,
    engine: Callable[..., Tensor] = torch.randn,
) -> Tensor:
    r"""Returns time series following the Brownian motion.

    The time evolution of the process is given by:

    .. math::

        dS(t) = \mu dt + \sigma dW(t) \,.

    Args:
        n_paths (int): The number of simulated paths.
        n_steps (int): The number of time steps.
        init_state (tuple[torch.Tensor | float], default=(0.0,)): The initial state of
            the time series. This is specified by a tuple :math:`(S(0),)`.
            It also accepts a :class:`torch.Tensor` or a :class:`float`.
        sigma (float, default=0.2): The parameter :math:`\sigma`,
            which stands for the volatility of the time series.
        mu (float, default=0.0): The parameter :math:`\mu`,
            which stands for the drift of the time series.
        dt (float, default=1/250): The intervals of the time steps.
        dtype (torch.dtype, optional): The desired data type of returned tensor.
            Default: If ``None``, uses a global default
            (see :func:`torch.set_default_tensor_type()`).
        device (torch.device, optional): The desired device of returned tensor.
            Default: If ``None``, uses the current device for the default tensor type
            (see :func:`torch.set_default_tensor_type()`).
            ``device`` will be the CPU for CPU tensor types and the current CUDA device
            for CUDA tensor types.
        engine (callable, default=torch.randn): The desired generator of random numbers
            from a standard normal distribution.
            A function call ``engine(size, dtype=None, device=None)`` should return
            a tensor filled with random numbers from a standard normal distribution.

    Shape:
        - Output: :math:`(N, T)` where
          :math:`N` is the number of paths and
          :math:`T` is the number of time steps.

    Returns:
        torch.Tensor

    Examples:
        >>> from pfhedge.stochastic import generate_brownian
        >>>
        >>> _ = torch.manual_seed(42)
        >>> generate_brownian(2, 5)
        tensor([[ 0.0000,  0.0016,  0.0046,  0.0075, -0.0067],
                [ 0.0000,  0.0279,  0.0199,  0.0257,  0.0291]])
    """
    init_state = cast_state(init_state, dtype=dtype, device=device)
    init_value = init_state[0]

    # randn = torch.randn((n_paths, n_steps), dtype=dtype, device=device)
    randn = engine(*(n_paths, n_steps), dtype=dtype, device=device)
    randn[:, 0] = 0.0
    drift = mu * dt * torch.arange(n_steps).to(randn)
    brown = randn.new_tensor(dt).sqrt() * randn.cumsum(1)
    return drift + sigma * brown + init_value


def generate_geometric_brownian(
    n_paths: int,
    n_steps: int,
    init_state: Union[Tuple[TensorOrScalar, ...], TensorOrScalar] = (1.0,),
    sigma: float = 0.2,
    mu: float = 0.0,
    dt: float = 1 / 250,
    dtype: Optional[torch.dtype] = None,
    device: Optional[torch.device] = None,
    engine: Callable[..., Tensor] = torch.randn,
) -> Tensor:
    r"""Returns time series following the geometric Brownian motion.

    The time evolution of the process is given by:

    .. math::

        dS(t) = \mu S(t) dt + \sigma S(t) dW(t) \,.

    Args:
        n_paths (int): The number of simulated paths.
        n_steps (int): The number of time steps.
        init_state (tuple[torch.Tensor | float], default=(0.0,)): The initial state of
            the time series. This is specified by a tuple :math:`(S(0),)`.
            It also accepts a :class:`torch.Tensor` or a :class:`float`.
        sigma (float, default=0.2): The parameter :math:`\sigma`,
            which stands for the volatility of the time series.
        mu (float, default=0.2): The parameter :math:`\mu`,
            which stands for the volatility of the time series.
        dt (float, default=1/250): The intervals of the time steps.
        dtype (torch.dtype, optional): The desired data type of returned tensor.
            Default: If ``None``, uses a global default
            (see :func:`torch.set_default_tensor_type()`).
        device (torch.device, optional): The desired device of returned tensor.
            Default: If ``None``, uses the current device for the default tensor type
            (see :func:`torch.set_default_tensor_type()`).
            ``device`` will be the CPU for CPU tensor types and the current CUDA device
            for CUDA tensor types.
        engine (callable, default=torch.randn): The desired generator of random numbers
            from a standard normal distribution.
            A function call ``engine(size, dtype=None, device=None)`` should return
            a tensor filled with random numbers from a standard normal distribution.

    Shape:
        - Output: :math:`(N, T)` where
          :math:`N` is the number of paths and
          :math:`T` is the number of time steps.

    Returns:
        torch.Tensor

    Examples:
        >>> from pfhedge.stochastic import generate_brownian
        >>>
        >>> _ = torch.manual_seed(42)
        >>> generate_geometric_brownian(2, 5)
        tensor([[1.0000, 1.0016, 1.0044, 1.0073, 0.9930],
                [1.0000, 1.0282, 1.0199, 1.0258, 1.0292]])
    """
    init_state = cast_state(init_state, dtype=dtype, device=device)

    brownian = generate_brownian(
        n_paths=n_paths,
        n_steps=n_steps,
        init_state=(0.0,),
        sigma=sigma,
        mu=mu,
        dt=dt,
        dtype=dtype,
        device=device,
        engine=engine,
    )
    t = dt * torch.arange(n_steps).to(brownian).unsqueeze(0)
    return init_state[0] * (brownian - (sigma ** 2) * t / 2).exp()
38.401235
87
0.613245
847
6,221
4.430933
0.161747
0.026379
0.023448
0.023981
0.813216
0.81295
0.801759
0.801492
0.78737
0.759392
0
0.035533
0.271661
6,221
161
88
38.639752
0.792761
0.660344
0
0.37037
0
0
0
0
0
0
0
0
0
1
0.037037
false
0
0.166667
0
0.240741
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
17ea20ef0c86efc921ced747d0c09c306f346c50
211
py
Python
dataherb/cmd/sync_s3.py
DataHerb/dataherb-python
91f2787eabb450d653b0b9dbc9bb78980d13460f
[ "MIT" ]
4
2021-08-08T21:31:40.000Z
2022-02-11T03:13:47.000Z
dataherb/cmd/sync_s3.py
DataHerb/dataherb-python
91f2787eabb450d653b0b9dbc9bb78980d13460f
[ "MIT" ]
9
2020-03-15T15:38:46.000Z
2021-11-04T08:23:43.000Z
dataherb/cmd/sync_s3.py
DataHerb/dataherb-python
91f2787eabb450d653b0b9dbc9bb78980d13460f
[ "MIT" ]
2
2020-03-23T17:00:23.000Z
2021-08-06T00:03:18.000Z
from dataherb.utils.awscli import aws_cli as _aws_cli


def upload_dataset_to_s3(source, target):
    """
    upload_dataset_to_s3 uploads the dataset to S3
    """

    _aws_cli(("s3", "sync", source, target))
21.1
53
0.701422
32
211
4.28125
0.5625
0.131387
0.240876
0.248175
0
0
0
0
0
0
0
0.023392
0.189573
211
9
54
23.444444
0.777778
0.218009
0
0
0
0
0.040268
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0
0.666667
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
0
1
0
0
6
17ffeccafc15a9d2f7e8c9db0212ecd4bca90aba
48
py
Python
helper3.py
bvt2nc/cs3240-labdemo
76cb93a98daf8b1934b6faaf1e641e2380235736
[ "MIT" ]
null
null
null
helper3.py
bvt2nc/cs3240-labdemo
76cb93a98daf8b1934b6faaf1e641e2380235736
[ "MIT" ]
null
null
null
helper3.py
bvt2nc/cs3240-labdemo
76cb93a98daf8b1934b6faaf1e641e2380235736
[ "MIT" ]
null
null
null
def greeting3(msg):
    print("Greeting3: " + msg)
16
27
0.666667
6
48
5.333333
0.666667
0.75
0
0
0
0
0
0
0
0
0
0.04878
0.145833
48
2
28
24
0.731707
0
0
0
0
0
0.229167
0
0
0
0
0
0
1
0.5
false
0
0
0
0.5
0.5
1
1
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
1
0
6
a4dd6fce7fe6926e934bfdfd7118de36ab14a9d6
146
py
Python
pokemongo_bot/event_handlers/__init__.py
PokePy/-PokemonGo-Bot
aaa5519291b45b5817cb38d3b5a60e5b08719a76
[ "MIT" ]
2
2018-11-27T06:02:24.000Z
2019-12-31T19:10:32.000Z
pokemongo_bot/event_handlers/__init__.py
PokePy/-PokemonGo-Bot
aaa5519291b45b5817cb38d3b5a60e5b08719a76
[ "MIT" ]
1
2018-10-28T04:50:46.000Z
2018-10-28T04:50:46.000Z
pokemongo_bot/event_handlers/__init__.py
PokePy/-PokemonGo-Bot
aaa5519291b45b5817cb38d3b5a60e5b08719a76
[ "MIT" ]
null
null
null
from logging_handler import LoggingHandler
from socketio_handler import SocketIoHandler
from colored_logging_handler import ColoredLoggingHandler
36.5
57
0.917808
16
146
8.125
0.5625
0.3
0.307692
0
0
0
0
0
0
0
0
0
0.082192
146
3
58
48.666667
0.970149
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
6
352ed114bf17652dcf1adbc12f39868a1143db32
132
py
Python
src/__init__.py
ValeKnappich/sparsification
5c921d812e6a3899ca80678225ada758dea66d6b
[ "Unlicense", "MIT" ]
null
null
null
src/__init__.py
ValeKnappich/sparsification
5c921d812e6a3899ca80678225ada758dea66d6b
[ "Unlicense", "MIT" ]
null
null
null
src/__init__.py
ValeKnappich/sparsification
5c921d812e6a3899ca80678225ada758dea66d6b
[ "Unlicense", "MIT" ]
null
null
null
import src.callbacks # noqa
import src.datamodules # noqa
import src.models # noqa
import src.utils # noqa
import src.ui # noqa
26.4
30
0.742424
20
132
4.9
0.4
0.459184
0.530612
0
0
0
0
0
0
0
0
0
0.181818
132
5
31
26.4
0.907407
0.181818
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
6
10b05190aecd154f745b6206c72aa899eec3fcd4
174
py
Python
Django_Typescript_React_Workflow/backend/views.py
lit26/Django_React_Workflow
564dfc0690940938b07b08ea25f712f120f8a2dd
[ "MIT" ]
null
null
null
Django_Typescript_React_Workflow/backend/views.py
lit26/Django_React_Workflow
564dfc0690940938b07b08ea25f712f120f8a2dd
[ "MIT" ]
null
null
null
Django_Typescript_React_Workflow/backend/views.py
lit26/Django_React_Workflow
564dfc0690940938b07b08ea25f712f120f8a2dd
[ "MIT" ]
null
null
null
from django.shortcuts import render
from django.http import HttpResponse

# Create your views here.
def main_view(request):
    return HttpResponse('<h1>Hello Backend.</h1>')
29
50
0.775862
24
174
5.583333
0.791667
0.149254
0
0
0
0
0
0
0
0
0
0.013158
0.126437
174
6
50
29
0.868421
0.132184
0
0
0
0
0.153333
0
0
0
0
0
0
1
0.25
false
0
0.5
0.25
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
1
0
0
6
10b576a9d108d4f115594066504401450044906c
215
py
Python
tccli/services/tione/__init__.py
zyh911/tencentcloud-cli
dfc5dbd660d4c60d265921c4edc630091478fc41
[ "Apache-2.0" ]
null
null
null
tccli/services/tione/__init__.py
zyh911/tencentcloud-cli
dfc5dbd660d4c60d265921c4edc630091478fc41
[ "Apache-2.0" ]
null
null
null
tccli/services/tione/__init__.py
zyh911/tencentcloud-cli
dfc5dbd660d4c60d265921c4edc630091478fc41
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*-
from tccli.services.tione.tione_client import register_arg
from tccli.services.tione.tione_client import get_actions_info
from tccli.services.tione.tione_client import AVAILABLE_VERSION_LIST
43
68
0.837209
32
215
5.375
0.53125
0.156977
0.296512
0.383721
0.680233
0.680233
0.680233
0
0
0
0
0.005051
0.07907
215
4
69
53.75
0.863636
0.097674
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
52f3b3ed7ab59ed473ef0bce18feaf9d995802cf
232
py
Python
tests/requests_client/RequestsFutureAdapter/conftest.py
andriis/bravado
0d2ef182df4eb38641282e2f839c4dc813ee4349
[ "BSD-3-Clause" ]
null
null
null
tests/requests_client/RequestsFutureAdapter/conftest.py
andriis/bravado
0d2ef182df4eb38641282e2f839c4dc813ee4349
[ "BSD-3-Clause" ]
null
null
null
tests/requests_client/RequestsFutureAdapter/conftest.py
andriis/bravado
0d2ef182df4eb38641282e2f839c4dc813ee4349
[ "BSD-3-Clause" ]
null
null
null
# -*- coding: utf-8 -*-
import pytest
from mock import Mock
from requests.sessions import Session


@pytest.fixture
def request():
    return Mock(url='http://foo.com')


@pytest.fixture
def session():
    return Mock(spec=Session)
15.466667
37
0.702586
32
232
5.09375
0.59375
0.159509
0.196319
0
0
0
0
0
0
0
0
0.005128
0.159483
232
14
38
16.571429
0.830769
0.090517
0
0.222222
0
0
0.066986
0
0
0
0
0
0
1
0.222222
true
0
0.333333
0.222222
0.777778
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
1
1
1
0
0
6
5e1fdf3568eb3a8c12188df6bf9d6b97f7289bae
144
py
Python
src/Util/util.py
ZingLix/alala_chan
b85633ee0e0b9aa32f238354a750ceed1ab85388
[ "MIT" ]
null
null
null
src/Util/util.py
ZingLix/alala_chan
b85633ee0e0b9aa32f238354a750ceed1ab85388
[ "MIT" ]
null
null
null
src/Util/util.py
ZingLix/alala_chan
b85633ee0e0b9aa32f238354a750ceed1ab85388
[ "MIT" ]
null
null
null
from typing import TYPE_CHECKING

if TYPE_CHECKING:
    from dataclasses import dataclass
else:

    def dataclass(model):
        return model
16
37
0.736111
18
144
5.777778
0.666667
0.230769
0
0
0
0
0
0
0
0
0
0
0.229167
144
8
38
18
0.936937
0
0
0
0
0
0
0
0
0
0
0
0
1
0.166667
false
0
0.333333
0.166667
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
1
1
0
0
6
eac3dcc5ae6e18cf193bebc5d1e36b3a6d3ffff7
4,961
py
Python
isah-backend/handler.py
tanmaysinghal98/istayathome
c89d09caccad83da911dec750ef35a28476cab9f
[ "MIT" ]
2
2020-09-19T19:48:34.000Z
2020-09-20T10:31:04.000Z
isah-backend/handler.py
tanmaysinghal98/istayathome
c89d09caccad83da911dec750ef35a28476cab9f
[ "MIT" ]
2
2022-02-19T06:23:05.000Z
2022-02-27T10:08:54.000Z
isah-backend/handler.py
tanmaysinghal98/istayathome
c89d09caccad83da911dec750ef35a28476cab9f
[ "MIT" ]
1
2020-09-20T10:31:06.000Z
2020-09-20T10:31:06.000Z
import json

import auth
import user
import challenge
import image_upload
import image_process
import image_download


def get_users(event, context):
    print(event)
    user_id = auth.get_cookie_value(event['headers'])
    if user_id is None:
        body = user.create_user()
    else:
        body = user.get_user(user_id)

    response = {
        'headers': {
            'Access-Control-Allow-Origin': '*',
            'Access-Control-Allow-Credentials': True,
        },
        "statusCode": 200,
        "body": json.dumps(body)
    }

    return response


def update_users(event, context):
    print(event)
    user_id = auth.get_cookie_value(event['headers'])
    request_body = json.loads(event['body'])
    if 'id' not in request_body or request_body['id'] != user_id:
        body = {
            "message": "User not found"
        }
        response = {
            'headers': {
                'Access-Control-Allow-Origin': '*',
                'Access-Control-Allow-Credentials': True,
            },
            "statusCode": 400,
            "body": json.dumps(body)
        }
        return response

    body = user.update_users(request_body)

    response = {
        'headers': {
            'Access-Control-Allow-Origin': '*',
            'Access-Control-Allow-Credentials': True,
        },
        "statusCode": 200,
        "body": json.dumps(body)
    }

    return response


def get_upload_presigned_url(event, context):
    user_id = auth.get_cookie_value(event['headers'])
    request_body = json.loads(event['body'])
    if user_id is not None:
        body = image_upload.get_upload_presigned_url(request_body, user_id)
    else:
        body = {'message': 'User Not Authenticated'}

    response = {
        'headers': {
            'Access-Control-Allow-Origin': '*',
            'Access-Control-Allow-Credentials': True,
        },
        "statusCode": 200,
        "body": json.dumps(body)
    }

    return response


def get_download_presigned_url(event, context):
    user_id = auth.get_cookie_value(event['headers'])
    request_body = json.loads(event['body'])
    if user_id is not None:
        body = image_download.get_download_presigned_url(request_body, user_id)
    else:
        body = {'message': 'User Not Authenticated'}

    response = {
        'headers': {
            'Access-Control-Allow-Origin': '*',
            'Access-Control-Allow-Credentials': True,
        },
        "statusCode": 200,
        "body": json.dumps(body)
    }

    return response


def process_image(event, context):
    print(event)
    user_id = auth.get_cookie_value(event['headers'])
    request_body = json.loads(event['body'])
    if user_id is not None:
        chal = challenge.get_challenge_by_id(request_body['challengeId'])
        image_process.run(chal, request_body)
        body = user.mark_challenge_complete(chal, request_body, user_id)
    else:
        body = {'message': 'User Not Authenticated'}

    response = {
        'headers': {
            'Access-Control-Allow-Origin': '*',
            'Access-Control-Allow-Credentials': True,
        },
        "statusCode": 200,
        "body": json.dumps(body)
    }

    return response


def get_challenges(event, context):
    print(event)
    if event['queryStringParameters'] is not None:
        id = event['queryStringParameters']['id']
        body = challenge.get_challenge_by_id(id)
    else:
        body = challenge.get_challenges()

    response = {
        'headers': {
            'Access-Control-Allow-Origin': '*',
            'Access-Control-Allow-Credentials': True,
        },
        "statusCode": 200,
        "body": json.dumps(body)
    }

    return response


def create_challenge(event, context):
    print(event)
    request_body = json.loads(event['body'])
    body = challenge.create_challenge(request_body)

    response = {
        'headers': {
            'Access-Control-Allow-Origin': '*',
            'Access-Control-Allow-Credentials': True,
        },
        "statusCode": 200,
        "body": json.dumps(body)
    }

    return response


# def update_challenges(event, context):
#     print(event)
#     user_id = auth.get_cookie_value(event['headers'])
#     request_body = json.loads(event['body'])
#     if 'id' not in request_body or request_body['id'] != user_id:
#         body = {
#             "message": "User not found"
#         }
#         response = {
#             'headers': {
#                 'Access-Control-Allow-Origin': '*',
#                 'Access-Control-Allow-Credentials': True,
#             },
#             "statusCode": 400,
#             "body": json.dumps(body)
#         }
#         return response
#     body = user.update_users(request_body)
#     response = {
#         'headers': {
#             'Access-Control-Allow-Origin': '*',
#             'Access-Control-Allow-Credentials': True,
#         },
#         "statusCode": 200,
#         "body": json.dumps(body)
#     }
#     return response
29.182353
79
0.572667
520
4,961
5.301923
0.107692
0.094305
0.130577
0.10156
0.82626
0.792891
0.782372
0.782372
0.782372
0.782372
0
0.008559
0.293489
4,961
169
80
29.35503
0.778031
0.169321
0
0.623077
0
0
0.212558
0.12558
0
0
0
0
0
1
0.053846
false
0
0.053846
0
0.169231
0.038462
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
dc201fe2f0a5da9ed10a3cbe8b675c3a3abb804a
3,110
py
Python
fabtools/tests/functional_tests/test_apt_key.py
timgates42/fabtools
5fdc7174c3fae5e93a16d677d0466f41dc2be175
[ "BSD-2-Clause" ]
308
2015-01-03T20:05:22.000Z
2016-10-02T07:39:33.000Z
fabtools/tests/functional_tests/test_apt_key.py
timgates42/fabtools
5fdc7174c3fae5e93a16d677d0466f41dc2be175
[ "BSD-2-Clause" ]
97
2016-10-06T15:42:34.000Z
2020-01-27T15:33:46.000Z
fabtools/tests/functional_tests/test_apt_key.py
timgates42/fabtools
5fdc7174c3fae5e93a16d677d0466f41dc2be175
[ "BSD-2-Clause" ]
62
2015-01-03T21:16:46.000Z
2016-09-09T00:39:05.000Z
import pytest

from fabric.api import run

from fabtools.utils import run_as_root


pytestmark = pytest.mark.network


@pytest.fixture(scope='module', autouse=True)
def check_for_debian_family():
    from fabtools.system import distrib_family
    if distrib_family() != 'debian':
        pytest.skip("Skipping apt-key test on non-Debian distrib")


def test_add_apt_key_with_key_id_from_url():
    from fabtools.deb import add_apt_key
    try:
        add_apt_key(keyid='C4DEFFEB', url='http://repo.varnish-cache.org/debian/GPG-key.txt')
        run_as_root('apt-key finger | grep -q C4DEFFEB')
    finally:
        run_as_root('apt-key del C4DEFFEB', quiet=True)


def test_add_apt_key_with_key_id_from_specific_key_server():
    from fabtools.deb import add_apt_key
    try:
        add_apt_key(keyid='7BD9BF62', keyserver='keyserver.ubuntu.com')
        run_as_root('apt-key finger | grep -q 7BD9BF62')
    finally:
        run_as_root('apt-key del 7BD9BF62', quiet=True)


def test_add_apt_key_with_key_id_from_file():
    from fabtools.deb import add_apt_key
    try:
        run('wget http://repo.varnish-cache.org/debian/GPG-key.txt -O /tmp/tmp.fabtools.test.key')
        add_apt_key(keyid='C4DEFFEB', filename='/tmp/tmp.fabtools.test.key')
        run_as_root('apt-key finger | grep -q C4DEFFEB')
    finally:
        run_as_root('apt-key del C4DEFFEB', quiet=True)


def test_add_apt_key_without_key_id_from_url():
    from fabtools.deb import add_apt_key
    try:
        add_apt_key(url='http://repo.varnish-cache.org/debian/GPG-key.txt')
        run_as_root('apt-key finger | grep -q C4DEFFEB')
    finally:
        run_as_root('apt-key del C4DEFFEB', quiet=True)


def test_add_apt_key_without_key_id_from_file():
    from fabtools.deb import add_apt_key
    try:
        run('wget http://repo.varnish-cache.org/debian/GPG-key.txt -O /tmp/tmp.fabtools.test.key')
        add_apt_key(filename='/tmp/tmp.fabtools.test.key')
        run_as_root('apt-key finger | grep -q C4DEFFEB')
    finally:
        run_as_root('apt-key del C4DEFFEB', quiet=True)


def test_require_deb_key_from_url():
    from fabtools.require.deb import key as require_key
    try:
        require_key(keyid='C4DEFFEB', url='http://repo.varnish-cache.org/debian/GPG-key.txt')
        run_as_root('apt-key finger | grep -q C4DEFFEB')
    finally:
        run_as_root('apt-key del C4DEFFEB', quiet=True)


def test_require_deb_key_from_specific_keyserver():
    from fabtools.require.deb import key as require_key
    try:
        require_key(keyid='7BD9BF62', keyserver='keyserver.ubuntu.com')
        run_as_root('apt-key finger | grep -q 7BD9BF62')
    finally:
        run_as_root('apt-key del 7BD9BF62', quiet=True)


def test_require_deb_key_from_file():
    from fabtools.require.deb import key as require_key
    try:
        run('wget http://repo.varnish-cache.org/debian/GPG-key.txt -O /tmp/tmp.fabtools.test.key')
        require_key(keyid='C4DEFFEB', filename='/tmp/tmp.fabtools.test.key')
        run_as_root('apt-key finger | grep -q C4DEFFEB')
    finally:
        run_as_root('apt-key del C4DEFFEB', quiet=True)
34.175824
98
0.702572
485
3,110
4.251546
0.138144
0.093113
0.0742
0.093113
0.853055
0.853055
0.853055
0.853055
0.844811
0.835112
0
0.015668
0.1791
3,110
90
99
34.555556
0.792009
0
0
0.632353
0
0.044118
0.333762
0.050161
0
0
0
0
0
1
0.132353
false
0
0.176471
0
0.308824
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
dc4b32b01de50e2ed1ed1973a95e62399500c5e5
35
py
Python
PycharmProjects/VanillaGAN/train/__init__.py
annusgit/Forest-Cover-Change-Detection
2aa16aa9f6d668f5ad44ff5dc4643a70581cd714
[ "MIT" ]
3
2018-12-17T09:58:31.000Z
2021-01-30T16:44:09.000Z
PycharmProjects/VanillaGAN/train/__init__.py
annusgit/Forest-Cover-Change-Detection
2aa16aa9f6d668f5ad44ff5dc4643a70581cd714
[ "MIT" ]
null
null
null
PycharmProjects/VanillaGAN/train/__init__.py
annusgit/Forest-Cover-Change-Detection
2aa16aa9f6d668f5ad44ff5dc4643a70581cd714
[ "MIT" ]
2
2019-02-18T16:17:06.000Z
2020-02-24T06:32:34.000Z
from training_functions import *
8.75
32
0.8
4
35
6.75
1
0
0
0
0
0
0
0
0
0
0
0
0.171429
35
3
33
11.666667
0.931034
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
dc5d938fa0eec35f41b9abd43759e18017494faf
156
py
Python
set.py
hu-xiang-yang/python-code
d2200138311e1514a97d60796d63c2e64f8aa9e3
[ "Unlicense" ]
null
null
null
set.py
hu-xiang-yang/python-code
d2200138311e1514a97d60796d63c2e64f8aa9e3
[ "Unlicense" ]
null
null
null
set.py
hu-xiang-yang/python-code
d2200138311e1514a97d60796d63c2e64f8aa9e3
[ "Unlicense" ]
null
null
null
#!/usr/bin/env python3
# -*- coding: utf-8 -*-

s1 = set([1, 1, 2, 2, 3, 3])
print(s1)

s2 = set([2, 3, 4])
print(s1 & s2)
print(s1 | s2)
15.6
30
0.448718
28
156
2.5
0.535714
0.3
0.385714
0
0
0
0
0
0
0
0
0.165138
0.301282
156
9
31
17.333333
0.477064
0.282051
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.6
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
6
dca69e19cb0a32eba7b335aae6f52fa1d3227909
115
py
Python
tools/pylint/__init__.py
coblee/rotki
d675f5c2d0df5176337b7b10038524ee74923482
[ "BSD-3-Clause" ]
137
2018-03-05T11:53:29.000Z
2019-11-03T16:38:42.000Z
tools/pylint/__init__.py
coblee/rotki
d675f5c2d0df5176337b7b10038524ee74923482
[ "BSD-3-Clause" ]
385
2018-03-08T12:43:41.000Z
2019-11-10T09:15:36.000Z
tools/pylint/__init__.py
coblee/rotki
d675f5c2d0df5176337b7b10038524ee74923482
[ "BSD-3-Clause" ]
59
2018-03-08T10:08:27.000Z
2019-10-26T11:30:44.000Z
from .log_checker import LogNokwargsChecker # noqa: F401
from .not_checker import NotBooleanChecker # noqa: F401
38.333333
57
0.808696
14
115
6.5
0.642857
0.285714
0
0
0
0
0
0
0
0
0
0.060606
0.13913
115
2
58
57.5
0.858586
0.182609
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
f4a8c7fc090c3c54250efa1ad3c7cca34f660378
22
py
Python
HashUtil/__init__.py
CasperTheCat/PyDeduplication
2332d6e4bcd38a1e46840ba11cfd27577fd86200
[ "Apache-2.0" ]
null
null
null
HashUtil/__init__.py
CasperTheCat/PyDeduplication
2332d6e4bcd38a1e46840ba11cfd27577fd86200
[ "Apache-2.0" ]
2
2020-08-04T01:13:59.000Z
2020-08-04T01:49:37.000Z
HashUtil/__init__.py
CasperTheCat/PyDeduplication
2332d6e4bcd38a1e46840ba11cfd27577fd86200
[ "Apache-2.0" ]
null
null
null
from . import HashList
22
22
0.818182
3
22
6
1
0
0
0
0
0
0
0
0
0
0
0
0.136364
22
1
22
22
0.947368
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
f4b37d96cf0b1e6c58e7868636fa8d9463b5eddc
25
py
Python
arduino_yun/__init__.py
abhirocks1211/countly-sdk-iot-python
0ccc5120661c5e356d6a569b31ba5fb135fa8efb
[ "MIT" ]
9
2016-04-06T05:23:43.000Z
2022-02-21T04:41:47.000Z
arduino_yun/__init__.py
abhirocks1211/countly-sdk-iot-python
0ccc5120661c5e356d6a569b31ba5fb135fa8efb
[ "MIT" ]
7
2016-01-07T22:09:48.000Z
2016-02-16T12:44:09.000Z
arduino_yun/__init__.py
abhirocks1211/countly-sdk-iot-python
0ccc5120661c5e356d6a569b31ba5fb135fa8efb
[ "MIT" ]
11
2016-03-17T14:03:44.000Z
2022-02-28T05:32:03.000Z
from arduino_yun import *
25
25
0.84
4
25
5
1
0
0
0
0
0
0
0
0
0
0
0
0.12
25
1
25
25
0.909091
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6