Schema (113 columns, name: dtype)

hexsha: string
size: int64
ext: string
lang: string
max_stars_repo_path: string
max_stars_repo_name: string
max_stars_repo_head_hexsha: string
max_stars_repo_licenses: list
max_stars_count: int64
max_stars_repo_stars_event_min_datetime: string
max_stars_repo_stars_event_max_datetime: string
max_issues_repo_path: string
max_issues_repo_name: string
max_issues_repo_head_hexsha: string
max_issues_repo_licenses: list
max_issues_count: int64
max_issues_repo_issues_event_min_datetime: string
max_issues_repo_issues_event_max_datetime: string
max_forks_repo_path: string
max_forks_repo_name: string
max_forks_repo_head_hexsha: string
max_forks_repo_licenses: list
max_forks_count: int64
max_forks_repo_forks_event_min_datetime: string
max_forks_repo_forks_event_max_datetime: string
content: string
avg_line_length: float64
max_line_length: int64
alphanum_fraction: float64
qsc_code_num_words_quality_signal: int64
qsc_code_num_chars_quality_signal: float64
qsc_code_mean_word_length_quality_signal: float64
qsc_code_frac_words_unique_quality_signal: float64
qsc_code_frac_chars_top_2grams_quality_signal: float64
qsc_code_frac_chars_top_3grams_quality_signal: float64
qsc_code_frac_chars_top_4grams_quality_signal: float64
qsc_code_frac_chars_dupe_5grams_quality_signal: float64
qsc_code_frac_chars_dupe_6grams_quality_signal: float64
qsc_code_frac_chars_dupe_7grams_quality_signal: float64
qsc_code_frac_chars_dupe_8grams_quality_signal: float64
qsc_code_frac_chars_dupe_9grams_quality_signal: float64
qsc_code_frac_chars_dupe_10grams_quality_signal: float64
qsc_code_frac_chars_replacement_symbols_quality_signal: float64
qsc_code_frac_chars_digital_quality_signal: float64
qsc_code_frac_chars_whitespace_quality_signal: float64
qsc_code_size_file_byte_quality_signal: float64
qsc_code_num_lines_quality_signal: float64
qsc_code_num_chars_line_max_quality_signal: float64
qsc_code_num_chars_line_mean_quality_signal: float64
qsc_code_frac_chars_alphabet_quality_signal: float64
qsc_code_frac_chars_comments_quality_signal: float64
qsc_code_cate_xml_start_quality_signal: float64
qsc_code_frac_lines_dupe_lines_quality_signal: float64
qsc_code_cate_autogen_quality_signal: float64
qsc_code_frac_lines_long_string_quality_signal: float64
qsc_code_frac_chars_string_length_quality_signal: float64
qsc_code_frac_chars_long_word_length_quality_signal: float64
qsc_code_frac_lines_string_concat_quality_signal: float64
qsc_code_cate_encoded_data_quality_signal: float64
qsc_code_frac_chars_hex_words_quality_signal: float64
qsc_code_frac_lines_prompt_comments_quality_signal: float64
qsc_code_frac_lines_assert_quality_signal: float64
qsc_codepython_cate_ast_quality_signal: float64
qsc_codepython_frac_lines_func_ratio_quality_signal: float64
qsc_codepython_cate_var_zero_quality_signal: bool
qsc_codepython_frac_lines_pass_quality_signal: float64
qsc_codepython_frac_lines_import_quality_signal: float64
qsc_codepython_frac_lines_simplefunc_quality_signal: float64
qsc_codepython_score_lines_no_logic_quality_signal: float64
qsc_codepython_frac_lines_print_quality_signal: float64
qsc_code_num_words: int64
qsc_code_num_chars: int64
qsc_code_mean_word_length: int64
qsc_code_frac_words_unique: null
qsc_code_frac_chars_top_2grams: int64
qsc_code_frac_chars_top_3grams: int64
qsc_code_frac_chars_top_4grams: int64
qsc_code_frac_chars_dupe_5grams: int64
qsc_code_frac_chars_dupe_6grams: int64
qsc_code_frac_chars_dupe_7grams: int64
qsc_code_frac_chars_dupe_8grams: int64
qsc_code_frac_chars_dupe_9grams: int64
qsc_code_frac_chars_dupe_10grams: int64
qsc_code_frac_chars_replacement_symbols: int64
qsc_code_frac_chars_digital: int64
qsc_code_frac_chars_whitespace: int64
qsc_code_size_file_byte: int64
qsc_code_num_lines: int64
qsc_code_num_chars_line_max: int64
qsc_code_num_chars_line_mean: int64
qsc_code_frac_chars_alphabet: int64
qsc_code_frac_chars_comments: int64
qsc_code_cate_xml_start: int64
qsc_code_frac_lines_dupe_lines: int64
qsc_code_cate_autogen: int64
qsc_code_frac_lines_long_string: int64
qsc_code_frac_chars_string_length: int64
qsc_code_frac_chars_long_word_length: int64
qsc_code_frac_lines_string_concat: null
qsc_code_cate_encoded_data: int64
qsc_code_frac_chars_hex_words: int64
qsc_code_frac_lines_prompt_comments: int64
qsc_code_frac_lines_assert: int64
qsc_codepython_cate_ast: int64
qsc_codepython_frac_lines_func_ratio: int64
qsc_codepython_cate_var_zero: int64
qsc_codepython_frac_lines_pass: int64
qsc_codepython_frac_lines_import: int64
qsc_codepython_frac_lines_simplefunc: int64
qsc_codepython_score_lines_no_logic: int64
qsc_codepython_frac_lines_print: int64
effective: string
hits: int64

Rows follow. In each row the metadata fields down to content are labeled; the unlabeled numeric values after the labeled line statistics are the remaining columns above, in schema order, from qsc_code_num_words_quality_signal through hits.
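A quick way to sanity-check this layout is to load a shard and compare its column dtypes against the list above. A minimal sketch, assuming the rows are stored as a parquet shard; the file name here is hypothetical:

    import pandas as pd

    # Hypothetical shard name: the dump does not say how the rows are stored.
    df = pd.read_parquet("rows-00000-of-00001.parquet")

    # The 113 columns above should come back with the dtypes listed.
    print(df.dtypes)

    # Per-row field access, e.g. the best-starred mirror of the first row:
    row = df.iloc[0]
    print(row["max_stars_repo_name"], row["max_stars_count"])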
Row 1
hexsha: 594197865dcd78f5c865efa627fa40ed2203473e
size: 281 | ext: py | lang: Python
max_stars_repo: path=Intro/1_The_Journey_Begins/01_add.py, name=RylandGomez/CS-Arcade, head_hexsha=086afa37fdba5bb2ca21fd93ed616bb5aca09d5d, licenses=["MIT"], max_stars_count=null, stars_event=null to null
max_issues_repo: path=Intro/1_The_Journey_Begins/01_add.py, name=RylandGomez/CS-Arcade, head_hexsha=086afa37fdba5bb2ca21fd93ed616bb5aca09d5d, licenses=["MIT"], max_issues_count=null, issues_event=null to null
max_forks_repo: path=Intro/1_The_Journey_Begins/01_add.py, name=RylandGomez/CS-Arcade, head_hexsha=086afa37fdba5bb2ca21fd93ed616bb5aca09d5d, licenses=["MIT"], max_forks_count=null, forks_event=null to null
content:
    def solution(param1, param2):
        '''
        EXPLANATION
        -------------------------------------------------------------------
        Use the + operator for basic addition.
        -------------------------------------------------------------------
        '''
        return param1 + param2
avg_line_length: 28.1 | max_line_length: 71 | alphanum_fraction: 0.291815
14
281
5.857143
0.857143
0.292683
0
0
0
0
0
0
0
0
0
0.017467
0.185053
281
9
72
31.222222
0.340611
0.661922
0
0
0
0
0
0
0
0
0
0
0
1
0.5
false
0
0
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
6
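Row 1's surface statistics (14 words, mean word length 5.857143, 0.857143 unique) are simple functions of content. A minimal sketch of how signals of this kind could be computed; the tokenization is an assumption, so the outputs need not match the stored values exactly:

    def surface_signals(content: str) -> dict:
        """Whitespace-tokenized statistics; an assumed reading of the
        qsc_code_* names, not the dataset's reference implementation."""
        words = content.split()
        n_words = max(len(words), 1)
        n_chars = max(len(content), 1)
        return {
            "num_words": len(words),
            "mean_word_length": sum(len(w) for w in words) / n_words,
            "frac_words_unique": len(set(words)) / n_words,
            "frac_chars_digital": sum(c.isdigit() for c in content) / n_chars,
            "frac_chars_whitespace": sum(c.isspace() for c in content) / n_chars,
        }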
Row 2
hexsha: a7218a64f6fe7a25bec0c926275feac961c87caf
size: 183 | ext: py | lang: Python
max_stars_repo: path=app/utils/utils.py, name=amommendes/wikimedia_stream, head_hexsha=53ed27793a665a29addbc16da307b2408f313847, licenses=["Apache-2.0"], max_stars_count=null, stars_event=null to null
max_issues_repo: path=app/utils/utils.py, name=amommendes/wikimedia_stream, head_hexsha=53ed27793a665a29addbc16da307b2408f313847, licenses=["Apache-2.0"], max_issues_count=2, issues_event=2021-06-01T23:48:18.000Z to 2021-06-02T03:30:43.000Z
max_forks_repo: path=app/utils/utils.py, name=amommendes/wikimedia_stream, head_hexsha=53ed27793a665a29addbc16da307b2408f313847, licenses=["Apache-2.0"], max_forks_count=null, forks_event=null to null
content:
    from datetime import datetime, timedelta
    import time

    def get_mseconds():
        """
        This method returns actual time in milliseconds
        """
        return int(round(time.time()*1000))
avg_line_length: 18.3 | max_line_length: 51 | alphanum_fraction: 0.693989
23
183
5.478261
0.782609
0
0
0
0
0
0
0
0
0
0
0.027586
0.20765
183
9
52
20.333333
0.841379
0.256831
0
0
0
0
0
0
0
0
0
0
0
1
0.25
true
0
0.5
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
1
0
1
0
0
6
Row 3
hexsha: 596c8e2de7a075d978acdd2eadba419575b344e8
size: 28 | ext: py | lang: Python
max_stars_repo: path=balance/ERC20/balance.py, name=picaproject/wallet, head_hexsha=56966caca715aa2d541e81f3064d00fdb6adc192, licenses=["Apache-2.0"], max_stars_count=null, stars_event=null to null
max_issues_repo: path=balance/ERC20/balance.py, name=picaproject/wallet, head_hexsha=56966caca715aa2d541e81f3064d00fdb6adc192, licenses=["Apache-2.0"], max_issues_count=null, issues_event=null to null
max_forks_repo: path=balance/ERC20/balance.py, name=picaproject/wallet, head_hexsha=56966caca715aa2d541e81f3064d00fdb6adc192, licenses=["Apache-2.0"], max_forks_count=null, forks_event=null to null
content:
    def get_balance():
        pass
avg_line_length: 14 | max_line_length: 19 | alphanum_fraction: 0.642857
4
28
4.25
1
0
0
0
0
0
0
0
0
0
0
0
0.25
28
2
20
14
0.809524
0
0
0
0
0
0
0
0
0
0
0
0
1
0.5
true
0.5
0
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
1
0
0
0
0
0
6
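Row 3 is a two-line stub, and its qsc_codepython_* signals reflect that: frac_lines_pass is 0.5, one pass line out of two. A sketch of line- and AST-level checks in this spirit; the exact signal definitions are assumptions made for illustration:

    import ast

    def python_ast_signals(content: str) -> dict:
        """Checks in the spirit of qsc_codepython_*; the definitions
        here are assumptions, not the dataset's reference code."""
        try:
            ast.parse(content)
            parses = 1
        except SyntaxError:
            parses = 0
        lines = [ln for ln in content.splitlines() if ln.strip()]
        n = max(len(lines), 1)
        return {
            "cate_ast": parses,  # 1 if the text parses as Python
            "frac_lines_pass": sum(ln.strip() == "pass" for ln in lines) / n,
            "frac_lines_import": sum(ln.lstrip().startswith(("import ", "from "))
                                     for ln in lines) / n,
        }

On Row 3's content this yields cate_ast = 1 and frac_lines_pass = 0.5, in line with the stored quality signals.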
Row 4
hexsha: 59d031350fa9afa627e873095bb28c6579a5bcf2
size: 39 | ext: py | lang: Python
max_stars_repo: path=common/__init__.py, name=huestisk/NHL, head_hexsha=b0e528589c4ae523dea19f406daebc1c5edcc738, licenses=["MIT"], max_stars_count=null, stars_event=null to null
max_issues_repo: path=common/__init__.py, name=huestisk/NHL, head_hexsha=b0e528589c4ae523dea19f406daebc1c5edcc738, licenses=["MIT"], max_issues_count=null, issues_event=null to null
max_forks_repo: path=common/__init__.py, name=huestisk/NHL, head_hexsha=b0e528589c4ae523dea19f406daebc1c5edcc738, licenses=["MIT"], max_forks_count=null, forks_event=null to null
content:
from common.convert_to_matches import *
avg_line_length: 39 | max_line_length: 39 | alphanum_fraction: 0.871795
6
39
5.333333
1
0
0
0
0
0
0
0
0
0
0
0
0.076923
39
1
39
39
0.888889
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
Row 5
hexsha: ab9876674ddda6dfe946df0d13af725dcb911598
size: 174 | ext: py | lang: Python
max_stars_repo: path=apps/drl/chpZ01/loss_mse.py, name=yt7589/iching, head_hexsha=6673da38f4c80e7fd297c86fedc5616aee8ac09b, licenses=["Apache-2.0"], max_stars_count=32, stars_event=2020-04-14T08:32:18.000Z to 2022-02-09T07:05:08.000Z
max_issues_repo: path=apps/drl/chpZ01/loss_mse.py, name=trinh-hoang-hiep/iching, head_hexsha=e1feae5741c3cbde535d7a275b01d4f0cf9e21ed, licenses=["Apache-2.0"], max_issues_count=1, issues_event=2020-04-08T10:42:15.000Z to 2020-04-15T01:38:03.000Z
max_forks_repo: path=apps/drl/chpZ01/loss_mse.py, name=trinh-hoang-hiep/iching, head_hexsha=e1feae5741c3cbde535d7a275b01d4f0cf9e21ed, licenses=["Apache-2.0"], max_forks_count=4, forks_event=2020-08-25T03:56:46.000Z to 2021-05-11T05:55:51.000Z
content:
    #
    class LossMse(object):
        def __init__(self):
            super().__init__()

        def forward(self, pred, target):
            return ((pred - target)*(pred - target)).sum(0)
avg_line_length: 21.75 | max_line_length: 55 | alphanum_fraction: 0.574713
20
174
4.6
0.65
0.326087
0
0
0
0
0
0
0
0
0
0.007752
0.258621
174
8
55
21.75
0.705426
0
0
0
0
0
0
0
0
0
0
0
0
1
0.4
false
0
0
0.2
0.8
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
6
Row 6
hexsha: aba6e0e7a8969f43451a326dbea005515c4db233
size: 32 | ext: py | lang: Python
max_stars_repo: path=spa/test_use_from_colab.py, name=cavestruz/spa, head_hexsha=4f147694601e0b88f97794c8851ccdd615c73ea4, licenses=["BSD-3-Clause"], max_stars_count=null, stars_event=null to null
max_issues_repo: path=spa/test_use_from_colab.py, name=cavestruz/spa, head_hexsha=4f147694601e0b88f97794c8851ccdd615c73ea4, licenses=["BSD-3-Clause"], max_issues_count=null, issues_event=null to null
max_forks_repo: path=spa/test_use_from_colab.py, name=cavestruz/spa, head_hexsha=4f147694601e0b88f97794c8851ccdd615c73ea4, licenses=["BSD-3-Clause"], max_forks_count=null, forks_event=null to null
content:
print('Hello from github repo')
avg_line_length: 16 | max_line_length: 31 | alphanum_fraction: 0.75
5
32
4.8
1
0
0
0
0
0
0
0
0
0
0
0
0.125
32
1
32
32
0.857143
0
0
0
0
0
0.6875
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
6
Row 7
hexsha: abc64d68abcfbbb016a54c808b2ee0b15709325e
size: 79 | ext: py | lang: Python
max_stars_repo: path=tests/bundles/sqlalchemy/_bundles/app/__init__.py, name=achiang/flask-unchained, head_hexsha=12788a6e618904a25ff2b571eb05ff1dc8f1840f, licenses=["MIT"], max_stars_count=69, stars_event=2018-10-10T01:59:11.000Z to 2022-03-29T17:29:30.000Z
max_issues_repo: path=tests/bundles/sqlalchemy/_bundles/app/__init__.py, name=achiang/flask-unchained, head_hexsha=12788a6e618904a25ff2b571eb05ff1dc8f1840f, licenses=["MIT"], max_issues_count=18, issues_event=2018-11-17T12:42:02.000Z to 2021-05-22T18:45:27.000Z
max_forks_repo: path=tests/bundles/sqlalchemy/_bundles/app/__init__.py, name=achiang/flask-unchained, head_hexsha=12788a6e618904a25ff2b571eb05ff1dc8f1840f, licenses=["MIT"], max_forks_count=7, forks_event=2018-10-12T16:20:25.000Z to 2021-10-06T12:18:21.000Z
content:
    from flask_unchained import AppBundle


    class MyAppBundle(AppBundle):
        pass
avg_line_length: 13.166667 | max_line_length: 37 | alphanum_fraction: 0.797468
9
79
6.888889
0.888889
0
0
0
0
0
0
0
0
0
0
0
0.164557
79
5
38
15.8
0.939394
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.333333
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
6
Row 8
hexsha: abd18f088d6b708912b617f04fbc40ae9ef1987d
size: 85 | ext: py | lang: Python
max_stars_repo: path=src/lib/compute.py, name=dineshkh/test_git, head_hexsha=80caa6b5f009fa1d1c844ea8d57e54c3beea30b2, licenses=["MIT"], max_stars_count=null, stars_event=null to null
max_issues_repo: path=src/lib/compute.py, name=dineshkh/test_git, head_hexsha=80caa6b5f009fa1d1c844ea8d57e54c3beea30b2, licenses=["MIT"], max_issues_count=null, issues_event=null to null
max_forks_repo: path=src/lib/compute.py, name=dineshkh/test_git, head_hexsha=80caa6b5f009fa1d1c844ea8d57e54c3beea30b2, licenses=["MIT"], max_forks_count=null, forks_event=null to null
content:
    import json
    import numpy


    def main():
        print ("again modieied in master branch")
avg_line_length: 12.142857 | max_line_length: 45 | alphanum_fraction: 0.705882
12
85
5
0.916667
0
0
0
0
0
0
0
0
0
0
0
0.211765
85
7
45
12.142857
0.895522
0
0
0
0
0
0.360465
0
0
0
0
0
0
1
0.25
true
0
0.5
0
0.75
0.25
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
1
0
1
0
0
6
Row 9
hexsha: abe83614d54fc5b7951f5d3f7470cd238e9aaeae
size: 94 | ext: py | lang: Python
max_stars_repo: path=Desktop/packages/rmnetwork/__init__.py, name=peter9teufel/raspmedia, head_hexsha=037eb9ef4bd700ec3bce00987db5f9355b52346a, licenses=["Apache-2.0"], max_stars_count=29, stars_event=2015-01-03T01:05:04.000Z to 2021-10-04T22:42:37.000Z
max_issues_repo: path=Desktop/packages/rmnetwork/__init__.py, name=peter9teufel/raspmedia, head_hexsha=037eb9ef4bd700ec3bce00987db5f9355b52346a, licenses=["Apache-2.0"], max_issues_count=6, issues_event=2015-01-03T01:12:18.000Z to 2016-08-09T04:35:52.000Z
max_forks_repo: path=Desktop/packages/rmnetwork/__init__.py, name=peter9teufel/raspmedia, head_hexsha=037eb9ef4bd700ec3bce00987db5f9355b52346a, licenses=["Apache-2.0"], max_forks_count=12, forks_event=2015-01-03T03:48:18.000Z to 2019-08-20T09:23:09.000Z
content:
import interpreter, messages, udpresponselistener, udpconnector, tcpfileclient, tcpfilesocket
avg_line_length: 47 | max_line_length: 93 | alphanum_fraction: 0.87234
7
94
11.714286
1
0
0
0
0
0
0
0
0
0
0
0
0.074468
94
1
94
94
0.942529
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
Row 10
hexsha: 050c74677551846c575f66a93f9e40fd014796ed
size: 5,001 | ext: py | lang: Python
max_stars_repo: path=app/tests/crud/test_binary_ml_model.py, name=qtdemo1/ibm-ops, head_hexsha=29f3d4ba406a1c39a007468977784d6c39f056bb, licenses=["Apache-2.0"], max_stars_count=1, stars_event=2021-09-14T18:40:33.000Z to 2021-09-14T18:40:33.000Z
max_issues_repo: path=app/tests/crud/test_binary_ml_model.py, name=qtdemo1/ibm-ops, head_hexsha=29f3d4ba406a1c39a007468977784d6c39f056bb, licenses=["Apache-2.0"], max_issues_count=7, issues_event=2021-04-23T13:41:39.000Z to 2021-08-12T09:33:10.000Z
max_forks_repo: path=app/tests/crud/test_binary_ml_model.py, name=qtdemo1/ibm-ops, head_hexsha=29f3d4ba406a1c39a007468977784d6c39f056bb, licenses=["Apache-2.0"], max_forks_count=5, forks_event=2020-12-10T14:27:23.000Z to 2022-03-29T08:44:22.000Z
content (flattened, original line breaks lost):
#!/usr/bin/env python3 # # Copyright 2020 IBM # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License.IBM Confidential # import pickle import typing import sqlalchemy.orm as orm import app.crud as crud import app.models as models import app.schemas as schemas import app.schemas.binary_config as mapping import app.tests.predictors.scikit_learn.model def test_create_binary_ml_model( db: orm.Session, endpoint_in_db: models.Endpoint ) -> typing.NoReturn: predictor = app.tests.predictors.scikit_learn.model.get_classification_predictor() binary_create = schemas.BinaryMlModelCreate( model_b64=pickle.dumps(predictor), input_data_structure=mapping.ModelInput.DATAFRAME, output_data_structure=mapping.ModelOutput.NUMPY_ARRAY, format=mapping.ModelWrapper.JOBLIB ) binary = crud.binary_ml_model.create_with_endpoint(db, obj_in=binary_create, endpoint_id=endpoint_in_db.id) assert binary.id == endpoint_in_db.id assert isinstance(pickle.loads(binary.model_b64), type(pickle.loads(binary_create.model_b64))) assert binary.input_data_structure == binary_create.input_data_structure assert binary.output_data_structure == binary_create.output_data_structure assert binary.format == binary_create.format def test_get_binary_ml_model( db: orm.Session, endpoint_in_db: models.Endpoint ) -> typing.NoReturn: predictor = app.tests.predictors.scikit_learn.model.get_classification_predictor() binary_create = schemas.BinaryMlModelCreate( model_b64=pickle.dumps(predictor), input_data_structure=mapping.ModelInput.DATAFRAME, output_data_structure=mapping.ModelOutput.NUMPY_ARRAY, format=mapping.ModelWrapper.JOBLIB ) binary = crud.binary_ml_model.create_with_endpoint(db, obj_in=binary_create, endpoint_id=endpoint_in_db.id) binary_1 = crud.binary_ml_model.get(db, id=binary.id) assert binary_1.id == binary.id assert binary_1.id == endpoint_in_db.id assert isinstance(pickle.loads(binary_1.model_b64), type(pickle.loads(binary_create.model_b64))) assert binary_1.input_data_structure == binary_create.input_data_structure assert binary_1.output_data_structure == binary_create.output_data_structure assert binary_1.format == binary_create.format def test_get_binary_ml_model_by_endpoint( db: orm.Session, endpoint_in_db: models.Endpoint ) -> typing.NoReturn: predictor = app.tests.predictors.scikit_learn.model.get_classification_predictor() binary_create = schemas.BinaryMlModelCreate( model_b64=pickle.dumps(predictor), input_data_structure=mapping.ModelInput.DATAFRAME, output_data_structure=mapping.ModelOutput.NUMPY_ARRAY, format=mapping.ModelWrapper.JOBLIB ) binary = crud.binary_ml_model.create_with_endpoint(db, obj_in=binary_create, endpoint_id=endpoint_in_db.id) binary_1 = crud.binary_ml_model.get_by_endpoint(db, endpoint_id=endpoint_in_db.id) assert binary_1.id == binary.id assert binary_1.id == endpoint_in_db.id assert isinstance(pickle.loads(binary_1.model_b64), type(pickle.loads(binary_create.model_b64))) assert binary_1.input_data_structure == binary_create.input_data_structure assert binary_1.output_data_structure == binary_create.output_data_structure assert 
binary_1.format == binary_create.format def test_delete_binary_ml_model( db: orm.Session, endpoint_in_db: models.Endpoint ) -> typing.NoReturn: predictor = app.tests.predictors.scikit_learn.model.get_classification_predictor() binary_create = schemas.BinaryMlModelCreate( model_b64=pickle.dumps(predictor), input_data_structure=mapping.ModelInput.DATAFRAME, output_data_structure=mapping.ModelOutput.NUMPY_ARRAY, format=mapping.ModelWrapper.JOBLIB ) binary = crud.binary_ml_model.create_with_endpoint(db, obj_in=binary_create, endpoint_id=endpoint_in_db.id) binary_1 = crud.binary_ml_model.delete(db, id=binary.id) binary_2 = crud.binary_ml_model.get(db, id=binary.id) assert binary_2 is None assert binary_1.id == binary.id assert binary_1.id == endpoint_in_db.id assert isinstance(pickle.loads(binary_1.model_b64), type(pickle.loads(binary_create.model_b64))) assert binary_1.input_data_structure == binary_create.input_data_structure assert binary_1.output_data_structure == binary_create.output_data_structure assert binary_1.format == binary_create.format
avg_line_length: 43.112069 | max_line_length: 111 | alphanum_fraction: 0.774445
694
5,001
5.286744
0.17147
0.078496
0.053148
0.034342
0.810848
0.810848
0.801581
0.801581
0.792314
0.792314
0
0.013078
0.143771
5,001
115
112
43.486957
0.843765
0.115377
0
0.690476
0
0
0
0
0
0
0
0
0.285714
1
0.047619
false
0
0.095238
0
0.142857
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
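Row 10 is dominated by repetition: frac_chars_dupe_5grams is 0.810848, because its four test functions share a nearly identical setup block. A sketch of one plausible duplicate-n-gram measure; whitespace tokenization and position-set coverage are assumptions here:

    from collections import Counter

    def frac_chars_dupe_ngrams(content: str, n: int = 5) -> float:
        """Fraction of word characters covered by word n-grams that occur
        more than once; an assumed reading of frac_chars_dupe_5grams."""
        words = content.split()
        if len(words) < n:
            return 0.0
        grams = [tuple(words[i:i + n]) for i in range(len(words) - n + 1)]
        counts = Counter(grams)
        covered = set()
        for i, g in enumerate(grams):
            if counts[g] > 1:
                covered.update(range(i, i + n))  # word positions in a dupe
        dupe_chars = sum(len(words[i]) for i in covered)
        total_chars = sum(len(w) for w in words)
        return dupe_chars / max(total_chars, 1)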
Row 11
hexsha: 052c6f2aa46b3d12f07d84d5fbda88043dfe6928
size: 75 | ext: py | lang: Python
max_stars_repo: path=tests/test.py, name=dan-casabuena/CovarianceToolbox, head_hexsha=6c65191d8947288520cba7266f46042e45be177a, licenses=["MIT"], max_stars_count=null, stars_event=null to null
max_issues_repo: path=tests/test.py, name=dan-casabuena/CovarianceToolbox, head_hexsha=6c65191d8947288520cba7266f46042e45be177a, licenses=["MIT"], max_issues_count=null, issues_event=null to null
max_forks_repo: path=tests/test.py, name=dan-casabuena/CovarianceToolbox, head_hexsha=6c65191d8947288520cba7266f46042e45be177a, licenses=["MIT"], max_forks_count=null, forks_event=null to null
content:
    import pandas as pd
    import numpy as np

    def test(bruh, classic=0):
        pass
avg_line_length: 15 | max_line_length: 26 | alphanum_fraction: 0.72
14
75
3.857143
0.857143
0
0
0
0
0
0
0
0
0
0
0.016949
0.213333
75
5
27
15
0.898305
0
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0.25
0.5
0
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
1
0
1
0
0
6
Row 12
hexsha: 55328872022fe9ae47e29e666ffbc3fa1b67dcc1
size: 145 | ext: py | lang: Python
max_stars_repo: path=docs/manual/gears/examples/fmap_union_cfg.py, name=bogdanvuk/pygears, head_hexsha=a0b21d445e1d5c89ad66751447b8253536b835ee, licenses=["MIT"], max_stars_count=120, stars_event=2018-04-23T08:29:04.000Z to 2022-03-30T14:41:52.000Z
max_issues_repo: path=docs/manual/gears/examples/fmap_union_cfg.py, name=FZP1607152286/pygears, head_hexsha=a0b21d445e1d5c89ad66751447b8253536b835ee, licenses=["MIT"], max_issues_count=12, issues_event=2019-07-09T17:12:58.000Z to 2022-03-18T09:05:10.000Z
max_forks_repo: path=docs/manual/gears/examples/fmap_union_cfg.py, name=FZP1607152286/pygears, head_hexsha=a0b21d445e1d5c89ad66751447b8253536b835ee, licenses=["MIT"], max_forks_count=12, forks_event=2019-05-10T19:42:08.000Z to 2022-03-28T18:26:44.000Z
content:
    from pygears import reg

    reg['debug/trace'].append('/fmap/sub.*')
    reg['debug/trace'].append('/fmap.din')
    reg['debug/trace'].append('/fmap.dout')
avg_line_length: 24.166667 | max_line_length: 40 | alphanum_fraction: 0.682759
22
145
4.5
0.5
0.242424
0.393939
0.575758
0.69697
0
0
0
0
0
0
0
0.055172
145
5
41
29
0.722628
0
0
0
0
0
0.434483
0
0
0
0
0
0
1
0
true
0
0.25
0
0.25
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
6
Row 13
hexsha: e9d47c3b443eef25ad18128610057dd99f2ff92d
size: 96 | ext: py | lang: Python
max_stars_repo: path=venv/lib/python3.8/site-packages/numpy/core/tests/test_casting_unittests.py, name=GiulianaPola/select_repeats, head_hexsha=17a0d053d4f874e42cf654dd142168c2ec8fbd11, licenses=["MIT"], max_stars_count=2, stars_event=2022-03-13T01:58:52.000Z to 2022-03-31T06:07:54.000Z
max_issues_repo: path=venv/lib/python3.8/site-packages/numpy/core/tests/test_casting_unittests.py, name=DesmoSearch/Desmobot, head_hexsha=b70b45df3485351f471080deb5c785c4bc5c4beb, licenses=["MIT"], max_issues_count=19, issues_event=2021-11-20T04:09:18.000Z to 2022-03-23T15:05:55.000Z
max_forks_repo: path=venv/lib/python3.8/site-packages/numpy/core/tests/test_casting_unittests.py, name=DesmoSearch/Desmobot, head_hexsha=b70b45df3485351f471080deb5c785c4bc5c4beb, licenses=["MIT"], max_forks_count=null, forks_event=null to null
content:
/home/runner/.cache/pip/pool/7c/3f/3e/d6cc48561c9aa4b542cf9e7844318e6e779ac122e904018fac69e905eb
avg_line_length: 96 | max_line_length: 96 | alphanum_fraction: 0.895833
9
96
9.555556
1
0
0
0
0
0
0
0
0
0
0
0.416667
0
96
1
96
96
0.479167
0
0
0
0
0
0
0
0
1
0
0
0
0
null
null
0
0
null
null
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
1
0
0
0
0
0
0
0
null
1
0
0
0
1
0
0
0
0
0
0
0
0
6
Row 14
hexsha: 75bae58223cbee2d43015a2164cb3bc3b38ed5c0
size: 107,177 | ext: py | lang: Python
max_stars_repo: path=cfdm/mixin/netcdf.py, name=tsjackson-noaa/cfdm, head_hexsha=a669677905badaced2eba87413288ac0bc2697fc, licenses=["MIT"], max_stars_count=null, stars_event=null to null
max_issues_repo: path=cfdm/mixin/netcdf.py, name=tsjackson-noaa/cfdm, head_hexsha=a669677905badaced2eba87413288ac0bc2697fc, licenses=["MIT"], max_issues_count=null, issues_event=null to null
max_forks_repo: path=cfdm/mixin/netcdf.py, name=tsjackson-noaa/cfdm, head_hexsha=a669677905badaced2eba87413288ac0bc2697fc, licenses=["MIT"], max_forks_count=null, forks_event=null to null
content (flattened, original line breaks lost):
from ..core.functions import deepcopy class DeprecationError(Exception): """An error indicating a method is no longer available.""" pass class NetCDF: """Mixin class for storing simple netCDF elements. .. versionadded:: (cfdm) 1.7.0 """ def _initialise_netcdf(self, source=None): """Helps to initialise netCDF components. Call this from inside the __init__ method of a class that inherits from this mixin class. :Parameters: source: optional Initialise the netCDF components from those of *source*. :Returns: `None` **Examples:** >>> f._initialise_netcdf(source) """ if source is None: netcdf = {} else: try: netcdf = source._get_component("netcdf", {}) except AttributeError: netcdf = {} else: if netcdf: netcdf = deepcopy(netcdf) else: netcdf = {} self._set_component("netcdf", netcdf, copy=False) class _NetCDFGroupsMixin: """Mixin class for accessing netCDF(4) hierarchical groups. .. versionadded:: (cfdm) 1.8.6 """ @classmethod def _nc_groups(cls, nc_get): """Return the netCDF group hierarchy. The group hierarchy is defined by the netCDF name. Groups are delimited by ``/`` (slash) characters in the netCDF name. The groups are returned, in hierarchical order, as a sequence of strings. If the name is not set, or contains no ``/`` characters then an empty sequence is returned, signifying the root group. .. versionadded:: (cfdm) 1.8.6 .. seealso:: `_nc_clear_groups`, `_nc_set_groups` :Parameters: nc_get: function The method which gets the netCDF name. :Returns: `tuple` of `str` The group structure. **Examples:** See the examples in classes which inherit this method. """ name = nc_get(default="") return tuple(name.split("/")[1:-1]) @classmethod def _nc_set_groups(cls, groups, nc_get, nc_set, nc_groups): """Set the netCDF group hierarchy. The group hierarchy is defined by the netCDF name. Groups are delimited by ``/`` (slash) characters in the netCDF name. The groups are returned, in hierarchical order, as a sequence of strings. If the name is not set, or contains no ``/`` characters then an empty sequence is returned, signifying the root group. .. versionadded:: (cfdm) 1.8.6 .. seealso:: `_nc_clear_groups`, `_nc_groups` :Parameters: groups: sequence of `str` The new group structure. nc_get: function The method which gets the netCDF name. nc_set: function The method which sets the netCDF name. nc_groups: function The method which returns existing group structure. :Returns: `tuple` of `str` The group structure prior to being reset. **Examples:** See the examples in classes which inherit this method. """ old = nc_groups() name = nc_get(default="") name = name.split("/")[-1] if not name: raise ValueError("Can't set groups when there is no netCDF name") if groups: for group in groups: if "/" in group: raise ValueError( f"Can't have '/' character in group name: {group!r}" ) name = "/".join(("",) + tuple(groups) + (name,)) if name: nc_set(name) return old @classmethod def _nc_clear_groups(cls, nc_get, nc_set, nc_groups): """Remove the netCDF group hierarchy. The group hierarchy is defined by the netCDF name. Groups are delimited by ``/`` (slash) characters in the netCDF name. The groups are returned, in hierarchical order, as a sequence of strings. If the name is not set, or contains no ``/`` characters then an empty sequence is returned, signifying the root group. .. versionadded:: (cfdm) 1.8.6 .. seealso:: `_nc_groups`, `_nc_set_groups` :Parameters: nc_get: function The method which gets the netCDF name. nc_set: function The method which sets the netCDF name. 
nc_groups: function The method which returns existing group structure. :Returns: `tuple` of `str` The removed group structure. **Examples:** See the examples in classes which inherit this method. """ old = nc_groups() name = nc_get(default="") name = name.split("/")[-1] if name: nc_set(name) return old class NetCDFDimension(NetCDF, _NetCDFGroupsMixin): """Mixin class for accessing the netCDF dimension name. .. versionadded:: (cfdm) 1.7.0 """ def nc_del_dimension(self, default=ValueError()): """Remove the netCDF dimension name. .. versionadded:: (cfdm) 1.7.0 .. seealso:: `nc_get_dimension`, `nc_has_dimension`, `nc_set_dimension` :Parameters: default: optional Return the value of the *default* parameter if the netCDF dimension name has not been set. If set to an `Exception` instance then it will be raised instead. :Returns: `str` The removed netCDF dimension name. **Examples:** >>> f.nc_set_dimension('time') >>> f.nc_has_dimension() True >>> f.nc_get_dimension() 'time' >>> f.nc_del_dimension() 'time' >>> f.nc_has_dimension() False >>> print(f.nc_get_dimension(None)) None >>> print(f.nc_del_dimension(None)) None """ try: return self._get_component("netcdf").pop("dimension") except KeyError: if default is None: return default return self._default( default, f"{self.__class__.__name__} has no netCDF dimension name", ) def nc_get_dimension(self, default=ValueError()): """Return the netCDF dimension name. .. versionadded:: (cfdm) 1.7.0 .. seealso:: `nc_del_dimension`, `nc_has_dimension`, `nc_set_dimension` :Parameters: default: optional Return the value of the *default* parameter if the netCDF dimension name has not been set. If set to an `Exception` instance then it will be raised instead. :Returns: `str` The netCDF dimension name. **Examples:** >>> f.nc_set_dimension('time') >>> f.nc_has_dimension() True >>> f.nc_get_dimension() 'time' >>> f.nc_del_dimension() 'time' >>> f.nc_has_dimension() False >>> print(f.nc_get_dimension(None)) None >>> print(f.nc_del_dimension(None)) None """ try: return self._get_component("netcdf")["dimension"] except KeyError: if default is None: return default return self._default( default, f"{self.__class__.__name__} has no netCDF dimension name", ) def nc_has_dimension(self): """Whether the netCDF dimension name has been set. .. versionadded:: (cfdm) 1.7.0 .. seealso:: `nc_del_dimension`, `nc_get_dimension`, `nc_set_dimension` :Returns: `bool` `True` if the netCDF dimension name has been set, otherwise `False`. **Examples:** >>> f.nc_set_dimension('time') >>> f.nc_has_dimension() True >>> f.nc_get_dimension() 'time' >>> f.nc_del_dimension() 'time' >>> f.nc_has_dimension() False >>> print(f.nc_get_dimension(None)) None >>> print(f.nc_del_dimension(None)) None """ return "dimension" in self._get_component("netcdf") def nc_set_dimension(self, value): """Set the netCDF dimension name. If there are any ``/`` (slash) characters in the netCDF name then these act as delimiters for a group hierarchy. By default, or if the name starts with a ``/`` character and contains no others, the name is assumed to be in the root group. .. versionadded:: (cfdm) 1.7.0 .. seealso:: `nc_del_dimension`, `nc_get_dimension`, `nc_has_dimension` :Parameters: value: `str` The value for the netCDF dimension name. 
:Returns: `None` **Examples:** >>> f.nc_set_dimension('time') >>> f.nc_has_dimension() True >>> f.nc_get_dimension() 'time' >>> f.nc_del_dimension() 'time' >>> f.nc_has_dimension() False >>> print(f.nc_get_dimension(None)) None >>> print(f.nc_del_dimension(None)) None """ if not value or value == "/": raise ValueError(f"Invalid netCDF dimension name: {value!r}") if "/" in value: if not value.startswith("/"): raise ValueError( "A netCDF dimension name with a group structure " f"must start with a '/'. Got {value!r}" ) if value.count("/") == 1: value = value[1:] elif value.endswith("/"): raise ValueError( "A netCDF dimension name with a group structure " f"can't end with a '/'. Got {value!r}" ) self._get_component("netcdf")["dimension"] = value def nc_dimension_groups(self): """Return the netCDF dimension group hierarchy. The group hierarchy is defined by the netCDF name. Groups are delimited by ``/`` (slash) characters in the netCDF name. The groups are returned, in hierarchical order, as a sequence of strings. If the name is not set, or contains no ``/`` characters then an empty sequence is returned, signifying the root group. .. versionadded:: (cfdm) 1.8.6 .. seealso:: `nc_clear_dimension_groups`, `nc_set_dimension_groups` :Returns: `tuple` of `str` The group structure. **Examples:** >>> f.nc_set_dimension('time') >>> f.nc_dimension_groups() () >>> f.nc_set_dimension_groups(['forecast', 'model']) >>> f.nc_dimension_groups() ('forecast', 'model') >>> f.nc_get_dimension() '/forecast/model/time' >>> f.nc_clear_dimension_groups() ('forecast', 'model') >>> f.nc_get_dimension() 'time' >>> f.nc_set_dimension('/forecast/model/time') >>> f.nc_dimension_groups() ('forecast', 'model') >>> f.nc_del_dimension('/forecast/model/time') '/forecast/model/time' >>> f.nc_dimension_groups() () """ return self._nc_groups(nc_get=self.nc_get_dimension) def nc_set_dimension_groups(self, groups): """Set the netCDF dimension group hierarchy. The group hierarchy is defined by the netCDF name. Groups are delimited by ``/`` (slash) characters in the netCDF name. The groups are returned, in hierarchical order, as a sequence of strings. If the name is not set, or contains no ``/`` characters then an empty sequence is returned, signifying the root group. An alternative technique for setting the group structure is to set the netCDF dimension name, with `nc_set_dimension`, with the group structure delimited by ``/`` characters. .. versionadded:: (cfdm) 1.8.6 .. seealso:: `nc_clear_dimension_groups`, `nc_dimension_groups` :Parameters: groups: sequence of `str` The new group structure. :Returns: `tuple` of `str` The group structure prior to being reset. **Examples:** >>> f.nc_set_dimension('time') >>> f.nc_dimension_groups() () >>> f.nc_set_dimension_groups(['forecast', 'model']) >>> f.nc_dimension_groups() ('forecast', 'model') >>> f.nc_get_dimension() '/forecast/model/time' >>> f.nc_clear_dimension_groups() ('forecast', 'model') >>> f.nc_get_dimension() 'time' >>> f.nc_set_dimension('/forecast/model/time') >>> f.nc_dimension_groups() ('forecast', 'model') >>> f.nc_del_dimension('/forecast/model/time') '/forecast/model/time' >>> f.nc_dimension_groups() () """ return self._nc_set_groups( groups, nc_get=self.nc_get_dimension, nc_set=self.nc_set_dimension, nc_groups=self.nc_dimension_groups, ) def nc_clear_dimension_groups(self): """Remove the netCDF dimension group hierarchy. The group hierarchy is defined by the netCDF name. Groups are delimited by ``/`` (slash) characters in the netCDF name. 
The groups are returned, in hierarchical order, as a sequence of strings. If the name is not set, or contains no ``/`` characters then an empty sequence is returned, signifying the root group. An alternative technique for removing the group structure is to set the netCDF dimension name, with `nc_set_dimension`, with no ``/`` characters. .. versionadded:: (cfdm) 1.8.6 .. seealso:: `nc_dimension_groups`, `nc_set_dimension_groups` :Returns: `tuple` of `str` The removed group structure. **Examples:** >>> f.nc_set_dimension('time') >>> f.nc_dimension_groups() () >>> f.nc_set_dimension_groups(['forecast', 'model']) >>> f.nc_dimension_groups() ('forecast', 'model') >>> f.nc_get_dimension() '/forecast/model/time' >>> f.nc_clear_dimension_groups() ('forecast', 'model') >>> f.nc_get_dimension() 'time' >>> f.nc_set_dimension('/forecast/model/time') >>> f.nc_dimension_groups() ('forecast', 'model') >>> f.nc_del_dimension('/forecast/model/time') '/forecast/model/time' >>> f.nc_dimension_groups() () """ return self._nc_clear_groups( nc_get=self.nc_get_dimension, nc_set=self.nc_set_dimension, nc_groups=self.nc_dimension_groups, ) class NetCDFVariable(NetCDF, _NetCDFGroupsMixin): """Mixin class for accessing the netCDF variable name. .. versionadded:: (cfdm) 1.7.0 """ def nc_del_variable(self, default=ValueError()): """Remove the netCDF variable name. .. versionadded:: (cfdm) 1.7.0 .. seealso:: `nc_get_variable`, `nc_has_variable`, `nc_set_variable` :Parameters: default: optional Return the value of the *default* parameter if the netCDF variable name has not been set. If set to an `Exception` instance then it will be raised instead. :Returns: `str` The removed netCDF variable name. **Examples:** >>> f.nc_set_variable('tas') >>> f.nc_has_variable() True >>> f.nc_get_variable() 'tas' >>> f.nc_del_variable() 'tas' >>> f.nc_has_variable() False >>> print(f.nc_get_variable(None)) None >>> print(f.nc_del_variable(None)) None """ try: return self._get_component("netcdf").pop("variable") except KeyError: if default is None: return default return self._default( default, f"{self.__class__.__name__} has no netCDF variable name", ) def nc_get_variable(self, default=ValueError()): """Return the netCDF variable name. .. versionadded:: (cfdm) 1.7.0 .. seealso:: `nc_del_variable`, `nc_has_variable`, `nc_set_variable` :Parameters: default: optional Return the value of the *default* parameter if the netCDF variable name has not been set. If set to an `Exception` instance then it will be raised instead. :Returns: `str` The netCDF variable name. If unset then *default* is returned, if provided. **Examples:** >>> f.nc_set_variable('tas') >>> f.nc_has_variable() True >>> f.nc_get_variable() 'tas' >>> f.nc_del_variable() 'tas' >>> f.nc_has_variable() False >>> print(f.nc_get_variable(None)) None >>> print(f.nc_del_variable(None)) None """ try: return self._get_component("netcdf")["variable"] except KeyError: if default is None: return default return self._default( default, f"{self.__class__.__name__} has no netCDF variable name", ) def nc_has_variable(self): """Whether the netCDF variable name has been set. .. versionadded:: (cfdm) 1.7.0 .. seealso:: `nc_del_variable`, `nc_get_variable`, `nc_set_variable` :Returns: `bool` `True` if the netCDF variable name has been set, otherwise `False`. 
**Examples:** >>> f.nc_set_variable('tas') >>> f.nc_has_variable() True >>> f.nc_get_variable() 'tas' >>> f.nc_del_variable() 'tas' >>> f.nc_has_variable() False >>> print(f.nc_get_variable(None)) None >>> print(f.nc_del_variable(None)) None """ return "variable" in self._get_component("netcdf") def nc_set_variable(self, value): """Set the netCDF variable name. If there are any ``/`` (slash) characters in the netCDF name then these act as delimiters for a group hierarchy. By default, or if the name starts with a ``/`` character and contains no others, the name is assumed to be in the root group. .. versionadded:: (cfdm) 1.7.0 .. seealso:: `nc_del_variable`, `nc_get_variable`, `nc_has_variable` :Parameters: value: `str` The value for the netCDF variable name. :Returns: `None` **Examples:** >>> f.nc_set_variable('tas') >>> f.nc_has_variable() True >>> f.nc_get_variable() 'tas' >>> f.nc_del_variable() 'tas' >>> f.nc_has_variable() False >>> print(f.nc_get_variable(None)) None >>> print(f.nc_del_variable(None)) None """ if not value or value == "/": raise ValueError(f"Invalid netCDF variable name: {value!r}") if "/" in value: if not value.startswith("/"): raise ValueError( "A netCDF variable name with a group structure " f"must start with a '/'. Got {value!r}" ) if value.count("/") == 1: value = value[1:] elif value.endswith("/"): raise ValueError( "A netCDF variable name with a group structure " f"can't end with a '/'. Got {value!r}" ) self._get_component("netcdf")["variable"] = value def nc_variable_groups(self): """Return the netCDF variable group hierarchy. The group hierarchy is defined by the netCDF name. Groups are delimited by ``/`` (slash) characters in the netCDF name. The groups are returned, in hierarchical order, as a sequence of strings. If the name is not set, or contains no ``/`` characters then an empty sequence is returned, signifying the root group. .. versionadded:: (cfdm) 1.8.6 .. seealso:: `nc_clear_variable_groups`, `nc_set_variable_groups` :Returns: `tuple` of `str` The group structure. **Examples:** >>> f.nc_set_variable('time') >>> f.nc_variable_groups() () >>> f.nc_set_variable_groups(['forecast', 'model']) >>> f.nc_variable_groups() ('forecast', 'model') >>> f.nc_get_variable() '/forecast/model/time' >>> f.nc_clear_variable_groups() ('forecast', 'model') >>> f.nc_get_variable() 'time' >>> f.nc_set_variable('/forecast/model/time') >>> f.nc_variable_groups() ('forecast', 'model') >>> f.nc_del_variable('/forecast/model/time') '/forecast/model/time' >>> f.nc_variable_groups() () """ return self._nc_groups(nc_get=self.nc_get_variable) def nc_set_variable_groups(self, groups): """Set the netCDF variable group hierarchy. The group hierarchy is defined by the netCDF name. Groups are delimited by ``/`` (slash) characters in the netCDF name. The groups are returned, in hierarchical order, as a sequence of strings. If the name is not set, or contains no ``/`` characters then an empty sequence is returned, signifying the root group. An alternative technique for setting the group structure is to set the netCDF variable name, with `nc_set_variable`, with the group structure delimited by ``/`` characters. .. versionadded:: (cfdm) 1.8.6 .. seealso:: `nc_clear_variable_groups`, `nc_variable_groups` :Parameters: groups: sequence of `str` The new group structure. :Returns: `tuple` of `str` The group structure prior to being reset. 
**Examples:** >>> f.nc_set_variable('time') >>> f.nc_variable_groups() () >>> f.nc_set_variable_groups(['forecast', 'model']) >>> f.nc_variable_groups() ('forecast', 'model') >>> f.nc_get_variable() '/forecast/model/time' >>> f.nc_clear_variable_groups() ('forecast', 'model') >>> f.nc_get_variable() 'time' >>> f.nc_set_variable('/forecast/model/time') >>> f.nc_variable_groups() ('forecast', 'model') >>> f.nc_del_variable('/forecast/model/time') '/forecast/model/time' >>> f.nc_variable_groups() () """ return self._nc_set_groups( groups, nc_get=self.nc_get_variable, nc_set=self.nc_set_variable, nc_groups=self.nc_variable_groups, ) def nc_clear_variable_groups(self): """Remove the netCDF variable group hierarchy. The group hierarchy is defined by the netCDF name. Groups are delimited by ``/`` (slash) characters in the netCDF name. The groups are returned, in hierarchical order, as a sequence of strings. If the name is not set, or contains no ``/`` characters then an empty sequence is returned, signifying the root group. An alternative technique for removing the group structure is to set the netCDF variable name, with `nc_set_variable`, with no ``/`` characters. .. versionadded:: (cfdm) 1.8.6 .. seealso:: `nc_variable_groups`, `nc_set_variable_groups` :Returns: `tuple` of `str` The removed group structure. **Examples:** >>> f.nc_set_variable('time') >>> f.nc_variable_groups() () >>> f.nc_set_variable_groups(['forecast', 'model']) >>> f.nc_variable_groups() ('forecast', 'model') >>> f.nc_get_variable() '/forecast/model/time' >>> f.nc_clear_variable_groups() ('forecast', 'model') >>> f.nc_get_variable() 'time' >>> f.nc_set_variable('/forecast/model/time') >>> f.nc_variable_groups() ('forecast', 'model') >>> f.nc_del_variable('/forecast/model/time') '/forecast/model/time' >>> f.nc_variable_groups() () """ return self._nc_clear_groups( nc_get=self.nc_get_variable, nc_set=self.nc_set_variable, nc_groups=self.nc_variable_groups, ) class NetCDFSampleDimension(NetCDF, _NetCDFGroupsMixin): """Mixin class for accessing the netCDF sample dimension name. .. versionadded:: (cfdm) 1.7.0 """ def nc_del_sample_dimension(self, default=ValueError()): """Remove the netCDF sample dimension name. .. versionadded:: (cfdm) 1.7.0 .. seealso:: `nc_get_sample_dimension`, `nc_has_sample_dimension`, `nc_set_sample_dimension` :Parameters: default: optional Return the value of the *default* parameter if the netCDF sample dimension name has not been set. If set to an `Exception` instance then it will be raised instead. :Returns: `str` The removed netCDF sample dimension name. **Examples:** >>> f.nc_set_sample_dimension('time') >>> f.nc_has_sample_dimension() True >>> f.nc_get_sample_dimension() 'time' >>> f.nc_del_sample_dimension() 'time' >>> f.nc_has_sample_dimension() False >>> print(f.nc_get_sample_dimension(None)) None >>> print(f.nc_del_sample_dimension(None)) None """ try: return self._get_component("netcdf").pop("sample_dimension") except KeyError: if default is None: return default return self._default( default, f"{self.__class__.__name__} has no netCDF sample dimension name", ) def nc_get_sample_dimension(self, default=ValueError()): """Return the netCDF sample dimension name. .. versionadded:: (cfdm) 1.7.0 .. seealso:: `nc_del_sample_dimension`, `nc_has_sample_dimension`, `nc_set_sample_dimension` :Parameters: default: optional Return the value of the *default* parameter if the netCDF sample dimension name has not been set. If set to an `Exception` instance then it will be raised instead. 
:Returns: `str` The netCDF sample dimension name. **Examples:** >>> f.nc_set_sample_dimension('time') >>> f.nc_has_sample_dimension() True >>> f.nc_get_sample_dimension() 'time' >>> f.nc_del_sample_dimension() 'time' >>> f.nc_has_sample_dimension() False >>> print(f.nc_get_sample_dimension(None)) None >>> print(f.nc_del_sample_dimension(None)) None """ try: return self._get_component("netcdf")["sample_dimension"] except KeyError: if default is None: return default return self._default( default, f"{self.__class__.__name__} has no netCDF sample dimension name", ) def nc_has_sample_dimension(self): """Whether the netCDF sample dimension name has been set. .. versionadded:: (cfdm) 1.7.0 .. seealso:: `nc_del_sample_dimension`, `nc_get_sample_dimension`, `nc_set_sample_dimension` :Returns: `bool` `True` if the netCDF sample dimension name has been set, otherwise `False`. **Examples:** >>> f.nc_set_sample_dimension('time') >>> f.nc_has_sample_dimension() True >>> f.nc_get_sample_dimension() 'time' >>> f.nc_del_sample_dimension() 'time' >>> f.nc_has_sample_dimension() False >>> print(f.nc_get_sample_dimension(None)) None >>> print(f.nc_del_sample_dimension(None)) None """ return "sample_dimension" in self._get_component("netcdf") def nc_set_sample_dimension(self, value): """Set the netCDF sample dimension name. If there are any ``/`` (slash) characters in the netCDF name then these act as delimiters for a group hierarchy. By default, or if the name starts with a ``/`` character and contains no others, the name is assumed to be in the root group. .. versionadded:: (cfdm) 1.7.0 .. seealso:: `nc_del_sample_dimension`, `nc_get_sample_dimension`, `nc_has_sample_dimension` :Parameters: value: `str` The value for the netCDF sample dimension name. :Returns: `None` **Examples:** >>> f.nc_set_sample_dimension('time') >>> f.nc_has_sample_dimension() True >>> f.nc_get_sample_dimension() 'time' >>> f.nc_del_sample_dimension() 'time' >>> f.nc_has_sample_dimension() False >>> print(f.nc_get_sample_dimension(None)) None >>> print(f.nc_del_sample_dimension(None)) None """ if not value or value == "/": raise ValueError( f"Invalid netCDF sample dimension name: {value!r}" ) if "/" in value: if not value.startswith("/"): raise ValueError( "A netCDF sample dimension name with a group structure " f"must start with a '/'. Got {value!r}" ) if value.count("/") == 1: value = value[1:] elif value.endswith("/"): raise ValueError( "A netCDF sample dimension name with a group structure " f"can't end with a '/'. Got {value!r}" ) self._get_component("netcdf")["sample_dimension"] = value def nc_sample_dimension_groups(self): """Return the netCDF dimension group hierarchy. The group hierarchy is defined by the netCDF name. Groups are delimited by ``/`` (slash) characters in the netCDF name. The groups are returned, in hierarchical order, as a sequence of strings. If the name is not set, or contains no ``/`` characters then an empty sequence is returned, signifying the root group. .. versionadded:: (cfdm) 1.8.6 .. seealso:: `nc_clear_sample_dimension_groups`, `nc_set_sample_dimension_groups` :Returns: `tuple` of `str` The group structure. 
**Examples:** >>> f.nc_set_sample_dimension('element') >>> f.nc_sample_dimension_groups() () >>> f.nc_set_sample_dimension_groups(['forecast', 'model']) >>> f.nc_sample_dimension_groups() ('forecast', 'model') >>> f.nc_get_sample_dimension() '/forecast/model/element' >>> f.nc_clear_sample_dimension_groups() ('forecast', 'model') >>> f.nc_get_sample_dimension() 'element' >>> f.nc_set_sample_dimension('/forecast/model/element') >>> f.nc_sample_dimension_groups() ('forecast', 'model') >>> f.nc_del_sample_dimension('/forecast/model/element') '/forecast/model/element' >>> f.nc_sample_dimension_groups() () """ return self._nc_groups(nc_get=self.nc_get_sample_dimension) def nc_set_sample_dimension_groups(self, groups): """Set the netCDF dimension group hierarchy. The group hierarchy is defined by the netCDF name. Groups are delimited by ``/`` (slash) characters in the netCDF name. The groups are returned, in hierarchical order, as a sequence of strings. If the name is not set, or contains no ``/`` characters then an empty sequence is returned, signifying the root group. An alternative technique for setting the group structure is to set the netCDF dimension name, with `nc_set_sample_dimension`, with the group structure delimited by ``/`` characters. .. versionadded:: (cfdm) 1.8.6 .. seealso:: `nc_clear_sample_dimension_groups`, `nc_sample_dimension_groups` :Parameters: groups: sequence of `str` The new group structure. :Returns: `tuple` of `str` The group structure prior to being reset. **Examples:** >>> f.nc_set_sample_dimension('element') >>> f.nc_sample_dimension_groups() () >>> f.nc_set_sample_dimension_groups(['forecast', 'model']) >>> f.nc_sample_dimension_groups() ('forecast', 'model') >>> f.nc_get_sample_dimension() '/forecast/model/element' >>> f.nc_clear_sample_dimension_groups() ('forecast', 'model') >>> f.nc_get_sample_dimension() 'element' >>> f.nc_set_sample_dimension('/forecast/model/element') >>> f.nc_sample_dimension_groups() ('forecast', 'model') >>> f.nc_del_sample_dimension('/forecast/model/element') '/forecast/model/element' >>> f.nc_sample_dimension_groups() () """ return self._nc_set_groups( groups, nc_get=self.nc_get_sample_dimension, nc_set=self.nc_set_sample_dimension, nc_groups=self.nc_sample_dimension_groups, ) def nc_clear_sample_dimension_groups(self): """Remove the netCDF dimension group hierarchy. The group hierarchy is defined by the netCDF name. Groups are delimited by ``/`` (slash) characters in the netCDF name. The groups are returned, in hierarchical order, as a sequence of strings. If the name is not set, or contains no ``/`` characters then an empty sequence is returned, signifying the root group. An alternative technique for removing the group structure is to set the netCDF dimension name, with `nc_set_sample_dimension`, with no ``/`` characters. .. versionadded:: (cfdm) 1.8.6 .. seealso:: `nc_sample_dimension_groups`, `nc_set_sample_dimension_groups` :Returns: `tuple` of `str` The removed group structure. 
**Examples:** >>> f.nc_set_sample_dimension('element') >>> f.nc_sample_dimension_groups() () >>> f.nc_set_sample_dimension_groups(['forecast', 'model']) >>> f.nc_sample_dimension_groups() ('forecast', 'model') >>> f.nc_get_sample_dimension() '/forecast/model/element' >>> f.nc_clear_sample_dimension_groups() ('forecast', 'model') >>> f.nc_get_sample_dimension() 'element' >>> f.nc_set_sample_dimension('/forecast/model/element') >>> f.nc_sample_dimension_groups() ('forecast', 'model') >>> f.nc_del_sample_dimension('/forecast/model/element') '/forecast/model/element' >>> f.nc_sample_dimension_groups() () """ return self._nc_clear_groups( nc_get=self.nc_get_sample_dimension, nc_set=self.nc_set_sample_dimension, nc_groups=self.nc_sample_dimension_groups, ) class NetCDFGlobalAttributes(NetCDF): """Mixin class for accessing netCDF global attributes. .. versionadded:: (cfdm) 1.7.0 """ def nc_global_attributes(self, values=False): """Returns properties to write as netCDF global attributes. When multiple field constructs are being written to the same file, it is only possible to create a netCDF global attribute from a property that has identical values for each field construct. If any field construct's property has a different value then the property will not be written as a netCDF global attribute, even if it has been selected as such, but will appear instead as attributes on the netCDF data variables corresponding to each field construct. The standard description-of-file-contents properties are always written as netCDF global attributes, if possible, so selecting them is optional. .. versionadded:: (cfdm) 1.7.0 .. seealso:: `write`, `nc_clear_global_attributes`, `nc_set_global_attribute`, `nc_set_global_attributes` :Parameters: values: `bool`, optional Return the value (rather than `None`) for any global attribute that has, by definition, the same value as a construct property. .. versionadded:: (cfdm) 1.8.2 :Returns: `dict` The selection of properties requested for writing to netCDF global attributes. **Examples:** >>> f.nc_global_attributes() {'Conventions': None, 'comment': None} >>> f.nc_set_global_attribute('foo') >>> f.nc_global_attributes() {'Conventions': None, 'comment': None, 'foo': None} >>> f.nc_set_global_attribute('comment', 'global comment') >>> f.nc_global_attributes() {'Conventions': None, 'comment': 'global_comment', 'foo': None} >>> f.nc_global_attributes(values=True) {'Conventions': 'CF-1.8', 'comment': 'global_comment', 'foo': 'bar'} >>> f.nc_clear_global_attributes() {'Conventions': None, 'comment': 'global_comment', 'foo': None} >>> f.nc_global_attributes() {} """ out = self._get_component("netcdf").get("global_attributes") if out is None: return {} out = out.copy() if values: # Replace a None value with the value from the variable # properties properties = self.properties() if properties: for prop, value in out.items(): if value is None and prop in properties: out[prop] = properties[prop] return out def nc_clear_global_attributes(self): """Removes properties to write as netCDF global attributes. When multiple field constructs are being written to the same file, it is only possible to create a netCDF global attribute from a property that has identical values for each field construct. If any field construct's property has a different value then the property will not be written as a netCDF global attribute, even if it has been selected as such, but will appear instead as attributes on the netCDF data variables corresponding to each field construct. 
The standard description-of-file-contents properties are always written as netCDF global attributes, if possible, so selecting them is optional. .. versionadded:: (cfdm) 1.7.0 .. seealso:: `write`, `nc_global_attributes`, `nc_set_global_attribute`, `nc_set_global_attributes` :Returns: `dict` The removed selection of properties requested for writing to netCDF global attributes. **Examples:** >>> f.nc_global_attributes() {'Conventions': None, 'comment': None} >>> f.nc_set_global_attribute('foo') >>> f.nc_global_attributes() {'Conventions': None, 'comment': None, 'foo': None} >>> f.nc_set_global_attribute('comment', 'global comment') >>> f.nc_global_attributes() {'Conventions': None, 'comment': 'global_comment', 'foo': None} >>> f.nc_clear_global_attributes() {'Conventions': None, 'comment': 'global_comment', 'foo': None} >>> f.nc_global_attributes() {} """ out = self._get_component("netcdf").get("global_attributes") if out is None: out = {} self._get_component("netcdf")["global_attributes"] = {} return out def nc_set_global_attribute(self, prop, value=None): """Select a property to be written as a netCDF global attribute. When multiple field constructs are being written to the same file, it is only possible to create a netCDF global attribute from a property that has identical values for each field construct. If any field construct's property has a different value then the property will not be written as a netCDF global attribute, even if it has been selected as such, but will appear instead as attributes on the netCDF data variables corresponding to each field construct. The standard description-of-file-contents properties are always written as netCDF global attributes, if possible, so selecting them is optional. .. versionadded:: (cfdm) 1.7.0 .. seealso:: `write`, `nc_global_attributes`, `nc_clear_global_attributes`, `nc_set_global_attributes` :Parameters: prop: `str` Select the property to be written (if possible) as a netCDF global attribute. value: optional The value of the netCDF global attribute, which will be created (if possible) in addition to the property as written to a netCDF data variable. If unset (or `None`) then this acts as an instruction to write the property (if possible) to a netCDF global attribute instead of to a netCDF data variable. :Returns: `None` **Examples:** >>> f.nc_global_attributes() {'Conventions': None, 'comment': None} >>> f.nc_set_global_attribute('foo') >>> f.nc_global_attributes() {'Conventions': None, 'comment': None, 'foo': None} >>> f.nc_set_global_attribute('comment', 'global comment') >>> f.nc_global_attributes() {'Conventions': None, 'comment': 'global_comment', 'foo': None} >>> f.nc_clear_global_attributes() {'Conventions': None, 'comment': 'global_comment', 'foo': None} >>> f.nc_global_attributes() {} """ out = self._get_component("netcdf").get("global_attributes") if out is None: out = {} out[prop] = value self._get_component("netcdf")["global_attributes"] = out def nc_set_global_attributes(self, properties, copy=True): """Set properties to be written as netCDF global attributes. When multiple field constructs are being written to the same file, it is only possible to create a netCDF global attribute from a property that has identical values for each field construct. If any field construct's property has a different value then the property will not be written as a netCDF global attribute, even if it has been selected as such, but will appear instead as attributes on the netCDF data variables corresponding to each field construct. 
The standard description-of-file-contents properties are always written as netCDF global attributes, if possible, so selecting them is optional. .. versionadded:: (cfdm) 1.7.10 .. seealso:: `write`, `nc_clear_global_attributes`, `nc_global_attributes`, `nc_set_global_attribute` :Parameters: properties: `dict` Set the properties be written as a netCDF global attribute from the dictionary supplied. The value of a netCDF global attribute, which will be created (if possible) in addition to the property as written to a netCDF data variable. If a value of `None` is used then this acts as an instruction to write the property (if possible) to a netCDF global attribute instead of to a netCDF data variable. *Parameter example:* ``properties={'Conventions': None, 'project': 'research'}`` copy: `bool`, optional If False then any property values provided by the *properties* parameter are not copied before insertion. By default they are deep copied. :Returns: `None` **Examples:** >>> f.nc_global_attributes() {'Conventions': None, 'comment': None} >>> f.nc_set_global_attributes({}) >>> f.nc_set_global_attributes({'foo': None}) >>> f.nc_global_attributes() {'Conventions': None, 'comment': None, 'foo': None} >>> f.nc_set_global_attributes('comment', 'global comment') >>> f.nc_global_attributes() {'Conventions': None, 'comment': 'global_comment', 'foo': None} >>> f.nc_set_global_attributes('foo', 'bar') >>> f.nc_global_attributes() {'Conventions': None, 'comment': 'global_comment', 'foo': 'bar'} """ if copy: properties = deepcopy(properties) else: properties = properties.copy() out = self._get_component("netcdf").get("global_attributes") if out is None: out = {} out.update(properties) self._get_component("netcdf")["global_attributes"] = out class NetCDFGroupAttributes(NetCDF): """Mixin class for accessing netCDF group attributes. .. versionadded:: (cfdm) 1.8.6 """ def nc_group_attributes(self, values=False): """Returns properties to write as netCDF group attributes. .. versionadded:: (cfdm) 1.8.6 .. seealso:: `write`, `nc_clear_group_attributes`, `nc_set_group_attribute`, `nc_set_group_attributes` :Parameters: values: `bool`, optional Return the value (rather than `None`) for any group attribute that has, by definition, the same value as a construct property. :Returns: `dict` The selection of properties requested for writing to netCDF group attributes. **Examples:** >>> f.nc_group_attributes() {'comment': None} >>> f.nc_set_group_attribute('foo') >>> f.nc_group_attributes() {'comment': None, 'foo': None} >>> f.nc_set_group_attribute('foo', 'bar') >>> f.nc_group_attributes() {'comment': None, 'foo': 'bar'} >>> f.nc_group_attributes(values=True) {'comment': 'forecast comment', 'foo': 'bar'} >>> f.nc_clear_group_attributes() {'comment': None, 'foo': 'bar'} >>> f.nc_group_attributes() {} """ out = self._get_component("netcdf").get("group_attributes") if out is None: return {} out = out.copy() if values: # Replace a None value with the value from the variable # properties properties = self.properties() if properties: for prop, value in out.items(): if value is None and prop in properties: out[prop] = properties[prop] return out def nc_clear_group_attributes(self): """Removes properties to write as netCDF group attributes. .. versionadded:: (cfdm) 1.8.6 .. seealso:: `write`, `nc_group_attributes`, `nc_set_group_attribute`, `nc_set_group_attributes` :Returns: `dict` The removed selection of properties requested for writing to netCDF group attributes. 
**Examples:** >>> f.nc_group_attributes() {'comment': None} >>> f.nc_set_group_attribute('foo') >>> f.nc_group_attributes() {'comment': None, 'foo': None} >>> f.nc_set_group_attribute('foo', 'bar') >>> f.nc_group_attributes() {'comment': None, 'foo': 'bar'} >>> f.nc_group_attributes(values=True) {'comment': 'forecast comment', 'foo': 'bar'} >>> f.nc_clear_group_attributes() {'comment': None, 'foo': 'bar'} >>> f.nc_group_attributes() {} """ out = self._get_component("netcdf").get("group_attributes") if out is None: out = {} self._get_component("netcdf")["group_attributes"] = {} return out def nc_set_group_attribute(self, prop, value=None): """Select a property to be written as a netCDF group attribute. .. versionadded:: (cfdm) 1.8.6 .. seealso:: `write`, `nc_group_attributes`, `nc_clear_group_attributes`, `nc_set_group_attributes` :Parameters: prop: `str` Select the property to be written (if possible) as a netCDF group attribute. value: optional The value of the netCDF group attribute, which will be created (if possible) in addition to the property as written to a netCDF data variable. If unset (or `None`) then this acts as an instruction to write the property (if possible) to a netCDF group attribute instead of to a netCDF data variable. :Returns: `None` **Examples:** >>> f.nc_group_attributes() {'comment': None} >>> f.nc_set_group_attribute('foo') >>> f.nc_group_attributes() {'comment': None, 'foo': None} >>> f.nc_set_group_attribute('foo', 'bar') >>> f.nc_group_attributes() {'comment': None, 'foo': 'bar'} >>> f.nc_group_attributes(values=True) {'comment': 'forecast comment', 'foo': 'bar'} >>> f.nc_clear_group_attributes() {'comment': None, 'foo': 'bar'} >>> f.nc_group_attributes() {} """ out = self._get_component("netcdf").get("group_attributes") if out is None: out = {} out[prop] = value self._get_component("netcdf")["group_attributes"] = out def nc_set_group_attributes(self, properties, copy=True): """Set properties to be written as netCDF group attributes. .. versionadded:: (cfdm) 1.8.6 .. seealso:: `write`, `nc_clear_group_attributes`, `nc_group_attributes`, `nc_set_group_attribute` :Parameters: properties: `dict` Set the properties to be written as netCDF group attributes from the supplied dictionary. Each dictionary value gives the value of the netCDF group attribute, which will be created (if possible) in addition to the property written to a netCDF data variable. If a value of `None` is used then this acts as an instruction to write the property (if possible) to a netCDF group attribute instead of to a netCDF data variable. *Parameter example:* ``properties={'Conventions': None, 'project': 'research'}`` copy: `bool`, optional If False then any property values provided by the *properties* parameter are not copied before insertion. By default they are deep copied. 
:Returns: `None` **Examples:** >>> f.nc_group_attributes() {'comment': None} >>> f.nc_set_group_attribute('foo') >>> f.nc_group_attributes() {'comment': None, 'foo': None} >>> f.nc_set_group_attribute('foo', 'bar') >>> f.nc_group_attributes() {'comment': None, 'foo': 'bar'} >>> f.nc_group_attributes(values=True) {'comment': 'forecast comment', 'foo': 'bar'} >>> f.nc_clear_group_attributes() {'comment': None, 'foo': 'bar'} >>> f.nc_group_attributes() {} """ if copy: properties = deepcopy(properties) else: properties = properties.copy() out = self._get_component("netcdf").get("group_attributes") if out is None: out = {} out.update(properties) self._get_component("netcdf")["group_attributes"] = out class NetCDFUnlimitedDimensions(NetCDF): """Mixin class for accessing netCDF unlimited dimensions. .. versionadded:: (cfdm) 1.7.0 Deprecated at version 1.7.4 """ def nc_unlimited_dimensions(self): """Returns domain axes to write as netCDF unlimited dimensions. By default output netCDF dimensions are not unlimited. .. versionadded:: (cfdm) 1.7.0 Deprecated at version 1.7.4 .. seealso:: `write`, `nc_clear_unlimited_dimensions`, `nc_set_unlimited_dimensions` :Returns: `set` The selection of domain axis constructs to be written as netCDF unlimited dimensions. **Examples:** >>> f.nc_set_unlimited_dimensions(['domainaxis0']) >>> f.nc_unlimited_dimensions() {'domainaxis0'} >>> f.nc_set_unlimited_dimensions(['domainaxis1']) >>> f.nc_unlimited_dimensions() {'domainaxis0', 'domainaxis1'} >>> f.nc_clear_unlimited_dimensions() {'domainaxis0', 'domainaxis1'} >>> f.nc_unlimited_dimensions() set() """ raise DeprecationError( "Field.nc_unlimited_dimensions was deprecated at version 1.7.4 " "and is no longer available. Use DomainAxis.nc_is_unlimited " "instead." ) def nc_set_unlimited_dimensions(self, axes): """Selects domain axes to write as netCDF unlimited dimensions. By default output netCDF dimensions are not unlimited. .. versionadded:: (cfdm) 1.7.0 Deprecated at version 1.7.4 .. seealso:: `write`, `nc_unlimited_dimensions`, `nc_clear_unlimited_dimensions` :Parameters: axes: sequence of `str`, optional Select the domain axis constructs from the sequence provided. Domain axis constructs are identified by their construct identifiers. *Parameter example:* ``axes=['domainaxis0', 'domainaxis1']`` *Parameter example:* ``axes=()`` :Returns: `None` **Examples:** >>> f.nc_set_unlimited_dimensions(['domainaxis0']) >>> f.nc_unlimited_dimensions() {'domainaxis0'} >>> f.nc_set_unlimited_dimensions(['domainaxis1']) >>> f.nc_unlimited_dimensions() {'domainaxis0', 'domainaxis1'} >>> f.nc_clear_unlimited_dimensions() {'domainaxis0', 'domainaxis1'} >>> f.nc_unlimited_dimensions() set() """ raise DeprecationError( "Field.nc_set_unlimited_dimensions was deprecated at version " "1.7.4 and is no longer available. " "Use DomainAxis.nc_set_unlimited instead." ) def nc_clear_unlimited_dimensions(self): """Removes domain axes to write as netCDF unlimited dimensions. By default output netCDF dimensions are not unlimited. .. versionadded:: (cfdm) 1.7.0 Deprecated at version 1.7.4 .. seealso:: `write`, `nc_unlimited_dimensions`, `nc_set_unlimited_dimensions` :Returns: `set` The selection of domain axis constructs that has been removed. 
**Examples:** >>> f.nc_set_unlimited_dimensions(['domainaxis0']) >>> f.nc_unlimited_dimensions() {'domainaxis0'} >>> f.nc_set_unlimited_dimensions(['domainaxis1']) >>> f.nc_unlimited_dimensions() {'domainaxis0', 'domainaxis1'} >>> f.nc_clear_unlimited_dimensions() {'domainaxis0', 'domainaxis1'} >>> f.nc_unlimited_dimensions() set() """ raise DeprecationError( "Field.nc_clear_unlimited_dimensions was deprecated at version " "1.7.4 and is no longer available. " "Use DomainAxis.nc_set_unlimited instead." ) class NetCDFExternal(NetCDF): """Mixin class for accessing the netCDF external variable status. .. versionadded:: (cfdm) 1.7.0 """ def nc_get_external(self): """Whether a construct matches an external netCDF variable. .. versionadded:: (cfdm) 1.7.0 .. seealso:: `nc_set_external` :Returns: `bool` The external status. **Examples:** >>> c.nc_get_external() False >>> c.nc_set_external(True) >>> c.nc_get_external() True """ return self._get_component("netcdf").get("external", False) def nc_set_external(self, external): """Set external status of a netCDF variable. .. versionadded:: (cfdm) 1.7.0 .. seealso:: `nc_get_external` :Parameters: external: `bool`, optional Set the external status. *Parameter example:* ``external=True`` :Returns: `None` **Examples:** >>> c.nc_get_external() False >>> c.nc_set_external(True) >>> c.nc_get_external() True """ self._get_component("netcdf")["external"] = bool(external) class NetCDFGeometry(NetCDF, _NetCDFGroupsMixin): """Mixin to access the netCDF geometry container variable name. .. versionadded:: (cfdm) 1.8.0 """ def nc_del_geometry_variable(self, default=ValueError()): """Remove the netCDF geometry container variable name. .. versionadded:: (cfdm) 1.8.0 .. seealso:: `nc_get_geometry_variable`, `nc_has_geometry_variable`, `nc_set_geometry_variable` :Parameters: default: optional Return the value of the *default* parameter if the netCDF geometry container variable name has not been set. If set to an `Exception` instance then it will be raised instead. :Returns: `str` The removed netCDF geometry container variable name. **Examples:** >>> f.nc_set_geometry_variable('geometry') >>> f.nc_has_geometry_variable() True >>> f.nc_get_geometry_variable() 'geometry' >>> f.nc_del_geometry_variable() 'geometry' >>> f.nc_has_geometry_variable() False >>> print(f.nc_get_geometry_variable(None)) None >>> print(f.nc_del_geometry_variable(None)) None """ try: return self._get_component("netcdf").pop("geometry_variable") except KeyError: if default is None: return default return self._default( default, f"{self.__class__.__name__} has no netCDF geometry variable name", ) def nc_get_geometry_variable(self, default=ValueError()): """Return the netCDF geometry container variable name. .. versionadded:: (cfdm) 1.8.0 .. seealso:: `nc_del_geometry_variable`, `nc_has_geometry_variable`, `nc_set_geometry_variable` :Parameters: default: optional Return the value of the *default* parameter if the netCDF geometry container variable name has not been set. If set to an `Exception` instance then it will be raised instead. :Returns: `str` The netCDF geometry container variable name. 
**Examples:** >>> f.nc_set_geometry_variable('geometry') >>> f.nc_has_geometry_variable() True >>> f.nc_get_geometry_variable() 'geometry' >>> f.nc_del_geometry_variable() 'geometry' >>> f.nc_has_geometry_variable() False >>> print(f.nc_get_geometry_variable(None)) None >>> print(f.nc_del_geometry_variable(None)) None """ try: return self._get_component("netcdf")["geometry_variable"] except KeyError: if default is None: return default return self._default( default, f"{self.__class__.__name__} has no netCDF geometry variable name", ) def nc_has_geometry_variable(self): """Whether a netCDF geometry container variable has a name. .. versionadded:: (cfdm) 1.8.0 .. seealso:: `nc_del_geometry_variable`, `nc_get_geometry_variable`, `nc_set_geometry_variable` :Returns: `bool` `True` if the netCDF geometry container variable name has been set, otherwise `False`. **Examples:** >>> f.nc_set_geometry_variable('geometry') >>> f.nc_has_geometry_variable() True >>> f.nc_get_geometry_variable() 'geometry' >>> f.nc_del_geometry_variable() 'geometry' >>> f.nc_has_geometry_variable() False >>> print(f.nc_get_geometry_variable(None)) None >>> print(f.nc_del_geometry_variable(None)) None """ return "geometry_variable" in self._get_component("netcdf") def nc_set_geometry_variable(self, value): """Set the netCDF geometry container variable name. If there are any ``/`` (slash) characters in the netCDF name then these act as delimiters for a group hierarchy. By default, or if the name starts with a ``/`` character and contains no others, the name is assumed to be in the root group. .. versionadded:: (cfdm) 1.8.0 .. seealso:: `nc_del_geometry_variable`, `nc_get_geometry_variable`, `nc_has_geometry_variable` :Parameters: value: `str` The value for the netCDF geometry container variable name. :Returns: `None` **Examples:** >>> f.nc_set_geometry_variable('geometry') >>> f.nc_has_geometry_variable() True >>> f.nc_get_geometry_variable() 'geometry' >>> f.nc_del_geometry_variable() 'geometry' >>> f.nc_has_geometry_variable() False >>> print(f.nc_get_geometry_variable(None)) None >>> print(f.nc_del_geometry_variable(None)) None """ if not value or value == "/": raise ValueError( f"Invalid netCDF geometry variable name: {value!r}" ) if "/" in value: if not value.startswith("/"): raise ValueError( "A netCDF geometry variable name with a group structure " f"must start with a '/'. Got {value!r}" ) if value.count("/") == 1: value = value[1:] elif value.endswith("/"): raise ValueError( "A netCDF geometry variable name with a group structure " f"can't end with a '/'. Got {value!r}" ) self._get_component("netcdf")["geometry_variable"] = value def nc_geometry_variable_groups(self): """Return the netCDF geometry variable group hierarchy. The group hierarchy is defined by the netCDF name. Groups are delimited by ``/`` (slash) characters in the netCDF name. The groups are returned, in hierarchical order, as a sequence of strings. If the name is not set, or contains no ``/`` characters then an empty sequence is returned, signifying the root group. .. versionadded:: (cfdm) 1.8.6 .. seealso:: `nc_clear_geometry_variable_groups`, `nc_set_geometry_variable_groups` :Returns: `tuple` of `str` The group structure. 
**Examples:** >>> f.nc_set_geometry_variable('geometry1') >>> f.nc_geometry_variable_groups() () >>> f.nc_set_geometry_variable_groups(['forecast', 'model']) >>> f.nc_geometry_variable_groups() ('forecast', 'model') >>> f.nc_get_geometry_variable() '/forecast/model/geometry1' >>> f.nc_clear_geometry_variable_groups() ('forecast', 'model') >>> f.nc_get_geometry_variable() 'geometry1' >>> f.nc_set_geometry_variable('/forecast/model/geometry1') >>> f.nc_geometry_variable_groups() ('forecast', 'model') >>> f.nc_del_geometry_variable('/forecast/model/geometry1') '/forecast/model/geometry1' >>> f.nc_geometry_variable_groups() () """ return self._nc_groups(nc_get=self.nc_get_geometry_variable) def nc_set_geometry_variable_groups(self, groups): """Set the netCDF geometry variable group hierarchy. The group hierarchy is defined by the netCDF name. Groups are delimited by ``/`` (slash) characters in the netCDF name. The groups are returned, in hierarchical order, as a sequence of strings. If the name is not set, or contains no ``/`` characters then an empty sequence is returned, signifying the root group. An alternative technique for setting the group structure is to set the netCDF variable name, with `nc_set_geometry_variable`, with the group structure delimited by ``/`` characters. .. versionadded:: (cfdm) 1.8.6 .. seealso:: `nc_clear_geometry_variable_groups`, `nc_geometry_variable_groups` :Parameters: groups: sequence of `str` The new group structure. :Returns: `tuple` of `str` The group structure prior to being reset. **Examples:** >>> f.nc_set_geometry_variable('geometry1') >>> f.nc_geometry_variable_groups() () >>> f.nc_set_geometry_variable_groups(['forecast', 'model']) >>> f.nc_geometry_variable_groups() ('forecast', 'model') >>> f.nc_get_geometry_variable() '/forecast/model/geometry1' >>> f.nc_clear_geometry_variable_groups() ('forecast', 'model') >>> f.nc_get_geometry_variable() 'geometry1' >>> f.nc_set_geometry_variable('/forecast/model/geometry1') >>> f.nc_geometry_variable_groups() ('forecast', 'model') >>> f.nc_del_geometry_variable('/forecast/model/geometry1') '/forecast/model/geometry1' >>> f.nc_geometry_variable_groups() () """ return self._nc_set_groups( groups, nc_get=self.nc_get_geometry_variable, nc_set=self.nc_set_geometry_variable, nc_groups=self.nc_geometry_variable_groups, ) def nc_clear_geometry_variable_groups(self): """Remove the netCDF geometry variable group hierarchy. The group hierarchy is defined by the netCDF name. Groups are delimited by ``/`` (slash) characters in the netCDF name. The groups are returned, in hierarchical order, as a sequence of strings. If the name is not set, or contains no ``/`` characters then an empty sequence is returned, signifying the root group. An alternative technique for removing the group structure is to set the netCDF variable name, with `nc_set_geometry_variable`, with no ``/`` characters. .. versionadded:: (cfdm) 1.8.6 .. seealso:: `nc_geometry_variable_groups`, `nc_set_geometry_variable_groups` :Returns: `tuple` of `str` The removed group structure. 
**Examples:** >>> f.nc_set_geometry_variable('geometry1') >>> f.nc_geometry_variable_groups() () >>> f.nc_set_geometry_variable_groups(['forecast', 'model']) >>> f.nc_geometry_variable_groups() ('forecast', 'model') >>> f.nc_get_geometry_variable() '/forecast/model/geometry1' >>> f.nc_clear_geometry_variable_groups() ('forecast', 'model') >>> f.nc_get_geometry_variable() 'geometry1' >>> f.nc_set_geometry_variable('/forecast/model/geometry1') >>> f.nc_geometry_variable_groups() ('forecast', 'model') >>> f.nc_del_geometry_variable('/forecast/model/geometry1') '/forecast/model/geometry1' >>> f.nc_geometry_variable_groups() () """ return self._nc_clear_groups( nc_get=self.nc_get_geometry_variable, nc_set=self.nc_set_geometry_variable, nc_groups=self.nc_geometry_variable_groups, ) class NetCDFHDF5(NetCDF): """Mixin class for accessing the netCDF HDF5 chunksizes. .. versionadded:: (cfdm) 1.7.2 """ def nc_hdf5_chunksizes(self): """Return the HDF5 chunksizes for the data. .. note:: Chunksizes are cleared from the output of methods that change the data shape. .. note:: Chunksizes are ignored for netCDF3 files that do not use HDF5. .. versionadded:: (cfdm) 1.7.2 .. seealso:: `nc_clear_hdf5_chunksizes`, `nc_set_hdf5_chunksizes` :Returns: `tuple` The current chunksizes. **Examples:** >>> d.shape (1, 96, 73) >>> d.nc_set_hdf5_chunksizes([1, 48, 73]) >>> d.nc_hdf5_chunksizes() (1, 48, 73) >>> d.nc_clear_hdf5_chunksizes() (1, 48, 73) >>> d.nc_hdf5_chunksizes() () """ return self._get_component("netcdf").get("hdf5_chunksizes", ()) def nc_clear_hdf5_chunksizes(self): """Clear the HDF5 chunksizes for the data. .. note:: Chunksizes are cleared from the output of methods that change the data shape. .. note:: Chunksizes are ignored for netCDF3 files that do not use HDF5. .. versionadded:: (cfdm) 1.7.2 .. seealso:: `nc_hdf5_chunksizes`, `nc_set_hdf5_chunksizes` :Returns: `tuple` The chunksizes defined prior to being cleared. **Examples:** >>> d.shape (1, 96, 73) >>> d.nc_set_hdf5_chunksizes([1, 48, 73]) >>> d.nc_hdf5_chunksizes() (1, 48, 73) >>> d.nc_clear_hdf5_chunksizes() (1, 48, 73) >>> d.nc_hdf5_chunksizes() () """ return self._get_component("netcdf").pop("hdf5_chunksizes", ()) def nc_set_hdf5_chunksizes(self, chunksizes): """Set the HDF5 chunksizes for the data. .. note:: Chunksizes are cleared from the output of methods that change the data shape. .. note:: Chunksizes are ignored for netCDF3 files that do not use HDF5. .. versionadded:: (cfdm) 1.7.2 .. seealso:: `nc_hdf5_chunksizes`, `nc_clear_hdf5_chunksizes` :Parameters: chunksizes: sequence of `int` The chunksizes for each dimension. Can be integers from 0 to the dimension size. :Returns: `None` **Examples:** >>> d.shape (1, 96, 73) >>> d.nc_set_hdf5_chunksizes([1, 48, 73]) >>> d.nc_hdf5_chunksizes() (1, 48, 73) >>> d.nc_clear_hdf5_chunksizes() (1, 48, 73) >>> d.nc_hdf5_chunksizes() () """ try: shape = self.shape except AttributeError: pass else: if len(chunksizes) != len(shape): raise ValueError( "chunksizes must be a sequence with the same length " "as dimensions" ) for i, j in zip(chunksizes, shape): if i < 0: raise ValueError("chunksize cannot be negative") if i > j: raise ValueError("chunksize cannot exceed dimension size") self._get_component("netcdf")["hdf5_chunksizes"] = tuple(chunksizes) class NetCDFUnlimitedDimension(NetCDF): """Mixin class for accessing a netCDF unlimited dimension. .. versionadded:: (cfdm) 1.7.4 """ def nc_is_unlimited(self): """Inspect the unlimited status of a netCDF dimension. 
By default output netCDF dimensions are not unlimited. The status is used by the `write` function. .. versionadded:: (cfdm) 1.7.4 .. seealso:: `nc_set_unlimited` :Returns: `bool` The existing unlimited status. True and False signify "unlimited" and "not unlimited" respectively. **Examples:** >>> da = f.domain_axis('domainaxis1') >>> da.nc_is_unlimited() False >>> da.nc_set_unlimited(True) >>> da.nc_is_unlimited() True >>> da.nc_set_unlimited(False) >>> da.nc_is_unlimited() False """ return self._get_component("netcdf").get("unlimited", False) def nc_set_unlimited(self, value): """Set the unlimited status of a netCDF dimension. By default output netCDF dimensions are not unlimited. The status is used by the `write` function. .. versionadded:: (cfdm) 1.7.4 .. seealso:: `nc_is_unlimited` :Parameters: value: `bool` The new unlimited status. True and False signify "unlimited" and "not unlimited" respectively. :Returns: `None` **Examples:** >>> da = f.domain_axis('domainaxis1') >>> da.nc_is_unlimited() False >>> da.nc_set_unlimited(True) >>> da.nc_is_unlimited() True >>> da.nc_set_unlimited(False) >>> da.nc_is_unlimited() False """ self._get_component("netcdf")["unlimited"] = bool(value) class NetCDFComponents(NetCDF): """Mixin class for netCDF features common to many constructs. Accesses netCDF names consistently across multiple metadata constructs. Assumes that the mixin classes `NetCDFDimension` and `NetCDFVariable` have also been subclassed. Assumes that the methods `_get_data_compression_variables` and `_get_coordinate_geometry_variables` have been defined elsewhere. .. versionadded:: (cfdm) 1.8.9.0 """ def nc_set_component_variable(self, component, value): """Set the netCDF variable name for components. Sets the netCDF variable name for all components of a given type. Some components exist within multiple constructs, but when written to a netCDF dataset the netCDF names associated with such components will be arbitrarily taken from one of them. The netCDF names can be set on all such occurrences individually, or preferably by using this method to ensure consistency across all such components. .. versionadded:: (cfdm) 1.8.6.0 .. seealso:: `nc_del_component_variable`, `nc_set_component_variable_groups`, `nc_clear_component_variable_groups` :Parameters: component: `str` Specify the component type. One of: ===================== =============================== *component* Description ===================== =============================== ``'interior_ring'`` Interior ring variables for geometry coordinates ``'node_count'`` Node count variables for geometry coordinates ``'part_node_count'`` Part node count variables for geometry coordinates ``'count'`` Count variables for contiguous ragged arrays ``'index'`` Index variables for indexed ragged arrays ``'list'`` List variables for compression by gathering ===================== =============================== value: `str` The netCDF variable name to be set for each component. :Returns: `None` **Examples:** >>> f.nc_set_component_variable('interior_ring', 'interiorring_1') """ if component in ("count", "index", "list"): variables = self._get_data_compression_variables(component) elif component in ("interior_ring", "node_count", "part_node_count"): variables = self._get_coordinate_geometry_variables(component) else: raise ValueError(f"Invalid component: {component!r}") for v in variables: v.nc_set_variable(value) def nc_del_component_variable(self, component): """Remove the netCDF variable name of components. 
Removes the netCDF variable name for all components of a given type. Some components exist within multiple constructs, but when written to a netCDF dataset the netCDF names associated with such components will be arbitrarily taken from one of them. The netCDF names can be set on all such occurrences individually, or preferably by using this method to ensure consistency across all such components. .. versionadded:: (cfdm) 1.8.6.0 .. seealso:: `nc_set_component_variable`, `nc_set_component_variable_groups`, `nc_clear_component_variable_groups` :Parameters: component: `str` Specify the component type. One of: ===================== =============================== *component* Description ===================== =============================== ``'interior_ring'`` Interior ring variables for geometry coordinates ``'node_count'`` Node count variables for geometry coordinates ``'part_node_count'`` Part node count variables for geometry coordinates ``'count'`` Count variables for contiguous ragged arrays ``'index'`` Index variables for indexed ragged arrays ``'list'`` List variables for compression by gathering ===================== =============================== :Returns: `None` **Examples:** >>> f.nc_del_component_variable('interior_ring') """ if component in ("count", "index", "list"): variables = self._get_data_compression_variables(component) elif component in ("interior_ring", "node_count", "part_node_count"): variables = self._get_coordinate_geometry_variables(component) else: raise ValueError(f"Invalid component: {component!r}") for v in variables: v.nc_del_variable(None) def nc_set_component_variable_groups(self, component, groups): """Set the netCDF variable groups of components. Sets the netCDF variable groups for all components of a given type. Some components exist within multiple constructs, but when written to a netCDF dataset the netCDF names associated with such components will be arbitrarily taken from one of them. The netCDF names can be set on all such occurrences individually, or preferably by using this method to ensure consistency across all such components. .. versionadded:: (cfdm) 1.8.6.0 .. seealso:: `nc_del_component_variable`, `nc_set_component_variable`, `nc_clear_component_variable_groups` :Parameters: component: `str` Specify the component type. One of: ===================== =============================== *component* Description ===================== =============================== ``'interior_ring'`` Interior ring variables for geometry coordinates ``'node_count'`` Node count variables for geometry coordinates ``'part_node_count'`` Part node count variables for geometry coordinates ``'count'`` Count variables for contiguous ragged arrays ``'index'`` Index variables for indexed ragged arrays ``'list'`` List variables for compression by gathering ===================== =============================== groups: sequence of `str` The new group structure for each component. :Returns: `None` **Examples:** >>> f.nc_set_component_variable_groups('interior_ring', ['forecast']) """ if component in ("count", "index", "list"): variables = self._get_data_compression_variables(component) elif component in ("interior_ring", "node_count", "part_node_count"): variables = self._get_coordinate_geometry_variables(component) else: raise ValueError(f"Invalid component: {component!r}") for v in variables: v.nc_set_variable_groups(groups) def nc_clear_component_variable_groups(self, component): """Remove the netCDF variable groups of components. 
Removes the netCDF variable groups for all components of a given type. Some components exist within multiple constructs, but when written to a netCDF dataset the netCDF names associated with such components will be arbitrarily taken from one of them. The netCDF names can be set on all such occurrences individually, or preferably by using this method to ensure consistency across all such components. .. versionadded:: (cfdm) 1.8.6.0 .. seealso:: `nc_del_component_variable`, `nc_set_component_variable`, `nc_set_component_variable_groups` :Parameters: component: `str` Specify the component type. One of: ===================== =============================== *component* Description ===================== =============================== ``'interior_ring'`` Interior ring variables for geometry coordinates ``'node_count'`` Node count variables for geometry coordinates ``'part_node_count'`` Part node count variables for geometry coordinates ``'count'`` Count variables for contiguous ragged arrays ``'index'`` Index variables for indexed ragged arrays ``'list'`` List variables for compression by gathering ===================== =============================== :Returns: `None` **Examples:** >>> f.nc_clear_component_variable_groups('interior_ring') """ if component in ("count", "index", "list"): variables = self._get_data_compression_variables(component) elif component in ("interior_ring", "node_count", "part_node_count"): variables = self._get_coordinate_geometry_variables(component) else: raise ValueError(f"Invalid component: {component!r}") for v in variables: v.nc_clear_variable_groups() def nc_set_component_dimension(self, component, value): """Set the netCDF dimension name of components. Sets the netCDF dimension name for all components of a given type. Some components exist within multiple constructs, but when written to a netCDF dataset the netCDF names associated with such components will be arbitrarily taken from one of them. The netCDF names can be set on all such occurrences individually, or preferably by using this method to ensure consistency across all such components. .. versionadded:: (cfdm) 1.8.6.0 .. seealso:: `nc_del_component_dimension`, `nc_set_component_dimension_groups`, `nc_clear_component_dimension_groups` :Parameters: component: `str` Specify the component type. One of: ===================== =============================== *component* Description ===================== =============================== ``'interior_ring'`` Interior ring variables for geometry coordinates ``'part_node_count'`` Part node count variables for geometry coordinates ``'count'`` Count variables for contiguous ragged arrays ``'index'`` Index variables for indexed ragged arrays ===================== =============================== value: `str` The netCDF dimension name to be set for each component. :Returns: `None` **Examples:** >>> f.nc_set_component_dimension('interior_ring', 'part') """ if component in ("count", "index"): variables = self._get_data_compression_variables(component) elif component in ("interior_ring", "part_node_count"): variables = self._get_coordinate_geometry_variables(component) else: raise ValueError(f"Invalid component: {component!r}") for v in variables: v.nc_set_dimension(value) def nc_del_component_dimension(self, component): """Remove the netCDF dimension name of components. Removes the netCDF dimension name for all components of a given type. 
Some components exist within multiple constructs, but when written to a netCDF dataset the netCDF names associated with such components will be arbitrarily taken from one of them. The netCDF names can be set on all such occurrences individually, or preferably by using this method to ensure consistency across all such components. .. versionadded:: (cfdm) 1.8.6.0 .. seealso:: `nc_set_component_dimension`, `nc_set_component_dimension_groups`, `nc_clear_component_dimension_groups` :Parameters: component: `str` Specify the component type. One of: ===================== =============================== *component* Description ===================== =============================== ``'interior_ring'`` Interior ring variables for geometry coordinates ``'part_node_count'`` Part node count variables for geometry coordinates ``'count'`` Count variables for contiguous ragged arrays ``'index'`` Index variables for indexed ragged arrays ===================== =============================== :Returns: `None` **Examples:** >>> f.nc_del_component_dimension('interior_ring') """ if component in ("count", "index"): variables = self._get_data_compression_variables(component) elif component in ("interior_ring", "part_node_count"): variables = self._get_coordinate_geometry_variables(component) else: raise ValueError(f"Invalid component: {component!r}") for v in variables: v.nc_del_dimension(None) def nc_set_component_dimension_groups(self, component, groups): """Set the netCDF dimension groups of components. Sets the netCDF dimension groups for all components of a given type. Some components exist within multiple constructs, but when written to a netCDF dataset the netCDF names associated with such components will be arbitrarily taken from one of them. The netCDF names can be set on all such occurrences individually, or preferably by using this method to ensure consistency across all such components. .. versionadded:: (cfdm) 1.8.6.0 .. seealso:: `nc_del_component_dimension`, `nc_set_component_dimension`, `nc_clear_component_dimension_groups` :Parameters: component: `str` Specify the component type. One of: ===================== =============================== *component* Description ===================== =============================== ``'interior_ring'`` Interior ring variables for geometry coordinates ``'part_node_count'`` Part node count variables for geometry coordinates ``'count'`` Count variables for contiguous ragged arrays ``'index'`` Index variables for indexed ragged arrays ===================== =============================== groups: sequence of `str` The new group structure for each component. :Returns: `None` **Examples:** >>> f.nc_set_component_dimension_groups('interior_ring', ['forecast']) """ if component in ("count", "index"): variables = self._get_data_compression_variables(component) elif component in ("interior_ring", "part_node_count"): variables = self._get_coordinate_geometry_variables(component) else: raise ValueError(f"Invalid component: {component!r}") for v in variables: v.nc_set_dimension_groups(groups) def nc_clear_component_dimension_groups(self, component): """Remove the netCDF dimension groups of components. Removes the netCDF dimension groups for all components of a given type. Some components exist within multiple constructs, but when written to a netCDF dataset the netCDF names associated with such components will be arbitrarily taken from one of them. 
The netCDF names can be set on all such occurrences individually, or preferably by using this method to ensure consistency across all such components. .. versionadded:: (cfdm) 1.8.6.0 .. seealso:: `nc_del_component_dimension`, `nc_set_component_dimension`, `nc_set_component_dimension_groups` :Parameters: component: `str` Specify the component type. One of: ===================== =============================== *component* Description ===================== =============================== ``'interior_ring'`` Interior ring variables for geometry coordinates ``'part_node_count'`` Part node count variables for geometry coordinates ``'count'`` Count variables for contiguous ragged arrays ``'index'`` Index variables for indexed ragged arrays ===================== =============================== :Returns: `None` **Examples:** >>> f.nc_clear_component_dimension_groups('interior_ring') """ if component in ("count", "index"): variables = self._get_data_compression_variables(component) elif component in ("interior_ring", "part_node_count"): variables = self._get_coordinate_geometry_variables(component) else: raise ValueError(f"Invalid component: {component!r}") for v in variables: v.nc_clear_dimension_groups() def nc_set_component_sample_dimension(self, component, value): """Set the netCDF sample dimension name of components. Sets the netCDF sample dimension name for all components of a given type. Some components exist within multiple constructs, but when written to a netCDF dataset the netCDF names associated with such components will be arbitrarily taken from one of them. The netCDF names can be set on all such occurrences individually, or preferably by using this method to ensure consistency across all such components. .. versionadded:: (cfdm) 1.8.6.0 .. seealso:: `nc_del_component_sample_dimension`, `nc_set_component_sample_dimension_groups`, `nc_clear_component_sample_dimension_groups` :Parameters: component: `str` Specify the component type. One of: ===================== =============================== *component* Description ===================== =============================== ``'count'`` Count variables for contiguous ragged arrays ``'index'`` Index variables for indexed ragged arrays ===================== =============================== value: `str` The netCDF sample dimension name to be set for each component. :Returns: `None` **Examples:** >>> f.nc_set_component_sample_dimension('count', 'obs') """ if component in ("count", "index"): variables = self._get_data_compression_variables(component) else: raise ValueError(f"Invalid component: {component!r}") for v in variables: v.nc_set_sample_dimension(value) def nc_del_component_sample_dimension(self, component): """Remove the netCDF sample dimension name of components. Removes the netCDF sample dimension name for all components of a given type. Some components exist within multiple constructs, but when written to a netCDF dataset the netCDF names associated with such components will be arbitrarily taken from one of them. The netCDF names can be set on all such occurrences individually, or preferably by using this method to ensure consistency across all such components. .. versionadded:: (cfdm) 1.8.6.0 .. seealso:: `nc_set_component_sample_dimension`, `nc_set_component_sample_dimension_groups`, `nc_clear_component_sample_dimension_groups` :Parameters: component: `str` Specify the component type. 
One of: ===================== =============================== *component* Description ===================== =============================== ``'count'`` Count variables for contiguous ragged arrays ``'index'`` Index variables for indexed ragged arrays ===================== =============================== :Returns: `None` **Examples:** >>> f.nc_del_component_sample_dimension('count') """ if component in ("count", "index"): variables = self._get_data_compression_variables(component) else: raise ValueError(f"Invalid component: {component!r}") for v in variables: v.nc_del_sample_dimension(None) def nc_set_component_sample_dimension_groups(self, component, groups): """Set the netCDF sample dimension groups of components. Sets the netCDF sample dimension groups for all components of a given type. Some components exist within multiple constructs, but when written to a netCDF dataset the netCDF names associated with such components will be arbitrarily taken from one of them. The netCDF names can be set on all such occurrences individually, or preferably by using this method to ensure consistency across all such components. .. versionadded:: (cfdm) 1.8.6.0 .. seealso:: `nc_del_component_sample_dimension`, `nc_set_component_sample_dimension`, `nc_clear_component_sample_dimension_groups` :Parameters: component: `str` Specify the component type. One of: ===================== =============================== *component* Description ===================== =============================== ``'count'`` Count variables for contiguous ragged arrays ``'index'`` Index variables for indexed ragged arrays ===================== =============================== groups: sequence of `str` The new group structure for each component. :Returns: `None` **Examples:** >>> f.nc_set_component_sample_dimension_groups('count', ['forecast']) """ if component in ("count", "index"): variables = self._get_data_compression_variables(component) else: raise ValueError(f"Invalid component: {component!r}") for v in variables: v.nc_set_sample_dimension_groups(groups) def nc_clear_component_sample_dimension_groups(self, component): """Remove the netCDF sample dimension groups of components. Removes the netCDF sample dimension groups for all components of a given type. Some components exist within multiple constructs, but when written to a netCDF dataset the netCDF names associated with such components will be arbitrarily taken from one of them. The netCDF names can be set on all such occurrences individually, or preferably by using this method to ensure consistency across all such components. .. versionadded:: (cfdm) 1.8.6.0 .. seealso:: `nc_del_component_sample_dimension`, `nc_set_component_sample_dimension`, `nc_set_component_sample_dimension_groups` :Parameters: component: `str` Specify the component type. One of: ===================== =============================== *component* Description ===================== =============================== ``'count'`` Count variables for contiguous ragged arrays ``'index'`` Index variables for indexed ragged arrays ===================== =============================== :Returns: `None` **Examples:** >>> f.nc_clear_component_sample_dimension_groups('count') """ if component in ("count", "index"): variables = self._get_data_compression_variables(component) else: raise ValueError(f"Invalid component: {component!r}") for v in variables: v.nc_clear_sample_dimension_groups() class NetCDFUnreferenced: """Mixin class for constructs of unreferenced netCDF variables. .. 
versionadded:: (cfdm) 1.8.9.0 """ def _set_dataset_compliance(self, value, copy=True): """Set the dataset compliance report. Set the report of problems encountered whilst reading the construct from a dataset. .. versionadded:: (cfdm) 1.7.0 .. seealso:: `dataset_compliance` :Parameters: value: `dict` The value of the ``dataset_compliance`` component. This will be deep copied. copy: `bool`, optional If False then the compliance report dictionary is not copied prior to insertion. :Returns: `None` **Examples:** """ self._set_component("dataset_compliance", value, copy=copy) def dataset_compliance(self, display=False): """Return the dataset compliance report. A report of problems encountered whilst reading the construct from a dataset. If the dataset is partially CF-compliant to the extent that it is not possible to unambiguously map an element of the netCDF dataset to an element of the CF data model, then a construct is still returned by the `read` function, but may be incomplete. Such "structural" non-compliance would occur, for example, if the ``coordinates`` attribute of a CF-netCDF data variable refers to another variable that does not exist, or refers to a variable that spans a netCDF dimension that does not apply to the data variable. Other types of non-compliance are not checked, such as whether or not controlled vocabularies have been adhered to. When a dictionary is returned, the compliance report may be updated by changing the dictionary in-place. .. versionadded:: (cfdm) 1.7.0 .. seealso:: `{{package}}.read`, `_set_dataset_compliance` :Parameters: display: `bool`, optional If True print the compliance report. By default the report is returned as a dictionary. :Returns: `None` or `dict` The report. If *display* is True then the report is printed and `None` is returned. Otherwise the report is returned as a dictionary. **Examples:** If no problems were encountered, an empty dictionary is returned: >>> f = {{package}}.example_field(1) >>> {{package}}.write(f, 'example.nc') >>> g = {{package}}.read('example.nc')[0] >>> g.dataset_compliance() {} """ d = self._get_component("dataset_compliance", {}) if not display: return d if not d: print(d) return for key0, value0 in d.items(): print(f"{{{key0!r}:") print(f" CF version: {value0['CF version']!r},") print(f" dimensions: {value0['dimensions']!r},") print(" non-compliance: {") for key1, value1 in sorted(value0["non-compliance"].items()): for x in value1: print(f" {key1!r}: [") print( " {{{0}}},".format( "\n ".join( [ f"{key2!r}: {value2!r}," for key2, value2 in sorted(x.items()) ] ) ) ) print(" ],") print(" },")
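As a usage illustration for the attribute-selection API above, here is a minimal Python sketch. It assumes cfdm is installed and that `cfdm.example_field` can supply a field construct; the selected property names, the 'domainaxis0' construct key, and the output filename are illustrative choices, not taken from this file.

# Minimal sketch of the attribute-selection workflow described above
# (assumes cfdm is installed; names are illustrative).
import cfdm

f = cfdm.example_field(0)

# The description-of-file-contents properties are selected by default.
print(f.nc_global_attributes())

# Select one property; with no value given, it is written (if possible)
# as a global attribute instead of as a data-variable attribute.
f.nc_set_global_attribute('project')

# Select several properties at once from a dictionary; a non-None value
# becomes the global attribute's value.
f.nc_set_global_attributes({'comment': 'global_comment'})

# Unlimited output dimensions are requested on the domain axis
# construct itself ('domainaxis0' is an assumed construct key).
f.domain_axis('domainaxis0').nc_set_unlimited(True)

cfdm.write(f, 'example_global.nc')

On write, a selected property only becomes a netCDF global attribute if its value is identical across every field construct in the file; otherwise it falls back to a per-variable attribute, as the docstrings above describe.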
31.69042
82
0.549484
11316
107177
5.013521
0.03685
0.017767
0.008672
0.013537
0.908572
0.895898
0.87628
0.844923
0.834488
0.823401
0
0.006266
0.340306
107177
3381
83
31.699793
0.796136
0.641145
0
0.599237
0
0
0.162988
0.017468
0
0
0
0
0
1
0.122137
false
0.003817
0.001908
0
0.257634
0.017176
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
75bb8d1fa809b63ca392da9d2c7936c56c688827
44
py
Python
__init__.py
jhs7jhs/emilator
ebd7ba2601bf2077f6670365778a16d1e550db2f
[ "MIT" ]
null
null
null
__init__.py
jhs7jhs/emilator
ebd7ba2601bf2077f6670365778a16d1e550db2f
[ "MIT" ]
null
null
null
__init__.py
jhs7jhs/emilator
ebd7ba2601bf2077f6670365778a16d1e550db2f
[ "MIT" ]
null
null
null
from .emilator import * from . import errors
22
23
0.772727
6
44
5.666667
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.159091
44
2
24
22
0.918919
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
f9d6c2ebd00f9fe4f8bf1eb128813c309ad437c8
6665
py
Python
autodh/autodh.py
OsaroAI/AutoDH
156ed4dea7408dcc981468dd5c2e0c2bd41a045f
[ "MIT" ]
7
2018-11-27T21:57:49.000Z
2021-12-20T07:32:51.000Z
autodh/autodh.py
EdsterG/AutoDH
156ed4dea7408dcc981468dd5c2e0c2bd41a045f
[ "MIT" ]
1
2021-09-03T00:17:03.000Z
2021-09-10T22:13:42.000Z
autodh/autodh.py
OsaroAI/AutoDH
156ed4dea7408dcc981468dd5c2e0c2bd41a045f
[ "MIT" ]
3
2020-11-30T12:19:06.000Z
2021-12-20T07:32:54.000Z
import numpy as np from . import math_utils from .dh_table import DHTable from .joint import Joint def _compute_row_in_standard_dh_table(z1, x1, o1, z2, x2, o2): """Given two frames, compute a row in the Denavit-Hartenberg table using the standard convention :param z1: z-axis of first frame, as numpy array with shape (3,) :param x1: x-axis of first frame, as numpy array with shape (3,) :param o1: origin of first frame, as numpy array with shape (3,) :param z2: z-axis of second frame, as numpy array with shape (3,) :param x2: x-axis of second frame, as numpy array with shape (3,) :param o2: origin of second frame, as numpy array with shape (3,) :returns: next row in dh table (d, theta, a, alpha) """ alpha = np.arctan2(np.cross(z1, z2).dot(x2), z1.dot(z2)) theta = np.arctan2(np.cross(x1, x2).dot(z1), x1.dot(x2)) a = (o2 - o1).dot(x2) d = (o2 - o1).dot(z1) return d, theta, a, alpha def _compute_row_in_modified_dh_table(z1, x1, o1, z2, x2, o2): """Given two frames, compute a row in the Denavit-Hartenberg table using the modified convention :param z1: z-axis of first frame, as numpy array with shape (3,) :param x1: x-axis of first frame, as numpy array with shape (3,) :param o1: origin of first frame, as numpy array with shape (3,) :param z2: z-axis of second frame, as numpy array with shape (3,) :param x2: x-axis of second frame, as numpy array with shape (3,) :param o2: origin of second frame, as numpy array with shape (3,) :returns: next row in dh table (d, theta, a, alpha) """ alpha = np.arctan2(np.cross(z1, z2).dot(x1), z1.dot(z2)) theta = np.arctan2(np.cross(x1, x2).dot(z2), x1.dot(x2)) a = (o2 - o1).dot(x1) d = (o2 - o1).dot(z2) return d, theta, a, alpha def _get_standard_dh_parameters(joints, base_frame, ee_frame): """Compute the standard Denavit-Hartenberg parameters :param joints: list of Joints :param base_frame: homogeneous matrix representing the base frame, as numpy array with shape (4, 4) :param ee_frame: homogeneous matrix representing the end-effector frame, as numpy array with shape (4, 4) :returns: (d, theta, a, alpha, joint_types), all as iterable objects """ # Create a list of partial frames partial_frames = [] partial_frames.append([base_frame[:3, 3], base_frame[:3, 0], base_frame[:3, 2], Joint.Type.Fixed]) for j in joints: partial_frames.append([j.anchor, None, j.axis, j.type]) partial_frames.append([ee_frame[:3, 3], None, ee_frame[:3, 2], Joint.Type.Fixed]) partial_frames.append([ee_frame[:3, 3], ee_frame[:3, 0], ee_frame[:3, 2], Joint.Type.Fixed]) # Determine x-axis for each partial frame frames = [partial_frames[0]] for i in range(1, len(partial_frames) - 1): o0, x0, z0, _ = frames[i - 1] o1, _, z1, jt = partial_frames[i] x1, _, o1_p = math_utils.common_perpendicular_and_intersection_points(o0, z0, o1, z1, cp_hint=x0) if np.allclose(x0.dot(x1), -1): x1 = -x1 frames.append([o1_p, x1, z1, jt]) frames.append(partial_frames[-1]) # Create the DH table dh_table, joint_types = [], [] for i in range(len(frames) - 1): o1, x1, z1, joint_type = frames[i] o2, x2, z2, _ = frames[i + 1] d, theta, a, alpha = _compute_row_in_standard_dh_table(z1, x1, o1, z2, x2, o2) if np.allclose([d, theta, a, alpha], 0) and joint_type == Joint.Type.Fixed: continue dh_table.append([d, theta, a, alpha]) joint_types.append(joint_type) d, theta, a, alpha = np.transpose(dh_table) return d, theta, a, alpha, joint_types def _get_modified_dh_parameters(joints, base_frame, ee_frame): """Compute the modified Denavit-Hartenberg parameters :param joints: list of Joints :param base_frame: homogeneous matrix 
representing the base frame, as numpy array with shape (4, 4) :param ee_frame: homogeneous matrix representing the end-effector frame, as numpy array with shape (4, 4) :returns: (d, theta, a, alpha, joint_types), all as iterable objects """ # Create a list of partial frames partial_frames = [] partial_frames.append([base_frame[:3, 3], base_frame[:3, 0], base_frame[:3, 2], Joint.Type.Fixed]) partial_frames.append([base_frame[:3, 3], None, base_frame[:3, 2], Joint.Type.Fixed]) for j in joints: partial_frames.append([j.anchor, None, j.axis, j.type]) partial_frames.append([ee_frame[:3, 3], ee_frame[:3, 0], ee_frame[:3, 2], Joint.Type.Fixed]) # Determine x-axis for each partial frame frames = [partial_frames[0]] for i in range(1, len(partial_frames) - 1): _, x0, _, _ = frames[i - 1] o1, _, z1, jt = partial_frames[i] o2, _, z2, _ = partial_frames[i + 1] x1, o1_p, _ = math_utils.common_perpendicular_and_intersection_points(o1, z1, o2, z2, cp_hint=x0) if np.allclose(x0.dot(x1), -1): x1 = -x1 frames.append([o1_p, x1, z1, jt]) frames.append(partial_frames[-1]) # Create the DH table dh_table, joint_types = [], [] for i in range(1, len(frames)): o0, x0, z0, _ = frames[i - 1] o1, x1, z1, joint_type = frames[i] d, theta, a, alpha = _compute_row_in_modified_dh_table(z0, x0, o0, z1, x1, o1) if np.allclose([d, theta, a, alpha], 0) and joint_type == Joint.Type.Fixed: continue dh_table.append([d, theta, a, alpha]) joint_types.append(joint_type) d, theta, a, alpha = np.transpose(dh_table) return d, theta, a, alpha, joint_types def get_dh_parameters(joints, base_frame, ee_frame, convention=DHTable.Convention.Standard): """Compute the Denavit-Hartenberg parameters :param joints: list of Joints :param base_frame: homogeneous matrix representing the base frame, as numpy array with shape (4, 4) :param ee_frame: homogeneous matrix representing the end-effector frame, as numpy array with shape (4, 4) :param convention: convention used to compute parameters, defaults to DHTable.Convention.Standard :returns: (d, theta, a, alpha, joint_types), all as iterable objects """ if convention is DHTable.Convention.Modified: return _get_modified_dh_parameters(joints, base_frame, ee_frame) else: return _get_standard_dh_parameters(joints, base_frame, ee_frame) def create_dh_table(joints, base_frame, ee_frame, convention=DHTable.Convention.Standard): """Build a DHTable from the joints and the base/end-effector frames, using the given convention.""" d, theta, a, alpha, joint_types = get_dh_parameters(joints, base_frame, ee_frame, convention) return DHTable(d, theta, a, alpha, joint_types, convention)
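The row formulas above can be sanity-checked in isolation. Below is a self-contained numpy sketch that restates the standard-convention algebra from `_compute_row_in_standard_dh_table` (restated rather than imported, since the helper is private); the two frames are hand-built so the expected row is known in advance.

import numpy as np

def dh_row_standard(z1, x1, o1, z2, x2, o2):
    # Same algebra as _compute_row_in_standard_dh_table above.
    alpha = np.arctan2(np.cross(z1, z2).dot(x2), z1.dot(z2))
    theta = np.arctan2(np.cross(x1, x2).dot(z1), x1.dot(x2))
    a = (o2 - o1).dot(x2)
    d = (o2 - o1).dot(z1)
    return d, theta, a, alpha

# Frame 2 sits 0.5 along frame 1's z-axis, 0.2 along its own x-axis,
# and is rotated 90 degrees about z, so alpha should come out as 0.
z1, x1, o1 = np.array([0., 0., 1.]), np.array([1., 0., 0.]), np.zeros(3)
z2, x2, o2 = np.array([0., 0., 1.]), np.array([0., 1., 0.]), np.array([0., 0.2, 0.5])

d, theta, a, alpha = dh_row_standard(z1, x1, o1, z2, x2, o2)
print(d, np.degrees(theta), a, np.degrees(alpha))  # expect 0.5 90.0 0.2 0.0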
45.340136
108
0.664216
1058
6665
4.038752
0.102079
0.044231
0.05055
0.071612
0.903347
0.903347
0.874093
0.851158
0.840393
0.730166
0
0.037595
0.213803
6665
146
109
45.650685
0.777863
0.362491
0
0.556962
0
0
0
0
0
0
0
0
0
1
0.075949
false
0
0.050633
0
0.21519
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
fb22b7c7073d099b9891b7d4aea1f514e82b158b
6951
py
Python
tests/commands/test_anno_command.py
artur-shaik/wallabag-client
6c03a3beebcf27f51076e0eb11bb99f618f8daa3
[ "MIT" ]
16
2020-09-30T23:08:45.000Z
2022-03-30T02:34:17.000Z
tests/commands/test_anno_command.py
artur-shaik/wallabag-client
6c03a3beebcf27f51076e0eb11bb99f618f8daa3
[ "MIT" ]
15
2020-11-05T09:22:38.000Z
2022-03-11T16:56:18.000Z
tests/commands/test_anno_command.py
artur-shaik/wallabag-client
6c03a3beebcf27f51076e0eb11bb99f618f8daa3
[ "MIT" ]
1
2021-04-02T11:00:57.000Z
2021-04-02T11:00:57.000Z
# -*- coding: utf-8 -*- import delorean import humanize from wallabag.config import Configs from wallabag.api.api import Response from wallabag.api.get_entry import GetEntry from wallabag.api.delete_annotation import DeleteAnnotation from wallabag.commands.anno import ( AnnoCommand, AnnoCommandParams, AnnoSubcommand) class TestAnno(): def setup_method(self, method): self.config = Configs("/tmp/config") self.config.config.read_string(""" [api] serverurl = url username = user password = pass [oauth2] client = 100 secret = 100 """) def test_list_entry_annotations(self, monkeypatch): def getentry_request(self): return Response( 200, ( '{"id": 1, "title": "title",' '"content": "<h1>head</h1>content", "url": "url",' '"is_archived": 0, "is_starred": 1,' '"annotations": [{' '"user": "User", "annotator_schema_version":' ' "v1.0", "id": 1, "text": "content", ' '"created_at": "2020-10-28T10:50:51+0000", ' '"updated_at": "2020-10-28T10:50:51+0000", ' '"quote": "quote", "ranges": ' '[{"start": "/div[1]/p[1]", "startOffset": "23", ' '"end": "/div[1]/p[1]", "endOffset": "49"}]}]}')) monkeypatch.setattr(GetEntry, 'request', getentry_request) params = AnnoCommandParams() params.entry_id = 1 result = AnnoCommand(self.config, params).execute() assert result[0] past = delorean.utcnow() - delorean.parse('2020-10-28T10:50:51+0000') assert result[1] == f'1. quote ({humanize.naturaltime(past)}) [7]' def test_remove_annotation(self, monkeypatch): def request_success(self): return Response(200, None) monkeypatch.setattr(DeleteAnnotation, 'request', request_success) params = AnnoCommandParams() params.anno_id = 1 params.command = AnnoSubcommand.REMOVE result = AnnoCommand(self.config, params).execute() assert result[0] assert result[1] == 'Annotation successfully deleted' def test_show_annotations(self, monkeypatch): def getentry_request(self): return Response( 200, ( '{"id": 1, "title": "title",' '"content": "<h1>head</h1>content", "url": "url",' '"is_archived": 0, "is_starred": 1,' '"annotations": [{' '"user": "User", "annotator_schema_version":' ' "v1.0", "id": 1, "text": "content", ' '"created_at": "2020-10-28T10:50:51+0000", ' '"updated_at": "2020-10-28T10:50:51+0000", ' '"quote": "quote", "ranges": ' '[{"start": "/div[1]/p[1]", "startOffset": "23", ' '"end": "/div[1]/p[1]", "endOffset": "49"}]},{' '"user": "User", "annotator_schema_version":' ' "v1.0", "id": 2, "text": "another content", ' '"created_at": "2020-10-28T10:50:51+0000", ' '"updated_at": "2020-10-28T10:50:51+0000", ' '"quote": "another quote", "ranges": ' '[{"start": "/div[1]/p[2]", "startOffset": "23", ' '"end": "/div[1]/p[2]", "endOffset": "49"}]}]}')) monkeypatch.setattr(GetEntry, 'request', getentry_request) params = AnnoCommandParams() params.entry_id = 1 params.command = AnnoSubcommand.SHOW result = AnnoCommand(self.config, params).execute() assert result[0] past = delorean.utcnow() - delorean.parse('2020-10-28T10:50:51+0000') assert result[1] == ( f'1. quote ({humanize.naturaltime(past)}):\n\n\tcontent\n\n' f'2. 
another quote ({humanize.naturaltime(past)}):' '\n\n\tanother content\n') def test_show_annotations_by_id(self, monkeypatch): def getentry_request(self): return Response( 200, ( '{"id": 1, "title": "title",' '"content": "<h1>head</h1>content", "url": "url",' '"is_archived": 0, "is_starred": 1,' '"annotations": [{' '"user": "User", "annotator_schema_version":' ' "v1.0", "id": 1, "text": "content", ' '"created_at": "2020-10-28T10:50:51+0000", ' '"updated_at": "2020-10-28T10:50:51+0000", ' '"quote": "quote", "ranges": ' '[{"start": "/div[1]/p[1]", "startOffset": "23", ' '"end": "/div[1]/p[1]", "endOffset": "49"}]},{' '"user": "User", "annotator_schema_version":' ' "v1.0", "id": 2, "text": "another content", ' '"created_at": "2020-10-28T10:50:51+0000", ' '"updated_at": "2020-10-28T10:50:51+0000", ' '"quote": "another quote", "ranges": ' '[{"start": "/div[1]/p[2]", "startOffset": "23", ' '"end": "/div[1]/p[2]", "endOffset": "49"}]}]}')) monkeypatch.setattr(GetEntry, 'request', getentry_request) params = AnnoCommandParams() params.entry_id = 1 params.anno_id = 2 params.command = AnnoSubcommand.SHOW result = AnnoCommand(self.config, params).execute() assert result[0] past = delorean.utcnow() - delorean.parse('2020-10-28T10:50:51+0000') assert result[1] == ( f'2. another quote ({humanize.naturaltime(past)}):' '\n\n\tanother content\n') def test_show_empty_params(self): params = AnnoCommandParams() params.command = AnnoSubcommand.SHOW result = AnnoCommand(self.config, params).execute() assert not result[0] assert result[1] == 'Entry ID not specified' def test_list_empty_params(self): params = AnnoCommandParams() params.command = AnnoSubcommand.LIST result = AnnoCommand(self.config, params).execute() assert not result[0] assert result[1] == 'Entry ID not specified' def test_remove_empty_params(self): params = AnnoCommandParams() params.command = AnnoSubcommand.REMOVE result = AnnoCommand(self.config, params).execute() assert not result[0] assert result[1] == 'Annotation ID not specified'
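Every test above relies on the same fixture pattern: `monkeypatch.setattr` replaces an API class's `request` method with a stub returning a canned `Response`, so the command under test never touches the network. A dependency-free sketch of that pattern follows; the `Api` and `Command` classes are invented here purely for illustration and stand in for classes such as `GetEntry` and `AnnoCommand`.

class Api:  # stand-in for an API class such as GetEntry
    def request(self):
        raise RuntimeError('would hit the network')

class Command:  # stand-in for a command such as AnnoCommand
    def execute(self):
        return (True, Api().request())

def test_command_offline(monkeypatch):
    # Patch at the class level, exactly as the tests above do with
    # monkeypatch.setattr(GetEntry, 'request', getentry_request).
    monkeypatch.setattr(Api, 'request', lambda self: '{"id": 1}')
    assert Command().execute() == (True, '{"id": 1}')

Patching the class rather than an instance matters because the command constructs its own API object internally, just as `AnnoCommand` does with `GetEntry`.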
42.907407
77
0.496331
680
6951
4.980882
0.161765
0.023029
0.04222
0.049897
0.79628
0.795394
0.785061
0.785061
0.743431
0.741364
0
0.076433
0.344986
6951
161
78
43.173913
0.667472
0.003021
0
0.720588
0
0
0.362009
0.099018
0
0
0
0
0.102941
1
0.088235
false
0.007353
0.051471
0.029412
0.176471
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
fb3b1f24bb1ba9ebc580b1a088c5cc3d7f8ce461
121
py
Python
users/resources/__init__.py
marianojabdala/microservices
4a020cb9d703f20baa40446d525363d159d008ac
[ "Apache-2.0" ]
1
2018-04-20T20:12:31.000Z
2018-04-20T20:12:31.000Z
users/resources/__init__.py
marianojabdala/microservices
4a020cb9d703f20baa40446d525363d159d008ac
[ "Apache-2.0" ]
17
2021-03-19T22:51:22.000Z
2021-08-30T20:22:33.000Z
users/resources/__init__.py
marianojabdala/microservices
4a020cb9d703f20baa40446d525363d159d008ac
[ "Apache-2.0" ]
2
2018-04-20T20:12:36.000Z
2018-10-07T15:37:46.000Z
# -*- coding: utf-8 -*- """Export the base and user resource.""" import users.resources.base import users.resources.user
24.2
40
0.710744
17
121
5.058824
0.705882
0.255814
0.465116
0
0
0
0
0
0
0
0
0.009434
0.123967
121
4
41
30.25
0.801887
0.471074
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
6
34ca7e34e4092242a2e1b2f9a28ea7dea085410f
30
py
Python
pytorchgui/__init__.py
CallumJHays/pytorch-gui
6df3eefeff20176a811c3c12989aeac588fa2f9b
[ "MIT" ]
4
2018-09-10T12:17:01.000Z
2021-08-16T04:42:07.000Z
pytorchgui/__init__.py
CallumJHays/pytorch-gui
6df3eefeff20176a811c3c12989aeac588fa2f9b
[ "MIT" ]
1
2019-10-29T17:21:10.000Z
2019-10-29T17:21:10.000Z
pytorchgui/__init__.py
CallumJHays/pytorch-gui
6df3eefeff20176a811c3c12989aeac588fa2f9b
[ "MIT" ]
1
2019-07-10T18:56:36.000Z
2019-07-10T18:56:36.000Z
from .pytorchgui import Graph
15
29
0.833333
4
30
6.25
1
0
0
0
0
0
0
0
0
0
0
0
0.133333
30
1
30
30
0.961538
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
551664e9b2d39d3c8c9853ddab6dca1df9edda73
39
py
Python
Warp_Module/direct_warp/__init__.py
ClementPinard/direct-warper
be46410202c8cd9efb982b5dc4c1eb954ab45b10
[ "MIT" ]
2
2021-05-24T06:27:40.000Z
2021-06-11T02:39:59.000Z
Warp_Module/direct_warp/__init__.py
ClementPinard/direct-warper
be46410202c8cd9efb982b5dc4c1eb954ab45b10
[ "MIT" ]
null
null
null
Warp_Module/direct_warp/__init__.py
ClementPinard/direct-warper
be46410202c8cd9efb982b5dc4c1eb954ab45b10
[ "MIT" ]
null
null
null
from .direct_warper import DirectWarper
39
39
0.897436
5
39
6.8
1
0
0
0
0
0
0
0
0
0
0
0
0.076923
39
1
39
39
0.944444
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
9b81910be79cb31685c2036c21dec29d983fac89
24
py
Python
game/__init__.py
Onii-Chan-Discord/onii-cogs
ec0a22e9a1dcd8fd0617448ba21b4c39ee113bd5
[ "Apache-2.0" ]
1
2021-07-11T18:46:08.000Z
2021-07-11T18:46:08.000Z
game/__init__.py
Onii-Chan-Discord/onii-cogs
ec0a22e9a1dcd8fd0617448ba21b4c39ee113bd5
[ "Apache-2.0" ]
35
2021-06-05T06:33:34.000Z
2022-03-22T01:44:00.000Z
game/__init__.py
Onii-Chan-Discord/onii-cogs
ec0a22e9a1dcd8fd0617448ba21b4c39ee113bd5
[ "Apache-2.0" ]
4
2021-07-07T04:29:33.000Z
2021-12-31T12:12:00.000Z
from .game import setup
12
23
0.791667
4
24
4.75
1
0
0
0
0
0
0
0
0
0
0
0
0.166667
24
1
24
24
0.95
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
9b9d576f3a6ef05181a8f15b56f6d0b0acc3370c
119
py
Python
setup_script.py
rperrin22/QCF_demag
7d85e020b7e1d7f59ddf0427b67768db238a05b3
[ "MIT" ]
null
null
null
setup_script.py
rperrin22/QCF_demag
7d85e020b7e1d7f59ddf0427b67768db238a05b3
[ "MIT" ]
null
null
null
setup_script.py
rperrin22/QCF_demag
7d85e020b7e1d7f59ddf0427b67768db238a05b3
[ "MIT" ]
null
null
null
from FEHM_supplementary.prep import *

setup_test_folders('param_file_new.csv')
build_run_script('param_file_new.csv')
23.8
40
0.840336
19
119
4.789474
0.789474
0.197802
0.263736
0.32967
0
0
0
0
0
0
0
0
0.058824
119
4
41
29.75
0.8125
0
0
0
0
0
0.302521
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
6
fd3f1739d2a56a8a8b817bd73d6f0899ac4b2210
132
py
Python
report/admin.py
noriHanda/study_matching
55e9525c9502c45c799f8f3a4c9c2b54406ed9e1
[ "MIT" ]
null
null
null
report/admin.py
noriHanda/study_matching
55e9525c9502c45c799f8f3a4c9c2b54406ed9e1
[ "MIT" ]
null
null
null
report/admin.py
noriHanda/study_matching
55e9525c9502c45c799f8f3a4c9c2b54406ed9e1
[ "MIT" ]
null
null
null
from django.contrib import admin
from .models import Report


@admin.register(Report)
class ReportAdmin(admin.ModelAdmin):
    pass
16.5
36
0.787879
17
132
6.117647
0.705882
0
0
0
0
0
0
0
0
0
0
0
0.136364
132
7
37
18.857143
0.912281
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.2
0.4
0
0.6
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
6
fd5c3c9285f068d91b12d1b73f8c9e8a02ccf387
185
py
Python
videoDetection/config.py
Rubik90/TFM_AG
5e836245d0704122f2a0d47413e93bf53d966ca0
[ "MIT" ]
1
2021-02-16T18:32:38.000Z
2021-02-16T18:32:38.000Z
videoDetection/config.py
Rubik90/TFM_AG
5e836245d0704122f2a0d47413e93bf53d966ca0
[ "MIT" ]
null
null
null
videoDetection/config.py
Rubik90/TFM_AG
5e836245d0704122f2a0d47413e93bf53d966ca0
[ "MIT" ]
1
2021-02-16T18:32:39.000Z
2021-02-16T18:32:39.000Z
import sys
import pathlib

working_dir_path = pathlib.Path().absolute()

SAVE_DIR_PATH = str(working_dir_path) + '/joblib_features/'
MODEL_DIR_PATH = str(working_dir_path) + '/model/'
20.555556
59
0.767568
27
185
4.851852
0.444444
0.267176
0.320611
0.259542
0.366412
0.366412
0
0
0
0
0
0
0.108108
185
8
60
23.125
0.793939
0
0
0
0
0
0.12973
0
0
0
0
0
0
1
0
false
0
0.4
0
0.4
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
6
b5ce6fee1723d9ddcdd913d7eac11654c8283aef
34
py
Python
src/model/__init__.py
lopezco/test
2dd420bd8cecd91ad0a617e7fd019e10c8abce06
[ "MIT" ]
16
2019-08-20T08:42:23.000Z
2022-02-16T16:12:22.000Z
src/model/__init__.py
lopezco/test
2dd420bd8cecd91ad0a617e7fd019e10c8abce06
[ "MIT" ]
5
2019-08-19T11:24:46.000Z
2020-06-02T16:49:21.000Z
src/model/__init__.py
lopezco/test
2dd420bd8cecd91ad0a617e7fd019e10c8abce06
[ "MIT" ]
6
2020-01-09T12:25:40.000Z
2021-12-29T01:43:34.000Z
from .sklearn import SklearnModel
17
33
0.852941
4
34
7.25
1
0
0
0
0
0
0
0
0
0
0
0
0.117647
34
1
34
34
0.966667
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
b5db5029ee5dc53f71f7f678f8c7742adb6926ac
88
py
Python
tamcolors/tests/tests_tests/__init__.py
cmcmarrow/tamcolors
65a5f2455bbe35a739b98d14af158c3df7feb786
[ "Apache-2.0" ]
29
2020-07-17T23:46:17.000Z
2022-02-06T05:36:44.000Z
tamcolors/tests/tests_tests/__init__.py
sudo-nikhil/tamcolors
65a5f2455bbe35a739b98d14af158c3df7feb786
[ "Apache-2.0" ]
42
2020-07-25T19:39:52.000Z
2021-02-24T01:19:58.000Z
tamcolors/tests/tests_tests/__init__.py
sudo-nikhil/tamcolors
65a5f2455bbe35a739b98d14af158c3df7feb786
[ "Apache-2.0" ]
8
2020-07-18T23:02:48.000Z
2020-12-30T04:07:35.000Z
from . import test_multi_task_helper_tests

__all__ = ("test_multi_task_helper_tests",)
22
43
0.829545
13
88
4.692308
0.615385
0.295082
0.42623
0.622951
0.786885
0
0
0
0
0
0
0
0.090909
88
3
44
29.333333
0.7625
0
0
0
0
0
0.318182
0.318182
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
6
b5e126179bb9e3d80057bc25f35acc30589036e4
30
py
Python
MartinsCrap.py
BostonA/SpudnikPi
011d95e95e86fbec3186db70416c7391190352b3
[ "Apache-2.0" ]
null
null
null
MartinsCrap.py
BostonA/SpudnikPi
011d95e95e86fbec3186db70416c7391190352b3
[ "Apache-2.0" ]
1
2017-06-01T13:07:31.000Z
2017-06-01T13:07:45.000Z
MartinsCrap.py
BostonA/SpudnikPi
011d95e95e86fbec3186db70416c7391190352b3
[ "Apache-2.0" ]
null
null
null
def f(d): # h=-4.9t +
5
12
0.3
6
30
1.5
1
0
0
0
0
0
0
0
0
0
0
0.117647
0.433333
30
5
13
6
0.411765
0.3
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0
null
null
0
1
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
6
bd167373edfb246b39a6249c80bc323af42e46c5
41
py
Python
conditional_independence/suffstats/invariance_suffstats/__init__.py
uhlerlab/conditional_independence
aa4b5117b6f24bf39433d427d490312864e9bd69
[ "BSD-3-Clause" ]
4
2021-01-29T20:27:31.000Z
2022-02-01T11:55:33.000Z
conditional_independence/suffstats/invariance_suffstats/__init__.py
uhlerlab/conditional_independence
aa4b5117b6f24bf39433d427d490312864e9bd69
[ "BSD-3-Clause" ]
null
null
null
conditional_independence/suffstats/invariance_suffstats/__init__.py
uhlerlab/conditional_independence
aa4b5117b6f24bf39433d427d490312864e9bd69
[ "BSD-3-Clause" ]
1
2021-09-12T13:41:21.000Z
2021-09-12T13:41:21.000Z
from .gauss_invariance_suffstat import *
20.5
40
0.853659
5
41
6.6
1
0
0
0
0
0
0
0
0
0
0
0
0.097561
41
1
41
41
0.891892
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
bd23fd42f3b66a612a96ad3d260a619eabdeb618
3350
py
Python
datasets/facescape_wrapper.py
mudimingquedeyinmoujia/liif
6e0c1d46948e641addcb32a0c0b3a2fa810026d5
[ "BSD-3-Clause" ]
null
null
null
datasets/facescape_wrapper.py
mudimingquedeyinmoujia/liif
6e0c1d46948e641addcb32a0c0b3a2fa810026d5
[ "BSD-3-Clause" ]
null
null
null
datasets/facescape_wrapper.py
mudimingquedeyinmoujia/liif
6e0c1d46948e641addcb32a0c0b3a2fa810026d5
[ "BSD-3-Clause" ]
null
null
null
import functools
import random
import math
from PIL import Image
import numpy as np
import torch
from torch.utils.data import Dataset
from torchvision import transforms

from datasets import register
from utils import to_pixel_samples


def resize_fn(img, size):
    return transforms.ToTensor()(
        transforms.Resize(size, Image.BICUBIC)(
            transforms.ToPILImage()(img)))


@register('facescape-liif-wrapper')
class FacescapeWrapper(Dataset):

    def __init__(self, dataset, inp_size=64, scale_min=1, scale_max=None,
                 random_sample=None):
        self.dataset = dataset
        self.inp_size = inp_size
        self.scale_min = scale_min
        if scale_max is None:
            scale_max = scale_min
        self.scale_max = scale_max
        self.random_sample = random_sample

    def __len__(self):
        return len(self.dataset)

    def __getitem__(self, idx):
        img_path, img_id, exp, index = self.dataset[idx]
        img = transforms.ToTensor()(Image.open(img_path).convert('RGB'))

        s = random.uniform(self.scale_min, self.scale_max)
        gt_size = round(self.inp_size * s)
        gt = resize_fn(img, gt_size)
        inp = resize_fn(img, self.inp_size)

        hr_coord, hr_rgb = to_pixel_samples(gt.contiguous())

        if self.random_sample is not None:
            sample_lst = np.random.choice(
                len(hr_coord), self.random_sample, replace=False)
            hr_coord = hr_coord[sample_lst]
            hr_rgb = hr_rgb[sample_lst]

        cell = torch.ones_like(hr_coord)
        cell[:, 0] *= 2 / gt_size
        cell[:, 1] *= 2 / gt_size

        return {
            'inp': inp,         # 4096 scale to inp_size=64
            'coord': hr_coord,  # 4096 scale to inp_size*s
            'cell': cell,
            'gt': hr_rgb        # 4096 scale to inp_size*s
        }


@register('facescape-valid-wrapper')
class FacescapeValidWrapper(Dataset):

    def __init__(self, dataset, inp_size=64, scale_min=1, scale_max=None,
                 random_sample=None):
        self.dataset = dataset
        self.inp_size = inp_size
        self.scale_min = scale_min
        if scale_max is None:
            scale_max = scale_min
        self.scale_max = scale_max
        self.random_sample = random_sample

    def __len__(self):
        return len(self.dataset)

    def __getitem__(self, idx):
        img_path, img_id, exp, index = self.dataset[idx]
        img = transforms.ToTensor()(Image.open(img_path).convert('RGB'))

        s = random.uniform(self.scale_min, self.scale_max)
        gt_size = round(self.inp_size * s)
        gt = resize_fn(img, gt_size)
        inp = resize_fn(img, self.inp_size)

        hr_coord, hr_rgb = to_pixel_samples(gt.contiguous())

        if self.random_sample is not None:
            sample_lst = np.random.choice(
                len(hr_coord), self.random_sample, replace=False)
            hr_coord = hr_coord[sample_lst]
            hr_rgb = hr_rgb[sample_lst]

        cell = torch.ones_like(hr_coord)
        cell[:, 0] *= 2 / gt_size
        cell[:, 1] *= 2 / gt_size

        return {
            'inp': inp,                   # 4096 scale to inp_size=64 [3,64,64]
            'coord': hr_coord,            # 4096 scale to inp_size*s [sample,2]
            'cell': cell,
            'gt': hr_rgb,                 # 4096 scale to inp_size*s [sample,3]
            'gt512': resize_fn(img, 512)  # [3,512,512]
        }
31.308411
94
0.619701
465
3350
4.193548
0.172043
0.057436
0.033846
0.043077
0.779487
0.779487
0.779487
0.773333
0.773333
0.773333
0
0.025546
0.275522
3350
107
95
31.308411
0.777915
0.058209
0
0.722892
0
0
0.026684
0.014295
0
0
0
0
0
1
0.084337
false
0
0.120482
0.036145
0.289157
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
1ff50af0c469e9e4417f5c704ed3028b0db6e8db
12
py
Python
python/testData/mover/simple_afterDown.py
jnthn/intellij-community
8fa7c8a3ace62400c838e0d5926a7be106aa8557
[ "Apache-2.0" ]
2
2019-04-28T07:48:50.000Z
2020-12-11T14:18:08.000Z
python/testData/mover/simple_afterDown.py
Cyril-lamirand/intellij-community
60ab6c61b82fc761dd68363eca7d9d69663cfa39
[ "Apache-2.0" ]
173
2018-07-05T13:59:39.000Z
2018-08-09T01:12:03.000Z
python/testData/mover/simple_afterDown.py
Cyril-lamirand/intellij-community
60ab6c61b82fc761dd68363eca7d9d69663cfa39
[ "Apache-2.0" ]
2
2020-03-15T08:57:37.000Z
2020-04-07T04:48:14.000Z
b = 2
a = 1
4
5
0.333333
4
12
1
1
0
0
0
0
0
0
0
0
0
0
0.333333
0.5
12
2
6
6
0.333333
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
1
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
1f1639d7ea7dfb8a2d4fd2080e4bfaef7ba68ed0
5937
py
Python
Web/contestManager.py
cmd2001/Open-TesutoHime
2c30aa35650383adfb99496aebd425dffd287eda
[ "MIT" ]
11
2020-11-28T16:45:35.000Z
2021-08-31T07:56:26.000Z
Web/contestManager.py
cmd2001/Open-TesutoHime
2c30aa35650383adfb99496aebd425dffd287eda
[ "MIT" ]
null
null
null
Web/contestManager.py
cmd2001/Open-TesutoHime
2c30aa35650383adfb99496aebd425dffd287eda
[ "MIT" ]
2
2021-05-16T03:09:58.000Z
2021-08-21T07:24:58.000Z
import sys
from utils import *


class ContestManager:
    def create_contest(self, name: str, start_time: int, end_time: int, contest_type: int):
        db = db_connect()
        cursor = db.cursor()
        try:
            cursor.execute("INSERT INTO Contest (Name, Start_Time, End_Time, Type) VALUES (%s, %s, %s, %s)",
                           (name, start_time, end_time, contest_type))
            db.commit()
        except pymysql.Error:
            db.rollback()
            sys.stderr.write("SQL Error in ContestManager: Create_Contest\n")
        db.close()
        return

    def modify_contest(self, contest_id: int, new_name: str, new_start_time: int, new_end_time: int, new_contest_type: int):
        db = db_connect()
        cursor = db.cursor()
        try:
            cursor.execute("UPDATE Contest SET Name = %s, Start_Time = %s, End_Time = %s, Type = %s WHERE ID = %s",
                           (new_name, new_start_time, new_end_time, new_contest_type, contest_id))
            db.commit()
        except pymysql.Error:
            db.rollback()
            sys.stderr.write("SQL Error in ContestManager: Modify_Contest\n")
        db.close()
        return

    def delete_contest(self, contest_id: int):
        db = db_connect()
        cursor = db.cursor()
        try:
            cursor.execute("DELETE FROM Contest WHERE ID = %s", (str(contest_id)))
            db.commit()
        except pymysql.Error:
            db.rollback()
            sys.stderr.write("SQL Error in ContestManager: Delete_Contest(1)\n")
        try:
            cursor.execute("DELETE FROM Contest_Player WHERE Belong = %s", (str(contest_id)))
            db.commit()
        except pymysql.Error:
            db.rollback()
            sys.stderr.write("SQL Error in ContestManager: Delete_Contest(2)\n")
        try:
            cursor.execute("DELETE FROM Contest_Problem WHERE Belong = %s", (str(contest_id)))
            db.commit()
        except pymysql.Error:
            db.rollback()
            sys.stderr.write("SQL Error in ContestManager: Delete_Contest(3)\n")
        db.close()
        return

    def add_problem_to_contest(self, contest_id: int, problem_id: int):
        db = db_connect()
        cursor = db.cursor()
        try:
            cursor.execute("INSERT INTO Contest_Problem (Belong, Problem_ID) VALUES (%s, %s)",
                           (str(contest_id), str(problem_id)))
            db.commit()
        except pymysql.Error:
            db.rollback()
            sys.stderr.write("SQL Error in ContestManager: Add_Problem_To_Contest\n")
        db.close()
        return

    def delete_problem_from_contest(self, contest_id: int, problem_id: int):
        db = db_connect()
        cursor = db.cursor()
        try:
            cursor.execute("DELETE FROM Contest_Problem WHERE Belong = %s AND Problem_ID = %s",
                           (str(contest_id), str(problem_id)))
            db.commit()
        except pymysql.Error:
            db.rollback()
            sys.stderr.write("SQL Error in ContestManager: Delete_Problem_From_Contest\n")
        db.close()
        return

    def add_player_to_contest(self, contest_id: int, username: str):
        db = db_connect()
        cursor = db.cursor()
        try:
            cursor.execute("INSERT INTO Contest_Player (Belong, Username) VALUES (%s, %s)",
                           (str(contest_id), str(username)))
            db.commit()
        except pymysql.Error:
            db.rollback()
            sys.stderr.write("SQL Error in ContestManager: Add_Player_To_Contest\n")
        db.close()
        return

    def check_player_in_contest(self, contest_id: int, username: str):
        db = db_connect()
        cursor = db.cursor()
        cursor.execute("SELECT tempID FROM Contest_Player WHERE Belong = %s AND Username = %s",
                       (contest_id, username))
        ret = cursor.fetchall()
        db.close()
        return len(ret) != 0

    def delete_player_from_contest(self, contest_id: int, username: str):
        db = db_connect()
        cursor = db.cursor()
        try:
            cursor.execute("DELETE FROM Contest_Player WHERE Belong = %s AND Username = %s",
                           (str(contest_id), str(username)))
            db.commit()
        except pymysql.Error:
            db.rollback()
            sys.stderr.write("SQL Error in ContestManager: Delete_Player_From_Contest\n")
        db.close()
        return

    def list_contest(self, contest_type: int):
        db = db_connect()
        cursor = db.cursor()
        cursor.execute("SELECT ID, Name, Start_Time, End_Time FROM Contest WHERE Type = %s ORDER BY ID DESC",
                       (contest_type))
        ret = cursor.fetchall()
        db.close()
        return ret

    def get_time(self, contest_id: int):
        db = db_connect()
        cursor = db.cursor()
        cursor.execute("SELECT Start_Time, End_Time FROM Contest WHERE ID = %s", (str(contest_id)))
        ret = cursor.fetchone()
        db.close()
        return int(ret[0]), int(ret[1])

    def list_problem_for_contest(self, contest_id: int):
        db = db_connect()
        cursor = db.cursor()
        cursor.execute("SELECT Problem_ID FROM Contest_Problem WHERE Belong = %s", (str(contest_id)))
        ret = cursor.fetchall()
        db.close()
        return ret

    def list_player_for_contest(self, contest_id: int):
        db = db_connect()
        cursor = db.cursor()
        cursor.execute("SELECT Username FROM Contest_Player WHERE Belong = %s", (str(contest_id)))
        ret = cursor.fetchall()
        db.close()
        return ret

    def get_title(self, contest_id: int):
        db = db_connect()
        cursor = db.cursor()
        cursor.execute("SELECT Name FROM Contest WHERE ID = %s", (str(contest_id)))
        ret = cursor.fetchall()
        db.close()
        return ret


Contest_Manager = ContestManager()
36.423313
119
0.577396
720
5937
4.583333
0.098611
0.065455
0.043333
0.06697
0.820303
0.799394
0.772424
0.709394
0.707273
0.659091
0
0.001469
0.311942
5937
162
120
36.648148
0.806365
0
0
0.664336
0
0.013986
0.226377
0.017517
0
0
0
0
0
1
0.090909
false
0
0.013986
0
0.202797
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
2f160e0628f97a2b09541978379116c85108342f
188
py
Python
article.py
entrpn/stocksight
36f51b69ebba77195e1bf76a96072e9b80a4af13
[ "Apache-2.0" ]
1
2019-08-02T00:18:24.000Z
2019-08-02T00:18:24.000Z
article.py
entrpn/stocksight
36f51b69ebba77195e1bf76a96072e9b80a4af13
[ "Apache-2.0" ]
null
null
null
article.py
entrpn/stocksight
36f51b69ebba77195e1bf76a96072e9b80a4af13
[ "Apache-2.0" ]
null
null
null
class Article:

    def __init__(self, headline, url):
        self.headline = headline
        self.url = url

    def __eq__(self, other):
        return self.headline == other.headline
20.888889
46
0.62234
22
188
4.954545
0.454545
0.330275
0
0
0
0
0
0
0
0
0
0
0.281915
188
8
47
23.5
0.807407
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0
0.166667
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
6
85cb1bb4ae02b1a4342fa468007dd9491aca6415
37
py
Python
comparer/__init__.py
eaingaran/TimeMachine
f6199827ffc358dd32f26edd8d68e2dbf7c63a90
[ "MIT" ]
null
null
null
comparer/__init__.py
eaingaran/TimeMachine
f6199827ffc358dd32f26edd8d68e2dbf7c63a90
[ "MIT" ]
null
null
null
comparer/__init__.py
eaingaran/TimeMachine
f6199827ffc358dd32f26edd8d68e2dbf7c63a90
[ "MIT" ]
null
null
null
from comparer import FileComparision
18.5
36
0.891892
4
37
8.25
1
0
0
0
0
0
0
0
0
0
0
0
0.108108
37
1
37
37
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
c8041ed13662eb3d33f0ff7baa5964e9a4c11f90
11341
py
Python
gapid_tests/resource_creation_tests/FlushAndInvalidateMappedMemoryRanges_test/FlushInvalidateMappedMemoryRanges_test.py
AWoloszyn/vulkan_test_applications
5e9f86cdbd4e2344f41db9e0a578fe9fba41106f
[ "Apache-2.0" ]
null
null
null
gapid_tests/resource_creation_tests/FlushAndInvalidateMappedMemoryRanges_test/FlushInvalidateMappedMemoryRanges_test.py
AWoloszyn/vulkan_test_applications
5e9f86cdbd4e2344f41db9e0a578fe9fba41106f
[ "Apache-2.0" ]
null
null
null
gapid_tests/resource_creation_tests/FlushAndInvalidateMappedMemoryRanges_test/FlushInvalidateMappedMemoryRanges_test.py
AWoloszyn/vulkan_test_applications
5e9f86cdbd4e2344f41db9e0a578fe9fba41106f
[ "Apache-2.0" ]
null
null
null
# Copyright 2017 Google Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from gapit_test_framework import gapit_test, require, require_equal
from gapit_test_framework import require_not_equal, little_endian_bytes_to_int
from gapit_test_framework import GapitTest, get_read_offset_function
from gapit_test_framework import get_write_offset_function
from struct_offsets import VulkanStruct, ARRAY, UINT32_T, DEVICE_SIZE
from struct_offsets import CHAR, POINTER, HANDLE
from vulkan_constants import VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE, VK_WHOLE_SIZE

NON_COHERENT_ATOM_SIZE = 256
BUFFER_SIZE = NON_COHERENT_ATOM_SIZE * 2
FLUSH_INVALIDATE_SIZE = NON_COHERENT_ATOM_SIZE
SRC_BUFFER_FLUSH_INVALIDATE_OFFSET = NON_COHERENT_ATOM_SIZE * 2
DST_BUFFER_FLUSH_INVALIDATE_OFFSET = NON_COHERENT_ATOM_SIZE * 5

MAPPED_MEMORY_RANGE = [
    ("sType", UINT32_T),
    ("pNext", POINTER),
    ("memory", HANDLE),
    ("offset", DEVICE_SIZE),
    ("size", DEVICE_SIZE),
]


@gapit_test("FlushAndInvalidateMappedMemoryRanges_test")
class FlushMappedMemoryRangesNonZeroOffsetNotWholeSize(GapitTest):

    def expect(self):
        """Check the arguments and the VkMappedMemoryRange structs used in
        vkFlushMappedMemoryRanges(). The flushed memory starts at offset 768
        and has size 256"""

        MAP_OFFSET = 512
        FLUSH_OFFSET = MAP_OFFSET + 256
        FLUSH_SIZE = 256
        MEMORY_DATA = [("data", ARRAY, FLUSH_SIZE, CHAR)]
        EXPECTED_MEMORY_DATA = [i for i in range(FLUSH_SIZE)]

        architecture = self.architecture
        # The first and second vkMapMemory() result is managed in
        # VulkanApplication and is not used here, the third is the one we need
        # here.
        map_memory = require(self.nth_call_of("vkMapMemory", 3))
        require_not_equal(0, map_memory.hex_ppData)
        # The flushed data starts at mapped offset + 256
        flushed_data_ptr = little_endian_bytes_to_int(require(
            map_memory.get_write_data(map_memory.hex_ppData,
                                      architecture.int_integerSize))) + 256

        # Check arguments
        flush_mapped_memory_ranges = require(self.nth_call_of(
            "vkFlushMappedMemoryRanges", 1))
        require_equal(1, flush_mapped_memory_ranges.int_memoryRangeCount)
        require_not_equal(0, flush_mapped_memory_ranges.hex_pMemoryRanges)

        # Check the memory range struct content
        mapped_memory_range = VulkanStruct(
            architecture, MAPPED_MEMORY_RANGE, get_read_offset_function(
                flush_mapped_memory_ranges,
                flush_mapped_memory_ranges.hex_pMemoryRanges))
        require_equal(VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
                      mapped_memory_range.sType)
        require_equal(0, mapped_memory_range.pNext)
        require_not_equal(0, mapped_memory_range.memory)
        require_equal(FLUSH_OFFSET, mapped_memory_range.offset)
        require_equal(FLUSH_SIZE, mapped_memory_range.size)

        # check the flushed memory data
        memory_data = VulkanStruct(
            architecture, MEMORY_DATA, get_read_offset_function(
                flush_mapped_memory_ranges, flushed_data_ptr))
        require_equal(EXPECTED_MEMORY_DATA, memory_data.data)


@gapit_test("FlushAndInvalidateMappedMemoryRanges_test")
class InvalidateMappedMemoryRangesZeroOffsetWholeSize(GapitTest):

    def expect(self):
        """Check the arguments and the VkMappedMemoryRange structs used in
        vkInvalidateMappedMemoryRanges(). The invalidate memory covers the
        whole dst buffer which starts at offset 0 and has size VK_WHOLE_SIZE.
        Only the second half of the buffer has the data copied from the
        previously flushed memory"""

        MAP_OFFSET = 0
        INVALIDATE_OFFSET = MAP_OFFSET
        INVALIDATE_SIZE = VK_WHOLE_SIZE
        DATA_SIZE = 256
        MEMORY_DATA = [("data", ARRAY, DATA_SIZE, CHAR)]
        EXPECTED_MEMORY_DATA = [i for i in range(DATA_SIZE)]

        architecture = self.architecture
        # The first and second vkMapMemory() result is managed in
        # VulkanApplication and is not used here, the fourth is the one we need
        # here.
        map_memory = require(self.nth_call_of("vkMapMemory", 4))
        require_not_equal(0, map_memory.hex_ppData)
        # The invalidated data offset is equal to the mapped offset, but the
        # flushed data starts at mapped offset + 256
        invalidate_data_ptr = little_endian_bytes_to_int(require(
            map_memory.get_write_data(map_memory.hex_ppData,
                                      architecture.int_integerSize))) + 256

        # Check arguments
        invalidate_mapped_memory_ranges = require(self.nth_call_of(
            "vkInvalidateMappedMemoryRanges", 1))
        require_equal(1, invalidate_mapped_memory_ranges.int_memoryRangeCount)
        require_not_equal(0, invalidate_mapped_memory_ranges.hex_pMemoryRanges)

        # Check the memory range struct contents
        mapped_memory_range = VulkanStruct(
            architecture, MAPPED_MEMORY_RANGE, get_read_offset_function(
                invalidate_mapped_memory_ranges,
                invalidate_mapped_memory_ranges.hex_pMemoryRanges))
        require_equal(VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
                      mapped_memory_range.sType)
        require_equal(0, mapped_memory_range.pNext)
        require_not_equal(0, mapped_memory_range.memory)
        require_equal(INVALIDATE_OFFSET, mapped_memory_range.offset)
        require_equal(INVALIDATE_SIZE, mapped_memory_range.size)

        # Check data of the invalidated memory
        memory_data = VulkanStruct(
            architecture, MEMORY_DATA, get_write_offset_function(
                invalidate_mapped_memory_ranges, invalidate_data_ptr))
        require_equal(EXPECTED_MEMORY_DATA, memory_data.data)


@gapit_test("FlushAndInvalidateMappedMemoryRanges_test")
class FlushMappedMemoryRangesZeroOffsetWholeSize(GapitTest):

    def expect(self):
        """Check the arguments and the VkMappedMemoryRange structs used in
        vkFlushMappedMemoryRanges(). The flushed memory starts at offset 0
        and has size VK_WHOLE_SIZE"""

        MAP_OFFSET = 0
        FLUSH_OFFSET = MAP_OFFSET
        FLUSH_SIZE = VK_WHOLE_SIZE
        DATA_SIZE = 512
        MEMORY_DATA = [("data", ARRAY, DATA_SIZE, CHAR)]
        EXPECTED_MEMORY_DATA = [min(i, 512 - i) & 0xFF for i in range(DATA_SIZE)]

        architecture = self.architecture
        # The first and second vkMapMemory() result is managed in
        # VulkanApplication and is not used here, the fifth is the one we need
        # here.
        map_memory = require(self.nth_call_of("vkMapMemory", 5))
        require_not_equal(0, map_memory.hex_ppData)
        # The flushed data starts at mapped offset
        flushed_data_ptr = little_endian_bytes_to_int(require(
            map_memory.get_write_data(map_memory.hex_ppData,
                                      architecture.int_integerSize)))

        # Check arguments
        flush_mapped_memory_ranges = require(self.nth_call_of(
            "vkFlushMappedMemoryRanges", 1))
        require_equal(1, flush_mapped_memory_ranges.int_memoryRangeCount)
        require_not_equal(0, flush_mapped_memory_ranges.hex_pMemoryRanges)

        # Check the memory range struct content
        mapped_memory_range = VulkanStruct(
            architecture, MAPPED_MEMORY_RANGE, get_read_offset_function(
                flush_mapped_memory_ranges,
                flush_mapped_memory_ranges.hex_pMemoryRanges))
        require_equal(VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
                      mapped_memory_range.sType)
        require_equal(0, mapped_memory_range.pNext)
        require_not_equal(0, mapped_memory_range.memory)
        require_equal(FLUSH_OFFSET, mapped_memory_range.offset)
        require_equal(FLUSH_SIZE, mapped_memory_range.size)

        # check the flushed memory data
        memory_data = VulkanStruct(
            architecture, MEMORY_DATA, get_read_offset_function(
                flush_mapped_memory_ranges, flushed_data_ptr))
        require_equal(EXPECTED_MEMORY_DATA, memory_data.data)


@gapit_test("FlushAndInvalidateMappedMemoryRanges_test")
class InvalidateMappedMemoryRangesNonZeroOffsetNotWholeSize(GapitTest):

    def expect(self):
        """Check the arguments and the VkMappedMemoryRange structs used in
        vkInvalidateMappedMemoryRanges(). The invalidate memory covers the
        second half of the dst buffer which starts at offset 1024 and has
        size 512. Only the second half of the buffer has the data is
        invalidated to be host accessible."""

        MAP_OFFSET = 1024
        INVALIDATE_OFFSET = MAP_OFFSET + 256
        INVALIDATE_SIZE = 256
        MEMORY_DATA = [("data", ARRAY, INVALIDATE_SIZE, CHAR)]
        EXPECTED_MEMORY_DATA = [(512 - i) & 0xFF
                                for i in range(INVALIDATE_SIZE, 512)]

        architecture = self.architecture
        # The first and second vkMapMemory() result is managed in
        # VulkanApplication and is not used here, the sixth is the one we need
        # here.
        map_memory = require(self.nth_call_of("vkMapMemory", 6))
        require_not_equal(0, map_memory.hex_ppData)
        # The invalidated data offset is equal to the mapped offset, but the
        # flushed data starts at mapped offset + 256
        invalidate_data_ptr = little_endian_bytes_to_int(require(
            map_memory.get_write_data(map_memory.hex_ppData,
                                      architecture.int_integerSize))) + 256

        # Check arguments
        invalidate_mapped_memory_ranges = require(self.nth_call_of(
            "vkInvalidateMappedMemoryRanges", 1))
        require_equal(1, invalidate_mapped_memory_ranges.int_memoryRangeCount)
        require_not_equal(0, invalidate_mapped_memory_ranges.hex_pMemoryRanges)

        # Check the memory range struct contents
        mapped_memory_range = VulkanStruct(
            architecture, MAPPED_MEMORY_RANGE, get_read_offset_function(
                invalidate_mapped_memory_ranges,
                invalidate_mapped_memory_ranges.hex_pMemoryRanges))
        require_equal(VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE,
                      mapped_memory_range.sType)
        require_equal(0, mapped_memory_range.pNext)
        require_not_equal(0, mapped_memory_range.memory)
        require_equal(INVALIDATE_OFFSET, mapped_memory_range.offset)
        require_equal(INVALIDATE_SIZE, mapped_memory_range.size)

        # Check data of the invalidated memory
        memory_data = VulkanStruct(
            architecture, MEMORY_DATA, get_write_offset_function(
                invalidate_mapped_memory_ranges, invalidate_data_ptr))
        require_equal(EXPECTED_MEMORY_DATA, memory_data.data)
47.451883
81
0.709549
1365
11341
5.556044
0.132601
0.091772
0.076213
0.025316
0.827136
0.787579
0.764504
0.760944
0.748418
0.748418
0
0.013461
0.233577
11341
238
82
47.651261
0.859066
0.231549
0
0.682119
0
0
0.042061
0.032013
0
0
0.000935
0
0
1
0.02649
false
0
0.046358
0
0.099338
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
c83da4e2bd5e89fbcf4c04cc8979761dddc66092
142
py
Python
users/models.py
tahasama/books
9400183077e8708a5f9365ce2b4622a2086427a0
[ "MIT" ]
null
null
null
users/models.py
tahasama/books
9400183077e8708a5f9365ce2b4622a2086427a0
[ "MIT" ]
null
null
null
users/models.py
tahasama/books
9400183077e8708a5f9365ce2b4622a2086427a0
[ "MIT" ]
null
null
null
from django.db import models
from django.contrib.auth.models import AbstractUser, PermissionsMixin


class CustomUser(AbstractUser):
    pass
20.285714
69
0.816901
17
142
6.823529
0.705882
0.172414
0
0
0
0
0
0
0
0
0
0
0.126761
142
6
70
23.666667
0.935484
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.25
0.5
0
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
6
c073d01245bd5d08142e7c14d44cec1724304262
45
py
Python
miniargs/__init__.py
moskomule/simpleargs
c329e96a37bb4939fbfbd3d5a3007d245811a39d
[ "MIT" ]
4
2018-12-18T04:16:17.000Z
2020-03-07T03:30:57.000Z
miniargs/__init__.py
983632847/miniargs
c329e96a37bb4939fbfbd3d5a3007d245811a39d
[ "MIT" ]
null
null
null
miniargs/__init__.py
983632847/miniargs
c329e96a37bb4939fbfbd3d5a3007d245811a39d
[ "MIT" ]
4
2018-12-18T04:16:29.000Z
2020-03-07T03:31:01.000Z
from miniargs.miniargs import ArgumentParser
22.5
44
0.888889
5
45
8
0.8
0
0
0
0
0
0
0
0
0
0
0
0.088889
45
1
45
45
0.97561
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
c076499e13baab8fd7e45edd63b168d79772e8c5
5305
py
Python
hill_cipher.py
pgandhi03/CiphersInPython
0d417fec3b7e107a5648fe5399060b1c0921ef3e
[ "MIT" ]
18
2019-02-03T16:53:00.000Z
2021-05-23T17:52:24.000Z
hill_cipher.py
pgandhi03/CiphersInPython
0d417fec3b7e107a5648fe5399060b1c0921ef3e
[ "MIT" ]
null
null
null
hill_cipher.py
pgandhi03/CiphersInPython
0d417fec3b7e107a5648fe5399060b1c0921ef3e
[ "MIT" ]
17
2019-02-05T05:44:35.000Z
2022-03-28T16:15:53.000Z
# *********
# -*- Made by VoxelPixel
# -*- For YouTube Tutorial
# -*- https://github.com/VoxelPixel
# -*- Support me on Patreon: https://www.patreon.com/voxelpixel
# *********

import sys
import numpy as np


def cipher_encryption():
    msg = input("Enter message: ").upper()
    msg = msg.replace(" ", "")

    # if message length is odd number, append 0 at the end
    len_chk = 0
    if len(msg) % 2 != 0:
        msg += "0"
        len_chk = 1

    # msg to matrices
    row = 2
    col = int(len(msg)/2)
    msg2d = np.zeros((row, col), dtype=int)

    itr1 = 0
    itr2 = 0
    for i in range(len(msg)):
        if i % 2 == 0:
            msg2d[0][itr1] = int(ord(msg[i])-65)
            itr1 += 1
        else:
            msg2d[1][itr2] = int(ord(msg[i])-65)
            itr2 += 1
    # for

    key = input("Enter 4 letter Key String: ").upper()
    key = key.replace(" ", "")

    # key to 2x2
    key2d = np.zeros((2, 2), dtype=int)
    itr3 = 0
    for i in range(2):
        for j in range(2):
            key2d[i][j] = ord(key[itr3])-65
            itr3 += 1

    # checking validity of the key
    # finding determinant
    deter = key2d[0][0] * key2d[1][1] - key2d[0][1] * key2d[1][0]
    deter = deter % 26

    # finding multiplicative inverse
    mul_inv = -1
    for i in range(26):
        temp_inv = deter * i
        if temp_inv % 26 == 1:
            mul_inv = i
            break
        else:
            continue
    # for

    if mul_inv == -1:
        print("Invalid key")
        sys.exit()
    # if

    encryp_text = ""
    itr_count = int(len(msg)/2)
    if len_chk == 0:
        for i in range(itr_count):
            temp1 = msg2d[0][i] * key2d[0][0] + msg2d[1][i] * key2d[0][1]
            encryp_text += chr((temp1 % 26) + 65)
            temp2 = msg2d[0][i] * key2d[1][0] + msg2d[1][i] * key2d[1][1]
            encryp_text += chr((temp2 % 26) + 65)
        # for
    else:
        for i in range(itr_count-1):
            temp1 = msg2d[0][i] * key2d[0][0] + msg2d[1][i] * key2d[0][1]
            encryp_text += chr((temp1 % 26) + 65)
            temp2 = msg2d[0][i] * key2d[1][0] + msg2d[1][i] * key2d[1][1]
            encryp_text += chr((temp2 % 26) + 65)
        # for
    # if else

    print("Encrypted Text: {}".format(encryp_text))


def cipher_decryption():
    msg = input("Enter message: ").upper()
    msg = msg.replace(" ", "")

    # if message length is odd number, append 0 at the end
    len_chk = 0
    if len(msg) % 2 != 0:
        msg += "0"
        len_chk = 1

    # msg to matrices
    row = 2
    col = int(len(msg) / 2)
    msg2d = np.zeros((row, col), dtype=int)

    itr1 = 0
    itr2 = 0
    for i in range(len(msg)):
        if i % 2 == 0:
            msg2d[0][itr1] = int(ord(msg[i]) - 65)
            itr1 += 1
        else:
            msg2d[1][itr2] = int(ord(msg[i]) - 65)
            itr2 += 1
    # for

    key = input("Enter 4 letter Key String: ").upper()
    key = key.replace(" ", "")

    # key to 2x2
    key2d = np.zeros((2, 2), dtype=int)
    itr3 = 0
    for i in range(2):
        for j in range(2):
            key2d[i][j] = ord(key[itr3]) - 65
            itr3 += 1
    # for

    # finding determinant
    deter = key2d[0][0] * key2d[1][1] - key2d[0][1] * key2d[1][0]
    deter = deter % 26

    # finding multiplicative inverse
    mul_inv = -1
    for i in range(26):
        temp_inv = deter * i
        if temp_inv % 26 == 1:
            mul_inv = i
            break
        else:
            continue
    # for

    # adjugate matrix
    # swapping
    key2d[0][0], key2d[1][1] = key2d[1][1], key2d[0][0]

    # changing signs
    key2d[0][1] *= -1
    key2d[1][0] *= -1

    key2d[0][1] = key2d[0][1] % 26
    key2d[1][0] = key2d[1][0] % 26

    # multiplying multiplicative inverse with adjugate matrix
    for i in range(2):
        for j in range(2):
            key2d[i][j] *= mul_inv

    # modulo
    for i in range(2):
        for j in range(2):
            key2d[i][j] = key2d[i][j] % 26

    # cipher to plain
    decryp_text = ""
    itr_count = int(len(msg) / 2)
    if len_chk == 0:
        for i in range(itr_count):
            temp1 = msg2d[0][i] * key2d[0][0] + msg2d[1][i] * key2d[0][1]
            decryp_text += chr((temp1 % 26) + 65)
            temp2 = msg2d[0][i] * key2d[1][0] + msg2d[1][i] * key2d[1][1]
            decryp_text += chr((temp2 % 26) + 65)
        # for
    else:
        for i in range(itr_count - 1):
            temp1 = msg2d[0][i] * key2d[0][0] + msg2d[1][i] * key2d[0][1]
            decryp_text += chr((temp1 % 26) + 65)
            temp2 = msg2d[0][i] * key2d[1][0] + msg2d[1][i] * key2d[1][1]
            decryp_text += chr((temp2 % 26) + 65)
        # for
    # if else

    print("Decrypted Text: {}".format(decryp_text))


def main():
    choice = int(input("1. Encryption\n2. Decryption\nChoose(1,2): "))
    if choice == 1:
        print("---Encryption---")
        cipher_encryption()
    elif choice == 2:
        print("---Decryption---")
        cipher_decryption()
    else:
        print("Invalid Choice")


if __name__ == "__main__":
    main()
26.525
74
0.469369
730
5305
3.347945
0.150685
0.041735
0.02946
0.05401
0.746727
0.736907
0.736907
0.729133
0.729133
0.720131
0
0.094469
0.369463
5305
199
75
26.658291
0.636173
0.118379
0
0.780303
0
0
0.052679
0.005403
0
0
0
0
0
1
0.022727
false
0
0.015152
0
0.037879
0.045455
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
c08c8f43a351ed5d1c864b5e5c8e2072a9d7b4bc
187
py
Python
backend/model/Associator.py
CiprianBodnar/DeftEval
7afbaa9013873c91e4f209311ed499005080e131
[ "MIT" ]
null
null
null
backend/model/Associator.py
CiprianBodnar/DeftEval
7afbaa9013873c91e4f209311ed499005080e131
[ "MIT" ]
null
null
null
backend/model/Associator.py
CiprianBodnar/DeftEval
7afbaa9013873c91e4f209311ed499005080e131
[ "MIT" ]
1
2019-10-15T15:48:13.000Z
2019-10-15T15:48:13.000Z
class Associator:
    def __init__(self):
        pass

    def associate_definition(self, listOfDefinition):
        pass

    def type_association(self, listOfDefinition):
        pass
18.7
53
0.663102
18
187
6.555556
0.611111
0.118644
0.40678
0
0
0
0
0
0
0
0
0
0.272727
187
9
54
20.777778
0.867647
0
0
0.428571
0
0
0
0
0
0
0
0
0
1
0.428571
false
0.428571
0
0
0.571429
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
6
23d0660a7fb137e26002404851acf84d04c1e331
8769
py
Python
devilry/devilry_cradmin/tests/test_devilry_multiselect2/test_user.py
devilry/devilry-django
9ae28e462dfa4cfee966ebacbca04ade9627e715
[ "BSD-3-Clause" ]
29
2015-01-18T22:56:23.000Z
2020-11-10T21:28:27.000Z
devilry/devilry_cradmin/tests/test_devilry_multiselect2/test_user.py
devilry/devilry-django
9ae28e462dfa4cfee966ebacbca04ade9627e715
[ "BSD-3-Clause" ]
786
2015-01-06T16:10:18.000Z
2022-03-16T11:10:50.000Z
devilry/devilry_cradmin/tests/test_devilry_multiselect2/test_user.py
devilry/devilry-django
9ae28e462dfa4cfee966ebacbca04ade9627e715
[ "BSD-3-Clause" ]
15
2015-04-06T06:18:43.000Z
2021-02-24T12:28:30.000Z
import htmls
import mock
from django import test
from django import forms
from django.conf import settings
from cradmin_legacy import cradmin_testhelpers
from model_bakery import baker

from devilry.devilry_cradmin import devilry_multiselect2


class TestSelectedItem(test.TestCase):
    def test_title_without_fullname(self):
        user = baker.make(settings.AUTH_USER_MODEL,
                          shortname='test@example.com',
                          fullname='')
        selector = htmls.S(devilry_multiselect2.user.SelectedItem(value=user).render())
        self.assertEqual(
            'test@example.com',
            selector.one('.cradmin-legacy-multiselect2-target-selected-item-title').alltext_normalized)

    def test_title_with_fullname(self):
        user = baker.make(settings.AUTH_USER_MODEL,
                          fullname='Test User',
                          shortname='test@example.com')
        selector = htmls.S(devilry_multiselect2.user.SelectedItem(value=user).render())
        self.assertEqual(
            'Test User',
            selector.one('.cradmin-legacy-multiselect2-target-selected-item-title').alltext_normalized)

    def test_description_without_fullname(self):
        user = baker.make(settings.AUTH_USER_MODEL,
                          shortname='test@example.com',
                          fullname='')
        selector = htmls.S(devilry_multiselect2.user.SelectedItem(value=user).render())
        self.assertFalse(
            selector.exists('.cradmin-legacy-multiselect2-target-selected-item-description'))

    def test_description_with_fullname(self):
        user = baker.make(settings.AUTH_USER_MODEL,
                          fullname='Test User',
                          shortname='test@example.com')
        selector = htmls.S(devilry_multiselect2.user.SelectedItem(value=user).render())
        self.assertEqual(
            'test@example.com',
            selector.one('.cradmin-legacy-multiselect2-target-selected-item-description').alltext_normalized)


class TestItemValue(test.TestCase):
    def test_title_without_fullname(self):
        user = baker.make(settings.AUTH_USER_MODEL,
                          shortname='test@example.com',
                          fullname='')
        selector = htmls.S(devilry_multiselect2.user.ItemValue(value=user).render())
        self.assertEqual(
            'test@example.com',
            selector.one('.cradmin-legacy-listbuilder-itemvalue-titledescription-title').alltext_normalized)

    def test_title_with_fullname(self):
        user = baker.make(settings.AUTH_USER_MODEL,
                          fullname='Test User',
                          shortname='test@example.com')
        selector = htmls.S(devilry_multiselect2.user.ItemValue(value=user).render())
        self.assertEqual(
            'Test User',
            selector.one('.cradmin-legacy-listbuilder-itemvalue-titledescription-title').alltext_normalized)

    def test_description_without_fullname(self):
        user = baker.make(settings.AUTH_USER_MODEL,
                          shortname='test@example.com',
                          fullname='')
        selector = htmls.S(devilry_multiselect2.user.ItemValue(value=user).render())
        self.assertFalse(
            selector.exists('.cradmin-legacy-listbuilder-itemvalue-titledescription-description'))

    def test_description_with_fullname(self):
        user = baker.make(settings.AUTH_USER_MODEL,
                          fullname='Test User',
                          shortname='test@example.com')
        selector = htmls.S(devilry_multiselect2.user.ItemValue(value=user).render())
        self.assertEqual(
            'test@example.com',
            selector.one('.cradmin-legacy-listbuilder-itemvalue-titledescription-description').alltext_normalized)


class TestTarget(test.TestCase):
    def test_with_items_title(self):
        selector = htmls.S(devilry_multiselect2.user.Target(form=forms.Form()).render(request=mock.MagicMock()))
        self.assertEqual(
            'Selected users',
            selector.one('.cradmin-legacy-multiselect2-target-title').alltext_normalized)

    def test_without_items_text(self):
        selector = htmls.S(devilry_multiselect2.user.Target(form=forms.Form()).render(request=mock.MagicMock()))
        self.assertEqual(
            'No users selected',
            selector.one('.cradmin-legacy-multiselect2-target-without-items-content').alltext_normalized)


class MockMultiselectUsersView(devilry_multiselect2.user.BaseMultiselectUsersView):
    def get_filterlist_url(self, filters_string):
        return '/{}'.format(filters_string)


class TestBaseMultiselectUsersView(test.TestCase, cradmin_testhelpers.TestCaseMixin):
    viewclass = MockMultiselectUsersView

    def test_render_sanity(self):
        # Only a sanity test - we do not repeat all the tests from TestItemValue
        baker.make(settings.AUTH_USER_MODEL,
                   fullname='Test User',
                   shortname='test@example.com')
        mockresponse = self.mock_http200_getrequest_htmls(requestuser=mock.MagicMock())
        self.assertEqual(
            'Test User',
            mockresponse.selector.one(
                '.cradmin-legacy-listbuilder-itemvalue-titledescription-title').alltext_normalized)
        self.assertEqual(
            'test@example.com',
            mockresponse.selector.one(
                '.cradmin-legacy-listbuilder-itemvalue-titledescription-description').alltext_normalized)

    def __get_titles(self, selector):
        return [element.alltext_normalized
                for element in selector.list('.cradmin-legacy-listbuilder-itemvalue-titledescription-title')]

    def test_ordering(self):
        baker.make(settings.AUTH_USER_MODEL, shortname='userb')
        baker.make(settings.AUTH_USER_MODEL, shortname='usera')
        baker.make(settings.AUTH_USER_MODEL, shortname='userc')
        mockresponse = self.mock_http200_getrequest_htmls(requestuser=mock.MagicMock())
        self.assertEqual(
            ['usera', 'userb', 'userc'],
            self.__get_titles(mockresponse.selector))

    def test_selectall_not_available(self):
        baker.make(settings.AUTH_USER_MODEL)
        mockresponse = self.mock_http200_getrequest_htmls(requestuser=mock.MagicMock())
        self.assertFalse(mockresponse.selector.exists('.cradmin-legacy-multiselect2-listcolumn-buttons .btn'))

    def test_search_shortname(self):
        baker.make(settings.AUTH_USER_MODEL, shortname='userb')
        baker.make(settings.AUTH_USER_MODEL, shortname='usera')
        mockresponse = self.mock_http200_getrequest_htmls(
            requestuser=mock.MagicMock(),
            viewkwargs={'filters_string': 'search-usera'})
        self.assertEqual(
            {'usera'},
            set(self.__get_titles(mockresponse.selector)))

    def test_search_fullname(self):
        baker.make(settings.AUTH_USER_MODEL, fullname='Userb')
        baker.make(settings.AUTH_USER_MODEL, fullname='Usera')
        mockresponse = self.mock_http200_getrequest_htmls(
            requestuser=mock.MagicMock(),
            viewkwargs={'filters_string': 'search-usera'})
        self.assertEqual(
            {'Usera'},
            set(self.__get_titles(mockresponse.selector)))

    def test_search_username(self):
        baker.make('devilry_account.UserName',
                   user__fullname='Test User 1',
                   username='testuser1')
        baker.make('devilry_account.UserName',
                   user__fullname='Test User 2',
                   username='testuser2')
        mockresponse = self.mock_http200_getrequest_htmls(
            requestuser=mock.MagicMock(),
            viewkwargs={'filters_string': 'search-testuser1'})
        self.assertEqual(
            {'Test User 1'},
            set(self.__get_titles(mockresponse.selector)))

    def test_search_useremail(self):
        baker.make('devilry_account.UserEmail',
                   user__fullname='Test User 1',
                   email='testuser1@example.com')
        baker.make('devilry_account.UserEmail',
                   user__fullname='Test User 2',
                   email='testuser2@example.com')
        mockresponse = self.mock_http200_getrequest_htmls(
            requestuser=mock.MagicMock(),
            viewkwargs={'filters_string': 'search-testuser1'})
        self.assertEqual(
            {'Test User 1'},
            set(self.__get_titles(mockresponse.selector)))
44.969231
118
0.632911
856
8769
6.290888
0.129673
0.035097
0.053668
0.066295
0.810585
0.790344
0.758032
0.713649
0.709192
0.65441
0
0.008055
0.26377
8769
194
119
45.201031
0.826053
0.007983
0
0.694611
0
0
0.177762
0.109808
0
0
0
0
0.107784
1
0.113772
false
0
0.047904
0.011976
0.209581
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
23def35a46648df20c9f529870df96553fc388f7
135
py
Python
stdnet/utils/structures.py
stuaxo/python-stdnet
78db5320bdedc3f28c5e4f38cda13a4469e35db7
[ "BSD-3-Clause" ]
61
2015-01-13T23:41:10.000Z
2021-09-14T03:23:59.000Z
stdnet/utils/structures.py
drscream/python-stdnet
78db5320bdedc3f28c5e4f38cda13a4469e35db7
[ "BSD-3-Clause" ]
4
2015-02-28T08:57:14.000Z
2020-12-29T17:14:43.000Z
stdnet/utils/structures.py
drscream/python-stdnet
78db5320bdedc3f28c5e4f38cda13a4469e35db7
[ "BSD-3-Clause" ]
12
2015-01-01T09:23:06.000Z
2021-09-16T08:09:13.000Z
import sys
from collections import *

if sys.version_info < (2, 7):   # pragma nocover
    from .fallbacks._collections import *
22.5
50
0.688889
17
135
5.352941
0.705882
0.373626
0
0
0
0
0
0
0
0
0
0.019231
0.22963
135
5
51
27
0.855769
0.103704
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.75
0
0.75
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
9b01ca1d392db8753d720ba4bf25e6be2db1d977
147
py
Python
src/orqviz/elastic_band/__init__.py
n17/orqviz
76a3f9855515583d9a59ed3f11cef506b4f993af
[ "Apache-2.0" ]
57
2021-11-09T03:21:36.000Z
2022-03-29T08:48:00.000Z
src/orqviz/elastic_band/__init__.py
n17/orqviz
76a3f9855515583d9a59ed3f11cef506b4f993af
[ "Apache-2.0" ]
18
2021-11-09T10:58:40.000Z
2022-03-09T16:19:22.000Z
src/orqviz/elastic_band/__init__.py
n17/orqviz
76a3f9855515583d9a59ed3f11cef506b4f993af
[ "Apache-2.0" ]
8
2021-11-09T11:55:52.000Z
2022-02-07T20:35:37.000Z
from .auto_neb import run_AutoNEB
from .data_structures import Chain, ChainPath
from .neb import run_NEB
from .plots import plot_all_chains_losses
29.4
45
0.85034
24
147
4.916667
0.625
0.152542
0.20339
0
0
0
0
0
0
0
0
0
0.115646
147
4
46
36.75
0.907692
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
7b0e7810f8e57dfd771c8ec5280564f29e8e80d4
24
py
Python
dynamic_systems/identification/__init__.py
eddardd/CrossDomainFaultDetection
83dd24727a8b35cda2549b40166beaf740e14c98
[ "MIT" ]
3
2021-08-30T11:41:36.000Z
2021-12-22T10:45:25.000Z
dynamic_systems/identification/__init__.py
eddardd/CrossDomainFaultDiagnosis
83dd24727a8b35cda2549b40166beaf740e14c98
[ "MIT" ]
1
2021-02-26T06:02:33.000Z
2021-02-26T06:02:33.000Z
dynamic_systems/identification/__init__.py
eddardd/CrossDomainFaultDetection
83dd24727a8b35cda2549b40166beaf740e14c98
[ "MIT" ]
2
2021-06-03T11:46:20.000Z
2022-03-25T09:16:03.000Z
from .foptd import FOPTD
24
24
0.833333
4
24
5
0.75
0
0
0
0
0
0
0
0
0
0
0
0.125
24
1
24
24
0.952381
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
7b119ac390115cc4290ca2e49ef7ead632c8aee1
44
py
Python
holobot/extensions/admin/database/__init__.py
rexor12/holobot
89b7b416403d13ccfeee117ef942426b08d3651d
[ "MIT" ]
1
2021-05-24T00:17:46.000Z
2021-05-24T00:17:46.000Z
holobot/extensions/admin/database/__init__.py
rexor12/holobot
89b7b416403d13ccfeee117ef942426b08d3651d
[ "MIT" ]
41
2021-03-24T22:50:09.000Z
2021-12-17T12:15:13.000Z
holobot/extensions/admin/database/__init__.py
rexor12/holobot
89b7b416403d13ccfeee117ef942426b08d3651d
[ "MIT" ]
null
null
null
from .rules_migration import RulesMigration
22
43
0.886364
5
44
7.6
1
0
0
0
0
0
0
0
0
0
0
0
0.090909
44
1
44
44
0.95
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
9e43bc35e9f9f69c7d98282a7651cd2bee0545c5
156
py
Python
src/common/exceptions.py
alexlykasov/codebuild-notifications-github
f0cad0c3e3b00e7201d79bcec83e1ec40fe8f267
[ "Apache-2.0" ]
null
null
null
src/common/exceptions.py
alexlykasov/codebuild-notifications-github
f0cad0c3e3b00e7201d79bcec83e1ec40fe8f267
[ "Apache-2.0" ]
null
null
null
src/common/exceptions.py
alexlykasov/codebuild-notifications-github
f0cad0c3e3b00e7201d79bcec83e1ec40fe8f267
[ "Apache-2.0" ]
null
null
null
class CodeBuildProjectNotFound(Exception):
    pass


class UnsupportedBuildStatus(Exception):
    pass


class RequiredEnvVarsNotSet(Exception):
    pass
13
42
0.775641
12
156
10.083333
0.5
0.322314
0.297521
0
0
0
0
0
0
0
0
0
0.166667
156
11
43
14.181818
0.930769
0
0
0.5
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
0
1
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
6
7b67749ea586454c10f7fce11456a9c240f613a1
88
py
Python
donutshell/utils/__init__.py
loldonut/donutshell
a53d15b0a4950797e460d38ef82172cca1141fa2
[ "MIT" ]
null
null
null
donutshell/utils/__init__.py
loldonut/donutshell
a53d15b0a4950797e460d38ef82172cca1141fa2
[ "MIT" ]
null
null
null
donutshell/utils/__init__.py
loldonut/donutshell
a53d15b0a4950797e460d38ef82172cca1141fa2
[ "MIT" ]
null
null
null
from .command_not_found import *
from .help_console import *
from .read_config import *
22
32
0.795455
13
88
5.076923
0.692308
0.30303
0
0
0
0
0
0
0
0
0
0
0.136364
88
3
33
29.333333
0.868421
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
c875fb8a22c117ede36d40d71e22345409a1bb4b
12242
py
Python
spec_classes/methods/collections/sets.py
matthewwardrop/spec-classes
d50c9ded426b5becd445c255e45b88ccc8961672
[ "MIT" ]
1
2021-07-10T11:43:16.000Z
2021-07-10T11:43:16.000Z
spec_classes/methods/collections/sets.py
matthewwardrop/spec-classes
d50c9ded426b5becd445c255e45b88ccc8961672
[ "MIT" ]
1
2021-09-02T18:15:10.000Z
2021-09-02T18:15:10.000Z
spec_classes/methods/collections/sets.py
matthewwardrop/spec-classes
d50c9ded426b5becd445c255e45b88ccc8961672
[ "MIT" ]
null
null
null
import functools
from inspect import Parameter
from typing import Any, Callable, Dict, Union

from spec_classes.types import Attr, MISSING
from spec_classes.utils.method_builder import MethodBuilder
from spec_classes.utils.mutation import mutate_attr
from spec_classes.utils.type_checking import type_label

from ..base import AttrMethodDescriptor


def _get_set_item_type(attr_spec):
    """
    Get the type(s) of items for set method signatures.
    """
    item_type = attr_spec.item_type
    if attr_spec.item_spec_key_type:
        item_type = Union[
            attr_spec.item_spec_key_type,
            item_type,
        ]
    return item_type


class WithSetItemMethod(AttrMethodDescriptor):
    """
    The method descriptor/generator for `with_<attr_singular>' for set
    collections.

    The default behavior of this method is to copy the spec-class with an
    additional item added to the collection associated with the singular form
    of the attribute name. For more information refer to the spec-classes
    documentation or the generated method.
    """

    @property
    def method_name(self) -> str:
        return f"with_{self.attr_spec.item_name}"

    @staticmethod
    def with_set_item(
        attr_spec: Attr,
        self,
        _item: Any,
        *,
        _inplace: bool = False,
        _if: bool = True,
        **attrs,
    ) -> Any:
        if not _if:
            return self
        return mutate_attr(
            obj=self,
            attr=attr_spec.name,
            value=(
                attr_spec.get_collection_mutator(self, inplace=_inplace)
                .add_item(
                    item=_item,
                    attrs=attrs,
                )
                .collection
            ),
            inplace=_inplace,
            type_check=False,
        )

    def build_method(self) -> Callable:
        fn_item_type = _get_set_item_type(self.attr_spec)
        return (
            MethodBuilder(
                self.name,
                functools.partial(self.with_set_item, self.attr_spec),
            )
            .with_preamble(
                f"Return a `{self.spec_cls.__name__}` instance identical to this one except with an item added to `{self.attr_spec.name}`."
            )
            .with_arg(
                "_item",
                desc=f"A new `{type_label(self.attr_spec.item_type)}` instance for {self.attr_spec.name}.",
                default=MISSING if self.attr_spec.item_spec_type else Parameter.empty,
                annotation=fn_item_type,
            )
            .with_arg(
                "_inplace",
                desc="Whether to perform change without first copying.",
                default=False,
                kind="keyword_only",
                annotation=bool,
            )
            .with_arg(
                "_if",
                desc="This action is only taken when `_if` is `True`. If it is `False`, this is a no-op.",
                default=True,
                kind="keyword_only",
                annotation=bool,
            )
            .with_spec_attrs_for(
                self.attr_spec.item_spec_type,
                desc_template=f"An optional new value for `{self.attr_spec.item_name}.{{}}`.",
            )
            .with_returns(
                f"A reference to the mutated `{self.spec_cls.__name__}` instance.",
                annotation=self.spec_cls,
            )
            .build()
        )


class UpdateSetItemMethod(AttrMethodDescriptor):
    """
    The method descriptor/generator for `update_<attr_singular>' for set
    collections.

    The default behavior of this method is to copy the spec-class with the
    item associated with the given item in the collection associated with the
    singular form of the attribute name updated with the provided new values.
    For more information refer to the spec-classes documentation or the
    generated method.
    """

    @property
    def method_name(self) -> str:
        return f"update_{self.attr_spec.item_name}"

    @staticmethod
    def update_set_item(
        attr_spec: Attr,
        self,
        _item: Any,
        _new_item: Any,
        *,
        _inplace: bool = False,
        _if: bool = True,
        **attrs: Dict[str, Any],
    ) -> Any:
        if not _if:
            return self
        return mutate_attr(
            obj=self,
            attr=attr_spec.name,
            value=(
                attr_spec.get_collection_mutator(self, inplace=_inplace)
                .add_item(
                    item=_new_item,
                    attrs=attrs,
                    value_or_index=_item,
                    replace=False,
                )
                .collection
            ),
            inplace=_inplace,
            type_check=False,
        )

    def build_method(self) -> Callable:
        fn_item_type = _get_set_item_type(self.attr_spec)
        return (
            MethodBuilder(
                self.name,
                functools.partial(self.update_set_item, self.attr_spec),
            )
            .with_preamble(
                f"Return a `{self.spec_cls.__name__}` instance identical to this one except with an item updated in `{self.attr_spec.name}`."
            )
            .with_arg(
                "_item",
                desc="The value to transform.",
                annotation=fn_item_type,
            )
            .with_arg(
                "_new_item",
                desc="A new item to replace the current item.",
                default=MISSING if self.attr_spec.item_spec_type else Parameter.empty,
                annotation=Callable[[fn_item_type], fn_item_type],
            )
            .with_arg(
                "_inplace",
                desc="Whether to perform change without first copying.",
                default=False,
                kind="keyword_only",
                annotation=bool,
            )
            .with_arg(
                "_if",
                desc="This action is only taken when `_if` is `True`. If it is `False`, this is a no-op.",
                default=True,
                kind="keyword_only",
                annotation=bool,
            )
            .with_spec_attrs_for(
                self.attr_spec.item_spec_type,
                desc_template=f"An optional new value for `{self.attr_spec.item_name}.{{}}`.",
            )
            .with_returns(
                f"A reference to the mutated `{self.spec_cls.__name__}` instance.",
                annotation=self.spec_cls,
            )
            .build()
        )


class TransformSetItemMethod(AttrMethodDescriptor):
    """
    The method descriptor/generator for `transform_<attr_singular>' for set
    collections.

    The default behavior of this method is to copy the spec-class with the
    item associated with the given item in the collection associated with the
    singular form of the attribute name transformed under the provided
    transform. For more information refer to the spec-classes documentation
    or the generated method.
    """

    @property
    def method_name(self) -> str:
        return f"transform_{self.attr_spec.item_name}"

    @staticmethod
    def transform_set_item(
        attr_spec: Attr,
        self,
        _item: Any,
        _transform: Callable[[Any], Any],
        *,
        _inplace: bool = False,
        _if: bool = True,
        **attr_transforms: Dict[str, Callable[[Any], Any]],
    ) -> Any:
        if not _if:
            return self
        return mutate_attr(
            obj=self,
            attr=attr_spec.name,
            value=(
                attr_spec.get_collection_mutator(self, inplace=_inplace)
                .transform_item(
                    item=_item,
                    transform=_transform,
                    attr_transforms=attr_transforms,
                )
                .collection
            ),
            inplace=_inplace,
            type_check=False,
        )

    def build_method(self) -> Callable:
        fn_item_type = _get_set_item_type(self.attr_spec)
        return (
            MethodBuilder(
                self.name,
                functools.partial(self.transform_set_item, self.attr_spec),
            )
            .with_preamble(
                f"Return a `{self.spec_cls.__name__}` instance identical to this one except with an item transformed in `{self.attr_spec.name}`."
            )
            .with_arg(
                "_item",
                desc="The value to transform.",
                annotation=fn_item_type,
            )
            .with_arg(
                "_transform",
                desc="A function that takes the old item as input, and returns the new item.",
                default=MISSING if self.attr_spec.item_spec_type else Parameter.empty,
                annotation=Callable[[fn_item_type], fn_item_type],
            )
            .with_arg(
                "_inplace",
                desc="Whether to perform change without first copying.",
                default=False,
                kind="keyword_only",
                annotation=bool,
            )
            .with_arg(
                "_if",
                desc="This action is only taken when `_if` is `True`. If it is `False`, this is a no-op.",
                default=True,
                kind="keyword_only",
                annotation=bool,
            )
            .with_spec_attrs_for(
                self.attr_spec.item_spec_type,
                desc_template=f"An optional transformer for `{self.attr_spec.item_name}.{{}}`.",
            )
            .with_returns(
                f"A reference to the mutated `{self.spec_cls.__name__}` instance.",
                annotation=self.spec_cls,
            )
            .build()
        )


class WithoutSetItemMethod(AttrMethodDescriptor):
    """
    The method descriptor/generator for `without_<attr_singular>' for set
    collections.

    The default behavior of this method is to copy the spec-class with the
    given item in the collection associated with the singular form of the
    attribute name removed. For more information refer to the spec-classes
    documentation or the generated method.
    """

    @property
    def method_name(self) -> str:
        return f"without_{self.attr_spec.item_name}"

    @staticmethod
    def without_set_item(
        attr_spec: Attr, self, _item: Any, *, _inplace: bool = False, _if: bool = True
    ) -> Any:
        if not _if:
            return self
        return mutate_attr(
            obj=self,
            attr=attr_spec.name,
            value=(
                attr_spec.get_collection_mutator(self, inplace=_inplace)
                .remove_item(_item)
                .collection
            ),
            inplace=_inplace,
            type_check=False,
        )

    def build_method(self) -> Callable:
        fn_item_type = _get_set_item_type(self.attr_spec)
        return (
            MethodBuilder(
                self.name,
                functools.partial(self.without_set_item, self.attr_spec),
            )
            .with_preamble(
                f"Return a `{self.spec_cls.__name__}` instance identical to this one except with an item removed from `{self.attr_spec.name}`."
            )
            .with_arg(
                "_item",
                desc="The value to remove.",
                annotation=fn_item_type,
            )
            .with_arg(
                "_inplace",
                desc="Whether to perform change without first copying.",
                default=False,
                kind="keyword_only",
                annotation=bool,
            )
            .with_arg(
                "_if",
                desc="This action is only taken when `_if` is `True`. If it is `False`, this is a no-op.",
                default=True,
                kind="keyword_only",
                annotation=bool,
            )
            .with_returns(
                f"A reference to the mutated `{self.spec_cls.__name__}` instance.",
                annotation=self.spec_cls,
            )
            .build()
        )


SET_METHODS = [
    WithSetItemMethod,
    UpdateSetItemMethod,
    TransformSetItemMethod,
    WithoutSetItemMethod,
]
32.908602
145
0.545172
1,314
12,242
4.829528
0.11035
0.054207
0.051056
0.035298
0.812323
0.812323
0.780176
0.753546
0.729436
0.72266
0
0
0.373468
12,242
371
146
32.997305
0.827379
0.122774
0
0.663366
0
0.026403
0.190638
0.056927
0
0
0
0
0
1
0.042904
false
0
0.026403
0.013201
0.138614
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
c888c8422179d073644a82a4990066217c0bc2e8
38
py
Python
hocon_validator/__init__.py
chezou/hocon-validator
1eb450a8d36f0dcad2e4fd26622bb587579bf602
[ "Apache-2.0" ]
5
2018-11-11T08:38:58.000Z
2021-02-18T19:37:52.000Z
hocon_validator/__init__.py
chezou/hocon-validator
1eb450a8d36f0dcad2e4fd26622bb587579bf602
[ "Apache-2.0" ]
1
2017-08-16T13:38:31.000Z
2017-08-16T13:38:31.000Z
hocon_validator/__init__.py
chezou/hocon-validator
1eb450a8d36f0dcad2e4fd26622bb587579bf602
[ "Apache-2.0" ]
1
2018-05-29T01:37:18.000Z
2018-05-29T01:37:18.000Z
from .hocon_validator import validate
19
37
0.868421
5
38
6.4
1
0
0
0
0
0
0
0
0
0
0
0
0.105263
38
1
38
38
0.941176
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
c8ac576a4d3295811be28f392daeb1ed055b3378
117
py
Python
www/lib/handlers/not_found_handler.py
cripplet/ipfire-material-design
972dac352a097bbfe556bf4a6cad900c037cca21
[ "MIT" ]
null
null
null
www/lib/handlers/not_found_handler.py
cripplet/ipfire-material-design
972dac352a097bbfe556bf4a6cad900c037cca21
[ "MIT" ]
3
2019-07-13T08:03:22.000Z
2019-07-13T08:06:03.000Z
www/lib/handlers/not_found_handler.py
cripplet/ipfire-material-design
972dac352a097bbfe556bf4a6cad900c037cca21
[ "MIT" ]
null
null
null
import flask
import http


def not_found_handler():
    return flask.Response(
        status=http.HTTPStatus.NOT_FOUND)
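A hedged sketch of how this zero-argument handler could be wired into a Flask app. Flask's register_error_handler passes the raised error to the handler, so a small adapter lambda is used; the import path is hypothetical.

import flask

from not_found_handler import not_found_handler  # hypothetical module path

app = flask.Flask(__name__)

# Flask hands the error object to registered handlers; adapt the
# zero-argument handler with a lambda that discards it.
app.register_error_handler(404, lambda err: not_found_handler())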
14.625
39
0.760684
16
117
5.375
0.6875
0.186047
0
0
0
0
0
0
0
0
0
0
0.162393
117
7
40
16.714286
0.877551
0
0
0
0
0
0
0
0
0
0
0
0
1
0.2
true
0
0.4
0.2
0.8
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
1
1
0
0
6
c8b68aee55d6d14a5b8da63257ca797d20dc8624
35
py
Python
tuple.py
toby20-meet/meet2018y1lab6
aa57062969a867bc242c44ba788ab134510d8470
[ "MIT" ]
null
null
null
tuple.py
toby20-meet/meet2018y1lab6
aa57062969a867bc242c44ba788ab134510d8470
[ "MIT" ]
null
null
null
tuple.py
toby20-meet/meet2018y1lab6
aa57062969a867bc242c44ba788ab134510d8470
[ "MIT" ]
null
null
null
my_tuple = (0, 1, 2)
my_tuple[0] = 3  # raises TypeError: tuples do not support item assignment
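Since the assignment above fails, a short illustration of the working alternatives:

my_tuple = (0, 1, 2)

# Tuples are immutable; build a new one instead.
updated = (3,) + my_tuple[1:]       # (3, 1, 2)

# Or round-trip through a mutable list.
as_list = list(my_tuple)
as_list[0] = 3
updated_via_list = tuple(as_list)   # (3, 1, 2)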
11.666667
18
0.6
9
35
2.111111
0.666667
0.736842
0.842105
0
0
0
0
0
0
0
0
0.172414
0.171429
35
2
19
17.5
0.482759
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
cdc2998a60e830eb36184c38550c1ac92024a2ae
133
py
Python
bitquant/__init__.py
rosspalmer/bitQuant
424745deeef8ee6af6a32f8c508903c2da727f8d
[ "MIT" ]
27
2015-02-22T22:01:27.000Z
2021-06-24T15:38:03.000Z
bitquant/__init__.py
rosspalmer/bitQuant
424745deeef8ee6af6a32f8c508903c2da727f8d
[ "MIT" ]
17
2015-03-05T09:02:38.000Z
2015-08-14T20:21:42.000Z
bitquant/__init__.py
rosspalmer/bitQuant
424745deeef8ee6af6a32f8c508903c2da727f8d
[ "MIT" ]
10
2015-03-04T08:20:44.000Z
2020-03-18T03:24:26.000Z
# -*- coding: utf-8 -*-
from api.clss import api
from sql.setup import setup_sql
from sql.clss import sql
from data.clss import data
22.166667
31
0.744361
24
133
4.083333
0.416667
0.306122
0
0
0
0
0
0
0
0
0
0.008929
0.157895
133
5
32
26.6
0.866071
0.157895
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
cdcec445cd807ed39ce7f568bc04be2ecc4c4ef6
706
py
Python
pygears/sim/log.py
bogdanvuk/pygears
a0b21d445e1d5c89ad66751447b8253536b835ee
[ "MIT" ]
120
2018-04-23T08:29:04.000Z
2022-03-30T14:41:52.000Z
pygears/sim/log.py
FZP1607152286/pygears
a0b21d445e1d5c89ad66751447b8253536b835ee
[ "MIT" ]
12
2019-07-09T17:12:58.000Z
2022-03-18T09:05:10.000Z
pygears/sim/log.py
FZP1607152286/pygears
a0b21d445e1d5c89ad66751447b8253536b835ee
[ "MIT" ]
12
2019-05-10T19:42:08.000Z
2022-03-28T18:26:44.000Z
import logging


def debug(msg, *args, **kwds):
    return logging.getLogger('sim').debug(msg, *args, **kwds)


def info(msg, *args, **kwds):
    return logging.getLogger('sim').info(msg, *args, **kwds)


def warning(msg, *args, **kwds):
    return logging.getLogger('sim').warning(msg, *args, **kwds)


def error(msg, *args, **kwds):
    return logging.getLogger('sim').error(msg, *args, **kwds)


def critical(msg, *args, **kwds):
    return logging.getLogger('sim').critical(msg, *args, **kwds)


def log(level, msg, *args, **kwds):
    return logging.getLogger('sim').log(level, msg, *args, **kwds)


def exception(msg, *args, **kwds):
    return logging.getLogger('sim').exception(msg, *args, **kwds)
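A minimal sketch of using these wrappers: they all route through the named 'sim' logger, so giving that logger (or the root logger) a handler is enough to see output. The import path mirrors the file's location in the repo and is an assumption.

import logging

# Attach a handler and level so the 'sim' logger emits records.
logging.basicConfig(
    level=logging.DEBUG,
    format='%(asctime)s %(name)s %(levelname)s: %(message)s')

from pygears.sim import log as sim_log  # assumed import path

sim_log.info('simulation started at t=%d', 0)
sim_log.warning('queue %s is %d%% full', 'din', 87)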
23.533333
66
0.644476
95
706
4.789474
0.178947
0.215385
0.338462
0.261538
0.613187
0.553846
0.553846
0
0
0
0
0
0.144476
706
29
67
24.344828
0.753311
0
0
0
0
0
0.029745
0
0
0
0
0
0
1
0.466667
false
0
0.066667
0.466667
1
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
6
cdf67e95c611de4b6486b851b7c8489095a47c9d
75
py
Python
py_tdlib/constructors/chat_report_reason_child_abuse.py
Mr-TelegramBot/python-tdlib
2e2d21a742ebcd439971a32357f2d0abd0ce61eb
[ "MIT" ]
24
2018-10-05T13:04:30.000Z
2020-05-12T08:45:34.000Z
py_tdlib/constructors/chat_report_reason_child_abuse.py
MrMahdi313/python-tdlib
2e2d21a742ebcd439971a32357f2d0abd0ce61eb
[ "MIT" ]
3
2019-06-26T07:20:20.000Z
2021-05-24T13:06:56.000Z
py_tdlib/constructors/chat_report_reason_child_abuse.py
MrMahdi313/python-tdlib
2e2d21a742ebcd439971a32357f2d0abd0ce61eb
[ "MIT" ]
5
2018-10-05T14:29:28.000Z
2020-08-11T15:04:10.000Z
from ..factory import Type


class chatReportReasonChildAbuse(Type):
    pass
12.5
39
0.8
8
75
7.5
0.875
0
0
0
0
0
0
0
0
0
0
0
0.133333
75
5
40
15
0.923077
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.333333
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
6
b54a48dd2d406324c11d3415f8942ae7dc5ebdf3
1,125
py
Python
terrascript/pagerduty/r.py
hugovk/python-terrascript
08fe185904a70246822f5cfbdc9e64e9769ec494
[ "BSD-2-Clause" ]
4
2022-02-07T21:08:14.000Z
2022-03-03T04:41:28.000Z
terrascript/pagerduty/r.py
hugovk/python-terrascript
08fe185904a70246822f5cfbdc9e64e9769ec494
[ "BSD-2-Clause" ]
null
null
null
terrascript/pagerduty/r.py
hugovk/python-terrascript
08fe185904a70246822f5cfbdc9e64e9769ec494
[ "BSD-2-Clause" ]
2
2022-02-06T01:49:42.000Z
2022-02-08T14:15:00.000Z
# terrascript/pagerduty/r.py

import terrascript


class pagerduty_addon(terrascript.Resource):
    pass


class pagerduty_escalation_policy(terrascript.Resource):
    pass


class pagerduty_maintenance_window(terrascript.Resource):
    pass


class pagerduty_schedule(terrascript.Resource):
    pass


class pagerduty_service(terrascript.Resource):
    pass


class pagerduty_service_integration(terrascript.Resource):
    pass


class pagerduty_team(terrascript.Resource):
    pass


class pagerduty_team_membership(terrascript.Resource):
    pass


class pagerduty_user(terrascript.Resource):
    pass


class pagerduty_user_contact_method(terrascript.Resource):
    pass


class pagerduty_user_notification_rule(terrascript.Resource):
    pass


class pagerduty_extension(terrascript.Resource):
    pass


class pagerduty_event_rule(terrascript.Resource):
    pass


class pagerduty_ruleset(terrascript.Resource):
    pass


class pagerduty_ruleset_rule(terrascript.Resource):
    pass


class pagerduty_business_service(terrascript.Resource):
    pass


class pagerduty_service_dependency(terrascript.Resource):
    pass
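A hedged sketch of how one of these resource classes could be used, assuming the python-terrascript API in which a Terrascript config collects resources and serializes to Terraform-compatible JSON. The resource name and attributes below are illustrative assumptions.

import terrascript
import terrascript.pagerduty.r as pagerduty

config = terrascript.Terrascript()
# Resource subclasses take a Terraform resource name plus attributes.
config += pagerduty.pagerduty_team('platform', name='Platform Team')

print(str(config))  # emits the Terraform JSON document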
15.84507
61
0.796444
120
1,125
7.225
0.233333
0.27451
0.45098
0.516724
0.768166
0.597463
0.117647
0
0
0
0
0
0.139556
1,125
70
62
16.071429
0.895661
0.023111
0
0.485714
0
0
0
0
0
0
0
0
0
1
0
true
0.485714
0.028571
0
0.514286
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
1
0
0
6
b55a43aad6496daa1da9d49f32be4e6b21218767
340
py
Python
travel/home/views.py
Alijeyrad/django_practice
da5918b9283471706378e32885943e4e829758d5
[ "MIT" ]
null
null
null
travel/home/views.py
Alijeyrad/django_practice
da5918b9283471706378e32885943e4e829758d5
[ "MIT" ]
null
null
null
travel/home/views.py
Alijeyrad/django_practice
da5918b9283471706378e32885943e4e829758d5
[ "MIT" ]
null
null
null
from django.shortcuts import render

# Create your views here.

def index(request):
    return render(request, 'home/index.html')


def about(request):
    return render(request, 'home/about.html')


def contact(request):
    return render(request, 'home/contact.html')


def elements(request):
    return render(request, 'home/elements.html')
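A minimal sketch of the URLconf these views imply, using Django's standard path() routing; the route names are assumptions, not taken from the repo.

from django.urls import path

from . import views

urlpatterns = [
    path('', views.index, name='index'),
    path('about/', views.about, name='about'),
    path('contact/', views.contact, name='contact'),
    path('elements/', views.elements, name='elements'),
]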
22.666667
48
0.729412
45
340
5.511111
0.4
0.209677
0.306452
0.419355
0.483871
0
0
0
0
0
0
0
0.144118
340
15
48
22.666667
0.852234
0.067647
0
0
0
0
0.205696
0
0
0
0
0
0
1
0.444444
false
0
0.111111
0.444444
1
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
6
8d1d9982c9eea2c6279eff5b5961c3daf7fce110
17,859
py
Python
server/apps/streamnote/tests/test_note_api.py
iotile/iotile_cloud
9dc65ac86d3a730bba42108ed7d9bbb963d22ba6
[ "MIT" ]
null
null
null
server/apps/streamnote/tests/test_note_api.py
iotile/iotile_cloud
9dc65ac86d3a730bba42108ed7d9bbb963d22ba6
[ "MIT" ]
null
null
null
server/apps/streamnote/tests/test_note_api.py
iotile/iotile_cloud
9dc65ac86d3a730bba42108ed7d9bbb963d22ba6
[ "MIT" ]
null
null
null
import json

from django.contrib.auth import get_user_model
from django.utils import timezone

from rest_framework import status
from rest_framework.test import APITestCase

from apps.streamfilter.models import *
from apps.utils.test_util import TestMixin
from apps.vartype.models import VarType

from ..models import *

user_model = get_user_model()


class StreamNoteAPITests(TestMixin, APITestCase):

    def setUp(self):
        self.usersTestSetup()
        self.orgTestSetup()
        self.deviceTemplateTestSetup()
        self.v1 = StreamVariable.objects.create_variable(
            name='Var A', project=self.p1, created_by=self.u2, lid=1,
        )
        self.v2 = StreamVariable.objects.create_variable(
            name='Var B', project=self.p2, created_by=self.u3, lid=2,
        )
        self.pd1 = Device.objects.create_device(project=self.p1, label='d1',
                                                template=self.dt1, created_by=self.u2)
        self.pd2 = Device.objects.create_device(project=self.p2, label='d2',
                                                template=self.dt1, created_by=self.u3)
        StreamId.objects.create_after_new_device(self.pd1)
        StreamId.objects.create_after_new_device(self.pd2)
        self.s1 = StreamId.objects.filter(variable=self.v1).first()
        self.s2 = StreamId.objects.filter(variable=self.v2).first()
        self.var_type = VarType.objects.create(
            name='Accelerometer',
            storage_units_full='Object',
            created_by=self.u1
        )
        if cache:
            cache.clear()

    def tearDown(self):
        S3File.objects.all().delete()
        StreamNote.objects.all().delete()
        StreamId.objects.all().delete()
        StreamVariable.objects.all().delete()
        Device.objects.all().delete()
        self.deviceTemplateTestTearDown()
        self.orgTestTearDown()
        self.userTestTearDown()

    def testBasicGet(self):
        n1 = StreamNote.objects.create(
            target_slug=self.s1.slug,
            timestamp=timezone.now(),
            note='Note 1',
            created_by=self.u2
        )
        n2 = StreamNote.objects.create(
            target_slug=self.pd1.slug,
            timestamp=timezone.now(),
            note='Note 2',
            created_by=self.u1,
            type='sc'
        )
        list_url = reverse('streamnote-list')
        detail_url1 = reverse('streamnote-detail', kwargs={'pk': n1.id})
        detail_url2 = reverse('streamnote-detail', kwargs={'pk': n2.id})

        response = self.client.get(list_url, format='json')
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
        response = self.client.get(detail_url1, format='json')
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
        response = self.client.get(detail_url2, format='json')
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)

        ok = self.client.login(email='user1@foo.com', password='pass')
        self.assertTrue(ok)

        response = self.client.get(list_url, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

        response = self.client.get(list_url + '?target={}'.format(self.s1.slug), format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        deserialized = json.loads(response.content.decode())
        self.assertEqual(deserialized['count'], 1)
        self.assertEqual(deserialized['results'][0]['note'], 'Note 1')
        self.assertEqual(deserialized['results'][0]['user_info']['slug'], self.u2.slug)

        response = self.client.get(list_url + '?target={}'.format(self.pd1.slug), format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        response = self.client.get(detail_url1, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        deserialized = json.loads(response.content.decode())
        self.assertEqual(deserialized['note'], 'Note 1')
        self.assertEqual(deserialized['type'], 'ui')
        self.assertEqual(deserialized['user_info']['slug'], self.u2.slug)

        response = self.client.get(detail_url2, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        deserialized = json.loads(response.content.decode())
        self.assertEqual(deserialized['note'], 'Note 2')
        self.assertEqual(deserialized['type'], 'sc')
        self.assertEqual(deserialized['user_info']['slug'], self.u1.slug)

        self.client.logout()

        ok = self.client.login(email='user2@foo.com', password='pass')
        self.assertTrue(ok)
        self.assertTrue(self.o2.has_permission(self.u2, 'can_read_notes'))

        response = self.client.get(list_url + '?target={}'.format(self.s1.slug), format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        deserialized = json.loads(response.content.decode())
        self.assertEqual(deserialized['count'], 1)

        response = self.client.get(detail_url1, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        deserialized = json.loads(response.content.decode())
        self.assertEqual(deserialized['note'], 'Note 1')
        self.assertEqual(deserialized['user_info']['slug'], self.u2.slug)

        response = self.client.get(detail_url2, format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)

        self.client.logout()

        ok = self.client.login(email='user3@foo.com', password='pass')
        self.assertTrue(ok)

        response = self.client.get(list_url, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

        response = self.client.get(detail_url1, format='json')
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

        response = self.client.get(detail_url2, format='json')
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

        membership = self.p1.org.register_user(self.u3, role='m1')
        membership.permissions['can_read_notes'] = False
        membership.save()

        response = self.client.get(list_url, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

        response = self.client.get(detail_url2, format='json')
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

        self.client.logout()

    def testGetLastN(self):
        n1 = StreamNote.objects.create(
            target_slug=self.pd1.slug,
            timestamp=timezone.now(),
            note='Note 1',
            created_by=self.u2
        )
        n2 = StreamNote.objects.create(
            target_slug=self.pd1.slug,
            timestamp=timezone.now(),
            note='Note 2',
            created_by=self.u1,
            type='sc'
        )
        n3 = StreamNote.objects.create(
            target_slug=self.pd1.slug,
            timestamp=timezone.now(),
            note='Note 3',
            created_by=self.u1,
            type='sc'
        )
        url = reverse('streamnote-list')

        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)

        ok = self.client.login(email='user1@foo.com', password='pass')
        self.assertTrue(ok)

        response = self.client.get(url + '?target={}'.format(self.pd1.slug), format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        deserialized = json.loads(response.content.decode())
        self.assertEqual(deserialized['count'], 3)
        self.assertEqual(deserialized['results'][0]['note'], 'Note 1')

        response = self.client.get(url + '?target={}&lastn=1'.format(self.pd1.slug), format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        deserialized = json.loads(response.content.decode())
        self.assertEqual(deserialized['count'], 1)
        self.assertEqual(deserialized['results'][0]['note'], 'Note 3')

        response = self.client.get(url + '?target={}&lastn=2'.format(self.pd1.slug), format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        deserialized = json.loads(response.content.decode())
        self.assertEqual(deserialized['count'], 2)
        self.assertEqual(deserialized['results'][0]['note'], 'Note 2')
        self.assertEqual(deserialized['results'][1]['note'], 'Note 3')

        response = self.client.get(url + '?target={}&lastn=-1'.format(self.pd1.slug), format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

        response = self.client.get(url + '?target={}&lastn=20000000000'.format(self.pd1.slug), format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

        self.client.logout()

    def testGetWithFilter(self):
        n1 = StreamNote.objects.create(
            target_slug=self.pd1.slug,
            timestamp=timezone.now(),
            note='Note 1',
            created_by=self.u2
        )
        n2 = StreamNote.objects.create(
            target_slug=self.pd1.slug,
            timestamp=timezone.now(),
            note='Note 2',
            created_by=self.u1,
            type='sc'
        )
        n3 = StreamNote.objects.create(
            target_slug=self.pd1.slug,
            timestamp=timezone.now(),
            note='Note 3',
            created_by=self.u1,
            type='sc'
        )
        n4 = StreamNote.objects.create(
            target_slug=self.pd1.slug,
            timestamp=timezone.now(),
            note='Note 4',
            created_by=self.u1,
            type='sc'
        )
        n5 = StreamNote.objects.create(
            target_slug=self.s1.slug,
            timestamp=timezone.now(),
            note='Note 5',
            created_by=self.u1,
            type='sc'
        )
        url = reverse('streamnote-list')

        ok = self.client.login(email='user1@foo.com', password='pass')
        self.assertTrue(ok)

        response = self.client.get(url, format='json')
        self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)

        response = self.client.get(url + '?target={}'.format(self.pd1.slug), format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        deserialized = json.loads(response.content.decode())
        self.assertEqual(deserialized['count'], 4)

        response = self.client.get(url + '?target={}'.format(self.s1.slug), format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        deserialized = json.loads(response.content.decode())
        self.assertEqual(deserialized['count'], 1)

        response = self.client.get(url + '?target={0}&id_min={1}'.format(self.pd1.slug, n1.id), format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        deserialized = json.loads(response.content.decode())
        self.assertEqual(deserialized['count'], 4)

        response = self.client.get(url + '?target={0}&id_min={1}&id_max={1}'.format(self.pd1.slug, n1.id), format='json')
        self.assertEqual(response.status_code, status.HTTP_200_OK)
        deserialized = json.loads(response.content.decode())
        self.assertEqual(deserialized['count'], 1)

        self.client.logout()

    def testBasicPost(self):
        url = reverse('streamnote-list')
        payload = {
            'target': self.s1.slug,
            'timestamp': timezone.now(),
            'note': 'This is my first node'
        }

        response = self.client.post(url, payload, format='json')
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)

        ok = self.client.login(email='user3@foo.com', password='pass')
        self.assertTrue(ok)

        response = self.client.post(url, payload, format='json')
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

        self.client.logout()

        ok = self.client.login(email='user1@foo.com', password='pass')
        self.assertTrue(ok)

        response = self.client.post(url, payload, format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(StreamNote.objects.count(), 1)
        deserialized = json.loads(response.content.decode())
        self.assertTrue('id' in deserialized)
        self.assertEqual(deserialized['target'], self.s1.slug)
        self.assertTrue('user_info' in deserialized)
        self.assertEqual(deserialized['user_info']['slug'], self.u1.slug)

        self.client.logout()

        ok = self.client.login(email='user2@foo.com', password='pass')
        self.assertTrue(ok)

        payload = {
            'target': self.s1.slug,
            'timestamp': timezone.now(),
            'note': 'This is my second node'
        }

        response = self.client.post(url, payload, format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(StreamNote.objects.count(), 2)
        deserialized = json.loads(response.content.decode())
        self.assertEqual(deserialized['target'], self.s1.slug)

        self.client.logout()

        ok = self.client.login(email='user3@foo.com', password='pass')
        self.assertTrue(ok)

        response = self.client.post(url, payload, format='json')
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

        self.p1.org.register_user(self.u3, role='r1')

        payload = {
            'target': self.s1.slug,
            'timestamp': timezone.now(),
            'note': 'This is my third node (operator)'
        }

        response = self.client.post(url, payload, format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(StreamNote.objects.count(), 3)

        self.client.logout()

    def testMultiPost(self):
        url = reverse('streamnote-list')
        payload = [
            {
                'target': self.s1.slug,
                'timestamp': timezone.now(),
                'note': 'This is my first node'
            },
            {
                'target': self.s1.slug,
                'timestamp': timezone.now(),
                'note': 'This is my second node'
            }
        ]

        ok = self.client.login(email='user1@foo.com', password='pass')
        self.assertTrue(ok)

        response = self.client.post(url, payload, format='json')
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(StreamNote.objects.count(), 2)
        deserialized = json.loads(response.content.decode())
        self.assertEqual(deserialized['count'], 2)

        self.client.logout()

    def testS3FileAttachUrl(self):
        """
        Ensure we can create a new s3file with the script
        """
        n1 = StreamNote.objects.create(
            target_slug=self.s1.slug,
            timestamp=timezone.now(),
            note='Note 1',
            created_by=self.u2
        )
        url = reverse('streamnote-uploadurl', kwargs={'pk': n1.pk})
        payload = {
            'name': 'deleteme.png'
        }

        response = self.client.post(url, payload, format='json')
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)

        ok = self.client.login(email='user2@foo.com', password='pass')
        self.assertTrue(ok)

        response = self.client.post(url, payload, format='json')
        self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
        self.assertEqual(StreamNote.objects.count(), 1)
        self.assertEqual(S3File.objects.count(), 0)
        deserialized = json.loads(response.content.decode())
        self.assertTrue('url' in deserialized)
        self.assertTrue('uuid' in deserialized)
        self.assertTrue('fields' in deserialized)
        self.assertTrue('acl' in deserialized['fields'])
        self.assertEqual(deserialized['fields']['acl'], 'private')
        self.assertEqual(deserialized['fields']['x-amz-meta-filename'], 'deleteme.png')
        self.assertEqual(deserialized['fields']['x-amz-meta-type'], 'note')

        """
        import os
        import requests
        test_filename = __file__
        self.assertTrue(os.path.isfile(test_filename))
        with open(test_filename, 'r') as fp:
            files = {"file": fp}
            response = requests.post(deserialized["url"], data=deserialized["fields"], files=files)
            print(response)
        """

        self.client.logout()

    def testS3FileAttachSuccess(self):
        """
        Ensure we can create a new s3file with the script
        """
        n1 = StreamNote.objects.create(
            target_slug=self.s1.slug,
            timestamp=timezone.now(),
            note='Note 1',
            created_by=self.u2
        )
        url = reverse('streamnote-uploadsuccess', kwargs={'pk': n1.pk})
        payload = {
            'name': 'deleteme.png',
            'uuid': 'a470cd29-915f-4cfb-97de-ea378c46d51c'
        }

        response = self.client.post(url, payload, format='json')
        self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)

        ok = self.client.login(email='user2@foo.com', password='pass')
        self.assertTrue(ok)

        response = self.client.post(url, payload, format='json')
        self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
        self.assertEqual(StreamNote.objects.count(), 1)
        self.assertEqual(S3File.objects.count(), 1)
        deserialized = json.loads(response.content.decode())
        self.assertEqual(deserialized['id'], payload['uuid'])

        self.client.logout()

        ok = self.client.login(email='user3@foo.com', password='pass')
        self.assertTrue(ok)

        response = self.client.post(url, payload, format='json')
        self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)

        self.p1.org.register_user(self.u3, role='r1')

        payload = {
            'name': 'deleteme.png',
            'uuid': 'a470cd29-915f-0000-97de-ea378c461234'
        }

        response = self.client.post(url, payload, format='json')
        self.assertEqual(response.status_code, status.HTTP_202_ACCEPTED)
        self.assertEqual(S3File.objects.count(), 2)
        deserialized = json.loads(response.content.decode())
        self.assertEqual(deserialized['id'], payload['uuid'])

        self.client.logout()
42.420428
119
0.646285
2,111
17,859
5.363809
0.102321
0.107304
0.065177
0.088316
0.845536
0.83061
0.801996
0.779829
0.755718
0.755012
0
0.025519
0.212274
17,859
420
120
42.521429
0.779357
0.005543
0
0.628483
0
0
0.093208
0.010312
0
0
0
0
0.312694
1
0.027864
false
0.040248
0.027864
0
0.058824
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
8d56322e420a9b87c08c3b68f54ac1cc60fe613c
42
py
Python
syfertext/data/readers/__init__.py
Dat-Boi-Arjun/SyferText
8df9c4807501b613df79773b391ded22c101e0b0
[ "Apache-2.0" ]
204
2019-10-18T15:49:15.000Z
2022-02-16T12:34:29.000Z
syfertext/data/readers/__init__.py
Dat-Boi-Arjun/SyferText
8df9c4807501b613df79773b391ded22c101e0b0
[ "Apache-2.0" ]
162
2019-12-16T21:03:17.000Z
2021-05-22T19:40:37.000Z
syfertext/data/readers/__init__.py
Dat-Boi-Arjun/SyferText
8df9c4807501b613df79773b391ded22c101e0b0
[ "Apache-2.0" ]
82
2020-01-18T14:15:29.000Z
2021-04-30T09:48:27.000Z
from .language_modeling import TextReader
21
41
0.880952
5
42
7.2
1
0
0
0
0
0
0
0
0
0
0
0
0.095238
42
1
42
42
0.947368
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
8da1f174c319060662a0a56587e4d520273a6155
281
py
Python
fohCreateThreadInProcessForIdAndAddress.py
SkyLined/mWindowsAPI
d64d57bbf87d2a7b33cf7de89263553793484a84
[ "CC-BY-4.0" ]
7
2017-10-09T14:32:22.000Z
2021-01-30T07:25:50.000Z
fohCreateThreadInProcessForIdAndAddress.py
SkyLined/mWindowsAPI
d64d57bbf87d2a7b33cf7de89263553793484a84
[ "CC-BY-4.0" ]
2
2017-12-12T02:53:18.000Z
2019-02-19T09:23:18.000Z
fohCreateThreadInProcessForIdAndAddress.py
SkyLined/mWindowsAPI
d64d57bbf87d2a7b33cf7de89263553793484a84
[ "CC-BY-4.0" ]
1
2017-12-12T02:42:18.000Z
2017-12-12T02:42:18.000Z
from .ftohuCreateThreadForProcessIdAndAddress import ftohuCreateThreadForProcessIdAndAddress;

def fohCreateThreadForProcessIdAndAddress(*txArguments, **dxArguments):
  (ohThread, uThreadId) = ftohuCreateThreadForProcessIdAndAddress(*txArguments, **dxArguments);
  return ohThread;
56.2
95
0.86121
15
281
16.133333
0.666667
0.181818
0
0
0
0
0
0
0
0
0
0
0.064057
281
5
96
56.2
0.920152
0
0
0
0
0
0
0
0
0
0
0
0
1
0.25
true
0
0.25
0
0.75
0
1
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
0
1
0
0
6
a5f8d8c57ed6f8f4cf0ba51dd7c760a9d9354fb6
202
py
Python
squid/core/admin.py
DjangoNYC/squid
e9776df722d6c4d8e43738c053c610475f73f0db
[ "MIT" ]
null
null
null
squid/core/admin.py
DjangoNYC/squid
e9776df722d6c4d8e43738c053c610475f73f0db
[ "MIT" ]
null
null
null
squid/core/admin.py
DjangoNYC/squid
e9776df722d6c4d8e43738c053c610475f73f0db
[ "MIT" ]
null
null
null
from django.contrib import admin

from .models import Member, Event, Venue, MemberRSVP

admin.site.register(Member)
admin.site.register(Event)
admin.site.register(Venue)
admin.site.register(MemberRSVP)
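An equivalent registration style, for comparison, using Django's admin.register decorator. The ModelAdmin and its list_display field are illustrative assumptions, not code from this repo.

from django.contrib import admin

from .models import Member


@admin.register(Member)
class MemberAdmin(admin.ModelAdmin):
    list_display = ('id',)  # assumed field; adjust to the real model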
22.444444
52
0.811881
28
202
5.857143
0.428571
0.219512
0.414634
0
0
0
0
0
0
0
0
0
0.084158
202
8
53
25.25
0.886486
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
6
574a0e4bcf031f1727caaad1550885dec0c0c961
2,036
py
Python
spareparts/lib/colors.py
recs12/spareparts
6dd66c2992e4fbe598a9e30b065f5752f8ff6c1c
[ "Apache-2.0", "MIT" ]
null
null
null
spareparts/lib/colors.py
recs12/spareparts
6dd66c2992e4fbe598a9e30b065f5752f8ff6c1c
[ "Apache-2.0", "MIT" ]
18
2020-03-25T19:55:19.000Z
2020-05-27T19:01:26.000Z
spareparts/lib/colors.py
recs12/spareparts
6dd66c2992e4fbe598a9e30b065f5752f8ff6c1c
[ "Apache-2.0", "MIT" ]
null
null
null
import functools


class Colors:

    def electric(prp1, color):
        def _outer_wrapper(wrapped_function):
            @functools.wraps(wrapped_function)
            def _wrapper(*args, **kwargs):
                d, s = wrapped_function(*args, **kwargs)
                targeted_index = d.index[d.prp1.isin(prp1)].tolist()
                for row in targeted_index:
                    # +2 compensates for Excel's 1-based rows and the header row
                    cellule = f"A{row+2}:U{row+2}"
                    s.range(cellule).color = color
                return (d, s)
            return _wrapper
        return _outer_wrapper

    def obsolete(color):
        def _outer_wrapper(wrapped_function):
            @functools.wraps(wrapped_function)
            def _wrapper(*args, **kwargs):
                d, s = wrapped_function(*args, **kwargs)
                targeted_index = d.index[d.ST.isin(["O", "U"])].tolist()
                for row in targeted_index:
                    # +2 compensates for Excel's 1-based rows and the header row
                    cellule = f"J{row+2}"
                    s.range(cellule).color = color
                return (d, s)
            return _wrapper
        return _outer_wrapper

    def meter_foot(color):
        def _outer_wrapper(wrapped_function):
            @functools.wraps(wrapped_function)
            def _wrapper(*args, **kwargs):
                d, s = wrapped_function(*args, **kwargs)
                targeted_index = d.index[d.UOM.isin(["MT", "FT", "RL", "SF"])].tolist()
                for row in targeted_index:
                    # +2 compensates for Excel's 1-based rows and the header row
                    cellule = f"I{row+2}"
                    s.range(cellule).color = color
                return (d, s)
            return _wrapper
        return _outer_wrapper

    electric = staticmethod(electric)
    obsolete = staticmethod(obsolete)
    meter_foot = staticmethod(meter_foot)
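A hedged usage sketch: the (DataFrame, sheet) pair and the sheet.range(...).color assignment suggest an xlwings-style worksheet. The load_report function, RED constant, and the presence of a prp1 column in the report are all illustrative assumptions.

import pandas as pd
import xlwings as xw

RED = (255, 0, 0)


@Colors.electric(prp1=["electrical"], color=RED)
def load_report(path):
    # Return the (DataFrame, worksheet) pair the decorators expect;
    # the DataFrame is assumed to carry a 'prp1' column.
    df = pd.read_excel(path)
    sheet = xw.Book(path).sheets[0]
    return df, sheet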
34.508475
87
0.514735
213
2,036
4.751174
0.234742
0.133399
0.038538
0.059289
0.815217
0.815217
0.815217
0.815217
0.711462
0.711462
0
0.008032
0.388507
2,036
58
88
35.103448
0.804819
0.073183
0
0.638298
0
0
0.022836
0
0
0
0
0
0
1
0.191489
false
0
0.021277
0
0.489362
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
93a1d968a38b1a1f17add4e2b5c4c45c2869df78
21
py
Python
model/mlp/dbm/__init__.py
remmyzen/nqs-tensorflow2
2af5d5ebb108eac4d2daa5082bdef11c8107bd1b
[ "MIT" ]
4
2021-07-29T17:52:54.000Z
2022-02-15T06:32:15.000Z
model/mlp/dbm/__init__.py
remmyzen/nqs-tensorflow2
2af5d5ebb108eac4d2daa5082bdef11c8107bd1b
[ "MIT" ]
null
null
null
model/mlp/dbm/__init__.py
remmyzen/nqs-tensorflow2
2af5d5ebb108eac4d2daa5082bdef11c8107bd1b
[ "MIT" ]
null
null
null
from .dbm import DBM
10.5
20
0.761905
4
21
4
0.75
0
0
0
0
0
0
0
0
0
0
0
0.190476
21
1
21
21
0.941176
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
93d5d780bcc5ed64d7215cba088fc100f3ce6870
899
py
Python
pytools_sample_#5.py
gnokem/blog-code
ba3bf3be939d91a808910f2d5407b350237f93e7
[ "MIT" ]
null
null
null
pytools_sample_#5.py
gnokem/blog-code
ba3bf3be939d91a808910f2d5407b350237f93e7
[ "MIT" ]
null
null
null
pytools_sample_#5.py
gnokem/blog-code
ba3bf3be939d91a808910f2d5407b350237f93e7
[ "MIT" ]
null
null
null
import os, sys;

def F(x): # this function factors a number x into its prime factors
    r=[];
    i=2;
    while x>1:
        while (x % i)==0:
            r+=[i];
            x/=i;
        i+=1;
    return r;

print "prime factors of 100 are: ",F(100);
print "prime factors of 1024 are: ",F(1024);
print "prime factors of 1789 are: ",F(1789);
print "prime factors of 2013 are: ",F(2013);
print "prime factors of 11204243 are: ",F(11204243);
print "prime factors of 112042431 are: ",F(112042431);
print "prime factors of 1120424311 are: ",F(1120424311);

# output follows:
# prime factors of 100 are: [2, 2, 5, 5]
# prime factors of 1024 are: [2, 2, 2, 2, 2, 2, 2, 2, 2, 2]
# prime factors of 1789 are: [1789]
# prime factors of 2013 are: [3, 11, 61]
# prime factors of 11204243 are: [19, 23, 25639]
# prime factors of 112042431 are: [3, 3, 101, 123259]
# prime factors of 1120424311 are: [1120424311]
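The file above is Python 2 (print statements, classic integer division). The same trial-division factorization in Python 3, as a sketch: floor division keeps the factors exact where the original relied on Python 2's integer `/`.

def prime_factors(x):
    factors = []
    i = 2
    while x > 1:
        while x % i == 0:
            factors.append(i)
            x //= i  # floor division: stay in integers
        i += 1
    return factors


print("prime factors of 2013 are:", prime_factors(2013))  # [3, 11, 61]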
29.966667
68
0.628476
153
899
3.69281
0.27451
0.318584
0.346903
0.235398
0.587611
0.017699
0.017699
0.017699
0.017699
0
0
0.263309
0.226919
899
29
69
31
0.54964
0.437152
0
0
0
0
0.43469
0
0
0
0
0
0
0
null
null
0
0.058824
null
null
0.411765
0
0
0
null
1
1
1
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
1
0
6
9e16c4d5033f921c698febe768f1c769e05e2e14
14,602
py
Python
gala/coordinates/tests/test_velocity_frame_transforms.py
ltlancas/gala
2621bb599d67e74a85446abf72d5930ef70ca181
[ "MIT" ]
1
2021-10-14T03:36:15.000Z
2021-10-14T03:36:15.000Z
gala/coordinates/tests/test_velocity_frame_transforms.py
ltlancas/gala
2621bb599d67e74a85446abf72d5930ef70ca181
[ "MIT" ]
null
null
null
gala/coordinates/tests/test_velocity_frame_transforms.py
ltlancas/gala
2621bb599d67e74a85446abf72d5930ef70ca181
[ "MIT" ]
null
null
null
# coding: utf-8

""" Test conversions in core.py """

from __future__ import absolute_import, division, print_function

# Standard library
import tempfile

# Third-party
import astropy.coordinates as coord
import astropy.units as u
from astropy.utils.data import get_pkg_data_filename
from astropy.tests.helper import quantity_allclose
import numpy as np

# This package
from ..velocity_frame_transforms import (vgal_to_hel, vhel_to_gal,
                                         vgsr_to_vhel, vhel_to_vgsr)


def test_vgsr_to_vhel():
    filename = get_pkg_data_filename('idl_vgsr_vhel.txt')
    data = np.genfromtxt(filename, names=True, skip_header=2)

    # one row
    row = data[0]
    l = coord.Angle(row["lon"] * u.degree)
    b = coord.Angle(row["lat"] * u.degree)
    c = coord.Galactic(l, b)
    vgsr = row["vgsr"] * u.km/u.s
    vlsr = [row["vx"], row["vy"], row["vz"]]*u.km/u.s  # this is right
    vcirc = row["vcirc"]*u.km/u.s
    vsun = vlsr + [0, 1, 0]*vcirc

    vhel = vgsr_to_vhel(c, vgsr, vsun=vsun)
    np.testing.assert_almost_equal(vhel.value, row['vhelio'], decimal=4)

    # now check still get right answer passing in ICRS coordinates
    vhel = vgsr_to_vhel(c.transform_to(coord.ICRS), vgsr, vsun=vsun)
    np.testing.assert_almost_equal(vhel.value, row['vhelio'], decimal=4)

    # all together now
    l = coord.Angle(data["lon"] * u.degree)
    b = coord.Angle(data["lat"] * u.degree)
    c = coord.Galactic(l, b)
    vgsr = data["vgsr"] * u.km/u.s
    vhel = vgsr_to_vhel(c, vgsr, vsun=vsun)
    np.testing.assert_almost_equal(vhel.value, data['vhelio'], decimal=4)

    # now check still get right answer passing in ICRS coordinates
    vhel = vgsr_to_vhel(c.transform_to(coord.ICRS), vgsr, vsun=vsun)
    np.testing.assert_almost_equal(vhel.value, data['vhelio'], decimal=4)


def test_vgsr_to_vhel_misc():
    # make sure it works with longitude in 0-360 or -180-180
    l1 = coord.Angle(190.*u.deg)
    l2 = coord.Angle(-170.*u.deg)
    b = coord.Angle(30.*u.deg)

    c1 = coord.Galactic(l1, b)
    c2 = coord.Galactic(l2, b)
    vgsr = -110.*u.km/u.s

    vhel1 = vgsr_to_vhel(c1, vgsr)
    vhel2 = vgsr_to_vhel(c2, vgsr)

    np.testing.assert_almost_equal(vhel1.value, vhel2.value, decimal=9)


def test_vhel_to_vgsr():
    filename = get_pkg_data_filename('idl_vgsr_vhel.txt')
    data = np.genfromtxt(filename, names=True, skip_header=2)

    # one row
    row = data[0]
    l = coord.Angle(row["lon"] * u.degree)
    b = coord.Angle(row["lat"] * u.degree)
    c = coord.Galactic(l, b)
    vhel = row["vhelio"] * u.km/u.s
    vlsr = [row["vx"], row["vy"], row["vz"]]*u.km/u.s  # this is right
    vcirc = row["vcirc"]*u.km/u.s
    vsun = vlsr + [0, 1, 0]*vcirc

    vgsr = vhel_to_vgsr(c, vhel, vsun=vsun)
    np.testing.assert_almost_equal(vgsr.value, row['vgsr'], decimal=4)

    # now check still get right answer passing in ICRS coordinates
    vgsr = vhel_to_vgsr(c.transform_to(coord.ICRS), vhel, vsun=vsun)
    np.testing.assert_almost_equal(vgsr.value, row['vgsr'], decimal=4)

    # all together now
    l = coord.Angle(data["lon"] * u.degree)
    b = coord.Angle(data["lat"] * u.degree)
    c = coord.Galactic(l, b)
    vhel = data["vhelio"] * u.km/u.s
    vgsr = vhel_to_vgsr(c, vhel, vsun=vsun)
    np.testing.assert_almost_equal(vgsr.value, data['vgsr'], decimal=4)

    # now check still get right answer passing in ICRS coordinates
    vgsr = vhel_to_vgsr(c.transform_to(coord.ICRS), vhel, vsun=vsun)
    np.testing.assert_almost_equal(vgsr.value, data['vgsr'], decimal=4)


_txt = """# from: XHIP catalog
# ra dec HIPID l b dist pml pmb rv U V W
0.022010 20.036114 7 106.82021040 -41.22316218 57.56 -253.69 -138.84 8.30 71.7 2.1 -34.0
2.208349 40.494550 714 114.23363142 -21.65650026 249.00 5.57 -9.00 -11.78 0.1 -16.3 -5.5
3.126297 14.563522 999 108.98177530 -47.25067692 40.94 296.66 -141.05 -15.30 -44.5 -47.6 -7.3
"""


class TestVHelGalConvert(object):

    def setup(self):
        with tempfile.NamedTemporaryFile(mode='w+b') as temp:
            temp.write(_txt.encode('utf-8'))
            temp.flush()
            temp.seek(0)
            self.data = np.genfromtxt(temp, names=True, skip_header=1)

        # This should make the transformations more compatible
        g = coord.Galactic(l=0*u.deg, b=0*u.deg).transform_to(coord.ICRS)
        self.galcen_frame = coord.Galactocentric(galcen_coord=g,
                                                 z_sun=0*u.kpc)

    def test_vhel_to_gal_single(self):
        # test one entry at a time
        for row in self.data:
            c = coord.SkyCoord(ra=row['ra']*u.deg, dec=row['dec']*u.deg,
                               distance=row['dist']*u.pc)
            icrs = c.icrs
            gal = c.galactic
            pm = [row['pml']*u.mas/u.yr, row['pmb']*u.mas/u.yr]
            rv = row['rv']*u.km/u.s

            # stupid check
            vxyz_i = vhel_to_gal(icrs, pm=pm, rv=rv,
                                 vcirc=0*u.km/u.s,
                                 vlsr=[0., 0, 0]*u.km/u.s)
            vxyz = vhel_to_gal(gal, pm=pm, rv=rv,
                               vcirc=0*u.km/u.s,
                               vlsr=[0., 0, 0]*u.km/u.s)
            assert vxyz_i.shape == vxyz.shape

            true_UVW = np.array([row['U'], row['V'], row['W']])
            UVW = vxyz.to(u.km/u.s).value

            # catalog values are rounded
            assert np.allclose(UVW, true_UVW, rtol=1E-2, atol=0.1)

        # --------------------------------------------------------------------
        # l = 0
        # without LSR and circular velocity
        c = coord.SkyCoord(ra=self.galcen_frame.galcen_coord.ra,
                           dec=self.galcen_frame.galcen_coord.dec,
                           distance=2*u.kpc)
        pm = [0, 0]*u.mas/u.yr
        rv = 20*u.km/u.s
        vxyz = vhel_to_gal(c.galactic, pm=pm, rv=rv,
                           vcirc=0*u.km/u.s,
                           vlsr=[0., 0, 0]*u.km/u.s,
                           galactocentric_frame=self.galcen_frame)
        assert np.allclose(vxyz.to(u.km/u.s).value, [20, 0, 0.], atol=1E-12)

        # with LSR and circular velocity
        c = coord.SkyCoord(ra=self.galcen_frame.galcen_coord.ra,
                           dec=self.galcen_frame.galcen_coord.dec,
                           distance=2*u.kpc)
        vxyz = vhel_to_gal(c.galactic, pm=pm, rv=rv,
                           vcirc=200*u.km/u.s,
                           vlsr=[-20., 0, 10]*u.km/u.s,
                           galactocentric_frame=self.galcen_frame)
        assert np.allclose(vxyz.to(u.km/u.s).value, [0, 200, 10], atol=1E-12)

        # l = 90
        # with LSR and circular velocity
        c = coord.SkyCoord(l=90*u.deg, b=0*u.deg, distance=2*u.kpc,
                           frame=coord.Galactic)
        vxyz = vhel_to_gal(c.galactic, pm=pm, rv=rv,
                           vcirc=200*u.km/u.s,
                           vlsr=[-20., 0, 10]*u.km/u.s,
                           galactocentric_frame=self.galcen_frame)
        assert np.allclose(vxyz.to(u.km/u.s).value, [-20, 220, 10], atol=1E-5)

        # l = 180
        # with LSR and circular velocity
        c = coord.SkyCoord(l=180*u.deg, b=0*u.deg, distance=2*u.kpc,
                           frame=coord.Galactic)
        vxyz = vhel_to_gal(c.galactic, pm=pm, rv=rv,
                           vcirc=200*u.km/u.s,
                           vlsr=[-20., 0, 10]*u.km/u.s,
                           galactocentric_frame=self.galcen_frame)
        assert np.allclose(vxyz.to(u.km/u.s).value, [-40, 200, 10], atol=1E-12)

        # l = 270
        # with LSR and circular velocity
        c = coord.SkyCoord(l=270*u.deg, b=0*u.deg, distance=2*u.kpc,
                           frame=coord.Galactic)
        vxyz = vhel_to_gal(c.galactic, pm=pm, rv=rv,
                           vcirc=200*u.km/u.s,
                           vlsr=[-20., 0, 10]*u.km/u.s,
                           galactocentric_frame=self.galcen_frame)
        assert np.allclose(vxyz.to(u.km/u.s).value, [-20, 180, 10], atol=1E-5)

    def test_vhel_to_gal_array(self):
        # test all together
        d = self.data
        c = coord.SkyCoord(ra=d['ra']*u.deg, dec=d['dec']*u.deg,
                           distance=d['dist']*u.pc)
        icrs = c.icrs
        gal = c.galactic
        pm = [d['pml'], d['pmb']]*u.mas/u.yr
        rv = d['rv']*u.km/u.s

        # stupid check
        vxyz_i = vhel_to_gal(icrs, pm=pm, rv=rv,
                             vcirc=0*u.km/u.s,
                             vlsr=[0., 0, 0]*u.km/u.s)
        vxyz = vhel_to_gal(gal, pm=pm, rv=rv,
                           vcirc=0*u.km/u.s,
                           vlsr=[0., 0, 0]*u.km/u.s)
        assert vxyz_i.shape == vxyz.shape

        # check values
        true_UVW = np.array([d['U'], d['V'], d['W']])
        UVW = vxyz.to(u.km/u.s).value

        # catalog values are rounded
        assert np.allclose(UVW, true_UVW, rtol=1E-2, atol=0.1)

    def test_vgal_to_hel_single(self):
        # test one entry at a time
        for row in self.data:
            c = coord.SkyCoord(ra=row['ra']*u.deg, dec=row['dec']*u.deg,
                               distance=row['dist']*u.pc)
            gal = c.galactic
            vxyz = [row['U'], row['V'], row['W']] * u.km/u.s

            vhel = vgal_to_hel(gal, vxyz,
                               vcirc=0.*u.km/u.s,
                               vlsr=[0., 0, 0]*u.km/u.s,
                               galactocentric_frame=self.galcen_frame)

            # tolerance set by the catalog rounded numbers
            assert quantity_allclose(vhel[0], row['pml'] * u.mas/u.yr, rtol=1E-2)
            assert quantity_allclose(vhel[1], row['pmb'] * u.mas/u.yr, rtol=1E-2)
            assert quantity_allclose(vhel[2], row['rv'] * u.km/u.s, rtol=1E-2)

        # --------------------------------------------------------------------
        # l = 0
        # without LSR and circular velocity
        c = coord.SkyCoord(l=0*u.deg, b=0*u.deg, distance=2*u.kpc,
                           frame=coord.Galactic)
        vxyz = [20., 0, 0]*u.km/u.s
        vhel = vgal_to_hel(c.galactic, vxyz,
                           vcirc=0*u.km/u.s,
                           vlsr=[0., 0, 0]*u.km/u.s,
                           galactocentric_frame=self.galcen_frame)

        assert np.allclose(vhel[0].value, 0., atol=1E-12)
        assert np.allclose(vhel[1].value, 0., atol=1E-12)
        assert np.allclose(vhel[2].to(u.km/u.s).value, 20., atol=1E-12)

        vxyz = [20., 0, 50]*u.km/u.s
        vhel = vgal_to_hel(c.galactic, vxyz,
                           vcirc=0*u.km/u.s,
                           vlsr=[0., 0, 0]*u.km/u.s,
                           galactocentric_frame=self.galcen_frame)

        assert np.allclose(vhel[0].value, 0., atol=2E-5)  # TODO: astropy precision issues
        with u.set_enabled_equivalencies(u.dimensionless_angles()):
            assert quantity_allclose(vhel[1], 50*u.km/u.s / (2*u.kpc),
                                     atol=1E-10*u.mas/u.yr)
        assert quantity_allclose(vhel[2].to(u.km/u.s), vxyz[0], atol=1E-10*u.km/u.s)

        # with LSR and circular velocity
        vxyz = [20., 0, 50]*u.km/u.s
        vhel = vgal_to_hel(c.galactic, vxyz,
                           vcirc=-200*u.km/u.s,
                           vlsr=[0., 0, 10]*u.km/u.s,
                           galactocentric_frame=self.galcen_frame)

        with u.set_enabled_equivalencies(u.dimensionless_angles()):
            assert quantity_allclose(vhel[0], (200.*u.km/u.s) / (2*u.kpc),
                                     atol=1E-10*u.mas/u.yr)
            assert quantity_allclose(vhel[1], (40.*u.km/u.s) / (2*u.kpc),
                                     atol=1E-6*u.mas/u.yr)
            assert quantity_allclose(vhel[2], 20.*u.km/u.s, atol=1E-10*u.km/u.s)

    def test_vgal_to_hel_array(self):
        # test all together
        d = self.data
        c = coord.SkyCoord(ra=d['ra']*u.deg, dec=d['dec']*u.deg,
                           distance=d['dist']*u.pc)
        pm = np.vstack([d['pml'], d['pmb']])*u.mas/u.yr
        rv = d['rv']*u.km/u.s
        vxyz = np.vstack((d['U'], d['V'], d['W']))*u.km/u.s

        vhel = vgal_to_hel(c.galactic, vxyz,
                           vcirc=0.*u.km/u.s,
                           vlsr=[0., 0, 0]*u.km/u.s,
                           galactocentric_frame=self.galcen_frame)

        # tolerance set by the catalog rounded numbers
        assert quantity_allclose(vhel[0], pm[0], rtol=1E-2)
        assert quantity_allclose(vhel[1], pm[1], rtol=1E-2)
        assert quantity_allclose(vhel[2], rv, rtol=5E-3)

    def test_roundtrip_icrs(self):
        np.random.seed(42)
        n = 100

        # yeahhhh, i know this isn't uniform on the sphere - shut up
        c = coord.SkyCoord(ra=np.random.uniform(0, 360, n)*u.degree,
                           dec=np.random.uniform(-90, 90, n)*u.degree,
                           distance=np.random.uniform(0.1, 10., n)*u.kpc)
        pm = np.random.uniform(-20, 20, size=(2, n)) * u.mas/u.yr
        vr = np.random.normal(0., 75., size=n)*u.km/u.s

        # first to galactocentric
        vxyz = vhel_to_gal(c.icrs, pm=pm, rv=vr)

        # then back again, wooo
        vhel2 = vgal_to_hel(c.icrs, vxyz)

        assert quantity_allclose(vhel2[0], pm[0], rtol=1e-12)
        assert quantity_allclose(vhel2[1], pm[1], rtol=1e-12)
        assert quantity_allclose(vhel2[2], vr, rtol=1e-12)

    def test_roundtrip_gal(self):
        np.random.seed(42)
        n = 100

        # yeahhhh, i know this isn't uniform on the sphere - shut up
        c = coord.SkyCoord(ra=np.random.uniform(0, 360, n)*u.degree,
                           dec=np.random.uniform(-90, 90, n)*u.degree,
                           distance=np.random.uniform(0.1, 10., n)*u.kpc)
        pm = np.random.uniform(-20, 20, size=(2, n)) * u.mas/u.yr
        vr = np.random.normal(0., 75., size=n)*u.km/u.s

        # first to galactocentric
        vxyz = vhel_to_gal(c.galactic, pm=pm, rv=vr)

        # then back again, wooo
        vhel2 = vgal_to_hel(c.galactic, vxyz)

        # TODO: why such bad roundtripping???
        assert quantity_allclose(vhel2[0], pm[0], rtol=1e-12)
        assert quantity_allclose(vhel2[1], pm[1], rtol=1e-12)
        assert quantity_allclose(vhel2[2], vr, rtol=1e-12)
40.005479
106
0.52753
2,202
14,602
3.404632
0.126249
0.02561
0.034147
0.042684
0.810191
0.780846
0.763906
0.75777
0.737762
0.700947
0
0.058133
0.317902
14,602
364
107
40.115385
0.694578
0.099712
0
0.624
0
0.012
0.046691
0
0
0
0
0.002747
0.156
1
0.04
false
0
0.032
0
0.08
0.004
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
f55ccb7d78870351f12e275fb139e1c15251ab9e
98
py
Python
project/RealEstateMarketPlace/views/handler500.py
Mihaaai/RealEstateMarketplace
9b9fa1376436801303e1ed0207ef09845a7d827e
[ "Apache-2.0" ]
null
null
null
project/RealEstateMarketPlace/views/handler500.py
Mihaaai/RealEstateMarketplace
9b9fa1376436801303e1ed0207ef09845a7d827e
[ "Apache-2.0" ]
null
null
null
project/RealEstateMarketPlace/views/handler500.py
Mihaaai/RealEstateMarketplace
9b9fa1376436801303e1ed0207ef09845a7d827e
[ "Apache-2.0" ]
null
null
null
from django.shortcuts import redirect


def handler500(request):
    return redirect('list_listings')
19.6
37
0.816327
12
98
6.583333
0.916667
0
0
0
0
0
0
0
0
0
0
0.034091
0.102041
98
4
38
24.5
0.863636
0
0
0
0
0
0.132653
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0.333333
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
1
0
0
6
f5a4b0b2443eabe5db1995e89cba0becf6858602
46
py
Python
examples/lb-provider-reactive/lib/charms/layer/provides_reactive.py
juju-solutions/loadbalancer-interface
ee84fb93ea52e55506f267cde28935df7d60a16d
[ "Apache-2.0" ]
null
null
null
examples/lb-provider-reactive/lib/charms/layer/provides_reactive.py
juju-solutions/loadbalancer-interface
ee84fb93ea52e55506f267cde28935df7d60a16d
[ "Apache-2.0" ]
2
2021-01-19T22:29:02.000Z
2021-03-12T16:55:06.000Z
examples/lb-provider-reactive/lib/charms/layer/provides_reactive.py
juju-solutions/loadbalancer-interface
ee84fb93ea52e55506f267cde28935df7d60a16d
[ "Apache-2.0" ]
null
null
null
def create_lb(request):
    return "dummy-lb"
15.333333
23
0.695652
7
46
4.428571
0.857143
0
0
0
0
0
0
0
0
0
0
0
0.173913
46
2
24
23
0.815789
0
0
0
0
0
0.173913
0
0
0
0
0
0
1
0.5
false
0
0
0.5
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
6
1928dc23810efe224469e6152f98f1ab2d07764b
36
py
Python
tests-bad-syntax/dup-arg-names.py
brownplt/lambda-py
c3ee39502c8953d36b886e5a203f2eb51d2f495b
[ "Apache-2.0" ]
25
2015-04-16T04:31:49.000Z
2022-03-10T15:53:28.000Z
tests-bad-syntax/dup-arg-names.py
brownplt/lambda-py
c3ee39502c8953d36b886e5a203f2eb51d2f495b
[ "Apache-2.0" ]
1
2018-11-21T22:40:02.000Z
2018-11-26T17:53:11.000Z
tests-bad-syntax/dup-arg-names.py
brownplt/lambda-py
c3ee39502c8953d36b886e5a203f2eb51d2f495b
[ "Apache-2.0" ]
1
2021-03-26T03:36:19.000Z
2021-03-26T03:36:19.000Z
def f(a,*a):pass # No dup arg names
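This record is from lambda-py's bad-syntax test suite: the duplicate parameter name is meant to be rejected at compile time. A quick sketch confirming CPython's behavior:

# Compiling the definition raises SyntaxError before any code runs.
try:
    compile("def f(a, *a): pass", "<test>", "exec")
except SyntaxError as err:
    print(err)  # duplicate argument 'a' in function definition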
18
35
0.638889
9
36
2.555556
0.888889
0
0
0
0
0
0
0
0
0
0
0
0.194444
36
1
36
36
0.793103
0.444444
0
0
0
0
0
0
0
0
0
0
0
1
1
false
1
0
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
6
193a960b2b0112d76e3366388622207dda07357d
72
py
Python
fiscomp/methods.py
fiscomp-usp/fiscomp
6f50655c8c0d78fdb39d3dcef454906c2513026e
[ "MIT" ]
null
null
null
fiscomp/methods.py
fiscomp-usp/fiscomp
6f50655c8c0d78fdb39d3dcef454906c2513026e
[ "MIT" ]
null
null
null
fiscomp/methods.py
fiscomp-usp/fiscomp
6f50655c8c0d78fdb39d3dcef454906c2513026e
[ "MIT" ]
null
null
null
import numpy as np


def difference(f, tag='data', mode='central'):
    pass
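The stub above suggests a finite-difference routine. A minimal sketch of what a central-difference implementation could look like; the signature's tag handling is unknown, so it is omitted, and the function name here is an assumption.

import numpy as np


def central_difference(f, x, h=1e-5):
    # Symmetric quotient: f'(x) ~ (f(x + h) - f(x - h)) / (2h)
    return (f(x + h) - f(x - h)) / (2.0 * h)


x = np.linspace(0.0, np.pi, 5)
print(central_difference(np.sin, x))  # approximately cos(x)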
14.4
44
0.708333
12
72
4.25
1
0
0
0
0
0
0
0
0
0
0
0
0.138889
72
4
45
18
0.822581
0
0
0
0
0
0.152778
0
0
0
0
0
0
1
0.333333
false
0.333333
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
1
0
1
0
0
6
193b69329d5e7a198b76dd15004c97229b2dc376
25,397
py
Python
seed_services_client/tests/test_identity_store.py
praekeltfoundation/seed-services-client
bfb216b6b770f9433bd9cda573f13199c4afee9c
[ "BSD-3-Clause" ]
null
null
null
seed_services_client/tests/test_identity_store.py
praekeltfoundation/seed-services-client
bfb216b6b770f9433bd9cda573f13199c4afee9c
[ "BSD-3-Clause" ]
25
2016-06-24T14:37:51.000Z
2018-06-26T09:08:31.000Z
seed_services_client/tests/test_identity_store.py
praekeltfoundation/seed-services-client
bfb216b6b770f9433bd9cda573f13199c4afee9c
[ "BSD-3-Clause" ]
null
null
null
from mock import patch
from unittest import TestCase

import responses

from seed_services_client.identity_store import IdentityStoreApiClient
from seed_services_client.seed_services import SeedServicesApiClient


class TestIdentityStoreClient(TestCase):
    def setUp(self):
        self.api = IdentityStoreApiClient("NO", "http://id.example.org/api/v1")

    @patch("seed_services_client.seed_services.SeedServicesApiClient.__init__")
    def test_inherited_from_base_api_class(self, mock_parent_init):
        self.assertTrue(
            issubclass(IdentityStoreApiClient, SeedServicesApiClient))

        IdentityStoreApiClient(
            'token', 'http://api/', session='session', argument=True)

        mock_parent_init.assert_called_with(
            'token', 'http://api/', session='session', argument=True)

    @responses.activate
    def test_identity_search_one_page(self):
        # setup
        search_response = {
            "next": None,
            "previous": None,
            "results": [
                {
                    "id": "0c03d360-1180-4fb4-9eed-ecd2cff8fa05",
                    "version": 1,
                    "details": {
                        "default_addr_type": "msisdn",
                        "addresses": {
                            "msisdn": {
                                "+27123": {}
                            }
                        }
                    }
                }
            ]
        }
        qs = "?details__addresses__msisdn=%2B27001"
        responses.add(responses.GET,
                      "http://id.example.org/api/v1/identities/search/%s" % qs,
                      json=search_response, status=200,
                      match_querystring=True)

        # Execute
        result = self.api.get_identity_by_address(address_type="msisdn",
                                                  address_value="+27001")

        # Check
        result1 = next(result["results"])
        self.assertEqual(result1["id"], "0c03d360-1180-4fb4-9eed-ecd2cff8fa05")
        self.assertEqual(len(responses.calls), 1)
        self.assertEqual(responses.calls[0].request.url,
                         "http://id.example.org/api/v1/identities/search/?details__addresses__msisdn=%2B27001")  # noqa

    @responses.activate
    def test_identity_search_multiple_pages(self):
        # setup
        qs = "?details__addresses__msisdn=%2B27001"
        search_response = {
            "next": "http://id.example.org/api/v1/identities/search/%s&"
                    "cursor=1" % qs,
            "previous": None,
            "results": [
                {
                    "id": "identity-1-80-4fb4-9eed-ecd2cff8fa05",
                    "version": 1,
                    "details": {
                        "default_addr_type": "msisdn",
                        "addresses": {
                            "msisdn": {
                                "+27123": {}
                            }
                        }
                    }
                }
            ]
        }
        responses.add(responses.GET,
                      "http://id.example.org/api/v1/identities/search/%s" % qs,
                      json=search_response, status=200,
                      match_querystring=True)
        search_response = {
            "next": None,
            "previous": "http://id.example.org/api/v1/identities/search/%s&"
                        "cursor=0" % qs,
            "results": [
                {
                    "id": "identity-2-80-4fb4-9eed-ecd2cff8fa05",
                    "version": 1,
                    "details": {
                        "default_addr_type": "msisdn",
                        "addresses": {
                            "msisdn": {
                                "+27123": {}
                            }
                        }
                    }
                }
            ]
        }
        responses.add(responses.GET,
                      "http://id.example.org/api/v1/identities/search/%s&"
                      "cursor=1" % qs,
                      json=search_response, status=200,
                      match_querystring=True)

        # Execute
        result = self.api.get_identity_by_address(address_type="msisdn",
                                                  address_value="+27001")

        # Check
        result1 = next(result["results"])
        result2 = next(result["results"])
        self.assertEqual(result1["id"], "identity-1-80-4fb4-9eed-ecd2cff8fa05")
        self.assertEqual(result2["id"], "identity-2-80-4fb4-9eed-ecd2cff8fa05")
        self.assertEqual(len(responses.calls), 2)
        self.assertEqual(responses.calls[0].request.url,
                         "http://id.example.org/api/v1/identities/search/?details__addresses__msisdn=%2B27001")  # noqa
        self.assertEqual(responses.calls[1].request.url,
                         "http://id.example.org/api/v1/identities/search/?details__addresses__msisdn=%2B27001&cursor=1")  # noqa

    @responses.activate
    def test_details_search(self):
        # setup
        qs = "?details__preferred_language=eng_ZA"
        search_response = {
            "next": "http://id.example.org/api/v1/identities/search/%s&"
                    "cursor=1" % qs,
            "previous": None,
            "results": [
                {
                    "id": "identity-1-80-4fb4-9eed-ecd2cff8fa05",
                    "version": 1,
                    "details": {
                        "preferred_language": "eng_ZA",
                        "default_addr_type": "msisdn",
                        "addresses": {
                            "msisdn": {
                                "+27123": {}
                            }
                        }
                    }
                }
            ]
        }
        responses.add(responses.GET,
                      "http://id.example.org/api/v1/identities/search/%s" % qs,
                      json=search_response, status=200,
                      match_querystring=True)
        search_response = {
            "next": None,
            "previous": "http://id.example.org/api/v1/identities/search/%s&"
                        "cursor=0" % qs,
            "results": [
                {
                    "id": "identity-2-80-4fb4-9eed-ecd2cff8fa05",
                    "version": 1,
                    "details": {
                        "preferred_language": "eng_ZA",
                        "default_addr_type": "msisdn",
                        "addresses": {
                            "msisdn": {
                                "+27123": {}
                            }
                        }
                    }
                }
            ]
        }
        responses.add(responses.GET,
                      "http://id.example.org/api/v1/identities/search/%s&"
                      "cursor=1" % qs,
                      json=search_response, status=200,
                      match_querystring=True)

        # Execute
        result = self.api.search_identities("details__preferred_language",
                                            "eng_ZA")

        # Check
        result1 = next(result["results"])
        result2 = next(result["results"])
        self.assertEqual(result1["id"], "identity-1-80-4fb4-9eed-ecd2cff8fa05")
        self.assertEqual(result2["id"], "identity-2-80-4fb4-9eed-ecd2cff8fa05")
        self.assertEqual(len(responses.calls), 2)
        self.assertEqual(responses.calls[0].request.url,
                         "http://id.example.org/api/v1/identities/search/?details__preferred_language=eng_ZA")  # noqa
        self.assertEqual(responses.calls[1].request.url,
                         "http://id.example.org/api/v1/identities/search/?details__preferred_language=eng_ZA&cursor=1")  # noqa

    @responses.activate
    def test_identity_search_no_results(self):
        # setup
        search_response = {
            "next": None,
            "previous": None,
            "results": []
        }
        qs = "?details__addresses__msisdn=%2B27002"
        responses.add(responses.GET,
                      "http://id.example.org/api/v1/identities/search/%s" % qs,
                      json=search_response, status=200,
                      match_querystring=True)

        # Execute
        result = self.api.get_identity_by_address(address_type="msisdn",
                                                  address_value="+27002")

        # Check
        self.assertEqual(len(list(result["results"])), 0)
        self.assertEqual(len(responses.calls), 1)
        self.assertEqual(responses.calls[0].request.url,
                         "http://id.example.org/api/v1/identities/search/?details__addresses__msisdn=%2B27002")  # noqa

    @responses.activate
    def test_get_identity_none(self):
        # setup
        four_oh_four = {
            "detail": "Not found."
        }
        responses.add(responses.GET,
                      "http://id.example.org/api/v1/identities/uuid/",
                      json=four_oh_four, status=404,
                      match_querystring=True)

        # Execute
        result = self.api.get_identity(identity="uuid")

        # Check
        self.assertEqual(result, None)
        self.assertEqual(len(responses.calls), 1)
        self.assertEqual(responses.calls[0].request.url,
                         "http://id.example.org/api/v1/identities/uuid/")

    @responses.activate
    def test_get_identity_found(self):
        # setup
        identity = {
            "id": "4275a063-3129-45ac-853b-0d64aaefd8c5",
            "version": 1,
            "details": {
                "default_addr_type": "msisdn",
                "addresses": {
                    "msisdn": {
                        "+26773000000": {}
                    }
                }
            },
            "communicate_through": None,
            "operator": None,
            "created_at": "2016-04-21T09:11:05.725680Z",
            "created_by": 2,
            "updated_at": "2016-06-15T15:09:05.333526Z",
            "updated_by": 2
        }
        uid = identity["id"]
        responses.add(responses.GET,
                      "http://id.example.org/api/v1/identities/%s/" % uid,
                      json=identity, status=200,
                      match_querystring=True)

        # Execute
        result = self.api.get_identity(identity=uid)

        # Check
        self.assertEqual(result["id"], uid)
        self.assertEqual(result["version"], 1)
        self.assertEqual(result["details"]["default_addr_type"], "msisdn")
        self.assertEqual(len(responses.calls), 1)
        self.assertEqual(responses.calls[0].request.url,
                         "http://id.example.org/api/v1/identities/%s/" % uid)

    @responses.activate
    def test_get_identity_address(self):
        # Setup
        uid = 'uid'
        url = ('http://id.example.org/api/v1/identities/{0}'
               '/addresses/msisdn?default=True').format(uid)
        addresses_msisdn_response = {'results': [
            {'address': '+27000000000'},
            {'address': '+27000000001'},
        ]}
        responses.add(responses.GET, url,
                      json=addresses_msisdn_response, status=200,
                      match_querystring=True)

        # Execute
        result = self.api.get_identity_address(identity_id=uid)

        # Check
        self.assertEqual(result, '+27000000000')

    @responses.activate
    def test_get_identity_address_no_results(self):
        # Setup
        uid = 'uid'
        url = ('http://id.example.org/api/v1/identities/{0}'
               '/addresses/msisdn?default=True').format(uid)
        addresses_msisdn_response = {'results': []}
        responses.add(responses.GET, url,
                      json=addresses_msisdn_response, status=200,
                      match_querystring=True)

        # Execute
        result = self.api.get_identity_address(identity_id=uid)

        # Check
        self.assertEqual(result, None)

    @responses.activate
    def test_get_identity_address_custom_params(self):
        # Setup
        uid = 'uid'
        url = ('http://id.example.org/api/v1/identities/{0}'
               '/addresses/msisdn?param_one=set').format(uid)
        addresses_msisdn_response = {'results': []}
        responses.add(responses.GET, url,
                      json=addresses_msisdn_response, status=200,
                      match_querystring=True)

        # Execute
        self.api.get_identity_address(identity_id=uid,
                                      params={'param_one': 'set'})

        # This test doesn't assert anything, but responses will raise a
        # ConnectionError if the params don't match

    @responses.activate
    def test_identity_list_no_results(self):
        # setup
        response = {
            "next": None,
            "previous": None,
            "results": []
        }
        responses.add(responses.GET,
                      "http://id.example.org/api/v1/identities/",
                      json=response, status=200,
                      match_querystring=True)

        # Execute
        result = self.api.get_identities()

        # Check
        self.assertEqual(len(list(result["results"])), 0)
        self.assertEqual(len(responses.calls), 1)
        self.assertEqual(responses.calls[0].request.url,
                         "http://id.example.org/api/v1/identities/")

    @responses.activate
    def test_identity_list_one_results(self):
        # setup
        response = {
            "next": None,
            "previous": None,
            "results": [
                {
                    "id": "4275a063-3129-45ac-853b-0d64aaefd8c5",
                    "version": 1,
                    "details": {
                        "default_addr_type": "msisdn",
                        "addresses": {
                            "msisdn": {
                                "+26773000000": {}
                            }
                        }
                    },
                    "communicate_through": None,
                    "operator": None,
                    "created_at": "2016-04-21T09:11:05.725680Z",
                    "created_by": 2,
                    "updated_at": "2016-06-15T15:09:05.333526Z",
                    "updated_by": 2
                }
            ]
        }
        responses.add(responses.GET,
                      "http://id.example.org/api/v1/identities/",
                      json=response, status=200,
                      match_querystring=True)

        # Execute
        result = self.api.get_identities()

        # Check
        result1 = next(result["results"])
        self.assertEqual(result1["version"], 1)
        self.assertEqual(result1["details"]["default_addr_type"], "msisdn")
        self.assertEqual(len(responses.calls), 1)
        self.assertEqual(responses.calls[0].request.url,
                         "http://id.example.org/api/v1/identities/")

    @responses.activate
    def test_identity_list_multiple_pages(self):
        # setup
        search_response = {
            "next": "http://id.example.org/api/v1/identities/?cursor=1",
            "previous": None,
            "results": [
                {
                    "id": "identity-1-80-4fb4-9eed-ecd2cff8fa05",
                    "version": 1,
                    "details": {
                        "default_addr_type": "msisdn",
                        "addresses": {
                            "msisdn": {
                                "+27123": {}
                            }
                        }
                    }
                }
            ]
        }
        responses.add(responses.GET,
                      "http://id.example.org/api/v1/identities/",
                      json=search_response, status=200,
                      match_querystring=True)
        search_response = {
            "next": None,
            "previous": "http://id.example.org/api/v1/identities/?cursor=0",
            "results": [
                {
                    "id": "identity-2-80-4fb4-9eed-ecd2cff8fa05",
                    "version": 1,
                    "details": {
                        "default_addr_type": "msisdn",
                        "addresses": {
                            "msisdn": {
                                "+27123": {}
                            }
                        }
                    }
                }
            ]
        }
        responses.add(responses.GET,
                      "http://id.example.org/api/v1/identities/?cursor=1",
                      json=search_response, status=200,
                      match_querystring=True)

        # Execute
        result = self.api.get_identities()

        # Check
        result1 = next(result["results"])
        result2 = next(result["results"])
        self.assertEqual(result1["id"], "identity-1-80-4fb4-9eed-ecd2cff8fa05")
        self.assertEqual(result2["id"], "identity-2-80-4fb4-9eed-ecd2cff8fa05")
        self.assertEqual(len(responses.calls), 2)
        self.assertEqual(responses.calls[0].request.url,
                         "http://id.example.org/api/v1/identities/")  # noqa
        self.assertEqual(responses.calls[1].request.url,
                         "http://id.example.org/api/v1/identities/?cursor=1")  # noqa

    @responses.activate
    def test_update_identity_details(self):
        # Setup
        uid = "4275a063-3129-45ac-853b-0d64aaefd8c5"
        response = {
            "id": uid,
            "version": 1,
            "details": {
                "default_addr_type": "msisdn",
                "addresses": {
                    "msisdn": {
                        "+26773000000": {}
                    }
                },
                "risk": "high"
            },
            "communicate_through": None,
            "operator": None,
            "created_at": "2016-04-21T09:11:05.725680Z",
            "created_by": 2,
            "updated_at": "2016-06-15T15:09:05.333526Z",
            "updated_by": 2
        }
        responses.add(responses.PATCH,
                      "http://id.example.org/api/v1/identities/%s/" % uid,
                      json=response, status=200)
        data = {
            "details": {
                "risk": "high"
            }
        }

        # Execute
        result = self.api.update_identity(uid, data)

        # Check
        self.assertEqual(result["id"], uid)
        self.assertEqual(result["version"], 1)
        self.assertEqual(result["details"]["default_addr_type"], "msisdn")
        self.assertEqual(result["details"]["risk"], "high")
        self.assertEqual(len(responses.calls), 1)
        self.assertEqual(responses.calls[0].request.url,
                         "http://id.example.org/api/v1/identities/%s/" % uid)

    @responses.activate
    def test_create_identity(self):
        # Setup
        identity = {
            "id": "4275a063-3129-45ac-853b-0d64aaefd8c5",
            "version": 1,
            "details": {
                "default_addr_type": "msisdn",
                "addresses": {
                    "msisdn": {
                        "+26773000000": {}
                    }
                },
                "risk": "high"
            },
            "communicate_through": None,
            "operator": None,
            "created_at": "2016-04-21T09:11:05.725680Z",
            "created_by": 2,
            "updated_at": "2016-06-15T15:09:05.333526Z",
            "updated_by": 2
        }
        responses.add(responses.POST,
                      "http://id.example.org/api/v1/identities/",
                      json=identity, status=201)

        # Execute
        self.api.create_identity(identity)

        # Check
        self.assertEqual(len(responses.calls), 1)
        self.assertEqual(
            responses.calls[0].request.url,
            "http://id.example.org/api/v1/identities/")

    @responses.activate
    def test_get_optouts_one_page(self):
        optouts = {
            "next": None,
            "previous": None,
            "results": [
                {
                    "id": "e5210c99-8d8a-40f1-8e7f-8a66c4de9e29",
                    "optout_type": "stop",
                    "identity": "8311c23d-f3c4-4cab-9e20-5208d77dcd1b",
                    "address_type": "msisdn",
                    "address": "+1234",
                    "request_source": "testsource",
                    "requestor_source_id": "1",
                    "reason": "Test reason",
                    "created_at": "2017-01-27T10:00:06.354178Z"
                },
            ]
        }
        responses.add(
            responses.GET,
            "http://id.example.org/api/v1/optouts/search/?optout_type=stop",
            json=optouts, match_querystring=True)

        res = self.api.get_optouts(params={'optout_type': 'stop'})
        result1 = next(res["results"])
        self.assertEqual(result1['id'], "e5210c99-8d8a-40f1-8e7f-8a66c4de9e29")
        self.assertEqual(len(responses.calls), 1)
        self.assertEqual(
            responses.calls[0].request.url,
            "http://id.example.org/api/v1/optouts/search/?optout_type=stop"
        )

    @responses.activate
    def test_get_optouts_multiple_pages(self):
        optouts = {
            "next": "http://id.example.org/api/v1/optouts/search/?"
                    "optout_type=stop&cursor=1",
            "previous": None,
            "results": [
                {
                    "id": "optout-1-8d8a-40f1-8e7f-8a66c4de9e29",
                    "optout_type": "stop",
                    "identity": "identity-1-c4-4cab-9e20-5208d77dcd1b",
                    "address_type": "msisdn",
                    "address": "+1234",
                    "request_source": "testsource",
                    "requestor_source_id": "1",
                    "reason": "Test reason",
                    "created_at": "2017-01-27T10:00:06.354178Z"
                },
            ]
        }
        responses.add(
            responses.GET,
            "http://id.example.org/api/v1/optouts/search/?optout_type=stop",
            json=optouts, match_querystring=True)

        optouts = {
            "next": None,
            "previous": "http://id.example.org/api/v1/optouts/search/?"
                        "optout_type=stop&cursor=0",
            "results": [
                {
                    "id": "optout-2-8d8a-40f1-8e7f-8a66c4de9e29",
                    "optout_type": "stop",
                    "identity": "identity-2-c4-4cab-9e20-5208d77dcd1b",
                    "address_type": "msisdn",
                    "address": "+1234",
                    "request_source": "testsource",
                    "requestor_source_id": "1",
                    "reason": "Test reason",
                    "created_at": "2017-01-27T10:00:06.354178Z"
                },
            ]
        }
        responses.add(
            responses.GET,
            "http://id.example.org/api/v1/optouts/search/?optout_type=stop&"
            "cursor=1",
            json=optouts, match_querystring=True)

        res = self.api.get_optouts(params={'optout_type': 'stop'})
        result1 = next(res["results"])
        result2 = next(res["results"])
        self.assertEqual(result1['id'], "optout-1-8d8a-40f1-8e7f-8a66c4de9e29")
        self.assertEqual(result2['id'], "optout-2-8d8a-40f1-8e7f-8a66c4de9e29")
        self.assertEqual(len(responses.calls), 2)
        self.assertEqual(
            responses.calls[0].request.url,
            "http://id.example.org/api/v1/optouts/search/?optout_type=stop"
        )
        self.assertEqual(
            responses.calls[1].request.url,
            "http://id.example.org/api/v1/optouts/search/?optout_type=stop&"
            "cursor=1"
        )

    @responses.activate
    def test_create_optout(self):
        optout = {
            "id": "e5210c99-8d8a-40f1-8e7f-8a66c4de9e29",
            "optout_type": "stop",
            "identity": "8311c23d-f3c4-4cab-9e20-5208d77dcd1b",
            "address_type": "msisdn",
            "address": "+1234",
            "request_source": "testsource",
            "requestor_source_id": "1",
            "reason": "Test reason",
            "created_at": "2017-01-27T10:00:06.354178Z"
        }
        responses.add(
            responses.POST, 'http://id.example.org/api/v1/optout/',
            json=optout, status=201)

        self.api.create_optout(optout)
        self.assertEqual(len(responses.calls), 1)
        self.assertEqual(
            responses.calls[0].request.url,
            "http://id.example.org/api/v1/optout/"
        )

    @responses.activate
    def test_create_optin(self):
        optin = {
            "id": "ba27bb4e-49a3-49cd-81a4-9f6af7380cbf",
            "identity": "46f61a96-d54f-4eda-8250-e5bb86be2580",
            "address_type": "msisdn",
            "address": "+1234",
            "request_source": "Test source",
            "requestor_source_id": "1",
            "created_at": "2017-01-27T10:41:38.924319Z"
        }
        responses.add(
            responses.POST, 'http://id.example.org/api/v1/optin/',
            json=optin, status=201)

        self.api.create_optin(optin)
        self.assertEqual(len(responses.calls), 1)
        self.assertEqual(
            responses.calls[0].request.url,
            'http://id.example.org/api/v1/optin/'
        )
38.656012
127
0.486278
2,278
25,397
5.285777
0.086479
0.072253
0.053982
0.06644
0.888714
0.856158
0.831077
0.787974
0.777178
0.744373
0
0.071726
0.385164
25,397
656
128
38.714939
0.699392
0.016183
0
0.663248
0
0.003419
0.272869
0.073912
0
0
0
0
0.102564
1
0.034188
false
0
0.008547
0
0.044444
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
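The tests above imply that get_identities() returns a dict whose "results" entry is a lazy iterator that crosses page boundaries by following the "next" cursor URL. A minimal sketch of such a client follows; the class and attribute names (IdentityStoreClient, session) are illustrative assumptions, not the project's actual implementation.

import requests

class IdentityStoreClient:
    def __init__(self, base_url, session=None):
        self.base_url = base_url.rstrip("/")
        self.session = session or requests.Session()

    def _paginated(self, url, params=None):
        # Yield items page by page, following the absolute "next" URL
        # until the server returns next = None.
        while url is not None:
            page = self.session.get(url, params=params).json()
            for item in page["results"]:
                yield item
            url = page.get("next")
            params = None  # the cursor is already encoded in the "next" URL

    def get_identities(self, params=None):
        url = "%s/identities/" % self.base_url
        return {"results": self._paginated(url, params=params)}

Because the generator is lazy, the second HTTP request in test_identity_list_multiple_pages is only issued when next() is called past the end of the first page, which is why the tests can assert on responses.calls after consuming results one at a time.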
1986375e3e5b64e2ae5732a9f6cf07109006b3ca
36
py
Python
hackerrank/interview-preparation-kit/f2018.py
AmrMKayid/KayAlgo
df6e2b5b0f74174d5c0950520f0c47b04212dfaa
[ "MIT" ]
1
2019-02-11T13:29:32.000Z
2019-02-11T13:29:32.000Z
hackerrank/interview-preparation-kit/f2018.py
AmrMKayid/KayAlgo
df6e2b5b0f74174d5c0950520f0c47b04212dfaa
[ "MIT" ]
1
2019-02-11T15:26:36.000Z
2019-02-11T15:26:36.000Z
hackerrank/interview-preparation-kit/f2018.py
AmrMKayid/KayAlgo
df6e2b5b0f74174d5c0950520f0c47b04212dfaa
[ "MIT" ]
null
null
null
print('Interview Preparation Kit')
18
35
0.777778
4
36
7
1
0
0
0
0
0
0
0
0
0
0
0
0.111111
36
1
36
36
0.875
0
0
0
0
0
0.714286
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
6
273cb7471e32c8a6a1649821b5e68bd9da595bea
6,686
py
Python
experiment/deep_learning.py
howietiangh/patch_prediction2
e1effc0a6cd384412cb64b6d78ef93b0ade30ed6
[ "MIT" ]
2
2022-01-08T21:24:22.000Z
2022-01-08T21:24:25.000Z
experiment/deep_learning.py
howietiangh/patch_prediction2
e1effc0a6cd384412cb64b6d78ef93b0ade30ed6
[ "MIT" ]
null
null
null
experiment/deep_learning.py
howietiangh/patch_prediction2
e1effc0a6cd384412cb64b6d78ef93b0ade30ed6
[ "MIT" ]
null
null
null
import random

import numpy as np
import lightgbm as lgb
import keras
from sklearn.model_selection import KFold, StratifiedKFold
from sklearn.preprocessing import StandardScaler, MinMaxScaler
from sklearn.linear_model import LogisticRegression
from sklearn.metrics import roc_curve, auc
from sklearn.metrics import confusion_matrix
from keras.models import load_model
from sklearn import preprocessing
from keras import models
from keras.layers import MaxPool1D, Activation, Dense, Flatten, Input, Multiply, Permute, RepeatVector, Reshape, Concatenate, Conv1D
from keras.utils import plot_model
from sklearn.metrics import roc_curve, auc, accuracy_score, recall_score, precision_score


def get_dnn(dimension):
    input_embeddings_tensor = Input(shape=(dimension,))
    embeddings_tensor = Dense(512, activation='tanh')(input_embeddings_tensor)  # number of neurons
    # for _ in range(3):  # number of DNN layers; here 3
    embeddings_tensor = Dense(128, activation='tanh')(embeddings_tensor)
    embeddings_tensor = Dense(128, activation='tanh')(embeddings_tensor)
    # embeddings_tensor = Dense(64, activation='tanh')(embeddings_tensor)
    output_tensor = Dense(1, activation='sigmoid')(embeddings_tensor)
    model = models.Model(inputs=input_embeddings_tensor, outputs=output_tensor)
    model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['AUC'])
    # plot_model(model, to_file='./model.png', show_shapes=True)
    return model


def get_dnn_4engineered(dimension):
    input_engineered_tensor = Input(shape=(dimension,))
    engineered_tensor = Dense(128, activation='tanh')(input_engineered_tensor)
    engineered_tensor = Dense(64, activation='tanh')(engineered_tensor)
    # engineered_tensor = Dense(1024, activation='tanh')(engineered_tensor)
    # engineered_tensor = Dense(512, activation='sigmoid')(engineered_tensor)
    output_tensor = Dense(1, activation='sigmoid')(engineered_tensor)
    model = models.Model(inputs=input_engineered_tensor, outputs=output_tensor)
    model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['AUC'])
    # plot_model(model, to_file='./model.png', show_shapes=True)
    return model


def get_cnn():
    input_engineered_tensor = Input(shape=(4497,))
    engineered_tensor = Reshape((4497, 1))(input_engineered_tensor)
    engineered_tensor = Conv1D(256, (50), activation='relu')(engineered_tensor)
    engineered_tensor = MaxPool1D((8))(engineered_tensor)
    # engineered_tensor = Conv1D(128, (50), activation='relu')(engineered_tensor)
    # engineered_tensor = MaxPool1D((8))(engineered_tensor)
    engineered_tensor = Flatten()(engineered_tensor)
    output_tensor = Dense(1, activation='sigmoid')(engineered_tensor)
    model = models.Model(inputs=input_engineered_tensor, outputs=output_tensor)
    model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['AUC'])
    # plot_model(model, to_file='./model.png', show_shapes=True)
    return model


def get_wide_deep(dimension_learned, dimension_engineered):
    input_embeddings_tensor = Input(shape=(dimension_learned,))
    embeddings_tensor = Dense(512, activation='tanh')(input_embeddings_tensor)  # number of neurons
    # for _ in range(3):  # number of DNN layers; here 3
    embeddings_tensor = Dense(128, activation='tanh')(embeddings_tensor)
    embeddings_tensor = Dense(128, activation='tanh')(embeddings_tensor)
    # embeddings_tensor = Dense(64, activation='tanh')(embeddings_tensor)
    input_fe_tensor = Input(shape=(dimension_engineered,))
    # engineered_tensor = Dense(1, activation='sigmoid')(input_fe_tensor)
    concat_tensor = Concatenate()([embeddings_tensor, input_fe_tensor])
    output_tensor = Dense(1, activation='sigmoid')(concat_tensor)
    model = models.Model(inputs=[input_embeddings_tensor, input_fe_tensor],
                         outputs=output_tensor)
    model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['AUC'])
    plot_model(model, to_file='../model/wide_deep.png', show_shapes=True)
    return model


def get_dnn_dnn(dimension_learned, dimension_engineered):
    input_embeddings_tensor = Input(shape=(dimension_learned,))
    embeddings_tensor = Dense(512, activation='tanh')(input_embeddings_tensor)  # number of neurons
    # for _ in range(3):  # number of DNN layers; here 3
    embeddings_tensor = Dense(128, activation='tanh')(embeddings_tensor)
    embeddings_tensor = Dense(128, activation='tanh')(embeddings_tensor)
    # embeddings_tensor = Dense(1, activation='sigmoid')(embeddings_tensor)
    input_engineered_tensor = Input(shape=(dimension_engineered,))
    engineered_tensor = Dense(128, activation='tanh')(input_engineered_tensor)
    engineered_tensor = Dense(64, activation='tanh')(engineered_tensor)
    # engineered_tensor = Dense(512, activation='tanh')(engineered_tensor)
    # engineered_tensor = Dense(128, activation='sigmoid')(engineered_tensor)
    concat_tensor = Concatenate()([embeddings_tensor, engineered_tensor])
    output_tensor = Dense(1, activation='sigmoid')(concat_tensor)
    model = models.Model(inputs=[input_embeddings_tensor, input_engineered_tensor],
                         outputs=output_tensor)
    model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['AUC'])
    plot_model(model, to_file='../model/dnn_dnn.png', show_shapes=True)
    return model


def get_dnn_cnn(dimension_learned, dimension_engineered):
    input_embeddings_tensor = Input(shape=(dimension_learned,))
    embeddings_tensor = Dense(1024, activation='tanh')(input_embeddings_tensor)  # number of neurons
    # for _ in range(3):  # number of DNN layers; here 3
    embeddings_tensor = Dense(512, activation='tanh')(embeddings_tensor)
    embeddings_tensor = Dense(512, activation='tanh')(embeddings_tensor)
    # embeddings_tensor = Dense(1, activation='sigmoid')(embeddings_tensor)
    input_engineered_tensor = Input(shape=(dimension_engineered,))
    # CNN
    engineered_tensor = Reshape((dimension_engineered, 1))(input_engineered_tensor)
    engineered_tensor = Conv1D(256, (50), activation='relu')(engineered_tensor)
    engineered_tensor = MaxPool1D((8))(engineered_tensor)
    # engineered_tensor = Conv1D(32, (50), activation='relu')(engineered_tensor)
    # engineered_tensor = MaxPool1D((2))(engineered_tensor)
    engineered_tensor = Flatten()(engineered_tensor)
    # engineered_tensor = Dense(1, activation='sigmoid')(engineered_tensor)
    concat_tensor = Concatenate()([embeddings_tensor, engineered_tensor])
    output_tensor = Dense(1, activation='sigmoid')(concat_tensor)
    model = models.Model(inputs=[input_embeddings_tensor, input_engineered_tensor],
                         outputs=output_tensor)
    model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['AUC'])
    plot_model(model, to_file='../model/dnn_cnn.png', show_shapes=True)
    return model
48.449275
132
0.760395
790
6,686
6.165823
0.122785
0.180661
0.085814
0.111681
0.862451
0.850544
0.821597
0.766167
0.734141
0.702525
0
0.023497
0.121597
6,686
138
133
48.449275
0.805891
0.194436
0
0.583333
0
0
0.061987
0.004108
0
0
0
0
0
1
0.071429
false
0
0.178571
0
0.321429
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
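Each builder above returns a compiled Keras functional model. A hedged usage sketch follows: it trains get_dnn on random embeddings just to show the expected shapes and calls. The dimension, batch size, and synthetic data are illustrative assumptions, not values from the repository.

import numpy as np

model = get_dnn(dimension=300)                  # builder defined in the file above
X = np.random.rand(256, 300).astype("float32")  # fake embedding matrix (assumption)
y = np.random.randint(0, 2, size=(256, 1))      # fake binary labels (assumption)
model.fit(X, y, batch_size=32, epochs=2, validation_split=0.1)
print(model.evaluate(X, y))                     # [loss, AUC]

For the two-input variants (get_wide_deep, get_dnn_dnn, get_dnn_cnn), fit takes a list of two arrays, one per Input tensor, in the order passed to models.Model.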
2785bc1626cfd268ef70c3f27d8be47f5bbccefe
166
py
Python
main.py
TSpidermanBoss/hppylinebot
13fb7a99c673565d323469c450fda542d7d2d474
[ "MIT" ]
null
null
null
main.py
TSpidermanBoss/hppylinebot
13fb7a99c673565d323469c450fda542d7d2d474
[ "MIT" ]
null
null
null
main.py
TSpidermanBoss/hppylinebot
13fb7a99c673565d323469c450fda542d7d2d474
[ "MIT" ]
null
null
null
from pyrogram import Client

Client("mnnn", bot_token="918978080:AAHHAghRQq83v3tDhQp1H75rvbPpjHAgmZ0", api_id=768402, api_hash="f6420bf67303614279049d48d3e670f6").run()
41.5
136
0.855422
17
166
8.176471
0.882353
0
0
0
0
0
0
0
0
0
0
0.29375
0.036145
166
3
137
55.333333
0.575
0
0
0
0
0
0.487952
0.463855
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
0
0
1
null
0
0
0
0
0
0
1
0
1
0
0
0
0
6
27dacf291bbe5e967cafdf4129e63c99b8b984ae
23,118
py
Python
DAD/Sala_aula/runtests_lms_v2.py
Gustavolsl/Falculdade_Impacta_3s
bf1a12c81fd088c4e1a0cd9182600730b43dcd63
[ "Apache-2.0" ]
null
null
null
DAD/Sala_aula/runtests_lms_v2.py
Gustavolsl/Falculdade_Impacta_3s
bf1a12c81fd088c4e1a0cd9182600730b43dcd63
[ "Apache-2.0" ]
6
2021-03-19T03:57:08.000Z
2022-03-12T00:00:33.000Z
DAD/Sala_aula/runtests_lms_v2.py
Gustavolsl/Falculdade_Impacta_3s
bf1a12c81fd088c4e1a0cd9182600730b43dcd63
[ "Apache-2.0" ]
null
null
null
import requests
import unittest
import sqlite3


class TestStringMethods(unittest.TestCase):

    def test_000_alunos_retorna_lista(self):
        r = requests.get('http://localhost:5002/alunos')
        self.assertEqual(type(r.json()), type([]))

    def test_001_adiciona_alunos(self):
        r = requests.post('http://localhost:5002/alunos', json={'nome': 'fernando', 'id': 1})
        r = requests.post('http://localhost:5002/alunos', json={'nome': 'roberto', 'id': 2})
        r_lista = requests.get('http://localhost:5002/alunos')
        achei_fernando = False
        achei_roberto = False
        for aluno in r_lista.json():
            if aluno['nome'] == 'fernando':
                achei_fernando = True
            if aluno['nome'] == 'roberto':
                achei_roberto = True
        if not achei_fernando:
            self.fail('aluno fernando nao apareceu na lista de alunos')
        if not achei_roberto:
            self.fail('aluno roberto nao apareceu na lista de alunos')

    def test_002_aluno_por_id(self):
        r = requests.post('http://localhost:5002/alunos', json={'nome': 'mario', 'id': 20})
        r_id = requests.get('http://localhost:5002/alunos/20')
        self.assertEqual(r_id.json()['nome'], 'mario')

    def test_003_adiciona_e_reseta(self):
        r = requests.post('http://localhost:5002/alunos', json={'nome': 'cicero', 'id': 29})
        r_lista = requests.get('http://localhost:5002/alunos')
        self.assertTrue(len(r_lista.json()) > 0)
        r_reset = requests.post('http://localhost:5002/reseta')
        self.assertEqual(r_reset.status_code, 200)
        r_lista_depois = requests.get('http://localhost:5002/alunos')
        self.assertEqual(len(r_lista_depois.json()), 0)

    def test_004_adiciona_e_deleta(self):
        r_reset = requests.post('http://localhost:5002/reseta')
        self.assertEqual(r_reset.status_code, 200)
        requests.post('http://localhost:5002/alunos', json={'nome': 'cicero', 'id': 29})
        requests.post('http://localhost:5002/alunos', json={'nome': 'lucas', 'id': 28})
        r_lista = requests.get('http://localhost:5002/alunos')
        self.assertEqual(len(r_lista.json()), 2)
        requests.delete('http://localhost:5002/alunos/28')
        r_lista = requests.get('http://localhost:5002/alunos')
        self.assertEqual(len(r_lista.json()), 1)

    def test_005_edita(self):
        r_reset = requests.post('http://localhost:5002/reseta')
        self.assertEqual(r_reset.status_code, 200)
        requests.post('http://localhost:5002/alunos', json={'nome': 'lucas', 'id': 28})
        r_antes = requests.get('http://localhost:5002/alunos/28')
        self.assertEqual(r_antes.json()['nome'], 'lucas')
        requests.put('http://localhost:5002/alunos/28', json={'nome': 'lucas mendes'})
        r_depois = requests.get('http://localhost:5002/alunos/28')
        self.assertEqual(r_depois.json()['nome'], 'lucas mendes')

    def test_006_id_inexistente(self):
        r_reset = requests.post('http://localhost:5002/reseta')
        self.assertEqual(r_reset.status_code, 200)
        r = requests.put('http://localhost:5002/alunos/15', json={'nome': 'bowser', 'id': 15})
        self.assertEqual(r.status_code, 400)
        self.assertEqual(r.json()['erro'], 'aluno nao encontrado')
        r = requests.get('http://localhost:5002/alunos/15')
        self.assertEqual(r.status_code, 400)
        self.assertEqual(r.json()['erro'], 'aluno nao encontrado')
        r = requests.delete('http://localhost:5002/alunos/15')
        self.assertEqual(r.status_code, 400)
        self.assertEqual(r.json()['erro'], 'aluno nao encontrado')

    def test_007_criar_com_id_ja_existente(self):
        r_reset = requests.post('http://localhost:5002/reseta')
        self.assertEqual(r_reset.status_code, 200)
        r = requests.post('http://localhost:5002/alunos', json={'nome': 'bond', 'id': 7})
        self.assertEqual(r.status_code, 200)
        r = requests.post('http://localhost:5002/alunos', json={'nome': 'james', 'id': 7})
        self.assertEqual(r.status_code, 400)
        self.assertEqual(r.json()['erro'], 'id ja utilizada')

    def test_008_post_ou_put_sem_nome(self):
        r_reset = requests.post('http://localhost:5002/reseta')
        self.assertEqual(r_reset.status_code, 200)
        r = requests.post('http://localhost:5002/alunos', json={'id': 8})
        self.assertEqual(r.status_code, 400)
        self.assertEqual(r.json()['erro'], 'aluno sem nome')
        r = requests.post('http://localhost:5002/alunos', json={'nome': 'maximus', 'id': 7})
        self.assertEqual(r.status_code, 200)
        r = requests.put('http://localhost:5002/alunos/7', json={'id': 7})
        self.assertEqual(r.status_code, 400)
        self.assertEqual(r.json()['erro'], 'aluno sem nome')

    def test_100_professores_retorna_lista(self):
        r = requests.get('http://localhost:5002/professores')
        self.assertEqual(type(r.json()), type([]))

    def test_100b_nao_confundir_professor_e_aluno(self):
        r_reset = requests.post('http://localhost:5002/reseta')
        r = requests.post('http://localhost:5002/alunos', json={'nome': 'fernando', 'id': 1})
        self.assertEqual(r.status_code, 200)
        r = requests.post('http://localhost:5002/alunos', json={'nome': 'roberto', 'id': 2})
        self.assertEqual(r.status_code, 200)
        r_lista = requests.get('http://localhost:5002/professores')
        self.assertEqual(len(r_lista.json()), 0)
        r_lista_alunos = requests.get('http://localhost:5002/alunos')
        self.assertEqual(len(r_lista_alunos.json()), 2)

    def test_101_adiciona_professores(self):
        r = requests.post('http://localhost:5002/professores', json={'nome': 'fernando', 'id': 1})
        r = requests.post('http://localhost:5002/professores', json={'nome': 'roberto', 'id': 2})
        r_lista = requests.get('http://localhost:5002/professores')
        achei_fernando = False
        achei_roberto = False
        for professor in r_lista.json():
            if professor['nome'] == 'fernando':
                achei_fernando = True
            if professor['nome'] == 'roberto':
                achei_roberto = True
        if not achei_fernando:
            self.fail('professor fernando nao apareceu na lista de professores')
        if not achei_roberto:
            self.fail('professor roberto nao apareceu na lista de professores')

    def test_102_professores_por_id(self):
        r = requests.post('http://localhost:5002/professores', json={'nome': 'mario', 'id': 20})
        r_lista = requests.get('http://localhost:5002/professores/20')
        self.assertEqual(r_lista.json()['nome'], 'mario')

    def test_103_adiciona_e_reseta(self):
        r = requests.post('http://localhost:5002/professores', json={'nome': 'cicero', 'id': 29})
        r_lista = requests.get('http://localhost:5002/professores')
        self.assertTrue(len(r_lista.json()) > 0)
        r_reset = requests.post('http://localhost:5002/reseta')
        self.assertEqual(r_reset.status_code, 200)
        r_lista_depois = requests.get('http://localhost:5002/professores')
        self.assertEqual(len(r_lista_depois.json()), 0)

    def test_104_adiciona_e_deleta(self):
        r_reset = requests.post('http://localhost:5002/reseta')
        self.assertEqual(r_reset.status_code, 200)
        requests.post('http://localhost:5002/professores', json={'nome': 'cicero', 'id': 29})
        requests.post('http://localhost:5002/professores', json={'nome': 'lucas', 'id': 28})
        r_lista = requests.get('http://localhost:5002/professores')
        self.assertEqual(len(r_lista.json()), 2)
        requests.delete('http://localhost:5002/professores/28')
        r_lista = requests.get('http://localhost:5002/professores')
        self.assertEqual(len(r_lista.json()), 1)

    def test_105_edita(self):
        r_reset = requests.post('http://localhost:5002/reseta')
        self.assertEqual(r_reset.status_code, 200)
        requests.post('http://localhost:5002/professores', json={'nome': 'lucas', 'id': 28})
        r_antes = requests.get('http://localhost:5002/professores/28')
        self.assertEqual(r_antes.json()['nome'], 'lucas')
        requests.put('http://localhost:5002/professores/28', json={'nome': 'lucas mendes'})
        r_depois = requests.get('http://localhost:5002/professores/28')
        self.assertEqual(r_depois.json()['nome'], 'lucas mendes')

    def test_106_id_inexistente(self):
        r_reset = requests.post('http://localhost:5002/reseta')
        self.assertEqual(r_reset.status_code, 200)
        r = requests.put('http://localhost:5002/professores/15', json={'nome': 'bowser', 'id': 15})
        self.assertEqual(r.status_code, 400)
        self.assertEqual(r.json()['erro'], 'professor nao encontrado')
        r = requests.get('http://localhost:5002/professores/15')
        self.assertEqual(r.status_code, 400)
        self.assertEqual(r.json()['erro'], 'professor nao encontrado')
        r = requests.delete('http://localhost:5002/professores/15')
        self.assertEqual(r.status_code, 400)
        self.assertEqual(r.json()['erro'], 'professor nao encontrado')

    def test_107_criar_com_id_ja_existente(self):
        r_reset = requests.post('http://localhost:5002/reseta')
        self.assertEqual(r_reset.status_code, 200)
        r = requests.post('http://localhost:5002/professores', json={'nome': 'bond', 'id': 7})
        self.assertEqual(r.status_code, 200)
        r = requests.post('http://localhost:5002/professores', json={'nome': 'james', 'id': 7})
        self.assertEqual(r.status_code, 400)
        self.assertEqual(r.json()['erro'], 'id ja utilizada')

    def test_108_post_ou_put_sem_nome(self):
        r_reset = requests.post('http://localhost:5002/reseta')
        self.assertEqual(r_reset.status_code, 200)
        r = requests.post('http://localhost:5002/professores', json={'id': 8})
        self.assertEqual(r.status_code, 400)
        self.assertEqual(r.json()['erro'], 'professor sem nome')
        r = requests.post('http://localhost:5002/professores', json={'nome': 'maximus', 'id': 7})
        self.assertEqual(r.status_code, 200)
        r = requests.put('http://localhost:5002/professores/7', json={'id': 7})
        self.assertEqual(r.status_code, 400)
        self.assertEqual(r.json()['erro'], 'professor sem nome')

    def test_109_nao_confundir_professor_e_aluno(self):
        r_reset = requests.post('http://localhost:5002/reseta')
        r = requests.post('http://localhost:5002/professores', json={'nome': 'fernando', 'id': 1})
        self.assertEqual(r.status_code, 200)
        r = requests.post('http://localhost:5002/professores', json={'nome': 'roberto', 'id': 2})
        self.assertEqual(r.status_code, 200)
        r_lista = requests.get('http://localhost:5002/professores')
        self.assertEqual(len(r_lista.json()), 2)
        r_lista_alunos = requests.get('http://localhost:5002/alunos')
        self.assertEqual(len(r_lista_alunos.json()), 0)

    def test_200_disciplinas_retorna_lista(self):
        r_reset = requests.post('http://localhost:5003/reseta')
        r = requests.get('http://localhost:5003/disciplinas')
        self.assertEqual(type(r.json()), type([]))

    def test_200b_nao_confundir_disciplina_e_pessoas(self):
        r_lista = requests.get('http://localhost:5003/disciplinas')
        tam_inicial = len(r_lista.json())
        r_reset = requests.post('http://localhost:5002/reseta')
        r_reset = requests.post('http://localhost:5003/reseta')
        r = requests.post('http://localhost:5002/alunos', json={'nome': 'fernando', 'id': 1})
        self.assertEqual(r.status_code, 200)
        r = requests.post('http://localhost:5002/professores', json={'nome': 'roberto', 'id': 2})
        self.assertEqual(r.status_code, 200)
        r_lista = requests.get('http://localhost:5003/disciplinas')
        self.assertEqual(len(r_lista.json()), tam_inicial)

    def test_201_adiciona_disciplinas(self):
        r = requests.post('http://localhost:5003/disciplinas', json={'id': 100, 'nome': 'estruturas de dados', 'status': 12, 'plano_ensino': 'dados', 'carga_horaria': 15})
        r = requests.post('http://localhost:5003/disciplinas', json={'id': 101, 'nome': 'distribuidos', 'status': 12, 'plano_ensino': 'clientes e servidores', 'carga_horaria': 10})
        r_lista = requests.get('http://localhost:5003/disciplinas')
        achei_dados = False
        achei_distribuidos = False
        for disciplina in r_lista.json():
            if 'dados' in disciplina['nome']:
                achei_dados = True
            if 'distri' in disciplina['nome']:
                achei_distribuidos = True
        if not achei_dados:
            self.fail('disciplina estrutura de dados nao apareceu na lista de disciplinas')
        if not achei_distribuidos:
            self.fail('disciplina distribuidos nao apareceu na lista de disciplinas')

    def test_202_disciplinas_por_id(self):
        r = requests.post('http://localhost:5003/disciplinas', json={'id': 103, 'nome': 'matematica', 'status': 12, 'plano_ensino': 'funcoes e calculo', 'carga_horaria': 15})
        r_lista = requests.get('http://localhost:5003/disciplinas/103')
        self.assertEqual(r_lista.json()['nome'], 'matematica')
        self.assertEqual(r_lista.json()['plano_ensino'], 'funcoes e calculo')
        self.assertEqual(r_lista.json()['carga_horaria'], 15)
        self.assertEqual(r_lista.json()['status'], 12)

    def test_203_adiciona_e_reseta(self):
        r = requests.post('http://localhost:5003/disciplinas', json={'id': 104, 'nome': 'lp2', 'status': 12, 'plano_ensino': 'dicionarios e classes', 'carga_horaria': 15})
        r_lista = requests.get('http://localhost:5003/disciplinas')
        self.assertTrue(len(r_lista.json()) > 0)
        r_reset = requests.post('http://localhost:5003/reseta')
        self.assertEqual(r_reset.status_code, 200)
        r_lista_depois = requests.get('http://localhost:5003/disciplinas')
        self.assertEqual(len(r_lista_depois.json()), 0)

    def test_204_adiciona_e_deleta(self):
        r_reset = requests.post('http://localhost:5003/reseta')
        self.assertEqual(r_reset.status_code, 200)
        r = requests.post('http://localhost:5003/disciplinas', json={'id': 100, 'nome': 'estruturas de dados', 'status': 12, 'plano_ensino': 'dados', 'carga_horaria': 15})
        r = requests.post('http://localhost:5003/disciplinas', json={'id': 101, 'nome': 'distribuidos', 'status': 12, 'plano_ensino': 'clientes e servidores', 'carga_horaria': 10})
        r_lista = requests.get('http://localhost:5003/disciplinas')
        self.assertEqual(len(r_lista.json()), 2)
        requests.delete('http://localhost:5003/disciplinas/100')
        r_lista = requests.get('http://localhost:5003/disciplinas')
        self.assertEqual(len(r_lista.json()), 1)

    def test_205_edita(self):
        r_reset = requests.post('http://localhost:5003/reseta')
        self.assertEqual(r_reset.status_code, 200)
        r = requests.post('http://localhost:5003/disciplinas', json={'id': 100, 'nome': 'estruturas de dados', 'status': 12, 'plano_ensino': 'dados', 'carga_horaria': 15})
        r_antes = requests.get('http://localhost:5003/disciplinas/100')
        self.assertEqual(r_antes.json()['nome'], 'estruturas de dados')
        requests.put('http://localhost:5003/disciplinas/100', json={'nome': 'algoritmos'})
        r_depois = requests.get('http://localhost:5003/disciplinas/100')
        self.assertEqual(r_depois.json()['nome'], 'algoritmos')

    def test_206_id_inexistente(self):
        r_reset = requests.post('http://localhost:5003/reseta')
        self.assertEqual(r_reset.status_code, 200)
        r = requests.put('http://localhost:5003/disciplinas/15', json={'nome': 'bowser', 'id': 15})
        self.assertEqual(r.status_code, 400)
        self.assertEqual(r.json()['erro'], 'disciplina nao encontrada')
        r = requests.get('http://localhost:5003/disciplinas/15')
        self.assertEqual(r.status_code, 400)
        self.assertEqual(r.json()['erro'], 'disciplina nao encontrada')
        r = requests.delete('http://localhost:5003/disciplinas/15')
        self.assertEqual(r.status_code, 400)
        self.assertEqual(r.json()['erro'], 'disciplina nao encontrada')

    def test_207_criar_com_id_ja_existente(self):
        r_reset = requests.post('http://localhost:5003/reseta')
        self.assertEqual(r_reset.status_code, 200)
        r = requests.post('http://localhost:5003/disciplinas', json={'id': 100, 'nome': 'estruturas de dados', 'status': 12, 'plano_ensino': 'dados', 'carga_horaria': 15})
        self.assertEqual(r.status_code, 200)
        r = requests.post('http://localhost:5003/disciplinas', json={'id': 100, 'nome': 'distribuidos', 'status': 12, 'plano_ensino': 'clientes e servidores', 'carga_horaria': 10})
        self.assertEqual(r.status_code, 400)
        self.assertEqual(r.json()['erro'], 'id ja utilizada')

    def test_208_post_com_campos_faltando(self):
        r_reset = requests.post('http://localhost:5003/reseta')
        self.assertEqual(r_reset.status_code, 200)
        r = requests.post('http://localhost:5003/disciplinas', json={'id': 100, 'nome': 'estruturas de dados', 'status': 12, 'plano_ensino': 'dados', 'carga_horaria': 15})
        self.assertEqual(r_reset.status_code, 200)
        r = requests.post('http://localhost:5003/disciplinas', json={'nome': 'estruturas de dados', 'status': 12, 'plano_ensino': 'dados', 'carga_horaria': 15})
        self.assertEqual(r.status_code, 400)
        self.assertTrue('erro' in r.json())
        r = requests.post('http://localhost:5003/disciplinas', json={'id': 101, 'status': 12, 'plano_ensino': 'dados', 'carga_horaria': 15})
        self.assertEqual(r.status_code, 400)
        self.assertTrue('erro' in r.json())
        r = requests.post('http://localhost:5003/disciplinas', json={'id': 102, 'nome': 'estruturas de dados', 'status': 12, 'carga_horaria': 15})
        self.assertEqual(r.status_code, 400)
        self.assertTrue('erro' in r.json())
        r = requests.post('http://localhost:5003/disciplinas', json={'id': 103, 'nome': 'estruturas de dados', 'status': 12, 'plano_ensino': 'dados'})
        self.assertEqual(r.status_code, 400)
        self.assertTrue('erro' in r.json())

    def test_209_criar_com_campos_invalidos(self):
        r_reset = requests.post('http://localhost:5003/reseta')
        self.assertEqual(r_reset.status_code, 200)
        r = requests.post('http://localhost:5003/disciplinas', json={'id': 100, 'nome': 'estruturas de dados', 'status': 'banana', 'plano_ensino': 'dados', 'carga_horaria': 15})
        self.assertEqual(r.status_code, 400)
        r = requests.post('http://localhost:5003/disciplinas', json={'id': 100, 'nome': 'estruturas de dados', 'status': 12, 'plano_ensino': 'dados', 'carga_horaria': 'abacate'})
        self.assertEqual(r.status_code, 400)
        r = requests.post('http://localhost:5003/disciplinas', json={'id': 'orangotango', 'nome': 'estruturas de dados', 'status': 12, 'plano_ensino': 'dados', 'carga_horaria': 15})
        self.assertEqual(r.status_code, 400)
        r = requests.post('http://localhost:5003/disciplinas', json={'id': 100, 'nome': 'estruturas de dados', 'status': 12, 'plano_ensino': 'dados', 'carga_horaria': 15})
        self.assertEqual(r.status_code, 200)

    def test_300_inclui_coordenador(self):
        r_reset = requests.post('http://localhost:5002/reseta')
        self.assertEqual(r_reset.status_code, 200)
        r_reset = requests.post('http://localhost:5003/reseta')
        self.assertEqual(r_reset.status_code, 200)
        r = requests.post('http://localhost:5002/professores', json={'nome': 'cicero', 'id': 29})
        self.assertEqual(r.status_code, 200)
        r = requests.post('http://localhost:5003/disciplinas', json={'id': 100, 'nome': 'estruturas de dados', 'status': 12, 'plano_ensino': 'dados', 'carga_horaria': 15, 'id_coordenador': 29})
        self.assertEqual(r.status_code, 200)
        r = requests.post('http://localhost:5002/professores', json={'nome': 'dai', 'id': 28})
        self.assertEqual(r.status_code, 200)
        r = requests.post('http://localhost:5003/disciplinas', json={'id': 101, 'nome': 'matemica', 'status': 12, 'plano_ensino': 'sistemas lineares', 'carga_horaria': 15, 'id_coordenador': 28})
        self.assertEqual(r.status_code, 200)

    def test_301_inclui_coordenador_invalido(self):
        r_reset = requests.post('http://localhost:5002/reseta')
        self.assertEqual(r_reset.status_code, 200)
        r_reset = requests.post('http://localhost:5003/reseta')
        self.assertEqual(r_reset.status_code, 200)
        r = requests.post('http://localhost:5002/professores', json={'nome': 'cicero', 'id': 29})
        self.assertEqual(r.status_code, 200)
        r = requests.post('http://localhost:5002/alunos', json={'nome': 'lucas', 'id': 28})
        self.assertEqual(r.status_code, 200)
        r = requests.post('http://localhost:5003/disciplinas', json={'id': 101, 'nome': 'matemica', 'status': 12, 'plano_ensino': 'sistemas lineares', 'carga_horaria': 15, 'id_coordenador': 28})
        self.assertEqual(r.status_code, 400)

    def test_400_db_existe(self):
        import os
        existe = os.path.exists('disciplina.db')
        if not existe:
            self.fail('arquivo db nao existe ou nao esta na mesma pasta que o runtests')

    def test_401_disciplinas_com_db(self):
        r_reset = requests.post('http://localhost:5003/reseta')
        self.assertEqual(r_reset.status_code, 200)
        self.nao_devo_achar('estruturas de dados')
        self.nao_devo_achar('distribuidos')
        r = requests.post('http://localhost:5003/disciplinas', json={'id': 100, 'nome': 'estruturas de dados', 'status': 12, 'plano_ensino': 'dados', 'carga_horaria': 15, 'id_coordenador': 29})
        self.assertEqual(r.status_code, 200)
        self.devo_achar('estruturas de dados')
        self.nao_devo_achar('distribuidos')
        r = requests.post('http://localhost:5003/disciplinas', json={'id': 101, 'nome': 'distribuidos', 'status': 10, 'plano_ensino': 'servidores', 'carga_horaria': 15, 'id_coordenador': 29})
        self.assertEqual(r.status_code, 200)
        self.devo_achar('estruturas de dados')
        self.devo_achar('distribuidos')

    def sql_busca_tosca(self, procurando):
        con = sqlite3.connect('disciplina.db')
        cursorObj = con.cursor()
        cursorObj.execute('SELECT name from sqlite_master where type= "table"')
        tabelas = [a[0] for a in cursorObj.fetchall()]
        dados = []
        for tabela in tabelas:
            cursorObj.execute('SELECT * from ' + tabela)
            dados.extend(cursorObj.fetchall())
        con.close()
        return procurando in str(dados)

    def devo_achar(self, string):
        if not self.sql_busca_tosca(string):
            self.fail('procurei a string ' + string + '''na sua base de dados
era para ela estar lá, mas não achei''')

    def nao_devo_achar(self, string):
        if self.sql_busca_tosca(string):
            self.fail('procurei a string ' + string + '''na sua base de dados
era para ela NAO estar lá, mas achei''')


def runTests():
    suite = unittest.defaultTestLoader.loadTestsFromTestCase(TestStringMethods)
    unittest.TextTestRunner(verbosity=2, failfast=True).run(suite)


if __name__ == '__main__':
    runTests()
54.014019
183
0.658232
2,998
23,118
4.92962
0.072048
0.11787
0.105014
0.142094
0.883957
0.867041
0.824142
0.818391
0.797754
0.775289
0
0.059629
0.17229
23,118
427
184
54.140515
0.712725
0
0
0.560773
0
0
0.325071
0
0
0
0
0
0.334254
1
0.107735
false
0
0.01105
0
0.124309
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
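The suite above specifies the HTTP contract of two small services (people on port 5002, disciplines on port 5003) without showing them. Below is one possible minimal sketch of a slice of the alunos service those tests probe; it is an illustrative assumption, not the graded solution, and covers only list, create, fetch-by-id, and reset with in-memory storage. The error strings are taken verbatim from the assertions above.

from flask import Flask, jsonify, request

app = Flask(__name__)
alunos = {}

@app.route('/alunos', methods=['GET'])
def lista_alunos():
    return jsonify(list(alunos.values()))

@app.route('/alunos', methods=['POST'])
def cria_aluno():
    dados = request.json
    if 'nome' not in dados:
        return jsonify({'erro': 'aluno sem nome'}), 400
    if dados['id'] in alunos:
        return jsonify({'erro': 'id ja utilizada'}), 400
    alunos[dados['id']] = dados
    return jsonify(dados), 200

@app.route('/alunos/<int:id_aluno>', methods=['GET'])
def pega_aluno(id_aluno):
    if id_aluno not in alunos:
        return jsonify({'erro': 'aluno nao encontrado'}), 400
    return jsonify(alunos[id_aluno])

@app.route('/reseta', methods=['POST'])
def reseta():
    alunos.clear()
    return jsonify({}), 200

if __name__ == '__main__':
    app.run(port=5002)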
27ecb47be225f9c2657e340f780db63fe49db388
86
py
Python
prodapt_solutions/parser/xml_parser.py
DineshDevaraj/interview_answers
8d3d631dc96dc97ebef80604d6455c2c57c8823d
[ "MIT" ]
null
null
null
prodapt_solutions/parser/xml_parser.py
DineshDevaraj/interview_answers
8d3d631dc96dc97ebef80604d6455c2c57c8823d
[ "MIT" ]
null
null
null
prodapt_solutions/parser/xml_parser.py
DineshDevaraj/interview_answers
8d3d631dc96dc97ebef80604d6455c2c57c8823d
[ "MIT" ]
null
null
null
from parser.unified_parser import InputParser


class XmlParser(InputParser):
    pass
17.2
45
0.813953
10
86
6.9
0.8
0
0
0
0
0
0
0
0
0
0
0
0.139535
86
5
46
17.2
0.932432
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.333333
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
6
fd6945a8cb2800604bb2cdeb166dfc4e47496ff6
33
py
Python
dataset/__init__.py
ResByte/aptos-blindness-pytorch
7e0825336d1c1a82c8a4a405102df713580ec184
[ "Apache-2.0" ]
null
null
null
dataset/__init__.py
ResByte/aptos-blindness-pytorch
7e0825336d1c1a82c8a4a405102df713580ec184
[ "Apache-2.0" ]
null
null
null
dataset/__init__.py
ResByte/aptos-blindness-pytorch
7e0825336d1c1a82c8a4a405102df713580ec184
[ "Apache-2.0" ]
null
null
null
from .dataset import AptosDataset
33
33
0.878788
4
33
7.25
1
0
0
0
0
0
0
0
0
0
0
0
0.090909
33
1
33
33
0.966667
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
fd9e6e116488d3d00923200df263b088f87457c6
14,198
py
Python
nas_lib/params_nas.py
auroua/SSNENAS
65bdece174f0da2f9a3c716b86859abba077d279
[ "MIT" ]
2
2020-12-29T06:33:22.000Z
2022-02-19T22:21:05.000Z
nas_lib/params_nas.py
auroua/SSNENAS
65bdece174f0da2f9a3c716b86859abba077d279
[ "MIT" ]
null
null
null
nas_lib/params_nas.py
auroua/SSNENAS
65bdece174f0da2f9a3c716b86859abba077d279
[ "MIT" ]
null
null
null
import sys


def meta_neuralnet_params(param_str):
    if param_str == 'nasbench_101':
        params = {'search_space': 'nasbench_101', 'loss': 'mae', 'num_layers': 10, 'layer_width': 20,
                  'epochs': 150, 'batch_size': 32, 'lr': .01, 'regularization': 0, 'verbose': 0}
    elif param_str == 'nasbench_201':
        params = {'search_space': 'nasbench_201', 'loss': 'mae', 'num_layers': 10, 'layer_width': 200,
                  'epochs': 200, 'batch_size': 32, 'lr': .001, 'regularization': 0, 'verbose': 0}
    else:
        print('invalid meta neural net params')
        sys.exit()
    return params


def algo_params_close_domain(param_str, search_budget=100, dataname='cifar10'):
    """
    Return a params list based on param_str.
    These are the parameters used to produce the figures in the paper.
    For AlphaX and Reinforcement Learning, we used the corresponding github repos:
    https://github.com/linnanwang/AlphaX-NASBench101
    https://github.com/automl/nas_benchmarks
    """
    params = []
    if dataname == 'cifar10-valid':
        rate = 10.
    elif dataname == 'cifar100':
        rate = 30.
    elif dataname == 'ImageNet16-120':
        rate = 55
    else:
        raise NotImplementedError()
    if param_str == 'nasbench_101':
        params.append({'algo_name': 'random', 'total_queries': search_budget})
        params.append({'algo_name': 'evolution', 'total_queries': search_budget, 'population_size': 30,
                       'num_init': 10, 'k': 10, 'tournament_size': 10, 'mutation_rate': 1.0})
        params.append({'algo_name': 'bananas', 'total_queries': search_budget, 'num_ensemble': 5,
                       'allow_isomorphisms': False, 'acq_opt_type': 'mutation', 'candidate_nums': 100,
                       'num_init': 10, 'k': 10, 'encode_paths': True})
        params.append({'algo_name': 'bananas_f', 'total_queries': search_budget, 'num_ensemble': 5,
                       'allow_isomorphisms': False, 'acq_opt_type': 'mutation', 'candidate_nums': 100,
                       'num_init': 10, 'k': 10, 'encode_paths': False})
        params.append({'algo_name': 'bananas_context', 'total_queries': search_budget, 'num_ensemble': 5,
                       'allow_isomorphisms': False, 'acq_opt_type': 'mutation', 'candidate_nums': 100,
                       'num_init': 10, 'k': 10, 'encode_paths': False})
        params.append({'algo_name': 'gin_predictor', 'total_queries': search_budget, 'agent': 'gin_predictor',
                       'num_init': 10, 'allow_isomorphisms': False, 'lr': 0.005, 'candidate_nums': 100,
                       'epochs': 300})
        params.append({'algo_name': 'gin_predictor_fixed_nums', 'total_queries': search_budget,
                       'agent': 'gin_predictor', 'num_init': 10, 'allow_isomorphisms': False, 'lr': 0.005,
                       'candidate_nums': 100, 'epochs': 300, 'training_nums': 50})
        params.append({'algo_name': 'gin_predictor_ss_rl', 'total_queries': search_budget, 'k': 10,
                       'agent': 'gin_predictor', 'num_init': 10, 'allow_isomorphisms': False, 'lr': 0.005,
                       'candidate_nums': 100, 'epochs': 150, 'predictor_type': 'ss_rl'})
        params.append({'algo_name': 'gin_predictor_ss_rl_num_fixed', 'total_queries': search_budget, 'k': 10,
                       'agent': 'gin_predictor', 'num_init': 10, 'allow_isomorphisms': False, 'lr': 0.005,
                       'candidate_nums': 100, 'epochs': 150, 'predictor_type': 'ss_rl', 'training_nums': 90})
        params.append({'algo_name': 'gin_predictor_ss_ccl', 'total_queries': search_budget, 'k': 10,
                       'agent': 'gin_predictor', 'num_init': 10, 'allow_isomorphisms': False, 'lr': 0.005,
                       'candidate_nums': 100, 'epochs': 200, 'predictor_type': 'ss_ccl'})
        params.append({'algo_name': 'gin_predictor_ss_ccl_num_fixed', 'total_queries': search_budget, 'k': 10,
                       'agent': 'gin_predictor', 'num_init': 10, 'allow_isomorphisms': False, 'lr': 0.005,
                       'candidate_nums': 100, 'epochs': 200, 'predictor_type': 'ss_ccl', 'training_nums': 90})
    elif param_str == 'nasbench_201':
        params.append({'algo_name': 'random', 'total_queries': search_budget})
        params.append({'algo_name': 'evolution', 'total_queries': search_budget, 'population_size': 30,
                       'num_init': 10, 'k': 10, 'tournament_size': 10, 'mutation_rate': 1.0,
                       'allow_isomorphisms': False, 'deterministic': True})
        params.append({'algo_name': 'bananas', 'total_queries': search_budget, 'num_ensemble': 5,
                       'allow_isomorphisms': False, 'acq_opt_type': 'mutation', 'candidate_nums': 100,
                       'num_init': 10, 'k': 10, 'encode_paths': True, 'eva_new': False})
        params.append({'algo_name': 'bananas_f', 'total_queries': search_budget, 'num_ensemble': 5,
                       'allow_isomorphisms': False, 'acq_opt_type': 'mutation', 'candidate_nums': 100,
                       'num_init': 10, 'k': 10, 'encode_paths': False})
        params.append({'algo_name': 'bananas_context', 'total_queries': search_budget, 'num_ensemble': 5,
                       'allow_isomorphisms': False, 'acq_opt_type': 'mutation', 'candidate_nums': 100,
                       'num_init': 10, 'k': 10, 'encode_paths': False})
        params.append({'algo_name': 'gin_predictor', 'total_queries': search_budget, 'agent': 'gin_predictor',
                       'num_init': 10, 'allow_isomorphisms': False, 'lr': 0.005, 'candidate_nums': 100,
                       'epochs': 300, 'rate': rate})
        params.append({'algo_name': 'gin_predictor_fixed_nums', 'total_queries': search_budget,
                       'agent': 'gin_predictor', 'num_init': 10, 'allow_isomorphisms': False, 'lr': 0.005,
                       'candidate_nums': 100, 'epochs': 300, 'training_nums': 50, 'rate': rate})
        params.append({'algo_name': 'gin_predictor_ss_rl', 'total_queries': search_budget, 'k': 10,
                       'agent': 'gin_predictor', 'num_init': 10, 'allow_isomorphisms': False, 'lr': 0.005,
                       'candidate_nums': 100, 'epochs': 150, 'predictor_type': 'ss_rl', 'rate': rate})
        params.append({'algo_name': 'gin_predictor_ss_rl_num_fixed', 'total_queries': search_budget, 'k': 10,
                       'agent': 'gin_predictor', 'num_init': 10, 'allow_isomorphisms': False, 'lr': 0.005,
                       'rate': rate, 'candidate_nums': 100, 'epochs': 150, 'predictor_type': 'ss_rl',
                       'training_nums': 50})
        params.append({'algo_name': 'gin_predictor_ss_ccl', 'total_queries': search_budget, 'k': 10,
                       'agent': 'gin_predictor', 'num_init': 10, 'allow_isomorphisms': False, 'lr': 0.005,
                       'rate': rate, 'candidate_nums': 100, 'epochs': 150, 'predictor_type': 'ss_ccl'})
        params.append({'algo_name': 'gin_predictor_ss_ccl_num_fixed', 'total_queries': search_budget, 'k': 10,
                       'agent': 'gin_predictor', 'num_init': 10, 'allow_isomorphisms': False, 'lr': 0.005,
                       'rate': rate, 'candidate_nums': 100, 'epochs': 150, 'predictor_type': 'ss_ccl',
                       'training_nums': 50})
    elif param_str == 'nasbench_101_fixed':
        params.append({'algo_name': 'gin_predictor_ss_rl_num_fixed', 'total_queries': search_budget, 'k': 10,
                       'agent': 'gin_predictor', 'num_init': 10, 'allow_isomorphisms': False, 'lr': 0.005,
                       'candidate_nums': 100, 'epochs': 150, 'predictor_type': 'ss_rl', 'training_nums': 20})
        params.append({'algo_name': 'gin_predictor_ss_rl_num_fixed', 'total_queries': search_budget, 'k': 10,
                       'agent': 'gin_predictor', 'num_init': 10, 'allow_isomorphisms': False, 'lr': 0.005,
                       'candidate_nums': 100, 'epochs': 150, 'predictor_type': 'ss_rl', 'training_nums': 50})
        params.append({'algo_name': 'gin_predictor_ss_rl_num_fixed', 'total_queries': search_budget, 'k': 10,
                       'agent': 'gin_predictor', 'num_init': 10, 'allow_isomorphisms': False, 'lr': 0.005,
                       'candidate_nums': 100, 'epochs': 150, 'predictor_type': 'ss_rl', 'training_nums': 80})
        params.append({'algo_name': 'gin_predictor_ss_rl_num_fixed', 'total_queries': search_budget, 'k': 10,
                       'agent': 'gin_predictor', 'num_init': 10, 'allow_isomorphisms': False, 'lr': 0.005,
                       'candidate_nums': 100, 'epochs': 150, 'predictor_type': 'ss_rl', 'training_nums': 110})
        params.append({'algo_name': 'gin_predictor_ss_rl_num_fixed', 'total_queries': search_budget, 'k': 10,
                       'agent': 'gin_predictor', 'num_init': 10, 'allow_isomorphisms': False, 'lr': 0.005,
                       'candidate_nums': 100, 'epochs': 150, 'predictor_type': 'ss_rl', 'training_nums': 150})
        params.append({'algo_name': 'gin_predictor_ss_ccl_num_fixed', 'total_queries': search_budget, 'k': 10,
                       'agent': 'gin_predictor', 'num_init': 10, 'allow_isomorphisms': False, 'lr': 0.005,
                       'candidate_nums': 100, 'epochs': 200, 'predictor_type': 'ss_ccl', 'training_nums': 20})
        params.append({'algo_name': 'gin_predictor_ss_ccl_num_fixed', 'total_queries': search_budget, 'k': 10,
                       'agent': 'gin_predictor', 'num_init': 10, 'allow_isomorphisms': False, 'lr': 0.005,
                       'candidate_nums': 100, 'epochs': 200, 'predictor_type': 'ss_ccl', 'training_nums': 50})
        params.append({'algo_name': 'gin_predictor_ss_ccl_num_fixed', 'total_queries': search_budget, 'k': 10,
                       'agent': 'gin_predictor', 'num_init': 10, 'allow_isomorphisms': False, 'lr': 0.005,
                       'candidate_nums': 100, 'epochs': 200, 'predictor_type': 'ss_ccl', 'training_nums': 80})
        params.append({'algo_name': 'gin_predictor_ss_ccl_num_fixed', 'total_queries': search_budget, 'k': 10,
                       'agent': 'gin_predictor', 'num_init': 10, 'allow_isomorphisms': False, 'lr': 0.005,
                       'candidate_nums': 100, 'epochs': 200, 'predictor_type': 'ss_ccl', 'training_nums': 110})
        params.append({'algo_name': 'gin_predictor_ss_ccl_num_fixed', 'total_queries': search_budget, 'k': 10,
                       'agent': 'gin_predictor', 'num_init': 10, 'allow_isomorphisms': False, 'lr': 0.005,
                       'candidate_nums': 100, 'epochs': 200, 'predictor_type': 'ss_ccl', 'training_nums': 150})
    elif param_str == 'nasbench_201_fixed':
        params.append({'algo_name': 'gin_predictor_ss_rl_num_fixed', 'total_queries': search_budget, 'k': 10,
                       'agent': 'gin_predictor', 'num_init': 10, 'allow_isomorphisms': False, 'lr': 0.005,
                       'rate': rate, 'candidate_nums': 100, 'epochs': 150, 'predictor_type': 'ss_rl',
                       'training_nums': 20})
        params.append({'algo_name': 'gin_predictor_ss_rl_num_fixed', 'total_queries': search_budget, 'k': 10,
                       'agent': 'gin_predictor', 'num_init': 10, 'allow_isomorphisms': False, 'lr': 0.005,
                       'rate': rate, 'candidate_nums': 100, 'epochs': 150, 'predictor_type': 'ss_rl',
                       'training_nums': 50})
        params.append({'algo_name': 'gin_predictor_ss_rl_num_fixed', 'total_queries': search_budget, 'k': 10,
                       'agent': 'gin_predictor', 'num_init': 10, 'allow_isomorphisms': False, 'lr': 0.005,
                       'rate': rate, 'candidate_nums': 100, 'epochs': 150, 'predictor_type': 'ss_rl',
                       'training_nums': 80})
        params.append({'algo_name': 'gin_predictor_ss_rl_num_fixed', 'total_queries': search_budget, 'k': 10,
                       'agent': 'gin_predictor', 'num_init': 10, 'allow_isomorphisms': False, 'lr': 0.005,
                       'rate': rate, 'candidate_nums': 100, 'epochs': 150, 'predictor_type': 'ss_rl',
                       'training_nums': 100})
        params.append({'algo_name': 'gin_predictor_ss_ccl_num_fixed', 'total_queries': search_budget, 'k': 10,
                       'agent': 'gin_predictor', 'num_init': 10, 'allow_isomorphisms': False, 'lr': 0.005,
                       'rate': rate, 'candidate_nums': 100, 'epochs': 150, 'predictor_type': 'ss_ccl',
                       'training_nums': 20})
        params.append({'algo_name': 'gin_predictor_ss_ccl_num_fixed', 'total_queries': search_budget, 'k': 10,
                       'agent': 'gin_predictor', 'num_init': 10, 'allow_isomorphisms': False, 'lr': 0.005,
                       'rate': rate, 'candidate_nums': 100, 'epochs': 150, 'predictor_type': 'ss_ccl',
                       'training_nums': 50})
        params.append({'algo_name': 'gin_predictor_ss_ccl_num_fixed', 'total_queries': search_budget, 'k': 10,
                       'agent': 'gin_predictor', 'num_init': 10, 'allow_isomorphisms': False, 'lr': 0.005,
                       'rate': rate, 'candidate_nums': 100, 'epochs': 150, 'predictor_type': 'ss_ccl',
                       'training_nums': 80})
        params.append({'algo_name': 'gin_predictor_ss_ccl_num_fixed', 'total_queries': search_budget, 'k': 10,
                       'agent': 'gin_predictor', 'num_init': 10, 'allow_isomorphisms': False, 'lr': 0.005,
                       'rate': rate, 'candidate_nums': 100, 'epochs': 150, 'predictor_type': 'ss_ccl',
                       'training_nums': 100})
    elif param_str == 'experiment':
        pass
    else:
        print('invalid algorithm params')
        sys.exit()
    print('* Running experiment: ' + param_str)
    return params


def algo_params_open_domain(param_str):
    if param_str == 'gin_predictor':
        # gin_predictor gin_predictor_fixed_num
        param = {'algo_name': 'gin_predictor_fixed_num', 'total_queries': 30, 'agent': 'gin_predictor',
                 'num_init': 10, 'allow_isomorphisms': False, 'k': 10, 'epochs': 300, 'batch_size': 32,
                 'lr': 0.005, 'encode_path': True, 'candidate_nums': 100, 'mutate_rate': 2.0,
                 'filter_method': 'pape', 'fixed_num': 70}
    else:
        raise NotImplementedError("This algorithm has not been implemented!")
    print('* Running experiment: ' + str(param))
    return param
81.131429
128
0.599662
1,695
14,198
4.691445
0.087316
0.098089
0.080483
0.100604
0.867329
0.851735
0.838028
0.830986
0.82998
0.823944
0
0.058378
0.237498
14,198
175
129
81.131429
0.67615
0.022116
0
0.605096
0
0
0.429026
0.052012
0
0
0
0
0
1
0.019108
false
0.006369
0.006369
0
0.044586
0.025478
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
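The helpers above just build configuration dicts; the search loop that consumes them lives elsewhere in the repository. A hedged sketch of the typical consumption pattern (the dispatch itself is an assumption, not shown in this file):

params_list = algo_params_close_domain('nasbench_201', search_budget=150,
                                       dataname='cifar100')
for algo_params in params_list:
    name = algo_params.pop('algo_name')       # e.g. 'bananas', 'gin_predictor_ss_rl'
    budget = algo_params.pop('total_queries') # query budget for this run
    print(name, budget, algo_params)          # hand off to the matching search agent here

Note that 'rate' is only injected for nasbench_201-style search spaces, and that the *_num_fixed variants repeat the same algorithm with different 'training_nums' values, which is how the fixed-budget ablations are swept.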
fda5d899ee7641b030918c162e49d451ec8a4d62
22
py
Python
pyalgo/__init__.py
ksks2211/pyalgo
e8f26361fc404324b267f3923f6a1a5183ba102e
[ "MIT" ]
null
null
null
pyalgo/__init__.py
ksks2211/pyalgo
e8f26361fc404324b267f3923f6a1a5183ba102e
[ "MIT" ]
null
null
null
pyalgo/__init__.py
ksks2211/pyalgo
e8f26361fc404324b267f3923f6a1a5183ba102e
[ "MIT" ]
null
null
null
from . import sorting
11
21
0.772727
3
22
5.666667
1
0
0
0
0
0
0
0
0
0
0
0
0.181818
22
1
22
22
0.944444
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
fdaee7e72697de590bf1e25412a7da67ca369ea3
31
py
Python
sidebars/guidance/__init__.py
whytheplatypus/cmcs-eregulations
010affdbefd8499ad9bed4d95275ccfd2ba9014f
[ "CC0-1.0" ]
null
null
null
sidebars/guidance/__init__.py
whytheplatypus/cmcs-eregulations
010affdbefd8499ad9bed4d95275ccfd2ba9014f
[ "CC0-1.0" ]
null
null
null
sidebars/guidance/__init__.py
whytheplatypus/cmcs-eregulations
010affdbefd8499ad9bed4d95275ccfd2ba9014f
[ "CC0-1.0" ]
null
null
null
from .guidance import Guidance
15.5
30
0.83871
4
31
6.5
0.75
0
0
0
0
0
0
0
0
0
0
0
0.129032
31
1
31
31
0.962963
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
fdcf0687815c431a6a2fb6cbd7bfca5dc21c28f1
325
py
Python
reqlog/dbschema/__init__.py
JFF-Bohdan/reqlog
a7ba7b6e12609d736b3cd8cd8bc2913d511848ee
[ "MIT" ]
null
null
null
reqlog/dbschema/__init__.py
JFF-Bohdan/reqlog
a7ba7b6e12609d736b3cd8cd8bc2913d511848ee
[ "MIT" ]
null
null
null
reqlog/dbschema/__init__.py
JFF-Bohdan/reqlog
a7ba7b6e12609d736b3cd8cd8bc2913d511848ee
[ "MIT" ]
null
null
null
from .tbl_sys_users import *  # noqa
from .tbl_logged_requests import *  # noqa
from .tbl_request_parameters import *  # noqa
from .tbl_data_collecting_nodes import *  # noqa
from .tbl_data_collecting_device import *  # noqa
from .tbl_dc_available_scopes import *  # noqa
from .tbl_link_users_to_scopes import *  # noqa
46.428571
50
0.769231
47
325
4.914894
0.404255
0.212121
0.363636
0.441558
0.268398
0.268398
0
0
0
0
0
0
0.166154
325
7
51
46.428571
0.852399
0.104615
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
6
8b686750e96472a73cac5cdca5baa582e730ea0f
145
py
Python
thecreamind_frappe/the_crea_mind/doctype/crea_mind_kid/test_crea_mind_kid.py
FathimaSayeeda/creamindBackend
7d94b08b1f1df5ab35a5b07a0d00d88bfaf1f90e
[ "MIT" ]
2
2021-07-25T09:28:46.000Z
2021-12-07T22:03:29.000Z
thecreamind_frappe/the_crea_mind/doctype/crea_mind_kid/test_crea_mind_kid.py
FathimaSayeeda/creamindBackend
7d94b08b1f1df5ab35a5b07a0d00d88bfaf1f90e
[ "MIT" ]
null
null
null
thecreamind_frappe/the_crea_mind/doctype/crea_mind_kid/test_crea_mind_kid.py
FathimaSayeeda/creamindBackend
7d94b08b1f1df5ab35a5b07a0d00d88bfaf1f90e
[ "MIT" ]
3
2021-07-22T23:31:04.000Z
2021-08-14T22:45:20.000Z
# Copyright (c) 2021, Fahim and Contributors
# See license.txt

# import frappe
import unittest


class TestCreaMindKid(unittest.TestCase):
    pass
16.111111
44
0.77931
18
145
6.277778
0.888889
0
0
0
0
0
0
0
0
0
0
0.032258
0.144828
145
8
45
18.125
0.879032
0.496552
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.333333
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
6
8ba160e85b1169541602c16f2804a26a2d5058b6
27
py
Python
book-code/numpy-ml/numpy_ml/neural_nets/activations/__init__.py
yangninghua/code_library
b769abecb4e0cbdbbb5762949c91847a0f0b3c5a
[ "MIT" ]
3
2021-07-07T13:28:01.000Z
2021-11-12T06:32:49.000Z
book-code/numpy-ml/numpy_ml/neural_nets/activations/__init__.py
yangninghua/code_library
b769abecb4e0cbdbbb5762949c91847a0f0b3c5a
[ "MIT" ]
null
null
null
book-code/numpy-ml/numpy_ml/neural_nets/activations/__init__.py
yangninghua/code_library
b769abecb4e0cbdbbb5762949c91847a0f0b3c5a
[ "MIT" ]
3
2021-11-17T08:46:37.000Z
2022-03-04T16:35:36.000Z
from .activations import *
13.5
26
0.777778
3
27
7
1
0
0
0
0
0
0
0
0
0
0
0
0.148148
27
1
27
27
0.913043
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
47609f893d72b1fb1dabce0543ded32355768797
555
py
Python
scripts/utils/__init__.py
kellisfm/deep-protein-generation
0804aa5c8e48edeab0fe269e9129234f62558fc2
[ "MIT" ]
23
2020-04-08T07:49:17.000Z
2022-03-30T06:02:35.000Z
scripts/utils/__init__.py
kellisfm/deep-protein-generation
0804aa5c8e48edeab0fe269e9129234f62558fc2
[ "MIT" ]
4
2020-01-28T23:13:38.000Z
2022-02-10T01:10:08.000Z
scripts/utils/__init__.py
kellisfm/deep-protein-generation
0804aa5c8e48edeab0fe269e9129234f62558fc2
[ "MIT" ]
11
2020-05-13T15:18:04.000Z
2022-03-23T08:12:58.000Z
aa_letters = ['-', 'A', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'K', 'L', 'M',
              'N', 'P', 'Q', 'R', 'S', 'T', 'V', 'W', 'Y']

luxa_seq = 'MKFGNFLLTYQPPQFSQTEVMKRLVKLGRISEECGFDTVWLLEHHFTEFGLLGNPYVAAAYLLGATKKLNVGTAAIVLPTAHPVRQLEDVNLLDQMSK' \
           'GRFRFGICRGLYNKDFRVFGTDMNNSRALAECWYGLIKNGMTEGYMEADNEHIKFHKVKVNPAAYSRGGAPVYVVAESASTTEWAAQFGLPMILSWIIN' \
           'TNEKKAQLELYNEVAQEYGHDIHNIDHCLSYITSVDHDSIKAKEICRKFLGHWYDSYVNATTIFDDSDQTRGYDFNKGQWRDFVLKGHKDTNRRIDYSY' \
           'EINPVGTPQECIDIIQKDIDATGISNICCGFEANGTVDEIIASMKLFQSDVMPFLKEKQRSLLY'
79.285714
114
0.717117
28
555
14.142857
1
0
0
0
0
0
0
0
0
0
0
0
0.142342
555
7
115
79.285714
0.831933
0
0
0
0
0
0.685252
0.647482
0
1
0
0
0
1
0
false
0
0
0
0
0
1
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
1
null
1
0
0
0
0
0
0
0
0
0
0
0
0
6
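An alphabet like aa_letters (20 amino acids plus '-' as the gap symbol) is typically used to one-hot encode aligned protein sequences before feeding them to a model. A hedged sketch follows; the helper name seq_to_one_hot is an illustrative assumption, not part of this module.

import numpy as np

def seq_to_one_hot(seq, alphabet=aa_letters):
    # Map each residue to its index in the alphabet, then set one bit per position.
    idx = {aa: i for i, aa in enumerate(alphabet)}
    onehot = np.zeros((len(seq), len(alphabet)), dtype=np.float32)
    for pos, aa in enumerate(seq):
        onehot[pos, idx[aa]] = 1.0
    return onehot

print(seq_to_one_hot(luxa_seq).shape)  # (len(luxa_seq), 21)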
476ba6db5517cc0cad4b40d95e400eeb5d2533e9
3,783
py
Python
pyflux/var/tests/var_tests.py
ThomasHoppe/pyflux
297f2afc2095acd97c12e827dd500e8ea5da0c0f
[ "BSD-3-Clause" ]
2,091
2016-04-01T02:52:10.000Z
2022-03-29T11:38:15.000Z
pyflux/var/tests/var_tests.py
EricSchles/pyflux
297f2afc2095acd97c12e827dd500e8ea5da0c0f
[ "BSD-3-Clause" ]
160
2016-04-26T14:52:18.000Z
2022-03-15T02:09:07.000Z
pyflux/var/tests/var_tests.py
EricSchles/pyflux
297f2afc2095acd97c12e827dd500e8ea5da0c0f
[ "BSD-3-Clause" ]
264
2016-05-02T14:03:31.000Z
2022-03-29T07:48:20.000Z
import numpy as np
import pyflux as pf
import pandas as pd

noise_1 = np.random.normal(0, 1, 350)
noise_2 = np.random.normal(0, 1, 350)
data_1 = np.zeros(350)
data_2 = np.zeros(350)

for i in range(1, len(data_1)):
    data_1[i] = 0.9*data_1[i-1] + noise_1[i]
    data_2[i] = 0.9*data_2[i-1] + noise_2[i]

data = pd.DataFrame([data_1, data_2]).T
data.columns = ['test1', 'test2']

# Uncomment once PML/Laplace approximation is more robust

def test_couple_terms():
    """
    Tests a VAR model with 1 AR and 1 MA term and that the latent variable
    list length is correct, and that the estimated latent variables are not nan
    """
    model = pf.VAR(data=data, lags=2)
    x = model.fit()
    assert(len(model.latent_variables.z_list) == 13)
    lvs = np.array([i.value for i in model.latent_variables.z_list])
    assert(len(lvs[np.isnan(lvs)]) == 0)

def test_couple_terms_integ():
    """
    Tests a VAR model with 1 AR and 1 MA term, integrated once, and that the
    latent variable list length is correct, and that the estimated latent
    variables are not nan
    """
    model = pf.VAR(data=data, lags=2, integ=1)
    x = model.fit()
    assert(len(model.latent_variables.z_list) == 13)
    lvs = np.array([i.value for i in model.latent_variables.z_list])
    assert(len(lvs[np.isnan(lvs)]) == 0)

def test_bbvi():
    """
    Tests a VAR model estimated with BBVI and that the length of the latent
    variable list is correct, and that the estimated latent variables are not nan
    """
    model = pf.VAR(data=data, lags=2)
    x = model.fit('BBVI', iterations=100)
    assert(len(model.latent_variables.z_list) == 13)
    lvs = np.array([i.value for i in model.latent_variables.z_list])
    assert(len(lvs[np.isnan(lvs)]) == 0)

def test_mh():
    """
    Tests a VAR model estimated with Metropolis-Hastings and that the length
    of the latent variable list is correct, and that the estimated latent
    variables are not nan
    """
    model = pf.VAR(data=data, lags=2)
    x = model.fit('M-H', nsims=300)
    assert(len(model.latent_variables.z_list) == 13)
    lvs = np.array([i.value for i in model.latent_variables.z_list])
    assert(len(lvs[np.isnan(lvs)]) == 0)

#def test_laplace():
#    """
#    Tests a VAR model estimated with Laplace approximation and that the length of the
#    latent variable list is correct, and that the estimated latent variables are not nan
#    """
#    model = pf.VAR(data=data, lags=2)
#    x = model.fit('Laplace')
#    assert(len(model.latent_variables.z_list) == 13)
#    lvs = np.array([i.value for i in model.latent_variables.z_list])
#    assert(len(lvs[np.isnan(lvs)]) == 0)

#def test_pml():
#    """
#    Tests a PML model estimated with Laplace approximation and that the length of the
#    latent variable list is correct, and that the estimated latent variables are not nan
#    """
#    model = pf.VAR(data=data, lags=2)
#    x = model.fit('PML')
#    assert(len(model.latent_variables.z_list) == 13)
#    lvs = np.array([i.value for i in model.latent_variables.z_list])
#    assert(len(lvs[np.isnan(lvs)]) == 0)

def test_predict_length():
    """
    Tests that the prediction dataframe length is equal to the number of steps h
    """
    model = pf.VAR(data=data, lags=2)
    x = model.fit()
    assert(model.predict(h=5).shape[0] == 5)

def test_predict_is_length():
    """
    Tests that the prediction IS dataframe length is equal to the number of steps h
    """
    model = pf.VAR(data=data, lags=2)
    x = model.fit()
    assert(model.predict_is(h=5).shape[0] == 5)

def test_predict_nans():
    """
    Tests that the predictions are not nans
    """
    model = pf.VAR(data=data, lags=2)
    x = model.fit()
    assert(len(model.predict(h=5).values[np.isnan(model.predict(h=5).values)]) == 0)

def test_predict_is_nans():
    """
    Tests that the in-sample predictions are not nans
    """
    model = pf.VAR(data=data, lags=2)
    x = model.fit()
    assert(len(model.predict_is(h=5).values[np.isnan(model.predict_is(h=5).values)]) == 0)
32.333333
87
0.706318
673
3,783
3.882615
0.144131
0.10333
0.045924
0.096441
0.825871
0.800612
0.756219
0.735553
0.717949
0.717949
0
0.027657
0.149352
3,783
117
87
32.333333
0.784338
0.459424
0
0.480769
0
0
0.008731
0
0
0
0
0
0.230769
1
0.153846
false
0
0.057692
0
0.211538
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
7bd72088d4eefd46cd04b5ada1ac1cd20e8e2179
5,224
py
Python
tests/test_contract_lima.py
isabella232/aepp-sdk-python
58ac2d12e0062896a473c254781ee397e3da318e
[ "0BSD" ]
22
2018-02-20T02:39:34.000Z
2021-12-17T12:12:00.000Z
tests/test_contract_lima.py
aeternity/aepp-sdk-python
58ac2d12e0062896a473c254781ee397e3da318e
[ "0BSD" ]
284
2018-02-12T11:50:39.000Z
2021-07-27T04:55:15.000Z
tests/test_contract_lima.py
isabella232/aepp-sdk-python
58ac2d12e0062896a473c254781ee397e3da318e
[ "0BSD" ]
17
2018-02-20T17:17:12.000Z
2022-03-06T08:36:26.000Z
import pytest


def _sophia_contract_tx_create_online(node_cli, account):
    tests = [
        {
            # init(42)
            "name": "simplestorage.aes",
            "sourcecode": "contract SimpleStorage =\n record state = { data : int }\n entrypoint init(value : int) : state = { data = value }\n entrypoint get() : int = state.data\n stateful entrypoint set(value : int) = put(state{data = value})\n",
            "bytecode": "cb_+JFGA6CaF4v9syrdOevYZSWs8H6yoVk//r4Azu+V96W3B5CXFMC4YLg8/i+GW9kANwAHKCwAggD+RNZEHwA3AQc3AAwBACcMAhoCggEDP/7oxF62ADcBBzcADAEAJwwCGgKCAQM/ni8DES+GW9kNZ2V0EUTWRB8RaW5pdBHoxF62DXNldIIvAIk0LjAuMC1yYzUAYj/1oA==",
            "calldata": "cb_KxFE1kQfG1TH2kjs",
        },
        {
            "name": "identity.aes",
            "sourcecode": "contract Identity =\n entrypoint main(x : int) = x",
            "bytecode": "cb_+GpGA6Abk28ISckRxfWDHCo6FYfNzlAYCRW6TBDvQZ2BYQUmH8C4OZ7+RNZEHwA3ADcAGg6CPwEDP/64F37sADcBBwcBAQCWLwIRRNZEHxFpbml0EbgXfuwRbWFpboIvAIk0LjAuMC1yYzUAfpEWYw==",
            "calldata": "cb_KxFE1kQfP4oEp9E=",  # init()
        }
    ]
    for t in tests:
        contract = node_cli.Contract()
        tx = contract.create(account, t.get("bytecode"), calldata=t.get("calldata"), gas=100000)
        c_id = tx.metadata.contract_id
        deployed = node_cli.get_contract(pubkey=c_id)
        assert deployed.active is True
        assert deployed.owner_id == account.get_address()


def _sophia_contract_tx_call_online(node_cli, account):
    tests = [
        {
            "name": "simplestorage.aes",
            "sourcecode": "contract SimpleStorage =\n record state = { data : int }\n entrypoint init(value : int) : state = { data = value }\n entrypoint get() : int = state.data\n stateful entrypoint set(value : int) = put(state{data = value})\n",
            "bytecode": "cb_+JFGA6CaF4v9syrdOevYZSWs8H6yoVk//r4Azu+V96W3B5CXFMC4YLg8/i+GW9kANwAHKCwAggD+RNZEHwA3AQc3AAwBACcMAhoCggEDP/7oxF62ADcBBzcADAEAJwwCGgKCAQM/ni8DES+GW9kNZ2V0EUTWRB8RaW5pdBHoxF62DXNldIIvAIk0LjAuMC1yYzUAYj/1oA==",
            # init(42)
            "init.calldata": "cb_KxFE1kQfG1TH2kjs",
            "call.function": "set",
            "call.arguments": [24],
            "call.calldata": "cb_KxHoxF62GzAP+Odz",
            "return.value": "cb_P4fvHVw="  # 0
        },
        {
            "name": "identity.aes",
            "sourcecode": "contract Identity =\n entrypoint main(x : int) = x",
            "bytecode": "cb_+GpGA6Abk28ISckRxfWDHCo6FYfNzlAYCRW6TBDvQZ2BYQUmH8C4OZ7+RNZEHwA3ADcAGg6CPwEDP/64F37sADcBBwcBAQCWLwIRRNZEHxFpbml0EbgXfuwRbWFpboIvAIk0LjAuMC1yYzUAfpEWYw==",
            "init.calldata": "cb_KxFE1kQfP4oEp9E=",
            "call.function": "main",
            "call.arguments": [42],
            "call.calldata": "cb_KxG4F37sG1Q/+F7e",
            "return.value": "cb_VNLOFXc="  # 42
        }
    ]
    for t in tests:
        print(f"call contract {t.get('name')}")
        contract = node_cli.Contract()
        tx = contract.create(account, t.get("bytecode"), calldata=t.get("init.calldata"))
        c_id = tx.metadata.contract_id
        deployed = node_cli.get_contract(pubkey=c_id)
        assert deployed.active is True
        assert deployed.owner_id == account.get_address()
        tx = contract.call(c_id, account, t.get("call.function"), t.get("call.calldata"))
        # retrieve the call object
        call = contract.get_call_object(tx.hash)
        assert call.return_value == t.get("return.value")
        assert call.return_type == "ok"


def _sophia_contract_tx_call_static(node_cli, account):
    tests = [
        {
            "name": "identity.aes",
            "sourcecode": "contract Identity =\n entrypoint main(x : int) = x",
            "bytecode": "cb_+GpGA6Abk28ISckRxfWDHCo6FYfNzlAYCRW6TBDvQZ2BYQUmH8C4OZ7+RNZEHwA3ADcAGg6CPwEDP/64F37sADcBBwcBAQCWLwIRRNZEHxFpbml0EbgXfuwRbWFpboIvAIk0LjAuMC1yYzUAfpEWYw==",
            "init.calldata": "cb_KxFE1kQfP4oEp9E=",
            "call.function": "main",
            "call.arguments": [42],
            "call.calldata": "cb_KxG4F37sG1Q/+F7e",
            "return.value": "cb_VNLOFXc="  # 42
        }
    ]
    for t in tests:
        contract = node_cli.Contract()
        tx = contract.create(account, t.get("bytecode"), calldata=t.get("init.calldata"))
        c_id = tx.metadata.contract_id
        deployed = node_cli.get_contract(pubkey=c_id)
        assert deployed.active is True
        assert deployed.owner_id == account.get_address()
        tx = contract.call_static(c_id, t.get("call.function"), t.get("call.calldata"), address=account.get_address())
        assert tx.result == "ok"


def test_sophia_contract_tx_create_native_lima(chain_fixture):
    # save settings and go online
    _sophia_contract_tx_create_online(chain_fixture.NODE_CLI, chain_fixture.ALICE)


def test_sophia_contract_tx_call_native_lima(chain_fixture):
    # save settings and go online
    _sophia_contract_tx_call_online(chain_fixture.NODE_CLI, chain_fixture.ALICE)
    # restore settings


def test_sophia_contract_tx_call_static_native_lima(chain_fixture):
    # save settings and go online
    _sophia_contract_tx_call_static(chain_fixture.NODE_CLI, chain_fixture.BOB)
    # restore settings
47.490909
253
0.662711
542
5,224
6.180812
0.184502
0.035821
0.042985
0.035821
0.867761
0.812836
0.787463
0.787463
0.743284
0.743284
0
0.039529
0.220329
5,224
109
254
47.926606
0.782961
0.033691
0
0.55814
0
0.046512
0.441486
0.174647
0
0
0
0
0.104651
1
0.069767
false
0
0.011628
0
0.081395
0.011628
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
7be2d2b64f3512d94e59f3506bbed2cc20f5e171
94
py
Python
structure/__init__.py
Jaidev810/Data-Structures-package
f651615275817f182662892b2b57b200310d3dba
[ "MIT" ]
2
2021-02-27T06:13:11.000Z
2021-02-27T06:15:03.000Z
structure/__init__.py
Jaidev810/Data-Structures-package
f651615275817f182662892b2b57b200310d3dba
[ "MIT" ]
null
null
null
structure/__init__.py
Jaidev810/Data-Structures-package
f651615275817f182662892b2b57b200310d3dba
[ "MIT" ]
null
null
null
from .BinaryTree import *
from .SinglyLinkedList import *
from .DoublyLinkedList import *
18.8
32
0.765957
9
94
8
0.555556
0.277778
0
0
0
0
0
0
0
0
0
0
0.170213
94
4
33
23.5
0.923077
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
7bf67171698a0944d4f320105ed0a0a9d01d1a8e
45
py
Python
orthogonal_table/__init__.py
CAgAG/OrthogonalArray_Flask
dba6d28cf7118ff7d3a0d7e21974a360599f6749
[ "Apache-2.0" ]
1
2021-06-21T03:28:55.000Z
2021-06-21T03:28:55.000Z
orthogonal_table/__init__.py
CAgAG/OrthogonalArray_Flask
dba6d28cf7118ff7d3a0d7e21974a360599f6749
[ "Apache-2.0" ]
null
null
null
orthogonal_table/__init__.py
CAgAG/OrthogonalArray_Flask
dba6d28cf7118ff7d3a0d7e21974a360599f6749
[ "Apache-2.0" ]
null
null
null
from orthogonal_table.Query import QueryTable
45
45
0.911111
6
45
6.666667
1
0
0
0
0
0
0
0
0
0
0
0
0.066667
45
1
45
45
0.952381
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
d01967ae30afad30e5e9adff8ab00f997d34a22a
82
py
Python
demo.py
zlqm/docx-equation
52e4c8b021457425e9f975a68eaf7beb6ce9fa18
[ "MIT" ]
4
2020-05-18T18:48:09.000Z
2022-01-16T13:33:37.000Z
demo.py
zlqm/docx-equation
52e4c8b021457425e9f975a68eaf7beb6ce9fa18
[ "MIT" ]
1
2021-08-25T12:11:58.000Z
2021-08-30T07:51:00.000Z
demo.py
zlqm/docx-equation
52e4c8b021457425e9f975a68eaf7beb6ce9fa18
[ "MIT" ]
3
2021-02-08T12:57:47.000Z
2021-11-06T14:18:28.000Z
from docx_equation.docx import convert_to_html

convert_to_html('equation.docx')
16.4
46
0.841463
13
82
4.923077
0.538462
0.375
0.40625
0
0
0
0
0
0
0
0
0
0.085366
82
4
47
20.5
0.853333
0
0
0
0
0
0.158537
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
6
d02329291c94acf4820a1a1242ac7a39fe6ec96e
49
py
Python
Basic Text/GUI.py
PyHubs/Workspace-UX
5f6829b7fa9039814266e403da476a612cd2e5ee
[ "MIT" ]
null
null
null
Basic Text/GUI.py
PyHubs/Workspace-UX
5f6829b7fa9039814266e403da476a612cd2e5ee
[ "MIT" ]
null
null
null
Basic Text/GUI.py
PyHubs/Workspace-UX
5f6829b7fa9039814266e403da476a612cd2e5ee
[ "MIT" ]
null
null
null
from tkinter import *
import tkinter.filedialog  # Python 3 module name; "tkinter.tkFileDialog" in the original does not exist
24.5
27
0.857143
6
49
7
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.102041
49
2
27
24.5
0.954545
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
d04a6c2d5ca5ab189b9540049c1bd7899dc59bb9
481
py
Python
libcluster/pvlib_cspopt/__init__.py
Matthew-Boyd/dao-tk
2ab34425aeccbac2d08cf1b8fc262568ab8102eb
[ "MIT" ]
2
2019-11-14T01:40:49.000Z
2021-04-04T09:23:16.000Z
libcluster/pvlib_cspopt/__init__.py
Matthew-Boyd/dao-tk
2ab34425aeccbac2d08cf1b8fc262568ab8102eb
[ "MIT" ]
1
2019-01-07T22:05:36.000Z
2019-01-07T22:07:02.000Z
libcluster/pvlib_cspopt/__init__.py
Matthew-Boyd/dao-tk
2ab34425aeccbac2d08cf1b8fc262568ab8102eb
[ "MIT" ]
4
2019-12-03T16:41:48.000Z
2020-12-15T20:18:53.000Z
import logging
logging.basicConfig()

#from pvlib.version import __version__

from pvlib_cspopt import tools
from pvlib_cspopt import atmosphere
from pvlib_cspopt import clearsky
# from pvlib import forecast
from pvlib_cspopt import irradiance
from pvlib_cspopt import location
from pvlib_cspopt import solarposition
#from pvlib_cspopt import tmy
#from pvlib_cspopt import tracking
#from pvlib_cspopt import pvsystem
#from pvlib_cspopt import spa
#from pvlib_cspopt import modelchain
30.0625
38
0.860707
68
481
5.867647
0.294118
0.270677
0.468672
0.526316
0
0
0
0
0
0
0
0
0.116424
481
15
39
32.066667
0.938824
0.45738
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.875
0
0.875
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
d0a6b3a7add4a0656978ded4d9ee6af419bbd9f0
6,346
py
Python
scripts/qgis_import_bag_data.py
ElmarJ/Amsterdam.1892.GeoJSON
483b1a42f406ae72a6149f36feeb90f39cd07a84
[ "MIT" ]
null
null
null
scripts/qgis_import_bag_data.py
ElmarJ/Amsterdam.1892.GeoJSON
483b1a42f406ae72a6149f36feeb90f39cd07a84
[ "MIT" ]
8
2020-05-01T08:42:00.000Z
2022-01-02T19:43:04.000Z
scripts/qgis_import_bag_data.py
ElmarJ/Amsterdam.1892.GeoJSON
483b1a42f406ae72a6149f36feeb90f39cd07a84
[ "MIT" ]
null
null
null
processing.run("native:extractbylocation", {'INPUT':'WFS:// pagingEnabled=\'true\' restrictToRequestBBOX=\'1\' srsname=\'EPSG:28992\' typename=\'BAG3D:pand3d\' url=\'http://3dbag.bk.tudelft.nl/data/wfs\' url=\'http://3dbag.bk.tudelft.nl/data/wfs?request=getcapabilities\' version=\'auto\'','PREDICATE':[0],'INTERSECT':'https://maps.amsterdam.nl/open_geodata/geojson.php?KAARTLAAG=GEBIED_STADSDELEN&THEMA=gebiedsindeling|layername=OGRGeoJSON|subset=\"Stadsdeel_code\" = \'A\'','OUTPUT':'TEMPORARY_OUTPUT'}) processing.run("native:reprojectlayer", {'INPUT':'memory://Polygon?crs=EPSG:28992&field=gid:integer(0,0)&field=identificatie:string(0,0)&field=aanduidingrecordinactief:string(0,0)&field=aanduidingrecordcorrectie:integer(0,0)&field=officieel:string(0,0)&field=inonderzoek:string(0,0)&field=documentnummer:string(0,0)&field=documentdatum:string(0,0)&field=bouwjaar:string(0,0)&field=begindatumtijdvakgeldigheid:datetime(0,0)&field=einddatumtijdvakgeldigheid:datetime(0,0)&field=gemeentecode:string(0,0)&field=ground-0.00:double(0,0)&field=ground-0.10:double(0,0)&field=ground-0.20:double(0,0)&field=ground-0.30:double(0,0)&field=ground-0.40:double(0,0)&field=ground-0.50:double(0,0)&field=roof-0.25:double(0,0)&field=rmse-0.25:double(0,0)&field=roof-0.50:double(0,0)&field=rmse-0.50:double(0,0)&field=roof-0.75:double(0,0)&field=rmse-0.75:double(0,0)&field=roof-0.90:double(0,0)&field=rmse-0.90:double(0,0)&field=roof-0.95:double(0,0)&field=rmse-0.95:double(0,0)&field=roof-0.99:double(0,0)&field=rmse-0.99:double(0,0)&field=roof_flat:string(0,0)&field=nr_ground_pts:integer(0,0)&field=nr_roof_pts:integer(0,0)&field=ahn_file_date:datetime(0,0)&field=ahn_version:integer(0,0)&field=height_valid:string(0,0)&field=tile_id:string(0,0)&field=pand_h_75:double(0,0)&uid={3c158045-ada3-409f-ad3c-ecb02dc2efa8}','TARGET_CRS':QgsCoordinateReferenceSystem('EPSG:4326'),'OPERATION':'+proj=pipeline +step +inv +proj=sterea +lat_0=52.1561605555556 +lon_0=5.38763888888889 +k=0.9999079 +x_0=155000 +y_0=463000 +ellps=bessel +step +proj=push +v_3 +step +proj=cart +ellps=bessel +step +proj=helmert +x=565.2369 +y=50.0087 +z=465.658 +rx=0.406857330322398 +ry=-0.350732676542563 +rz=1.8703473836068 +s=4.0812 +convention=coordinate_frame +step +inv +proj=cart +ellps=WGS84 +step +proj=pop +v_3 +step +proj=unitconvert +xy_in=rad +xy_out=deg','OUTPUT':'C:/Users/elmar/repos/Amsterdam.1892.GeoJSON/bag_3d_centrum.gpkg'}) processing.run("native:refactorfields", {'INPUT':'C:/Users/elmar/repos/Amsterdam.1892.GeoJSON/3d_bag_centrum.gpkg','FIELDS_MAPPING':[{'expression': '\"fid\"','length': 0,'name': 'fid','precision': 0,'type': 4},{'expression': '\"gid\"','length': 0,'name': 'gid','precision': 0,'type': 2},{'expression': '\"identificatie\"','length': 0,'name': 'identificatie','precision': 0,'type': 10},{'expression': '\"aanduidingrecordinactief\"','length': 0,'name': 'aanduidingrecordinactief','precision': 0,'type': 10},{'expression': '\"aanduidingrecordcorrectie\"','length': 0,'name': 'aanduidingrecordcorrectie','precision': 0,'type': 2},{'expression': '\"officieel\"','length': 0,'name': 'officieel','precision': 0,'type': 10},{'expression': '\"inonderzoek\"','length': 0,'name': 'inonderzoek','precision': 0,'type': 10},{'expression': '\"documentnummer\"','length': 0,'name': 'documentnummer','precision': 0,'type': 10},{'expression': '\"documentdatum\"','length': 0,'name': 'documentdatum','precision': 0,'type': 10},{'expression': '\"bouwjaar\"','length': 0,'name': 'bouwjaar','precision': 0,'type': 10},{'expression': 
'\"begindatumtijdvakgeldigheid\"','length': 0,'name': 'begindatumtijdvakgeldigheid','precision': 0,'type': 16},{'expression': '\"einddatumtijdvakgeldigheid\"','length': 0,'name': 'einddatumtijdvakgeldigheid','precision': 0,'type': 16},{'expression': '\"gemeentecode\"','length': 0,'name': 'gemeentecode','precision': 0,'type': 10},{'expression': '\"ground-0.00\"','length': 0,'name': 'ground_0_00','precision': 0,'type': 6},{'expression': '\"ground-0.10\"','length': 0,'name': 'ground_0_10','precision': 0,'type': 6},{'expression': '\"ground-0.20\"','length': 0,'name': 'ground_0_20','precision': 0,'type': 6},{'expression': '\"ground-0.30\"','length': 0,'name': 'ground_0_30','precision': 0,'type': 6},{'expression': '\"ground-0.40\"','length': 0,'name': 'ground_0_40','precision': 0,'type': 6},{'expression': '\"ground-0.50\"','length': 0,'name': 'ground_0_50','precision': 0,'type': 6},{'expression': '\"roof-0.25\"','length': 0,'name': 'roof_0_25','precision': 0,'type': 6},{'expression': '\"rmse-0.25\"','length': 0,'name': 'rmse_0_25','precision': 0,'type': 6},{'expression': '\"roof-0.50\"','length': 0,'name': 'roof_0_50','precision': 0,'type': 6},{'expression': '\"rmse-0.50\"','length': 0,'name': 'rmse_0_50','precision': 0,'type': 6},{'expression': '\"roof-0.75\"','length': 0,'name': 'roof_0_75','precision': 0,'type': 6},{'expression': '\"rmse-0.75\"','length': 0,'name': 'rmse_0_75','precision': 0,'type': 6},{'expression': '\"roof-0.90\"','length': 0,'name': 'roof_0_90','precision': 0,'type': 6},{'expression': '\"rmse-0.90\"','length': 0,'name': 'rmse_0_90','precision': 0,'type': 6},{'expression': '\"roof-0.95\"','length': 0,'name': 'roof_0_95','precision': 0,'type': 6},{'expression': '\"rmse-0.95\"','length': 0,'name': 'rmse_0_95','precision': 0,'type': 6},{'expression': '\"roof-0.99\"','length': 0,'name': 'roof_0_99','precision': 0,'type': 6},{'expression': '\"rmse-0.99\"','length': 0,'name': 'rmse_0_99','precision': 0,'type': 6},{'expression': '\"roof_flat\"','length': 0,'name': 'roof_flat','precision': 0,'type': 10},{'expression': '\"nr_ground_pts\"','length': 0,'name': 'nr_ground_pts','precision': 0,'type': 2},{'expression': '\"nr_roof_pts\"','length': 0,'name': 'nr_roof_pts','precision': 0,'type': 2},{'expression': '\"ahn_file_date\"','length': 0,'name': 'ahn_file_date','precision': 0,'type': 16},{'expression': '\"ahn_version\"','length': 0,'name': 'ahn_version','precision': 0,'type': 2},{'expression': '\"height_valid\"','length': 0,'name': 'height_valid','precision': 0,'type': 10},{'expression': '\"tile_id\"','length': 0,'name': 'tile_id','precision': 0,'type': 10},{'expression': '\"pand_h_75\"','length': 0,'name': 'pand_h_75','precision': 0,'type': 6},{'expression': '\"bouwjaar_date\"','length': -1,'name': 'bouwjaar_date','precision': 0,'type': 14}],'OUTPUT':'TEMPORARY_OUTPUT'})
2,115.333333
3,939
0.679798
934
6,346
4.518201
0.191649
0.094787
0.132701
0.067536
0.472512
0.274408
0.179621
0.03981
0.01564
0
0
0.090715
0.039395
6,346
3
3,939
2,115.333333
0.601542
0
0
0
0
1
0.734048
0.284071
0
0
0
0
0
1
0
true
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
1
1
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
6
d0c21a24f09c6fa57bbbb5ea8bc831670710f235
113
py
Python
mobula/glue/__init__.py
hustzxd/MobulaOP
49e4062f6578b31918ddcc613e38e0fbb92bb015
[ "MIT" ]
null
null
null
mobula/glue/__init__.py
hustzxd/MobulaOP
49e4062f6578b31918ddcc613e38e0fbb92bb015
[ "MIT" ]
null
null
null
mobula/glue/__init__.py
hustzxd/MobulaOP
49e4062f6578b31918ddcc613e38e0fbb92bb015
[ "MIT" ]
null
null
null
from . import backend
from .common import register, CUSTOM_OP_LIST
from . import common

common.backend = backend
22.6
44
0.80531
16
113
5.5625
0.5
0.224719
0
0
0
0
0
0
0
0
0
0
0.141593
113
4
45
28.25
0.917526
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.75
0
0.75
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
d0dccdcda5b3c340a9347de4563c54d4d53c2a89
52
py
Python
test.py
renge080958/flask-2002
a2df43666fa9d3158c8d9f597dc9b1210c7c21fa
[ "Apache-2.0" ]
null
null
null
test.py
renge080958/flask-2002
a2df43666fa9d3158c8d9f597dc9b1210c7c21fa
[ "Apache-2.0" ]
null
null
null
test.py
renge080958/flask-2002
a2df43666fa9d3158c8d9f597dc9b1210c7c21fa
[ "Apache-2.0" ]
null
null
null
class Person(object):
    def haha(self):  # "self" added so the method is callable on an instance
        pass
13
21
0.538462
6
52
4.666667
1
0
0
0
0
0
0
0
0
0
0
0
0.346154
52
4
22
13
0.823529
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
true
0.333333
0
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
1
0
0
1
0
0
6
d0f93c6e96ef1c8441cd0a0982fa18535aac9419
4,434
py
Python
abcdp/prob_flip.py
ParkLabML/ABCDP
a127cb43a7a045e24ad59b1e502574243ffc0d9e
[ "MIT" ]
1
2021-09-08T09:06:15.000Z
2021-09-08T09:06:15.000Z
abcdp/prob_flip.py
ParkLabML/ABCDP
a127cb43a7a045e24ad59b1e502574243ffc0d9e
[ "MIT" ]
null
null
null
abcdp/prob_flip.py
ParkLabML/ABCDP
a127cb43a7a045e24ad59b1e502574243ffc0d9e
[ "MIT" ]
null
null
null
# to plot the probability of flipping between ABC output and ABCDP output

import numpy as np
import matplotlib
import matplotlib.pyplot as plt

# font options
font = {
    #'family' : 'normal',
    #'weight' : 'bold',
    'size' : 18
}
plt.rc('font', **font)
plt.rc('lines', linewidth=2)
matplotlib.rcParams['pdf.fonttype'] = 42
matplotlib.rcParams['ps.fonttype'] = 42


def prob_flip(rho, eps_abc, b):
    if rho-eps_abc>0:
        tmp = - (rho-eps_abc)/b
    else:
        tmp = (rho-eps_abc)/b
    prob = 1/6*(4*np.exp(tmp/2) - np.exp(tmp))
    return prob


def b_func(c, sensitivity, eps_tot):
    return 2*c*sensitivity/eps_tot


#####################################################################
n = 10
sensitivity = 1/n

rho_samps = np.random.uniform(0,1,size=100)
eps_abc = 0.2
eps_tot_mat = [0.01, 0.1, 0.5, 1, 5, 10, 50, 100]
c_mat = [10, 100, 1000]

prob = np.zeros((len(rho_samps), len(eps_tot_mat), len(c_mat)))
for i in np.arange(0,len(rho_samps)):
    rho = rho_samps[i]
    for j in np.arange(0, len(eps_tot_mat)):
        eps_tot = eps_tot_mat[j]
        for k in np.arange(0, len(c_mat)):
            c = c_mat[k]
            b = b_func(c,sensitivity,eps_tot)
            prob[i,j,k] = prob_flip(rho, eps_abc, b)

plt.figure(1)
plt.subplot(311)
plt.title('flipping probability (c=10)')
plt.boxplot([prob[:,0,0], prob[:,1,0], prob[:,2,0], prob[:,3,0], prob[:,4,0], prob[:,5,0], prob[:,6,0], prob[:,7,0]])
plt.xticks([1, 2, 3, 4, 5, 6, 7, 8], eps_tot_mat)
# plt.ylim(0, 0.6)

plt.subplot(312)
plt.title('(c=100)')
plt.boxplot([prob[:,0,1], prob[:,1,1], prob[:,2,1], prob[:,3,1], prob[:,4,1], prob[:,5,1], prob[:,6,1], prob[:,7,1]])
plt.xticks([1, 2, 3, 4, 5, 6, 7, 8], eps_tot_mat)
# plt.ylim(0, 0.6)

plt.subplot(313)
plt.title('(c=1000)')
plt.boxplot([prob[:,0,2], prob[:,1,2], prob[:,2,2], prob[:,3,2], prob[:,4,2], prob[:,5,2], prob[:,6,2], prob[:,7,2]])
plt.xticks([1, 2, 3, 4, 5, 6, 7, 8], eps_tot_mat)
plt.xlabel('total epsilons')
# plt.ylim(0, 0.6)

#####################################################################
n = 100
sensitivity = 1/n

prob = np.zeros((len(rho_samps), len(eps_tot_mat), len(c_mat)))
for i in np.arange(0,len(rho_samps)):
    rho = rho_samps[i]
    for j in np.arange(0, len(eps_tot_mat)):
        eps_tot = eps_tot_mat[j]
        for k in np.arange(0, len(c_mat)):
            c = c_mat[k]
            b = b_func(c,sensitivity,eps_tot)
            prob[i,j,k] = prob_flip(rho, eps_abc, b)

plt.figure(2)
plt.subplot(311)
plt.title('flipping probability (c=10)')
plt.boxplot([prob[:,0,0], prob[:,1,0], prob[:,2,0], prob[:,3,0], prob[:,4,0], prob[:,5,0], prob[:,6,0], prob[:,7,0]])
plt.xticks([1, 2, 3, 4, 5, 6, 7, 8], eps_tot_mat)
# plt.ylim(0, 0.6)

plt.subplot(312)
plt.title('(c=100)')
plt.boxplot([prob[:,0,1], prob[:,1,1], prob[:,2,1], prob[:,3,1], prob[:,4,1], prob[:,5,1], prob[:,6,1], prob[:,7,1]])
plt.xticks([1, 2, 3, 4, 5, 6, 7, 8], eps_tot_mat)
# plt.ylim(0, 0.6)

plt.subplot(313)
plt.title('(c=1000)')
plt.boxplot([prob[:,0,2], prob[:,1,2], prob[:,2,2], prob[:,3,2], prob[:,4,2], prob[:,5,2], prob[:,6,2], prob[:,7,2]])
plt.xticks([1, 2, 3, 4, 5, 6, 7, 8], eps_tot_mat)
plt.xlabel('total epsilons')
# plt.ylim(0, 0.6)

#####################################################################
n = 1000
sensitivity = 1/n

prob = np.zeros((len(rho_samps), len(eps_tot_mat), len(c_mat)))
for i in np.arange(0,len(rho_samps)):
    rho = rho_samps[i]
    for j in np.arange(0, len(eps_tot_mat)):
        eps_tot = eps_tot_mat[j]
        for k in np.arange(0, len(c_mat)):
            c = c_mat[k]
            b = b_func(c,sensitivity,eps_tot)
            prob[i,j,k] = prob_flip(rho, eps_abc, b)

plt.figure(3)
plt.subplot(311)
plt.title('flipping probability (c=10)')
plt.boxplot([prob[:,0,0], prob[:,1,0], prob[:,2,0], prob[:,3,0], prob[:,4,0], prob[:,5,0], prob[:,6,0], prob[:,7,0]])
plt.xticks([1, 2, 3, 4, 5, 6, 7, 8], eps_tot_mat)
# plt.ylim(0, 0.6)

plt.subplot(312)
plt.title('(c=100)')
plt.boxplot([prob[:,0,1], prob[:,1,1], prob[:,2,1], prob[:,3,1], prob[:,4,1], prob[:,5,1], prob[:,6,1], prob[:,7,1]])
plt.xticks([1, 2, 3, 4, 5, 6, 7, 8], eps_tot_mat)
# plt.ylim(0, 0.6)

plt.subplot(313)
plt.title('(c=1000)')
plt.boxplot([prob[:,0,2], prob[:,1,2], prob[:,2,2], prob[:,3,2], prob[:,4,2], prob[:,5,2], prob[:,6,2], prob[:,7,2]])
plt.xticks([1, 2, 3, 4, 5, 6, 7, 8], eps_tot_mat)
plt.xlabel('total epsilons')
# plt.ylim(0, 0.6)

plt.show()
27.7125
117
0.550744
851
4,434
2.773208
0.104583
0.068644
0.072458
0.041949
0.811864
0.800847
0.782203
0.782203
0.782203
0.782203
0
0.098414
0.161254
4,434
160
118
27.7125
0.536166
0.062021
0
0.69697
0
0
0.05179
0
0
0
0
0
0
1
0.020202
false
0
0.030303
0.010101
0.070707
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
ef84fc3383041517fbe4b33a286b1256ef92fedf
11,372
py
Python
selfservice-api/tests/api/test_api_project.py
bcgov/BCSC-BPS
3bfe09c100a0f5b98d61228324336d5f45ad93ad
[ "Apache-2.0" ]
2
2020-07-03T18:18:34.000Z
2021-03-08T10:25:50.000Z
selfservice-api/tests/api/test_api_project.py
bcgov/BCSC-BPS
3bfe09c100a0f5b98d61228324336d5f45ad93ad
[ "Apache-2.0" ]
312
2020-01-10T23:00:08.000Z
2022-03-29T22:07:00.000Z
selfservice-api/tests/api/test_api_project.py
bcgov/BCSC-BPS
3bfe09c100a0f5b98d61228324336d5f45ad93ad
[ "Apache-2.0" ]
2
2020-03-26T05:10:20.000Z
2021-02-05T19:22:56.000Z
# Copyright © 2019 Province of British Columbia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests to assure the API endpoints for managing a project info resource are working as expected."""

import json
from http import HTTPStatus

from ..helper.api_create_data import (PROJECTINFO_API,  # noqa: I001
                                      _create_admin_user_, _create_project_, _get_project_,  # noqa: I001
                                      _get_all_project_, _update_technical_req_with_test_account_,  # noqa: I001
                                      create_project, create_technical_req_with_additional, create_user,  # noqa: I001
                                      get_project)  # noqa: I001
from ..helper.auth import ss_admin_auth_header, ss_client_auth_header
from selfservice_api.models.enums import ProjectRoles, ProjectStatus


def test_post_project_as_analyst(client, jwt, session):
    """Assert that the endpoint returns the success status."""
    response = _create_project_(client, jwt, None, is_analyst=True)
    assert response.status_code == HTTPStatus.CREATED


def test_post_project_as_developer(client, jwt, session):
    """Assert that the endpoint returns the success status."""
    create_user(client, jwt, project_role='manager')
    response = _create_project_(client, jwt, ProjectRoles.Developer)
    assert response.status_code == HTTPStatus.CREATED


def test_post_project_as_manager(client, jwt, session):
    """Assert that the endpoint returns the success status."""
    create_user(client, jwt, project_role='cto')
    response = _create_project_(client, jwt, ProjectRoles.Manager)
    assert response.status_code == HTTPStatus.CREATED


def test_post_project_as_cto(client, jwt, session):
    """Assert that the endpoint returns the success status."""
    create_user(client, jwt, project_role='developer')
    response = _create_project_(client, jwt, ProjectRoles.Cto)
    assert response.status_code == HTTPStatus.CREATED


def test_post_project_validation(client, jwt, session):
    """Assert that the endpoint returns the failure status."""
    create_user(client, jwt)
    headers = ss_client_auth_header(jwt)
    req_data = {}
    response = client.post(PROJECTINFO_API, data=json.dumps(req_data),
                           headers=headers, content_type='application/json')
    assert response.status_code == HTTPStatus.BAD_REQUEST


def test_get_all_project(client, jwt, session):
    """Assert that the endpoint returns the success status."""
    response = _get_all_project_(client, jwt)
    assert response.status_code == HTTPStatus.OK


def test_get_all_project_analyst(client, jwt, session):
    """Assert that the endpoint returns the success status."""
    _create_admin_user_(client, jwt)
    response = _get_all_project_(client, jwt, True)
    assert response.status_code == HTTPStatus.OK


def test_get_project(client, jwt, session):
    """Assert that the endpoint returns the success status."""
    response = _get_project_(client, jwt)
    assert response.status_code == HTTPStatus.OK
    response = _get_project_(client, jwt, is_analyst=True)
    assert response.status_code == HTTPStatus.OK


def test_delete_project(client, jwt, session):
    """Assert that the endpoint returns the success status."""
    # Delete as admin
    headers = ss_admin_auth_header(jwt)
    project = get_project(client, jwt)
    response = client.delete(PROJECTINFO_API + '/' + str(project['id']),
                             headers=headers, content_type='application/json')
    assert response.status_code == HTTPStatus.OK
    # Delete as client
    project = get_project(client, jwt)
    response = client.delete(PROJECTINFO_API + '/' + str(project['id']),
                             headers=headers, content_type='application/json')
    assert response.status_code == HTTPStatus.OK
    headers = ss_client_auth_header(jwt)
    technical_req = create_technical_req_with_additional(client, jwt)
    req_data = {'update': 'status', 'status': ProjectStatus.Dev}
    project_id = str(technical_req['projectId'])
    response = client.patch(PROJECTINFO_API + '/' + project_id, data=json.dumps(req_data),
                            headers=headers, content_type='application/json')
    assert response.status_code == HTTPStatus.OK
    req_data = {'update': 'status', 'status': ProjectStatus.DevComplete}
    response = client.patch(PROJECTINFO_API + '/' + project_id, data=json.dumps(req_data),
                            headers=headers, content_type='application/json')
    assert response.status_code == HTTPStatus.OK
    response = client.delete(PROJECTINFO_API + '/' + project_id,
                             headers=headers, content_type='application/json')
    assert response.status_code == HTTPStatus.BAD_REQUEST


def test_put_project(client, jwt, session):
    """Assert that the endpoint returns the success status."""
    headers = ss_client_auth_header(jwt)
    project = get_project(client, jwt)
    response = client.put(PROJECTINFO_API + '/' + str(project['id']), data=json.dumps(project),
                          headers=headers, content_type='application/json')
    assert response.status_code == HTTPStatus.OK


def test_put_project_validation(client, jwt, session):
    """Assert that the endpoint returns the failure status."""
    headers = ss_client_auth_header(jwt)
    project = get_project(client, jwt)
    req_data = {}
    response = client.put(PROJECTINFO_API + '/' + str(project['id']), data=json.dumps(req_data),
                          headers=headers, content_type='application/json')
    assert response.status_code == HTTPStatus.BAD_REQUEST


def test_patch_project_status(client, jwt, session):
    """Assert that the endpoint returns the success status."""
    headers = ss_client_auth_header(jwt)
    technical_req = create_technical_req_with_additional(client, jwt)
    req_data = {'update': 'status', 'status': ProjectStatus.Dev}
    response = client.patch(PROJECTINFO_API + '/' + str(technical_req['projectId']),
                            data=json.dumps(req_data), headers=headers, content_type='application/json')
    assert response.status_code == HTTPStatus.OK
    # check the update condition on test account
    _update_technical_req_with_test_account_(client, jwt, str(technical_req['projectId']), 2)
    response = client.patch(PROJECTINFO_API + '/' + str(technical_req['projectId']),
                            data=json.dumps(req_data), headers=headers, content_type='application/json')
    assert response.status_code == HTTPStatus.OK
    _update_technical_req_with_test_account_(client, jwt, str(technical_req['projectId']), 5)
    response = client.patch(PROJECTINFO_API + '/' + str(technical_req['projectId']),
                            data=json.dumps(req_data), headers=headers, content_type='application/json')
    assert response.status_code == HTTPStatus.OK
    _update_technical_req_with_test_account_(client, jwt, str(technical_req['projectId']), 0)
    response = client.patch(PROJECTINFO_API + '/' + str(technical_req['projectId']),
                            data=json.dumps(req_data), headers=headers, content_type='application/json')
    assert response.status_code == HTTPStatus.OK
    req_data = {'update': 'status', 'status': ProjectStatus.DevComplete}
    response = client.patch(PROJECTINFO_API + '/' + str(technical_req['projectId']),
                            data=json.dumps(req_data), headers=headers, content_type='application/json')
    assert response.status_code == HTTPStatus.OK


def test_patch_project_status_validation(client, jwt, session):
    """Assert that the endpoint returns the failure status."""
    headers = ss_client_auth_header(jwt)
    create_user(client, jwt)
    req_data = {}
    technical_req = create_technical_req_with_additional(client, jwt)
    response = client.patch(PROJECTINFO_API + '/' + str(technical_req['projectId']),
                            data=json.dumps(req_data), headers=headers, content_type='application/json')
    assert response.status_code == HTTPStatus.BAD_REQUEST
    req_data = {'update': ''}
    technical_req = create_technical_req_with_additional(client, jwt)
    response = client.patch(PROJECTINFO_API + '/' + str(technical_req['projectId']),
                            data=json.dumps(req_data), headers=headers, content_type='application/json')
    assert response.status_code == HTTPStatus.BAD_REQUEST
    req_data = {'update': 'status'}
    technical_req = create_technical_req_with_additional(client, jwt)
    response = client.patch(PROJECTINFO_API + '/' + str(technical_req['projectId']),
                            data=json.dumps(req_data), headers=headers, content_type='application/json')
    assert response.status_code == HTTPStatus.BAD_REQUEST
    req_data = {'update': 'status', 'status': ProjectStatus.Dev}
    project = create_project(client, jwt)
    response = client.patch(PROJECTINFO_API + '/' + str(project['id']),
                            data=json.dumps(req_data), headers=headers, content_type='application/json')
    assert response.status_code == HTTPStatus.BAD_REQUEST


def test_patch_project_status_oidc_and_test_account(client, jwt, session, config):
    """Assert that the endpoint returns the failure status."""
    headers = ss_client_auth_header(jwt)
    technical_req = create_technical_req_with_additional(client, jwt)
    # Dynamic OIDC None response: Start
    config['dynamic_api_return_none'] = True
    req_data = {'update': 'status', 'status': ProjectStatus.Dev}
    response = client.patch(PROJECTINFO_API + '/' + str(technical_req['projectId']),
                            data=json.dumps(req_data), headers=headers, content_type='application/json')
    assert response.status_code == HTTPStatus.INTERNAL_SERVER_ERROR
    # call again to cover update api call
    config.pop('dynamic_api_return_none')
    response = client.patch(PROJECTINFO_API + '/' + str(technical_req['projectId']),
                            data=json.dumps(req_data), headers=headers, content_type='application/json')
    assert response.status_code == HTTPStatus.OK
    config['dynamic_api_return_none'] = True
    response = client.patch(PROJECTINFO_API + '/' + str(technical_req['projectId']),
                            data=json.dumps(req_data), headers=headers, content_type='application/json')
    assert response.status_code == HTTPStatus.INTERNAL_SERVER_ERROR
    config.pop('dynamic_api_return_none')
    # Dynamic OIDC None response: End
    config['LIMITED_TEST_ACCOUNT_TRIGGER_COUNT'] = 200
    req_data = {'update': 'status', 'status': ProjectStatus.Dev}
    response = client.patch(PROJECTINFO_API + '/' + str(technical_req['projectId']),
                            data=json.dumps(req_data), headers=headers, content_type='application/json')
    assert response.status_code == HTTPStatus.OK
44.249027
116
0.699877
1,371
11,372
5.547775
0.118162
0.049698
0.076256
0.091507
0.833684
0.806469
0.76203
0.759926
0.74862
0.720878
0
0.003161
0.193282
11,372
256
117
44.421875
0.825812
0.144038
0
0.662162
0
0
0.082927
0.013077
0
0
0
0
0.195946
1
0.094595
false
0
0.033784
0
0.128378
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
efe7304f31d16d8ce1472c10b957c68ce3594137
145
py
Python
config/config.py
neogeo/FirestoreLambdas
1ecb807cd41538fcbd1e32631e06a5040d5eb725
[ "MIT" ]
2
2020-11-21T14:26:49.000Z
2020-12-30T12:16:36.000Z
config/config.py
neogeo/firestore-lambdas
1ecb807cd41538fcbd1e32631e06a5040d5eb725
[ "MIT" ]
null
null
null
config/config.py
neogeo/firestore-lambdas
1ecb807cd41538fcbd1e32631e06a5040d5eb725
[ "MIT" ]
null
null
null
# Firebase settings

# path to private key json file
FIRESTORE_SERVICE_ACCOUNT_PRIVATE_KEY = 'config/firestore_service_account_private_key.json'
29
91
0.855172
20
145
5.8
0.6
0.258621
0.241379
0.517241
0.568966
0
0
0
0
0
0
0
0.096552
145
4
92
36.25
0.885496
0.324138
0
0
0
0
0.521277
0.521277
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
ef19b8c1fda9ef091267c457dce759a551b4cc3b
22
py
Python
test_6.py
lianggk2018/Git_test
eed9c3d274c3d51f321c25ad03c32a3f8d3556e5
[ "MIT" ]
null
null
null
test_6.py
lianggk2018/Git_test
eed9c3d274c3d51f321c25ad03c32a3f8d3556e5
[ "MIT" ]
null
null
null
test_6.py
lianggk2018/Git_test
eed9c3d274c3d51f321c25ad03c32a3f8d3556e5
[ "MIT" ]
null
null
null
1
2
3
45
6
s
adwadwad
2.75
8
0.681818
7
22
2.142857
1
0
0
0
0
0
0
0
0
0
0
0.4
0.318182
22
7
9
3.142857
0.6
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
1
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
6
ef507cf744ec08fa4c375edacf515f99a712c801
23
py
Python
whatsapp/__init__.py
DefCon-007/py-whatsapp
fc4db51136fb8331fa1077968ca9ecc2c60b6c8b
[ "MIT" ]
2
2021-05-02T14:35:07.000Z
2021-06-08T03:40:53.000Z
whatsapp/__init__.py
DefCon-007/py-whatsapp
fc4db51136fb8331fa1077968ca9ecc2c60b6c8b
[ "MIT" ]
null
null
null
whatsapp/__init__.py
DefCon-007/py-whatsapp
fc4db51136fb8331fa1077968ca9ecc2c60b6c8b
[ "MIT" ]
null
null
null
from .whatsapp import *
23
23
0.782609
3
23
6
1
0
0
0
0
0
0
0
0
0
0
0
0.130435
23
1
23
23
0.9
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
323c5cb2493e1057e0b3ce180a384d8ee50a6091
34
py
Python
utils/__init__.py
elviswf/pytorch_cv
a7f11f857a0c1d5e5a807aeed5e594659212fba0
[ "Apache-2.0" ]
29
2018-05-24T12:47:23.000Z
2021-12-31T02:05:27.000Z
tools/__init__.py
mxguo/DriverPostureClassification
44b22e98e8952f9846db73177874de5c332e5cf5
[ "MIT" ]
null
null
null
tools/__init__.py
mxguo/DriverPostureClassification
44b22e98e8952f9846db73177874de5c332e5cf5
[ "MIT" ]
9
2018-05-24T13:39:42.000Z
2020-10-23T08:29:01.000Z
from .visualize import Visualizer
17
33
0.852941
4
34
7.25
1
0
0
0
0
0
0
0
0
0
0
0
0.117647
34
1
34
34
0.966667
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
323f12801fa0350971df6f895e04f14b2bd07e14
5,699
py
Python
test/test_cam_v2.py
dondemonz/RestApi
0459d2b8079b9f2abc50bf5e206625427c4a2dcf
[ "Apache-2.0" ]
null
null
null
test/test_cam_v2.py
dondemonz/RestApi
0459d2b8079b9f2abc50bf5e206625427c4a2dcf
[ "Apache-2.0" ]
null
null
null
test/test_cam_v2.py
dondemonz/RestApi
0459d2b8079b9f2abc50bf5e206625427c4a2dcf
[ "Apache-2.0" ]
null
null
null
import requests
from model.json_check import *
from model.input_data import *


# Request to get the settings of all CAM objects
def test_GetV2AllCamerasCode200():
    data = "success"
    response = requests.get(url="http://"+slave_ip+":"+restPort+"/api/v2/cameras/", auth=auth)
    user_resp_code = "200"
    assert str(response.status_code) == user_resp_code
    body = json.dumps(response.json())
    data1 = json.loads(body)
    n = data1["status"]
    assert data == n


def test_GetV2AllCamerasStatusCode401():
    response = requests.get(url="http://"+slave_ip+":"+restPort+"/api/v2/cameras/", auth=("", ""))
    user_resp_code = "401"
    assert str(response.status_code) == user_resp_code


# Request to get the settings of a single CAM object
def test_GetV2CamerasByIdCode200():
    response = requests.get(url="http://"+slave_ip+":"+restPort+"/api/v2/cameras/"+camId, auth=auth)
    user_resp_code = "200"
    assert str(response.status_code) == user_resp_code
    body = json.dumps(response.json())
    data1 = json.loads(body)
    n = data1["data"]["id"]
    assert camId == n


def test_GetV2CamerasByIdCode401():
    response = requests.get(url="http://"+slave_ip+":"+restPort+"/api/v2/cameras/"+camId, auth=("", ""))
    user_resp_code = "401"
    assert str(response.status_code) == user_resp_code


def test_GetV2CamerasByIdCode404():
    data = "Unknown CAM id:0"
    response = requests.get(url="http://"+slave_ip+":"+restPort+"/api/v2/cameras/0", auth=auth)
    user_resp_code = "404"
    assert str(response.status_code) == user_resp_code
    body = json.dumps(response.json())
    data1 = json.loads(body)
    n = data1["message"]
    assert data == n


# Request to get the status field of a CAM object
def test_GetV2CameraStatusCode200():
    response = requests.get(url="http://"+slave_ip+":"+restPort+"/api/v2/cameras/"+camId+"/status", auth=auth)
    user_resp_code = "200"
    assert str(response.status_code) == user_resp_code
    body = json.dumps(response.json())
    data1 = json.loads(body)
    n = data1["data"]["id"]
    assert camId == n


def test_GetV2CameraStatusCode401():
    response = requests.get(url="http://"+slave_ip+":"+restPort+"/api/v2/cameras/"+camId+"/status", auth=("", ""))
    user_resp_code = "401"
    assert str(response.status_code) == user_resp_code


def test_GetV2CameraStatusCode404():
    data = "Unknown CAM id:0"
    response = requests.get(url="http://"+slave_ip+":"+restPort+"/api/v2/cameras/0/status", auth=auth)
    user_resp_code = "404"
    assert str(response.status_code) == user_resp_code
    body = json.dumps(response.json())
    data1 = json.loads(body)
    n = data1["message"]
    assert data == n


# Request to get the rtsp field of a CAM object
def test_GetV2CameraRtspCode200():
    response = requests.get(url="http://"+slave_ip+":"+restPort+"/api/v2/cameras/"+camId+"/rtsp", auth=auth)
    user_resp_code = "200"
    assert str(response.status_code) == user_resp_code
    body = json.dumps(response.json())
    data1 = json.loads(body)
    n = data1["data"]["id"]
    assert camId == n


def test_GetV2CameraRtspCode401():
    response = requests.get(url="http://"+slave_ip+":"+restPort+"/api/v2/cameras/"+camId+"/rtsp", auth=("", ""))
    user_resp_code = "401"
    assert str(response.status_code) == user_resp_code


def test_GetV2CameraRtspCode404():
    data = "Unknown CAM id:0"
    response = requests.get(url="http://"+slave_ip+":"+restPort+"/api/v2/cameras/0/rtsp", auth=auth)
    user_resp_code = "404"
    assert str(response.status_code) == user_resp_code
    body = json.dumps(response.json())
    data1 = json.loads(body)
    n = data1["message"]
    assert data == n


# Request to get the rtsp/live field of a CAM object
def test_GetV2CameraRtspLiveCode200():
    response = requests.get(url="http://"+slave_ip+":"+restPort+"/api/v2/cameras/"+camId+"/rtsp/live", auth=auth)
    user_resp_code = "200"
    assert str(response.status_code) == user_resp_code
    body = json.dumps(response.json())
    data1 = json.loads(body)
    n = data1["data"]["id"]
    assert camId == n


def test_GetV2CameraRtspLiveCode401():
    response = requests.get(url="http://"+slave_ip+":"+restPort+"/api/v2/cameras/"+camId+"/rtsp/live", auth=("", ""))
    user_resp_code = "401"
    assert str(response.status_code) == user_resp_code


def test_GetV2CameraRtspLiveCode404():
    data = "Unknown CAM id:0"
    response = requests.get(url="http://"+slave_ip+":"+restPort+"/api/v2/cameras/0/rtsp/live", auth=auth)
    user_resp_code = "404"
    assert str(response.status_code) == user_resp_code
    body = json.dumps(response.json())
    data1 = json.loads(body)
    n = data1["message"]
    assert data == n


# Request to get the rtsp/archive field of a CAM object
def test_GetV2CameraRtspArchiveCode200():
    response = requests.get(url="http://"+slave_ip+":"+restPort+"/api/v2/cameras/"+camId+"/rtsp/archive", auth=auth)
    user_resp_code = "200"
    assert str(response.status_code) == user_resp_code
    body = json.dumps(response.json())
    data1 = json.loads(body)
    n = data1["data"]["id"]
    assert camId == n


def test_GetV2CameraRtspArchiveCode401():
    response = requests.get(url="http://"+slave_ip+":"+restPort+"/api/v2/cameras/"+camId+"/rtsp/archive", auth=("", ""))
    user_resp_code = "401"
    assert str(response.status_code) == user_resp_code


def test_GetV2CameraRtspArchiveCode404():
    data = "Unknown CAM id:0"
    response = requests.get(url="http://"+slave_ip+":"+restPort+"/api/v2/cameras/0/rtsp/archive", auth=auth)
    user_resp_code = "404"
    assert str(response.status_code) == user_resp_code
    body = json.dumps(response.json())
    data1 = json.loads(body)
    n = data1["message"]
    assert data == n
37.248366
120
0.676785
755
5,699
4.948344
0.092715
0.072805
0.109208
0.100107
0.814507
0.814507
0.812902
0.809957
0.809957
0.809957
0
0.034884
0.154939
5,699
152
121
37.493421
0.740864
0.047201
0
0.677686
0
0
0.134059
0.018993
0
0
0
0
0.231405
1
0.140496
false
0
0.024793
0
0.165289
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
324cd3b00266bda78e31d877322346bfee9bd5c1
169
py
Python
lunchbox/views.py
tot0rokr/MyWeb
18eea4fc02abcf7a28271904507c39d617036668
[ "MIT" ]
null
null
null
lunchbox/views.py
tot0rokr/MyWeb
18eea4fc02abcf7a28271904507c39d617036668
[ "MIT" ]
null
null
null
lunchbox/views.py
tot0rokr/MyWeb
18eea4fc02abcf7a28271904507c39d617036668
[ "MIT" ]
null
null
null
from django.shortcuts import render
from django.http import HttpResponse


def index(request):
    return HttpResponse("Lunch Box Application")

# Create your views here.
24.142857
48
0.792899
22
169
6.090909
0.818182
0.149254
0
0
0
0
0
0
0
0
0
0
0.142012
169
7
49
24.142857
0.924138
0.136095
0
0
0
0
0.144828
0
0
0
0
0
0
1
0.25
false
0
0.5
0.25
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
1
0
0
6
3277534f467165308c621cb0742e2d76f00db7b8
5,768
py
Python
z2/part2/interactive/jm/random_normal_1/305084984.py
kozakusek/ipp-2020-testy
09aa008fa53d159672cc7cbf969a6b237e15a7b8
[ "MIT" ]
1
2020-04-16T12:13:47.000Z
2020-04-16T12:13:47.000Z
z2/part2/interactive/jm/random_normal_1/305084984.py
kozakusek/ipp-2020-testy
09aa008fa53d159672cc7cbf969a6b237e15a7b8
[ "MIT" ]
18
2020-03-06T17:50:15.000Z
2020-05-19T14:58:30.000Z
z2/part2/interactive/jm/random_normal_1/305084984.py
kozakusek/ipp-2020-testy
09aa008fa53d159672cc7cbf969a6b237e15a7b8
[ "MIT" ]
18
2020-03-06T17:45:13.000Z
2020-06-09T19:18:31.000Z
from part1 import (
    gamma_board,
    gamma_busy_fields,
    gamma_delete,
    gamma_free_fields,
    gamma_golden_move,
    gamma_golden_possible,
    gamma_move,
    gamma_new,
)

"""
scenario: test_random_actions
uuid: 305084984
"""

"""
random actions, total chaos
"""

board = gamma_new(5, 7, 6, 4)
assert board is not None

assert gamma_move(board, 1, 3, 4) == 1
assert gamma_move(board, 2, 0, 4) == 1
assert gamma_move(board, 2, 4, 5) == 1
assert gamma_move(board, 5, 0, 0) == 1
assert gamma_move(board, 6, 4, 6) == 1
assert gamma_move(board, 1, 0, 4) == 0
assert gamma_move(board, 1, 2, 5) == 1
assert gamma_move(board, 2, 4, 1) == 1
assert gamma_move(board, 2, 3, 2) == 1
assert gamma_move(board, 3, 1, 1) == 1
assert gamma_move(board, 4, 0, 4) == 0
assert gamma_move(board, 5, 4, 3) == 1
assert gamma_move(board, 6, 4, 4) == 1
assert gamma_move(board, 6, 3, 3) == 1
assert gamma_golden_possible(board, 6) == 1
assert gamma_move(board, 1, 2, 4) == 1
assert gamma_move(board, 1, 3, 4) == 0
assert gamma_move(board, 2, 6, 1) == 0
assert gamma_move(board, 3, 5, 1) == 0
assert gamma_move(board, 3, 2, 4) == 0
assert gamma_free_fields(board, 3) == 22
assert gamma_golden_possible(board, 3) == 1
assert gamma_move(board, 4, 2, 0) == 1
assert gamma_free_fields(board, 4) == 21
assert gamma_move(board, 5, 1, 6) == 1
assert gamma_move(board, 6, 0, 1) == 1
assert gamma_busy_fields(board, 6) == 4
assert gamma_move(board, 1, 2, 2) == 1
assert gamma_busy_fields(board, 1) == 4
assert gamma_busy_fields(board, 2) == 4
assert gamma_move(board, 3, 6, 3) == 0
assert gamma_busy_fields(board, 3) == 1
assert gamma_move(board, 4, 5, 0) == 0
assert gamma_move(board, 5, 0, 4) == 0
assert gamma_move(board, 6, 0, 3) == 0

board554887423 = gamma_board(board)
assert board554887423 is not None
assert board554887423 == (".5..6\n"
                          "..1.2\n"
                          "2.116\n"
                          "...65\n"
                          "..12.\n"
                          "63..2\n"
                          "5.4..\n")
del board554887423
board554887423 = None

assert gamma_move(board, 1, 4, 2) == 1
assert gamma_move(board, 2, 0, 3) == 1
assert gamma_move(board, 2, 1, 5) == 0
assert gamma_move(board, 3, 6, 3) == 0
assert gamma_move(board, 4, 3, 6) == 1
assert gamma_busy_fields(board, 4) == 2
assert gamma_free_fields(board, 4) == 15
assert gamma_move(board, 5, 0, 5) == 1
assert gamma_move(board, 6, 0, 4) == 0
assert gamma_golden_possible(board, 6) == 1
assert gamma_move(board, 1, 3, 3) == 0
assert gamma_move(board, 1, 4, 0) == 1
assert gamma_move(board, 3, 0, 3) == 0
assert gamma_move(board, 3, 2, 0) == 0
assert gamma_move(board, 4, 0, 0) == 0
assert gamma_move(board, 4, 4, 2) == 0
assert gamma_move(board, 5, 6, 2) == 0
assert gamma_move(board, 5, 1, 5) == 1
assert gamma_move(board, 6, 3, 0) == 0
assert gamma_move(board, 6, 4, 2) == 0
assert gamma_move(board, 1, 0, 3) == 0
assert gamma_move(board, 1, 3, 3) == 0
assert gamma_move(board, 2, 6, 0) == 0
assert gamma_move(board, 2, 1, 3) == 1
assert gamma_move(board, 3, 5, 3) == 0
assert gamma_move(board, 3, 3, 2) == 0
assert gamma_golden_possible(board, 3) == 1
assert gamma_move(board, 4, 1, 2) == 1
assert gamma_move(board, 4, 2, 1) == 1
assert gamma_busy_fields(board, 4) == 4
assert gamma_move(board, 5, 2, 0) == 0
assert gamma_move(board, 5, 1, 6) == 0
assert gamma_golden_possible(board, 5) == 1
assert gamma_move(board, 6, 0, 0) == 0
assert gamma_move(board, 1, 4, 1) == 0
assert gamma_move(board, 2, 3, 3) == 0
assert gamma_move(board, 3, 5, 3) == 0
assert gamma_move(board, 4, 6, 2) == 0
assert gamma_move(board, 4, 2, 5) == 0
assert gamma_move(board, 5, 6, 2) == 0
assert gamma_move(board, 6, 5, 3) == 0
assert gamma_move(board, 6, 3, 5) == 0
assert gamma_move(board, 1, 4, 1) == 0
assert gamma_golden_move(board, 2, 1, 0) == 0
assert gamma_move(board, 3, 0, 5) == 0
assert gamma_move(board, 4, 0, 1) == 0
assert gamma_move(board, 4, 4, 3) == 0
assert gamma_free_fields(board, 4) == 9
assert gamma_move(board, 5, 1, 3) == 0
assert gamma_move(board, 6, 6, 0) == 0
assert gamma_move(board, 6, 4, 3) == 0
assert gamma_move(board, 1, 0, 5) == 0
assert gamma_busy_fields(board, 1) == 6

board374933349 = gamma_board(board)
assert board374933349 is not None
assert board374933349 == (".5.46\n"
                          "551.2\n"
                          "2.116\n"
                          "22.65\n"
                          ".4121\n"
                          "634.2\n"
                          "5.4.1\n")
del board374933349
board374933349 = None

assert gamma_move(board, 2, 1, 6) == 0
assert gamma_move(board, 2, 2, 0) == 0
assert gamma_move(board, 3, 5, 3) == 0
assert gamma_move(board, 3, 1, 5) == 0
assert gamma_golden_possible(board, 3) == 1
assert gamma_move(board, 4, 1, 3) == 0
assert gamma_move(board, 4, 2, 4) == 0
assert gamma_move(board, 5, 2, 0) == 0

board729753162 = gamma_board(board)
assert board729753162 is not None
assert board729753162 == (".5.46\n"
                          "551.2\n"
                          "2.116\n"
                          "22.65\n"
                          ".4121\n"
                          "634.2\n"
                          "5.4.1\n")
del board729753162
board729753162 = None

assert gamma_move(board, 6, 0, 3) == 0
assert gamma_move(board, 1, 4, 1) == 0
assert gamma_free_fields(board, 1) == 5
assert gamma_golden_possible(board, 1) == 1
assert gamma_move(board, 2, 4, 1) == 0
assert gamma_move(board, 2, 4, 5) == 0
assert gamma_move(board, 3, 1, 3) == 0
assert gamma_move(board, 4, 1, 1) == 0
assert gamma_move(board, 4, 2, 6) == 1
assert gamma_move(board, 5, 4, 1) == 0
assert gamma_move(board, 5, 2, 2) == 0
assert gamma_move(board, 6, 0, 1) == 0
assert gamma_move(board, 6, 1, 6) == 0
assert gamma_free_fields(board, 6) == 2
assert gamma_move(board, 1, 0, 3) == 0
assert gamma_move(board, 1, 1, 3) == 0
assert gamma_golden_possible(board, 1) == 1
assert gamma_move(board, 2, 2, 0) == 0
assert gamma_free_fields(board, 2) == 5
assert gamma_move(board, 3, 0, 5) == 0
assert gamma_move(board, 4, 6, 0) == 0
assert gamma_move(board, 4, 0, 2) == 1

gamma_delete(board)
31.519126
46
0.655513
1,077
5,768
3.362117
0.046425
0.358465
0.393538
0.524717
0.83071
0.815797
0.665562
0.410936
0.30765
0.30765
0
0.137529
0.181865
5,768
182
47
31.692308
0.629794
0
0
0.256098
0
0
0.02588
0
0
0
0
0
0.762195
1
0
false
0
0.006098
0
0.006098
0
0
0
0
null
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
6
32bbd3aef73ad91f44e9c8a66532947d5e5698af
28
py
Python
regal/pnr/__init__.py
psurply/ReGAL
b0634a64b141ed34969d8e3c54073da2904bb345
[ "MIT" ]
43
2018-06-03T12:40:43.000Z
2022-01-31T07:55:54.000Z
regal/pnr/__init__.py
psurply/ReGAL
b0634a64b141ed34969d8e3c54073da2904bb345
[ "MIT" ]
4
2019-12-26T10:01:21.000Z
2020-08-17T02:56:20.000Z
regal/pnr/__init__.py
psurply/ReGAL
b0634a64b141ed34969d8e3c54073da2904bb345
[ "MIT" ]
5
2021-05-27T00:26:11.000Z
2022-01-01T12:25:12.000Z
from regal.pnr.pnr import *
14
27
0.75
5
28
4.2
0.8
0
0
0
0
0
0
0
0
0
0
0
0.142857
28
1
28
28
0.875
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6