hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
bbee29ba0d95c4dca840b621137c9ff855af01a6
790
py
Python
test_python_toolbox/test_cute_iter_tools/test_pushback_iterator.py
hboshnak/python_toolbox
cb9ef64b48f1d03275484d707dc5079b6701ad0c
[ "MIT" ]
119
2015-02-05T17:59:47.000Z
2022-02-21T22:43:40.000Z
test_python_toolbox/test_cute_iter_tools/test_pushback_iterator.py
hboshnak/python_toolbox
cb9ef64b48f1d03275484d707dc5079b6701ad0c
[ "MIT" ]
4
2019-04-24T14:01:14.000Z
2020-05-21T12:03:29.000Z
test_python_toolbox/test_cute_iter_tools/test_pushback_iterator.py
hboshnak/python_toolbox
cb9ef64b48f1d03275484d707dc5079b6701ad0c
[ "MIT" ]
14
2015-03-30T06:30:42.000Z
2021-12-24T23:45:11.000Z
# Copyright 2009-2017 Ram Rachum. # This program is distributed under the MIT license. from python_toolbox import cute_testing from python_toolbox.cute_iter_tools import PushbackIterator def test_pushback_iterator(): pushback_iterator = PushbackIterator(iter([1, 2, 3])) assert next(pushback_iterator) == 1 assert next(pushback_iterator) == 2 pushback_iterator.push_back() assert next(pushback_iterator) == 2 assert next(pushback_iterator) == 3 pushback_iterator.push_back() assert next(pushback_iterator) == 3 with cute_testing.RaiseAssertor(StopIteration): next(pushback_iterator) pushback_iterator.push_back() assert next(pushback_iterator) == 3 with cute_testing.RaiseAssertor(StopIteration): next(pushback_iterator)
31.6
59
0.75443
96
790
5.958333
0.375
0.363636
0.27972
0.272727
0.575175
0.479021
0.479021
0.479021
0.391608
0.391608
0
0.025797
0.165823
790
25
60
31.6
0.842185
0.103797
0
0.705882
0
0
0
0
0
0
0
0
0.470588
1
0.058824
false
0
0.117647
0
0.176471
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
5
a53d33fac1b5e0304d1f558a3b2f56778388ee19
78
py
Python
knowledge_graph/crawler/runner/jd/__init__.py
Syhen/knowledge-graph
35b35624f78ec58b3ca9f1e6eaf4a5e5ff80edc2
[ "MIT" ]
2
2019-07-01T02:18:33.000Z
2020-01-14T11:20:44.000Z
knowledge_graph/crawler/runner/jd/__init__.py
Syhen/knowledge-graph
35b35624f78ec58b3ca9f1e6eaf4a5e5ff80edc2
[ "MIT" ]
null
null
null
knowledge_graph/crawler/runner/jd/__init__.py
Syhen/knowledge-graph
35b35624f78ec58b3ca9f1e6eaf4a5e5ff80edc2
[ "MIT" ]
2
2020-03-19T10:22:34.000Z
2022-03-06T01:41:56.000Z
# -*- coding: utf-8 -*- """ Author: @heyao Created On: 2019/6/26 上午10:07 """
11.142857
29
0.551282
12
78
3.583333
1
0
0
0
0
0
0
0
0
0
0
0.1875
0.179487
78
6
30
13
0.484375
0.871795
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
a56573d1cd986e2ca56dcd4354455eb4ec0362fb
129
py
Python
musicsort/web/admin.py
markoshorro/MusicSort
fc85ceeb8c06a3c7486645b4f4b924146de45734
[ "Apache-2.0" ]
null
null
null
musicsort/web/admin.py
markoshorro/MusicSort
fc85ceeb8c06a3c7486645b4f4b924146de45734
[ "Apache-2.0" ]
null
null
null
musicsort/web/admin.py
markoshorro/MusicSort
fc85ceeb8c06a3c7486645b4f4b924146de45734
[ "Apache-2.0" ]
null
null
null
from django.contrib import admin from web.models import UploadFile # Register your models here. admin.site.register(UploadFile)
21.5
33
0.821705
18
129
5.888889
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.116279
129
5
34
25.8
0.929825
0.20155
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
a56a3991bd863b9a821850bf51c3bd8eb964d675
167
py
Python
boa3_test/test_sc/built_in_methods_test/StrSplit.py
hal0x2328/neo3-boa
6825a3533384cb01660773050719402a9703065b
[ "Apache-2.0" ]
25
2020-07-22T19:37:43.000Z
2022-03-08T03:23:55.000Z
boa3_test/test_sc/built_in_methods_test/StrSplit.py
hal0x2328/neo3-boa
6825a3533384cb01660773050719402a9703065b
[ "Apache-2.0" ]
419
2020-04-23T17:48:14.000Z
2022-03-31T13:17:45.000Z
boa3_test/test_sc/built_in_methods_test/StrSplit.py
hal0x2328/neo3-boa
6825a3533384cb01660773050719402a9703065b
[ "Apache-2.0" ]
15
2020-05-21T21:54:24.000Z
2021-11-18T06:17:24.000Z
from typing import List from boa3.builtin import public @public def main(string: str, sep: str, maxsplit: int) -> List[str]: return string.split(sep, maxsplit)
18.555556
60
0.724551
25
167
4.84
0.64
0
0
0
0
0
0
0
0
0
0
0.007194
0.167665
167
8
61
20.875
0.863309
0
0
0
0
0
0
0
0
0
0
0
0
1
0.2
false
0
0.4
0.2
0.8
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
1
1
0
0
5
a5753ba66364bfc00da52f102d50a9b75efcb714
72
py
Python
lib/scripts/convertPrintable.py
gideontong/Bot4Christ
4a503df857397cddebdc9e098c8ba7fd4ef3f17c
[ "MIT" ]
1
2020-09-10T09:27:32.000Z
2020-09-10T09:27:32.000Z
lib/scripts/convertPrintable.py
gideontong/Bot4Christ
4a503df857397cddebdc9e098c8ba7fd4ef3f17c
[ "MIT" ]
11
2020-07-31T04:59:09.000Z
2021-02-23T18:21:30.000Z
lib/scripts/convertPrintable.py
gideontong/Bot4Christ
4a503df857397cddebdc9e098c8ba7fd4ef3f17c
[ "MIT" ]
1
2021-03-07T20:07:41.000Z
2021-03-07T20:07:41.000Z
import json print(json.loads(open('CUV.json', encoding='utf-8').read()))
36
60
0.708333
12
72
4.25
0.833333
0
0
0
0
0
0
0
0
0
0
0.014493
0.041667
72
2
60
36
0.724638
0
0
0
0
0
0.178082
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0.5
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
1
0
5
3c36737ac656039ff1fd242e645063e199a24177
180
py
Python
client/cart/errors.py
daniel-waruo/e-commerse-api
6b080039398fb4099a34335317d649dd67783f63
[ "Apache-2.0" ]
6
2019-11-21T10:09:49.000Z
2021-06-19T09:52:59.000Z
client/cart/errors.py
daniel-waruo/e-commerse-api
6b080039398fb4099a34335317d649dd67783f63
[ "Apache-2.0" ]
null
null
null
client/cart/errors.py
daniel-waruo/e-commerse-api
6b080039398fb4099a34335317d649dd67783f63
[ "Apache-2.0" ]
null
null
null
class NoUserIdOrSessionKeyError(Exception): pass class NoProductToDelete(Exception): pass class NoCart(Exception): pass class BadConfigError(Exception): pass
12
43
0.744444
16
180
8.375
0.4375
0.38806
0.402985
0
0
0
0
0
0
0
0
0
0.188889
180
14
44
12.857143
0.917808
0
0
0.5
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
5
3c7f4418c06b101fe9a141bc5d7934b71d420b48
161
py
Python
scipy/io/arff/utils.py
lesserwhirls/scipy-cwt
ee673656d879d9356892621e23ed0ced3d358621
[ "BSD-3-Clause" ]
8
2015-10-07T00:37:32.000Z
2022-01-21T17:02:33.000Z
scipy/io/arff/utils.py
lesserwhirls/scipy-cwt
ee673656d879d9356892621e23ed0ced3d358621
[ "BSD-3-Clause" ]
null
null
null
scipy/io/arff/utils.py
lesserwhirls/scipy-cwt
ee673656d879d9356892621e23ed0ced3d358621
[ "BSD-3-Clause" ]
8
2015-05-09T14:23:57.000Z
2018-11-15T05:56:00.000Z
#! /usr/bin/env python # Last Change: Mon Aug 20 02:00 PM 2007 J try: from functools import partial except ImportError: from myfunctools import partial
20.125
41
0.732919
25
161
4.72
0.88
0.220339
0
0
0
0
0
0
0
0
0
0.078125
0.204969
161
7
42
23
0.84375
0.378882
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.75
0
0.75
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
3c9fe7670ba972fdb98b8f44d78954a29771ab45
146
py
Python
learn_app/endpoints/index.py
alex-d-bondarev/learn-flask
dfa47821a3cf606f0535bbe79c373610afe1b957
[ "MIT" ]
null
null
null
learn_app/endpoints/index.py
alex-d-bondarev/learn-flask
dfa47821a3cf606f0535bbe79c373610afe1b957
[ "MIT" ]
null
null
null
learn_app/endpoints/index.py
alex-d-bondarev/learn-flask
dfa47821a3cf606f0535bbe79c373610afe1b957
[ "MIT" ]
null
null
null
""" Index Page """ from learn_app.main import app @app.route("/") def index(): """Default url :return: """ return "Index Page"
10.428571
30
0.561644
18
146
4.5
0.666667
0.222222
0
0
0
0
0
0
0
0
0
0
0.253425
146
13
31
11.230769
0.743119
0.212329
0
0
0
0
0.11828
0
0
0
0
0
0
1
0.25
true
0
0.25
0
0.75
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
0
1
0
0
5
b1c9c012f6a0fafcca2c9cee054e1bf1c9410db4
77
py
Python
cherrypy/__main__.py
abancu/core
e110a1df32ec8bf67f007960e61df55f0a926219
[ "MIT" ]
674
2015-11-06T04:22:47.000Z
2022-02-26T17:31:43.000Z
cherrypy/__main__.py
abancu/core
e110a1df32ec8bf67f007960e61df55f0a926219
[ "MIT" ]
713
2015-11-06T10:48:58.000Z
2018-11-27T16:32:18.000Z
cherrypy/__main__.py
abancu/core
e110a1df32ec8bf67f007960e61df55f0a926219
[ "MIT" ]
115
2015-01-08T14:41:00.000Z
2022-02-13T12:31:17.000Z
import cherrypy.daemon if __name__ == '__main__': cherrypy.daemon.run()
15.4
26
0.714286
9
77
5.222222
0.777778
0.595745
0
0
0
0
0
0
0
0
0
0
0.155844
77
4
27
19.25
0.723077
0
0
0
0
0
0.103896
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
b1dbbd139bd089bb7853d3ab04dd3994a00683b3
50,191
py
Python
ibis/driver/tests/test_driver.py
shivaathreya/ibis
f99e3b7a677652a8a1c00a069e645d97682e839c
[ "Apache-2.0" ]
50
2018-09-27T13:03:45.000Z
2021-04-06T15:36:59.000Z
ibis/driver/tests/test_driver.py
shivaathreya/ibis
f99e3b7a677652a8a1c00a069e645d97682e839c
[ "Apache-2.0" ]
null
null
null
ibis/driver/tests/test_driver.py
shivaathreya/ibis
f99e3b7a677652a8a1c00a069e645d97682e839c
[ "Apache-2.0" ]
14
2018-10-03T20:36:15.000Z
2021-05-18T07:08:57.000Z
"""Driver tests.""" import copy import difflib import os import sys import time import unittest from mock import patch, Mock, MagicMock from ibis.driver.driver import Driver from ibis.inventor.tests.fixture_workflow_generator import * from ibis.inventory.cb_inventory import CheckBalancesInventory from ibis.inventory.automation_ids_inventory import AUTOInventory from ibis.inventory.export_it_inventory import ExportITInventory from ibis.inventory.inventory import Inventory from ibis.inventory.it_inventory import ITInventory from ibis.inventory.perf_inventory import PerfInventory from ibis.inventory.request_inventory import Request, RequestInventory from ibis.model.exporttable import ItTableExport from ibis.model.table import ItTable from ibis.settings import UNIT_TEST_ENV from ibis.utilities.config_manager import ConfigManager from ibis.utilities.file_parser import parse_file_by_sections from ibis.utilities.it_table_generation import Get_Auto_Split from ibis.utilities.utilities import Utilities from ibis.utilities.vizoozie import VizOozie BASE_DIR = os.path.dirname(os.path.abspath(__file__)) class DriverFunctionsTest(unittest.TestCase): """Tests the functionality of the Driver class.""" @patch('ibis.driver.driver.Utilities', autospec=True) @patch.object(Inventory, '_connect', autospec=True) def setUp(self, mock_connect, m_U): """Setup.""" mock_util_methods = MagicMock() mock_util_methods.run_subprocess = MagicMock() mock_util_methods.run_subprocess.return_value = 0 m_U.return_value = mock_util_methods self.cfg_mgr = ConfigManager(UNIT_TEST_ENV) self.driver = Driver(self.cfg_mgr) self.start_time = time.time() def tearDown(self): """Tear down.""" self.driver = None t2 = time.time() - self.start_time # print "%s: %.3f" % (self.id(), t2) def files_equal(self, test_file, expected_file): """Compares two files""" same = True test_fh = open(test_file, 'r') fo_gen = open(expected_file, 'r') test_str = test_fh.read() expected_str = fo_gen.read() if not 
self.strings_equal(test_str, expected_str): print "Generated test file:{0}".format(test_file) print "Fix the file:{0}".format(expected_file) same = False test_fh.close() fo_gen.close() return same def strings_equal(self, test_str, expected_str): """compare strings""" same = True test_str = [xml.strip().replace('\t', '') for xml in test_str.splitlines()] expected_str = [xml.strip().replace('\t', '') for xml in expected_str.splitlines()] if "".join(expected_str) != "".join(test_str): same = False print '\n' print '=' * 100 print "\nFiles don't match..." diff = difflib.unified_diff(expected_str, test_str) print '\n'.join(list(diff)) return same def test_submit_it_file_empty(self): """Test submit it file with an empty file.""" file_h = open(os.path.join(BASE_DIR, 'test_resources/empty_file.txt'), 'r') result = self.driver.submit_it_file(file_h) self.assertEquals(result, '') def test_submit_it_file_export_empty(self): """Test submit it file with an empty file.""" file_h = open(os.path.join(BASE_DIR, 'test_resources/empty_file.txt'), 'r') result = self.driver.submit_it_file_export(file_h) self.assertEquals(result, '') @patch.object(Get_Auto_Split, 'get_split_by_column', return_value='') def test_submit_it_file_insert(self, m1): """Test submit it file with a valid it table file.""" self.driver.req_inventory = MagicMock(spec=RequestInventory) self.driver.req_inventory.parse_file.return_value = \ ([Request(mock_table_mapping_val, self.cfg_mgr)], 'Parse File Success') self.driver.it_inventory.get_table_mapping = \ MagicMock(spec=ITInventory.get_table_mapping) self.driver.it_inventory.get_table_mapping.return_value = {} self.driver.it_inventory.insert = MagicMock(spec=ITInventory.insert) self.driver.it_inventory.insert.return_value = (True, 'Insert Success') result = self.driver.submit_it_file('test') self.assertEquals(result, 'Parse File Success\nInsert Success') def test_submit_it_file_export_insert(self): """Test submit it file with a valid it table file.""" 
self.driver.req_inventory = MagicMock(spec=RequestInventory) self.driver.req_inventory.parse_file_export.return_value = \ ([Request(mock_table_mapping_val_export, self.cfg_mgr)], 'Parse File Success') self.driver.export_it_inventory.get_table_mapping = \ MagicMock(spec=ExportITInventory.get_table_mapping) self.driver.export_it_inventory.get_table_mapping.return_value = {} self.driver.export_it_inventory.insert_export = \ MagicMock(spec=ExportITInventory.insert_export) self.driver.export_it_inventory.insert_export.return_value = \ (True, 'Insert Success') result = self.driver.submit_it_file_export('test') self.assertEquals(result, 'Parse File Success\nInsert Success') @patch.object(Get_Auto_Split, 'get_split_by_column', return_value='') def test_submit_it_file_update(self, m1): """Test submit it file with an updated it table file.""" self.driver.req_inventory = MagicMock(spec=RequestInventory) self.driver.req_inventory.parse_file.return_value = \ ([Request(mock_table_mapping_val, self.cfg_mgr)], 'Parse File Success') self.driver.it_inventory.insert = MagicMock(spec=ITInventory.insert) self.driver.it_inventory.update = MagicMock(spec=ITInventory.update) self.driver.it_inventory.get_table_mapping = MagicMock( spec=ITInventory.get_table_mapping) self.driver.it_inventory.update.return_value = (True, 'Update Success') updated_table = copy.deepcopy(mock_table_mapping_val) updated_table['db_username'] = 'updated_user' self.driver.it_inventory.get_table_mapping.return_value = updated_table result = self.driver.submit_it_file('test') self.assertEquals(result, 'Parse File Success\nUpdate Success') def test_submit_it_file_export_update(self): """Test submit it file with an updated it table file.""" self.driver.req_inventory = MagicMock(spec=RequestInventory) self.driver.req_inventory.parse_file_export.return_value = \ ([Request(mock_table_mapping_val_export, self.cfg_mgr)], 'Parse File Success\nUpdate Success') self.driver.export_it_inventory.insert_export = \ 
MagicMock(spec=ExportITInventory.insert_export) self.driver.export_it_inventory.update_export = \ MagicMock(spec=ExportITInventory.update_export) self.driver.export_it_inventory.get_table_mapping = \ MagicMock(spec=ExportITInventory.get_table_mapping) self.driver.export_it_inventory.update_export.return_value = \ (True, 'Update Success') updated_table = copy.deepcopy(mock_table_mapping_val_export) updated_table['db_username'] = 'updated_user' self.driver.export_it_inventory.get_table_mapping.return_value = \ updated_table result = self.driver.submit_it_file_export('test') self.assertEquals(result, 'Parse File Success\nUpdate Success') @patch('ibis.utilities.run_parallel.DryRunWorkflowManager.run_all', autospec=True) @patch('ibis.utilities.run_parallel.SqoopCacheManager.cache_ddl_views', autospec=True) @patch('ibis.utilities.run_parallel.SqoopCacheManager.cache_ddl_queries', autospec=True) @patch('ibis.driver.driver.Driver.update_it_table', autospec=True) @patch.object(sys, 'exit', autospec=True) @patch.object(Inventory, '_connect', autospec=True) @patch.object(Inventory, 'get_table_mapping', return_value=mock_table_mapping_val) @patch('ibis.inventor.action_builder.SqoopHelper.eval', autospec=True) @patch('ibis.driver.driver.RequestInventory.get_available_requests', autospec=True) def test_submit_request_valid(self, mock_get_available_requests, m_eval, m_get_t_m, mock_connect, mock_sys, m_s_it_file, m_sqoop_cache, m_sqoop_cache_view, m_dryrun): """Test submit request with a valid request file.""" m_eval.return_value = [['Col1', 'TIMESTAMP'], ['Col2', 'TIMESTAMP'], ['Col3', 'varchar']] mock_get_available_requests.return_value = \ ([ItTable(mock_table_mapping_val, self.cfg_mgr)], [], []) # self.driver.it_inventory = MagicMock(spec=ITInventory) self.driver.vizoozie = MagicMock(spec=VizOozie) self.driver.req_inventory.it_inventory = MagicMock(spec=ITInventory) file_h = open( os.path.join(BASE_DIR, 'test_resources/request_test_valid.txt'), 'r') result = 
self.driver.submit_request(file_h, True) self.assertIsNotNone(result) file_h = open( os.path.join(BASE_DIR, 'test_resources/request_test_valid.txt'), 'r') result = self.driver.submit_request(file_h, False) self.assertIsNotNone(result) def test_submit_request_invalid(self): """Test submit request with an invalid request file.""" file_h = open( os.path.join(BASE_DIR, 'test_resources/request_test_invalid.txt'), 'r') self.assertRaises(ValueError, self.driver.submit_request, file_h, True) @patch('ibis.driver.driver.Driver.update_it_table', autospec=True) def test_submit_request_empty(self, m_s_it_file): """Test submit request with an empty file.""" file_h = open(os.path.join(BASE_DIR, 'test_resources/empty_file.txt'), 'r') status, result = self.driver.submit_request(file_h, True) self.assertIn('Workflow not generated for request.', result) self.assertFalse(status) @patch.object(VizOozie, 'visualizeXML', autospec=True) @patch.object(VizOozie, 'convertDotToPDF', autospec=True) @patch('ibis.inventor.action_builder.SqoopHelper.eval', autospec=True) @patch('ibis.utilities.run_parallel.DryRunWorkflowManager.run_all', autospec=True) @patch('ibis.utilities.run_parallel.SqoopCacheManager.cache_ddl_views', autospec=True) @patch('ibis.utilities.run_parallel.SqoopCacheManager.cache_ddl_queries', autospec=True) @patch('ibis.driver.driver.Driver.update_it_table', autospec=True) @patch.object(Inventory, '_connect', autospec=True) @patch('ibis.driver.driver.RequestInventory.get_available_requests', autospec=True) def test_submit_request_unavailable(self, m_get_ar, m_con, m_s_it_file, m_sqoop_cache, m_sqoop_cache_view, m_dryrun, m_eval, m_convert_pdf, m_vi_xml): """Test submit request.""" m_eval.return_value = [['Col1', 'TIMESTAMP'], ['Col2', 'TIMESTAMP'], ['Col3', 'varchar']] m_get_ar.side_effect = [ ([ItTable(mock_table_mapping_val, self.cfg_mgr)], [ItTable(mock_table_mapping_val, self.cfg_mgr)], []), ([], [ItTable(mock_table_mapping_val, self.cfg_mgr)], [])] _path = 
os.path.join(BASE_DIR, 'test_resources/request_test_valid.txt') file_h = open(_path, 'r') _, result = self.driver.submit_request(file_h, True) self.assertIn('generated successfully', result) _path = os.path.join(BASE_DIR, 'test_resources/request_test_valid.txt') file_h = open(_path, 'r') _, result = self.driver.submit_request(file_h, True) self.assertIn('Workflow not generated for request.', result) def test_run_oozie_job(self): """Test run oozie.""" self.driver.utilities = Mock(spec=Utilities) self.driver.utilities.run_workflow.return_value = True self.assertTrue(self.driver.run_oozie_job("test_xml")) def test_save_it_table(self): """Test save it table.""" self.driver.it_inventory = Mock(spec=ITInventory) self.driver.it_inventory.save_all_tables.return_value = True self.assertTrue(self.driver.save_it_table(None)) def test_update_lifespan(self): """Test update lifespan using a mocked table from check and balances table.""" self.driver.cb_inventory = Mock(spec=CheckBalancesInventory) self.driver.cb_inventory.get.return_value = [ ['directory', 'pull_time', 'avro_size', 'ingest_timestamp', 'parquet_time', 'parquet_size', 'rows', 'lifespan', 'ack', 'cleaned', 'current_repull', 'domain', 'table']] self.assertIn('updated with new lifespan in checks_balances', self.driver.update_lifespan('db_name', 'tbl_name', 'lifespan')) def test_update_lifespan_no_tbl(self): """Test update lifespan using no table.""" self.driver.cb_inventory = Mock(spec=CheckBalancesInventory) self.driver.cb_inventory.get.return_value = None self.assertIn('ecord doesn\'t exist for table=', self.driver.update_lifespan('db_name', 'tbl_name', 'lifespan')) def test_update_all_lifespan(self): """Test update all lifespan.""" self.driver.it_inventory = Mock(spec=ITInventory) self.driver.cb_inventory = Mock(spec=CheckBalancesInventory) self.driver.it_inventory.get_all_tables.return_value = [ {'full_table_name': 'member.fake_database_fake_prog_tablename', 'domain': 'member', 'target_dir': 
'mdm/member/fake_database/fake_prog_tablename', 'split_by': '', 'mappers': 10, 'db_username': 'fake_username', 'jdbcurl': 'jdbc:oracle:thin:@//fake.oracle:' '1521/fake_servicename', 'connection_factories': 'com.quest.oraoop.OraOopManagerFactory', 'password_file': 'jceks://hdfs/user/dev/fake.passwords.' 'jceks#fake.password.alias', 'load': '000100', 'fetch_size': 20000, 'hold': 0, 'source_database_name': 'fake_database', 'source_table_name': 'fake_prog_tablename', 'automation_appl_id': 'TEST01', 'views': 'fake_view_im'}, {'full_table_name': 'fake_domainfake_database_fake_job_tablename', 'domain': 'fake_domain', 'target_dir': 'mdm/fake_domain/fake_database/fake_job_tablename', 'split_by': '', 'mappers': 2, 'jdbcurl': 'jdbc:oracle:thin:@//fake.oracle:' '1521/fake_servicename', 'connection_factories': 'com.quest.oraoop.OraOopManagerFactory', 'password_file': 'jceks://hdfs/user/dev/fake.passwords.jceks' '#fake.password.alias', 'db_username': 'fake_username', 'load': '000100', 'fetch_size': 20000, 'hold': 0, 'source_database_name': 'fake_database', 'source_table_name': 'fake_job_tablename', 'automation_appl_id': 'TEST01', 'views': 'fake_view_im'}] self.driver.cb_inventory.get.return_value = [ ['directory', 'pull_time', 'avro_size', 'ingest_timestamp', 'parquet_time', 'parquet_size', 'rows', 'lifespan', 'ack', 'cleaned', 'current_repull', 'domain', 'table']] self.assertIsNotNone(self.driver.update_all_lifespan()) def test_update_all_lifespan_load_invalid(self): """ Tests update all lifespan using a mocked table and invalid load value """ self.driver.it_inventory = Mock(spec=ITInventory) self.driver.it_inventory.get_all_tables.return_value = [ {'full_table_name': 'member.fake_database_fake_prog_tablename', 'domain': 'member', 'target_dir': 'mdm/member/fake_database/fake_prog_tablename', 'split_by': '', 'jdbcurl': 'jdbc:oracle:thin:@//fake.oracle:' '1521/fake_servicename', 'connection_factories': 'com.quest.oraoop.OraOopManagerFactory', 'password_file': 
'jceks://hdfs/user/dev/fake.passwords.jceks' '#fake.password.alias', 'load': '200100', 'db_username': 'fake_username', 'mappers': 10, 'fetch_size': 20000, 'hold': 0, 'source_database_name': 'fake_database', 'automation_appl_id': 'TEST01', 'source_table_name': 'fake_prog_tablename', 'views': 'fake_view_im'}] self.assertEquals(self.driver.update_all_lifespan(), '') def test_update_all_lifespan_no_tables(self): """ Tests update all lifespan using no tables """ self.driver.it_inventory = Mock(spec=ITInventory) self.driver.it_inventory.get_all_tables.return_value = [] self.assertEquals(self.driver.update_all_lifespan(), '') @patch.object(VizOozie, 'visualizeXML', autospec=True) @patch.object(VizOozie, 'convertDotToPDF', autospec=True) @patch('ibis.inventor.action_builder.SqoopHelper.eval', autospec=True) @patch('ibis.inventory.inventory.Inventory._connect', autospec=True) def test_gen_schedule_request(self, m_connect, m_eval, m_convert_pdf, m_vi_xml): """test gen wf for prod workflows""" m_eval.return_value = [['Col1', 'TIMESTAMP'], ['Col2', 'TIMESTAMP'], ['Col3', 'varchar']] tables = [ItTable(heavy_3_prop, self.cfg_mgr)] gen_files = self.driver.gen_schedule_request(tables, 'test_wf', 'test_appl') self.assertEquals(len(gen_files), 5) self.assertIn('test_wf.xml', gen_files) self.assertIn('test_wf.ksh', gen_files) self.assertIn('test_wf_job.properties', gen_files) @patch('ibis.utilities.run_parallel.DryRunWorkflowManager.run_all', autospec=True) @patch('ibis.utilities.run_parallel.SqoopCacheManager.cache_ddl_views', autospec=True) @patch('ibis.utilities.run_parallel.SqoopCacheManager.cache_ddl_queries', autospec=True) @patch('ibis.utilities.utilities.Utilities.put_dry_workflow', autospec=True) @patch('ibis.utilities.utilities.Utilities.dryrun_workflow', autospec=True) @patch.object(ITInventory, 'get_all_tables_for_automation', autospec=True) @patch.object(AUTOInventory, 'get_tables_by_id', autospec=True) @patch('ibis.inventory.inventory.Inventory._connect', autospec=True) 
@patch('ibis.inventor.action_builder.SqoopHelper.eval', autospec=True) @patch.object(VizOozie, 'visualizeXML', autospec=True) @patch.object(VizOozie, 'convertDotToPDF', autospec=True) @patch.object(PerfInventory, 'insert_freq_ingest', autospec=True) def test_gen_prod_workflow(self, m_freq_ingest, m_convert_pdf, m_v_xml, m_eval, m_c, m_get_id, m_get_t_automation, m_dryrun, m_put_w, m_sqoop_cache, m_sqoop_cache_view, m_dryrun_all): """ Tests generate_prod_workflows with 3 tables. One light, one medium and oen heavy.""" m_eval.return_value = [['Col1', 'varchar'], ['Col2', 'varchar']] m_get_id.side_effect = [appl_ref_id_tbl_01, appl_ref_id_tbl_02] _mock_automation_tables_02 = [ItTable(tbl, self.cfg_mgr) for tbl in mock_automation_tables_02] m_get_t_automation.return_value = _mock_automation_tables_02 self.cfg_mgr.env = 'perf' status, msg, git_files = self.driver.gen_prod_workflow('FAKED001') for file_name in git_files: git_file = 'full_fake_open_fake_prog_tablename.hql' if git_file in file_name: actual_hql_nm = os.path.join(self.cfg_mgr.files, file_name) with open(actual_hql_nm, 'r') as file_h: actual_hql = file_h.read() with open(BASE_DIR + '/test_resources/git_team_hql.hql', 'r') as file_h: expected_hql = file_h.read() self.assertTrue(expected_hql, actual_hql) self.assertEquals(len(git_files), 26) self.assertIn('Generated', msg) self.assertIn('workflow:', msg) self.assertIn('subworkflow:', msg) self.assertTrue(status) @patch('ibis.utilities.run_parallel.DryRunWorkflowManager.run_all', autospec=True) @patch('ibis.utilities.run_parallel.SqoopCacheManager.cache_ddl_views', autospec=True) @patch('ibis.utilities.run_parallel.SqoopCacheManager.cache_ddl_queries', autospec=True) @patch('ibis.utilities.utilities.Utilities.put_dry_workflow', autospec=True) @patch('ibis.utilities.utilities.Utilities.dryrun_workflow', autospec=True) @patch.object(ITInventory, 'get_all_tables_for_automation', autospec=True) @patch.object(AUTOInventory, 'get_tables_by_id', autospec=True) 
@patch('ibis.inventory.inventory.Inventory._connect', autospec=True) @patch('ibis.inventor.action_builder.SqoopHelper.eval', autospec=True) @patch.object(VizOozie, 'visualizeXML', autospec=True) @patch.object(VizOozie, 'convertDotToPDF', autospec=True) @patch.object(PerfInventory, 'insert_freq_ingest', autospec=True) def test_gen_prod_workflow_perf_nodomain(self, m_freq_ingest, m_convert_pdf, m_v_xml, m_eval, m_c, m_get_id, m_get_t_automation, m_dryrun, m_put_w, m_sqoop_cache, m_sqoop_cache_view, m_dryrun_all): """ Tests generate_prod_workflows with 3 tables. One light, one medium and oen heavy.""" m_eval.return_value = [['Col1', 'varchar'], ['Col2', 'varchar']] m_get_id.side_effect = [appl_ref_id_tbl_01, appl_ref_id_tbl_02] _mock_automation_tables = [ItTable(tbl, self.cfg_mgr) for tbl in mock_automation_tbl_perf_domain] m_get_t_automation.return_value = _mock_automation_tables self.cfg_mgr.env = 'perf' status, msg, git_files = self.driver.gen_prod_workflow('FAKED001') for file_name in git_files: git_file = 'full_fake_open_fake_prog_tablename.hql' if git_file in file_name: actual_hql_nm = os.path.join(self.cfg_mgr.files, file_name) with open(actual_hql_nm, 'r') as file_h: actual_hql = file_h.read() with open(BASE_DIR + '/test_resources/git_team_hql_nodomain.hql', 'r') as file_h: expected_hql = file_h.read() self.assertTrue(expected_hql, actual_hql) self.assertEquals(len(git_files), 23) self.assertIn('Generated', msg) self.assertIn('workflow:', msg) self.assertIn('subworkflow:', msg) self.assertTrue(status) @patch('ibis.utilities.run_parallel.DryRunWorkflowManager.run_all', autospec=True) @patch('ibis.utilities.run_parallel.SqoopCacheManager.cache_ddl_views', autospec=True) @patch('ibis.utilities.run_parallel.SqoopCacheManager.cache_ddl_queries', autospec=True) @patch('ibis.utilities.utilities.Utilities.put_dry_workflow', autospec=True) @patch('ibis.utilities.utilities.Utilities.dryrun_workflow', autospec=True) @patch.object(ITInventory, 
'get_all_tables_for_automation', autospec=True) @patch.object(AUTOInventory, 'get_tables_by_id', autospec=True) @patch('ibis.inventory.inventory.Inventory._connect', autospec=True) @patch('ibis.inventor.action_builder.SqoopHelper.eval', autospec=True) @patch.object(VizOozie, 'visualizeXML', autospec=True) @patch.object(VizOozie, 'convertDotToPDF', autospec=True) @patch.object(Driver, 'gen_incr_workflow_files', autospec=True) @patch.object(PerfInventory, 'insert_freq_ingest', autospec=True) def test_gen_prod_workflow_2(self, m_freq_ingest, m_gen_incr, m_convert_pdf, m_v_xml, m_eval, m_c, m_get_id, m_get_t_automation, m_dryrun, m_put_w, m_sqoop_cache, m_sqoop_cache_view, m_dryrun_all): """ Tests generate_prod_workflows with five tables. 3 heavy, one medium and one light.""" m_eval.return_value = [['Col1', 'varchar'], ['Col2', 'varchar']] m_get_id.side_effect = [appl_ref_id_tbl_01, appl_ref_id_tbl_02] _mock_automation_tables_03 = [ItTable(tbl, self.cfg_mgr) for tbl in mock_automation_tables_03] m_get_t_automation.return_value = _mock_automation_tables_03 m_gen_incr.return_value = [] self.cfg_mgr.env = 'perf' status, msg, git_files = self.driver.gen_prod_workflow('FAKED001') self.assertEquals(len(git_files), 40) self.assertIn('Generated', msg) self.assertIn('workflow:', msg) self.assertIn('subworkflow:', msg) self.assertTrue(status) @patch('ibis.utilities.run_parallel.DryRunWorkflowManager.run_all', autospec=True) @patch('ibis.utilities.run_parallel.SqoopCacheManager.cache_ddl_views', autospec=True) @patch('ibis.utilities.run_parallel.SqoopCacheManager.cache_ddl_queries', autospec=True) @patch('ibis.utilities.utilities.Utilities.put_dry_workflow', autospec=True) @patch('ibis.utilities.utilities.Utilities.dryrun_workflow', autospec=True) @patch.object(ITInventory, 'get_all_tables_for_automation', autospec=True) @patch.object(AUTOInventory, 'get_tables_by_id', autospec=True) @patch('ibis.inventory.inventory.Inventory._connect', autospec=True) 
@patch('ibis.inventor.action_builder.SqoopHelper.eval', autospec=True) @patch.object(VizOozie, 'visualizeXML', autospec=True) @patch.object(VizOozie, 'convertDotToPDF', autospec=True) @patch.object(PerfInventory, 'insert_freq_ingest', autospec=True) def test_gen_prod_workflow_3(self, m_freq_ingest, m_convert_pdf, m_v_xml, m_eval, m_c, m_get_id, m_get_t_automation, m_dryrun, m_put_w, m_sqoop_cache, m_sqoop_cache_view, m_dryrun_all): """ Tests generate_prod_workflows with 6 tables. Two light, three medium and one heavy.""" m_eval.return_value = [['Col1', 'varchar'], ['Col2', 'varchar']] m_get_id.side_effect = [appl_ref_id_tbl_01, appl_ref_id_tbl_02] _mock_automation_tables_01 = [ItTable(tbl, self.cfg_mgr) for tbl in mock_automation_tables_01] m_get_t_automation.return_value = _mock_automation_tables_01 self.cfg_mgr.env = 'perf' status, msg, git_files = self.driver.gen_prod_workflow('FAKED001') self.assertEquals(len(git_files), 51) self.assertIn('Generated', msg) self.assertIn('workflow:', msg) self.assertIn('subworkflow:', msg) self.assertTrue(status) @patch.object(ITInventory, 'get_all_tables_for_automation', autospec=True) @patch.object(AUTOInventory, 'get_tables_by_id', autospec=True) @patch('ibis.inventory.inventory.Inventory._connect', autospec=True) def test_gen_prod_workflow_without_applrefs(self, m_c, m_get_tables_by_id, m_get_all_tables_for_automation): """ test_gen_prod_workflow_without_applrefs""" m_get_all_tables_for_automation.return_value = [mock_automation_tables_01, mock_automation_tables_02] m_get_tables_by_id.return_value = [] status, msg, git_files = self.driver.gen_prod_workflow('FAKED001') self.assertEquals(len(git_files), 0) self.assertIn( "No row found for automation_appl_id: 'FAKED001' " "in 'ibis.automation_ids' table", msg) self.assertFalse(status) @patch.object(ITInventory, 'get_all_tables_for_automation', autospec=True) @patch('ibis.inventory.inventory.Inventory._connect', autospec=True) def test_gen_prod_workflow_without_tables(self, m_c, 
m_get_all_tables_for_automation): """ test_gen_prod_workflow_without_applrefs""" m_get_all_tables_for_automation.return_value = [] status, msg, git_files = self.driver.gen_prod_workflow('FAKED001') self.assertEquals(len(git_files), 0) self.assertIn("No tables found for automation_appl_id: 'FAKED001'", msg) self.assertFalse(status) @patch.object(Inventory, '_connect', autospec=True) def test_export(self, m_con): """test export no table""" self.driver.it_inventory = MagicMock(spec=ITInventory) self.driver.it_inventory.get_table_mapping.return_value = light_3_prop result = self.driver.export('fake_database', 'fake_cens_tablename', 'fake_domain.fake_database_fake_cens_tablename') print result self.assertIn('_export.xml, generated to', result) def test_export_no_tbl(self): """test export no table""" self.driver.it_inventory = MagicMock(spec=ITInventory) self.driver.it_inventory.get_table_mapping.return_value = {} result = self.driver.export('fake_database', 'fake_fa_tablename', 'member.fake_database' '_fake_fa_tablename') self.assertIn( 'doesn\'t exist in the it_table export directory ' 'could not be found.', result) def test_export_fail(self): """test export no table""" self.driver.it_inventory = MagicMock(spec=ITInventory) self.driver.it_inventory.get_table_mapping.return_value = {} result = self.driver.export( 'fake_database', 'fake_fa_tablename', 'memberfake_database_fake_fa_tablename') self.assertIn('Please provide an appropriate --to', result) @patch.object(Driver, 'submit_it_file', autospec=True) @patch('ibis.driver.driver.create', autospec=True) def test_gen_it_table_with_split_by(self, mock_it_table_gen_create, mock_submit_it_file): """Test split by wrapper.""" with patch('__builtin__.open') as m_open: m_open.readlines.return_value = MagicMock(spec=file) self.driver.gen_it_table_with_split_by(m_open, 45) @patch.object(Inventory, '_connect', autospec=True) def test_generate_subworkflow(self, m_con): result = 
self.driver.generate_subworkflow('test_generate_subworkflow', ['files']) self.assertIn('Generated subworkflow', result) def test_group_workflows(self): """test _group_workflows""" ws_path = self.cfg_mgr.oozie_workspace generated_wfs = [ ws_path + 'file1.xml', ws_path + 'file2.xml', ws_path + 'file3.xml', ws_path + 'file4.xml', ws_path + 'file5.xml', ws_path + 'file6.xml', ws_path + 'file7.xml', ws_path + 'file8.xml', ws_path + 'file9.xml', ws_path + 'file10.xml', ws_path + 'file11.xml', ws_path + 'file12.xml', ws_path + 'file13.xml', ws_path + 'file14.xml', ws_path + 'file15.xml', ws_path + 'file16.xml', ws_path + 'file17.xml', ws_path + 'file18.xml'] chunks = self.driver._group_workflows(generated_wfs) self.assertEquals(len(chunks), 4) self.assertIn(ws_path + 'file18.xml', chunks[3]) def test_parse_kite_request(self): required_fields = ['source_database_name', 'source_table_name', 'hdfs_loc'] optional_fields = [] expected_request = [{'source_database_name': 'database_one', 'hdfs_loc': '/test/hdfs/loc', 'source_table_name': 'table_one'}, {'source_database_name': 'database_one', 'hdfs_loc': '/test/hdfs/loc', 'source_table_name': 'table_two'}] file_h = open(os.path.join(BASE_DIR, 'test_resources/kite_request.txt'), 'r') requests, msg, _ = parse_file_by_sections(file_h, '[Request]', required_fields, optional_fields) self.assertEquals(expected_request, requests) @patch.object(Driver, '_get_workflow_name', return_value="kite_request") def test_gen_kite_workflow(self, mock1): file_h = open(os.path.join(BASE_DIR, 'test_resources/kite_request.txt'), 'r') result = self.driver.gen_kite_workflow(file_h) self.assertIsNotNone(result) def test_gen_schedule_subworkflows_int_sys(self): """test gen schedule subworkflows for same table from sys and int""" sub_wf_file_name = 'test_subwf' tab1 = ItTable(sqls_int, self.cfg_mgr) tab2 = ItTable(sqls_sys, self.cfg_mgr) heavy_tables = [tab1, tab2] workflows_chunks = [[tab1, 'int_full_load'], [tab2, 'sys_full_load']] gen_files = 
self.driver.gen_schedule_subworkflows( sub_wf_file_name, workflows_chunks, heavy_tables, 'FAKED001') self.assertEquals(len(gen_files), 4) expected_file = os.path.join( BASE_DIR, 'test_resources/subwf_sys_int.xml') test_file = os.path.join(self.cfg_mgr.files, 'test_subwf.xml') self.assertTrue(self.files_equal(expected_file, test_file)) def test_gen_schedule_subworkflows_light(self): """test gen schedule subworkflows for 5 light tables""" sub_wf_file_name = 'test_subwf' tab1 = ItTable(fake_cens_tbl_prop, self.cfg_mgr) tab2 = ItTable(light_3_prop, self.cfg_mgr) tab3 = ItTable(light_4_prop, self.cfg_mgr) tab4 = ItTable(light_5_prop, self.cfg_mgr) tab5 = ItTable(fake_ben_tbl_prop, self.cfg_mgr) light_tables = [tab1, tab2, tab3, tab4, tab5] workflows_chunks = [ [tab1, 'tab1_full_load'], [tab2, 'tab2_full_load'], [tab3, 'tab3_full_load'], [tab4, 'tab4_full_load'], [tab5, 'tab5_full_load']] gen_files = self.driver.gen_schedule_subworkflows( sub_wf_file_name, workflows_chunks, light_tables, 'FAKED001') self.assertEquals(len(gen_files), 4) expected_file = os.path.join( BASE_DIR, 'test_resources/subwf_light.xml') test_file = os.path.join(self.cfg_mgr.files, 'test_subwf.xml') self.assertTrue(self.files_equal(expected_file, test_file)) def test_gen_schedule_subworkflows_heavy(self): """test gen schedule subworkflows for 3 heavy tables""" sub_wf_file_name = 'test_subwf' tab1 = ItTable(heavy_2_prop, self.cfg_mgr) tab2 = ItTable(heavy_3_prop, self.cfg_mgr) tab3 = ItTable(full_ingest_tbl_mysql, self.cfg_mgr) heavy_tables = [tab1, tab2, tab3] workflows_chunks = [ [tab1, 'tab1_full_load'], [tab2, 'tab2_full_load'], [tab3, 'tab3_full_load']] gen_files = self.driver.gen_schedule_subworkflows( sub_wf_file_name, workflows_chunks, heavy_tables, 'FAKED001') self.assertEquals(len(gen_files), 4) expected_file = os.path.join( BASE_DIR, 'test_resources/subwf_heavy.xml') test_file = os.path.join(self.cfg_mgr.files, 'test_subwf.xml') self.assertTrue(self.files_equal(expected_file, test_file)) 
def test_gen_schedule_subworkflows_mixed(self): """test gen schedule subworkflows for 5 light tables, 3 medium tables, 3 heavy tables """ sub_wf_file_name = 'test_subwf' # heavy tab1 = ItTable(heavy_2_prop, self.cfg_mgr) tab2 = ItTable(heavy_3_prop, self.cfg_mgr) tab3 = ItTable(full_ingest_tbl_mysql, self.cfg_mgr) # medium tab4 = ItTable(fake_fct_tbl_prop, self.cfg_mgr) tab5 = ItTable(fake_fact_tbl_prop, self.cfg_mgr) tab6 = ItTable(mock_table_mapping_val, self.cfg_mgr) # small tab7 = ItTable(fake_cens_tbl_prop, self.cfg_mgr) tab8 = ItTable(light_3_prop, self.cfg_mgr) tab9 = ItTable(light_4_prop, self.cfg_mgr) tab10 = ItTable(light_5_prop, self.cfg_mgr) tab11 = ItTable(fake_ben_tbl_prop, self.cfg_mgr) all_tables = [tab1, tab2, tab3, tab4, tab5, tab6, tab7, tab8, tab9, tab10, tab11] workflows_chunks = [ [tab1, 'tab1_heavy'], [tab2, 'tab2_heavy'], [tab3, 'tab3_heavy'], [tab4, 'tab4_medium'], [tab5, 'tab5_medium'], [tab6, 'tab6_medium'], [tab7, 'tab7_small'], [tab8, 'tab8_small'], [tab9, 'tab9_small'], [tab10, 'tab10_small'], [tab11, 'tab11_small']] gen_files = self.driver.gen_schedule_subworkflows( sub_wf_file_name, workflows_chunks, all_tables, 'FAKED001') self.assertEquals(len(gen_files), 4) expected_file = os.path.join( BASE_DIR, 'test_resources/subwf_mixed.xml') test_file = os.path.join(self.cfg_mgr.files, 'test_subwf.xml') self.assertTrue(self.files_equal(expected_file, test_file)) def test_determine_auto_domain(self): tab1 = ItTable(full_ingest_tbl_auto_domain, self.cfg_mgr) self.driver.add_env_to_domain(tab1) def test_determine_auto_domain_env(self): tab1 = ItTable(full_ingest_tbl_auto_domain_env, self.cfg_mgr) self.driver.add_env_to_domain(tab1) @patch('ibis.driver.driver.RequestInventory.get_available_requests', autospec=True) @patch('ibis.driver.driver.Driver.gen_prod_workflow', autospec=True) @patch('ibis.driver.driver.Driver.update_it_table', autospec=True) def test_gen_prod_workflow_tables(self, m_s_it_file, gen_prod_workflow, 
mock_get_available_requests): mock_get_available_requests.return_value = \ ([ItTable(mock_table_mapping_val, self.cfg_mgr)], [], []) gen_prod_workflow.return_value = (None, None, None) file_h = open( os.path.join(BASE_DIR, 'test_resources/request_test_valid.txt'), 'r') self.assertTrue(self.driver.gen_prod_workflow_tables(file_h)) @patch('ibis.driver.driver.subprocess', autospec=True) @patch.object(Inventory, 'get_table_mapping', return_value=fake_fct_tbl_prop) def test_retrieve_backup(self, mock_get_table_mapping, mock_subprocess): """test retrieve_backup""" tbl = fake_fct_tbl_prop msg = self.driver.retrieve_backup(tbl['source_database_name'], tbl['source_table_name']) self.assertEquals(msg, 'Retrieving backup for fake_database_fake_fct_tablename\n') @patch('ibis.inventory.it_inventory.ITInventory.update', autospec=True) @patch('ibis.driver.driver.RequestInventory.get_available_requests', autospec=True) @patch('ibis.driver.driver.Driver.gen_prod_workflow', autospec=True) @patch('ibis.driver.driver.Driver.update_it_table', autospec=True) def test_gen_prod_workflow_tables_noapp(self, m_s_it_file, gen_prod_workflow, mock_get_available_requests, mock_update): mock_get_available_requests.return_value = \ ([ItTable(mock_table_mapping_val_app, self.cfg_mgr)], [], []) gen_prod_workflow.return_value = (None, None, None) file_h = open( os.path.join(BASE_DIR, 'test_resources/request_test_valid.txt'), 'r') mock_update.return_value = (True, 'Update Success') self.assertTrue(self.driver.gen_prod_workflow_tables(file_h)) @patch('ibis.driver.driver.Driver.update_it_table_export') @patch('ibis.driver.driver.RequestInventory.get_available_requests_export', autospec=True) def test_export_request(self, mock_get_available_requests, mock_upate): mock_get_available_requests.return_value = \ ([ItTableExport(heavy_3_prop_exp, self.cfg_mgr)], []) request_file = open( os.path.join(BASE_DIR, 'test_resources/export_request.txt'), 'r') status, _ = self.driver.export_request(request_file, False) 
self.assertTrue(status) def test_export_oracle(self): self.driver.export_oracle("source_table_name", "source_database_name", "source_dir", "jdbc_url", "update_key", "target_table_name", "target_database_name", "user_name", "password_alias") def test_export_teradata(self): self.driver.export_teradata("source_table_name", "source_database_name", "source_dir", "jdbc_url", "target_table_name", "target_database_name", "user_name", "password_alias") @patch('subprocess.call') def test_retrieve_backup_notarget(self, mock_call): """ Given arguments for retrieve_back up expects print statement """ self.driver.it_inventory.get_table_mapping = MagicMock( spec=ITInventory.get_table_mapping) self.driver.it_inventory.get_table_mapping.return_value = \ mock_table_mapping_val self.driver.utilities.run_subprocess = \ MagicMock(spec=Utilities.run_subprocess) self.driver.utilities.run_subprocess.side_effect = [0, 1, 1, 1, 1, 1] msg = self.driver.retrieve_backup("db_name", "table_name") self.assertEqual(msg, "Retrieving backup for " + "db_name_table_name\nTarget directory doesn't " + "exist in it_table.\n") @patch('subprocess.call') def test_retrieve_backup_iftarget(self, mock_popen): """ Given arguments for retrieve_back up expects print statement """ self.driver.it_inventory.get_table_mapping = MagicMock( spec=ITInventory.get_table_mapping) self.driver.it_inventory.get_table_mapping.return_value = \ heavy_3_prop_exp self.driver.utilities.run_subprocess = \ MagicMock(spec=Utilities.run_subprocess) self.driver.utilities.run_subprocess.side_effect = [0, 1, 1, 1, 1, 1] msg = self.driver.retrieve_backup("db_name", "table_name") self.assertEqual(msg, "Retrieving backup for db_name_table_name\n" + "Failed to copy parquet_live.hql\nFailed to copy " + "avro_parquet.hql\nFailed to copy files to live.\n") @patch('subprocess.call') def test_retrieve_backup_iftable(self, mock_popen): """ Given arguments for retrieve_back up expects print statement """ self.driver.it_inventory.get_table_mapping = 
MagicMock( spec=ITInventory.get_table_mapping) self.driver.it_inventory.get_table_mapping.return_value = {} msg = self.driver.retrieve_backup("db_name", "table_name") self.assertEqual(msg, "Retrieving backup for db_name_table_name\n" + "db_name_{} does not exist in it_table. Directory " + "can't be found\n") @patch('getpass.getuser') def test_get_config_workflow_name(self, m_getuser): """test _get_config_workflow_name""" m_getuser.return_value = 'userId' mock_tmp = MagicMock() mock_tmp.name = '/path/to/requestFile.txt' val = self.driver._get_config_workflow_name(mock_tmp) self.assertEquals(val, 'userId_config_wf_requestFile') @patch('getpass.getuser') def test_get_workflow_name(self, m_getuser): """test _get_workflow_name""" m_getuser.return_value = 'userId' mock_tmp = MagicMock() mock_tmp.name = '/path/to/requestFile.txt' val = self.driver._get_workflow_name(mock_tmp) self.assertEquals(val, 'dev_userId_requestFile') @patch('getpass.getuser') def test_get_subworkflow_name_prefix(self, m_getuser): """test _get_subworkflow_name_prefix""" m_getuser.return_value = 'userId' mock_tmp = MagicMock() mock_tmp.name = '/path/to/requestFile.txt' val = self.driver._get_subworkflow_name_prefix(mock_tmp) self.assertEquals(val, 'userId_requestFile') @patch('getpass.getuser') def test_get_workflow_name_table(self, m_getuser): """test _get_workflow_name_table""" m_getuser.return_value = 'userId' tab1 = ItTable(fake_cens_tbl_prop, self.cfg_mgr) val = self.driver._get_workflow_name_table(tab1) self.assertEquals(val, 'dev_userId_fake_database_fake_cens_tablename') @patch('getpass.getuser') def test_get_workflow_name_table_export(self, m_getuser): """test _get_workflow_name_table_export""" m_getuser.return_value = 'userId' tab1 = ItTable(fake_cens_tbl_prop, self.cfg_mgr) val = self.driver._get_workflow_name_table_export(tab1) self.assertEquals(val, 'dev_userId_fake_database_fake_cens_tablename_export') def test_get_prod_table_workflow_name(self): """test _get_prod_table_workflow_name""" 
tab1 = ItTable(fake_cens_tbl_prop, self.cfg_mgr) val = self.driver._get_prod_table_workflow_name(tab1) self.assertEquals(val, 'fake_database_fake_cens_tablename') @patch('getpass.getuser') def test_get_incr_workflow_name(self, m_getuser): """test _get_incr_workflow_name""" m_getuser.return_value = 'userId' tab1 = ItTable(fake_cens_tbl_prop, self.cfg_mgr) val = self.driver._get_incr_workflow_name(tab1) self.assertEquals(val, 'dev_userId_incr_fake_cens_tablename') @patch.object(PerfInventory, 'insert_freq_ingest') def test_insert_freq_ingest_driver(self, m_freq_ingest): """ test freq_ingest_driver""" self.driver.insert_freq_ingest_driver(['mock_team_nm'], ['daily'], ['mock_table_nm'], ['no']) m_freq_ingest.assert_called_once_with(['mock_team_nm'], ['daily'], ['mock_table_nm'], ['no']) @patch.object(PerfInventory, 'insert_freq_ingest') def test_insert_freq_ingest_driver_fa(self, m_freq_ingest): """ test freq_ingest_driver for frequency and actovor are none""" with self.assertRaises(ValueError) as context: self.driver.insert_freq_ingest_driver(['mock_team_nm'], None, ['mock_table_nm'], None) error = "Either of frequency or activate column must contain value" self.assertTrue(error in str(context.exception)) @patch.object(PerfInventory, 'wipe_perf_env') def test_wipe_perf_env_driver(self, m_wipe_perf): """test wipe_perf_driver with team name""" self.driver.wipe_perf_env_driver(['fake_view_im'], False) m_wipe_perf.assert_called_once_with('fake_view_im', False) @patch.object(PerfInventory, 'wipe_perf_env', autospec=True) def test_wipe_perf_env_driver_ibis(self, m_wipe_perf): """test wipe_perf_driver with domain as ibis""" with self.assertRaises(ValueError) as context: self.driver.wipe_perf_env_driver(['ibis'], False) self.assertTrue('Cannot wipe Ibis database' in str(context.exception)) @patch.object(PerfInventory, 'wipe_perf_env', autospec=True) def test_wipe_perf_env_driver_domain(self, m_wipe_perf): """test wipe_perf_driver with team name as domain""" with 
self.assertRaises(ValueError) as context: self.driver.wipe_perf_env_driver(['domain1'], False) self.assertTrue('Team name provided is Domain, please \ provide your team name' in str(context.exception)) if __name__ == "__main__": unittest.main()
49.842105
103
0.635811
5,951
50,191
5.021341
0.074777
0.041162
0.042099
0.030922
0.798507
0.764206
0.728298
0.700823
0.668262
0.632856
0
0.009397
0.257895
50,191
1,006
104
49.89165
0.792869
0.002152
0
0.543632
0
0
0.206584
0.105498
0
0
0
0
0.106132
0
null
null
0.016509
0.028302
null
null
0.008255
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
5
b1e9fff1193157930749bf5098beb644524c8007
2,825
py
Python
tests/test_sample_file.py
singer-io/singer-encodings
51256b6d2aa0dc492264a8f8094f30b3f7a26259
[ "Apache-2.0" ]
2
2021-10-06T00:14:40.000Z
2021-11-27T18:39:06.000Z
tests/test_sample_file.py
singer-io/singer-encodings
51256b6d2aa0dc492264a8f8094f30b3f7a26259
[ "Apache-2.0" ]
3
2019-10-31T15:51:49.000Z
2021-05-24T05:08:41.000Z
tests/test_sample_file.py
singer-io/singer-encodings
51256b6d2aa0dc492264a8f8094f30b3f7a26259
[ "Apache-2.0" ]
7
2020-05-27T13:22:59.000Z
2021-07-12T10:38:49.000Z
import unittest from unittest import mock from singer_encodings import json_schema from singer_encodings import csv import csv as _csv import io class Connection: def get_file_handle(self, f): if f.get("raise_error"): raise OSError("OSError") elif f.get("raise_permission_error"): raise PermissionError("Permission denied") else: return mock.mock_open() @mock.patch("singer_encodings.csv.get_row_iterators") class TestSampleFile(unittest.TestCase): def test_positive(self, mocked_csv_row_iterator): mocked_csv_row_iterator.return_value = [_csv.DictReader(io.StringIO("header\nvalue"))] conn = Connection() empty_file, samples = json_schema.sample_file(conn, {"table_name": "data", "key_properties": ["id"], "delimiter": ","}, {"filepath": "/root_dir/file.csv.gz", "last_modified": "2020-01-01"}, 1, 1000) # check if "csv.get_row_iterators" is called if it is called then error has not occurred # if it is not called then error has occured and function returned from the except block self.assertEquals(1, mocked_csv_row_iterator.call_count) # test if file is empty self.assertEquals(False, empty_file) # test if samples is not an empty list self.assertNotEquals([], samples) def test_negative_OSError(self, mocked_csv_row_iterator): conn = Connection() empty_file, samples = json_schema.sample_file(conn, {"table_name": "data", "key_properties": ["id"], "delimiter": ","}, {"filepath": "/root_dir/file.csv.gz", "last_modified": "2020-01-01", "raise_error": True}, 1, 1000) # check if "csv.get_row_iterators" is called if it is called then error has not occurred # if it is not called then error has occured and function returned from the except block self.assertEquals(0, mocked_csv_row_iterator.call_count) # test if file is empty self.assertEquals(False, empty_file) # test if samples is not an empty list self.assertEquals([], samples) def test_negative_PermisisonError(self, mocked_csv_row_iterator): conn = Connection() empty_file, samples = json_schema.sample_file(conn, {"table_name": "data", 
"key_properties": ["id"], "delimiter": ","}, {"filepath": "/root_dir/file.csv.gz", "last_modified": "2020-01-01", "raise_permission_error": True}, 1, 1000) # check if "csv.get_row_iterators" is called if it is called then error has not occurred # if it is not called then error has occured and function returned from the except block self.assertEquals(0, mocked_csv_row_iterator.call_count) # test if file is empty self.assertEquals(False, empty_file) # test if samples is not an empty list self.assertEquals([], samples)
54.326923
238
0.690265
390
2,825
4.802564
0.233333
0.06834
0.044848
0.074746
0.721837
0.709023
0.709023
0.709023
0.709023
0.709023
0
0.018675
0.203894
2,825
51
239
55.392157
0.81414
0.24708
0
0.285714
0
0
0.197256
0.06859
0
0
0
0
0.257143
1
0.114286
false
0
0.171429
0
0.371429
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
b1fab5ca79bda60c6be92e0cf72089247b6368c1
9,914
py
Python
0000_students_work/2021tro/projection_liumethod_global_localrefine.py
takuya-ki/wrs
f6e1009b94332504042fbde9b39323410394ecde
[ "MIT" ]
23
2021-04-02T09:02:04.000Z
2022-03-22T05:31:03.000Z
0000_students_work/2021tro/projection_liumethod_global_localrefine.py
takuya-ki/wrs
f6e1009b94332504042fbde9b39323410394ecde
[ "MIT" ]
35
2021-04-12T09:41:05.000Z
2022-03-26T13:32:46.000Z
0000_students_work/2021tro/projection_liumethod_global_localrefine.py
takuya-ki/wrs
f6e1009b94332504042fbde9b39323410394ecde
[ "MIT" ]
16
2021-03-30T11:55:45.000Z
2022-03-30T07:10:59.000Z
import numpy as np import modeling.geometric_model as gm import modeling.collision_model as cm import visualization.panda.world as wd import basis.robot_math as rm import math from scipy.spatial import cKDTree import vision.depth_camera.surface.rbf_surface as rbfs base = wd.World(cam_pos=np.array([-.3,-.7,.42]), lookat_pos=np.array([0,0,0])) # gm.gen_frame().attach_to(base) bowl_model = cm.CollisionModel(initor="./objects/bowl.stl") bowl_model.set_rgba([.3,.3,.3,.3]) bowl_model.set_rotmat(rm.rotmat_from_euler(math.pi,0,0)) # bowl_model.attach_to(base) pn_direction = np.array([0, 0, -1]) bowl_samples, bowl_sample_normals = bowl_model.sample_surface(toggle_option='normals', radius=.002) selection = bowl_sample_normals.dot(-pn_direction)>.1 bowl_samples = bowl_samples[selection] bowl_sample_normals=bowl_sample_normals[selection] tree = cKDTree(bowl_samples) surface = rbfs.RBFSurface(bowl_samples[:, :2], bowl_samples[:,2]) surface.get_gometricmodel(rgba=[.3,.3,.3,.3]).attach_to(base) pt_direction = rm.orthogonal_vector(pn_direction, toggle_unit=True) tmp_direction = np.cross(pn_direction, pt_direction) plane_rotmat = np.column_stack((pt_direction, tmp_direction, pn_direction)) homomat=np.eye(4) homomat[:3,:3] = plane_rotmat homomat[:3,3] = np.array([-.07,-.03,.1]) twod_plane = gm.gen_box(np.array([.2, .2, .001]), homomat=homomat, rgba=[1,1,1,.3]) twod_plane.attach_to(base) circle_radius=.05 line_segs = [[homomat[:3,3], homomat[:3,3]+pt_direction*.05], [homomat[:3,3]+pt_direction*.05, homomat[:3,3]+pt_direction*.05+tmp_direction*.05], [homomat[:3,3]+pt_direction*.05+tmp_direction*.05, homomat[:3,3]+tmp_direction*.05], [homomat[:3,3]+tmp_direction*.05, homomat[:3,3]]] # gm.gen_linesegs(line_segs).attach_to(base) for sec in line_segs: gm.gen_stick(spos=sec[0], epos=sec[1], rgba=[0, 0, 0, 1], thickness=.002, type='round').attach_to(base) epos = (line_segs[0][1]-line_segs[0][0])*.7+line_segs[0][0] gm.gen_arrow(spos=line_segs[0][0], epos=epos, 
thickness=0.004).attach_to(base) spt = homomat[:3,3] # gm.gen_stick(spt, spt + pn_direction * 10, rgba=[0,1,0,1]).attach_to(base) # base.run() gm.gen_dasharrow(spt, spt-pn_direction*.07, thickness=.004).attach_to(base) # p0 cpt, cnrml = bowl_model.ray_hit(spt, spt + pn_direction * 10000, option='closest') gm.gen_dashstick(spt, cpt, rgba=[.57,.57,.57,.7], thickness=0.003).attach_to(base) gm.gen_sphere(pos=cpt, radius=.005).attach_to(base) gm.gen_dasharrow(cpt, cpt-pn_direction*.07, thickness=.004).attach_to(base) # p0 gm.gen_dasharrow(cpt, cpt+cnrml*.07, thickness=.004).attach_to(base) # p0 angle = rm.angle_between_vectors(-pn_direction, cnrml) vec = np.cross(-pn_direction, cnrml) rotmat = rm.rotmat_from_axangle(vec, angle) new_plane_homomat = np.eye(4) new_plane_homomat[:3,:3] = rotmat.dot(homomat[:3,:3]) new_plane_homomat[:3,3] = cpt twod_plane = gm.gen_box(np.array([.2, .2, .001]), homomat=new_plane_homomat, rgba=[1,1,1,.3]) twod_plane.attach_to(base) new_line_segs = [[cpt, cpt+rotmat.dot(pt_direction)*.05], [cpt+rotmat.dot(pt_direction)*.05, cpt+rotmat.dot(pt_direction)*.05+rotmat.dot(tmp_direction)*.05], [cpt+rotmat.dot(pt_direction)*.05+rotmat.dot(tmp_direction)*.05, cpt+rotmat.dot(tmp_direction)*.05], [cpt+rotmat.dot(tmp_direction)*.05, cpt]] # gm.gen_linesegs(new_line_segs).attach_to(base) for sec in new_line_segs: gm.gen_stick(spos=sec[0], epos=sec[1], rgba=[0, 0, 0, 1], thickness=.002, type='round').attach_to(base) epos = (new_line_segs[0][1]-new_line_segs[0][0])*.7+new_line_segs[0][0] gm.gen_arrow(spos=new_line_segs[0][0], epos=epos, thickness=0.004).attach_to(base) last_normal = cnrml direction = rotmat.dot(pt_direction) n=3 for tick in range(1, n+1): len = .05/n tmp_cpt = cpt extended_len = 0 for p in np.linspace(0, len, 1000): tmp_t_npt = cpt+direction*p tmp_z_surface = surface.get_zdata(np.array([tmp_t_npt[:2]])) tmp_projected_point = np.array([tmp_t_npt[0], tmp_t_npt[1], tmp_z_surface[0]]) tmp_len = np.linalg.norm(tmp_projected_point - tmp_cpt) 
extended_len += tmp_len tmp_cpt = tmp_projected_point print(tick, extended_len, len) if extended_len>len: break projected_point = tmp_projected_point t_npt = tmp_t_npt domain_grid = np.meshgrid(np.linspace(-.005, .005, 100, endpoint=True), np.linspace(-.005, .005, 100, endpoint=True)) domain_0, domain_1 = domain_grid domain = np.column_stack((domain_0.ravel()+t_npt[0], domain_1.ravel()+t_npt[1])) codomain = surface.get_zdata(domain) vertices = np.column_stack((domain, codomain)) plane_center, plane_normal = rm.fit_plane(vertices) new_normal = plane_normal if pn_direction.dot(new_normal) > .1: new_normal = -new_normal angle = rm.angle_between_vectors(-pn_direction, new_normal) vec = rm.unit_vector(np.cross(-pn_direction, new_normal)) new_rotmat = rm.rotmat_from_axangle(vec, angle) direction = new_rotmat.dot(direction) gm.gen_stick(spos=cpt, epos=projected_point, rgba=[1,.6,0,1], thickness=.002, type='round').attach_to(base) cpt=projected_point # last_normal = new_normal direction = new_rotmat.dot(tmp_direction) for tick in range(1, n+1): len = .05/n tmp_cpt = cpt extended_len = 0 for p in np.linspace(0, len, 1000): tmp_t_npt = cpt+direction*p tmp_z_surface = surface.get_zdata(np.array([tmp_t_npt[:2]])) tmp_projected_point = np.array([tmp_t_npt[0], tmp_t_npt[1], tmp_z_surface[0]]) tmp_len = np.linalg.norm(tmp_projected_point - tmp_cpt) extended_len += tmp_len tmp_cpt = tmp_projected_point print(tick, extended_len, len) if extended_len>len: break projected_point = tmp_projected_point t_npt = tmp_t_npt domain_grid = np.meshgrid(np.linspace(-.005, .005, 100, endpoint=True), np.linspace(-.005, .005, 100, endpoint=True)) domain_0, domain_1 = domain_grid domain = np.column_stack((domain_0.ravel()+t_npt[0], domain_1.ravel()+t_npt[1])) codomain = surface.get_zdata(domain) vertices = np.column_stack((domain, codomain)) plane_center, plane_normal = rm.fit_plane(vertices) new_normal = plane_normal if pn_direction.dot(new_normal) > .1: new_normal = -new_normal angle = 
rm.angle_between_vectors(-pn_direction, new_normal) vec = rm.unit_vector(np.cross(-pn_direction, new_normal)) new_rotmat = rm.rotmat_from_axangle(vec, angle) direction = new_rotmat.dot(tmp_direction) gm.gen_stick(spos=cpt, epos=projected_point, rgba=[1,.6,0,1], thickness=.002, type='round').attach_to(base) cpt=projected_point # last_normal = new_normal direction = new_rotmat.dot(-pt_direction) for tick in range(1, n+1): len = .05/n tmp_cpt = cpt extended_len = 0 for p in np.linspace(0, len, 1000): tmp_t_npt = cpt+direction*p tmp_z_surface = surface.get_zdata(np.array([tmp_t_npt[:2]])) tmp_projected_point = np.array([tmp_t_npt[0], tmp_t_npt[1], tmp_z_surface[0]]) tmp_len = np.linalg.norm(tmp_projected_point - tmp_cpt) extended_len += tmp_len tmp_cpt = tmp_projected_point print(tick, extended_len, len) if extended_len>len: break projected_point = tmp_projected_point t_npt = tmp_t_npt domain_grid = np.meshgrid(np.linspace(-.005, .005, 100, endpoint=True), np.linspace(-.005, .005, 100, endpoint=True)) domain_0, domain_1 = domain_grid domain = np.column_stack((domain_0.ravel()+t_npt[0], domain_1.ravel()+t_npt[1])) codomain = surface.get_zdata(domain) vertices = np.column_stack((domain, codomain)) plane_center, plane_normal = rm.fit_plane(vertices) new_normal = plane_normal if pn_direction.dot(new_normal) > .1: new_normal = -new_normal angle = rm.angle_between_vectors(-pn_direction, new_normal) vec = rm.unit_vector(np.cross(-pn_direction, new_normal)) new_rotmat = rm.rotmat_from_axangle(vec, angle) direction = new_rotmat.dot(-pt_direction) gm.gen_stick(spos=cpt, epos=projected_point, rgba=[1,.6,0,1], thickness=.002, type='round').attach_to(base) cpt=projected_point # last_normal = new_normal direction = new_rotmat.dot(-tmp_direction) for tick in range(1, n+1): len = .05/n tmp_cpt = cpt extended_len = 0 for p in np.linspace(0, len, 1000): tmp_t_npt = cpt+direction*p tmp_z_surface = surface.get_zdata(np.array([tmp_t_npt[:2]])) tmp_projected_point = np.array([tmp_t_npt[0], 
tmp_t_npt[1], tmp_z_surface[0]]) tmp_len = np.linalg.norm(tmp_projected_point - tmp_cpt) extended_len += tmp_len tmp_cpt = tmp_projected_point print(tick, extended_len, len) if extended_len>len: break projected_point = tmp_projected_point t_npt = tmp_t_npt domain_grid = np.meshgrid(np.linspace(-.005, .005, 100, endpoint=True), np.linspace(-.005, .005, 100, endpoint=True)) domain_0, domain_1 = domain_grid domain = np.column_stack((domain_0.ravel()+t_npt[0], domain_1.ravel()+t_npt[1])) codomain = surface.get_zdata(domain) vertices = np.column_stack((domain, codomain)) plane_center, plane_normal = rm.fit_plane(vertices) new_normal = plane_normal if pn_direction.dot(new_normal) > .1: new_normal = -new_normal angle = rm.angle_between_vectors(-pn_direction, new_normal) vec = rm.unit_vector(np.cross(-pn_direction, new_normal)) new_rotmat = rm.rotmat_from_axangle(vec, angle) direction = new_rotmat.dot(-tmp_direction) gm.gen_stick(spos=cpt, epos=projected_point, rgba=[1,.6,0,1], thickness=.002, type='round').attach_to(base) cpt=projected_point # last_normal = new_normal base.run()
46.111628
147
0.694069
1,597
9,914
4.035066
0.097683
0.019863
0.039106
0.026071
0.788175
0.761949
0.761328
0.74612
0.729981
0.717877
0
0.04628
0.160884
9,914
214
148
46.327103
0.728333
0.034497
0
0.694301
0
0
0.006487
0
0
0
0
0
0
1
0
false
0
0.041451
0
0.041451
0.020725
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
594cb284abeb34f3dc359b047113ad375cf81c2f
214
py
Python
src/plexus/utils/logger.py
houseofbigseals/plexus
370a4a7af2dc4df1ce353a6b8c5f1c54c4f3a06a
[ "Plexus" ]
2
2022-02-25T17:46:49.000Z
2022-02-25T17:49:46.000Z
src/plexus/utils/logger.py
houseofbigseals/plexus
370a4a7af2dc4df1ce353a6b8c5f1c54c4f3a06a
[ "Plexus" ]
11
2022-03-09T21:15:44.000Z
2022-03-09T21:37:00.000Z
src/plexus/utils/logger.py
houseofbigseals/plexus
370a4a7af2dc4df1ce353a6b8c5f1c54c4f3a06a
[ "Plexus" ]
null
null
null
from datetime import datetime class PrintLogger(): def __init__(self, name): self.name = name def __call__(self, arg): print("{} | {} : {}".format(datetime.now(), self.name, arg))
23.777778
68
0.579439
24
214
4.833333
0.583333
0.206897
0
0
0
0
0
0
0
0
0
0
0.261682
214
9
68
23.777778
0.734177
0
0
0
0
0
0.056075
0
0
0
0
0
0
1
0.333333
false
0
0.166667
0
0.666667
0.166667
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
5
3ca197bc1347f2df110cdc0a84bc9933cd160740
33
py
Python
samples/src/main/resources/datasets/python/78.py
sritchie/kotlingrad
8165ed1cd77220a5347c58cded4c6f2bcf22ee30
[ "Apache-2.0" ]
11
2020-12-19T01:19:44.000Z
2021-12-25T20:43:33.000Z
src/main/resources/datasets/python/78.py
breandan/katholic
081c39f3acc73ff41f5865563debe78a36e1038f
[ "Apache-2.0" ]
null
null
null
src/main/resources/datasets/python/78.py
breandan/katholic
081c39f3acc73ff41f5865563debe78a36e1038f
[ "Apache-2.0" ]
2
2021-01-25T07:59:20.000Z
2021-08-07T07:13:49.000Z
def test15(x, y): {x.a, y.b}
11
17
0.454545
8
33
1.875
0.75
0
0
0
0
0
0
0
0
0
0
0.083333
0.272727
33
2
18
16.5
0.541667
0
0
0
0
0
0
0
0
0
0
0
0
1
0.5
false
0
0
0
0.5
0
1
1
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
5
3ce371b83b8d1ef63c4a66a5075f2d9fa2184cfd
160
py
Python
old-stuff-for-reference/nightjar-base/nightjar-src/python-src/nightjar/backend/impl/data_store_s3/tests/test_config.py
groboclown/nightjar-mesh
3655307b4a0ad00a0f18db835b3a0d04cb8e9615
[ "MIT" ]
3
2019-12-23T23:46:02.000Z
2020-08-07T23:10:20.000Z
old-stuff-for-reference/nightjar-base/nightjar-src/python-src/nightjar/backend/impl/data_store_s3/tests/test_config.py
groboclown/nightjar-mesh
3655307b4a0ad00a0f18db835b3a0d04cb8e9615
[ "MIT" ]
2
2020-02-07T15:59:15.000Z
2020-08-05T21:55:27.000Z
old-stuff-for-reference/nightjar-base/nightjar-src/python-src/nightjar/backend/impl/data_store_s3/tests/test_config.py
groboclown/nightjar-mesh
3655307b4a0ad00a0f18db835b3a0d04cb8e9615
[ "MIT" ]
1
2020-05-28T00:46:05.000Z
2020-05-28T00:46:05.000Z
"""Test the config module""" import unittest # from .. import config class S3ConfigTest(unittest.TestCase): """Tests for the S3 configuration class."""
16
47
0.70625
19
160
5.947368
0.736842
0
0
0
0
0
0
0
0
0
0
0.015038
0.16875
160
9
48
17.777778
0.834586
0.51875
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
3ce857ee41613daf8cc5e2954e66bfa836c04fc6
57
py
Python
platon_account/__init__.py
awake006/platon-account
81041b0e34ac35ad4bb629dd2ece8aeabbe20468
[ "MIT" ]
null
null
null
platon_account/__init__.py
awake006/platon-account
81041b0e34ac35ad4bb629dd2ece8aeabbe20468
[ "MIT" ]
null
null
null
platon_account/__init__.py
awake006/platon-account
81041b0e34ac35ad4bb629dd2ece8aeabbe20468
[ "MIT" ]
null
null
null
from platon_account.account import Account # noqa: F401
28.5
56
0.807018
8
57
5.625
0.75
0
0
0
0
0
0
0
0
0
0
0.061224
0.140351
57
1
57
57
0.857143
0.175439
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
3cfc7c16ac4fc66a4ff92ed42200fd5dc386cf24
31
py
Python
ABC039/B.py
shimomura314/AtcoderCodes
db1d62a7715f5f1b3c40eceff8d34f0f34839f41
[ "MIT" ]
null
null
null
ABC039/B.py
shimomura314/AtcoderCodes
db1d62a7715f5f1b3c40eceff8d34f0f34839f41
[ "MIT" ]
null
null
null
ABC039/B.py
shimomura314/AtcoderCodes
db1d62a7715f5f1b3c40eceff8d34f0f34839f41
[ "MIT" ]
null
null
null
print(int(int(input())**(1/4)))
31
31
0.580645
6
31
3
0.833333
0
0
0
0
0
0
0
0
0
0
0.064516
0
31
1
31
31
0.516129
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
a721a509ce8bdb945a1089a48b0eac692d8f266b
33
py
Python
test/output/096.py
EliRibble/pyfmt
e84a5531a7c06703eddd9dbc2072b0c8deae8c57
[ "MIT" ]
null
null
null
test/output/096.py
EliRibble/pyfmt
e84a5531a7c06703eddd9dbc2072b0c8deae8c57
[ "MIT" ]
null
null
null
test/output/096.py
EliRibble/pyfmt
e84a5531a7c06703eddd9dbc2072b0c8deae8c57
[ "MIT" ]
null
null
null
print({i: i for i in range(10)})
16.5
32
0.606061
8
33
2.5
0.75
0
0
0
0
0
0
0
0
0
0
0.074074
0.181818
33
1
33
33
0.666667
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
59742bbb92403cbdaba5f5656ba53238695af28d
24
py
Python
tests/modules/ambiguous/pkg1/__init__.py
jouve/coveragepy
6aa3ae906a4d1fab8bbf5ef84e13ad7068ec361a
[ "Apache-2.0" ]
2,254
2015-01-05T01:28:03.000Z
2022-03-29T10:37:10.000Z
tests/modules/ambiguous/pkg1/__init__.py
jouve/coveragepy
6aa3ae906a4d1fab8bbf5ef84e13ad7068ec361a
[ "Apache-2.0" ]
707
2015-02-07T01:32:02.000Z
2022-03-31T18:00:14.000Z
tests/modules/ambiguous/pkg1/__init__.py
jouve/coveragepy
6aa3ae906a4d1fab8bbf5ef84e13ad7068ec361a
[ "Apache-2.0" ]
439
2015-01-16T15:06:08.000Z
2022-03-30T06:19:12.000Z
print("Ambiguous pkg1")
12
23
0.75
3
24
6
1
0
0
0
0
0
0
0
0
0
0
0.045455
0.083333
24
1
24
24
0.772727
0
0
0
0
0
0.583333
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
59c4f80d068cb2ddaeef67729553f8f9f270b91d
101
py
Python
CursoEmVideoPython/aula19.py
miguelabreuss/scripts_python
cf33934731a9d1b731672d4309aaea0a24ae151a
[ "MIT" ]
null
null
null
CursoEmVideoPython/aula19.py
miguelabreuss/scripts_python
cf33934731a9d1b731672d4309aaea0a24ae151a
[ "MIT" ]
1
2020-07-04T16:27:25.000Z
2020-07-04T16:27:25.000Z
CursoEmVideoPython/aula19.py
miguelabreuss/scripts_python
cf33934731a9d1b731672d4309aaea0a24ae151a
[ "MIT" ]
null
null
null
teste = [0, 2, 3, 4, 5] print(teste) teste.insert(0, teste[3]) print(teste) teste.pop(4) print(teste)
16.833333
25
0.663366
20
101
3.35
0.45
0.447761
0.447761
0
0
0
0
0
0
0
0
0.089888
0.118812
101
6
26
16.833333
0.662921
0
0
0.5
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.5
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
5
59e977bb1ef8b3dd72f60c247be6d06701b86f57
34
py
Python
test3.py
khlaiqjd/pyneta
0ee737308144fbf1b27984d561ca9d1ea2d2a680
[ "Apache-2.0" ]
null
null
null
test3.py
khlaiqjd/pyneta
0ee737308144fbf1b27984d561ca9d1ea2d2a680
[ "Apache-2.0" ]
null
null
null
test3.py
khlaiqjd/pyneta
0ee737308144fbf1b27984d561ca9d1ea2d2a680
[ "Apache-2.0" ]
null
null
null
for x in range (10) print(x)
6.8
20
0.558824
7
34
2.714286
0.857143
0
0
0
0
0
0
0
0
0
0
0.086957
0.323529
34
4
21
8.5
0.73913
0
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0
null
null
0.5
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
1
0
5
ab7f7794263bca02df5cd980478b72f9c2b0d7ca
118
py
Python
mentalhacks/music/admin.py
ShubhamPatel33/mental-health-games
bd15fa5bc5627525455e690b679811707b9ddf40
[ "CC0-1.0" ]
null
null
null
mentalhacks/music/admin.py
ShubhamPatel33/mental-health-games
bd15fa5bc5627525455e690b679811707b9ddf40
[ "CC0-1.0" ]
null
null
null
mentalhacks/music/admin.py
ShubhamPatel33/mental-health-games
bd15fa5bc5627525455e690b679811707b9ddf40
[ "CC0-1.0" ]
null
null
null
from django.contrib import admin from .models import Journal # Register your models here. admin.site.register(Journal)
29.5
32
0.822034
17
118
5.705882
0.647059
0
0
0
0
0
0
0
0
0
0
0
0.110169
118
4
33
29.5
0.92381
0.220339
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
ab81afccad793cc2cf0c06f8a0bcdf6dacf47836
5,808
py
Python
medvision/ops/cuda_fun_tools.py
TimothyZero/MedVision
92f5bc3f19f39542995f214818c93e3a870f8477
[ "Apache-2.0" ]
33
2021-06-16T08:49:34.000Z
2022-03-16T01:04:02.000Z
medvision/ops/cuda_fun_tools.py
TimothyZero/MedVision
92f5bc3f19f39542995f214818c93e3a870f8477
[ "Apache-2.0" ]
1
2021-12-04T08:30:24.000Z
2021-12-09T02:30:52.000Z
medvision/ops/cuda_fun_tools.py
TimothyZero/MedVision
92f5bc3f19f39542995f214818c93e3a870f8477
[ "Apache-2.0" ]
3
2021-08-23T05:48:58.000Z
2021-12-16T07:12:23.000Z
# -*- coding:utf-8 -*- import torch from medvision import _C def affine_2d(features, rois, out_size, spatial_scale, sampling_ratio=0, aligned=True, order=1): if isinstance(out_size, int): out_h = out_size out_w = out_size elif isinstance(out_size, tuple): assert len(out_size) == 2 assert isinstance(out_size[0], int) assert isinstance(out_size[1], int) out_h, out_w = out_size else: raise TypeError( '"out_size" must be an integer or tuple of integers') assert features.dtype in [torch.float32, torch.float16], \ f'input must be float16 or float32 nut get {features.dtype}' assert order in [0, 1, 3], f'order {order} is not supported!' if order == 0: # avoid sample average sampling_ratio = 1 batch_size, num_channels, data_height, data_width = features.size() num_rois = rois.size(0) output = features.new_zeros(num_rois, num_channels, out_h, out_w) _C.affine_2d( features, rois.type(features.type()), output, out_h, out_w, spatial_scale, sampling_ratio, aligned, order) return output def affine_3d(features, rois, out_size, spatial_scale, sampling_ratio=0, aligned=True, order=1): # clockwise is not used in 3d if isinstance(out_size, int): out_d = out_size out_h = out_size out_w = out_size elif isinstance(out_size, tuple): assert len(out_size) == 3 assert isinstance(out_size[0], int) assert isinstance(out_size[1], int) assert isinstance(out_size[2], int) out_d, out_h, out_w = out_size else: raise TypeError( '"out_size" must be an integer or tuple of integers') assert features.dtype in [torch.float32, torch.float16], \ f'input must be float16 or float32 nut get {features.dtype}' assert order in [0, 1, 3], f'order {order} is not supported!' 
if order == 0: # avoid sample average sampling_ratio = 1 batch_size, num_channels, data_depth, data_height, data_width = features.size() num_rois = rois.size(0) output = features.new_zeros(num_rois, num_channels, out_d, out_h, out_w) _C.affine_3d( features, rois.type(features.type()), output, out_d, out_h, out_w, spatial_scale, sampling_ratio, aligned, order) return output def apply_offset_2d(img, offset, order=1): """ image : b, c, d, h, w offset : b, 2, d, h, w """ assert img.shape[2:] == offset.shape[2:] assert offset.shape[1] == 2 channels = img.shape[1] kernel_size = [1, 1] stride = [1, 1] padding = [0, 0] dilation = [1, 1] group = 1 deformable_groups = 1 im2col_step = 64 weight = torch.eye(channels, channels).unsqueeze(-1).unsqueeze(-1).cuda() bias = torch.zeros(channels).cuda() offset = offset.cuda() if img.dtype == torch.float16: offset = offset.half() bias = bias.half() weight = weight.half() output = _C.deform_2d(img.contiguous(), weight.contiguous(), bias.contiguous(), offset.contiguous(), kernel_size[0], kernel_size[1], stride[0], stride[1], padding[0], padding[1], dilation[0], dilation[1], group, deformable_groups, im2col_step, order) assert img.shape == output.shape return output def apply_offset_3d(img, offset, order=1): assert img.shape[2:] == offset.shape[2:] assert offset.shape[1] == 3 channels = img.shape[1] kernel_size = [1, 1, 1] stride = [1, 1, 1] padding = [0, 0, 0] dilation = [1, 1, 1] group = 1 deformable_groups = 1 im2col_step = 64 weight = torch.eye(channels, channels).unsqueeze(-1).unsqueeze(-1).unsqueeze(-1).cuda() bias = torch.zeros(channels).cuda() offset = offset.cuda() if img.dtype == torch.float16: offset = offset.half() bias = bias.half() weight = weight.half() output = _C.deform_3d(img.contiguous(), weight.contiguous(), bias.contiguous(), offset.contiguous(), kernel_size[0], kernel_size[1], kernel_size[2], stride[0], stride[1], stride[2], padding[0], padding[1], padding[2], dilation[0], dilation[1], dilation[2], group, 
deformable_groups, im2col_step, order) assert img.shape == output.shape, f"input is {img.shape}, out is {output.shape}" return output def random_noise_2d(img, method, mean=0., std=1., inplace=False): """ method: 0 : uniform , U[-0.5, 0.5] 1 : normal, N(0, 1) mean: std * gen_noise + mean """ if not inplace: out = img.clone() _C.noise_2d(out, method, mean, std) return out else: _C.noise_2d(img, method, mean, std) return img def random_noise_3d(img, method, mean=0., std=1., inplace=False): if not inplace: out = img.clone() _C.noise_3d(out, method, mean, std) return out else: _C.noise_3d(img, method, mean, std) return img
29.784615
91
0.534435
717
5,808
4.172943
0.157601
0.051471
0.051136
0.016043
0.871324
0.818516
0.775067
0.751337
0.692513
0.669118
0
0.03763
0.354855
5,808
194
92
29.938144
0.760875
0.039945
0
0.691824
0
0
0.057769
0
0
0
0
0
0.106918
1
0.037736
false
0
0.012579
0
0.100629
0
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
abd46f3f796e0232106b0945fe70af10c2a75fcd
117
py
Python
app/src/services/__init__.py
beerjoa/flask-restplus-skeleton
efa221a00620746c9b7227d3840b2a43d788db1b
[ "Apache-2.0" ]
1
2021-02-03T03:30:00.000Z
2021-02-03T03:30:00.000Z
app/src/services/__init__.py
beerjoa/flask-restplus-skeleton
efa221a00620746c9b7227d3840b2a43d788db1b
[ "Apache-2.0" ]
null
null
null
app/src/services/__init__.py
beerjoa/flask-restplus-skeleton
efa221a00620746c9b7227d3840b2a43d788db1b
[ "Apache-2.0" ]
null
null
null
from .calculation import ( create_calc, update_calc, delete_calc, select_calc, select_calc_list )
16.714286
26
0.692308
14
117
5.357143
0.642857
0.266667
0.373333
0
0
0
0
0
0
0
0
0
0.247863
117
7
27
16.714286
0.852273
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.142857
0
0.142857
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
f9ec00be1f12d6ec54ce82bc2487a372d6d7349c
215
py
Python
pyPowerCLI/__init__.py
arielsafari/pyPowerCLI
2dd2143199d32a0cc6256b9be9774a49c06e9d98
[ "MIT" ]
null
null
null
pyPowerCLI/__init__.py
arielsafari/pyPowerCLI
2dd2143199d32a0cc6256b9be9774a49c06e9d98
[ "MIT" ]
null
null
null
pyPowerCLI/__init__.py
arielsafari/pyPowerCLI
2dd2143199d32a0cc6256b9be9774a49c06e9d98
[ "MIT" ]
1
2020-10-13T12:14:04.000Z
2020-10-13T12:14:04.000Z
from .install import is_installed from .install import install_powercli from .install import uninstall_powercli from .powershell import Powershell from .powershell import PowershellException __version__ = "0.0.1"
23.888889
43
0.837209
27
215
6.407407
0.444444
0.190751
0.294798
0
0
0
0
0
0
0
0
0.015789
0.116279
215
9
44
23.888889
0.894737
0
0
0
0
0
0.023256
0
0
0
0
0
0
1
0
false
0
0.833333
0
0.833333
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
e6064ab9f50b5bb33d70c208b56497aceb510bf2
150
py
Python
dtc/enums/unbundled_trade_indicator_enum.py
jseparovic/python-ws-dtc-client
fd3952cdaf7ab8c9d5a26ccf53b5e9acb3a9ea0f
[ "Apache-2.0" ]
15
2020-04-26T05:25:53.000Z
2022-02-11T19:38:42.000Z
dtc/enums/unbundled_trade_indicator_enum.py
jseparovic/python-ws-dtc-client
fd3952cdaf7ab8c9d5a26ccf53b5e9acb3a9ea0f
[ "Apache-2.0" ]
2
2021-01-08T19:58:08.000Z
2021-11-29T06:08:48.000Z
dtc/enums/unbundled_trade_indicator_enum.py
jseparovic/python-ws-dtc-client
fd3952cdaf7ab8c9d5a26ccf53b5e9acb3a9ea0f
[ "Apache-2.0" ]
4
2020-11-23T13:38:01.000Z
2021-12-27T13:21:06.000Z
class UnbundledTradeIndicatorEnum: UNBUNDLED_TRADE_NONE = 0 FIRST_SUB_TRADE_OF_UNBUNDLED_TRADE = 1 LAST_SUB_TRADE_OF_UNBUNDLED_TRADE = 2
25
42
0.813333
20
150
5.5
0.6
0.381818
0.181818
0.345455
0.436364
0
0
0
0
0
0
0.02381
0.16
150
5
43
30
0.849206
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
1
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
e647ab50e8c80fcfb4dd71878f67f512b7143656
138
py
Python
osf/files/addons-github-local.py
sifulan-access-federation/helm-charts
ea6a36cedc0e5743d6b04440816c9dd8071a23e2
[ "Apache-2.0" ]
null
null
null
osf/files/addons-github-local.py
sifulan-access-federation/helm-charts
ea6a36cedc0e5743d6b04440816c9dd8071a23e2
[ "Apache-2.0" ]
null
null
null
osf/files/addons-github-local.py
sifulan-access-federation/helm-charts
ea6a36cedc0e5743d6b04440816c9dd8071a23e2
[ "Apache-2.0" ]
null
null
null
import os # GitHub application credentials CLIENT_ID = os.environ['GITHUB_CLIENT_ID'] CLIENT_SECRET = os.environ['GITHUB_CLIENT_SECRET']
23
50
0.811594
19
138
5.578947
0.473684
0.150943
0.283019
0.396226
0
0
0
0
0
0
0
0
0.094203
138
5
51
27.6
0.848
0.217391
0
0
0
0
0.339623
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
5
0515ec51b6d64c74e28390fb0f0e6e1a807c4f68
267
py
Python
import_filter.py
jon2718/ipycool_2.0
34cf74ee99f4a725b997c50a7742ba788ac2dacd
[ "MIT" ]
null
null
null
import_filter.py
jon2718/ipycool_2.0
34cf74ee99f4a725b997c50a7742ba788ac2dacd
[ "MIT" ]
null
null
null
import_filter.py
jon2718/ipycool_2.0
34cf74ee99f4a725b997c50a7742ba788ac2dacd
[ "MIT" ]
null
null
null
from icoolobject import * from regularregioncontainer import * from regularregion import * from hard_edge_transport import * from icool_composite import * from container import * from cell import * from field import * from sregion import * from hard_edge_sol import *
26.7
36
0.816479
35
267
6.085714
0.428571
0.422535
0.131455
0.169014
0
0
0
0
0
0
0
0
0.146067
267
10
37
26.7
0.934211
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
55bcd00c2a99ae5705ec0b74e7d09173d52428fe
153
py
Python
exercicios/exercicio027.py
NicoCassio/cursoemvideo-python
2686ff74f4d45bdb0dc194f49f4dd19aae629d52
[ "MIT" ]
null
null
null
exercicios/exercicio027.py
NicoCassio/cursoemvideo-python
2686ff74f4d45bdb0dc194f49f4dd19aae629d52
[ "MIT" ]
null
null
null
exercicios/exercicio027.py
NicoCassio/cursoemvideo-python
2686ff74f4d45bdb0dc194f49f4dd19aae629d52
[ "MIT" ]
null
null
null
n = str(input('Nome Completo: ')).strip() print('Primeiro nome: {}'.format(n.split()[0])) print('Último nome: {}'.format(n.split()[len(n.split()) - 1]))
38.25
62
0.607843
23
153
4.043478
0.608696
0.193548
0.236559
0.344086
0
0
0
0
0
0
0
0.014286
0.084967
153
3
63
51
0.65
0
0
0
0
0
0.30719
0
0
0
0
0
0
1
0
false
0
0
0
0
0.666667
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
5
e99b58a5e9aa2ed895836703ecfa90a2e8eed06f
96
py
Python
enthought/blocks/ast_25/ast.py
enthought/etsproxy
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
[ "BSD-3-Clause" ]
3
2016-12-09T06:05:18.000Z
2018-03-01T13:00:29.000Z
enthought/blocks/ast_25/ast.py
enthought/etsproxy
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
[ "BSD-3-Clause" ]
1
2020-12-02T00:51:32.000Z
2020-12-02T08:48:55.000Z
enthought/blocks/ast_25/ast.py
enthought/etsproxy
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
[ "BSD-3-Clause" ]
null
null
null
# proxy module from __future__ import absolute_import from codetools.blocks.ast_25.ast import *
24
41
0.833333
14
96
5.285714
0.714286
0
0
0
0
0
0
0
0
0
0
0.023529
0.114583
96
3
42
32
0.847059
0.125
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
e9b8b9aff145ccaafa1e7b2d2c25110a231c7df3
42,189
py
Python
optimization/first_sdEta_mjj_optimization/loose_analysis_sdeta_2.6_mjj_1250/Output/Histos/MadAnalysis5job_0/selection_7.py
sheride/axion_pheno
7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5
[ "MIT" ]
null
null
null
optimization/first_sdEta_mjj_optimization/loose_analysis_sdeta_2.6_mjj_1250/Output/Histos/MadAnalysis5job_0/selection_7.py
sheride/axion_pheno
7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5
[ "MIT" ]
null
null
null
optimization/first_sdEta_mjj_optimization/loose_analysis_sdeta_2.6_mjj_1250/Output/Histos/MadAnalysis5job_0/selection_7.py
sheride/axion_pheno
7d3fc08f5ae5b17a3500eba19a2e43f87f076ce5
[ "MIT" ]
null
null
null
def selection_7(): # Library import import numpy import matplotlib import matplotlib.pyplot as plt import matplotlib.gridspec as gridspec # Library version matplotlib_version = matplotlib.__version__ numpy_version = numpy.__version__ # Histo binning xBinning = numpy.linspace(0.0,8000.0,161,endpoint=True) # Creating data sequence: middle of each bin xData = numpy.array([25.0,75.0,125.0,175.0,225.0,275.0,325.0,375.0,425.0,475.0,525.0,575.0,625.0,675.0,725.0,775.0,825.0,875.0,925.0,975.0,1025.0,1075.0,1125.0,1175.0,1225.0,1275.0,1325.0,1375.0,1425.0,1475.0,1525.0,1575.0,1625.0,1675.0,1725.0,1775.0,1825.0,1875.0,1925.0,1975.0,2025.0,2075.0,2125.0,2175.0,2225.0,2275.0,2325.0,2375.0,2425.0,2475.0,2525.0,2575.0,2625.0,2675.0,2725.0,2775.0,2825.0,2875.0,2925.0,2975.0,3025.0,3075.0,3125.0,3175.0,3225.0,3275.0,3325.0,3375.0,3425.0,3475.0,3525.0,3575.0,3625.0,3675.0,3725.0,3775.0,3825.0,3875.0,3925.0,3975.0,4025.0,4075.0,4125.0,4175.0,4225.0,4275.0,4325.0,4375.0,4425.0,4475.0,4525.0,4575.0,4625.0,4675.0,4725.0,4775.0,4825.0,4875.0,4925.0,4975.0,5025.0,5075.0,5125.0,5175.0,5225.0,5275.0,5325.0,5375.0,5425.0,5475.0,5525.0,5575.0,5625.0,5675.0,5725.0,5775.0,5825.0,5875.0,5925.0,5975.0,6025.0,6075.0,6125.0,6175.0,6225.0,6275.0,6325.0,6375.0,6425.0,6475.0,6525.0,6575.0,6625.0,6675.0,6725.0,6775.0,6825.0,6875.0,6925.0,6975.0,7025.0,7075.0,7125.0,7175.0,7225.0,7275.0,7325.0,7375.0,7425.0,7475.0,7525.0,7575.0,7625.0,7675.0,7725.0,7775.0,7825.0,7875.0,7925.0,7975.0]) # Creating weights for histo: y8_M_0 y8_M_0_weights = 
numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,91.4454449781,87.8999678868,84.8498903891,82.5039723137,79.5030147757,75.699617896,72.8624202236,68.6373036899,66.2177056749,63.7612676902,59.9824307904,57.4441128728,53.6365959965,51.3930378371,48.7196000304,45.2314428921,41.8988456262,40.3554068924,37.907124901,34.8897833764,32.486557348,30.8652986781,29.1580680787,27.499961439,25.2809712595,23.4386327709,21.6904582051,20.961710803,19.4141480726,18.005781228,16.5032544607,15.6598711526,14.5954080259,13.5227609059,12.4050738229,11.467530592,10.611867294,10.0059397911,9.19122045951,8.46656505402,7.60271376273,7.3611619609,6.72657848151,6.05924302899,5.62527138502,5.15445177128,4.85967601312,4.27831649007,4.06542466472,3.47997154503,3.27526731297,3.24660853648,2.81672968915,2.48510916121,2.25993454595,2.29678131572,2.10026507694,1.82596170198,1.56803431358,1.71132699603,1.43702362106,1.31420092183,1.18319022931,1.05217953679,1.07674431664,0.982579993891,0.814722931601,0.802440541677,0.749217385341,0.695994229006,0.659147459235,0.54041915664,0.454443227174,0.401220070839,0.429878847327,0.446255233892,0.33980900122,0.376655730991,0.30296223145,0.257927308396,0.216986461984,0.257927308396,0.180139692213,0.192421962137,0.188327885496,0.13510476916,0.126916615877,0.122822539236,0.151481115725,0.106446192671,0.10235211603,0.0818816928242,0.110540269313,0.0695994229006,0.0491289996945,0.0655053462594,0.0655053462594,0.0409408464121,0.0409408464121,0.0409408464121,0.0245645038473,0.0245645038473,0.0204704192061,0.00818816928242,0.0122822539236,0.00818816928242,0.00818816928242,0.0204704192061,0.0163763345648,0.00818816928242,0.00818816928242,0.00818816928242,0.00409408464121,0.0122822539236,0.0245645038473,0.00818816928242,0.00409408464121,0.00818816928242,0.0,0.0122822539236,0.00409408464121,0.00818816928242,0.0,0.0,0.0,0.00818816928242,0.0122822539236,0.0122822539236,0.00409408464121,0.0,0.0,0.00409408464121,0.0
,0.0,0.0]) # Creating weights for histo: y8_M_1 y8_M_1_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,21.7137475141,18.7239646678,16.7192889547,16.5966432127,14.1680404358,12.6603321101,11.1051772131,8.6274015883,7.07200236908,6.2684987298,5.56554769663,5.7348269376,5.01689214425,4.75184260394,3.9978722879,3.72927887403,3.57229985426,2.87927270845,3.01340359671,2.50314187709,2.16243496185,2.00495408024,2.0054675574,1.91964838135,1.84664530683,1.48223272919,1.61554573885,1.40909868195,1.28775906248,1.14218308074,1.16635976431,0.984083383055,0.959476532193,0.789857643302,0.814075981805,0.486108266684,0.668383846883,0.631731510754,0.558847393114,0.570662574662,0.522333239217,0.449351392694,0.42502851633,0.473857310448,0.255485807905,0.303531729591,0.170176263942,0.255210925397,0.255139150742,0.243264610914,0.218714875374,0.206641913873,0.0850845272169,0.121545971602,0.133787235056,0.0970824697852,0.133591176499,0.145906938199,0.13367440626,0.0606758577112,0.109388458597,0.0363835496599,0.048696419546,0.0486149521089,0.0121471633567,0.0364699956626,0.0243169325254,0.0242461191386,0.0242698063767,0.0,0.0,0.0485277971713,0.0243416250893,0.0121530591319,0.0,0.0,0.0607590474196,0.0,0.0,0.0121471633567,0.0121205322363,0.0121471633567,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0121625276187,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0121863350153,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0]) # Creating weights for histo: y8_M_2 y8_M_2_weights = 
numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,83.1212068927,76.824607181,69.3982532793,63.5827525776,60.2910899273,52.8836609889,50.9139359431,47.399801673,43.4944413076,39.3364905327,35.1412683237,31.092503843,27.0989523227,24.6183441763,19.7977841602,18.2718495015,17.7103111337,15.9945442186,13.3938369334,12.9606370879,11.80666473,11.2243295631,10.6323253102,9.83944372954,8.72532122868,8.20258316753,6.84748968868,6.59617196225,6.12477922703,5.13044182625,4.75881009248,4.79937480072,3.93553907034,3.32309007225,3.01214590225,3.12250281705,2.84131009412,2.42938141141,2.44955013311,2.23888595482,2.07849647845,1.73749963217,1.80718068555,1.64704326656,1.47577813448,1.47551987898,1.48584844623,1.30533570113,0.893687587195,1.10450795841,1.10456126235,0.793184049524,0.893564037762,0.873427546346,0.692699932667,0.672961361328,0.652713303535,0.512095455212,0.552208113029,0.471795613806,0.532079059371,0.462098016395,0.461943889511,0.431856916895,0.391730664509,0.341453896541,0.361380973736,0.361467169092,0.250996126019,0.291145642062,0.230908930413,0.210959994473,0.331238589819,0.21085284943,0.210896856167,0.18076897588,0.1204829271,0.0802791567416,0.160568147853,0.120489662404,0.0802927926321,0.140577932353,0.130570139092,0.100405235296,0.0903320724033,0.0702892967434,0.100369947264,0.0200759397984,0.0703052466032,0.0702111589586,0.0301292356122,0.060251132636,0.0200667830914,0.0602763383731,0.0602229931165,0.0501859446729,0.0301486977468,0.0200792413368,0.0501889610972,0.0401947210856,0.0200842659558,0.0200816792687,0.0,0.0100330444503,0.0200896996516,0.0100458580553,0.0,0.0,0.0,0.0201028396916,0.0,0.0401787051124,0.0100533743234,0.0100697952413,0.0,0.0301234630852,0.0100154872082,0.0100704150545,0.0,0.0100547792334,0.0,0.0100272843196,0.0,0.0,0.0,0.0,0.0,0.0100154872082,0.0,0.0,0.0200556388499,0.0,0.0,0.0,0.0]) # Creating weights for histo: y8_M_3 y8_M_3_weights = 
numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,79.5904910734,72.9289470262,66.3715664121,63.6979705878,59.3818100499,54.1827384781,50.1364116073,47.6928772189,45.4168818312,42.7770863096,38.757291864,35.916498292,33.2511169163,31.5063144119,29.3443909835,27.7343753114,25.9300038359,24.4113099194,23.1007681799,21.6164230086,19.7511622376,18.4819527434,17.3697602807,16.1376579939,15.3015189415,14.635608289,13.3114692403,12.8260083291,11.5174978579,10.917916237,9.71379670085,9.44371521904,8.48753227884,8.06977759921,7.64061937994,6.75987367797,5.85864873006,5.45108289171,4.86207199052,4.77379713728,4.62064357788,3.87219152971,3.5809714523,3.12942012618,2.57427655962,2.44782158312,2.25500019963,2.12876906889,2.08446507828,1.78208968219,1.78223512099,1.56755891673,1.38052136631,1.33634697061,1.15493684559,1.2320726303,1.14386968395,1.0834853618,0.940446299243,0.945851097658,0.753683376276,0.670950228926,0.654483550172,0.759017080303,0.599515401534,0.659978536895,0.57205590556,0.4839853979,0.467600376128,0.423597419462,0.467476062514,0.401526674884,0.429049749552,0.341017714112,0.36312693091,0.30244734361,0.291602280838,0.24201118349,0.214574640847,0.231050094679,0.18689828668,0.14298072453,0.148493383285,0.154001004496,0.120985218125,0.0880644544807,0.142968983799,0.120993383823,0.15407563329,0.060503760725,0.0990176213459,0.0384790081212,0.0880574669181,0.0604752823449,0.0660378696729,0.0495180935686,0.0604598040813,0.0384823190884,0.054997886405,0.0385418718093,0.0495465719487,0.0440156317792,0.0275110901863,0.0219811737759,0.0219859106934,0.0164965302711,0.0550405024117,0.0110186183891,0.033022172678,0.0274857318342,0.0219984314305,0.0385002145612,0.0164806782541,0.0109726954777,0.0109825796287,0.0165265077271,0.0110184762003,0.00550967279154,0.0110136824074,0.0219944623325,0.0,0.0109726954777,0.0109987038357,0.00551624597542,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0109941131696,0.0,0.0,0.0]) # Creating weights 
for histo: y8_M_4 y8_M_4_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,21.7282444078,20.1516136663,18.8290934357,17.8351540167,16.4726589592,15.612845446,14.7721675814,13.6774273055,12.9720457716,12.394805744,11.3105111112,10.8710766133,10.2453200605,9.57743221272,8.85698746827,8.40210095419,7.91957316805,7.40543187947,6.89525079635,6.5488049688,6.28830121118,5.7819119847,5.4492988181,5.23022490213,4.88673319545,4.66485747421,4.33517838695,4.03515674922,3.76679150279,3.58215975017,3.36709494097,3.15313401901,3.01894999289,2.68122343145,2.43659256208,2.22630525176,2.11786536688,1.93916590583,1.86218760862,1.8029468632,1.54545614854,1.55134314626,1.48226802424,1.33125472827,1.26518583509,1.16351116758,1.11127253036,1.06380938786,0.995836549747,0.94338787734,0.861497803045,0.764815479561,0.751938398554,0.643414339273,0.654317730842,0.563566897264,0.520056344429,0.505239644578,0.467712689616,0.421415162236,0.400638794093,0.397679302096,0.375968838529,0.333550348422,0.287169648711,0.285202292395,0.267429547689,0.24276279877,0.196381056899,0.204312771223,0.197381088942,0.175636033702,0.167760475733,0.17169763343,0.138171407885,0.14013547739,0.138168642154,0.120370003797,0.114477274201,0.100669224248,0.087808136379,0.0779597707988,0.0917720695547,0.0818955656725,0.0592263556062,0.0730177710924,0.0582057208752,0.0631581421748,0.0601987303436,0.0493430776884,0.0384867837044,0.0355326267611,0.0355319493576,0.0306009564571,0.0197422855481,0.025641252067,0.0256502907951,0.0256554254339,0.0197391510535,0.0207323930273,0.0187556773192,0.00986133244729,0.0108594445118,0.0147995282721,0.010860470638,0.00789214433616,0.016774283919,0.0128345488813,0.00790000863079,0.00690554011558,0.0128299152806,0.0128322922055,0.0108522696457,0.00789609251669,0.00888518188644,0.0029547950654,0.000986294378594,0.00887319705415,0.00197195865162,0.00296165167212,0.000986370536391,0.0,0.00691084711155,0.00296223287637,0.0,0.0
049384363231,0.0029612532466,0.0,0.0,0.0,0.0,0.000986798222547,0.00295688900399,0.0,0.000985639421539]) # Creating weights for histo: y8_M_5 y8_M_5_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,2.91855460371,3.6174640517,4.20693917985,4.56541470639,4.77335347739,4.95461373448,4.9149800574,4.69500234693,4.36724632096,4.20386242862,3.93484074342,3.69797931057,3.60169940255,3.4032913586,3.16055009011,2.97640872956,2.79858531162,2.6523736121,2.49562453974,2.35541030458,2.20942506036,2.07029669032,2.01006998518,1.88732481532,1.77227032324,1.63877772926,1.56393105445,1.46436154324,1.39673903231,1.29437043766,1.22208358787,1.15098302773,1.08189695945,0.99338774876,0.9523469281,0.900648705295,0.855530489086,0.799559222961,0.744875071102,0.710885572116,0.664713498594,0.630227478376,0.564645865757,0.546731891824,0.523590241484,0.491037893397,0.448725162184,0.424027131833,0.395258587613,0.364749698464,0.369288486669,0.306029641185,0.295952260659,0.280313802323,0.271504891511,0.252317646691,0.245028947059,0.214254593093,0.201663398786,0.189805143437,0.182005879143,0.177967573137,0.158066874118,0.14847825293,0.145944114186,0.123512677254,0.117725144151,0.116209294036,0.104608501549,0.0899986943717,0.0932659721208,0.0857044058644,0.0801640931238,0.0703269312506,0.0710848763129,0.0564613057747,0.0599973291416,0.0549529774964,0.0456265803331,0.0461230615556,0.0418454571219,0.0342865035034,0.0342827305822,0.0332722838682,0.0312525187159,0.023946054746,0.0254629411139,0.0211764505105,0.0181537564798,0.0191579776737,0.0138627167657,0.0184041696215,0.0168915002832,0.0136135519288,0.0131082445513,0.0108440477177,0.00907850863798,0.0105871609954,0.0113437697314,0.00831828301883,0.00932457271778,0.00730944869859,0.00806695365345,0.00655363615696,0.00554359354169,0.00680535361711,0.00378330654434,0.00579944801195,0.00478946941231,0.00605198967334,0.00327663483374,0.00277243172588,0.00201775066051,0.00201705449054,
0.00226745042802,0.00226795335081,0.00126151601582,0.0017645552395,0.000252039498778,0.00201699607627,0.00226813219448,0.00100769764271,0.000504526786889,0.000252008691256,0.000252077027941,0.000251635560151,0.000252211300725,0.000755091165496,0.000252349254409,0.00151264973344,0.000252440476682,0.0,0.000252177412451,0.000504245118116,0.0]) # Creating weights for histo: y8_M_6 y8_M_6_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0675709162443,0.0738641563791,0.081865323916,0.0950551519078,0.115955424904,0.1540528524,0.24905860087,0.54450075124,0.769578092043,0.97135674394,1.09050739761,1.21425990848,1.29635138657,1.37854682981,1.37148019896,1.41805758348,1.42397559941,1.40954643692,1.37937555198,1.37231391945,1.30825039732,1.27768864342,1.21645717182,1.20028959215,1.12467694197,1.05503928862,0.977390221163,0.937176703105,0.872525777908,0.838325543843,0.792227198727,0.739314738104,0.732858002782,0.663943107119,0.629830643628,0.601862220293,0.57142292534,0.508836807897,0.48620599508,0.47266213578,0.460110543814,0.387985524373,0.38016644579,0.374773153939,0.332379166903,0.317545440043,0.300352504129,0.276511497041,0.271459890637,0.249025811863,0.249635607424,0.219316172286,0.207579807008,0.202504908413,0.182351465036,0.157740616044,0.166048630707,0.14655876486,0.135969914867,0.131374755435,0.118509868504,0.113604912934,0.10992254748,0.0961775356186,0.0838908848285,0.0839287621298,0.0804638837613,0.0807336333254,0.0655627095136,0.0638522229696,0.0586969113341,0.057831601437,0.0463726431555,0.0486606260887,0.0486765707431,0.041802795238,0.0377761252152,0.0392342863536,0.0283599020885,0.0334874030478,0.0323437764589,0.0286264727183,0.0248971929928,0.026022465735,0.0229178765763,0.0220534463843,0.0186058622178,0.0171680342625,0.0160298058638,0.0154619362472,0.0103145819439,0.0123100529443,0.00802243040826,0.00772660358685,0.0083051526231,0.00830043920332,0.00686764855923,0.00801431912764,0.00801
62344855,0.00601087579998,0.00487152077897,0.00544337505969,0.00572330720993,0.00429545191122,0.00457449036108,0.00400743447165,0.00400385767081,0.0037233587106,0.0034295712055,0.00229398991931,0.00143265169067,0.00343798138591,0.000855259166694,0.00229207156245,0.0025750866791,0.00171731625432,0.000861006639802,0.000572159678336,0.00200202780222,0.000570381774395,0.000860475717798,0.00085972337003,0.000858643432089,0.000572915225033,0.00057356520712,0.00057356520712,0.0,0.0,0.000283554335866,0.00028617065872,0.00028711554194,0.00028643716937,0.000858461792985,0.0,0.0]) # Creating weights for histo: y8_M_7 y8_M_7_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00248200333718,0.00228607836919,0.00274382167614,0.00326061588718,0.00356079428486,0.00373654723971,0.00393078034521,0.0037965817458,0.00436280499548,0.00470882167684,0.00503061501125,0.00552396663822,0.00591817017829,0.00565894264782,0.00585045376996,0.00734143430237,0.00801140262673,0.00878636533021,0.00956328098866,0.0114680742377,0.0122667489995,0.0154201719664,0.0292202755919,0.0580403127615,0.0848045966161,0.104900670815,0.119703985612,0.132157971025,0.139424556023,0.148449806302,0.154279041593,0.153735944096,0.157583223463,0.160528078384,0.155438870533,0.154930264065,0.151875817358,0.149781755113,0.145160124913,0.135640517194,0.125316048959,0.118050260229,0.111407237825,0.102772788084,0.101684539554,0.0941787385266,0.0899360486431,0.085380257332,0.0794292347633,0.0727409507418,0.0698421956165,0.0653620917875,0.0632568817741,0.0581214484011,0.0543934417771,0.0507414579063,0.0482575171205,0.0463002874498,0.0431278252675,0.0390447203024,0.0339006453133,0.0324145304047,0.0302970620367,0.0278967169432,0.0267397880998,0.0241396925935,0.0247050269553,0.0221539606767,0.0202093665408,0.0197095316063,0.018398009195,0.0171704638471,0.0159319970386,0.0153326703641,0.0121598771014,0.0115514771491,0.0120716714296,0.0111885544155,0.0096512142
5265,0.00982154042343,0.00915725494656,0.00842389940373,0.00784253069495,0.00708062680619,0.00622029566369,0.00630578144764,0.00650244736506,0.00518212408378,0.00440692669043,0.00427655227857,0.00414750134993,0.00362707279867,0.00375672931098,0.0030870870379,0.00308767585801,0.00276532303973,0.00280651027317,0.002200073753,0.00224419586703,0.00218116152154,0.00159848609311,0.0016623200605,0.00144700216592,0.00131605402116,0.00127521714617,0.00151054109462,0.0011016074127,0.000948830176713,0.000884146715821,0.00114454015202,0.000583359381757,0.000756130098948,0.000691229549928,0.000691296604176,0.000454014252603,0.000410410509813,0.000323951140092,0.000259355184992,0.000453626595233,0.000302458661186,0.00034528507761,0.000237588915212,0.000172843764411,0.000151174136565,0.00019455157127,0.000172758060701,0.000215993298492,0.000108062404317,8.64719423925e-05,0.000194466077104,0.000107962074399,8.64273513178e-05,6.48593944747e-05,0.0,0.000151257786739]) # Creating weights for histo: y8_M_8 y8_M_8_weights = 
numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.000340529049739,0.000226834382902,0.000226190034121,0.000282455983469,0.000198556099336,0.000311927478019,0.000283915606464,0.000396934293661,0.000426309707626,0.000284036635331,0.000452470951268,0.000423984616851,0.000537416138714,0.000538236313483,0.000708132479939,0.000482514919788,0.000652958314929,0.000369270213036,0.00065283624655,0.000397530230896,0.000482461310682,0.000338402505707,0.000709753078753,0.000963777970429,0.00059755946688,0.000595947481163,0.000737284398878,0.00101727837302,0.000988734271454,0.000823323598187,0.000847222864267,0.000510676184032,0.000878811992748,0.000936591174197,0.000960263529713,0.00133213712567,0.00183731028301,0.00244296843741,0.00493542357407,0.0090128668223,0.0127721155247,0.0155716780719,0.0175810691295,0.0206949265685,0.0216567422378,0.0224287133616,0.0230155919938,0.023692729946,0.0237952258043,0.0244905403619,0.022532011129,0.025089804034,0.0235187156795,0.0237305236231,0.0217177467272,0.021652777243,0.0226582375554,0.0212580449322,0.0208667430102,0.019452754581,0.0174422349106,0.0181196995665,0.0180909941921,0.0161865195701,0.0156444884447,0.0146976981752,0.0153205023334,0.0139210462785,0.0123359245154,0.0118532628759,0.011694077987,0.011057299821,0.0106604647264,0.0100896094253,0.00947285968057,0.00915577147223,0.00821337388241,0.00714859754463,0.00739388667658,0.0062436743533,0.0068207325695,0.00547086270994,0.00654661629178,0.00572259532971,0.00525103122067,0.004399702285,0.00376636044776,0.0036318951482,0.00365377687048,0.00354583842569,0.00351659250519,0.00283266208625,0.00289430513286,0.00238034676474,0.00215560283996,0.00206969016408,0.00187019003958,0.0017550492634,0.00161363111527,0.00130419163436,0.00124945257927,0.00124738781194,0.00116378775269,0.00107706678218,0.000737893107257,0.00098884891475,0.000538168745219,0.000795156542378,0.000651599376072,0.00053781130168,0.000509176911089,0.000821214
577354,0.000396265293601,0.00053726006345,0.000423722362887,0.000426096904721,0.000255661380169,0.0004539161696,0.000454472605388,0.000254088004889,0.000226904327193,0.000142027227766,0.000141940072132,0.000170191387793,8.47613423681e-05,0.000141584722466,8.52851226372e-05,0.00017019287281,0.000142127303047,8.4964953019e-05,0.000170293111443,5.67060637719e-05,5.67246858824e-05,0.0,0.000198725985256]) # Creating weights for histo: y8_M_9 y8_M_9_weights = numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,52.1413377758,33.8897872924,20.8552853687,15.6417553124,15.6333032133,18.2539922864,15.6320842345,7.82602851461,7.81914147662,7.80668251343,13.0190551166,5.22010562013,2.60351608428,5.20856569774,0.0,0.0,2.61218275442,0.0,0.0,2.60351608428,0.0,0.0,0.0,2.61303911587,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0]) # Creating weights for histo: y8_M_10 y8_M_10_weights = 
numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,188.512260326,149.540774668,122.164038373,88.4754537986,69.5160542238,53.7174459245,52.6830352241,45.293106684,38.9715241329,43.1924276653,25.2817208533,11.5915943805,6.31543418471,13.6905573971,10.5254876588,7.37189897518,8.42519724155,11.5903208453,6.31768114714,5.26434440539,3.16002984769,4.21735610201,5.26718773627,3.15631158658,3.15886904535,1.05320784921,4.21410493206,2.1078104311,0.0,0.0,0.0,2.10380283509,0.0,1.05320784921,0.0,1.05407970141,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,1.05312782041,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0]) # Creating weights for histo: y8_M_11 y8_M_11_weights = 
numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,257.521814883,205.008890842,172.758021752,146.025028807,124.15540243,105.714439494,96.7312821077,76.0072972442,68.6244876245,55.2815833135,51.3715475602,37.7713811019,32.7038578038,31.3289304508,23.0357784693,21.1951518373,19.3473207585,15.8948000327,14.5120496111,11.9749278081,10.3667684996,7.37337659746,8.52287969407,7.37318832126,4.60767595098,3.45462634539,4.60783348821,4.14604958583,1.84201027007,3.45644263444,2.07236236805,2.76433775729,0.459422380804,0.921741909787,2.99382916403,2.07329875401,0.921904442105,1.61187257642,0.920906578203,0.460790649327,0.460092490409,0.230531613584,0.230502872644,0.0,0.461045782801,0.0,0.0,0.460503239932,0.230000559406,0.230818869284,0.229683717445,0.0,0.460341476088,0.230166626707,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.230818869284,0.0,0.230732569618,0.0,0.0,0.230345104867,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0]) # Creating weights for histo: y8_M_12 y8_M_12_weights = 
numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,146.926923549,122.644963462,104.311721772,90.6344052871,76.7066014652,66.37295046,57.9293227849,50.8111120566,42.0902978871,38.3802345653,32.5905646622,30.3782701949,23.7603108785,22.2359890583,18.6347151839,16.7247905945,14.8992568856,13.3482243668,11.7679920494,11.1867583237,8.77754026049,7.67130646617,7.03273110695,6.06453657151,5.23296939428,4.23688308807,4.68024298191,3.51696339915,3.04629688466,2.68672194664,2.10426406078,2.38175024736,2.13170071479,1.91074479977,1.68918219064,1.41222883306,0.83077697451,1.19038884508,0.60924783551,1.02469326045,0.664600497276,0.775557423815,0.415400028677,0.443286798162,0.498712940316,0.360059947062,0.360017359213,0.221452196183,0.332240502542,0.276862295761,0.276993290905,0.166050714201,0.138279589769,0.11072879109,0.110728829562,0.083081852363,0.0830894697018,0.055407291165,0.11079900141,0.0276649856646,0.0829972152655,0.0277334878536,0.0276723914107,0.0,0.0,0.027692458097,0.0276413026659,0.0,0.0276413026659,0.0276409217989,0.0276604729685,0.0277233275548,0.0276901036469,0.0277086391712,0.0276858910277,0.0276935968507,0.0,0.0276261987911,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0276409217989,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0]) # Creating weights for histo: y8_M_13 y8_M_13_weights = 
numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,29.026318908,35.3250618544,39.2196044282,39.5429728949,39.2384950984,39.479977698,36.1842809013,32.1715335842,28.7040852165,24.6808397113,21.5952159063,18.983776446,16.7865663601,14.2453858718,12.9247866288,11.7354814885,10.0416443511,8.92265886615,8.36796616226,7.56194198062,6.55344233997,5.45462126494,4.87935029188,4.3853237649,3.88109518369,3.44845392916,3.08534466243,2.38958483158,2.48050763616,2.1373024486,1.71428433089,1.54283920993,1.40142740257,1.19999630088,1.18951024941,1.13920208248,0.836908273255,0.826855499611,0.917320753102,0.766500020518,0.594829582954,0.494083144374,0.453466322135,0.393126074517,0.423334459064,0.443669570777,0.302442543671,0.363097918981,0.262124343302,0.201567456774,0.191568266367,0.232050129241,0.252108707757,0.211650389953,0.19161887613,0.141113488899,0.141271932651,0.0907782572724,0.100839951342,0.100880487699,0.0704538257963,0.0605084552915,0.0504475318978,0.0503439821396,0.0807050940837,0.0503857382276,0.070658631489,0.0201428067537,0.0302693954036,0.030226498472,0.0201285280042,0.0,0.0201632023665,0.0201653869605,0.0101024428174,0.0503902287821,0.0201733485922,0.0100987896906,0.0,0.0,0.0201365867289,0.0,0.0201574131922,0.0100787945867,0.0,0.0,0.0100786186055,0.0100787945867,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0]) # Creating weights for histo: y8_M_14 y8_M_14_weights = 
numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.543124652013,0.529074699622,0.514840841287,0.622532006575,0.636645056403,0.840329737806,1.66342771956,4.10233755586,6.40501310109,7.87374829344,8.73894496188,9.4154879895,9.53442281021,9.75768770349,9.68154371757,9.51718258987,9.08727743892,8.88915148779,8.36249180408,7.94727219678,7.36480206086,7.22585304103,6.51823452585,5.92998943552,5.28221730005,4.61711645394,4.20949547086,3.63827899362,3.28749841299,3.0810062804,2.76983434615,2.52082531398,2.22079161627,1.91255563653,1.8361500271,1.5872671898,1.46554261596,1.37500510441,1.15725931315,1.03840221001,0.797821687258,0.789382405104,0.746872430853,0.763996074838,0.650740408169,0.560155573546,0.472479761597,0.472422820008,0.393182442809,0.367883256259,0.32254028405,0.29428228941,0.31959013262,0.223468151663,0.251803979299,0.183900364715,0.178272111838,0.158434470121,0.186771413505,0.138571358584,0.124473390583,0.141509467638,0.132977270661,0.118823784611,0.084924648288,0.0848683607576,0.0594484041756,0.0622359258105,0.0565856273053,0.0537432032056,0.0197968784648,0.0565879742221,0.0226221162078,0.022638433051,0.0282886045919,0.0197985982547,0.0113217308044,0.0254694726802,0.0198233716934,0.0254744512218,0.0113217192622,0.0113298295913,0.00566533031535,0.00849214934162,0.00847760615195,0.00283038403144,0.00564845175104,0.0113323342517,0.0,0.00283438379334,0.00283013933651,0.00282797555612,0.00283099961619,0.00566126360861,0.00565840113953,0.00283009162869,0.00283013933651,0.00564985220634,0.00565938992253,0.0,0.00282876042667,0.0,0.0,0.00283011125045,0.0028292798263,0.0,0.0,0.00283619822939,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00283094844571,0.0,0.0,0.0028292798263,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0]) # Creating weights for histo: y8_M_15 y8_M_15_weights = 
numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0274077427031,0.0243797473086,0.0274037252112,0.021268767076,0.0303979145937,0.0198307058821,0.0182284119861,0.0229447228659,0.0274496545956,0.00910363762637,0.0273896403576,0.0350122284461,0.0152631367285,0.0152076126285,0.0213383051328,0.0289049319235,0.0167406047907,0.0228089552749,0.0273415131689,0.0228583940565,0.0335549069238,0.0503323662937,0.189002075139,0.37933252213,0.593795806108,0.743303455349,0.795113384234,0.881734170696,1.00665937835,0.945937234689,0.993189082791,1.0313414301,1.0024574364,0.906516541581,0.939829347359,0.840888922257,0.851375166692,0.762932565855,0.785767162181,0.738620950481,0.621521226068,0.604987120357,0.539364109332,0.452426295501,0.393033114499,0.37635496989,0.383920852263,0.327553078918,0.274384765154,0.280370237146,0.234636882787,0.201138917902,0.164498092804,0.167433225057,0.173553283024,0.162876444051,0.156934691823,0.129430352074,0.118827009569,0.109730545528,0.103670419086,0.0807514081679,0.0746512840499,0.0821665814746,0.0655582821422,0.0670738454797,0.0593457155181,0.044125341445,0.0487332273105,0.0350130555767,0.0365709089235,0.0365708616589,0.0167310100749,0.0273955366176,0.0243484226886,0.0274688676594,0.0121585017697,0.0152244506456,0.0121593289004,0.00455364027307,0.0212863967753,0.00761678990248,0.0106543894656,0.0106824930024,0.00758503162971,0.00456292304239,0.00456440833274,0.00611820401282,0.00305143509593,0.00153593320251,0.00303104632499,0.00455393804011,0.00612720201286,0.00455945145682,0.00305176122174,0.00306713167276,0.00304446238441,0.00304105342445,0.00459029279616,0.0,0.0,0.00152156948767,0.0,0.0,0.00153117011149,0.0,0.0,0.0,0.0,0.0015277280663,0.0,0.0030294109695,0.0,0.0,0.0,0.0,0.00154504581923,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0]) # Creating weights for histo: y8_M_16 y8_M_16_weights = 
numpy.array([0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.0,0.00162502838019,0.00216863937262,0.00162359379128,0.00198514832545,0.00253011110502,0.00144329041994,0.00180668315355,0.00270776944101,0.00162486739814,0.00198780337404,0.00162450191495,0.00162484005429,0.00126353623619,0.00126474552725,0.00108374315487,0.00198528311904,0.00108362453651,0.000901856536544,0.000901657042031,0.000722885696117,0.00090354723328,0.00198681629984,0.00144446158367,0.00126382122836,0.000720861866653,0.00198573718088,0.000541847122028,0.00162538230965,0.00162537922865,0.00126355549242,0.00072170798527,0.00180596027479,0.00144432447933,0.00180585475066,0.00216756949665,0.00289131286512,0.00216796386418,0.00613874269101,0.0277971301606,0.0565147156772,0.0850477071499,0.109786913792,0.119894969526,0.128199332983,0.142473088317,0.139942974131,0.145165917622,0.142453023329,0.144814491478,0.148240328322,0.139916978225,0.138671562483,0.132703980692,0.14047729592,0.122607555719,0.123861636763,0.120429984538,0.111397620161,0.114653501545,0.103820872499,0.0957081085008,0.0982212386945,0.0756550972763,0.0801734169064,0.0736596514909,0.0715043790208,0.0660858307756,0.0592265316217,0.0547089822407,0.0556135242507,0.0516464718889,0.0471312717677,0.0420645733033,0.0422530147413,0.0359403189976,0.0319568949915,0.0330348739874,0.0321397213137,0.0245523057022,0.0283464255912,0.0229372551286,0.0231140773592,0.0178719277077,0.0194983394552,0.0182355777045,0.0151729402616,0.0106512084281,0.0119191346989,0.0117392045132,0.0117365356002,0.0120963613103,0.00650037848646,0.00848812948357,0.00650126042166,0.00740169315278,0.00632136489721,0.00758273634836,0.00541606034065,0.00324831099084,0.00342979091766,0.00379314167269,0.00289089038349,0.00307108977121,0.00234757709234,0.00198689486525,0.00234853489707,0.00234855530867,0.00144368825359,0.00234903941022,0.000902661831959,0.000902969546468,0.00108361105715,0.0010846632174,0.000902792389179,0.0012651306518
,0.00126423947361,0.000723516144991,0.000721795793665,0.0007219344385,0.000542112857961,0.000901268451369,0.000541610270435,0.000903103184684,0.000722723558685,0.000541070325828,0.000181036147804,0.0,0.000722040347749,0.000360682034527,0.0,0.000180220184439,0.0,0.0,0.0,0.000180820863186]) # Creating a new Canvas fig = plt.figure(figsize=(12,6),dpi=80) frame = gridspec.GridSpec(1,1,right=0.7) pad = fig.add_subplot(frame[0]) # Creating a new Stack pad.hist(x=xData, bins=xBinning, weights=y8_M_0_weights+y8_M_1_weights+y8_M_2_weights+y8_M_3_weights+y8_M_4_weights+y8_M_5_weights+y8_M_6_weights+y8_M_7_weights+y8_M_8_weights+y8_M_9_weights+y8_M_10_weights+y8_M_11_weights+y8_M_12_weights+y8_M_13_weights+y8_M_14_weights+y8_M_15_weights+y8_M_16_weights,\ label="$bg\_dip\_1600\_inf$", histtype="step", rwidth=1.0,\ color=None, edgecolor="#e5e5e5", linewidth=1, linestyle="solid",\ bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical") pad.hist(x=xData, bins=xBinning, weights=y8_M_0_weights+y8_M_1_weights+y8_M_2_weights+y8_M_3_weights+y8_M_4_weights+y8_M_5_weights+y8_M_6_weights+y8_M_7_weights+y8_M_8_weights+y8_M_9_weights+y8_M_10_weights+y8_M_11_weights+y8_M_12_weights+y8_M_13_weights+y8_M_14_weights+y8_M_15_weights,\ label="$bg\_dip\_1200\_1600$", histtype="step", rwidth=1.0,\ color=None, edgecolor="#f2f2f2", linewidth=1, linestyle="solid",\ bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical") pad.hist(x=xData, bins=xBinning, weights=y8_M_0_weights+y8_M_1_weights+y8_M_2_weights+y8_M_3_weights+y8_M_4_weights+y8_M_5_weights+y8_M_6_weights+y8_M_7_weights+y8_M_8_weights+y8_M_9_weights+y8_M_10_weights+y8_M_11_weights+y8_M_12_weights+y8_M_13_weights+y8_M_14_weights,\ label="$bg\_dip\_800\_1200$", histtype="step", rwidth=1.0,\ color=None, edgecolor="#ccc6aa", linewidth=1, linestyle="solid",\ bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical") pad.hist(x=xData, bins=xBinning, 
weights=y8_M_0_weights+y8_M_1_weights+y8_M_2_weights+y8_M_3_weights+y8_M_4_weights+y8_M_5_weights+y8_M_6_weights+y8_M_7_weights+y8_M_8_weights+y8_M_9_weights+y8_M_10_weights+y8_M_11_weights+y8_M_12_weights+y8_M_13_weights,\ label="$bg\_dip\_600\_800$", histtype="step", rwidth=1.0,\ color=None, edgecolor="#ccc6aa", linewidth=1, linestyle="solid",\ bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical") pad.hist(x=xData, bins=xBinning, weights=y8_M_0_weights+y8_M_1_weights+y8_M_2_weights+y8_M_3_weights+y8_M_4_weights+y8_M_5_weights+y8_M_6_weights+y8_M_7_weights+y8_M_8_weights+y8_M_9_weights+y8_M_10_weights+y8_M_11_weights+y8_M_12_weights,\ label="$bg\_dip\_400\_600$", histtype="step", rwidth=1.0,\ color=None, edgecolor="#c1bfa8", linewidth=1, linestyle="solid",\ bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical") pad.hist(x=xData, bins=xBinning, weights=y8_M_0_weights+y8_M_1_weights+y8_M_2_weights+y8_M_3_weights+y8_M_4_weights+y8_M_5_weights+y8_M_6_weights+y8_M_7_weights+y8_M_8_weights+y8_M_9_weights+y8_M_10_weights+y8_M_11_weights,\ label="$bg\_dip\_200\_400$", histtype="step", rwidth=1.0,\ color=None, edgecolor="#bab5a3", linewidth=1, linestyle="solid",\ bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical") pad.hist(x=xData, bins=xBinning, weights=y8_M_0_weights+y8_M_1_weights+y8_M_2_weights+y8_M_3_weights+y8_M_4_weights+y8_M_5_weights+y8_M_6_weights+y8_M_7_weights+y8_M_8_weights+y8_M_9_weights+y8_M_10_weights,\ label="$bg\_dip\_100\_200$", histtype="step", rwidth=1.0,\ color=None, edgecolor="#b2a596", linewidth=1, linestyle="solid",\ bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical") pad.hist(x=xData, bins=xBinning, weights=y8_M_0_weights+y8_M_1_weights+y8_M_2_weights+y8_M_3_weights+y8_M_4_weights+y8_M_5_weights+y8_M_6_weights+y8_M_7_weights+y8_M_8_weights+y8_M_9_weights,\ label="$bg\_dip\_0\_100$", histtype="step", rwidth=1.0,\ color=None, 
edgecolor="#b7a39b", linewidth=1, linestyle="solid",\ bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical") pad.hist(x=xData, bins=xBinning, weights=y8_M_0_weights+y8_M_1_weights+y8_M_2_weights+y8_M_3_weights+y8_M_4_weights+y8_M_5_weights+y8_M_6_weights+y8_M_7_weights+y8_M_8_weights,\ label="$bg\_vbf\_1600\_inf$", histtype="step", rwidth=1.0,\ color=None, edgecolor="#ad998c", linewidth=1, linestyle="solid",\ bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical") pad.hist(x=xData, bins=xBinning, weights=y8_M_0_weights+y8_M_1_weights+y8_M_2_weights+y8_M_3_weights+y8_M_4_weights+y8_M_5_weights+y8_M_6_weights+y8_M_7_weights,\ label="$bg\_vbf\_1200\_1600$", histtype="step", rwidth=1.0,\ color=None, edgecolor="#9b8e82", linewidth=1, linestyle="solid",\ bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical") pad.hist(x=xData, bins=xBinning, weights=y8_M_0_weights+y8_M_1_weights+y8_M_2_weights+y8_M_3_weights+y8_M_4_weights+y8_M_5_weights+y8_M_6_weights,\ label="$bg\_vbf\_800\_1200$", histtype="step", rwidth=1.0,\ color=None, edgecolor="#876656", linewidth=1, linestyle="solid",\ bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical") pad.hist(x=xData, bins=xBinning, weights=y8_M_0_weights+y8_M_1_weights+y8_M_2_weights+y8_M_3_weights+y8_M_4_weights+y8_M_5_weights,\ label="$bg\_vbf\_600\_800$", histtype="step", rwidth=1.0,\ color=None, edgecolor="#afcec6", linewidth=1, linestyle="solid",\ bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical") pad.hist(x=xData, bins=xBinning, weights=y8_M_0_weights+y8_M_1_weights+y8_M_2_weights+y8_M_3_weights+y8_M_4_weights,\ label="$bg\_vbf\_400\_600$", histtype="step", rwidth=1.0,\ color=None, edgecolor="#84c1a3", linewidth=1, linestyle="solid",\ bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical") pad.hist(x=xData, bins=xBinning, 
weights=y8_M_0_weights+y8_M_1_weights+y8_M_2_weights+y8_M_3_weights,\ label="$bg\_vbf\_200\_400$", histtype="step", rwidth=1.0,\ color=None, edgecolor="#89a8a0", linewidth=1, linestyle="solid",\ bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical") pad.hist(x=xData, bins=xBinning, weights=y8_M_0_weights+y8_M_1_weights+y8_M_2_weights,\ label="$bg\_vbf\_100\_200$", histtype="step", rwidth=1.0,\ color=None, edgecolor="#829e8c", linewidth=1, linestyle="solid",\ bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical") pad.hist(x=xData, bins=xBinning, weights=y8_M_0_weights+y8_M_1_weights,\ label="$bg\_vbf\_0\_100$", histtype="step", rwidth=1.0,\ color=None, edgecolor="#adbcc6", linewidth=1, linestyle="solid",\ bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical") pad.hist(x=xData, bins=xBinning, weights=y8_M_0_weights,\ label="$signal$", histtype="step", rwidth=1.0,\ color=None, edgecolor="#7a8e99", linewidth=1, linestyle="solid",\ bottom=None, cumulative=False, normed=False, align="mid", orientation="vertical") # Axis plt.rc('text',usetex=False) plt.xlabel(r"M [ j_{1} , j_{2} ] ( GeV ) ",\ fontsize=16,color="black") plt.ylabel(r"$\mathrm{Events}$ $(\mathcal{L}_{\mathrm{int}} = 40.0\ \mathrm{fb}^{-1})$ ",\ fontsize=16,color="black") # Boundary of y-axis ymax=(y8_M_0_weights+y8_M_1_weights+y8_M_2_weights+y8_M_3_weights+y8_M_4_weights+y8_M_5_weights+y8_M_6_weights+y8_M_7_weights+y8_M_8_weights+y8_M_9_weights+y8_M_10_weights+y8_M_11_weights+y8_M_12_weights+y8_M_13_weights+y8_M_14_weights+y8_M_15_weights+y8_M_16_weights).max()*1.1 ymin=0 # linear scale #ymin=min([x for x in (y8_M_0_weights+y8_M_1_weights+y8_M_2_weights+y8_M_3_weights+y8_M_4_weights+y8_M_5_weights+y8_M_6_weights+y8_M_7_weights+y8_M_8_weights+y8_M_9_weights+y8_M_10_weights+y8_M_11_weights+y8_M_12_weights+y8_M_13_weights+y8_M_14_weights+y8_M_15_weights+y8_M_16_weights) if x])/100. 
# log scale plt.gca().set_ylim(ymin,ymax) # Log/Linear scale for X-axis plt.gca().set_xscale("linear") #plt.gca().set_xscale("log",nonposx="clip") # Log/Linear scale for Y-axis plt.gca().set_yscale("linear") #plt.gca().set_yscale("log",nonposy="clip") # Legend plt.legend(bbox_to_anchor=(1.05,1), loc=2, borderaxespad=0.) # Saving the image plt.savefig('../../HTML/MadAnalysis5job_0/selection_7.png') plt.savefig('../../PDF/MadAnalysis5job_0/selection_7.png') plt.savefig('../../DVI/MadAnalysis5job_0/selection_7.eps') # Running! if __name__ == '__main__': selection_7()
217.469072
2,424
0.789187
7,558
42,189
4.308547
0.271765
0.123326
0.176514
0.224542
0.24871
0.239252
0.236857
0.227521
0.225494
0.224665
0
0.649931
0.039276
42,189
193
2,425
218.595855
0.153484
0.029605
0
0.185841
0
0.00885
0.025499
0.00489
0
0
0
0
0
1
0.00885
false
0
0.035398
0
0.044248
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
1
0
0
0
1
1
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
e9cb61d999b3e8ff007f5af0d43063c443cb8c6b
62
py
Python
common/darts/api/__init__.py
j-woz/Benchmarks
d518162fdafb7cfa26071b6a30a3b456dad024f6
[ "MIT" ]
51
2017-01-24T20:57:27.000Z
2022-02-15T00:33:45.000Z
common/darts/api/__init__.py
j-woz/Benchmarks
d518162fdafb7cfa26071b6a30a3b456dad024f6
[ "MIT" ]
59
2017-08-21T22:19:44.000Z
2021-11-01T16:05:35.000Z
common/darts/api/__init__.py
j-woz/Benchmarks
d518162fdafb7cfa26071b6a30a3b456dad024f6
[ "MIT" ]
90
2016-11-22T03:57:07.000Z
2022-01-11T04:43:23.000Z
from .model import Model from .dataset import InMemoryDataset
20.666667
36
0.83871
8
62
6.5
0.625
0
0
0
0
0
0
0
0
0
0
0
0.129032
62
2
37
31
0.962963
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
e9cfa5403cc8e3974c33ff70c9186fd105e11ea1
101
py
Python
sunday/sponsorapp/admin.py
H0oxy/zxc-djangoprjct
e5c56e16a876af1ece2a6727df89bb924ae8813b
[ "MIT" ]
2
2020-10-18T21:02:54.000Z
2020-10-18T21:03:46.000Z
sunday/sponsorapp/admin.py
H0oxy/zxc-djangoprjct
e5c56e16a876af1ece2a6727df89bb924ae8813b
[ "MIT" ]
null
null
null
sunday/sponsorapp/admin.py
H0oxy/zxc-djangoprjct
e5c56e16a876af1ece2a6727df89bb924ae8813b
[ "MIT" ]
null
null
null
from django.contrib import admin from sponsorapp.models import Sponsor admin.site.register(Sponsor)
20.2
37
0.841584
14
101
6.071429
0.714286
0
0
0
0
0
0
0
0
0
0
0
0.09901
101
5
38
20.2
0.934066
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
7590ef216c06a46f77e9be95921201f07c9fb14d
191
py
Python
test/test_del_contact.py
Sviatlana-Pi/python_training
ecd7fc7d53b3334d1b21d04c12f0355b3f29751c
[ "Apache-2.0" ]
null
null
null
test/test_del_contact.py
Sviatlana-Pi/python_training
ecd7fc7d53b3334d1b21d04c12f0355b3f29751c
[ "Apache-2.0" ]
null
null
null
test/test_del_contact.py
Sviatlana-Pi/python_training
ecd7fc7d53b3334d1b21d04c12f0355b3f29751c
[ "Apache-2.0" ]
null
null
null
# -*- coding: utf-8 -*- def test_del_first_contact(app): app.session.login(username = "admin", password = "secret") app.contact.delete_first_contact() app.session.logout()
27.285714
63
0.659686
24
191
5.041667
0.708333
0.198347
0.247934
0
0
0
0
0
0
0
0
0.006369
0.17801
191
7
64
27.285714
0.764331
0.109948
0
0
0
0
0.067485
0
0
0
0
0
0
1
0.25
false
0.25
0
0
0.25
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
5
7592a8369f48ee5c95c640a8890cc582007d851a
55
py
Python
tinychat/__init__.py
alethea/udp-chat
12a699920cab7881aabce502e07bddbf34e51749
[ "Apache-2.0" ]
1
2015-02-15T22:58:47.000Z
2015-02-15T22:58:47.000Z
tinychat/__init__.py
alethea/udp-chat
12a699920cab7881aabce502e07bddbf34e51749
[ "Apache-2.0" ]
null
null
null
tinychat/__init__.py
alethea/udp-chat
12a699920cab7881aabce502e07bddbf34e51749
[ "Apache-2.0" ]
null
null
null
# 9/23/2013 # Charles O. Goddard from . import server
11
20
0.690909
9
55
4.222222
1
0
0
0
0
0
0
0
0
0
0
0.159091
0.2
55
4
21
13.75
0.704545
0.509091
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
759a39ea4e265f566a0116a7299bc0ad7ff6cad7
109
py
Python
library/favourite/admin.py
furkan-34/library-DRF-django-api
3634133b7c543d6d05845dd8fa1f206386c1badb
[ "MIT" ]
null
null
null
library/favourite/admin.py
furkan-34/library-DRF-django-api
3634133b7c543d6d05845dd8fa1f206386c1badb
[ "MIT" ]
null
null
null
library/favourite/admin.py
furkan-34/library-DRF-django-api
3634133b7c543d6d05845dd8fa1f206386c1badb
[ "MIT" ]
null
null
null
from django.contrib import admin from favourite.api.models import Favourite admin.site.register(Favourite)
18.166667
42
0.834862
15
109
6.066667
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.100917
109
5
43
21.8
0.928571
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
75e4a759c1c0db903673d30708c14c30316ea084
1,795
py
Python
bot/player_commands/__init__.py
UP929312/CommunityBot
c16294e8ff4f47d9a1e8c18c9cd4011e7ebbd67a
[ "Apache-2.0" ]
1
2021-06-15T07:31:13.000Z
2021-06-15T07:31:13.000Z
bot/player_commands/__init__.py
UP929312/CommunityBot
c16294e8ff4f47d9a1e8c18c9cd4011e7ebbd67a
[ "Apache-2.0" ]
1
2021-06-01T10:14:32.000Z
2021-06-02T10:54:12.000Z
bot/player_commands/__init__.py
UP929312/CommunityBot
c16294e8ff4f47d9a1e8c18c9cd4011e7ebbd67a
[ "Apache-2.0" ]
2
2021-06-01T10:59:15.000Z
2021-06-03T18:29:36.000Z
from player_commands.bazaar import bazaar_cog from player_commands.sky import sky_cog from player_commands.wiki import wiki_cog from player_commands.dungeons import dungeons_cog from player_commands.kills import kills_cog from player_commands.lowest_bin import lowest_bin_cog from player_commands.skills import skills_cog from player_commands.slayer import slayer_cog from player_commands.invite import invite_cog from player_commands.auction_house import auction_house_cog from player_commands.missing import missing_cog from player_commands.weights import weights_cog from player_commands.leaderboard import leaderboard_cog from player_commands.price_check import price_check_cog from player_commands.minions import minions_cog from player_commands.rank import rank_cog from player_commands.guild_print import guild_print_cog from player_commands.maxer import maxer_cog from player_commands.set_prefix import set_prefix_cog from player_commands.link_account import link_account_cog from player_commands.help_command import help_cog from player_commands.regenerate_leaderboard import regenerate_leaderboard_cog #from player_commands._dev import _dev_cog assistant_commands = [set_prefix_cog, link_account_cog, help_cog, regenerate_leaderboard_cog] regular_commands = [sky_cog, wiki_cog, bazaar_cog, dungeons_cog, kills_cog, lowest_bin_cog, skills_cog, slayer_cog, invite_cog, auction_house_cog, missing_cog, weights_cog, leaderboard_cog, price_check_cog, minions_cog, rank_cog, guild_print_cog, maxer_cog] player_commands = regular_commands+assistant_commands
47.236842
93
0.773259
237
1,795
5.447257
0.156118
0.260263
0.320682
0.357862
0.049574
0
0
0
0
0
0
0
0.197772
1,795
37
94
48.513514
0.896528
0.022841
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.709677
0
0.709677
0.064516
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
ddf33eb8cc06c0b70baa29d7b075e961d0bad0f8
5,363
py
Python
tests/test_flask_twitter_oembedder.py
h3xh4wk/flask-twitter-oembedder
4115778a12affe97313e39d2bfa017dd4c032852
[ "MIT" ]
9
2015-03-30T16:15:11.000Z
2021-05-06T12:12:18.000Z
tests/test_flask_twitter_oembedder.py
h3xh4wk/flask-twitter-oembedder
4115778a12affe97313e39d2bfa017dd4c032852
[ "MIT" ]
null
null
null
tests/test_flask_twitter_oembedder.py
h3xh4wk/flask-twitter-oembedder
4115778a12affe97313e39d2bfa017dd4c032852
[ "MIT" ]
3
2016-10-24T13:39:10.000Z
2020-06-28T13:34:41.000Z
from flask import Flask, render_template_string, Markup from flask.ext.testing import TestCase, ContextVariableDoesNotExist from flask.ext.cache import Cache from flask.ext.twitter_oembedder import TwitterOEmbedder import types import httpretty class FlaskStaticTest(TestCase): def create_app(self): app = Flask(__name__) app.config['TESTING']=True app.config['CACHE_TYPE'] = 'simple' app.config['TWITTER_CONSUMER_KEY'] = 'twitter_consumer_key' app.config['TWITTER_CONSUMER_SECRET'] = 'twitter_consumer_secret' app.config['TWITTER_ACCESS_TOKEN'] = 'twitter_access_token' app.config['TWITTER_TOKEN_SECRET'] = 'twitter_token_secret' self.cache = Cache(app) self.twitter_oembedder = TwitterOEmbedder(app,self.cache) @app.route('/') def index(): return render_template_string('') return app def test_big_timeout_exception(self): try: self.twitter_oembedder.init(self.app, self.cache, timeout=60*60*24*365*2) assert False except Exception as e: self.assertIsInstance(e,Exception) def test_jinja_oembed_tweet_avaliable(self): response = self.client.get('/') self.assertIsInstance(self.get_context_variable('oembed_tweet'), types.FunctionType) @httpretty.activate def test_oembed_tweet_valid_id_debug_off(self): with open('tests/data/99530515043983360.json') as f: httpretty.register_uri(httpretty.GET, 'https://api.twitter.com/1.1/statuses/oembed.json?id=99530515043983360', body = f.read()) response = self.client.get('/') oembed_tweet = self.get_context_variable('oembed_tweet') valid = oembed_tweet('99530515043983360') self.assertIsInstance(valid, Markup) @httpretty.activate def test_oembed_tweet_invaild_id_debug_off(self): with open('tests/data/abc.json') as f: httpretty.register_uri(httpretty.GET, 'https://api.twitter.com/1.1/statuses/oembed.json?id=abc', body = f.read()) response = self.client.get('/') oembed_tweet = self.get_context_variable('oembed_tweet') invalid = oembed_tweet('abc') self.assertIs(invalid,'') @httpretty.activate def test_oembed_tweet_invalid_id_debug_on(self): 
self.twitter_oembedder.init(self.app, self.cache, debug=True) with open('tests/data/abc.json') as f: httpretty.register_uri(httpretty.GET, 'https://api.twitter.com/1.1/statuses/oembed.json?id=abc', body = f.read()) response = self.client.get('/') oembed_tweet = self.get_context_variable('oembed_tweet') try: invalid = oembed_tweet('abc') assert False except Exception as e: self.assertIsInstance(e, KeyError) @httpretty.activate def test_oembed_tweet_valid_id_app_debug_on(self): self.app.config['DEBUG'] = True self.twitter_oembedder.init(self.app, self.cache) with open('tests/data/99530515043983360.json') as f: httpretty.register_uri(httpretty.GET, 'https://api.twitter.com/1.1/statuses/oembed.json?id=99530515043983360', body = f.read()) response = self.client.get('/') oembed_tweet = self.get_context_variable('oembed_tweet') valid = oembed_tweet('99530515043983360') self.assertIsInstance(valid, Markup) @httpretty.activate def test_oembed_tweet_invalid_id_app_debug_on(self): self.app.config['DEBUG'] = True self.twitter_oembedder.init(self.app, self.cache) with open('tests/data/abc.json') as f: httpretty.register_uri(httpretty.GET, 'https://api.twitter.com/1.1/statuses/oembed.json?id=abc', body = f.read()) response = self.client.get('/') oembed_tweet = self.get_context_variable('oembed_tweet') try: invalid = oembed_tweet('abc') assert False except Exception as e: self.assertIsInstance(e, KeyError) @httpretty.activate def test_oembed_tweet_valid_id_app_debug_on_override(self): self.app.config['DEBUG'] = True self.twitter_oembedder.init(self.app, self.cache, debug=False) with open('tests/data/99530515043983360.json') as f: httpretty.register_uri(httpretty.GET, 'https://api.twitter.com/1.1/statuses/oembed.json?id=99530515043983360', body = f.read()) response = self.client.get('/') oembed_tweet = self.get_context_variable('oembed_tweet') valid = oembed_tweet('99530515043983360') self.assertIsInstance(valid, Markup) @httpretty.activate def 
test_oembed_tweet_invalid_id_app_debug_on_override(self): self.app.config['DEBUG'] = True self.twitter_oembedder.init(self.app, self.cache, debug=False) with open('tests/data/abc.json') as f: httpretty.register_uri(httpretty.GET, 'https://api.twitter.com/1.1/statuses/oembed.json?id=abc', body = f.read()) response = self.client.get('/') oembed_tweet = self.get_context_variable('oembed_tweet') invalid = oembed_tweet('abc') self.assertIs(invalid,'')
42.904
122
0.65262
646
5,363
5.210526
0.139319
0.098039
0.042781
0.049911
0.766191
0.766191
0.756387
0.756387
0.71123
0.683007
0
0.042878
0.230282
5,363
124
123
43.25
0.772529
0
0
0.672727
0
0.063636
0.182547
0.027037
0
0
0
0
0.109091
1
0.1
false
0
0.054545
0.009091
0.181818
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
349b5c8834d9d9f9cd6eaf74debf6fec61ab3188
11,849
py
Python
SimModel_Python_API/simmodel_swig/Release/SimMaterialLayer_OpaqueMaterialLayer_Default.py
EnEff-BIM/EnEffBIM-Framework
6328d39b498dc4065a60b5cc9370b8c2a9a1cddf
[ "MIT" ]
3
2016-05-30T15:12:16.000Z
2022-03-22T08:11:13.000Z
SimModel_Python_API/simmodel_swig/Release/SimMaterialLayer_OpaqueMaterialLayer_Default.py
EnEff-BIM/EnEffBIM-Framework
6328d39b498dc4065a60b5cc9370b8c2a9a1cddf
[ "MIT" ]
21
2016-06-13T11:33:45.000Z
2017-05-23T09:46:52.000Z
SimModel_Python_API/simmodel_swig/Release/SimMaterialLayer_OpaqueMaterialLayer_Default.py
EnEff-BIM/EnEffBIM-Framework
6328d39b498dc4065a60b5cc9370b8c2a9a1cddf
[ "MIT" ]
null
null
null
# This file was automatically generated by SWIG (http://www.swig.org). # Version 3.0.7 # # Do not make changes to this file unless you know what you are doing--modify # the SWIG interface file instead. from sys import version_info if version_info >= (2, 6, 0): def swig_import_helper(): from os.path import dirname import imp fp = None try: fp, pathname, description = imp.find_module('_SimMaterialLayer_OpaqueMaterialLayer_Default', [dirname(__file__)]) except ImportError: import _SimMaterialLayer_OpaqueMaterialLayer_Default return _SimMaterialLayer_OpaqueMaterialLayer_Default if fp is not None: try: _mod = imp.load_module('_SimMaterialLayer_OpaqueMaterialLayer_Default', fp, pathname, description) finally: fp.close() return _mod _SimMaterialLayer_OpaqueMaterialLayer_Default = swig_import_helper() del swig_import_helper else: import _SimMaterialLayer_OpaqueMaterialLayer_Default del version_info try: _swig_property = property except NameError: pass # Python < 2.2 doesn't have 'property'. def _swig_setattr_nondynamic(self, class_type, name, value, static=1): if (name == "thisown"): return self.this.own(value) if (name == "this"): if type(value).__name__ == 'SwigPyObject': self.__dict__[name] = value return method = class_type.__swig_setmethods__.get(name, None) if method: return method(self, value) if (not static): if _newclass: object.__setattr__(self, name, value) else: self.__dict__[name] = value else: raise AttributeError("You cannot add attributes to %s" % self) def _swig_setattr(self, class_type, name, value): return _swig_setattr_nondynamic(self, class_type, name, value, 0) def _swig_getattr_nondynamic(self, class_type, name, static=1): if (name == "thisown"): return self.this.own() method = class_type.__swig_getmethods__.get(name, None) if method: return method(self) if (not static): return object.__getattr__(self, name) else: raise AttributeError(name) def _swig_getattr(self, class_type, name): return _swig_getattr_nondynamic(self, class_type, name, 0) def 
_swig_repr(self): try: strthis = "proxy of " + self.this.__repr__() except: strthis = "" return "<%s.%s; %s >" % (self.__class__.__module__, self.__class__.__name__, strthis,) try: _object = object _newclass = 1 except AttributeError: class _object: pass _newclass = 0 try: import weakref weakref_proxy = weakref.proxy except: weakref_proxy = lambda x: x import base class SimMaterialLayer(base.SimResourceObject): __swig_setmethods__ = {} for _s in [base.SimResourceObject]: __swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {})) __setattr__ = lambda self, name, value: _swig_setattr(self, SimMaterialLayer, name, value) __swig_getmethods__ = {} for _s in [base.SimResourceObject]: __swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {})) __getattr__ = lambda self, name: _swig_getattr(self, SimMaterialLayer, name) __repr__ = _swig_repr def LayerMaterial(self, *args): return _SimMaterialLayer_OpaqueMaterialLayer_Default.SimMaterialLayer_LayerMaterial(self, *args) def LayerThickness(self, *args): return _SimMaterialLayer_OpaqueMaterialLayer_Default.SimMaterialLayer_LayerThickness(self, *args) def IsVentilated(self, *args): return _SimMaterialLayer_OpaqueMaterialLayer_Default.SimMaterialLayer_IsVentilated(self, *args) def MaterialLayerName(self, *args): return _SimMaterialLayer_OpaqueMaterialLayer_Default.SimMaterialLayer_MaterialLayerName(self, *args) def __init__(self, *args): this = _SimMaterialLayer_OpaqueMaterialLayer_Default.new_SimMaterialLayer(*args) try: self.this.append(this) except: self.this = this def _clone(self, f=0, c=None): return _SimMaterialLayer_OpaqueMaterialLayer_Default.SimMaterialLayer__clone(self, f, c) __swig_destroy__ = _SimMaterialLayer_OpaqueMaterialLayer_Default.delete_SimMaterialLayer __del__ = lambda self: None SimMaterialLayer_swigregister = _SimMaterialLayer_OpaqueMaterialLayer_Default.SimMaterialLayer_swigregister SimMaterialLayer_swigregister(SimMaterialLayer) class 
SimMaterialLayer_OpaqueMaterialLayer(SimMaterialLayer): __swig_setmethods__ = {} for _s in [SimMaterialLayer]: __swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {})) __setattr__ = lambda self, name, value: _swig_setattr(self, SimMaterialLayer_OpaqueMaterialLayer, name, value) __swig_getmethods__ = {} for _s in [SimMaterialLayer]: __swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {})) __getattr__ = lambda self, name: _swig_getattr(self, SimMaterialLayer_OpaqueMaterialLayer, name) __repr__ = _swig_repr def SimMatLayer_MaterialLayerName(self, *args): return _SimMaterialLayer_OpaqueMaterialLayer_Default.SimMaterialLayer_OpaqueMaterialLayer_SimMatLayer_MaterialLayerName(self, *args) def SimMatLayer_MaterialName(self, *args): return _SimMaterialLayer_OpaqueMaterialLayer_Default.SimMaterialLayer_OpaqueMaterialLayer_SimMatLayer_MaterialName(self, *args) def SimMatLayer_LayerThickness(self, *args): return _SimMaterialLayer_OpaqueMaterialLayer_Default.SimMaterialLayer_OpaqueMaterialLayer_SimMatLayer_LayerThickness(self, *args) def __init__(self, *args): this = _SimMaterialLayer_OpaqueMaterialLayer_Default.new_SimMaterialLayer_OpaqueMaterialLayer(*args) try: self.this.append(this) except: self.this = this def _clone(self, f=0, c=None): return _SimMaterialLayer_OpaqueMaterialLayer_Default.SimMaterialLayer_OpaqueMaterialLayer__clone(self, f, c) __swig_destroy__ = _SimMaterialLayer_OpaqueMaterialLayer_Default.delete_SimMaterialLayer_OpaqueMaterialLayer __del__ = lambda self: None SimMaterialLayer_OpaqueMaterialLayer_swigregister = _SimMaterialLayer_OpaqueMaterialLayer_Default.SimMaterialLayer_OpaqueMaterialLayer_swigregister SimMaterialLayer_OpaqueMaterialLayer_swigregister(SimMaterialLayer_OpaqueMaterialLayer) class SimMaterialLayer_OpaqueMaterialLayer_Default(SimMaterialLayer_OpaqueMaterialLayer): __swig_setmethods__ = {} for _s in [SimMaterialLayer_OpaqueMaterialLayer]: __swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {})) __setattr__ = 
lambda self, name, value: _swig_setattr(self, SimMaterialLayer_OpaqueMaterialLayer_Default, name, value) __swig_getmethods__ = {} for _s in [SimMaterialLayer_OpaqueMaterialLayer]: __swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {})) __getattr__ = lambda self, name: _swig_getattr(self, SimMaterialLayer_OpaqueMaterialLayer_Default, name) __repr__ = _swig_repr def __init__(self, *args): this = _SimMaterialLayer_OpaqueMaterialLayer_Default.new_SimMaterialLayer_OpaqueMaterialLayer_Default(*args) try: self.this.append(this) except: self.this = this def _clone(self, f=0, c=None): return _SimMaterialLayer_OpaqueMaterialLayer_Default.SimMaterialLayer_OpaqueMaterialLayer_Default__clone(self, f, c) __swig_destroy__ = _SimMaterialLayer_OpaqueMaterialLayer_Default.delete_SimMaterialLayer_OpaqueMaterialLayer_Default __del__ = lambda self: None SimMaterialLayer_OpaqueMaterialLayer_Default_swigregister = _SimMaterialLayer_OpaqueMaterialLayer_Default.SimMaterialLayer_OpaqueMaterialLayer_Default_swigregister SimMaterialLayer_OpaqueMaterialLayer_Default_swigregister(SimMaterialLayer_OpaqueMaterialLayer_Default) class SimMaterialLayer_OpaqueMaterialLayer_Default_sequence(base.sequence_common): __swig_setmethods__ = {} for _s in [base.sequence_common]: __swig_setmethods__.update(getattr(_s, '__swig_setmethods__', {})) __setattr__ = lambda self, name, value: _swig_setattr(self, SimMaterialLayer_OpaqueMaterialLayer_Default_sequence, name, value) __swig_getmethods__ = {} for _s in [base.sequence_common]: __swig_getmethods__.update(getattr(_s, '__swig_getmethods__', {})) __getattr__ = lambda self, name: _swig_getattr(self, SimMaterialLayer_OpaqueMaterialLayer_Default_sequence, name) __repr__ = _swig_repr def __init__(self, *args): this = _SimMaterialLayer_OpaqueMaterialLayer_Default.new_SimMaterialLayer_OpaqueMaterialLayer_Default_sequence(*args) try: self.this.append(this) except: self.this = this def assign(self, n, x): return 
_SimMaterialLayer_OpaqueMaterialLayer_Default.SimMaterialLayer_OpaqueMaterialLayer_Default_sequence_assign(self, n, x) def begin(self, *args): return _SimMaterialLayer_OpaqueMaterialLayer_Default.SimMaterialLayer_OpaqueMaterialLayer_Default_sequence_begin(self, *args) def end(self, *args): return _SimMaterialLayer_OpaqueMaterialLayer_Default.SimMaterialLayer_OpaqueMaterialLayer_Default_sequence_end(self, *args) def rbegin(self, *args): return _SimMaterialLayer_OpaqueMaterialLayer_Default.SimMaterialLayer_OpaqueMaterialLayer_Default_sequence_rbegin(self, *args) def rend(self, *args): return _SimMaterialLayer_OpaqueMaterialLayer_Default.SimMaterialLayer_OpaqueMaterialLayer_Default_sequence_rend(self, *args) def at(self, *args): return _SimMaterialLayer_OpaqueMaterialLayer_Default.SimMaterialLayer_OpaqueMaterialLayer_Default_sequence_at(self, *args) def front(self, *args): return _SimMaterialLayer_OpaqueMaterialLayer_Default.SimMaterialLayer_OpaqueMaterialLayer_Default_sequence_front(self, *args) def back(self, *args): return _SimMaterialLayer_OpaqueMaterialLayer_Default.SimMaterialLayer_OpaqueMaterialLayer_Default_sequence_back(self, *args) def push_back(self, *args): return _SimMaterialLayer_OpaqueMaterialLayer_Default.SimMaterialLayer_OpaqueMaterialLayer_Default_sequence_push_back(self, *args) def pop_back(self): return _SimMaterialLayer_OpaqueMaterialLayer_Default.SimMaterialLayer_OpaqueMaterialLayer_Default_sequence_pop_back(self) def detach_back(self, pop=True): return _SimMaterialLayer_OpaqueMaterialLayer_Default.SimMaterialLayer_OpaqueMaterialLayer_Default_sequence_detach_back(self, pop) def insert(self, *args): return _SimMaterialLayer_OpaqueMaterialLayer_Default.SimMaterialLayer_OpaqueMaterialLayer_Default_sequence_insert(self, *args) def erase(self, *args): return _SimMaterialLayer_OpaqueMaterialLayer_Default.SimMaterialLayer_OpaqueMaterialLayer_Default_sequence_erase(self, *args) def detach(self, position, r, erase=True): return 
_SimMaterialLayer_OpaqueMaterialLayer_Default.SimMaterialLayer_OpaqueMaterialLayer_Default_sequence_detach(self, position, r, erase) def swap(self, x): return _SimMaterialLayer_OpaqueMaterialLayer_Default.SimMaterialLayer_OpaqueMaterialLayer_Default_sequence_swap(self, x) __swig_destroy__ = _SimMaterialLayer_OpaqueMaterialLayer_Default.delete_SimMaterialLayer_OpaqueMaterialLayer_Default_sequence __del__ = lambda self: None SimMaterialLayer_OpaqueMaterialLayer_Default_sequence_swigregister = _SimMaterialLayer_OpaqueMaterialLayer_Default.SimMaterialLayer_OpaqueMaterialLayer_Default_sequence_swigregister SimMaterialLayer_OpaqueMaterialLayer_Default_sequence_swigregister(SimMaterialLayer_OpaqueMaterialLayer_Default_sequence) # This file is compatible with both classic and new-style classes.
44.378277
181
0.770276
1,162
11,849
7.282272
0.129088
0.384661
0.382179
0.205625
0.738951
0.69239
0.652328
0.587922
0.502836
0.401205
0
0.001707
0.159676
11,849
266
182
44.545113
0.848147
0.024812
0
0.383495
1
0
0.028064
0.007796
0
0
0
0
0
1
0.169903
false
0.009709
0.053398
0.131068
0.558252
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
1
0
0
0
5
34aa5078e63278f98703ce0f89c002e42d8eb6f0
146
py
Python
python/testData/stubs/AttrsKwOnlyOnField.py
alexey-anufriev/intellij-community
ffcd46f14e630acdefcc76e2bfc7c43d2449013a
[ "Apache-2.0" ]
2
2019-04-28T07:48:50.000Z
2020-12-11T14:18:08.000Z
python/testData/stubs/AttrsKwOnlyOnField.py
alexey-anufriev/intellij-community
ffcd46f14e630acdefcc76e2bfc7c43d2449013a
[ "Apache-2.0" ]
1
2020-07-30T19:04:47.000Z
2020-07-30T19:04:47.000Z
python/testData/stubs/AttrsKwOnlyOnField.py
bradleesand/intellij-community
750ff9c10333c9c1278c00dbe8d88c877b1b9749
[ "Apache-2.0" ]
1
2020-10-15T05:56:42.000Z
2020-10-15T05:56:42.000Z
import attr @attr.s class Foo: bar1 = attr.ib(type=str) bar2 = attr.ib(type=str, kw_only=True) bar3 = attr.ib(type=str, kw_only=False)
24.333333
43
0.664384
27
146
3.518519
0.555556
0.189474
0.315789
0.410526
0.4
0.4
0
0
0
0
0
0.02521
0.184932
146
6
43
24.333333
0.773109
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.166667
0
0.833333
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
5
34ba58f7e7aeb18886f43477b76462ba7c38e674
13,198
py
Python
mi/dataset/parser/test/test_pco2w_abc_imodem.py
rmanoni/mi-dataset
c1012a0cd8f2ea075e008cdd1ab291ed54f44d43
[ "BSD-2-Clause" ]
null
null
null
mi/dataset/parser/test/test_pco2w_abc_imodem.py
rmanoni/mi-dataset
c1012a0cd8f2ea075e008cdd1ab291ed54f44d43
[ "BSD-2-Clause" ]
null
null
null
mi/dataset/parser/test/test_pco2w_abc_imodem.py
rmanoni/mi-dataset
c1012a0cd8f2ea075e008cdd1ab291ed54f44d43
[ "BSD-2-Clause" ]
null
null
null
#!/usr/bin/env python __author__ = 'mworden' """ @package mi.dataset.parser.test.test_pco2w_abc_imodem @author Mark Worden @brief Test code for the pco2w_abc_imodem parser """ from mi.logging import log import os from nose.plugins.attrib import attr from mi.core.exceptions import RecoverableSampleException from mi.dataset.test.test_parser import ParserUnitTestCase from mi.dataset.dataset_parser import DataSetDriverConfigKeys from mi.dataset.parser.pco2w_abc_imodem import Pco2wAbcImodemParser from mi.dataset.parser.pco2w_abc_particles import \ Pco2wAbcImodemMetadataTelemeteredDataParticle, \ Pco2wAbcImodemMetadataRecoveredDataParticle, \ Pco2wAbcImodemPowerTelemeteredDataParticle, \ Pco2wAbcImodemPowerRecoveredDataParticle, \ Pco2wAbcImodemInstrumentTelemeteredDataParticle, \ Pco2wAbcImodemInstrumentRecoveredDataParticle, \ Pco2wAbcImodemInstrumentBlankTelemeteredDataParticle, \ Pco2wAbcImodemInstrumentBlankRecoveredDataParticle, \ Pco2wAbcImodemControlTelemeteredDataParticle, \ Pco2wAbcImodemControlRecoveredDataParticle, \ Pco2wAbcParticleClassKey from mi.idk.config import Config RESOURCE_PATH = os.path.join( Config().base_dir(), 'mi', 'dataset', 'driver', 'pco2w_abc', 'imodem', 'resource') @attr('UNIT', group='mi') class Pco2wAbcParserUnitTestCase(ParserUnitTestCase): """ pco2w_abc Parser unit test suite """ def setUp(self): ParserUnitTestCase.setUp(self) self._telem_parser_config = { DataSetDriverConfigKeys.PARTICLE_MODULE: 'mi.dataset.parser.pco2w_abc_particles', DataSetDriverConfigKeys.PARTICLE_CLASS: None, DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT: { Pco2wAbcParticleClassKey.METADATA_PARTICLE_CLASS: Pco2wAbcImodemMetadataTelemeteredDataParticle, Pco2wAbcParticleClassKey.POWER_PARTICLE_CLASS: Pco2wAbcImodemPowerTelemeteredDataParticle, Pco2wAbcParticleClassKey.INSTRUMENT_PARTICLE_CLASS: Pco2wAbcImodemInstrumentTelemeteredDataParticle, Pco2wAbcParticleClassKey.INSTRUMENT_BLANK_PARTICLE_CLASS: Pco2wAbcImodemInstrumentBlankTelemeteredDataParticle, 
Pco2wAbcParticleClassKey.CONTROL_PARTICLE_CLASS: Pco2wAbcImodemControlTelemeteredDataParticle, } } self._recov_parser_config = { DataSetDriverConfigKeys.PARTICLE_MODULE: 'mi.dataset.parser.pco2w_abc_particles', DataSetDriverConfigKeys.PARTICLE_CLASS: None, DataSetDriverConfigKeys.PARTICLE_CLASSES_DICT: { Pco2wAbcParticleClassKey.METADATA_PARTICLE_CLASS: Pco2wAbcImodemMetadataRecoveredDataParticle, Pco2wAbcParticleClassKey.POWER_PARTICLE_CLASS: Pco2wAbcImodemPowerRecoveredDataParticle, Pco2wAbcParticleClassKey.INSTRUMENT_PARTICLE_CLASS: Pco2wAbcImodemInstrumentRecoveredDataParticle, Pco2wAbcParticleClassKey.INSTRUMENT_BLANK_PARTICLE_CLASS: Pco2wAbcImodemInstrumentBlankRecoveredDataParticle, Pco2wAbcParticleClassKey.CONTROL_PARTICLE_CLASS: Pco2wAbcImodemControlRecoveredDataParticle, } } def test_happy_path(self): """ Read files and verify that all expected particles can be read. Verify that the contents of the particles are correct. There should be no exceptions generated. """ log.debug('===== START TEST HAPPY PATH =====') num_particles_to_request = 10 num_expected_particles = 7 with open(os.path.join(RESOURCE_PATH, 'pco2wXYZ_11212014_1624.DAT'), 'r') as file_handle: parser = Pco2wAbcImodemParser(self._telem_parser_config, file_handle, self.exception_callback) particles = parser.get_records(num_particles_to_request) self.assertEquals(len(particles), num_expected_particles) self.assert_particles(particles, "pco2wXYZ_11212014_1624.telem.yml", RESOURCE_PATH) self.assertEquals(len(self.exception_callback_value), 0) with open(os.path.join(RESOURCE_PATH, 'pco2wXYZ_11212014_1624.DAT'), 'r') as file_handle: parser = Pco2wAbcImodemParser(self._recov_parser_config, file_handle, self.exception_callback) particles = parser.get_records(num_particles_to_request) self.assertEquals(len(particles), num_expected_particles) self.assert_particles(particles, "pco2wXYZ_11212014_1624.recov.yml", RESOURCE_PATH) self.assertEquals(len(self.exception_callback_value), 0) log.debug('===== END 
TEST HAPPY PATH =====') def test_invalid_data_telem(self): """ Read files and verify that all expected particles can be read. Verify that invalid data is handled appropriately with the correct exceptions being reported. """ log.debug('===== START TEST INVALID DATA TELEMETERED =====') num_particles_to_request = 10 num_expected_particles = 7 with open(os.path.join(RESOURCE_PATH, 'pco2wXYZ_11212014_1625.DAT'), 'r') as file_handle: parser = Pco2wAbcImodemParser(self._telem_parser_config, file_handle, self.exception_callback) particles = parser.get_records(num_particles_to_request) self.assertEquals(len(particles), num_expected_particles) self.assert_particles(particles, "pco2wXYZ_11212014_1625.telem.yml", RESOURCE_PATH) self.assertEquals(len(self.exception_callback_value), 2) for exception in self.exception_callback_value: self.assertIsInstance(exception, RecoverableSampleException) log.debug('===== END TEST INVALID DATA TELEMETERED =====') def test_invalid_data_recov(self): """ Read files and verify that all expected particles can be read. Verify that invalid data is handled appropriately with the correct exceptions being reported. """ log.debug('===== START TEST INVALID DATA RECOVERED =====') num_particles_to_request = 10 num_expected_particles = 7 with open(os.path.join(RESOURCE_PATH, 'pco2wXYZ_11212014_1625.DAT'), 'r') as file_handle: parser = Pco2wAbcImodemParser(self._recov_parser_config, file_handle, self.exception_callback) particles = parser.get_records(num_particles_to_request) self.assertEquals(len(particles), num_expected_particles) self.assert_particles(particles, "pco2wXYZ_11212014_1625.recov.yml", RESOURCE_PATH) self.assertEquals(len(self.exception_callback_value), 2) for exception in self.exception_callback_value: self.assertIsInstance(exception, RecoverableSampleException) log.debug('===== END TEST INVALID DATA RECOVERED =====') def test_incomplete_metadata_one(self): """ Read a file containing insufficient data to create a metadata particle. 
In this case, the line specifying the sample count is missing. Verify that the contents of the particles are correct ensuring no metadata particle was generated. There should be no exceptions generated. """ log.debug('===== START TEST INCOMPLETE METADATA ONE =====') num_particles_to_request = 10 num_expected_particles = 7 with open(os.path.join(RESOURCE_PATH, 'pco2wXYZ_11212014_1626.DAT'), 'r') as file_handle: parser = Pco2wAbcImodemParser(self._telem_parser_config, file_handle, self.exception_callback) particles = parser.get_records(num_particles_to_request) self.assertEquals(len(particles), num_expected_particles) self.assert_particles(particles, "pco2wXYZ_11212014_1626.telem.yml", RESOURCE_PATH) self.assertEquals(len(self.exception_callback_value), 0) with open(os.path.join(RESOURCE_PATH, 'pco2wXYZ_11212014_1626.DAT'), 'r') as file_handle: parser = Pco2wAbcImodemParser(self._recov_parser_config, file_handle, self.exception_callback) particles = parser.get_records(num_particles_to_request) self.assertEquals(len(particles), num_expected_particles) self.assert_particles(particles, "pco2wXYZ_11212014_1626.recov.yml", RESOURCE_PATH) self.assertEquals(len(self.exception_callback_value), 0) log.debug('===== END TEST INCOMPLETE METADATA ONE =====') def test_incomplete_metadata_two(self): """ Read a file containing insufficient data to create a metadata particle. In this case, the line specifying the serial number is missing. Verify that the contents of the particles are correct ensuring no metadata particle was generated. There should be no exceptions generated. 
""" log.debug('===== START TEST INCOMPLETE METADATA TWO =====') num_particles_to_request = 10 num_expected_particles = 7 with open(os.path.join(RESOURCE_PATH, 'pco2wXYZ_11212014_1627.DAT'), 'r') as file_handle: parser = Pco2wAbcImodemParser(self._telem_parser_config, file_handle, self.exception_callback) particles = parser.get_records(num_particles_to_request) self.assertEquals(len(particles), num_expected_particles) self.assert_particles(particles, "pco2wXYZ_11212014_1627.telem.yml", RESOURCE_PATH) self.assertEquals(len(self.exception_callback_value), 0) with open(os.path.join(RESOURCE_PATH, 'pco2wXYZ_11212014_1627.DAT'), 'r') as file_handle: parser = Pco2wAbcImodemParser(self._recov_parser_config, file_handle, self.exception_callback) particles = parser.get_records(num_particles_to_request) self.assertEquals(len(particles), num_expected_particles) self.assert_particles(particles, "pco2wXYZ_11212014_1627.recov.yml", RESOURCE_PATH) self.assertEquals(len(self.exception_callback_value), 0) log.debug('===== END TEST INCOMPLETE METADATA TWO =====') def test_missing_file_time_telem(self): """ Read a file that is missing the file time metadata A RecoverableException should be reported. 
""" log.debug('===== START TEST MISSING FILE TIME TELEM =====') num_particles_to_request = 10 num_expected_particles = 6 with open(os.path.join(RESOURCE_PATH, 'pco2wXYZ_11212014_1628.DAT'), 'r') as file_handle: parser = Pco2wAbcImodemParser(self._telem_parser_config, file_handle, self.exception_callback) particles = parser.get_records(num_particles_to_request) self.assertEquals(len(particles), num_expected_particles) self.assert_particles(particles, "pco2wXYZ_11212014_1628.telem.yml", RESOURCE_PATH) self.assertEquals(len(self.exception_callback_value), 1) for exception in self.exception_callback_value: self.assertIsInstance(exception, RecoverableSampleException) log.debug('===== END TEST MISSING FILE TIME TELEM =====') def test_missing_file_time_recov(self): """ Read a file that is missing the file time metadata A RecoverableException should be reported. """ log.debug('===== START TEST MISSING FILE TIME RECOV =====') num_particles_to_request = 10 num_expected_particles = 6 with open(os.path.join(RESOURCE_PATH, 'pco2wXYZ_11212014_1628.DAT'), 'r') as file_handle: parser = Pco2wAbcImodemParser(self._recov_parser_config, file_handle, self.exception_callback) particles = parser.get_records(num_particles_to_request) self.assertEquals(len(particles), num_expected_particles) self.assert_particles(particles, "pco2wXYZ_11212014_1628.recov.yml", RESOURCE_PATH) self.assertEquals(len(self.exception_callback_value), 1) for exception in self.exception_callback_value: self.assertIsInstance(exception, RecoverableSampleException) log.debug('===== END TEST MISSING FILE TIME RECOV =====')
39.873112
97
0.654569
1,263
13,198
6.575614
0.125891
0.037568
0.060686
0.042986
0.755328
0.733895
0.726309
0.726309
0.726309
0.722095
0
0.035555
0.271177
13,198
330
98
39.993939
0.827841
0.095848
0
0.590164
0
0
0.114716
0.056879
0
0
0
0
0.185792
1
0.043716
false
0
0.04918
0
0.098361
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
9b2da1805113ec9795d284933fb092c7e5290b97
210
py
Python
Exercicios/Ex11.py
angeloridolfi/Python-CEV
fd11b7ea0725f83c84336b99304c50f183514245
[ "MIT" ]
null
null
null
Exercicios/Ex11.py
angeloridolfi/Python-CEV
fd11b7ea0725f83c84336b99304c50f183514245
[ "MIT" ]
null
null
null
Exercicios/Ex11.py
angeloridolfi/Python-CEV
fd11b7ea0725f83c84336b99304c50f183514245
[ "MIT" ]
null
null
null
n = float(input('Qual a largura da pardede? :')) n2 = float(input('Qual a altura da parede? :')) print(f'A área da parede é igual a {n*n2}, e será necessário {(n*n2)/2} litros de tinta para pintar a parede.')
42
111
0.671429
40
210
3.525
0.625
0.141844
0.198582
0.212766
0
0
0
0
0
0
0
0.022989
0.171429
210
4
112
52.5
0.787356
0
0
0
0
0.333333
0.738095
0
0
0
0
0
0
1
0
false
0
0
0
0
0.333333
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
9b635063d21cfdd4fba986ed81f94f977741e211
28
py
Python
public/cmd/echo.py
sgframework/cdn
28acfd0d1fcddc3179da9319d6c353ce95347e37
[ "MIT" ]
3
2019-05-21T22:54:01.000Z
2019-06-05T09:27:40.000Z
public/cmd/echo.py
sgframework/cdn
28acfd0d1fcddc3179da9319d6c353ce95347e37
[ "MIT" ]
10
2019-05-12T22:15:22.000Z
2022-02-26T10:14:35.000Z
public/cmd/echo.py
sgframework/cdn
28acfd0d1fcddc3179da9319d6c353ce95347e37
[ "MIT" ]
1
2019-05-23T16:41:33.000Z
2019-05-23T16:41:33.000Z
#!/bin/python print('hello')
14
14
0.678571
4
28
4.75
1
0
0
0
0
0
0
0
0
0
0
0
0.035714
28
2
14
14
0.703704
0.428571
0
0
0
0
0.3125
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
9b8c82ba5563266d6395cb1aad2eb5e182fdfebd
13
py
Python
windows-virtual-desktop/archive/19H2/powershell/runbooks/Renew-RegistrationTokenAfterExpiration.py
faroukfriha/azure-as-code
687828825b4dbe2504e6e8f52500c751a4c8d452
[ "MIT" ]
1
2020-11-24T19:59:37.000Z
2020-11-24T19:59:37.000Z
windows-virtual-desktop/archive/19H2/powershell/runbooks/Renew-RegistrationTokenAfterExpiration.py
faroukfriha/azure-as-code
687828825b4dbe2504e6e8f52500c751a4c8d452
[ "MIT" ]
null
null
null
windows-virtual-desktop/archive/19H2/powershell/runbooks/Renew-RegistrationTokenAfterExpiration.py
faroukfriha/azure-as-code
687828825b4dbe2504e6e8f52500c751a4c8d452
[ "MIT" ]
1
2020-11-24T19:59:58.000Z
2020-11-24T19:59:58.000Z
print("Toto")
13
13
0.692308
2
13
4.5
1
0
0
0
0
0
0
0
0
0
0
0
0
13
1
13
13
0.692308
0
0
0
0
0
0.285714
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
fd007dd08e8e8e1d33f548f59b3130093cc2c95f
68
py
Python
nitorch/io/volumes/babel/__init__.py
liamchalcroft/nitorch
0de179aff97244a82213c528f0d6393725c868c9
[ "MIT" ]
46
2020-07-31T10:14:05.000Z
2022-03-24T12:51:46.000Z
nitorch/io/volumes/babel/__init__.py
liamchalcroft/nitorch
0de179aff97244a82213c528f0d6393725c868c9
[ "MIT" ]
36
2020-10-06T19:01:38.000Z
2022-02-03T18:07:35.000Z
nitorch/io/volumes/babel/__init__.py
liamchalcroft/nitorch
0de179aff97244a82213c528f0d6393725c868c9
[ "MIT" ]
6
2021-01-05T14:59:05.000Z
2021-11-18T18:26:45.000Z
from .array import BabelArray from . import array, metadata, utils
17
36
0.779412
9
68
5.888889
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.161765
68
3
37
22.666667
0.929825
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
fd527a11499bbd59a17f1c13a3f4f3f0215322a0
34
py
Python
src/b2sum/__main__.py
karlrink/b2sum
2769aa8505583349aeb9b2c06c96fd95a7252e3c
[ "MIT" ]
1
2022-01-18T13:59:30.000Z
2022-01-18T13:59:30.000Z
src/b2sum/__main__.py
karlrink/b2sum
2769aa8505583349aeb9b2c06c96fd95a7252e3c
[ "MIT" ]
null
null
null
src/b2sum/__main__.py
karlrink/b2sum
2769aa8505583349aeb9b2c06c96fd95a7252e3c
[ "MIT" ]
null
null
null
from .b2sum import main main()
5.666667
23
0.676471
5
34
4.6
0.8
0
0
0
0
0
0
0
0
0
0
0.038462
0.235294
34
5
24
6.8
0.846154
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
fd555d537bce1a776aed73925e9bbfc0f2b4c5ba
159
py
Python
src/common/models/line.py
wenksi/pren-robo-cube-ipcv
e2cf655a7e33aa63dae6e2b2a91abaa11d587f8f
[ "MIT" ]
null
null
null
src/common/models/line.py
wenksi/pren-robo-cube-ipcv
e2cf655a7e33aa63dae6e2b2a91abaa11d587f8f
[ "MIT" ]
null
null
null
src/common/models/line.py
wenksi/pren-robo-cube-ipcv
e2cf655a7e33aa63dae6e2b2a91abaa11d587f8f
[ "MIT" ]
null
null
null
from src.common.models.point import Point import logging class Line: def __init__(self, p1: Point, p2: Point): self.p1 = p1 self.p2 = p2
17.666667
45
0.641509
24
159
4.083333
0.583333
0.22449
0
0
0
0
0
0
0
0
0
0.051282
0.264151
159
8
46
19.875
0.786325
0
0
0
0
0
0
0
0
0
0
0
0
1
0.166667
false
0
0.333333
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
5
b5da6e9818d71e47b2a8974d6bcaabbe40363f22
109
py
Python
unrpyc/__main__.py
Dobby233Liu/unrpyc
d814a8a02d4fa658ec0e4476457c3832258aeca8
[ "MIT" ]
3
2021-12-05T09:26:41.000Z
2022-03-28T12:15:50.000Z
unrpyc/__main__.py
Dobby233Liu/unrpyc
d814a8a02d4fa658ec0e4476457c3832258aeca8
[ "MIT" ]
null
null
null
unrpyc/__main__.py
Dobby233Liu/unrpyc
d814a8a02d4fa658ec0e4476457c3832258aeca8
[ "MIT" ]
1
2022-02-11T22:49:50.000Z
2022-02-11T22:49:50.000Z
#!/usr/bin/env python # FIXME: main is in __init__ from . import main if __name__ == '__main__': main()
15.571429
28
0.66055
16
109
3.75
0.8125
0
0
0
0
0
0
0
0
0
0
0
0.201835
109
6
29
18.166667
0.689655
0.431193
0
0
0
0
0.133333
0
0
0
0
0.166667
0
1
0
true
0
0.333333
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
1
0
1
0
0
0
0
5
b5e2dcce54598d9e11a7a5a80d2478a94f24382d
856
py
Python
question2.py
gusenov/test-tech-mail-ru-python2
70e37a3de447b6f7c4da5add75f65df1b51405fe
[ "MIT" ]
null
null
null
question2.py
gusenov/test-tech-mail-ru-python2
70e37a3de447b6f7c4da5add75f65df1b51405fe
[ "MIT" ]
null
null
null
question2.py
gusenov/test-tech-mail-ru-python2
70e37a3de447b6f7c4da5add75f65df1b51405fe
[ "MIT" ]
null
null
null
l = ['a', 'b', 'c'] # print l.values() # AttributeError: 'list' object has no attribute 'values' # print l.contains() # AttributeError: 'list' object has no attribute 'contains' # print l.sorted() # AttributeError: 'list' object has no attribute 'sorted' # print l.type() # AttributeError: 'list' object has no attribute 'type' # print l.items() # AttributeError: 'list' object has no attribute 'items' # print l.len() # AttributeError: 'list' object has no attribute 'len' # print l.str() # AttributeError: 'list' object has no attribute 'str' # print values(l) # NameError: name 'values' is not defined # print contains(l) # NameError: name 'contains' is not defined # print items(l) # NameError: name 'items' is not defined print sorted(l) # ['a', 'b', 'c'] print type(l) # <type 'list'> print len(l) # 3 print str(l) # ['a', 'b', 'c']
45.052632
81
0.663551
122
856
4.655738
0.196721
0.073944
0.295775
0.332746
0.5
0.46831
0
0
0
0
0
0.001416
0.175234
856
18
82
47.555556
0.803116
0.857477
0
0
0
0
0.030928
0
0
0
0
0
0
0
null
null
0
0
null
null
0.8
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
1
0
5
bd2ee5e4f091521676b07f93e09f40a8276df251
175
py
Python
locintel/graphs/masks/apply/base.py
pedrofreitascampospro/locintel
eb9c56cdc308660c31d90abe9fe62bd3634ba273
[ "MIT" ]
null
null
null
locintel/graphs/masks/apply/base.py
pedrofreitascampospro/locintel
eb9c56cdc308660c31d90abe9fe62bd3634ba273
[ "MIT" ]
null
null
null
locintel/graphs/masks/apply/base.py
pedrofreitascampospro/locintel
eb9c56cdc308660c31d90abe9fe62bd3634ba273
[ "MIT" ]
null
null
null
class ApplyMaskBase(object): def __init__(self): pass def apply_mask(self, *arg, **kwargs): raise NotImplementedError("Please implement in subclass")
25
65
0.674286
19
175
5.947368
0.894737
0
0
0
0
0
0
0
0
0
0
0
0.222857
175
6
66
29.166667
0.830882
0
0
0
0
0
0.16
0
0
0
0
0
0
1
0.4
false
0.2
0
0
0.6
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
5
1f9d52354f52bce8878cf8cf2e60643813c97ead
134
py
Python
lldb/test/API/dotest.py
medismailben/llvm-project
e334a839032fe500c3bba22bf976ab7af13ce1c1
[ "Apache-2.0" ]
2,338
2018-06-19T17:34:51.000Z
2022-03-31T11:00:37.000Z
test/dotest.py
DalavanCloud/lldb
e913eaf2468290fb94c767d474d611b41a84dd69
[ "Apache-2.0" ]
3,740
2019-01-23T15:36:48.000Z
2022-03-31T22:01:13.000Z
test/dotest.py
DalavanCloud/lldb
e913eaf2468290fb94c767d474d611b41a84dd69
[ "Apache-2.0" ]
500
2019-01-23T07:49:22.000Z
2022-03-30T02:59:37.000Z
#!/usr/bin/env python if __name__ == "__main__": import use_lldb_suite import lldbsuite.test lldbsuite.test.run_suite()
16.75
30
0.701493
18
134
4.611111
0.777778
0.313253
0
0
0
0
0
0
0
0
0
0
0.186567
134
7
31
19.142857
0.761468
0.149254
0
0
0
0
0.070796
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
1fe8abbc33a7710e8a33a6cda2fd803e8e6964c5
2,696
py
Python
test/test_make_pdf.py
Konrad-Ziarko/DokiDokiMD
1f1707fe9b5861fb0407daf663f1a45de5a0fdfb
[ "MIT" ]
1
2019-06-30T10:08:01.000Z
2019-06-30T10:08:01.000Z
test/test_make_pdf.py
Konrad-Ziarko/DokiDokiMD
1f1707fe9b5861fb0407daf663f1a45de5a0fdfb
[ "MIT" ]
17
2018-12-23T23:50:06.000Z
2019-12-09T19:17:20.000Z
test/test_make_pdf.py
Konrad-Ziarko/DokiDokiMD
1f1707fe9b5861fb0407daf663f1a45de5a0fdfb
[ "MIT" ]
1
2019-01-30T15:34:29.000Z
2019-01-30T15:34:29.000Z
import os import shutil import unittest from dokidokimd.models import Chapter, Manga, MangaSite RESULTS_DIRECTORY = 'unittest_results_temp_dir' class TestMakePdfMethods(unittest.TestCase): def test_make_pdf1(self): """ Make pdf from previously downloaded images - simulated on copied files """ directory_name = os.path.dirname(__file__) dummy_manga_site = MangaSite('test_site') dummy_manga = Manga('test_manga', 'test_manga_url', dummy_manga_site) dummy_chapter = Chapter(dummy_manga, 'test_chapter_title') self.assertTrue(dummy_chapter.number_of_pages() == 0) source_images_dir = os.path.join(directory_name, 'images') test_tmp_dir = os.path.join(directory_name, RESULTS_DIRECTORY) images_dir = dummy_chapter.get_download_path(test_tmp_dir) os.makedirs(images_dir, exist_ok=True) for file in os.listdir(source_images_dir): file_path = os.path.join(source_images_dir, file) if os.path.isfile(file_path): shutil.copy(file_path, images_dir) result, path_to_pdf = dummy_chapter.make_pdf(test_tmp_dir) self.assertTrue(result) self.assertTrue(dummy_chapter.number_of_pages() == 0) self.assertTrue(os.path.isfile(path_to_pdf)) self.assertTrue(os.path.getsize(path_to_pdf) > 0) os.unlink(path_to_pdf) shutil.rmtree(test_tmp_dir, ignore_errors=True) def test_make_pdf2(self): """ Make pdf from pages in memory - simulated by manually added pages """ directory_name = os.path.dirname(__file__) dummy_manga_site = MangaSite('test_site') dummy_manga = Manga('test_manga', 'test_manga_url', dummy_manga_site) dummy_chapter = Chapter(dummy_manga, 'test_chapter_title') self.assertTrue(dummy_chapter.number_of_pages() == 0) source_images_dir = os.path.join(directory_name, 'images') test_tmp_dir = os.path.join(directory_name, RESULTS_DIRECTORY) for file in os.listdir(source_images_dir): file_path = os.path.join(source_images_dir, file) if os.path.isfile(file_path): with open(file_path, 'rb') as f: dummy_chapter.add_page(f.read()) result, path_to_pdf = dummy_chapter.make_pdf(test_tmp_dir) 
self.assertTrue(result) self.assertTrue(dummy_chapter.number_of_pages() == 4) self.assertTrue(os.path.isfile(path_to_pdf)) self.assertTrue(os.path.getsize(path_to_pdf) > 0) os.unlink(path_to_pdf) shutil.rmtree(test_tmp_dir, ignore_errors=True) if __name__ == '__main__': unittest.main()
39.647059
78
0.680638
361
2,696
4.714681
0.221607
0.049354
0.042303
0.061105
0.718566
0.718566
0.718566
0.718566
0.717979
0.717979
0
0.00381
0.221068
2,696
67
79
40.238806
0.806667
0.050445
0
0.673469
0
0
0.059292
0.009948
0
0
0
0
0.204082
1
0.040816
false
0
0.081633
0
0.142857
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
951748b7de80db96124b09c7152b8db0c633f16b
123
py
Python
Ejercicio 1 - EspacioPorGuion - CP.py
2167-Team1/TeamProject
c2146bc03999677a09d1ee65b3c09ed5a10d9da0
[ "MIT" ]
null
null
null
Ejercicio 1 - EspacioPorGuion - CP.py
2167-Team1/TeamProject
c2146bc03999677a09d1ee65b3c09ed5a10d9da0
[ "MIT" ]
4
2021-11-16T02:36:24.000Z
2021-11-26T03:33:57.000Z
Ejercicio 1 - EspacioPorGuion - CP.py
2167-Team1/TeamProject
c2146bc03999677a09d1ee65b3c09ed5a10d9da0
[ "MIT" ]
4
2021-11-16T01:02:42.000Z
2021-11-27T03:07:36.000Z
texto = input("Ingrese su texto: ") def SpaceToDash(texto): return texto.replace(" ", "-") print (SpaceToDash(texto))
20.5
35
0.666667
14
123
5.857143
0.642857
0.390244
0
0
0
0
0
0
0
0
0
0
0.154472
123
6
36
20.5
0.788462
0
0
0
0
0
0.16129
0
0
0
0
0
0
1
0.25
false
0
0
0.25
0.5
0.25
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
5
951e2d9bb1d117cc6eb13e5bd74c156839364891
79
py
Python
enthought/chaco/axis.py
enthought/etsproxy
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
[ "BSD-3-Clause" ]
3
2016-12-09T06:05:18.000Z
2018-03-01T13:00:29.000Z
enthought/chaco/axis.py
enthought/etsproxy
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
[ "BSD-3-Clause" ]
1
2020-12-02T00:51:32.000Z
2020-12-02T08:48:55.000Z
enthought/chaco/axis.py
enthought/etsproxy
4aafd628611ebf7fe8311c9d1a0abcf7f7bb5347
[ "BSD-3-Clause" ]
null
null
null
# proxy module from __future__ import absolute_import from chaco.axis import *
19.75
38
0.822785
11
79
5.454545
0.727273
0
0
0
0
0
0
0
0
0
0
0
0.139241
79
3
39
26.333333
0.882353
0.151899
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
1f5231c878cf2e7e850dff78cdbe053fc880ade1
1,133
py
Python
vjesala_art.py
njecolina/vjesala-igra-python
1c6d4b492bf6b5dfdd6032de7343089acfbd8335
[ "MIT" ]
null
null
null
vjesala_art.py
njecolina/vjesala-igra-python
1c6d4b492bf6b5dfdd6032de7343089acfbd8335
[ "MIT" ]
null
null
null
vjesala_art.py
njecolina/vjesala-igra-python
1c6d4b492bf6b5dfdd6032de7343089acfbd8335
[ "MIT" ]
null
null
null
stages = [''' +---+ | | O | /|\ | / \ | | ========= ''', ''' +---+ | | O | /|\ | / | | ========= ''', ''' +---+ | | O | /|\ | | | ========= ''', ''' +---+ | | O | /| | | | =========''', ''' +---+ | | O | | | | | ========= ''', ''' +---+ | | O | | | | ========= ''', ''' +---+ | | | | | | ========= '''] logo = ''' __ __ _ ______ __˘__ _ \ \ / / | | ____|/ ____| /\ | | /\ \ \ / / | | |__ | (___ / \ | | / \ \ \/ / | | __| \___ \ / /\ \ | | / /\ \ \ / |__| | |____ ____) / ____ \| |____ / ____ \ \/ \____/|______|_____/_/ \_\______/_/ \_\ "Vješala" igra, ver 0.0.1 na hrvatskom jeziku - Sonja Hranjec 2021. '''
15.736111
68
0.129744
21
1,133
3.190476
0.714286
0.149254
0.179104
0.179104
0.089552
0
0
0
0
0
0
0.015766
0.60812
1,133
71
69
15.957746
0.132883
0
0
0.538462
0
0
0.805825
0.019417
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
0
1
0
0
0
0
0
0
0
0
1
0
0
0
0
1
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
2f0681496b5d81721c08f5a41bffb98565162e3a
97
py
Python
app/ws/admin.py
profefonso/ValuesArray
97cc0b82404d428d4a009e65aa52bdac0e129401
[ "MIT" ]
1
2020-09-07T19:42:58.000Z
2020-09-07T19:42:58.000Z
app/ws/admin.py
profefonso/ValuesArray
97cc0b82404d428d4a009e65aa52bdac0e129401
[ "MIT" ]
null
null
null
app/ws/admin.py
profefonso/ValuesArray
97cc0b82404d428d4a009e65aa52bdac0e129401
[ "MIT" ]
null
null
null
from django.contrib import admin from .models import ValueArray admin.site.register(ValueArray)
19.4
32
0.835052
13
97
6.230769
0.692308
0
0
0
0
0
0
0
0
0
0
0
0.103093
97
4
33
24.25
0.931034
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
2f91534d6203d0dd0817a4758101a85db1d5248c
35
py
Python
addr2line/__init__.py
ramwin/addr2line
24553fd5096fe2983a699cb12f97e8d124aa2fb5
[ "MIT" ]
null
null
null
addr2line/__init__.py
ramwin/addr2line
24553fd5096fe2983a699cb12f97e8d124aa2fb5
[ "MIT" ]
null
null
null
addr2line/__init__.py
ramwin/addr2line
24553fd5096fe2983a699cb12f97e8d124aa2fb5
[ "MIT" ]
null
null
null
from .base import Addr2lineContext
17.5
34
0.857143
4
35
7.5
1
0
0
0
0
0
0
0
0
0
0
0.032258
0.114286
35
1
35
35
0.935484
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
2f9d217917b5cd53b39a95aa30af0b5f70d42126
47
py
Python
src_old/nsessoracle/mixins/__init__.py
rishikesh67/django-tenant-oracle-schemas
918a64e842b678fc506eadbb4d7e51b0b38ab0a2
[ "MIT" ]
null
null
null
src_old/nsessoracle/mixins/__init__.py
rishikesh67/django-tenant-oracle-schemas
918a64e842b678fc506eadbb4d7e51b0b38ab0a2
[ "MIT" ]
8
2019-12-04T23:26:11.000Z
2022-02-10T09:42:18.000Z
src_old/nsessoracle/mixins/__init__.py
rishikesh67/django-tenant-oracle-schemas
918a64e842b678fc506eadbb4d7e51b0b38ab0a2
[ "MIT" ]
2
2019-06-26T05:31:16.000Z
2019-07-01T12:22:50.000Z
from .tenant_data_mixin import TenantDataMixin
23.5
46
0.893617
6
47
6.666667
1
0
0
0
0
0
0
0
0
0
0
0
0.085106
47
1
47
47
0.930233
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
85d8fe5218e7c4f0b78a62ea39a8268976962f92
196
py
Python
rsopt/conversion.py
radiasoft/rsopt
6d4d123dd61e30c7f562b2f5a28c3ccbbcddbde3
[ "Apache-2.0" ]
6
2020-11-03T16:51:50.000Z
2022-02-13T20:40:05.000Z
rsopt/conversion.py
radiasoft/rsopt
6d4d123dd61e30c7f562b2f5a28c3ccbbcddbde3
[ "Apache-2.0" ]
97
2020-05-18T18:24:49.000Z
2022-03-23T15:42:42.000Z
rsopt/conversion.py
radiasoft/rsopt
6d4d123dd61e30c7f562b2f5a28c3ccbbcddbde3
[ "Apache-2.0" ]
4
2020-08-18T23:19:55.000Z
2021-12-08T20:55:09.000Z
def create_switchyard(input_file, file_code): from rsbeams.rsdata.switchyard import Switchyard switchyard = Switchyard() switchyard.read(input_file, file_code) return switchyard
24.5
52
0.770408
23
196
6.347826
0.521739
0.410959
0.178082
0.232877
0
0
0
0
0
0
0
0
0.163265
196
8
53
24.5
0.890244
0
0
0
0
0
0
0
0
0
0
0
0
1
0.2
false
0
0.2
0
0.6
0
1
0
0
null
1
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
5
85fcae2a03f0c51bb31ec2fb7187d02c3d1ffd8e
1,609
py
Python
iguanas/rule_selection/tests/test_base_filter.py
Aditya-Kapadiya/Iguanas
dcc2c1e71f00574c3427fa530191e7079834c11b
[ "Apache-2.0" ]
20
2021-12-22T14:15:03.000Z
2022-03-31T22:46:42.000Z
iguanas/rule_selection/tests/test_base_filter.py
Aditya-Kapadiya/Iguanas
dcc2c1e71f00574c3427fa530191e7079834c11b
[ "Apache-2.0" ]
12
2022-01-18T16:55:56.000Z
2022-03-10T11:39:39.000Z
iguanas/rule_selection/tests/test_base_filter.py
Aditya-Kapadiya/Iguanas
dcc2c1e71f00574c3427fa530191e7079834c11b
[ "Apache-2.0" ]
5
2021-12-25T07:28:29.000Z
2022-02-23T09:40:03.000Z
import pytest import pandas as pd from iguanas.rule_selection._base_filter import _BaseFilter from iguanas.rules import Rules @pytest.fixture def _create_data(): X_rules = pd.DataFrame({ 'A': [1, 0, 1], 'B': [1, 1, 1] }) return X_rules def test_transform(_create_data): X_rules = _create_data bf = _BaseFilter(rules_to_keep=['A'], rules=None) X_rules_ = bf.transform(X_rules) pd.testing.assert_frame_equal(X_rules_, X_rules[['A']]) # With rules rules = Rules( rule_strings={ 'A': "X['a']>1", 'B': "X['b']>1" } ) bf = _BaseFilter(rules_to_keep=['A'], rules=rules) X_rules_ = bf.transform(X_rules) pd.testing.assert_frame_equal(X_rules_, X_rules[['A']]) assert bf.rules.rule_strings == {'A': "X['a']>1"} def test_fit_transform(_create_data): bf = _BaseFilter(rules_to_keep=['A'], rules=None) # Just create dummy fit method for testing bf.fit = lambda X_rules, y, sample_weight: None X_rules = _create_data bf.rules_to_keep = ['A'] X_rules_ = bf.fit_transform(X_rules) pd.testing.assert_frame_equal(X_rules_, X_rules[['A']]) # With rules rules = Rules( rule_strings={ 'A': "X['a']>1", 'B': "X['b']>1" } ) bf = _BaseFilter(rules_to_keep=['A'], rules=rules) # Just create dummy fit method for testing bf.fit = lambda X_rules, y, sample_weight: None X_rules_ = bf.fit_transform(X_rules) pd.testing.assert_frame_equal(X_rules_, X_rules[['A']]) assert bf.rules.rule_strings == {'A': "X['a']>1"}
29.254545
59
0.620261
239
1,609
3.853556
0.188285
0.143322
0.043431
0.065147
0.7557
0.736156
0.736156
0.736156
0.736156
0.736156
0
0.00967
0.228713
1,609
54
60
29.796296
0.732474
0.064015
0
0.590909
0
0
0.043304
0
0
0
0
0
0.136364
1
0.068182
false
0
0.090909
0
0.181818
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
c809d932b9311dfb1f5787254fe055da5226a1c6
122
py
Python
take_a_number/admin.py
take-a-number/api
fd44f8b328e76511dcb248d330faa1d3faf0ff6f
[ "MIT" ]
1
2019-02-12T16:23:37.000Z
2019-02-12T16:23:37.000Z
take_a_number/admin.py
take-a-number/api
fd44f8b328e76511dcb248d330faa1d3faf0ff6f
[ "MIT" ]
8
2019-02-17T23:06:29.000Z
2019-02-26T02:53:29.000Z
take_a_number/admin.py
take-a-number/api
fd44f8b328e76511dcb248d330faa1d3faf0ff6f
[ "MIT" ]
null
null
null
from django.contrib import admin # from .models import Course # Register your models here. # admin.site.register(Course)
20.333333
32
0.778689
17
122
5.588235
0.647059
0
0
0
0
0
0
0
0
0
0
0
0.139344
122
5
33
24.4
0.904762
0.663934
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
c80fd5c863a9702cad2fa4b9b77f75606d32289d
38
py
Python
lesson01/xuegangqiang/hello.py
herrywen-nanj/51reboot
1130c79a360e1b548a6eaad176eb60f8bed22f40
[ "Apache-2.0" ]
null
null
null
lesson01/xuegangqiang/hello.py
herrywen-nanj/51reboot
1130c79a360e1b548a6eaad176eb60f8bed22f40
[ "Apache-2.0" ]
null
null
null
lesson01/xuegangqiang/hello.py
herrywen-nanj/51reboot
1130c79a360e1b548a6eaad176eb60f8bed22f40
[ "Apache-2.0" ]
null
null
null
#encoding: utf8 print("hello world")
9.5
20
0.710526
5
38
5.4
1
0
0
0
0
0
0
0
0
0
0
0.030303
0.131579
38
3
21
12.666667
0.787879
0.368421
0
0
0
0
0.478261
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
c816140cbe126f091d030f258bd5da93c340d0b9
78
py
Python
src/api/__init__.py
stephan-01010011/botty
d2d82602d1ecfb6be9d6af91b57895aae39e45de
[ "MIT" ]
5
2022-01-21T20:08:24.000Z
2022-01-28T14:37:17.000Z
src/api/__init__.py
stephan-01010011/botty
d2d82602d1ecfb6be9d6af91b57895aae39e45de
[ "MIT" ]
1
2022-02-10T08:21:22.000Z
2022-02-10T08:38:54.000Z
src/api/__init__.py
stephan-01010011/botty
d2d82602d1ecfb6be9d6af91b57895aae39e45de
[ "MIT" ]
6
2022-01-29T05:09:57.000Z
2022-02-11T21:48:52.000Z
from .generic_api import GenericApi from .discord_embeds import DiscordEmbeds
26
41
0.871795
10
78
6.6
0.8
0
0
0
0
0
0
0
0
0
0
0
0.102564
78
2
42
39
0.942857
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
c83d790655616d4075e57241f69eac74f562e716
384
py
Python
environments_utils/is_os.py
LucaCappelletti94/environments_utils
c6b8cc7a0fa07f770ed361f3bafaf1adee138f77
[ "MIT" ]
null
null
null
environments_utils/is_os.py
LucaCappelletti94/environments_utils
c6b8cc7a0fa07f770ed361f3bafaf1adee138f77
[ "MIT" ]
null
null
null
environments_utils/is_os.py
LucaCappelletti94/environments_utils
c6b8cc7a0fa07f770ed361f3bafaf1adee138f77
[ "MIT" ]
null
null
null
"""Utilities relative to operative systems.""" import sys def is_macos() -> bool: """Return whether OS is macOS.""" return sys.platform == "darwin" def is_windows() -> bool: """Return whether OS is Windows.""" return sys.platform in ("win32", "cygwin") def is_linux() -> bool: """Return whether OS is Linux.""" return sys.platform in ("linux", "linux2")
21.333333
46
0.630208
50
384
4.78
0.44
0.062762
0.213389
0.238494
0.263598
0
0
0
0
0
0
0.009804
0.203125
384
17
47
22.588235
0.771242
0.328125
0
0
0
0
0.118143
0
0
0
0
0
0
1
0.428571
true
0
0.142857
0
1
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
0
1
0
0
5
c84b2d6f8b394fc074927d883ef63a8dfb52867a
41
py
Python
test/com/facebook/buck/features/python/testdata/python_binary/preload_deps/preload_order.py
Unknoob/buck
2dfc734354b326f2f66896dde7746a11965d5a13
[ "Apache-2.0" ]
8,027
2015-01-02T05:31:44.000Z
2022-03-31T07:08:09.000Z
test/com/facebook/buck/features/python/testdata/python_binary/preload_deps/preload_order.py
Unknoob/buck
2dfc734354b326f2f66896dde7746a11965d5a13
[ "Apache-2.0" ]
2,355
2015-01-01T15:30:53.000Z
2022-03-30T20:21:16.000Z
test/com/facebook/buck/features/python/testdata/python_binary/preload_deps/preload_order.py
Unknoob/buck
2dfc734354b326f2f66896dde7746a11965d5a13
[ "Apache-2.0" ]
1,280
2015-01-09T03:29:04.000Z
2022-03-30T15:14:14.000Z
import ctypes ctypes.CDLL(None).func()
8.2
24
0.731707
6
41
5
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.121951
41
4
25
10.25
0.833333
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
c08c0a07011156bc1e1739857200ee053b7961b1
115
py
Python
oiasg/define/_resource.py
will7101/OIASG
44badff57689da99a2c9896d176b32e7b51d42b5
[ "BSD-3-Clause" ]
1
2018-03-17T10:07:11.000Z
2018-03-17T10:07:11.000Z
oiasg/define/_resource.py
will7101/OIASG
44badff57689da99a2c9896d176b32e7b51d42b5
[ "BSD-3-Clause" ]
1
2018-03-17T11:35:54.000Z
2018-03-17T11:35:54.000Z
oiasg/define/_resource.py
will7101/OIASG
44badff57689da99a2c9896d176b32e7b51d42b5
[ "BSD-3-Clause" ]
null
null
null
{ 'FONTS': [ ('杨任东竹石体-Regular.ttf', '杨任东竹石体-Regular'), ('杨任东竹石体-Semibold.ttf', '杨任东竹石体-Semibold') ], }
16.428571
45
0.565217
11
115
5.909091
0.454545
0.4
0
0
0
0
0
0
0
0
0
0
0.182609
115
6
46
19.166667
0.691489
0
0
0
0
0
0.651376
0
0
0
0
0
0
1
0
true
0
0
0
0
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
c0a17ee1377f938454f5acedd448736b70de37ee
154
py
Python
follows/admin.py
mohamed17717/Like-Reddit-Backend-Clone
d60d7a4625ee0f7354a21e53c26c7c52746d735f
[ "MIT" ]
1
2022-01-10T12:00:59.000Z
2022-01-10T12:00:59.000Z
follows/admin.py
mohamed17717/Like-Reddit-Backend-Clone
d60d7a4625ee0f7354a21e53c26c7c52746d735f
[ "MIT" ]
null
null
null
follows/admin.py
mohamed17717/Like-Reddit-Backend-Clone
d60d7a4625ee0f7354a21e53c26c7c52746d735f
[ "MIT" ]
null
null
null
from django.contrib import admin from follows.models import UserFollow, ThreadFollow admin.site.register(UserFollow) admin.site.register(ThreadFollow)
19.25
51
0.837662
19
154
6.789474
0.578947
0.139535
0.263566
0
0
0
0
0
0
0
0
0
0.090909
154
7
52
22
0.921429
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
c0a4439eec24d573b0d6e5fdfb7e5e9388f02d65
134
py
Python
cpg-language-python/src/test/resources/python/issue473.py
anon767/cpg
985f981a94c1a43f4f4363f6d51a8087bad2430a
[ "Apache-2.0" ]
null
null
null
cpg-language-python/src/test/resources/python/issue473.py
anon767/cpg
985f981a94c1a43f4f4363f6d51a8087bad2430a
[ "Apache-2.0" ]
null
null
null
cpg-language-python/src/test/resources/python/issue473.py
anon767/cpg
985f981a94c1a43f4f4363f6d51a8087bad2430a
[ "Apache-2.0" ]
1
2021-12-17T09:16:39.000Z
2021-12-17T09:16:39.000Z
if sys.version_info.minor > 9: phr = {"user_id": user_id} | content else: z = {"user_id": user_id} phr = {**z, **content}
22.333333
40
0.574627
21
134
3.428571
0.571429
0.333333
0.277778
0.333333
0
0
0
0
0
0
0
0.009709
0.231343
134
5
41
26.8
0.68932
0
0
0
0
0
0.104478
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
c0a6ad76b49e490ec9d2c5e7a6cbb2dd09728dbf
55
py
Python
gym_duckhunt/envs/__init__.py
borijang/gym-duckhunt
08fc5a5f6117ce782a696db5eb15f67bbc5ff8e6
[ "MIT" ]
null
null
null
gym_duckhunt/envs/__init__.py
borijang/gym-duckhunt
08fc5a5f6117ce782a696db5eb15f67bbc5ff8e6
[ "MIT" ]
null
null
null
gym_duckhunt/envs/__init__.py
borijang/gym-duckhunt
08fc5a5f6117ce782a696db5eb15f67bbc5ff8e6
[ "MIT" ]
null
null
null
from gym_duckhunt.envs.duckhunt_env import DuckHuntEnv
27.5
54
0.890909
8
55
5.875
0.875
0
0
0
0
0
0
0
0
0
0
0
0.072727
55
1
55
55
0.921569
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
c0e2a4256a320717e44188ca15cbdec7a0618d0d
231
py
Python
src/accent_analyser/__init__.py
stefantaubert/eng2ipa-accent-transformer
d620c70b06c83119402e255085046747ade87444
[ "MIT" ]
null
null
null
src/accent_analyser/__init__.py
stefantaubert/eng2ipa-accent-transformer
d620c70b06c83119402e255085046747ade87444
[ "MIT" ]
null
null
null
src/accent_analyser/__init__.py
stefantaubert/eng2ipa-accent-transformer
d620c70b06c83119402e255085046747ade87444
[ "MIT" ]
null
null
null
from accent_analyser.app import load_probabilities from accent_analyser.core import (ProbabilitiesDict, Symbols, check_probabilities_are_valid, replace_with_prob)
46.2
64
0.627706
21
231
6.52381
0.761905
0.145985
0.262774
0
0
0
0
0
0
0
0
0
0.341991
231
4
65
57.75
0.901316
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
2398a6803f4dd0c8aff6e76170e4cffab68edd4e
73
py
Python
backend/server/processor/__init__.py
shiv12095/realtimeviz
ee2bf10b5f9467212f9a9ce8957d80456ebd0259
[ "MIT" ]
1
2021-03-03T13:54:15.000Z
2021-03-03T13:54:15.000Z
backend/server/processor/__init__.py
shiv12095/realtimeviz
ee2bf10b5f9467212f9a9ce8957d80456ebd0259
[ "MIT" ]
null
null
null
backend/server/processor/__init__.py
shiv12095/realtimeviz
ee2bf10b5f9467212f9a9ce8957d80456ebd0259
[ "MIT" ]
1
2021-03-03T13:59:48.000Z
2021-03-03T13:59:48.000Z
from .lime_bike_socket_feed_processor import LimeBikeSocketFeedProcessor
36.5
72
0.931507
8
73
8
1
0
0
0
0
0
0
0
0
0
0
0
0.054795
73
1
73
73
0.927536
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
f1a7d25bbf44b54e295f1543f476ff76fa5c58f7
145
py
Python
tests2/config.py
Nlioxa/QA-Labs
211cbfb9e8be50e8192d4097c8c4f3f71c9bb59b
[ "MIT" ]
null
null
null
tests2/config.py
Nlioxa/QA-Labs
211cbfb9e8be50e8192d4097c8c4f3f71c9bb59b
[ "MIT" ]
null
null
null
tests2/config.py
Nlioxa/QA-Labs
211cbfb9e8be50e8192d4097c8c4f3f71c9bb59b
[ "MIT" ]
null
null
null
from exceptionManager import ExceptionManager from exception import TrueException, FalseException from server import Server, FactoryServerHandler
48.333333
51
0.896552
14
145
9.285714
0.571429
0
0
0
0
0
0
0
0
0
0
0
0.089655
145
3
52
48.333333
0.984848
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
f1b818c9af09b63a55ef05c345ca4dcd557a0323
167
py
Python
SimCalorimetry/EcalTrigPrimProducers/python/ecalTriggerPrimitiveDigis_readDBOffline_cff.py
ckamtsikis/cmssw
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
[ "Apache-2.0" ]
852
2015-01-11T21:03:51.000Z
2022-03-25T21:14:00.000Z
SimCalorimetry/EcalTrigPrimProducers/python/ecalTriggerPrimitiveDigis_readDBOffline_cff.py
ckamtsikis/cmssw
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
[ "Apache-2.0" ]
30,371
2015-01-02T00:14:40.000Z
2022-03-31T23:26:05.000Z
SimCalorimetry/EcalTrigPrimProducers/python/ecalTriggerPrimitiveDigis_readDBOffline_cff.py
ckamtsikis/cmssw
ea19fe642bb7537cbf58451dcf73aa5fd1b66250
[ "Apache-2.0" ]
3,240
2015-01-02T05:53:18.000Z
2022-03-31T17:24:21.000Z
import FWCore.ParameterSet.Config as cms # Trigger Primitive Producer from SimCalorimetry.EcalTrigPrimProducers.ecalTriggerPrimitiveDigis_readDBOffline_cfi import *
27.833333
94
0.88024
16
167
9.0625
0.9375
0
0
0
0
0
0
0
0
0
0
0
0.083832
167
5
95
33.4
0.947712
0.155689
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
f1bed78838325dc43f0863523b3cefb43090b929
31
py
Python
src/data/__init__.py
super6liu/technical-analysis-julian
dd8868b65d80f78e536f3471d4dc09440de48e62
[ "MIT" ]
null
null
null
src/data/__init__.py
super6liu/technical-analysis-julian
dd8868b65d80f78e536f3471d4dc09440de48e62
[ "MIT" ]
null
null
null
src/data/__init__.py
super6liu/technical-analysis-julian
dd8868b65d80f78e536f3471d4dc09440de48e62
[ "MIT" ]
1
2021-10-03T13:18:09.000Z
2021-10-03T13:18:09.000Z
from src.data.data import Data
15.5
30
0.806452
6
31
4.166667
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.129032
31
1
31
31
0.925926
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
f1e3ccd1951c725074018b732f5020fc51f1a744
24
py
Python
src/test.py
MayD524/c69_shell
e89a2b6c90f59b5ef6db329dab39e595d28b5aa5
[ "MIT" ]
null
null
null
src/test.py
MayD524/c69_shell
e89a2b6c90f59b5ef6db329dab39e595d28b5aa5
[ "MIT" ]
null
null
null
src/test.py
MayD524/c69_shell
e89a2b6c90f59b5ef6db329dab39e595d28b5aa5
[ "MIT" ]
null
null
null
print("it worked again")
24
24
0.75
4
24
4.5
1
0
0
0
0
0
0
0
0
0
0
0
0.083333
24
1
24
24
0.818182
0
0
0
0
0
0.6
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
f1f7da6b4e80330bb82573cae9657385086edceb
22
py
Python
GetErDone/GetErDone.py
kdmundale/GetErDone
725fa14e1dce6476766ce44910034a5d95c1bd3b
[ "MIT" ]
null
null
null
GetErDone/GetErDone.py
kdmundale/GetErDone
725fa14e1dce6476766ce44910034a5d95c1bd3b
[ "MIT" ]
null
null
null
GetErDone/GetErDone.py
kdmundale/GetErDone
725fa14e1dce6476766ce44910034a5d95c1bd3b
[ "MIT" ]
null
null
null
print('initial test')
11
21
0.727273
3
22
5.333333
1
0
0
0
0
0
0
0
0
0
0
0
0.090909
22
1
22
22
0.8
0
0
0
0
0
0.545455
0
0
0
0
0
0
1
0
true
0
0
0
0
1
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
1
0
5
7b3164568dfb6b206fb5de8b12ea33c2f90e025c
77
py
Python
nicos_ess/commands/epics.py
ebadkamil/nicos
0355a970d627aae170c93292f08f95759c97f3b5
[ "CC-BY-3.0", "Apache-2.0", "CC-BY-4.0" ]
12
2019-11-06T15:40:36.000Z
2022-01-01T16:23:00.000Z
nicos_ess/commands/epics.py
ebadkamil/nicos
0355a970d627aae170c93292f08f95759c97f3b5
[ "CC-BY-3.0", "Apache-2.0", "CC-BY-4.0" ]
4
2019-11-08T10:18:16.000Z
2021-01-13T13:07:29.000Z
nicos_ess/commands/epics.py
ISISComputingGroup/nicos
94cb4d172815919481f8c6ee686f21ebb76f2068
[ "CC-BY-3.0", "Apache-2.0", "CC-BY-4.0" ]
6
2020-01-11T10:52:30.000Z
2022-02-25T12:35:23.000Z
from nicos.devices.epics import pvget, pvput # pylint:disable=unused-import
38.5
76
0.805195
11
77
5.636364
0.909091
0
0
0
0
0
0
0
0
0
0
0
0.103896
77
1
77
77
0.898551
0.363636
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
9e80571da093975821272bc18d47fdfac379f563
173
py
Python
katas/kyu_7/dropcaps.py
the-zebulan/CodeWars
1eafd1247d60955a5dfb63e4882e8ce86019f43a
[ "MIT" ]
40
2016-03-09T12:26:20.000Z
2022-03-23T08:44:51.000Z
katas/kyu_7/dropcaps.py
akalynych/CodeWars
1eafd1247d60955a5dfb63e4882e8ce86019f43a
[ "MIT" ]
null
null
null
katas/kyu_7/dropcaps.py
akalynych/CodeWars
1eafd1247d60955a5dfb63e4882e8ce86019f43a
[ "MIT" ]
36
2016-11-07T19:59:58.000Z
2022-03-31T11:18:27.000Z
from re import split def drop_cap(string): return ''.join(a.capitalize() if not a.isspace() and len(a) > 2 else a for a in split(r'(\s+)', string))
24.714286
74
0.578035
29
173
3.413793
0.793103
0
0
0
0
0
0
0
0
0
0
0.007937
0.271676
173
6
75
28.833333
0.777778
0
0
0
0
0
0.028902
0
0
0
0
0
0
1
0.25
false
0
0.25
0.25
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
9e985837b8b900c0e1e92ddff4a3a11c794d65c1
29
py
Python
example_snippets/multimenus_snippets/NewSnippets/SymPy/Manipulating expressions/Exponentials and Logarithms/Simplification, possibly to trig functions.py
kuanpern/jupyterlab-snippets-multimenus
477f51cfdbad7409eab45abe53cf774cd70f380c
[ "BSD-3-Clause" ]
null
null
null
example_snippets/multimenus_snippets/NewSnippets/SymPy/Manipulating expressions/Exponentials and Logarithms/Simplification, possibly to trig functions.py
kuanpern/jupyterlab-snippets-multimenus
477f51cfdbad7409eab45abe53cf774cd70f380c
[ "BSD-3-Clause" ]
null
null
null
example_snippets/multimenus_snippets/NewSnippets/SymPy/Manipulating expressions/Exponentials and Logarithms/Simplification, possibly to trig functions.py
kuanpern/jupyterlab-snippets-multimenus
477f51cfdbad7409eab45abe53cf774cd70f380c
[ "BSD-3-Clause" ]
1
2021-02-04T04:51:48.000Z
2021-02-04T04:51:48.000Z
exptrigsimp(exp(z) + exp(-z))
29
29
0.655172
5
29
3.8
0.6
0.421053
0
0
0
0
0
0
0
0
0
0
0.068966
29
1
29
29
0.703704
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0
0
0
0
1
1
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
7b85e98cb7ac332ea84237eb0f930cdedca00d37
158
py
Python
Python/Python.py
JarryShaw/HelloWorld
669984fa415e9bb65f5b7c261ec4f87ffbe56c6d
[ "Apache-2.0" ]
1
2017-12-22T14:15:08.000Z
2017-12-22T14:15:08.000Z
Python/Python.py
JarryShaw/HelloWorld
669984fa415e9bb65f5b7c261ec4f87ffbe56c6d
[ "Apache-2.0" ]
1
2018-01-16T09:22:52.000Z
2018-01-16T09:22:52.000Z
Python/Python.py
JarryShaw/HelloWorld
669984fa415e9bb65f5b7c261ec4f87ffbe56c6d
[ "Apache-2.0" ]
1
2018-01-16T07:50:00.000Z
2018-01-16T07:50:00.000Z
#!/usr/bin/env python # -*- coding: utf-8 -*- from __future__ import print_function print('Across the Great Wall, we can reach every corner in the world.')
22.571429
71
0.71519
25
158
4.32
0.92
0
0
0
0
0
0
0
0
0
0
0.007519
0.158228
158
6
72
26.333333
0.804511
0.265823
0
0
0
0
0.54386
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
1
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
1
0
5
7b8e36151963bb1d6549ff828fe4342c5da0a7a2
46
py
Python
tests/__init__.py
BenjaminDavison/aws_data_toolkit
3980b745a2cac032bd751fd1aa80f2d49f959faa
[ "MIT" ]
null
null
null
tests/__init__.py
BenjaminDavison/aws_data_toolkit
3980b745a2cac032bd751fd1aa80f2d49f959faa
[ "MIT" ]
147
2020-04-26T16:08:08.000Z
2022-03-27T18:32:18.000Z
tests/__init__.py
BenjaminDavison/aws_data_toolkit
3980b745a2cac032bd751fd1aa80f2d49f959faa
[ "MIT" ]
null
null
null
"""Unit test package for aws_data_toolkit."""
23
45
0.73913
7
46
4.571429
1
0
0
0
0
0
0
0
0
0
0
0
0.108696
46
1
46
46
0.780488
0.847826
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
5
7ba3bd6037ba5caed4f968e6f6c641b2d589d8ea
138
py
Python
flowcat/classifier/__init__.py
xiamaz/flowCat
5fea92eff3112ea3bb669595b469735b2bfa3938
[ "MIT" ]
4
2020-03-06T14:06:12.000Z
2021-06-25T15:03:54.000Z
flowcat/classifier/__init__.py
xiamaz/flowCat
5fea92eff3112ea3bb669595b469735b2bfa3938
[ "MIT" ]
3
2020-03-25T10:54:52.000Z
2020-11-26T19:06:23.000Z
flowcat/classifier/__init__.py
xiamaz/flowCat
5fea92eff3112ea3bb669595b469735b2bfa3938
[ "MIT" ]
2
2020-04-14T11:26:25.000Z
2021-04-02T19:25:52.000Z
from .classifier import SOMClassifier, SOMClassifierConfig from .saliency import SOMSaliency from .models import create_model_multi_input
34.5
58
0.876812
16
138
7.375
0.75
0
0
0
0
0
0
0
0
0
0
0
0.094203
138
3
59
46
0.944
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
c8a61c626df704a345f1d0e0cafb9c3e98bf1a83
1,093
py
Python
Anotacoes/aula9.py
kaiquesouzasantos/python-solyd
0dffcc8f5a163bca15d0967dd243a9f703779936
[ "MIT" ]
null
null
null
Anotacoes/aula9.py
kaiquesouzasantos/python-solyd
0dffcc8f5a163bca15d0967dd243a9f703779936
[ "MIT" ]
null
null
null
Anotacoes/aula9.py
kaiquesouzasantos/python-solyd
0dffcc8f5a163bca15d0967dd243a9f703779936
[ "MIT" ]
null
null
null
# ===================== abrir texto(.txt) ===================== open('Caminho\NomeDoArvivo.[tipo_primitivo]') # open() => abrir arquivos em python # \ = \\ dentro open('Caminho\NomeDoArvivo.[tipo_primitivo]','w') # 'w' => (w = write) cria o arquivo ou sobreescreve open('Caminho\NomeDoArvivo.[tipo_primitivo]','r') # 'r' => (r = read) lê o arquivo open('Caminho\NomeDoArvivo.[tipo_primitivo]','r+') # 'r+' => escreve e lê o arquivo open('Caminho\NomeDoArvivo.[tipo_primitivo]','a') # 'a' => (a = append) cria(adiciona) o arquivo open('Caminho\NomeDoArvivo.png','rb') # => hexadecimal do arquivo arquivo = open('Caminho\NomeDoArvivo.[tipo_primitivo]','w') arquivo = open('Caminho\NomeDoArvivo.[tipo_primitivo]','r') arquivo.write("texto que sera escrito dento do arquivo") # write() => metodo que escreve print(arquivo.read()) # read() => metodo de leitura for i in range(0,1001): arquivo.write(str(i)+" - ") for linha in arquivo: print(linha) # ===================== abrir bytes ===================== open('Caminho\NomeDoArvivo.[tipo_primitivo]','b')
43.72
102
0.617566
131
1,093
5.091603
0.374046
0.148426
0.310345
0.323838
0.53973
0.385307
0.263868
0.137931
0
0
0
0.005308
0.138152
1,093
24
103
45.541667
0.70276
0.376029
0
0
0
0
0.57764
0.496894
0
0
0
0.041667
0
0
null
null
0
0
null
null
0.133333
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
null
0
0
1
0
1
0
0
0
0
0
0
0
0
5
c8f0fb57dcd152c68fb189cf235fd2233078697d
89
py
Python
django_apollo_example/api/admin.py
jknaresh/django-graphql-ariadne
6583d76c45f0e7244925601926cbd1c017c4f74f
[ "Apache-2.0" ]
null
null
null
django_apollo_example/api/admin.py
jknaresh/django-graphql-ariadne
6583d76c45f0e7244925601926cbd1c017c4f74f
[ "Apache-2.0" ]
null
null
null
django_apollo_example/api/admin.py
jknaresh/django-graphql-ariadne
6583d76c45f0e7244925601926cbd1c017c4f74f
[ "Apache-2.0" ]
null
null
null
from django.contrib import admin from api.models import Post admin.site.register(Post)
14.833333
32
0.808989
14
89
5.142857
0.714286
0
0
0
0
0
0
0
0
0
0
0
0.123596
89
5
33
17.8
0.923077
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
a82d92eec56c0ad3e867e6cf1b2ea6843ef9862a
2,326
py
Python
detection/sample_tests/detection/unit_testing.py
Taicon42/CapCorrect
686851cba54b44b5edb6c2b56ff3b95f387ff4b1
[ "MIT" ]
null
null
null
detection/sample_tests/detection/unit_testing.py
Taicon42/CapCorrect
686851cba54b44b5edb6c2b56ff3b95f387ff4b1
[ "MIT" ]
null
null
null
detection/sample_tests/detection/unit_testing.py
Taicon42/CapCorrect
686851cba54b44b5edb6c2b56ff3b95f387ff4b1
[ "MIT" ]
null
null
null
import unittest import preparation_functions as pf import error_detection_functions as edf import error_correction_functions as ecf import exporting_functions as ef class ErrorTotalTests(unittest.TestCase): def test_filtered_total_errors_detected(self): """Test with profanity filter on""" text_list, timestamps = pf.get_file("GenerateSRT.txt") client = pf.initialize_api() sentences = pf.print_sentences(text_list) final_error_total = 0 for i, token in enumerate(sentences): sequence_switched, end_matches, offset_list, err_message, sentence_error_total = \ edf.detect_errors(str(sentences[i]), client, False) final_error_total += sentence_error_total self.assertEqual(final_error_total, 8) def test_unfiltered_total_errors_detected(self): """Test with profanity filter off""" text_list, timestamps = pf.get_file("GenerateSRT.txt") client = pf.initialize_api() sentences = pf.print_sentences(text_list) final_error_total = 0 for i, token in enumerate(sentences): sequence_switched, end_matches, offset_list, err_message, sentence_error_total = \ edf.detect_errors(str(sentences[i]), client, True) final_error_total += sentence_error_total self.assertEqual(final_error_total, 6) def test_error_type_detected(self): client = pf.initialize_api() test_str = "An eror in a short sentence." _, _, _, err_message, _ = \ edf.detect_errors(test_str, client, False) self.assertEqual(err_message, "Spelling mistake") def test_multiple_error_types_detected(self): client = pf.initialize_api() test_str = "An eror in a shit short sentence." _, _, _, err_message, _ = \ edf.detect_errors(test_str, client, False) self.assertEqual(err_message, "Spelling mistake, Profanity, ") def test_unfiltered_multiple_error_types_detected(self): client = pf.initialize_api() test_str = "An eror in a shit short sentence." _, _, _, err_message, _ = \ edf.detect_errors(test_str, client, True) self.assertEqual(err_message, "Spelling mistake, ") if __name__ == '__main__': unittest.main()
35.242424
94
0.675838
280
2,326
5.242857
0.271429
0.06812
0.061308
0.071526
0.79564
0.79564
0.768392
0.768392
0.705722
0.705722
0
0.002256
0.237747
2,326
65
95
35.784615
0.825719
0.025795
0
0.521739
0
0
0.086475
0
0
0
0
0
0.108696
1
0.108696
false
0
0.108696
0
0.23913
0.043478
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
b575efaa222a5d25dc5d3faf4b1e7fe66c8e008c
507
py
Python
main/sitemaps.py
fideledev/Bhano-Blog
1ad541cb18bc9cb468d48136d17d058d2863fb59
[ "MIT" ]
null
null
null
main/sitemaps.py
fideledev/Bhano-Blog
1ad541cb18bc9cb468d48136d17d058d2863fb59
[ "MIT" ]
3
2021-09-08T03:40:19.000Z
2022-01-13T03:55:28.000Z
main/sitemaps.py
fidele000/Bhano-Blog
21810f6ce2d0e5ae32151673a6a42a7eb5168a7e
[ "MIT" ]
null
null
null
from django.contrib.sitemaps import Sitemap from .models import Post,Category class PostSitemap(Sitemap): changefreq = 'always' priority = 0.9 def items(self): return Post.published.all().order_by('-publish') def lastmod(self, obj): return obj.updated class CategorySitemap(Sitemap): changefreq = 'always' priority = 0.9 def items(self): return Category.objects.all().order_by('-publish') def lastmod(self, obj): return obj.updated
25.35
58
0.658777
61
507
5.442623
0.491803
0.10241
0.138554
0.186747
0.608434
0.608434
0.608434
0.608434
0.608434
0.608434
0
0.010256
0.230769
507
20
59
25.35
0.841026
0
0
0.625
0
0
0.055118
0
0
0
0
0
0
1
0.25
false
0
0.125
0.25
1
0
0
0
0
null
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
a90ba8de35136419270c289b2ea24ac17c583f67
153
py
Python
silasdk/__init__.py
xuru/Sila-Python
12fef8886580327779d32cf7596dae4516b36c11
[ "Apache-2.0" ]
null
null
null
silasdk/__init__.py
xuru/Sila-Python
12fef8886580327779d32cf7596dae4516b36c11
[ "Apache-2.0" ]
null
null
null
silasdk/__init__.py
xuru/Sila-Python
12fef8886580327779d32cf7596dae4516b36c11
[ "Apache-2.0" ]
null
null
null
from .ethwallet import EthWallet from .client import App from .processingTypes import ProcessingTypes from .registrationFields import RegistrationFields
30.6
50
0.869281
16
153
8.3125
0.4375
0
0
0
0
0
0
0
0
0
0
0
0.104575
153
4
51
38.25
0.970803
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
a90d5461929597e9f6c1a52129cdbaa7dc5181f2
3,755
py
Python
ray_tracer/tests/test_ray.py
jjason/RayTracerChallenge
ab3cea8968407426bddfa9e11319664fc0b595f6
[ "MIT" ]
1
2020-05-13T03:54:00.000Z
2020-05-13T03:54:00.000Z
ray_tracer/tests/test_ray.py
jjason/RayTracerChallenge
ab3cea8968407426bddfa9e11319664fc0b595f6
[ "MIT" ]
null
null
null
ray_tracer/tests/test_ray.py
jjason/RayTracerChallenge
ab3cea8968407426bddfa9e11319664fc0b595f6
[ "MIT" ]
null
null
null
import unittest from matrix import Matrix from point import Point from vector import Vector from ray import Ray class TestRay(unittest.TestCase): def test_create(self): o = Point(x=1, y=2, z=3) d = Vector(x=4, y=5, z=6) r = Ray(origin=o, direction=d) self.assertEqual(r.origin, o) self.assertEqual(r.direction, d) def test_position(self): r = Ray(origin=Point(x=2, y=3, z=4), direction=Vector(x=1, y=0, z=0)) self.assertEqual(r.position(time=0), Point(x=2, y=3, z=4)) self.assertEqual(r.position(time=1), Point(x=3, y=3, z=4)) self.assertEqual(r.position(time=-1), Point(x=1, y=3, z=4)) self.assertEqual(r.position(time=2.5), Point(x=4.5, y=3, z=4)) def test_transform_by_identity(self): r1 = Ray(origin=Point(x=1, y=2, z=3), direction=Vector(x=0, y=1, z=0)) m = Matrix.identity() r2 = r1.transform(transformation=m) self.assertIsNot(r1, r2) self.assertEqual(r2.origin, r1.origin) self.assertEqual(r2.direction, r1.direction) def test_transform_by_translation(self): r1 = Ray(origin=Point(x=1, y=2, z=3), direction=Vector(x=0, y=1, z=0)) m = Matrix.translation_transform(x=3, y=4, z=5) r2 = r1.transform(transformation=m) self.assertIsNot(r1, r2) self.assertEqual(r2.origin, Point(x=4, y=6, z=8)) self.assertEqual(r2.direction, Vector(x=0, y=1, z=0)) def test_transform_by_scaling(self): r1 = Ray(origin=Point(x=1, y=2, z=3), direction=Vector(x=0, y=1, z=0)) m = Matrix.scaling_transform(x=2, y=3, z=4) r2 = r1.transform(transformation=m) self.assertIsNot(r1, r2) self.assertEqual(r2.origin, Point(x=2, y=6, z=12)) self.assertEqual(r2.direction, Vector(x=0, y=3, z=0)) def test_equal(self): r1 = Ray(origin=Point(x=1, y=2, z=3), direction=Vector(x=0, y=1, z=0)) r2 = Ray(origin=Point(x=1, y=2, z=3), direction=Vector(x=0, y=1, z=0)) self.assertTrue(r1 == r1) self.assertTrue(r1 == r2) r3 = Ray(origin=Point(x=0, y=2, z=3), direction=Vector(x=0, y=1, z=0)) self.assertFalse(r1 == r3) r3 = Ray(origin=Point(x=1, y=3, z=3), direction=Vector(x=0, y=1, z=0)) self.assertFalse(r1 == r3) r3 = 
Ray(origin=Point(x=1, y=2, z=4), direction=Vector(x=0, y=1, z=0)) self.assertFalse(r1 == r3) r3 = Ray(origin=Point(x=1, y=2, z=3), direction=Vector(x=1, y=1, z=0)) self.assertFalse(r1 == r3) r3 = Ray(origin=Point(x=1, y=2, z=3), direction=Vector(x=0, y=0, z=0)) self.assertFalse(r1 == r3) r3 = Ray(origin=Point(x=1, y=2, z=3), direction=Vector(x=0, y=1, z=1)) self.assertFalse(r1 == r3) def test_not_equal(self): r1 = Ray(origin=Point(x=1, y=2, z=3), direction=Vector(x=0, y=1, z=0)) r2 = Ray(origin=Point(x=1, y=2, z=3), direction=Vector(x=0, y=1, z=0)) self.assertFalse(r1 != r1) self.assertFalse(r1 != r2) r3 = Ray(origin=Point(x=0, y=2, z=3), direction=Vector(x=0, y=1, z=0)) self.assertTrue(r1 != r3) r3 = Ray(origin=Point(x=1, y=3, z=3), direction=Vector(x=0, y=1, z=0)) self.assertTrue(r1 != r3) r3 = Ray(origin=Point(x=1, y=2, z=4), direction=Vector(x=0, y=1, z=0)) self.assertTrue(r1 != r3) r3 = Ray(origin=Point(x=1, y=2, z=3), direction=Vector(x=1, y=1, z=0)) self.assertTrue(r1 != r3) r3 = Ray(origin=Point(x=1, y=2, z=3), direction=Vector(x=0, y=0, z=0)) self.assertTrue(r1 != r3) r3 = Ray(origin=Point(x=1, y=2, z=3), direction=Vector(x=0, y=1, z=1)) self.assertTrue(r1 != r3) if __name__ == '__main__': unittest.main()
37.929293
78
0.578961
678
3,755
3.171091
0.076696
0.075349
0.030698
0.139535
0.744186
0.717674
0.714419
0.70093
0.666977
0.652093
0
0.080941
0.230093
3,755
98
79
38.316327
0.662746
0
0
0.493333
0
0
0.00213
0
0
0
0
0
0.413333
1
0.093333
false
0
0.066667
0
0.173333
0
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
5
a957be4a18b530737ff5086013ea9ddd64c3f8b7
316
py
Python
skills_utils/__init__.py
workforce-data-initiative/skills-utils
4cf9b7c2938984f34bbcc33d45482d23c52c7539
[ "MIT" ]
null
null
null
skills_utils/__init__.py
workforce-data-initiative/skills-utils
4cf9b7c2938984f34bbcc33d45482d23c52c7539
[ "MIT" ]
12
2017-04-06T22:34:14.000Z
2018-02-11T20:08:32.000Z
skills_utils/__init__.py
workforce-data-initiative/skills-utils
4cf9b7c2938984f34bbcc33d45482d23c52c7539
[ "MIT" ]
3
2018-03-05T18:36:26.000Z
2020-07-29T23:08:06.000Z
from skills_utils.io import stream_json_file from skills_utils.iteration import Batch from skills_utils.job_posting_import import JobPostingImportBase from skills_utils.s3 import split_s3_path from skills_utils.time import datetime_to_quarter, overlaps, quarter_to_daterange from skills_utils.common import safe_get
45.142857
81
0.892405
49
316
5.408163
0.510204
0.226415
0.339623
0
0
0
0
0
0
0
0
0.006897
0.082278
316
6
82
52.666667
0.906897
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
a98ff71e1cf9356cebd4a942b3483bfcd14aa0df
15,673
py
Python
RL_forest/ddpg_plant/multi_ddpg/models.py
NoListen/RL-forest
6c43d43cc223a8be02256a60c38d72839b9d3fca
[ "MIT" ]
2
2017-08-14T09:11:14.000Z
2018-07-16T06:19:39.000Z
RL_forest/ddpg_plant/multi_ddpg/models.py
NoListen/RL-forest
6c43d43cc223a8be02256a60c38d72839b9d3fca
[ "MIT" ]
null
null
null
RL_forest/ddpg_plant/multi_ddpg/models.py
NoListen/RL-forest
6c43d43cc223a8be02256a60c38d72839b9d3fca
[ "MIT" ]
null
null
null
# https://github.com/openai/baselines/baselines/ddpg/models.py import tensorflow as tf import tensorflow.contrib as tc from tensorflow.contrib import rnn import numpy as np # cover 2d and 3d def get_w_bound(filter_shape): # return np.sqrt(6./(np.prod(filter_shape[:-2]))*np.sum(filter_shape[-2:])) return np.sqrt(6./((np.prod(filter_shape[:-2]))*np.sum(filter_shape[-2:]))) # modified from https://github.com/openai/universe-starter-agent/model.py def conv2d(x, num_filters, name, filter_size=(3, 3), stride=(1, 1), pad="SAME", dtype=tf.float32, collections=None): with tf.variable_scope(name): stride_shape = [1, stride[0], stride[1], 1] filter_shape = [filter_size[0], filter_size[1], int(x.get_shape()[3]), num_filters] w_bound = get_w_bound(filter_shape) w = tf.get_variable("W", filter_shape, dtype, tf.random_uniform_initializer(-w_bound, w_bound), collections=collections) b = tf.get_variable("b", [1, 1, 1, num_filters], initializer=tf.constant_initializer(0.0), collections=collections) return tf.nn.conv2d(x, w, stride_shape, pad) + b # TODO check about the conv3 layer. 
# Doubt about some problems # modified from https://github.com/openai/universe-starter-agent/model.py def conv3d(x, num_filters, name, filter_size=(1, 3, 3), stride=(1, 1, 1), pad="SAME", dtype=tf.float32, collections=None): with tf.variable_scope( name): stride_shape = [1, stride[0], stride[1], stride[2], 1] filter_shape = [filter_size[0], filter_size[1], filter_size[2], int(x.get_shape()[4]), num_filters] w_bound = get_w_bound(filter_shape) w = tf.get_variable("W", filter_shape, dtype, tf.random_uniform_initializer(-w_bound, w_bound), collections=collections) b = tf.get_variable("b", [1, 1, 1, 1, num_filters], initializer=tf.constant_initializer(0.0), collections=collections) return tf.nn.conv3d(x, w, stride_shape, pad) + b class Model(object): def __init__(self, name): self.name = name @property def vars(self): vars = tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES, scope=self.name) vars_without_optimizer = [var for var in vars if 'optimizer' not in var.name] return vars_without_optimizer @property def trainable_vars(self): return tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope=self.name) @property def perturbable_vars(self): return [var for var in self.trainable_vars if 'LayerNorm' not in var.name] # Initialization can't be determined temporally # simple class Dynamic_Actor(Model): def __init__(self, nb_unit_actions, name='actor', layer_norm=True, time_step=5): super(Dynamic_Actor, self).__init__(name=name) self.nb_unit_actions = nb_unit_actions self.layer_norm = layer_norm self.time_step = time_step # au alive units. 
def __call__(self, ud, mask, au, n_hidden=64, reuse=False): with tf.variable_scope(self.name) as scope: if reuse: scope.reuse_variables() x = ud if self.layer_norm: x = tc.layers.layer_norm(x, center=True, scale=True) x = tf.layers.dense(x, 64) if self.layer_norm: x = tc.layers.layer_norm(x, center=True, scale=True) x = tf.nn.relu(x) # no need to extend one dimension shape = x.get_shape().as_list() x = tf.reshape(x, [-1, self.time_step, shape[-1]]) # build bidirection lstm lstm_fw_cell = rnn.BasicLSTMCell(n_hidden, forget_bias=1.0) lstm_bw_cell = rnn.BasicLSTMCell(n_hidden, forget_bias=1.0) x, _ = tf.nn.bidirectional_dynamic_rnn(lstm_fw_cell, lstm_bw_cell, x, dtype=tf.float32, sequence_length=au) x = tf.concat(x, 2) # TODO v2 turn on the batch_norm after lstm if self.layer_norm: x = tc.layers.layer_norm(x, center=True, scale=True) x = tf.nn.relu(x) x = tf.reshape(x, [-1, self.time_step * n_hidden * 2, 1]) x = tf.layers.conv1d(x, self.nb_unit_actions, kernel_size=n_hidden * 2, strides=n_hidden * 2, kernel_initializer=tf.random_uniform_initializer(minval=-3e-3, maxval=3e-3)) x = tf.nn.tanh(x) return x class Dynamic_Critic(Model): def __init__(self, name='critic', layer_norm=True, time_step=5): super(Dynamic_Critic, self).__init__(name=name) self.layer_norm = layer_norm self.time_step = time_step def __call__(self, ud, action, mask, au, n_hidden=64, reuse=False, unit_data = False): with tf.variable_scope(self.name) as scope: if reuse: scope.reuse_variables() # x [ batch_size*time_step, DATA_NUM] x = ud if self.layer_norm: x = tc.layers.layer_norm(x, center=True, scale=True) x = tf.layers.dense(x, 64) if self.layer_norm: x = tc.layers.layer_norm(x, center=True, scale=True) x = tf.nn.relu(x) # format action to be [ batch_size*time_step, nb_actions] x = tf.concat([x, action], axis=-1) # another dense layer x = tf.layers.dense(x, 64) if self.layer_norm: x = tc.layers.layer_norm(x, center=True, scale=True) x = tf.nn.relu(x) shape = x.get_shape().as_list() x = tf.reshape(x, 
[-1, self.time_step, shape[-1]]) lstm_fw_cell = rnn.BasicLSTMCell(n_hidden, forget_bias=1.0) lstm_bw_cell = rnn.BasicLSTMCell(n_hidden, forget_bias=1.0) x, _ = tf.nn.bidirectional_dynamic_rnn(lstm_fw_cell, lstm_bw_cell, x, dtype=tf.float32, sequence_length=au) x = tf.concat(x, 2) # TODO v2 turn on the batch_norm after lstm if self.layer_norm: x = tc.layers.layer_norm(x, center=True, scale=True) x = tf.nn.relu(x) x = tf.reshape(x, [-1, self.time_step * n_hidden * 2, 1]) # Q value of each q = tf.layers.conv1d(x, 1, kernel_size=n_hidden*2, strides=n_hidden*2, kernel_initializer=tf.random_uniform_initializer(minval=-3e-3, maxval=3e-3)) q = tf.squeeze(q, [-1]) #q = tf.multiply(q, mask) Q = tf.reduce_sum(q, axis=1, keep_dims=True) #print(Q.get_shape().as_list, "Q") if unit_data: return Q, q return Q @property def output_vars(self): output_vars = [var for var in self.trainable_vars if 'output' in var.name] return output_vars class Dynamic_Conv_Actor(Model): def __init__(self, nb_unit_actions, name='actor', layer_norm=True, time_step=5): super(Dynamic_Conv_Actor, self).__init__(name=name) self.nb_unit_actions = nb_unit_actions self.layer_norm = layer_norm self.time_step = time_step # the differences between static ones and dynamic ones. # static [1, 0, 0, 0, 1] dynamic [1, 1, 0, 0, 0] # different arrangements of alive units. 
# not use mask temproally def __call__(self, s, ul, mask, au, n_hidden=256, reuse=False): with tf.variable_scope(self.name) as scope: if reuse: scope.reuse_variables() # embedding x = s # [batch_size, myself_num, ms, ms, 1] u = ul u_shape = u.get_shape().as_list() # tf.shape maybe also ok assert (u_shape[1] == self.time_step) # 40 -> 20 # different soldiers share the same gerneral state x = conv2d(x, 24, "conv1", (3, 3), (2, 2)) # 4 map # u_shape = u.get_shape u = tf.reshape(u, []) # apply conv2d but multiple timesteps simultaneously u = conv3d(u, 8, "u_conv1", (1, 3, 3), (1, 2, 2)) # 1 map # u [batch_size*myself_num, ms/2, ms/2, 1] -> [batch_size, myself, ms/2, ms/2, 1] # x [batch_size, ms/2, ms/2, c] u_list = tf.split(u, self.time_step, axis=1) u_list = [tf.squeeze(unit, [1]) for unit in u_list] # Concat the unit location with general state along the channel axis ux = tf.stack([tf.concat([x, unit], -1) for unit in u_list], axis=1) if self.layer_norm: ux = tc.layers.layer_norm(ux, center=True, scale=True) ux = tf.nn.relu(ux) # 20 -> 10 ux = conv3d(ux, 32, "conv2", (1, 3, 3), (1, 2, 2)) if self.layer_norm: ux = tc.layers.layer_norm(ux, center=True, scale=True) ux = tf.nn.relu(ux) # 10 -> 5. ux = conv3d(ux, 32, "conv3", (1, 3, 3), (1, 2, 2)) if self.layer_norm: ux = tc.layers.layer_norm(ux, center=True, scale=True) ux = tf.nn.relu(ux) #TODO add one (1,1,1) layer to reorganize the features. ux_shape = ux.get_shape().as_list() ux = tf.reshape(ux, (-1, self.time_step, int(np.prod(ux_shape[2:])))) # TODO is it necessary to add one layer here??? # ux = tf.layers.dense(ux, n_hidden) # # if self.layer_norm: # ux = tc.layers.layer_norm(ux, center=True, scale=True) # ux = tf.nn.relu(ux) # no need to extend one dimension # build bidirection lstm lstm_fw_cell = rnn.BasicLSTMCell(n_hidden, forget_bias=1.0) lstm_bw_cell = rnn.BasicLSTMCell(n_hidden, forget_bias=1.0) # TODO use output_states for process like policy iteration ---- WOW exciting ideas. 
ux, _ = tf.nn.bidirectional_dynamic_rnn(lstm_fw_cell, lstm_bw_cell, ux, dtype=tf.float32, sequence_length=au) ux = tf.concat(ux, 2) # TODO v2 turn on the batch_norm after lstm if self.layer_norm: ux = tc.layers.layer_norm(ux, center=True, scale=True) ux = tf.nn.relu(ux) ux = tf.layers.dense(ux, 256, kernel_initializer=tf.random_uniform_initializer(minval=-3e-3, maxval=3e-3)) if self.layer_norm: ux = tc.layers.layer_norm(ux, center=True, scale=True) ux = tf.nn.relu(ux) # number of convolution kernel. --> num_actions. default (-1, 1) # convert to [batch_size, time_step*n_hidden], channels_last ux = tf.reshape(ux, [-1, self.time_step*256, 1]) ux = tf.layers.conv1d(ux, self.nb_unit_actions, kernel_size = 256, strides = 256, kernel_initializer=tf.random_uniform_initializer(minval=-3e-3, maxval=3e-3)) # [ batch_size, time_step, nb_actions] ux = tf.nn.tanh(ux) return ux class Dynamic_Conv_Critic(Model): def __init__(self, name='critic', layer_norm=True, time_step=5): super(Dynamic_Conv_Critic, self).__init__(name=name) self.layer_norm = layer_norm self.time_step = time_step # the parameter's location has been changed def __call__(self, s, ul, mask, au, action, n_hidden=64, reuse=False, unit_data=False): with tf.variable_scope(self.name) as scope: if reuse: scope.reuse_variables() # x [ batch_size*time_step, DATA_NUM] x = s u = ul u_shape = u.get_shape().as_list() assert (u_shape[1] == self.time_step) x = conv2d(x, 24, "conv1", (3, 3), (2, 2)) # 4 map # TODO Notice! Fixing! the conv3 get wrong implementation u = conv3d(u, 8, "u_conv1", (1, 3, 3), (1, 2, 2)) # 1 map u_list = tf.split(u, self.time_step, axis=1) u_list = [tf.squeeze(unit, [1]) for unit in u_list] # stack in the time_step axis # TODO extract the weight for unit location ux = tf.stack([tf.concat([x, unit], -1) for unit in u_list], axis=1) if self.layer_norm: ux = tc.layers.layer_norm(ux, center=True, scale=True) ux = tf.nn.relu(ux) # [batch_size, myself_num, ms/2, ms/2, ???] 
ux = conv3d(ux, 32, "conv2", (1, 3, 3), (1, 2, 2)) if self.layer_norm: ux = tc.layers.layer_norm(ux, center=True, scale=True) ux = tf.nn.relu(ux) # 10 -> 5. ux = conv3d(ux, 32, "conv3", (1, 3, 3), (1, 2, 2)) if self.layer_norm: ux = tc.layers.layer_norm(ux, center=True, scale=True) ux = tf.nn.relu(ux) #TODO add one (1,1,1) layer to reorganize the features. ux_shape = ux.get_shape().as_list() ux = tf.reshape(ux, (-1, self.time_step, int(np.prod(ux_shape[2:])))) ux = tf.layers.dense(ux, n_hidden) if self.layer_norm: ux = tc.layers.layer_norm(ux, center=True, scale=True) ux = tf.nn.relu(ux) # Maybe I need an embedding. ux = tf.concat([ux, action], axis=-1) lstm_fw_cell = rnn.BasicLSTMCell(n_hidden, forget_bias=1.0) lstm_bw_cell = rnn.BasicLSTMCell(n_hidden, forget_bias=1.0) ux, _ = tf.nn.bidirectional_dynamic_rnn(lstm_fw_cell, lstm_bw_cell, ux, dtype=tf.float32, sequence_length=au) ux = tf.concat(ux, 2) # TODO v2 turn on the batch_norm after lstm if self.layer_norm: ux = tc.layers.layer_norm(ux, center=True, scale=True) ux = tf.nn.relu(ux) ux = tf.layers.dense(ux, 256, kernel_initializer=tf.random_uniform_initializer(minval=-3e-3, maxval=3e-3)) if self.layer_norm: ux = tc.layers.layer_norm(ux, center=True, scale=True) #ux = tf.nn.relu(ux) ux = tf.nn.dropout(ux, keep_prob=0.5) ux = tf.reshape(ux, [-1, self.time_step * 256, 1]) # Q value of each # some computations are wasted q = tf.layers.conv1d(ux, 1, kernel_size=256, strides=256, kernel_initializer=tf.random_uniform_initializer(minval=-3e-3, maxval=3e-3)) q = tf.squeeze(q, [-1]) """ kill the gradient using the mask """ #print(p.get_shape().as_list(), q.get_shape().as_list(), "pq") #pQ = tf.multiply(p,q) Q_mask = tf.multiply(q, mask) #print(mask.get_shape().as_list(), pQ_mask.get_shape().as_list(), "mask") Q = tf.reduce_sum(Q_mask, axis=1, keep_dims=True) if unit_data: return Q,Q_mask # if mask_loss: # #TODO check this putput # qm = tf.multiply(1-mask, q) # return Q,qm # #print(Q.get_shape().as_list, "Q") return Q 
@property def output_vars(self): output_vars = [var for var in self.trainable_vars if 'output' in var.name] return output_vars
42.474255
118
0.55988
2,203
15,673
3.791648
0.123922
0.053873
0.031605
0.034119
0.784389
0.753382
0.718544
0.700347
0.691009
0.676164
0
0.030334
0.320615
15,673
368
119
42.589674
0.754132
0.166146
0
0.751073
0
0
0.008339
0
0
0
0
0.002717
0.008584
1
0.072961
false
0
0.017167
0.012876
0.171674
0
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
5
8d17516e9b780e3355cc21b96e4b4d6b07f6b793
169
py
Python
kompassi/wsgi.py
darkismus/kompassi
35dea2c7af2857a69cae5c5982b48f01ba56da1f
[ "CC-BY-3.0" ]
13
2015-11-29T12:19:12.000Z
2021-02-21T15:42:11.000Z
kompassi/wsgi.py
darkismus/kompassi
35dea2c7af2857a69cae5c5982b48f01ba56da1f
[ "CC-BY-3.0" ]
23
2015-04-29T19:43:34.000Z
2021-02-10T05:50:17.000Z
kompassi/wsgi.py
darkismus/kompassi
35dea2c7af2857a69cae5c5982b48f01ba56da1f
[ "CC-BY-3.0" ]
11
2015-09-20T18:59:00.000Z
2020-02-07T08:47:34.000Z
import os os.environ.setdefault("DJANGO_SETTINGS_MODULE", "kompassi.settings") from django.core.wsgi import get_wsgi_application application = get_wsgi_application()
21.125
68
0.828402
22
169
6.090909
0.590909
0.104478
0.268657
0
0
0
0
0
0
0
0
0
0.08284
169
7
69
24.142857
0.864516
0
0
0
0
0
0.230769
0.130178
0
0
0
0
0
1
0
false
0.25
0.5
0
0.5
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
1
0
0
0
0
5
8d328c4601a037477cdc36cefc6cb622fe71093d
11,701
py
Python
build/lib/Scopuli/Interfaces/MySQL/Schema/Core/__init__.py
MaxOnNet/scopuli-core
17d72d7f67286ae84b21aa2541f3e1f03b6154ca
[ "Apache-2.0" ]
null
null
null
build/lib/Scopuli/Interfaces/MySQL/Schema/Core/__init__.py
MaxOnNet/scopuli-core
17d72d7f67286ae84b21aa2541f3e1f03b6154ca
[ "Apache-2.0" ]
null
null
null
build/lib/Scopuli/Interfaces/MySQL/Schema/Core/__init__.py
MaxOnNet/scopuli-core
17d72d7f67286ae84b21aa2541f3e1f03b6154ca
[ "Apache-2.0" ]
null
null
null
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright [2017] Tatarnikov Viktor [viktor@tatarnikov.org] # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ """ from Scopuli.Interfaces.MySQL.SQLAlchemy import * class Image(Base, Schema): """ Базовая таблица храниения изображений """ __tablename__ = 'image' __table_args__ = { 'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8', 'mysql_collate': 'utf8_general_ci', 'mysql_comment': 'Таблица со списком и информацией об используемых изображениях' } id = Column(Integer(), primary_key=True, autoincrement=True, doc="Row ID - Сурогатный ключ") uuid = Column(String(64), index=True, nullable=False, doc="UUID файла в кэше") file = Column(String(256), nullable=False, doc="Относительный путь до изображения") md5sum = Column(String(64), index=True, nullable=False, doc="Контрольная сумма изображения") file_thumbnail = Column(String(256), nullable=False, doc="Относительный путь до изображения") md5sum_thumbnail = Column(String(64), index=True, nullable=False, doc="Контрольная сумма изображения") size = Column(Integer(), ColumnDefault(0), nullable=False, doc="Размер изображения") height = Column(Integer(), ColumnDefault(0), nullable=False, doc="Высота изображения") width = Column(Integer(), ColumnDefault(0), nullable=False, doc="Ширина изображения") # Automatic Logger date_create = Column(DateTime(), nullable=False, default=func.utc_timestamp(), doc="AutoLogger - Время создания") date_change = Column(DateTime(), nullable=False, 
default=func.utc_timestamp(), onupdate=func.utc_timestamp(), doc="AutoLogger - Время последнего изменения") class File(Base, Schema): """ Базовая таблица хранения файлов """ __tablename__ = 'file' __table_args__ = { 'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8', 'mysql_collate': 'utf8_general_ci', 'mysql_comment': 'Таблица со списком и информацией об используемых файлов' } id = Column(Integer(), primary_key=True, autoincrement=True, doc="Row ID - Сурогатный ключ") uuid = Column(String(64), index=True, nullable=False, doc="UUID файла в кэше") file = Column(String(256), nullable=False, doc="Относительный путь до файла") type = Column(String(16), nullable=False, doc="Тип файла") md5sum = Column(String(64), index=True, nullable=False, doc="Сонтрольная сумма изображения") file_thumbnail = Column(String(256), nullable=False, doc="Относительный путь до изображения") md5sum_thumbnail = Column(String(64), index=True, nullable=False, doc="Сонтрольная сумма изображения") size = Column(Integer(), ColumnDefault(0), nullable=False, doc="Размер файла") # Automatic Logger date_create = Column(DateTime(), nullable=False, default=func.utc_timestamp(), doc="AutoLogger - Время создания") date_change = Column(DateTime(), nullable=False, default=func.utc_timestamp(), onupdate=func.utc_timestamp(), doc="AutoLogger - Время последнего изменения") class Address(Base, Schema): """ Базовая таблица хранения адресов """ __tablename__ = 'address' __table_args__ = { 'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8', 'mysql_collate': 'utf8_general_ci', 'mysql_comment': 'Таблица со списком и информацией об используемых файлов' } id = Column(Integer(), primary_key=True, autoincrement=True, doc="Row ID - Сурогатный ключ") country = Column(String(64), nullable=False, doc="Страна") city = Column(String(64), nullable=False, doc="Город") street = Column(String(64), nullable=False, doc="Улица") house = Column(String(8), nullable=False, doc="Дом") room = Column(String(8), nullable=False, doc="Кабинет \ 
квартира") floor = Column(String(256), nullable=False, doc="Этаж") index = Column(String(64), nullable=False, doc="Почтовый индекс") type = Column(String(16), nullable=False, doc="Тип файла") latitude = Column(String(16), nullable=False, doc="Широта") longitude = Column(String(16), nullable=False, doc="Долгота") # Automatic Logger date_create = Column(DateTime(), nullable=False, default=func.utc_timestamp(), doc="AutoLogger - Время создания") date_change = Column(DateTime(), nullable=False, default=func.utc_timestamp(), onupdate=func.utc_timestamp(), doc="AutoLogger - Время последнего изменения") # Rela phones = association_proxy('address_phones', 'phone') urls = association_proxy('address_urls', 'url') emails = association_proxy('address_emails', 'email') class Phone(Base, Schema): """ Базовая таблица хранения телефонов """ __tablename__ = 'phone' __table_args__ = { 'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8', 'mysql_collate': 'utf8_general_ci', 'mysql_comment': 'Таблица со списком телефонов' } id = Column(Integer(), primary_key=True, autoincrement=True, doc="Row ID - Сурогатный ключ") country = Column(String(16), ColumnDefault(""), nullable=False, doc="Код страны") city = Column(String(16), ColumnDefault(""), nullable=True, doc="Код города") number = Column(String(16), ColumnDefault(""), nullable=False, doc="Номер телефона") title = Column(String(128), ColumnDefault(""), nullable=False, doc="Название") description = Column(String(256), ColumnDefault(""), nullable=False, doc="Краткое описание") is_enable = Column(Boolean(), ColumnDefault(False), default=False, nullable=False, doc="Метка использования") is_published = Column(Boolean(), ColumnDefault(False), default=False, nullable=False, doc="Метка использования в интернете") # Automatic Logger date_create = Column(DateTime(), nullable=False, default=func.utc_timestamp(), doc="AutoLogger - Время создания") date_change = Column(DateTime(), nullable=False, default=func.utc_timestamp(), 
onupdate=func.utc_timestamp(), doc="AutoLogger - Время последнего изменения") @hybrid_property def phone(self): if self.city is None: return "+{}{}".format(self.country, self.number) else: return "+{}{}{}".format(self.country, self.city, self.number) class AddressPhone(Base, Schema): """ Базовая таблица хранения телефонов """ __tablename__ = 'address_phone' __table_args__ = { 'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8', 'mysql_collate': 'utf8_general_ci', 'mysql_comment': 'Таблица со списком телефонов у адреса' } id = Column(Integer(), primary_key=True, autoincrement=True, doc="Row ID - Сурогатный ключ") cd_address = Column(Integer(), ForeignKey(Address.id), nullable=False, doc="Ссылка на Address") cd_phone = Column(Integer(), ForeignKey(Phone.id), nullable=False, doc="Ссылка на Phone") phone = relationship(Phone) address = relationship(Address, backref=backref("address_phones", cascade="all, delete-orphan")) class Email(Base, Schema): """ Базовая таблица хранения телефонов """ __tablename__ = 'email' __table_args__ = { 'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8', 'mysql_collate': 'utf8_general_ci', 'mysql_comment': 'Таблица со списком почтовых адресов' } id = Column(Integer(), primary_key=True, autoincrement=True, doc="Row ID - Сурогатный ключ") email = Column(String(128), ColumnDefault(""), nullable=False, doc="Адрес") title = Column(String(128), ColumnDefault(""), nullable=False, doc="Название") description = Column(String(256), ColumnDefault(""), nullable=False, doc="Краткое описание") is_enable = Column(Boolean(), ColumnDefault(False), default=False, nullable=False, doc="Метка использования") is_published = Column(Boolean(), ColumnDefault(False), default=False, nullable=False, doc="Метка использования в интернете") # Automatic Logger date_create = Column(DateTime(), nullable=False, default=func.utc_timestamp(), doc="AutoLogger - Время создания") date_change = Column(DateTime(), nullable=False, default=func.utc_timestamp(), 
onupdate=func.utc_timestamp(), doc="AutoLogger - Время последнего изменения") class AddressEmail(Base, Schema): """ Базовая таблица хранения почтовых адресов """ __tablename__ = 'address_email' __table_args__ = { 'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8', 'mysql_collate': 'utf8_general_ci', 'mysql_comment': 'Таблица со списком электронных почтовых адресов у адреса' } id = Column(Integer(), primary_key=True, autoincrement=True, doc="Row ID - Сурогатный ключ") cd_address = Column(Integer(), ForeignKey(Address.id), nullable=False, doc="Ссылка на Address") cd_email = Column(Integer(), ForeignKey(Email.id), nullable=False, doc="Ссылка на Email") email = relationship(Email) address = relationship(Address, backref=backref("address_emails", cascade="all, delete-orphan")) class Url(Base, Schema): """ Базовая таблица хранения телефонов """ __tablename__ = 'url' __table_args__ = { 'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8', 'mysql_collate': 'utf8_general_ci', 'mysql_comment': 'Таблица со списком почтовых адресов' } id = Column(Integer(), primary_key=True, autoincrement=True, doc="Row ID - Сурогатный ключ") url = Column(String(256), ColumnDefault(""), nullable=False, doc="Адрес") title = Column(String(128), ColumnDefault(""), nullable=False, doc="Название") description = Column(String(256), ColumnDefault(""), nullable=False, doc="Краткое описание") is_enable = Column(Boolean(), ColumnDefault(False), default=False, nullable=False, doc="Метка использования") is_published = Column(Boolean(), ColumnDefault(False), default=False, nullable=False, doc="Метка использования в интернете") # Automatic Logger date_create = Column(DateTime(), nullable=False, default=func.utc_timestamp(), doc="AutoLogger - Время создания") date_change = Column(DateTime(), nullable=False, default=func.utc_timestamp(), onupdate=func.utc_timestamp(), doc="AutoLogger - Время последнего изменения") class AddressUrl(Base, Schema): """ Базовая таблица хранения почтовых адресов """ __tablename__ = 
'address_url' __table_args__ = { 'mysql_engine': 'InnoDB', 'mysql_charset': 'utf8', 'mysql_collate': 'utf8_general_ci', 'mysql_comment': 'Таблица со списком электронных адресов у адреса' } id = Column(Integer(), primary_key=True, autoincrement=True, doc="Row ID - Сурогатный ключ") cd_address = Column(Integer(), ForeignKey(Address.id), nullable=False, doc="Ссылка на Address") cd_url = Column(Integer(), ForeignKey(Url.id), nullable=False, doc="Ссылка на Email") url = relationship(Url) address = relationship(Address, backref=backref("address_urls", cascade="all, delete-orphan"))
43.988722
160
0.678574
1,329
11,701
5.82167
0.177577
0.099134
0.097195
0.041877
0.825126
0.798371
0.74357
0.685149
0.685149
0.657231
0
0.011575
0.187847
11,701
265
161
44.154717
0.802589
0.090249
0
0.534161
0
0
0.260532
0
0
0
0
0
0
1
0.006211
false
0
0.006211
0
0.677019
0
0
0
0
null
0
0
0
1
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
5
8d3efcf59f1ae679322dc231bb9a8466aabe9ee1
89
py
Python
irekia/__init__.py
eillarra/irekia
bbceb8d26c9dddf93f5015459bf53887703fb87e
[ "MIT" ]
1
2017-04-29T11:37:59.000Z
2017-04-29T11:37:59.000Z
irekia/__init__.py
eillarra/irekia
bbceb8d26c9dddf93f5015459bf53887703fb87e
[ "MIT" ]
1
2021-03-31T18:50:59.000Z
2021-03-31T18:50:59.000Z
irekia/__init__.py
eillarra/irekia
bbceb8d26c9dddf93f5015459bf53887703fb87e
[ "MIT" ]
null
null
null
from __future__ import absolute_import from .client import Client, get_metadata # noqa
22.25
48
0.820225
12
89
5.583333
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.146067
89
3
49
29.666667
0.881579
0.044944
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
8d6df2bbf0e86020114e72dfe033f37b704f09e3
1,682
py
Python
modules/runnable/script5.py
Asichurter/Few-Shot-Project
865cd6aa7b996c518dfa48dcc9ffad90445f9efe
[ "MIT" ]
null
null
null
modules/runnable/script5.py
Asichurter/Few-Shot-Project
865cd6aa7b996c518dfa48dcc9ffad90445f9efe
[ "MIT" ]
null
null
null
modules/runnable/script5.py
Asichurter/Few-Shot-Project
865cd6aa7b996c518dfa48dcc9ffad90445f9efe
[ "MIT" ]
null
null
null
import requests import os # save_path = 'C:/Users/Asichurter/Desktop/dl_malwares/' # # url = 'https://www.virustotal.com/vtapi/v2/file/download' # apikey = 'c424abc9c8d7102cfaf9cf2d8f01fb95f4ddfd81a563d6e07738fa960b501d87' # hashes = [ "63956d6417f8f43357d9a8e79e52257e" # "6f7bde7a1126debf0cc359a54953efc1" # "7520c8f9534ca818726a4feaebf49e2b" # "e435a536968941854bcec3b902c439f6" # "e93049e2df82ab26f35ad0049173cb14" # "4235e2d487958ff377f0f92b266591f0" # "e4647acec12b82944f5df603dc682660" # "6524a10da9701301b2582f12cc66f90c" # "14a3f5108958b61c6bdc2de17c785a89" # "1515a80662d5bd0d8a6fb9ecfeedb652" # "94e4b62861ab7a4d3246a4888e9025b5" # "cfb9fbcd2bb1ca2d326720971f385a4b" # "a89153d58a70f143ed1fd3b89f26a90f" # "323037966ab54ce841f528870908e259" # "5af2ee5a9e61b194f6cb076775237980" # "ac79fefb5ddfe4f20061bca398884233" # "52411226cbdd24441966c08f959ad5dc" # "3742f0a58ca91a0c56c74f49dd22ab0b" # "485a4912b2d639694f836451a2b30435" # "b29bea9ae0292d8a6a18219b63a62787"] # # for i,hash_val in enumerate(hashes): # print(i, hash_val) # params = {'apikey': apikey, 'hash': hash_val} # # response = requests.get(url, params=params) # print(response) # downloaded_file = response.content # # with open(save_path+str(i)+'.pe', 'wb') as f: # f.write(downloaded_file) url = 'https://www.virustotal.com/gui/file/968c37e74571c6f3bf8f2749c9e1d0ea6999eb503de2a9a6cc78c68530559c6d/detection.html' response = requests.get(url)
39.116279
123
0.685493
101
1,682
11.336634
0.663366
0.018341
0.019214
0.036681
0.041921
0
0
0
0
0
0
0.381749
0.218193
1,682
42
124
40.047619
0.488973
0.849584
0
0
0
0
0.532407
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
1
1
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
5
8da0ed0bb0b0f2ad312d1b93fba7462bedf16e43
164
py
Python
django_review/review/admin.py
edfranceschini/django_review
6a03bd41d81a84949d635f4cc64efdfaff51e10c
[ "MIT" ]
null
null
null
django_review/review/admin.py
edfranceschini/django_review
6a03bd41d81a84949d635f4cc64efdfaff51e10c
[ "MIT" ]
null
null
null
django_review/review/admin.py
edfranceschini/django_review
6a03bd41d81a84949d635f4cc64efdfaff51e10c
[ "MIT" ]
null
null
null
from django.contrib import admin from .models import Company, Profile, Review admin.site.register(Company) admin.site.register(Profile) admin.site.register(Review)
27.333333
44
0.823171
23
164
5.869565
0.478261
0.2
0.377778
0
0
0
0
0
0
0
0
0
0.079268
164
6
45
27.333333
0.89404
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.4
0
0.4
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
5
a5f2a97d7d498665423cabcef1dc59adede23787
382
py
Python
Server/Data/scripts/player/skills/prayer.py
CoderMMK/RSPS
5cf72f4203626e3bf3ab8790072547e260afa3f5
[ "WTFPL" ]
null
null
null
Server/Data/scripts/player/skills/prayer.py
CoderMMK/RSPS
5cf72f4203626e3bf3ab8790072547e260afa3f5
[ "WTFPL" ]
null
null
null
Server/Data/scripts/player/skills/prayer.py
CoderMMK/RSPS
5cf72f4203626e3bf3ab8790072547e260afa3f5
[ "WTFPL" ]
2
2019-07-19T21:28:47.000Z
2020-01-07T14:23:31.000Z
# Bone Bury Functions # Author: Lmctruck30 # from server.util import ScriptManager def itemClick_526(player, itemId, itemSlot): player.getPA().buryBone(5, 1600, itemId, itemSlot) def itemClick_532(player, itemId, itemSlot): player.getPA().buryBone(10, 1600, itemId, itemSlot) def itemClick_536(player, itemId, itemSlot): player.getPA().buryBone(15, 1600, itemId, itemSlot)
22.470588
52
0.759162
49
382
5.857143
0.489796
0.292683
0.209059
0.271777
0.616725
0.407666
0
0
0
0
0
0.083086
0.117801
382
16
53
23.875
0.768546
0.099476
0
0
0
0
0
0
0
0
0
0
0
1
0.428571
false
0
0.142857
0
0.571429
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
5
a5f4e46f6d82fde46b3af94a9cc6258e191f0f2f
245
py
Python
ch01/c118.py
claudiordgz/GoodrichTamassiaGoldwasser
0e434caa5bf6f2adefaeff9c17b4f3910c1cff7b
[ "MIT" ]
2
2017-01-04T02:00:39.000Z
2018-10-10T17:43:51.000Z
ch01/c118.py
claudiordgz/GoodrichTamassiaGoldwasser
0e434caa5bf6f2adefaeff9c17b4f3910c1cff7b
[ "MIT" ]
null
null
null
ch01/c118.py
claudiordgz/GoodrichTamassiaGoldwasser
0e434caa5bf6f2adefaeff9c17b4f3910c1cff7b
[ "MIT" ]
null
null
null
__author__ = 'Claudio' """Demonstrate how to use Python’s list comprehension syntax to produce the list [0, 2, 6, 12, 20, 30, 42, 56, 72, 90]. """ def demonstration_list_comprehension(): return [idx*x for idx, x in enumerate(range(1,11))]
27.222222
71
0.693878
40
245
4.1
0.85
0.207317
0
0
0
0
0
0
0
0
0
0.098039
0.167347
245
8
72
30.625
0.705882
0
0
0
0
0
0.057377
0
0
0
0
0
0
1
0.333333
false
0
0
0.333333
0.666667
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
5
a5ff164795bd2b1745e48b05cf366f3770ea9640
89
py
Python
cart/admin.py
geraldofada/flea-market
174a269657dc148f52e2785933da73758e6e86be
[ "MIT" ]
null
null
null
cart/admin.py
geraldofada/flea-market
174a269657dc148f52e2785933da73758e6e86be
[ "MIT" ]
null
null
null
cart/admin.py
geraldofada/flea-market
174a269657dc148f52e2785933da73758e6e86be
[ "MIT" ]
null
null
null
# Admin configuration for the cart app.
#
# Registers the Cart model with Django's default admin site using the
# stock ModelAdmin options.  Imports are grouped per PEP 8 / Django
# convention: third-party (django) before local app imports.
from django.contrib import admin

from cart.models import Cart

admin.site.register(Cart)
17.8
32
0.820225
14
89
5.214286
0.642857
0
0
0
0
0
0
0
0
0
0
0
0.11236
89
5
33
17.8
0.924051
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
5
572392deacfc0e33849ccd26e9c8fa2d3b4a5aac
434
py
Python
Bot/ExchangeBase.py
JHJohny/CryptoBuyInBot
4a7ab4ff1c5dd92fc3853e4de75c54053f66871e
[ "MIT" ]
null
null
null
Bot/ExchangeBase.py
JHJohny/CryptoBuyInBot
4a7ab4ff1c5dd92fc3853e4de75c54053f66871e
[ "MIT" ]
null
null
null
Bot/ExchangeBase.py
JHJohny/CryptoBuyInBot
4a7ab4ff1c5dd92fc3853e4de75c54053f66871e
[ "MIT" ]
null
null
null
from abc import ABC, abstractmethod


class Exchange(ABC):
    """Abstract interface that every concrete exchange adapter implements."""

    @abstractmethod
    def get_current_minute_candle(self, symbol):
        """Take a cryptocurrency keyword and return a dict with the
        Open, High, Low and Close values of the current minute candle.
        """
        ...

    @abstractmethod
    def create_buy_order(self):
        """Place a buy order on the exchange."""
        ...

    @abstractmethod
    def set_stop_loss(self):
        """Attach a stop-loss to the current position."""
        ...

    @abstractmethod
    def set_stop_profit(self):
        """Attach a take-profit to the current position."""
        ...
19.727273
97
0.66129
51
434
5.45098
0.647059
0.244604
0.226619
0.179856
0.230216
0.230216
0
0
0
0
0
0
0.267281
434
21
98
20.666667
0.874214
0.191244
0
0.571429
0
0
0
0
0
0
0
0
0
1
0.285714
false
0.285714
0.071429
0
0.428571
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
0
0
0
5
573ad505fac4d4515eb374d85148e9fb951de2e4
80
py
Python
01_basic/exercise_042.py
sideroff/python-exercises
6a9cc55735d977a71697204c734b3ade84a0c4fd
[ "MIT" ]
null
null
null
01_basic/exercise_042.py
sideroff/python-exercises
6a9cc55735d977a71697204c734b3ade84a0c4fd
[ "MIT" ]
4
2020-03-24T18:00:07.000Z
2021-06-02T00:51:22.000Z
01_basic/exercise_042.py
sideroff/python-exercises
6a9cc55735d977a71697204c734b3ade84a0c4fd
[ "MIT" ]
null
null
null
import struct


def interpreter_bit_width():
    """Return the bit width (e.g. 32 or 64) of the running Python interpreter.

    struct.calcsize("P") is the size in bytes of a C pointer (void *) for
    this build, so multiplying by 8 gives the pointer width in bits.
    """
    return struct.calcsize("P") * 8


# Keep the original script behavior: print the width at import/run time.
print("python is running on %i bit os" % interpreter_bit_width())
26.666667
65
0.7
14
80
4
0.928571
0
0
0
0
0
0
0
0
0
0
0.014286
0.125
80
3
65
26.666667
0.785714
0
0
0
0
0
0.382716
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0.5
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
1
0
5