hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
b530bd6817587ae7bf40c4c98c7e591a1c852149
22
py
Python
hqc/__init__.py
leockl/helstrom-quantum-centroid-classifier
9f5d056e98c255aa0bbc9e22ffea6c66627c3189
[ "BSD-3-Clause" ]
7
2020-01-25T02:43:41.000Z
2021-10-21T21:17:03.000Z
hqc/__init__.py
leockl/helstrom-quantum-centroid-classifier
9f5d056e98c255aa0bbc9e22ffea6c66627c3189
[ "BSD-3-Clause" ]
1
2020-01-27T07:15:44.000Z
2020-01-27T07:16:03.000Z
hqc/__init__.py
leockl/helstrom-quantum-centroid-classifier
9f5d056e98c255aa0bbc9e22ffea6c66627c3189
[ "BSD-3-Clause" ]
2
2020-01-27T07:14:17.000Z
2020-12-17T08:04:22.000Z
from .hqc import HQC
11
21
0.727273
4
22
4
0.75
0
0
0
0
0
0
0
0
0
0
0
0.227273
22
1
22
22
0.941176
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
b54a19fb77035d3d0a72edba4b80d0425d565c25
39
py
Python
amocrm_asterisk_ng/telephony/impl/instances/asterisk_16/ami_handlers/ami_store/impl/__init__.py
iqtek/amocrn_asterisk_ng
429a8d0823b951c855a49c1d44ab0e05263c54dc
[ "MIT" ]
null
null
null
amocrm_asterisk_ng/telephony/impl/instances/asterisk_16/ami_handlers/ami_store/impl/__init__.py
iqtek/amocrn_asterisk_ng
429a8d0823b951c855a49c1d44ab0e05263c54dc
[ "MIT" ]
null
null
null
amocrm_asterisk_ng/telephony/impl/instances/asterisk_16/ami_handlers/ami_store/impl/__init__.py
iqtek/amocrn_asterisk_ng
429a8d0823b951c855a49c1d44ab0e05263c54dc
[ "MIT" ]
null
null
null
from .AmiStoreImpl import AmiStoreImpl
19.5
38
0.871795
4
39
8.5
0.75
0
0
0
0
0
0
0
0
0
0
0
0.102564
39
1
39
39
0.971429
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
b54c79250fa758411c671299b102752ca7202fea
79
py
Python
foundry/__init__.py
MLMI2-CSSI/foundry
d72f3af2591f678149e303ab217d6badcda92f09
[ "MIT" ]
10
2020-09-11T01:40:46.000Z
2022-02-24T05:02:35.000Z
foundry/__init__.py
MLMI2-CSSI/foundry
d72f3af2591f678149e303ab217d6badcda92f09
[ "MIT" ]
73
2020-02-14T20:11:56.000Z
2022-03-31T17:16:18.000Z
foundry/__init__.py
MLMI2-CSSI/foundry
d72f3af2591f678149e303ab217d6badcda92f09
[ "MIT" ]
4
2020-06-24T20:11:27.000Z
2022-01-29T02:08:07.000Z
from .foundry import Foundry from . import models from . import xtract_method
15.8
28
0.797468
11
79
5.636364
0.545455
0.322581
0
0
0
0
0
0
0
0
0
0
0.164557
79
4
29
19.75
0.939394
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
b56593bd8246cc536ddb96a956890292cedf9933
45
py
Python
FlyBIDS/__init__.py
PennLINC/FlyBIDS
0b44d624c75f537c668d75664c239c51100bdf8d
[ "MIT" ]
null
null
null
FlyBIDS/__init__.py
PennLINC/FlyBIDS
0b44d624c75f537c668d75664c239c51100bdf8d
[ "MIT" ]
null
null
null
FlyBIDS/__init__.py
PennLINC/FlyBIDS
0b44d624c75f537c668d75664c239c51100bdf8d
[ "MIT" ]
1
2021-11-25T21:33:13.000Z
2021-11-25T21:33:13.000Z
from FlyBIDS.BIDSLayout import FlyBIDSLayout
22.5
44
0.888889
5
45
8
1
0
0
0
0
0
0
0
0
0
0
0
0.088889
45
1
45
45
0.97561
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
b56b8373a52a1ad06a645595f23ac13235e600ec
204
py
Python
SimPEG/PF/__init__.py
kimjaed/simpeg
b8d716f86a4ea07ba3085fabb24c2bc974788040
[ "MIT" ]
3
2020-11-27T03:18:28.000Z
2022-03-18T01:29:58.000Z
SimPEG/PF/__init__.py
kimjaed/simpeg
b8d716f86a4ea07ba3085fabb24c2bc974788040
[ "MIT" ]
null
null
null
SimPEG/PF/__init__.py
kimjaed/simpeg
b8d716f86a4ea07ba3085fabb24c2bc974788040
[ "MIT" ]
1
2020-05-26T17:00:53.000Z
2020-05-26T17:00:53.000Z
from . import MagAnalytics from . import GravAnalytics from . import BaseMag from . import Magnetics from . import BaseGrav from . import Gravity from . import MagneticsDriver from . import GravityDriver
22.666667
29
0.803922
24
204
6.833333
0.416667
0.487805
0
0
0
0
0
0
0
0
0
0
0.156863
204
8
30
25.5
0.953488
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
b59393335f0033da58f47547bc08f9edcb4ee947
1,086
py
Python
100 Days of Code/Day 007/hangman_art.py
jburke234/learning
f192e1ab5d6ec4a7d7dfc66d9e8d1b170e6685ea
[ "MIT" ]
null
null
null
100 Days of Code/Day 007/hangman_art.py
jburke234/learning
f192e1ab5d6ec4a7d7dfc66d9e8d1b170e6685ea
[ "MIT" ]
null
null
null
100 Days of Code/Day 007/hangman_art.py
jburke234/learning
f192e1ab5d6ec4a7d7dfc66d9e8d1b170e6685ea
[ "MIT" ]
null
null
null
# -*- coding: utf-8 -*- """ Created on Tue Mar 30 23:08:01 2021 @author: James """ stages = [''' +---+ | | O | /|\ | / \ | | ========= ''', ''' +---+ | | O | /|\ | / | | ========= ''', ''' +---+ | | O | /|\ | | | ========= ''', ''' +---+ | | O | /| | | | =========''', ''' +---+ | | O | | | | | ========= ''', ''' +---+ | | O | | | | ========= ''', ''' +---+ | | | | | | ========= '''] logo = ''' _ | | | |__ __ _ _ __ __ _ _ __ ___ __ _ _ __ | '_ \ / _` | '_ \ / _` | '_ ` _ \ / _` | '_ \ | | | | (_| | | | | (_| | | | | | | (_| | | | | |_| |_|\__,_|_| |_|\__, |_| |_| |_|\__,_|_| |_| __/ | |___/ '''
13.575
68
0.111418
22
1,086
2.909091
0.772727
0.15625
0.1875
0.1875
0.09375
0
0
0
0
0
0
0.030374
0.605893
1,086
79
69
13.746835
0.119159
0.06814
0
0.553846
0
0.046154
0.7749
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
1
0
0
0
0
1
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
a91d6f20ef10bea5a5871eb7093356312447b1f0
92
py
Python
install/super_prove/lib/pyzz/__init__.py
ljbrooks/superkb_release
cd8c476ba687dea3cdd979eb4b1a7bd9471ece66
[ "MIT" ]
null
null
null
install/super_prove/lib/pyzz/__init__.py
ljbrooks/superkb_release
cd8c476ba687dea3cdd979eb4b1a7bd9471ece66
[ "MIT" ]
null
null
null
install/super_prove/lib/pyzz/__init__.py
ljbrooks/superkb_release
cd8c476ba687dea3cdd979eb4b1a7bd9471ece66
[ "MIT" ]
null
null
null
from _pyzz import * from pyzz import * import utils import bmc import primitives import tt
11.5
19
0.793478
14
92
5.142857
0.5
0.222222
0.388889
0
0
0
0
0
0
0
0
0
0.184783
92
7
20
13.142857
0.96
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
6
a9617ae27481bbd8ad002b5466041f24da135099
4,506
py
Python
src/PDBParser.py
yingyulou/PDBTools
7dadc1916d9a5c71b6e05a6e52c08820344640ec
[ "MIT" ]
8
2018-10-22T05:20:06.000Z
2021-08-17T11:01:29.000Z
src/PDBParser.py
yingyulou/PDBTools
7dadc1916d9a5c71b6e05a6e52c08820344640ec
[ "MIT" ]
null
null
null
src/PDBParser.py
yingyulou/PDBTools
7dadc1916d9a5c71b6e05a6e52c08820344640ec
[ "MIT" ]
3
2018-12-15T04:41:41.000Z
2020-08-09T06:30:01.000Z
#!/usr/bin/env python3 # coding=UTF-8 ''' PDBParser ========= PDB parser function define. ''' # Import Python Lib from os.path import splitext, basename from numpy import array # Import PDBTools from .Protein import Protein from .Chain import Chain from .Residue import Residue from .Atom import Atom from .Util import IsH ################################################################################ # Parse PDB File ################################################################################ def Load(pdbFilePath, parseHBool = False): proObj = Protein(splitext(basename(pdbFilePath))[0]) lastChainName = None lastResName = None lastResNum = None lastResIns = None with open(pdbFilePath) as f: for line in f: if line[:4] != 'ATOM': continue atomName = line[12:16].strip() if IsH(atomName) and not parseHBool: continue atomNum = int(line[6:11]) atomAltLoc = line[16].strip() resName = line[17:20].strip() chainName = line[21].strip() resNum = int(line[22:26]) resIns = line[26].strip() atomCoord = array((float(line[30:38]), float(line[38:46]), float(line[46:54]))) atomOccupancy = line[54:60].strip() atomTempFactor = line[60:66].strip() atomElement = line[76:78].strip() atomCharge = line[78:80].strip() if chainName != lastChainName: lastChainName, lastResNum, lastResName, lastResIns = chainName, resNum, resName, resIns chainObj = Chain(chainName, proObj) resObj = Residue(resName, resNum, resIns, chainObj) elif lastResNum != resNum or lastResName != resName or lastResIns != resIns: lastResNum, lastResName, lastResIns = resNum, resName, resIns resObj = Residue(resName, resNum, resIns, chainObj) Atom(atomName, atomNum, atomCoord, atomAltLoc, atomOccupancy, atomTempFactor, atomElement, atomCharge, resObj) return proObj ################################################################################ # Parse PDB File With Model ################################################################################ def LoadModel(pdbFilePath, parseHBool = False): pdbIdStr = 
splitext(basename(pdbFilePath))[0] proObj = Protein(pdbIdStr) proObjList = [proObj] lastChainName = None lastResName = None lastResNum = None lastResIns = None with open(pdbFilePath) as f: for line in f: if line[:5] == 'MODEL': proObj = Protein(pdbIdStr, int(line[10:14])) proObjList.append(proObj) lastChainName = None lastResName = None lastResNum = None lastResIns = None continue elif line[:4] != 'ATOM': continue atomName = line[12:16].strip() if IsH(atomName) and not parseHBool: continue atomNum = int(line[6:11]) atomAltLoc = line[16].strip() resName = line[17:20].strip() chainName = line[21].strip() resNum = int(line[22:26]) resIns = line[26].strip() atomCoord = array((float(line[30:38]), float(line[38:46]), float(line[46:54]))) atomOccupancy = line[54:60].strip() atomTempFactor = line[60:66].strip() atomElement = line[76:78].strip() atomCharge = line[78:80].strip() if chainName != lastChainName: lastChainName, lastResNum, lastResName, lastResIns = chainName, resNum, resName, resIns chainObj = Chain(chainName, proObj) resObj = Residue(resName, resNum, resIns, chainObj) elif lastResNum != resNum or lastResName != resName or lastResIns != resIns: lastResNum, lastResName, lastResIns = resNum, resName, resIns resObj = Residue(resName, resNum, resIns, chainObj) Atom(atomName, atomNum, atomCoord, atomAltLoc, atomOccupancy, atomTempFactor, atomElement, atomCharge, resObj) if not proObjList[0]: proObjList.pop(0) return proObjList
31.291667
103
0.517088
409
4,506
5.696822
0.249389
0.023176
0.053219
0.044635
0.751931
0.751931
0.751931
0.751931
0.751931
0.72103
0
0.036168
0.318908
4,506
143
104
31.51049
0.723037
0.03573
0
0.747126
0
0
0.00325
0
0
0
0
0
0
1
0.022989
false
0
0.08046
0
0.126437
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
8d225e482f7c312fd2cf296e450e24a5936008f1
41
py
Python
tests/test_bot.py
dead-beef/telegram-bot
3abe33e179ddc65093ec55a4fb53d64d948d4e86
[ "MIT" ]
null
null
null
tests/test_bot.py
dead-beef/telegram-bot
3abe33e179ddc65093ec55a4fb53d64d948d4e86
[ "MIT" ]
null
null
null
tests/test_bot.py
dead-beef/telegram-bot
3abe33e179ddc65093ec55a4fb53d64d948d4e86
[ "MIT" ]
null
null
null
import pytest from bot.bot import Bot
6.833333
23
0.756098
7
41
4.428571
0.571429
0
0
0
0
0
0
0
0
0
0
0
0.219512
41
5
24
8.2
0.96875
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
8d511c517334c2f9153ee7a1500daa5144ecd02a
2,516
py
Python
python/ql/test/library-tests/frameworks/stdlib/xml_dom.py
adityasharad/ql
439dcc0731ae665402466a13daf12737ea3a2a44
[ "MIT" ]
643
2018-08-03T11:16:54.000Z
2020-04-27T23:10:55.000Z
python/ql/test/library-tests/frameworks/stdlib/xml_dom.py
DirtyApexAlpha/codeql
4c59b0d2992ee0d90cc2f46d6a85ac79e1d57f21
[ "MIT" ]
1,880
2018-08-03T11:28:32.000Z
2020-04-28T13:18:51.000Z
python/ql/test/library-tests/frameworks/stdlib/xml_dom.py
DirtyApexAlpha/codeql
4c59b0d2992ee0d90cc2f46d6a85ac79e1d57f21
[ "MIT" ]
218
2018-08-03T11:16:58.000Z
2020-04-24T02:24:00.000Z
from io import StringIO import xml.dom.minidom import xml.dom.pulldom import xml.sax x = "some xml" # minidom xml.dom.minidom.parse(StringIO(x)) # $ decodeFormat=XML decodeInput=StringIO(..) xmlVuln='XML bomb' decodeOutput=xml.dom.minidom.parse(..) getAPathArgument=StringIO(..) xml.dom.minidom.parse(file=StringIO(x)) # $ decodeFormat=XML decodeInput=StringIO(..) xmlVuln='XML bomb' decodeOutput=xml.dom.minidom.parse(..) getAPathArgument=StringIO(..) xml.dom.minidom.parseString(x) # $ decodeFormat=XML decodeInput=x xmlVuln='XML bomb' decodeOutput=xml.dom.minidom.parseString(..) xml.dom.minidom.parseString(string=x) # $ decodeFormat=XML decodeInput=x xmlVuln='XML bomb' decodeOutput=xml.dom.minidom.parseString(..) # pulldom xml.dom.pulldom.parse(StringIO(x))['START_DOCUMENT'][1] # $ decodeFormat=XML decodeInput=StringIO(..) xmlVuln='XML bomb' decodeOutput=xml.dom.pulldom.parse(..) getAPathArgument=StringIO(..) xml.dom.pulldom.parse(stream_or_string=StringIO(x))['START_DOCUMENT'][1] # $ decodeFormat=XML decodeInput=StringIO(..) xmlVuln='XML bomb' decodeOutput=xml.dom.pulldom.parse(..) getAPathArgument=StringIO(..) xml.dom.pulldom.parseString(x)['START_DOCUMENT'][1] # $ decodeFormat=XML decodeInput=x xmlVuln='XML bomb' decodeOutput=xml.dom.pulldom.parseString(..) xml.dom.pulldom.parseString(string=x)['START_DOCUMENT'][1] # $ decodeFormat=XML decodeInput=x xmlVuln='XML bomb' decodeOutput=xml.dom.pulldom.parseString(..) # These are based on SAX parses, and you can specify your own, so you can expose yourself to XXE (yay/) parser = xml.sax.make_parser() parser.setFeature(xml.sax.handler.feature_external_ges, True) xml.dom.minidom.parse(StringIO(x), parser) # $ decodeFormat=XML decodeInput=StringIO(..) xmlVuln='XML bomb' xmlVuln='DTD retrieval' xmlVuln='XXE' decodeOutput=xml.dom.minidom.parse(..) getAPathArgument=StringIO(..) xml.dom.minidom.parse(StringIO(x), parser=parser) # $ decodeFormat=XML decodeInput=StringIO(..) 
xmlVuln='XML bomb' xmlVuln='DTD retrieval' xmlVuln='XXE' decodeOutput=xml.dom.minidom.parse(..) getAPathArgument=StringIO(..) xml.dom.pulldom.parse(StringIO(x), parser) # $ decodeFormat=XML decodeInput=StringIO(..) xmlVuln='XML bomb' xmlVuln='DTD retrieval' xmlVuln='XXE' decodeOutput=xml.dom.pulldom.parse(..) getAPathArgument=StringIO(..) xml.dom.pulldom.parse(StringIO(x), parser=parser) # $ decodeFormat=XML decodeInput=StringIO(..) xmlVuln='XML bomb' xmlVuln='DTD retrieval' xmlVuln='XXE' decodeOutput=xml.dom.pulldom.parse(..) getAPathArgument=StringIO(..)
78.625
221
0.766693
326
2,516
5.889571
0.162577
0.08125
0.088021
0.075
0.821875
0.821875
0.803125
0.796354
0.796354
0.796354
0
0.001707
0.06876
2,516
31
222
81.16129
0.817755
0.680843
0
0
0
0
0.081321
0
0
0
0
0
0
1
0
false
0
0.210526
0
0.210526
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
a5c338ae59d3c029cd2f1e9bc28acc6734a93b08
79
py
Python
gblog/common/exceptions.py
nanvel/gblog
c0eb2f597645dda6c1d8631c0e31921ed37e38f4
[ "MIT" ]
1
2017-01-11T11:02:03.000Z
2017-01-11T11:02:03.000Z
gblog/common/exceptions.py
nanvel/gblog
c0eb2f597645dda6c1d8631c0e31921ed37e38f4
[ "MIT" ]
3
2015-04-13T07:06:47.000Z
2015-04-14T02:13:50.000Z
gblog/common/exceptions.py
nanvel/gblog
c0eb2f597645dda6c1d8631c0e31921ed37e38f4
[ "MIT" ]
null
null
null
from tornado.web import HTTPError class GBlogException(HTTPError): pass
11.285714
33
0.772152
9
79
6.777778
0.888889
0
0
0
0
0
0
0
0
0
0
0
0.177215
79
6
34
13.166667
0.938462
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.333333
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
6
571e14b4cf52410b3562b5a1e223074fb632d203
25,943
py
Python
pybind/slxos/v17r_2_00/cpu_state/summary/__init__.py
extremenetworks/pybind
44c467e71b2b425be63867aba6e6fa28b2cfe7fb
[ "Apache-2.0" ]
null
null
null
pybind/slxos/v17r_2_00/cpu_state/summary/__init__.py
extremenetworks/pybind
44c467e71b2b425be63867aba6e6fa28b2cfe7fb
[ "Apache-2.0" ]
null
null
null
pybind/slxos/v17r_2_00/cpu_state/summary/__init__.py
extremenetworks/pybind
44c467e71b2b425be63867aba6e6fa28b2cfe7fb
[ "Apache-2.0" ]
1
2021-11-05T22:15:42.000Z
2021-11-05T22:15:42.000Z
from operator import attrgetter import pyangbind.lib.xpathhelper as xpathhelper from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType from pyangbind.lib.base import PybindBase from decimal import Decimal from bitarray import bitarray import __builtin__ class summary(PybindBase): """ This class was auto-generated by the PythonClass plugin for PYANG from YANG module brocade-RAS-operational - based on the path /cpu-state/summary. Each member element of the container is represented as a class variable - with a specific YANG type. YANG Description: Overall CPU utilization summary """ __slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__summary_cpu_load_average_one_min','__summary_cpu_load_average_five_min','__summary_cpu_load_average_fifteen_min','__summary_cpu_util_current','__summary_cpu_util_current_user','__summary_cpu_util_current_kernel','__summary_cpu_util_current_iowait',) _yang_name = 'summary' _rest_name = 'summary' _pybind_generated_by = 'container' def __init__(self, *args, **kwargs): path_helper_ = kwargs.pop("path_helper", None) if path_helper_ is False: self._path_helper = False elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper): self._path_helper = path_helper_ elif hasattr(self, "_parent"): path_helper_ = getattr(self._parent, "_path_helper", False) self._path_helper = path_helper_ else: self._path_helper = False extmethods = kwargs.pop("extmethods", None) if extmethods is False: self._extmethods = False elif extmethods is not None and isinstance(extmethods, dict): self._extmethods = extmethods elif hasattr(self, "_parent"): extmethods = getattr(self._parent, "_extmethods", None) self._extmethods = extmethods else: self._extmethods = False self.__summary_cpu_util_current_user = 
YANGDynClass(base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="summary-cpu-util-current-user", rest_name="summary-cpu-util-current-user", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-RAS-operational', defining_module='brocade-RAS-operational', yang_type='decimal64', is_config=False) self.__summary_cpu_load_average_fifteen_min = YANGDynClass(base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="summary-cpu-load-average-fifteen-min", rest_name="summary-cpu-load-average-fifteen-min", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-RAS-operational', defining_module='brocade-RAS-operational', yang_type='decimal64', is_config=False) self.__summary_cpu_load_average_one_min = YANGDynClass(base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="summary-cpu-load-average-one-min", rest_name="summary-cpu-load-average-one-min", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-RAS-operational', defining_module='brocade-RAS-operational', yang_type='decimal64', is_config=False) self.__summary_cpu_util_current_kernel = YANGDynClass(base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="summary-cpu-util-current-kernel", rest_name="summary-cpu-util-current-kernel", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-RAS-operational', defining_module='brocade-RAS-operational', yang_type='decimal64', is_config=False) self.__summary_cpu_util_current = YANGDynClass(base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="summary-cpu-util-current", rest_name="summary-cpu-util-current", parent=self, path_helper=self._path_helper, 
extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-RAS-operational', defining_module='brocade-RAS-operational', yang_type='decimal64', is_config=False) self.__summary_cpu_load_average_five_min = YANGDynClass(base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="summary-cpu-load-average-five-min", rest_name="summary-cpu-load-average-five-min", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-RAS-operational', defining_module='brocade-RAS-operational', yang_type='decimal64', is_config=False) self.__summary_cpu_util_current_iowait = YANGDynClass(base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="summary-cpu-util-current-iowait", rest_name="summary-cpu-util-current-iowait", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-RAS-operational', defining_module='brocade-RAS-operational', yang_type='decimal64', is_config=False) load = kwargs.pop("load", None) if args: if len(args) > 1: raise TypeError("cannot create a YANG container with >1 argument") all_attr = True for e in self._pyangbind_elements: if not hasattr(args[0], e): all_attr = False break if not all_attr: raise ValueError("Supplied object did not have the correct attributes") for e in self._pyangbind_elements: nobj = getattr(args[0], e) if nobj._changed() is False: continue setmethod = getattr(self, "_set_%s" % e) if load is None: setmethod(getattr(args[0], e)) else: setmethod(getattr(args[0], e), load=load) def _path(self): if hasattr(self, "_parent"): return self._parent._path()+[self._yang_name] else: return [u'cpu-state', u'summary'] def _rest_path(self): if hasattr(self, "_parent"): if self._rest_name: return self._parent._rest_path()+[self._rest_name] else: return self._parent._rest_path() else: return [u'cpu-state', u'summary'] def 
_get_summary_cpu_load_average_one_min(self): """ Getter method for summary_cpu_load_average_one_min, mapped from YANG variable /cpu_state/summary/summary_cpu_load_average_one_min (decimal64) YANG Description: CPU load average in the last one minute """ return self.__summary_cpu_load_average_one_min def _set_summary_cpu_load_average_one_min(self, v, load=False): """ Setter method for summary_cpu_load_average_one_min, mapped from YANG variable /cpu_state/summary/summary_cpu_load_average_one_min (decimal64) If this variable is read-only (config: false) in the source YANG file, then _set_summary_cpu_load_average_one_min is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_summary_cpu_load_average_one_min() directly. YANG Description: CPU load average in the last one minute """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="summary-cpu-load-average-one-min", rest_name="summary-cpu-load-average-one-min", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-RAS-operational', defining_module='brocade-RAS-operational', yang_type='decimal64', is_config=False) except (TypeError, ValueError): raise ValueError({ 'error-string': """summary_cpu_load_average_one_min must be of a type compatible with decimal64""", 'defined-type': "decimal64", 'generated-type': """YANGDynClass(base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="summary-cpu-load-average-one-min", rest_name="summary-cpu-load-average-one-min", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-RAS-operational', defining_module='brocade-RAS-operational', yang_type='decimal64', is_config=False)""", }) self.__summary_cpu_load_average_one_min = t if hasattr(self, '_set'): self._set() 
def _unset_summary_cpu_load_average_one_min(self): self.__summary_cpu_load_average_one_min = YANGDynClass(base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="summary-cpu-load-average-one-min", rest_name="summary-cpu-load-average-one-min", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-RAS-operational', defining_module='brocade-RAS-operational', yang_type='decimal64', is_config=False) def _get_summary_cpu_load_average_five_min(self): """ Getter method for summary_cpu_load_average_five_min, mapped from YANG variable /cpu_state/summary/summary_cpu_load_average_five_min (decimal64) YANG Description: CPU load average in the last five minute """ return self.__summary_cpu_load_average_five_min def _set_summary_cpu_load_average_five_min(self, v, load=False): """ Setter method for summary_cpu_load_average_five_min, mapped from YANG variable /cpu_state/summary/summary_cpu_load_average_five_min (decimal64) If this variable is read-only (config: false) in the source YANG file, then _set_summary_cpu_load_average_five_min is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_summary_cpu_load_average_five_min() directly. 
YANG Description: CPU load average in the last five minute """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="summary-cpu-load-average-five-min", rest_name="summary-cpu-load-average-five-min", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-RAS-operational', defining_module='brocade-RAS-operational', yang_type='decimal64', is_config=False) except (TypeError, ValueError): raise ValueError({ 'error-string': """summary_cpu_load_average_five_min must be of a type compatible with decimal64""", 'defined-type': "decimal64", 'generated-type': """YANGDynClass(base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="summary-cpu-load-average-five-min", rest_name="summary-cpu-load-average-five-min", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-RAS-operational', defining_module='brocade-RAS-operational', yang_type='decimal64', is_config=False)""", }) self.__summary_cpu_load_average_five_min = t if hasattr(self, '_set'): self._set() def _unset_summary_cpu_load_average_five_min(self): self.__summary_cpu_load_average_five_min = YANGDynClass(base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="summary-cpu-load-average-five-min", rest_name="summary-cpu-load-average-five-min", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-RAS-operational', defining_module='brocade-RAS-operational', yang_type='decimal64', is_config=False) def _get_summary_cpu_load_average_fifteen_min(self): """ Getter method for summary_cpu_load_average_fifteen_min, mapped from YANG variable /cpu_state/summary/summary_cpu_load_average_fifteen_min (decimal64) YANG Description: CPU load average in the last fifteen minute """ return 
self.__summary_cpu_load_average_fifteen_min def _set_summary_cpu_load_average_fifteen_min(self, v, load=False): """ Setter method for summary_cpu_load_average_fifteen_min, mapped from YANG variable /cpu_state/summary/summary_cpu_load_average_fifteen_min (decimal64) If this variable is read-only (config: false) in the source YANG file, then _set_summary_cpu_load_average_fifteen_min is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_summary_cpu_load_average_fifteen_min() directly. YANG Description: CPU load average in the last fifteen minute """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="summary-cpu-load-average-fifteen-min", rest_name="summary-cpu-load-average-fifteen-min", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-RAS-operational', defining_module='brocade-RAS-operational', yang_type='decimal64', is_config=False) except (TypeError, ValueError): raise ValueError({ 'error-string': """summary_cpu_load_average_fifteen_min must be of a type compatible with decimal64""", 'defined-type': "decimal64", 'generated-type': """YANGDynClass(base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="summary-cpu-load-average-fifteen-min", rest_name="summary-cpu-load-average-fifteen-min", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-RAS-operational', defining_module='brocade-RAS-operational', yang_type='decimal64', is_config=False)""", }) self.__summary_cpu_load_average_fifteen_min = t if hasattr(self, '_set'): self._set() def _unset_summary_cpu_load_average_fifteen_min(self): self.__summary_cpu_load_average_fifteen_min = YANGDynClass(base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, 
yang_name="summary-cpu-load-average-fifteen-min", rest_name="summary-cpu-load-average-fifteen-min", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-RAS-operational', defining_module='brocade-RAS-operational', yang_type='decimal64', is_config=False) def _get_summary_cpu_util_current(self): """ Getter method for summary_cpu_util_current, mapped from YANG variable /cpu_state/summary/summary_cpu_util_current (decimal64) YANG Description: Current total CPU utilization percentage """ return self.__summary_cpu_util_current def _set_summary_cpu_util_current(self, v, load=False): """ Setter method for summary_cpu_util_current, mapped from YANG variable /cpu_state/summary/summary_cpu_util_current (decimal64) If this variable is read-only (config: false) in the source YANG file, then _set_summary_cpu_util_current is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_summary_cpu_util_current() directly. 
YANG Description: Current total CPU utilization percentage """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="summary-cpu-util-current", rest_name="summary-cpu-util-current", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-RAS-operational', defining_module='brocade-RAS-operational', yang_type='decimal64', is_config=False) except (TypeError, ValueError): raise ValueError({ 'error-string': """summary_cpu_util_current must be of a type compatible with decimal64""", 'defined-type': "decimal64", 'generated-type': """YANGDynClass(base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="summary-cpu-util-current", rest_name="summary-cpu-util-current", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-RAS-operational', defining_module='brocade-RAS-operational', yang_type='decimal64', is_config=False)""", }) self.__summary_cpu_util_current = t if hasattr(self, '_set'): self._set() def _unset_summary_cpu_util_current(self): self.__summary_cpu_util_current = YANGDynClass(base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="summary-cpu-util-current", rest_name="summary-cpu-util-current", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-RAS-operational', defining_module='brocade-RAS-operational', yang_type='decimal64', is_config=False) def _get_summary_cpu_util_current_user(self): """ Getter method for summary_cpu_util_current_user, mapped from YANG variable /cpu_state/summary/summary_cpu_util_current_user (decimal64) YANG Description: Current CPU utilization percentage of user processes """ return self.__summary_cpu_util_current_user def _set_summary_cpu_util_current_user(self, v, load=False): """ 
Setter method for summary_cpu_util_current_user, mapped from YANG variable /cpu_state/summary/summary_cpu_util_current_user (decimal64) If this variable is read-only (config: false) in the source YANG file, then _set_summary_cpu_util_current_user is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_summary_cpu_util_current_user() directly. YANG Description: Current CPU utilization percentage of user processes """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="summary-cpu-util-current-user", rest_name="summary-cpu-util-current-user", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-RAS-operational', defining_module='brocade-RAS-operational', yang_type='decimal64', is_config=False) except (TypeError, ValueError): raise ValueError({ 'error-string': """summary_cpu_util_current_user must be of a type compatible with decimal64""", 'defined-type': "decimal64", 'generated-type': """YANGDynClass(base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="summary-cpu-util-current-user", rest_name="summary-cpu-util-current-user", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-RAS-operational', defining_module='brocade-RAS-operational', yang_type='decimal64', is_config=False)""", }) self.__summary_cpu_util_current_user = t if hasattr(self, '_set'): self._set() def _unset_summary_cpu_util_current_user(self): self.__summary_cpu_util_current_user = YANGDynClass(base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="summary-cpu-util-current-user", rest_name="summary-cpu-util-current-user", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, 
namespace='urn:brocade.com:mgmt:brocade-RAS-operational', defining_module='brocade-RAS-operational', yang_type='decimal64', is_config=False) def _get_summary_cpu_util_current_kernel(self): """ Getter method for summary_cpu_util_current_kernel, mapped from YANG variable /cpu_state/summary/summary_cpu_util_current_kernel (decimal64) YANG Description: Current CPU utilization percentage of kernel processes """ return self.__summary_cpu_util_current_kernel def _set_summary_cpu_util_current_kernel(self, v, load=False): """ Setter method for summary_cpu_util_current_kernel, mapped from YANG variable /cpu_state/summary/summary_cpu_util_current_kernel (decimal64) If this variable is read-only (config: false) in the source YANG file, then _set_summary_cpu_util_current_kernel is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_summary_cpu_util_current_kernel() directly. YANG Description: Current CPU utilization percentage of kernel processes """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="summary-cpu-util-current-kernel", rest_name="summary-cpu-util-current-kernel", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-RAS-operational', defining_module='brocade-RAS-operational', yang_type='decimal64', is_config=False) except (TypeError, ValueError): raise ValueError({ 'error-string': """summary_cpu_util_current_kernel must be of a type compatible with decimal64""", 'defined-type': "decimal64", 'generated-type': """YANGDynClass(base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="summary-cpu-util-current-kernel", rest_name="summary-cpu-util-current-kernel", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-RAS-operational', 
defining_module='brocade-RAS-operational', yang_type='decimal64', is_config=False)""", }) self.__summary_cpu_util_current_kernel = t if hasattr(self, '_set'): self._set() def _unset_summary_cpu_util_current_kernel(self): self.__summary_cpu_util_current_kernel = YANGDynClass(base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="summary-cpu-util-current-kernel", rest_name="summary-cpu-util-current-kernel", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-RAS-operational', defining_module='brocade-RAS-operational', yang_type='decimal64', is_config=False) def _get_summary_cpu_util_current_iowait(self): """ Getter method for summary_cpu_util_current_iowait, mapped from YANG variable /cpu_state/summary/summary_cpu_util_current_iowait (decimal64) YANG Description: Current CPU utilization percentage of iowait """ return self.__summary_cpu_util_current_iowait def _set_summary_cpu_util_current_iowait(self, v, load=False): """ Setter method for summary_cpu_util_current_iowait, mapped from YANG variable /cpu_state/summary/summary_cpu_util_current_iowait (decimal64) If this variable is read-only (config: false) in the source YANG file, then _set_summary_cpu_util_current_iowait is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_summary_cpu_util_current_iowait() directly. 
YANG Description: Current CPU utilization percentage of iowait """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="summary-cpu-util-current-iowait", rest_name="summary-cpu-util-current-iowait", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-RAS-operational', defining_module='brocade-RAS-operational', yang_type='decimal64', is_config=False) except (TypeError, ValueError): raise ValueError({ 'error-string': """summary_cpu_util_current_iowait must be of a type compatible with decimal64""", 'defined-type': "decimal64", 'generated-type': """YANGDynClass(base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="summary-cpu-util-current-iowait", rest_name="summary-cpu-util-current-iowait", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-RAS-operational', defining_module='brocade-RAS-operational', yang_type='decimal64', is_config=False)""", }) self.__summary_cpu_util_current_iowait = t if hasattr(self, '_set'): self._set() def _unset_summary_cpu_util_current_iowait(self): self.__summary_cpu_util_current_iowait = YANGDynClass(base=RestrictedPrecisionDecimalType(precision=2), is_leaf=True, yang_name="summary-cpu-util-current-iowait", rest_name="summary-cpu-util-current-iowait", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-RAS-operational', defining_module='brocade-RAS-operational', yang_type='decimal64', is_config=False) summary_cpu_load_average_one_min = __builtin__.property(_get_summary_cpu_load_average_one_min) summary_cpu_load_average_five_min = __builtin__.property(_get_summary_cpu_load_average_five_min) summary_cpu_load_average_fifteen_min = __builtin__.property(_get_summary_cpu_load_average_fifteen_min) 
summary_cpu_util_current = __builtin__.property(_get_summary_cpu_util_current) summary_cpu_util_current_user = __builtin__.property(_get_summary_cpu_util_current_user) summary_cpu_util_current_kernel = __builtin__.property(_get_summary_cpu_util_current_kernel) summary_cpu_util_current_iowait = __builtin__.property(_get_summary_cpu_util_current_iowait) _pyangbind_elements = {'summary_cpu_load_average_one_min': summary_cpu_load_average_one_min, 'summary_cpu_load_average_five_min': summary_cpu_load_average_five_min, 'summary_cpu_load_average_fifteen_min': summary_cpu_load_average_fifteen_min, 'summary_cpu_util_current': summary_cpu_util_current, 'summary_cpu_util_current_user': summary_cpu_util_current_user, 'summary_cpu_util_current_kernel': summary_cpu_util_current_kernel, 'summary_cpu_util_current_iowait': summary_cpu_util_current_iowait, }
71.46832
500
0.778322
3,527
25,943
5.38588
0.050184
0.099495
0.079596
0.119394
0.920825
0.905612
0.882765
0.852337
0.837124
0.820541
0
0.006369
0.116448
25,943
362
501
71.665746
0.822354
0.195313
0
0.475962
0
0.033654
0.359707
0.276728
0
0
0
0
0
1
0.115385
false
0
0.038462
0
0.274038
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
573dcc511aa24922ff01ccebb31593dc8a2b77d3
49
py
Python
handlers/api/__init__.py
kocsob/tornado-template
ef71439b0526fd532684743ef6365dc16d90a26a
[ "Apache-2.0" ]
null
null
null
handlers/api/__init__.py
kocsob/tornado-template
ef71439b0526fd532684743ef6365dc16d90a26a
[ "Apache-2.0" ]
null
null
null
handlers/api/__init__.py
kocsob/tornado-template
ef71439b0526fd532684743ef6365dc16d90a26a
[ "Apache-2.0" ]
null
null
null
from example_apihandler import ExampleApiHandler
24.5
48
0.918367
5
49
8.8
1
0
0
0
0
0
0
0
0
0
0
0
0.081633
49
1
49
49
0.977778
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
27949839a80a8e23ab08afb28d8946ec11809ce3
15,126
py
Python
lammps_interface/BTW.py
zmzeng/lammps_interface
07da45c444cadcab06683f3fea0fe4a781377365
[ "MIT" ]
74
2016-10-19T23:42:21.000Z
2022-03-31T08:05:54.000Z
lammps_interface/BTW.py
zmzeng/lammps_interface
07da45c444cadcab06683f3fea0fe4a781377365
[ "MIT" ]
44
2017-01-22T02:25:12.000Z
2021-12-08T03:25:51.000Z
lammps_interface/BTW.py
mwitman1/lammps_interface
4ebea5493df9e7f2381b7cad3cd5b6b2ae698a27
[ "MIT" ]
46
2016-08-10T09:22:41.000Z
2022-03-01T03:33:14.000Z
""" Parameters for BTW-FF. """ #### BTW-FF atom types and properties ##### BTW_atoms = { #FF_num at_num at_mass valance vdW_rad[A] epsilo[kcal/mol] H-bond charge atom_type description "21" :( 1 , 1.008 , 1.0 , 1.62 , 0.02 , 0.923 , 0.622 ), # H H-Oi "75" :( 8 , 15.9994 , 4.0 , 1.82 , 0.059 , 0 , -1.242 ), # O O-H "170":( 8 , 15.9994 , 2.0 , 1.82 , 0.059 , 0 , -1.0908 ), # O O-Carboxylate "171":( 8 , 15.9994 , 4.0 , 1.82 , 0.059 , 0 , -1.1145 ), # O O-inorganic "172":( 30 , 65.38 , 4.0 , 2.29 , 0.276 , 0 , 1.281 ), # Zn Zn "192":( 40 , 91.224 , 8.0 , 3.52 , 0.367 , 0 , 2.601 ), # Zr Zr "185":( 29 , 63.546 , 5.0 , 2.29 , 0.276 , 0 , 1.0358 ), # Cu Cu "902":( 6 , 12.0 , 3.0 , 1.96 , 0.056 , 0 , -0.0114 ), # C Calpha "903":( 6 , 12 , 3.0 , 1.96 , 0.056 , 0 , -0.0124 ), # C C-doublephenolligand "913":( 6 , 12.0 , 3.0 , 1.94 , 0.056 , 0 , 1.5398 ), # C Cacid "912":( 6 , 12.0 , 3.0 , 1.96 , 0.056 , 0 , -0.0228 ), # C Cbenzene "915":( 1 , 1.008 , 1.0 , 1.62 , 0.02 , 0.923 , 0.1582 ) # H Hbenzene } #### BONDs in BTW-FF #### BTW_bonds = { #FF_type k[mdyne] r[A] "21_75" :( 3.630 , 0.989), "75_192" :( 5.500 , 2.276), "170_172":( 3.665 , 2.009), "170_192":( 5.821 , 2.338), "170_185":( 5.091 , 1.969), "170_913":( 5.999 , 1.299), "171_172":( 4.329 , 2.039), "171_192":( 5.809 , 2.192), "185_185":( 4.349 , 2.422), "902_912":( 4.500 , 1.389), "902_913":( 5.299 , 1.485), "903_903":( 5.899 , 1.465), "903_912":( 5.999 , 1.389), "912_912":( 4.500 , 1.389), "912_915":( 5.150 , 1.101) } #### ANGLES in BTW-FF #### BTW_angles = { # at1_atcen_at2 k[mdyne/rad^2], Theta1[degree] , Theta2[degree], Theta3[degree], Ksb1 , Ksb2 , Kss "21_75_192" :( 2.099, 116.848 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 ), "75_192_75" :( 2.099, 123.230 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 ), "75_192_170" :( 2.099, 89.658 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 ), "75_192_171" :( 2.099, 71.110 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 ), "170_172_170":( 1.000, 110.103 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 ), "170_172_171":( 3.000, 113.584 , 0.0 , 0.0 , 0.0 , 0.0 
, 0.0 ), "170_192_170":( 2.099, 73.103 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 ), "170_192_171":( 2.099, 84.318 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 ), "170_185_185":( 4.299, 87.822 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 ), #"170_185_170":( 0.05, 175.945 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 ),# Fourier equation used instead "170_913_170":( 2.867, 126.299 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 ), "170_913_902":( 1.867, 117.082 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 ), "171_192_171":( 2.099, 91.479 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 ), "172_170_913":( 3.022, 130.606 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 ), "172_171_172":( 1.198, 110.992 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 ), "185_170_913":( 3.322, 120.962 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 ), "192_75_192" :( 2.099, 103.406 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 ), "192_170_913":( 2.099, 139.820 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 ), "192_171_192":( 2.099, 118.408 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 ), "902_912_902":( 0.06, 121.797 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 ), # Added from MM3 "902_912_912":( 0.060, 121.582 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 ), "902_912_915":( 0.090, 119.859 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 ), "902_913_170":( 1.867, 117.082 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 ), "903_903_912":( 5.00 , 122.690 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 ), "903_912_912":( 5.00 , 122.904 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 ), "903_912_915":( 5.00 , 120.00 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 ), "912_902_912":( 0.000, 119.406 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 ), "912_902_913":( 0.360, 121.797 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 ), "912_903_912":( 5.00 , 117.621 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 ), "912_912_915":( 0.49 , 120.0 , 0.0 , 0.0 , 0.0 , 0.0 , 0.0 ) # Added from MM3 } #### DIHEDRALs in BTW-FF BTW_dihedrals = { #at1_at2_at3_at4 k1 , t1 , n1 , k2 , t2 , n2 , k3 , t3 , n3 , k4 , t4 , n4 "170_913_902_912":( 0.0 , 0.0 , 1 , 2.5 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "915_912_902_913":( 0.0 , 0.0 , 1 , 1.999 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "913_902_912_912":( 0.0 , 0.0 , 1 , 8.030 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), 
"912_902_912_912":( 0.0 , 0.0 , 1 , 8.030 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "912_902_912_915":( 0.0 , 0.0 , 1 , 8.030 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "902_912_912_915":( 0.0 , 0.0 , 1 , 8.030 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "902_912_912_902":( 0.0 , 0.0 , 1 , 8.030 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "903_903_912_915":( 0.0 , 0.0 , 1 , 6.999 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "903_903_912_912":( 0.0 , 0.0 , 1 , 6.9 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "912_903_903_912":( 0.0 , 0.0 , 1 , 6.9 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "912_903_912_912":( 0.0 , 0.0 , 1 , 4.930 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "912_903_912_915":( 0.0 , 0.0 , 1 , 4.930 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "915_912_912_903":( 0.0 , 0.0 , 1 , 4.930 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "902_912_912_903":( 0.0 , 0.0 , 1 , 5.930 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "902_912_912_912":( 0.0 , 0.0 , 1 , 8.030 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "171_172_170_913":( 0.0 , 0.0 , 1 , 4.690 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "172_170_913_170":( 0.0 , 0.0 , 1 , 2.176 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "170_172_170_913":( 0.0 , 0.0 , 1 , 0.860 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "172_170_913_902":( 0.0 , 0.0 , 1 , 0.072 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "170_172_171_172":( 0.0 , 0.0 , 1 , 1.000 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "170_185_170_913":( 0.0 , 0.0 , 1 , 0.860 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "185_170_913_902":( 0.0 , 0.0 , 1 , 0.072 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "170_913_170_185":( 0.0 , 0.0 , 1 , 5.805 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "913_170_185_185":( 0.0 , 0.0 , 1 , 0.850 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "170_185_185_170":( 0.0 , 0.0 , 1 , 2.071 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "170_185_185_170":( 0.0 , 0.0 , 1 , 2.071 , 
180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "171_192_170_913":( 0.0 , 0.0 , 1 , 2.064 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "192_170_913_170":( 0.0 , 0.0 , 1 , 2.017 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "915_912_902_913":( 0.0 , 0.0 , 1 , 1.999 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "170_192_170_913":( 0.0 , 0.0 , 1 , 0.860 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "192_170_913_902":( 0.0 , 0.0 , 1 , 0.072 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "170_192_171_192":( 0.0 , 0.0 , 1 , 1.000 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "913_170_192_75" :( 0.0 , 0.0 , 1 , 5.000 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "21_75_192_170" :( 0.0 , 0.0 , 1 , 5.000 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "21_75_191_170" :( 0.0 , 0.0 , 1 , 5.000 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "192_75_192_170" :( 0.0 , 0.0 , 1 , 5.000 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "192_75_192_75" :( 0.0 , 0.0 , 1 , 5.000 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "192_75_192_171" :( 0.0 , 0.0 , 1 , 5.000 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "21_75_192_75" :( 0.0 , 0.0 , 1 , 5.000 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "192_75_192_171" :( 0.0 , 0.0 , 1 , 5.000 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "192_171_192_75" :( 0.0 , 0.0 , 1 , 5.000 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "192_171_192_171":( 0.0 , 0.0 , 1 , 5.000 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "21_75_192_171" :( 0.0 , 0.0 , 1 , 5.000 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "192_75_192_171" :( 0.0 , 0.0 , 1 , 5.000 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "192_171_192_75" :( 0.0 , 0.0 , 1 , 5.000 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), "915_912_912_915":( 0.0 , 0.0 , 1 , 11.5 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), # Added from MM3 "902_912_902_913":( 0.0 , 0.0 , 1 , 8.03 , 180.0 , 2 , 0.0 , 0.0 , 3 , 0.0 , 0.0 , 4 ), # Added from MM3 "902_912_902_912":( 0.0 , 0.0 , 1 , 8.03 , 180.0 , 2 , 0.0 
, 0.0 , 3 , 0.0 , 0.0 , 4 ), # Added from MM3 } #### OUT-OF-PLANE bending in BTW-FF BTW_opbends = { #at1_at2_at3_at4 K_opb, phi , Ka1 , ka2 , ka3 ### """ ### H ### / ### C = C ### \ ### C ### """ "902_912_912_915":( 0.0 , 0.0 , 0.24 , 0.300 , 0.0 ),# BTW-FF coefficient is 0.0 while in MM3 is 0.2 "902_912_915_912":( 0.0 , 0.0 , 0.24 , 0.300 , 0.0 ),# BTW-FF coefficient is 0.0 while in MM3 is 0.2 "902_912_902_915":( 0.0 , 0.0 , 0.24 , 0.300 , 0.0 ),# BTW-FF coefficient is 0.0 while in MM3 is 0.2 "902_912_915_902":( 0.0 , 0.0 , 0.24 , 0.300 , 0.0 ),# BTW-FF coefficient is 0.0 while in MM3 is 0.2 "903_912_915_912":( 0.2 , 0.0 , 0.24 , 0.300 , 0.0 ),# Added from MM3 "903_912_912_915":( 0.2 , 0.0 , 0.24 , 0.300 , 0.0 ),# Added from MM3 "912_912_915_903":( 0.2 , 0.0 , 0.24 , 0.300 , 0.0 ),# Added from MM3 "912_912_903_915":( 0.2 , 0.0 , 0.24 , 0.300 , 0.0 ),# Added from MM3 "912_912_902_915":( 0.2 , 0.0 , 0.24 , 0.300 , 0.0 ),# Added from MM3 "912_912_915_902":( 0.2 , 0.0 , 0.24 , 0.300 , 0.0 ),# Added from MM3 "915_912_912_902":( 0.11, 0.0 , 0.24 , 0.300 , 0.0 ),# Added from MM3 "915_912_902_912":( 0.11, 0.0 , 0.24 , 0.300 , 0.0 ),# Added from MM3 "915_912_902_902":( 0.11, 0.0 , 0.24 , 0.300 , 0.0 ),# Added from MM3 "915_912_912_903":( 0.11, 0.0 , 0.24 , 0.300 , 0.0 ),# Added from MM3 "915_912_903_912":( 0.11, 0.0 , 0.24 , 0.300 , 0.0 ),# Added from MM3 ### """ ### O ### / ### C = C ### \ ### O ### """ "170_913_170_902":( 1.5 , 0.0 , 0.00 , 0.0 , 0.0 ),# "170_913_902_170":( 1.5 , 0.0 , 0.00 , 0.0 , 0.0 ),# "902_913_170_170":( 0.0 , 0.0 , 0.00 , 0.0 , 0.0 ),# BTW-FF coefficient is 0.0 while in MM3 is 0.2 ### ----------- ######## ----------- ### O | C | ######## C | C | ### \| / | ## ## \| / | ### |C = C | ## Or ## |C = C | ### /| \ | ## ## /| \ | ### O | C | ######## C | C | ### ----------- ######## ----------- "912_902_912_913":( 0.2 , 0.0 , 0.24 , 0.300 , 0.0 ),# Added from MM3 "912_902_913_912":( 0.2 , 0.0 , 0.24 , 0.300 , 0.0 ),# Added from MM3 "913_902_912_912":( 0.2 
, 0.0 , 0.24 , 0.300 , 0.0 ),# Added from MM3 "903_903_912_912":( 0.2 , 0.0 , 0.24 , 0.300 , 0.0 ),# Added from MM3 "912_903_912_903":( 0.2 , 0.0 , 0.24 , 0.300 , 0.0 ),# Added from MM3 "912_903_903_912":( 0.2 , 0.0 , 0.24 , 0.300 , 0.0 ),# Added from MM3 # # UIO special opbend # "192_171_192_192":( 2.0 , 0.0 , 0.00 , 0.00 , 0.0 ) } BTW_charges = { "Cu Paddlewheel_185" :( 1.0358 ), # Cu Cu "Cu Paddlewheel_170" :( -1.0908 ), # O O-Carboxylate "Cu Paddlewheel_902" :( -0.0114 ), # C Calpha "Cu Paddlewheel_913" :( 1.5398 ), # C Cacid "Cu Paddlewheel_912" :( -0.0228 ), # C Cbenzene "Cu Paddlewheel_915" :( 0.1582 ), # H Hbenzene "Zn4O_170" :( -1.1513 ), # O O-Carboxylate "Zn4O_171" :( -1.1145 ), # O O-inorganic "Zn4O_902" :( -0.0081 ), # C Calpha "Zn4O_913" :( 1.4972 ), # C Cacid "Zn4O_912" :( -0.0536 ), # C Cbenzene "Zn4O_915" :( 0.1259 ), # H Hbenzene "Zn4O_172" :( 1.281 ), # Zn Zn "IRMOF10_170" :( -1.1630 ), # O O-Carboxylate "IRMOF10_902" :( -0.0279 ), # C Calpha "IRMOF10_913" :( 1.5377 ), # C Cacid "IRMOF10_912" :( -0.0460 ), # C Cbenzene "IRMOF10_915" :( 0.1047 ), # H Hbenzene "IRMOF10_172" :( 1.2954 ), # Zn Zn "IRMOF10_171" :( -1.2144 ), # O O-inorganic "IRMOF10_903" :( -0.0124 ), # C C-doublephenolligand "Zr_UiO_170" :( -1.181 ), # O O-Carboxylate "Zr_UiO_902" :( -0.056 ), # C Calpha "Zr_UiO_913" :( 1.576 ), # C Cacid "Zr_UiO_912" :( -0.058 ), # C Cbenzene "Zr_UiO_915" :( 0.129 ), # H Hbenzene "Zr_UiO_75" :( -1.242 ), # O O-H "Zr_UiO_192" :( 2.601 ), # Zr Zr "Zr_UiO_21" :( 0.622 ), # H H-Oi "Zr_UiO_171" :( -1.189 ), # O O-inorganic "Zr_UiO_903" :( -0.035 ), # C C-doublephenolligand "TFF_171" :( -1.186 ), # O O-inorganic "TFF_172" :( 1.291 ), # Zn Zn "TFF_170" :( -1.154 ), # O O-Carboxylate "TFF_913" :( 1.539 ), # C Cacid "TFF_912" :( -0.050 ), # C Cbenzene "TFF_902" :( -0.008 ), # C Calpha "TFF_915" :( 0.118 ), # H Hbenzene "TFF_192" :( 2.605 ), # Zr Zr "TFF_75" :( -1.243 ), # O O-H "TFF_21" :( 0.622 ), # H O-H "TFF_903" :( -0.0124 ), # C C-doublephenolligand 
!!!!!! temporary! }
67.526786
133
0.391974
2,835
15,126
1.961905
0.095944
0.288745
0.307983
0.265372
0.62046
0.588637
0.571197
0.557893
0.541532
0.538296
0
0.443548
0.393362
15,126
223
134
67.829596
0.162598
0.171427
0
0.04918
0
0
0.158627
0
0
0
0
0
0
1
0
false
0
0
0
0
0
0
0
1
null
1
1
1
0
0
0
0
0
0
0
1
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
27a39de9fbea8f36a4994c7d2701499b02d10873
307
py
Python
netutils_linux_hardware/__init__.py
AlexeyAB/netutils-linux
f97a919ecd765c50c364415ba43eeb09e8e829ed
[ "MIT" ]
1
2019-02-09T23:37:41.000Z
2019-02-09T23:37:41.000Z
netutils_linux_hardware/__init__.py
shildenbrand/PyNetUtils
feafd5cf11ae9402bcdd1e38db38478a3ed0dee1
[ "MIT" ]
null
null
null
netutils_linux_hardware/__init__.py
shildenbrand/PyNetUtils
feafd5cf11ae9402bcdd1e38db38478a3ed0dee1
[ "MIT" ]
1
2020-05-28T07:47:20.000Z
2020-05-28T07:47:20.000Z
from netutils_linux_hardware import netdev from netutils_linux_hardware import parsers from netutils_linux_hardware import interrupts from netutils_linux_hardware.reader import Reader from netutils_linux_hardware.assessor import Assessor __all__ = ['parsers', 'netdev', 'interrupts', 'Reader', 'Assessor']
38.375
67
0.846906
38
307
6.473684
0.289474
0.243902
0.345528
0.50813
0.378049
0
0
0
0
0
0
0
0.091205
307
7
68
43.857143
0.88172
0
0
0
0
0
0.120521
0
0
0
0
0
0
1
0
false
0
0.833333
0
0.833333
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
6
27b0f7108012cf23bcdcf9f11468342c9e1cb9b0
275
py
Python
src/waldur_auth_social/extension.py
ahti87/waldur-mastermind
772268e62dfd8eadb387b2ec3789785817a6e621
[ "MIT" ]
null
null
null
src/waldur_auth_social/extension.py
ahti87/waldur-mastermind
772268e62dfd8eadb387b2ec3789785817a6e621
[ "MIT" ]
null
null
null
src/waldur_auth_social/extension.py
ahti87/waldur-mastermind
772268e62dfd8eadb387b2ec3789785817a6e621
[ "MIT" ]
null
null
null
from waldur_core.core import WaldurExtension class AuthSocialExtension(WaldurExtension): @staticmethod def django_app(): return 'waldur_auth_social' @staticmethod def django_urls(): from .urls import urlpatterns return urlpatterns
19.642857
44
0.712727
27
275
7.074074
0.592593
0.157068
0.219895
0
0
0
0
0
0
0
0
0
0.232727
275
13
45
21.153846
0.905213
0
0
0.222222
0
0
0.065455
0
0
0
0
0
0
1
0.222222
true
0
0.222222
0.111111
0.777778
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
1
1
0
0
6
27d3fc4bd038d42e3e283e83f2dd7d3819a32962
113
py
Python
socialnews/loadtest.py
agiliq/django-socialnews
aa4a1a4a0e3279e6c7999071648ba37c71df9d15
[ "BSD-3-Clause" ]
30
2015-01-18T16:34:03.000Z
2021-05-23T20:05:54.000Z
socialnews/loadtest.py
agiliq/django-socialnews
aa4a1a4a0e3279e6c7999071648ba37c71df9d15
[ "BSD-3-Clause" ]
null
null
null
socialnews/loadtest.py
agiliq/django-socialnews
aa4a1a4a0e3279e6c7999071648ba37c71df9d15
[ "BSD-3-Clause" ]
11
2015-02-21T10:45:41.000Z
2021-01-24T21:08:20.000Z
from django.core.management import setup_environ import settings setup_environ(settings) import models
14.125
49
0.79646
14
113
6.285714
0.642857
0.272727
0
0
0
0
0
0
0
0
0
0
0.168142
113
7
50
16.142857
0.93617
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.75
0
0.75
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
fd8bd7cc5f572899ab277c4ecf147622ffd96904
213
py
Python
dewaveADCP/__init__.py
apaloczy/dewaveADCP
f37702905ccaeb5a4ecc738cba9ee46cd76cd03f
[ "MIT" ]
4
2019-09-25T21:39:50.000Z
2022-02-16T19:11:21.000Z
dewaveADCP/__init__.py
apaloczy/dewaveADCP
f37702905ccaeb5a4ecc738cba9ee46cd76cd03f
[ "MIT" ]
null
null
null
dewaveADCP/__init__.py
apaloczy/dewaveADCP
f37702905ccaeb5a4ecc738cba9ee46cd76cd03f
[ "MIT" ]
1
2021-12-10T12:32:26.000Z
2021-12-10T12:32:26.000Z
from . import VerticalDetrend from . import VarianceFit from . import StructureFunction from . import CospectraFit from . import AdaptiveFiltering from . import beam2earth from . import stress from . import utils
23.666667
31
0.812207
24
213
7.208333
0.416667
0.462428
0
0
0
0
0
0
0
0
0
0.005525
0.150235
213
8
32
26.625
0.950276
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
fdb254829993dd33e8b019985ab64d68f1148c5f
119
py
Python
tictactoe/adapters/match_channel.py
pitzer42/nano_tcg
c984b253b8a53a707460aac21c10f140d16d902e
[ "MIT" ]
1
2020-09-30T21:03:37.000Z
2020-09-30T21:03:37.000Z
tictactoe/adapters/match_channel.py
pitzer42/nano_tcg
c984b253b8a53a707460aac21c10f140d16d902e
[ "MIT" ]
null
null
null
tictactoe/adapters/match_channel.py
pitzer42/nano_tcg
c984b253b8a53a707460aac21c10f140d16d902e
[ "MIT" ]
null
null
null
from gloop.adapters.match_channel import MatchClientChannel class TicTacToeMatchClient(MatchClientChannel): pass
19.833333
59
0.848739
11
119
9.090909
0.909091
0
0
0
0
0
0
0
0
0
0
0
0.109244
119
5
60
23.8
0.943396
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.333333
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
6
e30f5285c593fa7b807efa56263d5f7715d02464
5,674
py
Python
tests/search_api/test_stats.py
EnriqueSoria/pydoof
e5a2b7129e6c18e92b69501946be35cd386fcb47
[ "MIT" ]
null
null
null
tests/search_api/test_stats.py
EnriqueSoria/pydoof
e5a2b7129e6c18e92b69501946be35cd386fcb47
[ "MIT" ]
null
null
null
tests/search_api/test_stats.py
EnriqueSoria/pydoof
e5a2b7129e6c18e92b69501946be35cd386fcb47
[ "MIT" ]
null
null
null
from unittest import mock import unittest from pydoof.search_api import stats class TestStats(unittest.TestCase): @mock.patch('pydoof.search_api.stats.SearchAPIClient') def test_init_session(self, APIClientMock): hashid = 'aab32d8' session_id = 'SESSION_ID' stats.init_session(hashid, session_id) APIClientMock.return_value.put.assert_called_once_with( f'/6/{hashid}/stats/init', query_params={'session_id': session_id} ) @mock.patch('pydoof.search_api.stats.SearchAPIClient') def test_log_checkout(self, APIClientMock): hashid = 'aab32d8' session_id = 'SESSION_ID' stats.log_checkout(hashid, session_id) APIClientMock.return_value.put.assert_called_once_with( f'/6/{hashid}/stats/checkout', query_params={'session_id': session_id} ) @mock.patch('pydoof.search_api.stats.SearchAPIClient') def test_log_redirect_minimum_requirements(self, APIClientMock): hashid = 'aab32d8' redirection_id = 'ID' session_id = 'SESSION_ID' stats.log_redirect(hashid, redirection_id, session_id) APIClientMock.return_value.put.assert_called_once_with( f'/6/{hashid}/stats/redirect', query_params={ 'id': redirection_id, 'session_id': session_id } ) @mock.patch('pydoof.search_api.stats.SearchAPIClient') def test_log_redirect(self, APIClientMock): hashid = 'aab32d8' redirection_id = 'ID' session_id = 'SESSION_ID' query = 'QUERY' stats.log_redirect(hashid, redirection_id, session_id, query) APIClientMock.return_value.put.assert_called_once_with( f'/6/{hashid}/stats/redirect', query_params={ 'id': redirection_id, 'session_id': session_id, 'query': query } ) @mock.patch('pydoof.search_api.stats.SearchAPIClient') def test_click_stats_minimum_requirements(self, APIClientMock): hashid = 'aab32d8' dfid = 'ID' session_id = 'SESSION_ID' stats.click_stats(hashid, dfid, session_id) APIClientMock.return_value.put.assert_called_once_with( f'/6/{hashid}/stats/click', query_params={ 'dfid': dfid, 'session_id': session_id } ) @mock.patch('pydoof.search_api.stats.SearchAPIClient') def test_click_stats(self, 
APIClientMock): hashid = 'aab32d8' dfid = 'ID' session_id = 'SESSION_ID' query = 'QUERY' stats.click_stats(hashid, dfid, session_id, query) APIClientMock.return_value.put.assert_called_once_with( f'/6/{hashid}/stats/click', query_params={ 'dfid': dfid, 'session_id': session_id, 'query': query } ) @mock.patch('pydoof.search_api.stats.SearchAPIClient') def test_log_banner_image_click_minimum_requirements(self, APIClientMock): hashid = 'aab32d8' redirection_id = 'ID' session_id = 'SESSION_ID' stats.log_banner_image_click(hashid, redirection_id, session_id) APIClientMock.return_value.put.assert_called_once_with( f'/6/{hashid}/stats/image', query_params={ 'id': redirection_id, 'session_id': session_id } ) @mock.patch('pydoof.search_api.stats.SearchAPIClient') def test_log_banner_image_click(self, APIClientMock): hashid = 'aab32d8' redirection_id = 'ID' session_id = 'SESSION_ID' query = 'QUERY' stats.log_banner_image_click(hashid, redirection_id, session_id, query) APIClientMock.return_value.put.assert_called_once_with( f'/6/{hashid}/stats/image', query_params={ 'id': redirection_id, 'session_id': session_id, 'query': query } ) @mock.patch('pydoof.search_api.stats.SearchAPIClient') def test_add_to_cart(self, APIClientMock): hashid = 'aab32d8' index_name = 'product' session_id = '4affa6' amount = 2 item_id = 1235 title = 'Product' price = 12.99 stats.add_to_cart( hashid, index_name, session_id, item_id, amount, title, price ) APIClientMock.return_value.put.assert_called_once_with( f'/6/{hashid}/stats/cart/{session_id}', query_params={ 'index': index_name, 'id': item_id, 'amount': amount, 'title': title, 'price': price} ) @mock.patch('pydoof.search_api.stats.SearchAPIClient') def test_remove_from_cart(self, APIClientMock): hashid = 'aab32d8' index_name = 'product' session_id = '4affa6' amount = 2 item_id = 1235 stats.remove_from_cart( hashid, index_name, session_id, item_id, amount ) APIClientMock.return_value.patch.assert_called_once_with( 
f'/6/{hashid}/stats/cart/{session_id}', query_params={'index': index_name, 'id': item_id, 'amount': amount} ) @mock.patch('pydoof.search_api.stats.SearchAPIClient') def test_clear_cart(self, APIClientMock): hashid = 'aab32d8' session_id = '4affa6' stats.clear_cart(hashid, session_id) APIClientMock.return_value.delete.assert_called_once_with( f'/6/{hashid}/stats/cart/{session_id}' )
31.005464
79
0.603983
617
5,674
5.246353
0.098865
0.136237
0.101946
0.088971
0.908248
0.907013
0.87612
0.862218
0.852641
0.747915
0
0.015897
0.290448
5,674
182
80
31.175824
0.788127
0
0
0.609589
0
0
0.193162
0.127952
0
0
0
0
0.075342
1
0.075342
false
0
0.020548
0
0.10274
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
e3276ff249e741a061ca24242060f8f4ae6a4c80
764
py
Python
xmlns/rev/status.py
danja/danja.github.io
26662fdf910b8121e14b8470fc4abb94707c574a
[ "Apache-2.0" ]
null
null
null
xmlns/rev/status.py
danja/danja.github.io
26662fdf910b8121e14b8470fc4abb94707c574a
[ "Apache-2.0" ]
null
null
null
xmlns/rev/status.py
danja/danja.github.io
26662fdf910b8121e14b8470fc4abb94707c574a
[ "Apache-2.0" ]
null
null
null
import urllib u="""http://xmlarmyknife.org/api/rdf/sparql/query?default-graph-uri=http%3A%2F%2Fxmlns.com%2Ffoaf%2F0.1%2Findex.rdf&query=PREFIX+rdf%3A+%3Chttp%3A%2F%2Fwww.w3.org%2F1999%2F02%2F22-rdf-syntax-ns%23%3E%0D%0APREFIX+rdfs%3A+%3Chttp%3A%2F%2Fwww.w3.org%2F2000%2F01%2Frdf-schema%23%3E%0D%0APREFIX+vs%3A+%3Chttp%3A%2F%2Fwww.w3.org%2F2003%2F06%2Fsw-vocab-status%2Fns%23%3E%0D%0ASELECT+DISTINCT+%3Fstatus+%3Fx+%3Flabel%0D%0AWHERE+%7B%0D%0A%7B%0D%0A++%3Fx+a+rdf%3AProperty+.%0D%0A++%3Fx+rdfs%3Alabel+%3Flabel+.%0D%0A++%3Fx+vs%3Aterm_status+%3Fstatus+.%0D%0A%7D+UNION+%7B%0D%0A++%3Fx+a+rdfs%3AClass+.%0D%0A++%3Fx+rdfs%3Alabel+%3Flabel+.%0D%0A++%3Fx+vs%3Aterm_status+%3Fstatus+.%0D%0A%7D%0D%0A%7D%0D%0AORDER+BY+%3Fstatus&format=html""" print urllib.unquote(u)
191
723
0.747382
146
764
3.89726
0.458904
0.070299
0.073814
0.063269
0.362039
0.326889
0.326889
0.210896
0.210896
0.210896
0
0.160053
0.010471
764
3
724
254.666667
0.592593
0
0
0
0
0.333333
0.938239
0
0
0
0
0
0
0
null
null
0
0.333333
null
null
0.333333
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
1
0
0
0
0
0
1
1
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
6
e32921374d9783826f5e1170d9bfc9c62fb0dbee
64
py
Python
py_tdlib/constructors/user_type_regular.py
Mr-TelegramBot/python-tdlib
2e2d21a742ebcd439971a32357f2d0abd0ce61eb
[ "MIT" ]
24
2018-10-05T13:04:30.000Z
2020-05-12T08:45:34.000Z
py_tdlib/constructors/user_type_regular.py
MrMahdi313/python-tdlib
2e2d21a742ebcd439971a32357f2d0abd0ce61eb
[ "MIT" ]
3
2019-06-26T07:20:20.000Z
2021-05-24T13:06:56.000Z
py_tdlib/constructors/user_type_regular.py
MrMahdi313/python-tdlib
2e2d21a742ebcd439971a32357f2d0abd0ce61eb
[ "MIT" ]
5
2018-10-05T14:29:28.000Z
2020-08-11T15:04:10.000Z
from ..factory import Type class userTypeRegular(Type): pass
10.666667
28
0.765625
8
64
6.125
0.875
0
0
0
0
0
0
0
0
0
0
0
0.15625
64
5
29
12.8
0.907407
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.333333
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
6
e358234aec1716efc24fdaf2b86796526ef63b2e
5,547
py
Python
blmath/geometry/transform/test_coordinate_manager.py
metabolize/blmath
8ea8d7be60349a60ffeb08a3e34fca20ef9eb0da
[ "BSD-2-Clause" ]
6
2019-09-28T16:48:34.000Z
2022-03-25T17:05:46.000Z
blmath/geometry/transform/test_coordinate_manager.py
metabolize/blmath
8ea8d7be60349a60ffeb08a3e34fca20ef9eb0da
[ "BSD-2-Clause" ]
6
2019-09-09T16:42:02.000Z
2021-06-25T15:25:50.000Z
blmath/geometry/transform/test_coordinate_manager.py
metabolize/blmath
8ea8d7be60349a60ffeb08a3e34fca20ef9eb0da
[ "BSD-2-Clause" ]
4
2017-05-09T16:15:07.000Z
2019-02-15T14:15:30.000Z
# pylint: disable=invalid-unary-operand-type import unittest import numpy as np from blmath.geometry.transform.test_composite import create_cube_verts class TestCoordinateManager(unittest.TestCase): def test_coordinate_manager_forward(self): from blmath.geometry.transform.coordinate_manager import CoordinateManager cube_v = create_cube_verts([1., 0., 0.], 4.) cube_floor_point = np.array([3., 0., 2.]) # as lace.mesh.floor_point coordinate_manager = CoordinateManager() coordinate_manager.tag_as('source') coordinate_manager.translate(-cube_floor_point) coordinate_manager.scale(2) coordinate_manager.tag_as('floored_and_scaled') coordinate_manager.translate(np.array([0., -4., 0.])) coordinate_manager.tag_as('centered_at_origin') coordinate_manager.source = cube_v floored_and_scaled_v = coordinate_manager.do_transform( cube_v, from_tag='source', to_tag='floored_and_scaled' ) # Sanity check np.testing.assert_array_almost_equal(cube_v[0], [1., 0., 0.]) np.testing.assert_array_almost_equal(cube_v[6], [5., 4., 4.]) np.testing.assert_array_almost_equal(floored_and_scaled_v[0], [-4., 0., -4.]) np.testing.assert_array_almost_equal(floored_and_scaled_v[6], [4., 8., 4.]) centered_at_origin_v_1 = coordinate_manager.do_transform( cube_v, from_tag='source', to_tag='centered_at_origin' ) centered_at_origin_v_2 = coordinate_manager.do_transform( floored_and_scaled_v, from_tag='floored_and_scaled', to_tag='centered_at_origin' ) np.testing.assert_array_almost_equal(centered_at_origin_v_1[0], [-4., -4., -4.]) np.testing.assert_array_almost_equal(centered_at_origin_v_1[6], [4., 4., 4.]) np.testing.assert_array_almost_equal(centered_at_origin_v_2[0], [-4., -4., -4.]) np.testing.assert_array_almost_equal(centered_at_origin_v_2[6], [4., 4., 4.]) source_v_1 = coordinate_manager.do_transform( floored_and_scaled_v, from_tag='floored_and_scaled', to_tag='source' ) source_v_2 = coordinate_manager.do_transform( centered_at_origin_v_1, from_tag='centered_at_origin', to_tag='source' ) 
np.testing.assert_array_almost_equal(source_v_1, cube_v) np.testing.assert_array_almost_equal(source_v_2, cube_v) def test_coordinate_manager_forward_with_attrs(self): from blmath.geometry.transform.coordinate_manager import CoordinateManager cube_v = create_cube_verts([1., 0., 0.], 4.) cube_floor_point = np.array([3., 0., 2.]) # as lace.mesh.floor_point coordinate_manager = CoordinateManager() coordinate_manager.tag_as('source') coordinate_manager.translate(-cube_floor_point) coordinate_manager.scale(2) coordinate_manager.tag_as('floored_and_scaled') coordinate_manager.translate(np.array([0., -4., 0.])) coordinate_manager.tag_as('centered_at_origin') coordinate_manager.source = cube_v # Sanity check np.testing.assert_array_almost_equal(cube_v[0], [1., 0., 0.]) np.testing.assert_array_almost_equal(cube_v[6], [5., 4., 4.]) floored_and_scaled_v = coordinate_manager.floored_and_scaled np.testing.assert_array_almost_equal(floored_and_scaled_v[0], [-4., 0., -4.]) np.testing.assert_array_almost_equal(floored_and_scaled_v[6], [4., 8., 4.]) centered_at_origin_v = coordinate_manager.centered_at_origin np.testing.assert_array_almost_equal(centered_at_origin_v[0], [-4., -4., -4.]) np.testing.assert_array_almost_equal(centered_at_origin_v[6], [4., 4., 4.]) source_v = coordinate_manager.source np.testing.assert_array_almost_equal(source_v, cube_v) def test_coordinate_manager_forward_on_mesh(self): from mock import MagicMock from blmath.geometry.transform.coordinate_manager import CoordinateManager cube_v = create_cube_verts([1., 0., 0.], 4.) cube_floor_point = np.array([3., 0., 2.]) # as lace.mesh.floor_point # By default a magic mock will always have any attribute it's asked for; # here we set the spec property so that it will not respond to having a copy method # when the CoodinateManager looks for it. 
cube = MagicMock(spec=['v', 'other_thing'], v=cube_v, other_thing=np.array([-9.])) coordinate_manager = CoordinateManager() coordinate_manager.tag_as('source') coordinate_manager.translate(-cube_floor_point) coordinate_manager.scale(2) coordinate_manager.tag_as('floored_and_scaled') coordinate_manager.translate(np.array([0., -4., 0.])) coordinate_manager.tag_as('centered_at_origin') coordinate_manager.source = cube # Sanity check np.testing.assert_array_almost_equal(cube.v[0], [1., 0., 0.]) np.testing.assert_array_almost_equal(cube.v[6], [5., 4., 4.]) np.testing.assert_array_equal(cube.other_thing, [-9.]) floored_and_scaled = coordinate_manager.floored_and_scaled np.testing.assert_array_almost_equal(floored_and_scaled.v[0], [-4., 0., -4.]) np.testing.assert_array_almost_equal(floored_and_scaled.v[6], [4., 8., 4.]) np.testing.assert_array_equal(floored_and_scaled.other_thing, [-9.])
42.669231
91
0.678204
759
5,547
4.571805
0.129117
0.191066
0.099424
0.132565
0.839769
0.811816
0.777233
0.756484
0.745533
0.720173
0
0.027052
0.206959
5,547
129
92
43
0.761764
0.062917
0
0.553191
0
0
0.052053
0
0
0
0
0
0.244681
1
0.031915
false
0
0.074468
0
0.117021
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
e380a94587fb8a7cbcbda297628c0dc5c28d53b0
77
py
Python
templates/models_header.py
fecitpotentiam/djmodels_creator
dd46f00b054eabc9dc4a65c4e0b32a0c174bcf5a
[ "MIT" ]
1
2020-04-12T14:17:35.000Z
2020-04-12T14:17:35.000Z
templates/models_header.py
fecitpotentiam/djmodels_creator
dd46f00b054eabc9dc4a65c4e0b32a0c174bcf5a
[ "MIT" ]
null
null
null
templates/models_header.py
fecitpotentiam/djmodels_creator
dd46f00b054eabc9dc4a65c4e0b32a0c174bcf5a
[ "MIT" ]
null
null
null
header = """from django.db import models from django.db.models import * """
15.4
40
0.701299
11
77
4.909091
0.545455
0.37037
0.444444
0
0
0
0
0
0
0
0
0
0.155844
77
5
41
15.4
0.830769
0
0
0
0
0
0.794872
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
6
8bc321bd3ec6dc7e868a4ec6d304ac8febd2aa9b
343
py
Python
gym_pybullet_drones/envs/single_agent_rl/__init__.py
Yashupadhyay603/gym-pybullet-drones
30317a176672f0a462cc249b4e5d17a7078ea3d2
[ "MIT" ]
1
2021-06-10T07:43:55.000Z
2021-06-10T07:43:55.000Z
gym_pybullet_drones/envs/single_agent_rl/__init__.py
Yashupadhyay603/gym-pybullet-drones
30317a176672f0a462cc249b4e5d17a7078ea3d2
[ "MIT" ]
null
null
null
gym_pybullet_drones/envs/single_agent_rl/__init__.py
Yashupadhyay603/gym-pybullet-drones
30317a176672f0a462cc249b4e5d17a7078ea3d2
[ "MIT" ]
1
2021-04-01T01:56:45.000Z
2021-04-01T01:56:45.000Z
from gym_pybullet_drones.envs.single_agent_rl.BaseSingleAgentAviary import BaseSingleAgentAviary from gym_pybullet_drones.envs.single_agent_rl.TakeoffAviary import TakeoffAviary from gym_pybullet_drones.envs.single_agent_rl.HoverAviary import HoverAviary from gym_pybullet_drones.envs.single_agent_rl.FlyThruGateAviary import FlyThruGateAviary
85.75
96
0.921283
44
343
6.818182
0.295455
0.093333
0.2
0.28
0.506667
0.506667
0.506667
0.506667
0
0
0
0
0.043732
343
4
97
85.75
0.914634
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
4756cb1fdc7950deeba4df6617bbbf5e41336350
111
py
Python
translatable/exceptions.py
artscoop/django-translatable
5b5c818120bb7afe1f7639fa3181991307fbb3e8
[ "BSD-3-Clause" ]
null
null
null
translatable/exceptions.py
artscoop/django-translatable
5b5c818120bb7afe1f7639fa3181991307fbb3e8
[ "BSD-3-Clause" ]
null
null
null
translatable/exceptions.py
artscoop/django-translatable
5b5c818120bb7afe1f7639fa3181991307fbb3e8
[ "BSD-3-Clause" ]
1
2021-01-05T15:16:46.000Z
2021-01-05T15:16:46.000Z
from django.core.exceptions import ObjectDoesNotExist class MissingTranslation(ObjectDoesNotExist): pass
18.5
53
0.837838
10
111
9.3
0.9
0
0
0
0
0
0
0
0
0
0
0
0.117117
111
5
54
22.2
0.94898
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.333333
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
6
4777476a90e0453eb0ade1e3682444b53e53b386
489
py
Python
fault/logging.py
makaimann/fault
8c805415f398e64971d18fbd3014bc0b59fb38b8
[ "BSD-3-Clause" ]
31
2018-07-16T15:03:14.000Z
2022-03-10T08:36:09.000Z
fault/logging.py
makaimann/fault
8c805415f398e64971d18fbd3014bc0b59fb38b8
[ "BSD-3-Clause" ]
216
2018-07-18T20:00:34.000Z
2021-10-05T17:40:47.000Z
fault/logging.py
makaimann/fault
8c805415f398e64971d18fbd3014bc0b59fb38b8
[ "BSD-3-Clause" ]
10
2019-02-17T00:56:58.000Z
2021-11-05T13:31:37.000Z
from __future__ import absolute_import from __future__ import print_function import logging import traceback import inspect import sys log = logging.getLogger("fault") def info(message, *args, **kwargs): log.info(message, *args, **kwargs) def debug(message, *args, **kwargs): log.debug(message, *args, **kwargs) def warning(message, *args, **kwargs): log.warning(message, *args, **kwargs) def error(message, *args, **kwargs): log.error(message, *args, **kwargs)
18.111111
41
0.705521
62
489
5.403226
0.33871
0.262687
0.40597
0.238806
0
0
0
0
0
0
0
0
0.151329
489
26
42
18.807692
0.807229
0
0
0
0
0
0.010225
0
0
0
0
0
0
1
0.266667
false
0
0.4
0
0.666667
0.066667
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
0
1
0
0
6
477eec003a7a24a49da2ed718fed04a481eb8b2f
46
py
Python
collagen/data/utils/__init__.py
MIPT-Oulu/Collagen
0cbc4285d60e5c9fcc89f629fcf4321e80b7452c
[ "MIT" ]
4
2019-05-14T14:44:51.000Z
2020-03-13T08:37:48.000Z
collagen/data/utils/__init__.py
MIPT-Oulu/Collagen
0cbc4285d60e5c9fcc89f629fcf4321e80b7452c
[ "MIT" ]
26
2019-04-21T20:35:22.000Z
2022-03-12T00:32:57.000Z
collagen/data/utils/__init__.py
MIPT-Oulu/Collagen
0cbc4285d60e5c9fcc89f629fcf4321e80b7452c
[ "MIT" ]
1
2019-05-14T14:53:28.000Z
2019-05-14T14:53:28.000Z
from ._utils import * from .datasets import *
15.333333
23
0.73913
6
46
5.5
0.666667
0
0
0
0
0
0
0
0
0
0
0
0.173913
46
2
24
23
0.868421
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
478e0739c7312ba55495eedcbda7e015ef9d2b3b
127
py
Python
week1/exe1.py
mikealford/ktbyers
05f11dd0aa7f3b1a75013d923fadeac2bba6e083
[ "Apache-2.0" ]
null
null
null
week1/exe1.py
mikealford/ktbyers
05f11dd0aa7f3b1a75013d923fadeac2bba6e083
[ "Apache-2.0" ]
null
null
null
week1/exe1.py
mikealford/ktbyers
05f11dd0aa7f3b1a75013d923fadeac2bba6e083
[ "Apache-2.0" ]
null
null
null
ip_addr1 = '192.168.5.1' ip_addr2 = '192.168.5.2' ip_addr3 = '192.168.5.3' print(ip_addr1 + ' ' + ip_addr2 + ' ' + ip_addr3)
18.142857
49
0.606299
25
127
2.84
0.44
0.253521
0.295775
0
0
0
0
0
0
0
0
0.285714
0.173228
127
6
50
21.166667
0.390476
0
0
0
0
0
0.277778
0
0
0
0
0
0
1
0
false
0
0
0
0
0.25
1
0
0
null
1
1
0
0
0
0
0
0
0
0
1
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
47c51f9940cc37c23ef4e9e8c6195a90db47282e
11,908
py
Python
tests/test_figure_area.py
rbardaji/graffiti
e10490a58b7eff041ff8212784f05daa076e3f53
[ "MIT" ]
null
null
null
tests/test_figure_area.py
rbardaji/graffiti
e10490a58b7eff041ff8212784f05daa076e3f53
[ "MIT" ]
null
null
null
tests/test_figure_area.py
rbardaji/graffiti
e10490a58b7eff041ff8212784f05daa076e3f53
[ "MIT" ]
null
null
null
import unittest from run import app from config import test_token class ResourceTest(unittest.TestCase): def setUp(self): self.app = app.test_client() # Add some data to the DB query = '/admin_data/R' for i in range(3): payload = { 'platform_code': 'test_platform', 'parameter': 'test_parameter', 'depth': 10, 'depth_qc': 1, 'time': f'3000-03-09T21:4{i}:00Z', "time_qc": 1, "lat": 20, "lat_qc": 1, "lon": 20, "lon_qc": 1, "value": i, "qc": 1 } self.app.post(query, json=payload, headers={'Authorization': test_token}) for i in range(3): payload = { 'platform_code': 'test_platform2', 'parameter': 'test_parameter', 'depth': 10, 'depth_qc': 1, 'time': f'3000-03-09T21:4{i}:00Z', "time_qc": 1, "lat": 20, "lat_qc": 1, "lon": 20, "lon_qc": 1, "value": i, "qc": 1 } self.app.post(query, json=payload, headers={'Authorization': test_token}) for i in range(3): payload = { 'platform_code': 'test_platform2', 'parameter': 'test_parameter2', 'depth': 10, 'depth_qc': 1, 'time': f'3000-03-09T21:4{i}:00Z', "time_qc": 1, "lat": 20, "lat_qc": 1, "lon": 20, "lon_qc": 1, "value": i, "qc": 1 } self.app.post(query, json=payload, headers={'Authorization': test_token}) def test_get_area_201(self): """ GET figure/area/test_platform/test_parameter should return a status_code = 201 """ query = 'figure/area/test_platform/test_parameter' response = self.app.get(query, headers={'Authorization': test_token}) self.assertEqual(201, response.status_code) def test_get_area_201_multiplatform(self): """ GET figure/area/test_platform,test_platform2/test_parameter should return a status_code = 201 """ query = 'figure/area/test_platform,test_platform2/test_parameter' response = self.app.get(query, headers={'Authorization': test_token}) self.assertEqual(201, response.status_code) def test_get_area_201_multiplatform_with_parameter_for_one(self): """ GET figure/area/test_platform,test_platform2/test_parameter2 should return a status_code = 201 """ query = 'figure/area/test_platform,test_platform2/test_parameter2' 
response = self.app.get(query, headers={'Authorization': test_token}) self.assertEqual(201, response.status_code) def test_get_area_404_multiplatform_bad_parameter(self): """ GET figure/area/test_platform,test_platform2/bad_parameter should return a status_code = 404 """ query = 'figure/area/test_platform,test_platform2/bad_parameter' response = self.app.get(query, headers={'Authorization': test_token}) self.assertEqual(404, response.status_code) def test_get_area_404_bad_parameter(self): """ GET figure/area/test_platform/bad_parameter should return a status_code = 404 """ query = 'figure/area/test_platform/bad_parameter' response = self.app.get(query, headers={'Authorization': test_token}) self.assertEqual(404, response.status_code) def test_get_area_404_bad_platform(self): """ GET figure/area/bad_platform/test_parameter should return a status_code = 404 """ query = 'figure/area/test_platform/bad_parameter' response = self.app.get(query, headers={'Authorization': test_token}) self.assertEqual(404, response.status_code) def test_get_area_404_bad_platform_and_parameter(self): """ GET figure/area/bad_platform/test_parameter should return a status_code = 404 """ query = 'figure/area/bad_platform/bad_parameter' response = self.app.get(query, headers={'Authorization': test_token}) self.assertEqual(404, response.status_code) def test_get_area_201_depth_min(self): """ GET figure/area/test_platform/test_parameter?depth_min=0 should return a status_code = 201 """ query = 'figure/area/test_platform/test_parameter?depth_min=0' response = self.app.get(query, headers={'Authorization': test_token}) self.assertEqual(201, response.status_code) def test_get_area_201_depth_max(self): """ GET figure/area/test_platform/test_parameter?depth_max=20 should return a status_code = 201 """ query = 'figure/area/test_platform/test_parameter?depth_max=20' response = self.app.get(query, headers={'Authorization': test_token}) self.assertEqual(201, response.status_code) def 
test_get_area_201_time_min(self): """ GET figure/area/test_platform/test_parameter?time_min=2000-01-01T00:00:00Z should return a status_code = 201 """ query = 'figure/area/test_platform/test_parameter?time_min=2000-01-01T00:00:00Z' response = self.app.get(query, headers={'Authorization': test_token}) self.assertEqual(201, response.status_code) def test_get_area_201_time_max(self): """ GET figure/area/test_platform/test_parameter?time_max=4000-01-01T00:00:00Z should return a status_code = 201 """ query = 'figure/area/test_platform/test_parameter?time_max=4000-01-01T00:00:00Z' response = self.app.get(query, headers={'Authorization': test_token}) self.assertEqual(201, response.status_code) def test_get_area_201_qc(self): """ GET figure/area/test_platform/test_parameter?qc=1 should return a status_code = 201 """ query = 'figure/area/test_platform/test_parameter?qc=1' response = self.app.get(query, headers={'Authorization': test_token}) self.assertEqual(201, response.status_code) def test_get_area_201_all(self): """ GET figure/area/test_platform/test_parameter?... should return a status_code = 201 """ query = 'figure/area/test_platform/test_parameter?qc=1&' + \ 'time_max=4000-01-01T00:00:00Z&time_min=2000-01-01T00:00:00Z&' + \ 'depth_max=20&depth_min=0&template=plotly_white' response = self.app.get(query, headers={'Authorization': test_token}) self.assertEqual(201, response.status_code) def test_get_area_201_template_ggplot2(self): """ GET figure/area/test_platform/test_parameter? template=ggplot2 should return a status_code = 201 """ query = 'figure/area/test_platform/test_parameter?template=ggplot2' response = self.app.get(query, headers={'Authorization': test_token}) self.assertEqual(201, response.status_code) def test_get_area_201_template_seaborn(self): """ GET figure/area/test_platform/test_parameter? 
template=seaborn should return a status_code = 201 """ query = 'figure/area/test_platform/test_parameter?template=seaborn' response = self.app.get(query, headers={'Authorization': test_token}) self.assertEqual(201, response.status_code) def test_get_area_201_template_simple_white(self): """ GET figure/area/test_platform/test_parameter? template=simple_white should return a status_code = 201 """ query = 'figure/area/test_platform/test_parameter?template=simple_white' response = self.app.get(query, headers={'Authorization': test_token}) self.assertEqual(201, response.status_code) def test_get_area_201_template_plotly(self): """ GET figure/area/test_platform/test_parameter? template=plotly should return a status_code = 201 """ query = 'figure/area/test_platform/test_parameter?template=plotly' response = self.app.get(query, headers={'Authorization': test_token}) self.assertEqual(201, response.status_code) def test_get_area_201_template_plotly_white(self): """ GET figure/area/test_platform/test_parameter? template=plotly_white should return a status_code = 201 """ query = 'figure/area/test_platform/test_parameter?template=plotly_white' response = self.app.get(query, headers={'Authorization': test_token}) self.assertEqual(201, response.status_code) def test_get_area_201_template_plotly_dark(self): """ GET figure/area/test_platform/test_parameter? template=plotly_dark should return a status_code = 201 """ query = 'figure/area/test_platform/test_parameter?template=plotly_dark' response = self.app.get(query, headers={'Authorization': test_token}) self.assertEqual(201, response.status_code) def test_get_area_201_template_presentation(self): """ GET figure/area/test_platform/test_parameter? 
template=presentation should return a status_code = 201 """ query = 'figure/area/test_platform/test_parameter?template=presentation' response = self.app.get(query, headers={'Authorization': test_token}) self.assertEqual(201, response.status_code) def test_get_area_201_template_xgridoff(self): """ GET figure/area/test_platform/test_parameter? template=xgridoff should return a status_code = 201 """ query = 'figure/area/test_platform/test_parameter?template=xgridoff' response = self.app.get(query, headers={'Authorization': test_token}) self.assertEqual(201, response.status_code) def test_get_area_201_template_ygridoff(self): """ GET figure/area/test_platform/test_parameter? template=ygridoff should return a status_code = 201 """ query = 'figure/area/test_platform/test_parameter?template=ygridoff' response = self.app.get(query, headers={'Authorization': test_token}) self.assertEqual(201, response.status_code) def test_get_area_201_template_gridon(self): """ GET figure/area/test_platform/test_parameter? template=gridon should return a status_code = 201 """ query = 'figure/area/test_platform/test_parameter?template=gridon' response = self.app.get(query, headers={'Authorization': test_token}) self.assertEqual(201, response.status_code) def tearDown(self): """ Delete all generated data """ for i in range(3): query = '/admin_data/R/test_platform_test_parameter_10_' + \ f'3000-03-09T21:4{i}:00Z' self.app.delete(query, headers={'Authorization': test_token}) for i in range(3): query = '/admin_data/R/test_platform2_test_parameter_10_' + \ f'3000-03-09T21:4{i}:00Z' self.app.delete(query, headers={'Authorization': test_token}) for i in range(3): query = '/admin_data/R/test_platform2_test_parameter2_10_' + \ f'3000-03-09T21:4{i}:00Z' self.app.delete(query, headers={'Authorization': test_token})
38.289389
88
0.615553
1,406
11,908
4.962304
0.066145
0.065931
0.086284
0.135588
0.950838
0.947112
0.947112
0.940519
0.901964
0.816827
0
0.051374
0.275865
11,908
310
89
38.412903
0.757741
0.184918
0
0.624242
0
0.006061
0.262459
0.18447
0
0
0
0
0.139394
1
0.151515
false
0
0.018182
0
0.175758
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
9a6c12d4caf225bc212d0b255bd69c14714335c4
62
py
Python
park/envs/load_balance/__init__.py
utkarsh5k/park
e7eba74f532204564df42a8e82a65ed025ce3b30
[ "MIT" ]
180
2019-04-30T05:50:32.000Z
2022-03-28T01:32:07.000Z
park/envs/load_balance/__init__.py
utkarsh5k/park
e7eba74f532204564df42a8e82a65ed025ce3b30
[ "MIT" ]
21
2019-05-03T17:42:54.000Z
2022-01-25T19:31:42.000Z
park/envs/load_balance/__init__.py
utkarsh5k/park
e7eba74f532204564df42a8e82a65ed025ce3b30
[ "MIT" ]
42
2019-05-01T15:15:19.000Z
2021-11-19T05:27:09.000Z
from park.envs.load_balance.load_balance import LoadBalanceEnv
62
62
0.903226
9
62
6
0.777778
0.407407
0
0
0
0
0
0
0
0
0
0
0.048387
62
1
62
62
0.915254
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
9a6c98160d9006d50265a5d1795be152d3926ccc
92
py
Python
lib/model/__init__.py
n2westman/CS410_Project
f8cfd5ab4d07354f3bb5f712e848853fbc9d7f83
[ "MIT" ]
null
null
null
lib/model/__init__.py
n2westman/CS410_Project
f8cfd5ab4d07354f3bb5f712e848853fbc9d7f83
[ "MIT" ]
null
null
null
lib/model/__init__.py
n2westman/CS410_Project
f8cfd5ab4d07354f3bb5f712e848853fbc9d7f83
[ "MIT" ]
null
null
null
from .wordrepr import * from .model import * from .VAT import * from .meanteachers import *
18.4
27
0.73913
12
92
5.666667
0.5
0.441176
0
0
0
0
0
0
0
0
0
0
0.173913
92
4
28
23
0.894737
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
d025ae46a357921dda5cb2e082f425fb68138faf
452
py
Python
AJAX/apidemo/ui/views.py
shivampip/FrontEnd
b43e5088baaa3accb9210a3093e982035c58cff1
[ "MIT" ]
null
null
null
AJAX/apidemo/ui/views.py
shivampip/FrontEnd
b43e5088baaa3accb9210a3093e982035c58cff1
[ "MIT" ]
null
null
null
AJAX/apidemo/ui/views.py
shivampip/FrontEnd
b43e5088baaa3accb9210a3093e982035c58cff1
[ "MIT" ]
null
null
null
from django.shortcuts import render # Create your views here. def index(request): return render(request, "index.html") def text(request): return render(request, "text.html") def rating(request): return render(request, "rating.html") def slider(request): return render(request, "slider.html") def dialog(request): return render(request, "dialog.html") def progress(request): return render(request, "progress.html")
18.08
43
0.70354
57
452
5.578947
0.333333
0.245283
0.358491
0.490566
0
0
0
0
0
0
0
0
0.170354
452
25
43
18.08
0.848
0.050885
0
0
0
0
0.151869
0
0
0
0
0
0
1
0.461538
false
0
0.076923
0.461538
1
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
6
d051d79133dfe795c74d49312c0c994054af6515
3,480
py
Python
build_dataset/build-stackoverflow/statistics.py
Faffola/emotions-online-qa
46935a8e0489b677769def629294d00dd860f5a1
[ "MIT" ]
null
null
null
build_dataset/build-stackoverflow/statistics.py
Faffola/emotions-online-qa
46935a8e0489b677769def629294d00dd860f5a1
[ "MIT" ]
null
null
null
build_dataset/build-stackoverflow/statistics.py
Faffola/emotions-online-qa
46935a8e0489b677769def629294d00dd860f5a1
[ "MIT" ]
null
null
null
import csv # Calcola la media del sentiment score (positivo e negativo) per ogni topic # SPECIFICA PER IL RAGGRUPPAMENTO PER TOPIC def mean_sentiscore_per_topic(file_name, output_file): dict_reader = csv.DictReader(open(file_name, 'r'), delimiter=';') topic_posscore_mean = {} # Media positive sentiment score per ogni topic topic_negscore_mean = {} # Media negative sentiment score per ogni topic topic_numb = {} # Conta il numero di post per ogni topic (utilizzato per calcolare la media) for row in dict_reader: topic = row['Topic'] if topic_numb.has_key(topic): topic_numb[topic] += 1 topic_posscore_mean[topic] += int(row['SentimentPositiveScore']) topic_negscore_mean[topic] += int(row['SentimentNegativeScore']) else: topic_numb[topic] = 1 topic_posscore_mean[topic] = int(row['SentimentPositiveScore']) topic_negscore_mean[topic] = int(row['SentimentNegativeScore']) out = open(output_file, 'w') for topic in sorted(topic_numb, key=int): topic_posscore_mean[topic] = float(topic_posscore_mean[topic]) / float(topic_numb[topic]) topic_negscore_mean[topic] = float(topic_negscore_mean[topic]) / float(topic_numb[topic]) out.write("Topic -> " + topic) out.write("\nNumber of posts -> " + str(topic_numb[topic])) out.write("\n\tMean sentiment positive score -> " + str(topic_posscore_mean[topic])) out.write("\n\tMean sentiment negative score -> " + str(topic_negscore_mean[topic])) out.write("\n\n") print "Topic -> " + topic print "\n\tMean sentiment positive score -> ", "|" * int(topic_posscore_mean[topic] / 0.1) print "\n\tMean sentiment negative score -> ", "|" * int((topic_negscore_mean[topic] / 0.1) * -1) print "\n" # Calcola la media del sentiment score (positivo e negativo) per ogni valore del campo group_by # es. 
group_by = "Topic" def mean_sentiscore(file_name, output_file, group_by): dict_reader = csv.DictReader(open(file_name, 'r'), delimiter=';') topic_posscore_mean = {} # Media positive sentiment score per ogni topic topic_negscore_mean = {} # Media negative sentiment score per ogni topic topic_numb = {} # Conta il numero di post per ogni topic (utilizzato per calcolare la media) for row in dict_reader: group_val = row[group_by] if topic_numb.has_key(group_val): topic_numb[group_val] += 1 topic_posscore_mean[group_val] += int(row['SentimentPositiveScore']) topic_negscore_mean[group_val] += int(row['SentimentNegativeScore']) else: topic_numb[group_val] = 1 topic_posscore_mean[group_val] = int(row['SentimentPositiveScore']) topic_negscore_mean[group_val] = int(row['SentimentNegativeScore']) out = open(output_file, 'w') for group_val in sorted(topic_numb): topic_posscore_mean[group_val] = float(topic_posscore_mean[group_val]) / float(topic_numb[group_val]) topic_negscore_mean[group_val] = float(topic_negscore_mean[group_val]) / float(topic_numb[group_val]) out.write(group_by + " -> " + group_val) out.write("\nNumber of posts -> " + str(topic_numb[group_val])) out.write("\n\tMean sentiment positive score -> " + str(topic_posscore_mean[group_val])) out.write("\n\tMean sentiment negative score -> " + str(topic_negscore_mean[group_val])) out.write("\n\n") print group_by, " -> " + group_val print "\n\tMean sentiment positive score -> ", "|" * int(topic_posscore_mean[group_val] / 0.1) print "\n\tMean sentiment negative score -> ", "|" * int((topic_negscore_mean[group_val] / 0.1) * -1) print "\n"
45.789474
103
0.724713
495
3,480
4.856566
0.141414
0.073211
0.099002
0.054908
0.867304
0.821547
0.747088
0.704659
0.648087
0.630616
0
0.004693
0.142816
3,480
75
104
46.4
0.801207
0.162931
0
0.315789
0
0
0.195382
0.060648
0
0
0
0
0
0
null
null
0
0.017544
null
null
0.140351
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
6
d06b7d082cbdf1b9163e2dc07fc03a82988d5394
224
py
Python
models/networks/__init__.py
kevinchoy/oct-schlemm-seg
e8b78695521dc65a7bbd1bcdb65b0a6200af25b3
[ "BSD-4-Clause-UC" ]
1
2021-11-17T01:54:53.000Z
2021-11-17T01:54:53.000Z
models/networks/__init__.py
kevinchoy/oct-schlemm-seg
e8b78695521dc65a7bbd1bcdb65b0a6200af25b3
[ "BSD-4-Clause-UC" ]
1
2022-01-24T18:20:04.000Z
2022-01-24T18:20:04.000Z
models/networks/__init__.py
kevinchoy/oct-schlemm-seg
e8b78695521dc65a7bbd1bcdb65b0a6200af25b3
[ "BSD-4-Clause-UC" ]
null
null
null
from .UNet2dFusedAttentionDsvMultiscale import * from .UNet2dAttentionDsvMultiscale import * from .UNet2d import * from .ResUNet2d import * from .DRAGUNet import * from .LFDRAGUNet import * from .UNet2dAttentionDsv import *
28
48
0.8125
21
224
8.666667
0.428571
0.32967
0
0
0
0
0
0
0
0
0
0.02551
0.125
224
7
49
32
0.903061
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
d077c9af42896d230847e4a68f9e4f1074582769
131
py
Python
mvatv/exception/exceptions.py
Ilcyb/MvATv
ac358a736dd9f571aa04c73687e674d1cf8663e9
[ "0BSD" ]
null
null
null
mvatv/exception/exceptions.py
Ilcyb/MvATv
ac358a736dd9f571aa04c73687e674d1cf8663e9
[ "0BSD" ]
1
2021-06-01T21:42:13.000Z
2021-06-01T21:42:13.000Z
mvatv/exception/exceptions.py
Ilcyb/MvATv
ac358a736dd9f571aa04c73687e674d1cf8663e9
[ "0BSD" ]
null
null
null
class CantPlugingError(Exception): pass class NoResourceError(Exception): pass class SearchInfoError(Exception): pass
16.375
34
0.763359
12
131
8.333333
0.5
0.39
0.36
0
0
0
0
0
0
0
0
0
0.167939
131
8
35
16.375
0.917431
0
0
0.5
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
6
d08100ce567c8dc777a98c80c17f35416bce02c4
3,939
py
Python
server/tests/test_project.py
OpenChemistry/experimentaldataplatform
f45a7ee4f9087a3e8fa61374ade4bd7b04584f61
[ "BSD-3-Clause" ]
2
2018-10-10T20:38:14.000Z
2020-07-01T13:14:59.000Z
server/tests/test_project.py
OpenChemistry/experimentaldataplatform
f45a7ee4f9087a3e8fa61374ade4bd7b04584f61
[ "BSD-3-Clause" ]
23
2018-09-06T22:31:53.000Z
2021-05-24T13:22:04.000Z
server/tests/test_project.py
OpenChemistry/edp
f45a7ee4f9087a3e8fa61374ade4bd7b04584f61
[ "BSD-3-Clause" ]
null
null
null
import pytest import datetime import json from pytest_girder.assertions import assertStatus, assertStatusOk @pytest.mark.plugin('edp') def test_create_public(server, user, project_request): from girder.plugins.edp.models.project import Project r = server.request('/edp/projects', method='POST', body=json.dumps(project_request), type='application/json', user=user) assertStatus(r, 201) assert '_id' in r.json project = Project().load(r.json['_id'], force=True) assert project['owner'] == user['_id'] assert project_request.items() <= project.items() @pytest.mark.plugin('edp') def test_create_private(server, user, project_request): from girder.plugins.edp.models.project import Project project_request['public'] = False r = server.request('/edp/projects', method='POST', body=json.dumps(project_request), type='application/json', user=user) assertStatus(r, 201) assert '_id' in r.json project = Project().load(r.json['_id'], force=True) assert project_request.items() <= project.items() @pytest.mark.plugin('edp') def test_update(server, user, project): from girder.plugins.edp.models.project import Project updates = { 'title': 'Nothing to see here.' } r = server.request('/edp/projects/%s' % project['_id'], method='PATCH', body=json.dumps(updates), type='application/json', user=user) assertStatusOk(r) project = Project().load(r.json['_id'], force=True) assert updates.items() <= project.items() @pytest.mark.plugin('edp') def test_update_non_existent(server, user, project): from girder.plugins.edp.models.project import Project updates = { 'title': 'Nothing to see here.' 
} non_existent = '5ae71e1ff657102b11ce2233' r = server.request('/edp/projects/%s' % non_existent, method='PATCH', body=json.dumps(updates), type='application/json', user=user) assertStatus(r, 400) @pytest.mark.plugin('edp') def test_delete(server, user, project): from girder.plugins.edp.models.project import Project r = server.request('/edp/projects/%s' % project['_id'], method='DELETE', user=user) assertStatusOk(r) project = Project().load(project['_id'], force=True) assert project is None @pytest.mark.plugin('edp') def test_delete_with_cycle(server, user, project, cycle): from girder.plugins.edp.models.project import Project from girder.plugins.edp.models.cycle import Cycle r = server.request('/edp/projects/%s' % project['_id'], method='DELETE', user=user) assertStatusOk(r) project = Project().load(project['_id'], force=True) assert project is None cycle = Cycle().load(cycle['_id'], force=True) assert cycle is None @pytest.mark.plugin('edp') def test_find(server, user, project): from girder.plugins.edp.models.project import Project r = server.request('/edp/projects', method='GET', user=user) assertStatusOk(r) assert len(r.json) == 1 @pytest.mark.plugin('edp') def test_find_owner(server, user, admin, project): from girder.plugins.edp.models.project import Project params = { 'owner': admin['_id'] } r = server.request('/edp/projects', params=params, method='GET', user=user) assertStatusOk(r) assert len(r.json) == 0 params['owner'] = user['_id'] r = server.request('/edp/projects', params=params, method='GET', user=user) assertStatusOk(r) assert len(r.json) == 1 @pytest.mark.plugin('edp') def test_get(server, user, admin, project): r = server.request('/edp/projects/%s' % project['_id'], method='GET', user=user) assertStatusOk(r) assert project.items() <= r.json.items()
30.3
88
0.639249
486
3,939
5.096708
0.135802
0.02826
0.05652
0.068631
0.842551
0.842551
0.821558
0.763424
0.721437
0.669762
0
0.009064
0.215791
3,939
129
89
30.534884
0.792813
0
0
0.670213
0
0
0.106145
0.006094
0
0
0
0
0.255319
1
0.095745
false
0
0.138298
0
0.234043
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
d097c0cab34056141857fa9380638f16b78d47a5
218
py
Python
Doc/PREPRODUCTION/django-simple-email-confirmation-develop/simple_email_confirmation/__init__.py
zerxen/django_boilerplate
c29214ba02497b559a1f5c57b880e71b0f9041ea
[ "Unlicense" ]
null
null
null
Doc/PREPRODUCTION/django-simple-email-confirmation-develop/simple_email_confirmation/__init__.py
zerxen/django_boilerplate
c29214ba02497b559a1f5c57b880e71b0f9041ea
[ "Unlicense" ]
3
2020-02-12T01:06:54.000Z
2021-06-10T20:32:53.000Z
venv/lib/python3.6/site-packages/simple_email_confirmation/__init__.py
zerxen/django_boilerplate
c29214ba02497b559a1f5c57b880e71b0f9041ea
[ "Unlicense" ]
null
null
null
__version__ = '0.23' __all__ = [ 'email_confirmed', 'unconfirmed_email_created', 'primary_email_changed', ] from .signals import ( email_confirmed, unconfirmed_email_created, primary_email_changed, )
18.166667
70
0.733945
23
218
6.173913
0.565217
0.197183
0.352113
0.422535
0.788732
0.788732
0.788732
0.788732
0
0
0
0.016484
0.165138
218
11
71
19.818182
0.763736
0
0
0
0
0
0.298165
0.211009
0
0
0
0
0
1
0
false
0
0.111111
0
0.111111
0
1
0
0
null
0
1
1
0
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
d0a976b98b93980f7287fab38b593f0caa250dc4
213
py
Python
tests/detector/config_module_missing/__init__.py
dadaloop82/viseron
1c6c446a4856e16c0e2ed6b9323d169fbdcae20f
[ "MIT" ]
399
2020-08-31T21:13:07.000Z
2022-03-31T18:54:26.000Z
tests/detector/config_module_missing/__init__.py
dadaloop82/viseron
1c6c446a4856e16c0e2ed6b9323d169fbdcae20f
[ "MIT" ]
157
2020-09-01T18:59:56.000Z
2022-03-25T07:14:19.000Z
tests/detector/config_module_missing/__init__.py
dadaloop82/viseron
1c6c446a4856e16c0e2ed6b9323d169fbdcae20f
[ "MIT" ]
53
2020-09-01T07:35:59.000Z
2022-03-28T23:21:16.000Z
"""Dummy module that is missing the Config class.""" from viseron.detector import AbstractObjectDetection class ObjectDetection(AbstractObjectDetection): """Dummy module that is missing the Config class."""
30.428571
56
0.784038
24
213
6.958333
0.583333
0.131737
0.179641
0.203593
0.45509
0.45509
0.45509
0.45509
0
0
0
0
0.131455
213
6
57
35.5
0.902703
0.43662
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
1
0
1
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
ef2f59346362d55b3180a7f4dcc1d4e9f3b4df6e
1,660
py
Python
tests/test_complex_example.py
valerybriz/vanguard-kit
3d3516537215e7195fc4df3acc9b9c9209d01781
[ "MIT" ]
null
null
null
tests/test_complex_example.py
valerybriz/vanguard-kit
3d3516537215e7195fc4df3acc9b9c9209d01781
[ "MIT" ]
null
null
null
tests/test_complex_example.py
valerybriz/vanguard-kit
3d3516537215e7195fc4df3acc9b9c9209d01781
[ "MIT" ]
null
null
null
from vanguardkit import create_html_tree, calcuate_html_tree_distance def test_calculate_impact_when_a_branch_changes(): with open("tests/html_examples/complex_example_a.html") as example_a: with open("tests/html_examples/complex_example_b.html") as example_b: a_tree = create_html_tree(example_a) b_tree = create_html_tree(example_b) assert calcuate_html_tree_distance(a_tree, b_tree) > 1 def test_calculate_difference_between_div_class_branch_a(): with open("tests/html_examples/complex_example_a.html") as example_a: with open("tests/html_examples/complex_example_b.html") as example_b: a_tree = create_html_tree(example_a, specific_tag="div", class_="branch-a") b_tree = create_html_tree(example_b, specific_tag="div", class_="branch-a") assert calcuate_html_tree_distance(a_tree, b_tree) == 4 def test_calculate_difference_between_div_class_branch_b(): with open("tests/html_examples/complex_example_a.html") as example_a: with open("tests/html_examples/complex_example_b.html") as example_b: a_tree = create_html_tree(example_a, specific_tag="div", class_="branch-b") b_tree = create_html_tree(example_b, specific_tag="div", class_="branch-b") assert calcuate_html_tree_distance(a_tree, b_tree) == 0
48.823529
77
0.604819
204
1,660
4.45098
0.166667
0.096916
0.10793
0.112335
0.886564
0.886564
0.884361
0.884361
0.75
0.705947
0
0.002655
0.319277
1,660
33
78
50.30303
0.800885
0
0
0.666667
0
0
0.178313
0.151807
0
0
0
0
0.111111
1
0.111111
false
0
0.037037
0
0.148148
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
327340e2bbb58c862390c309a564713307935f41
44
py
Python
src/pygmy/validator/__init__.py
TinLe/pygmy
54ff3ed4ce0dbbe2868556e7fbf8bde97baa8b07
[ "MIT" ]
null
null
null
src/pygmy/validator/__init__.py
TinLe/pygmy
54ff3ed4ce0dbbe2868556e7fbf8bde97baa8b07
[ "MIT" ]
null
null
null
src/pygmy/validator/__init__.py
TinLe/pygmy
54ff3ed4ce0dbbe2868556e7fbf8bde97baa8b07
[ "MIT" ]
null
null
null
from pygmy.validator.link import LinkSchema
22
43
0.863636
6
44
6.333333
1
0
0
0
0
0
0
0
0
0
0
0
0.090909
44
1
44
44
0.95
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
3274a5cb2371e1e1ab85b901cac29cb999cf3510
41
py
Python
src/dirtyfields/__init__.py
idonethis/django-dirtyfields
e2ac2976fda6914ce18f4d3e8423ce0aa8395c8f
[ "BSD-3-Clause" ]
null
null
null
src/dirtyfields/__init__.py
idonethis/django-dirtyfields
e2ac2976fda6914ce18f4d3e8423ce0aa8395c8f
[ "BSD-3-Clause" ]
null
null
null
src/dirtyfields/__init__.py
idonethis/django-dirtyfields
e2ac2976fda6914ce18f4d3e8423ce0aa8395c8f
[ "BSD-3-Clause" ]
1
2019-01-25T09:32:36.000Z
2019-01-25T09:32:36.000Z
from dirtyfields import DirtyFieldsMixin
20.5
40
0.902439
4
41
9.25
1
0
0
0
0
0
0
0
0
0
0
0
0.097561
41
1
41
41
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
3296391d0a170976a567e976dcee5c6f913da986
130
py
Python
insurancecompany/insurancecompany/views.py
karthikpalavalli/csci5448
4d2c84f5ee9080e032e7d73c33c7378f8a813938
[ "MIT" ]
null
null
null
insurancecompany/insurancecompany/views.py
karthikpalavalli/csci5448
4d2c84f5ee9080e032e7d73c33c7378f8a813938
[ "MIT" ]
null
null
null
insurancecompany/insurancecompany/views.py
karthikpalavalli/csci5448
4d2c84f5ee9080e032e7d73c33c7378f8a813938
[ "MIT" ]
null
null
null
from django.http import HttpResponse def insurance_home(request): return HttpResponse('Welcome to the Insurance Company!')
18.571429
60
0.784615
16
130
6.3125
0.875
0
0
0
0
0
0
0
0
0
0
0
0.146154
130
6
61
21.666667
0.90991
0
0
0
0
0
0.255814
0
0
0
0
0
0
1
0.333333
false
0
0.333333
0.333333
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
1
1
1
0
0
6
32b452acbbfe615e9a5381fb36b72bdd26aaca7c
44
py
Python
bede/tagging/__init__.py
SeanMatthewNolan/Bede
5fa0396be35007ecea14acdaecaca8e1810cd8f8
[ "MIT" ]
null
null
null
bede/tagging/__init__.py
SeanMatthewNolan/Bede
5fa0396be35007ecea14acdaecaca8e1810cd8f8
[ "MIT" ]
null
null
null
bede/tagging/__init__.py
SeanMatthewNolan/Bede
5fa0396be35007ecea14acdaecaca8e1810cd8f8
[ "MIT" ]
null
null
null
from .classes import Library, Document, Tag
22
43
0.795455
6
44
5.833333
1
0
0
0
0
0
0
0
0
0
0
0
0.136364
44
1
44
44
0.921053
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
3ee28d430d403c4e3d588ec75331f15931a46fa5
43
py
Python
server/train.py
bfortuner/label-ai
f05896c2b2c2d282763ee7db54b5f66066073961
[ "MIT" ]
1
2017-08-26T20:08:12.000Z
2017-08-26T20:08:12.000Z
server/train.py
bfortuner/label-ai
f05896c2b2c2d282763ee7db54b5f66066073961
[ "MIT" ]
null
null
null
server/train.py
bfortuner/label-ai
f05896c2b2c2d282763ee7db54b5f66066073961
[ "MIT" ]
1
2018-04-11T16:42:53.000Z
2018-04-11T16:42:53.000Z
import os import pandas as pd import utils
10.75
19
0.813953
8
43
4.375
0.75
0
0
0
0
0
0
0
0
0
0
0
0.186047
43
3
20
14.333333
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
411419506936de1073e70bbf89af8ec45e9823fe
52
py
Python
examples/pytest/directory/test_func.py
Gnonpi/balto
18d51f0a6ba90bc2083b34518d1ced5c2e86b7a0
[ "MIT" ]
16
2018-10-07T11:45:05.000Z
2021-11-03T05:22:47.000Z
examples/pytest/directory/test_func.py
Lothiraldan/litr
6a4b57ebd95d5bc968f9d4057de81138d59dcae2
[ "MIT" ]
15
2018-10-08T13:29:24.000Z
2021-09-11T10:01:52.000Z
examples/pytest/directory/test_func.py
Lothiraldan/litr
6a4b57ebd95d5bc968f9d4057de81138d59dcae2
[ "MIT" ]
5
2018-10-08T09:11:17.000Z
2019-11-28T14:04:14.000Z
import pytest def test_success(): assert True
8.666667
19
0.711538
7
52
5.142857
1
0
0
0
0
0
0
0
0
0
0
0
0.230769
52
5
20
10.4
0.9
0
0
0
0
0
0
0
0
0
0
0
0.333333
1
0.333333
true
0
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
1
0
1
0
0
6
eb1426e1b13bc25c028345d203f9f70614a058d2
23
py
Python
copypastor/config/__init__.py
Venomen/copypastor
add35e6b4ea17632c2b678a629d541b74b55ff0e
[ "MIT" ]
null
null
null
copypastor/config/__init__.py
Venomen/copypastor
add35e6b4ea17632c2b678a629d541b74b55ff0e
[ "MIT" ]
null
null
null
copypastor/config/__init__.py
Venomen/copypastor
add35e6b4ea17632c2b678a629d541b74b55ff0e
[ "MIT" ]
null
null
null
from ..config import *
11.5
22
0.695652
3
23
5.333333
1
0
0
0
0
0
0
0
0
0
0
0
0.173913
23
1
23
23
0.842105
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
eb20888f33e247664aa8e2e3ea3b6e7699167653
102
py
Python
Codewars/8kyu/find-the-integral/Python/solution1.py
RevansChen/online-judge
ad1b07fee7bd3c49418becccda904e17505f3018
[ "MIT" ]
7
2017-09-20T16:40:39.000Z
2021-08-31T18:15:08.000Z
Codewars/8kyu/find-the-integral/Python/solution1.py
RevansChen/online-judge
ad1b07fee7bd3c49418becccda904e17505f3018
[ "MIT" ]
null
null
null
Codewars/8kyu/find-the-integral/Python/solution1.py
RevansChen/online-judge
ad1b07fee7bd3c49418becccda904e17505f3018
[ "MIT" ]
null
null
null
# Python - 3.6.0 integrate = lambda c, e: f'{c / (e + 1) if c % (e + 1) else c // (e + 1)}x^{e + 1}'
25.5
83
0.441176
23
102
1.956522
0.565217
0.177778
0.2
0
0
0
0
0
0
0
0
0.09589
0.284314
102
3
84
34
0.520548
0.137255
0
0
0
1
0.639535
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
0
1
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
de16af77c5421f63d2c8fb579146a25773d7f440
69
py
Python
numertweak/__init__.py
kmedian/numertweak
ecbe6a6d02e0f921ec9a22ec9b6563a72e5628e0
[ "MIT" ]
null
null
null
numertweak/__init__.py
kmedian/numertweak
ecbe6a6d02e0f921ec9a22ec9b6563a72e5628e0
[ "MIT" ]
1
2019-03-23T21:49:57.000Z
2019-08-15T10:05:10.000Z
numertweak/__init__.py
kmedian/numertweak
ecbe6a6d02e0f921ec9a22ec9b6563a72e5628e0
[ "MIT" ]
null
null
null
from .get_cols import get_cols from .load_pandas import load_dataset
23
37
0.855072
12
69
4.583333
0.583333
0.254545
0
0
0
0
0
0
0
0
0
0
0.115942
69
2
38
34.5
0.901639
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
de3ea6a2d8b27eaae34ba46cbf89151a21bb824e
128
py
Python
qlist/qlist.py
QsonLabs/open-qlist-py
f6c9ae430b1b36846a7765b920edc0fe2fbe1260
[ "Apache-2.0" ]
null
null
null
qlist/qlist.py
QsonLabs/open-qlist-py
f6c9ae430b1b36846a7765b920edc0fe2fbe1260
[ "Apache-2.0" ]
null
null
null
qlist/qlist.py
QsonLabs/open-qlist-py
f6c9ae430b1b36846a7765b920edc0fe2fbe1260
[ "Apache-2.0" ]
null
null
null
import os def main(): """Prints info about qlist""" print("qlist enterprise available at - https://www.qsonlabs.com")
18.285714
69
0.664063
17
128
5
0.941176
0
0
0
0
0
0
0
0
0
0
0
0.1875
128
6
70
21.333333
0.817308
0.179688
0
0
0
0
0.565657
0
0
0
0
0
0
1
0.333333
true
0
0.333333
0
0.666667
0.333333
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
1
0
1
0
0
6
de552914dfa4dda55c16c915d4c8109959b5b29c
2,479
py
Python
d06.py
f-koehler/adventofcode
b1f5f36b64e1e0e9decc3a3941cf207096d0102e
[ "MIT" ]
1
2020-07-01T16:10:06.000Z
2020-07-01T16:10:06.000Z
d06.py
f-koehler/adventofcode
b1f5f36b64e1e0e9decc3a3941cf207096d0102e
[ "MIT" ]
null
null
null
d06.py
f-koehler/adventofcode
b1f5f36b64e1e0e9decc3a3941cf207096d0102e
[ "MIT" ]
null
null
null
#!/bin/env python3 import re def part1(): lights = [] for x in range(0, 1000): lights.append([False for y in range(0, 1000)]) regex = re.compile(r"^(?P<command>turn\son|turn\soff|toggle)\s(?P<x1>\d+),(?P<y1>\d+)\s+through\s+(?P<x2>\d+),(?P<y2>\d+)$") with open("d06.txt") as f: commands = f.read().splitlines() for cmd in commands: m = regex.match(cmd) grpdict = m.groupdict() c = grpdict["command"] x1 = int(grpdict["x1"]) y1 = int(grpdict["y1"]) x2 = int(grpdict["x2"]) y2 = int(grpdict["y2"]) if c == "turn on": for x in range(x1, x2+1): for y in range(y1, y2+1): lights[x][y] = True elif c == "turn off": for x in range(x1, x2+1): for y in range(y1, y2+1): lights[x][y] = False elif c == "toggle": for x in range(x1, x2+1): for y in range(y1, y2+1): lights[x][y] = not lights[x][y] turned_on = 0 for x in range(0, 1000): for y in range(0, 1000): if lights[x][y]: turned_on += 1 print(turned_on) def part2(): lights = [] for x in range(0, 1000): lights.append([0 for y in range(0, 1000)]) regex = re.compile(r"^(?P<command>turn\son|turn\soff|toggle)\s(?P<x1>\d+),(?P<y1>\d+)\s+through\s+(?P<x2>\d+),(?P<y2>\d+)$") with open("d06.txt") as f: commands = f.read().splitlines() for cmd in commands: m = regex.match(cmd) grpdict = m.groupdict() c = grpdict["command"] x1 = int(grpdict["x1"]) y1 = int(grpdict["y1"]) x2 = int(grpdict["x2"]) y2 = int(grpdict["y2"]) if c == "turn on": for x in range(x1, x2+1): for y in range(y1, y2+1): lights[x][y] += 1 elif c == "turn off": for x in range(x1, x2+1): for y in range(y1, y2+1): lights[x][y] -= 1 if lights[x][y] < 0: lights[x][y] = 0 elif c == "toggle": for x in range(x1, x2+1): for y in range(y1, y2+1): lights[x][y] += 2 brightness = 0 for x in range(0, 1000): for y in range(0, 1000): brightness += lights[x][y] print(brightness) if __name__ == "__main__": part1() part2()
32.618421
128
0.454215
367
2,479
3.038147
0.171662
0.125561
0.078924
0.098655
0.860987
0.832287
0.832287
0.832287
0.832287
0.7713
0
0.075544
0.369907
2,479
75
129
33.053333
0.638284
0.006858
0
0.676056
0
0.028169
0.120276
0.08208
0
0
0
0
0
1
0.028169
false
0
0.014085
0
0.042254
0.028169
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
deb11fddab76b8741ef4b0d45c26405f53f00b5b
6,975
py
Python
test_preprocessor.py
xingdi-eric-yuan/gata
059cd2e486adfdb5edc3e2df628d573ee9a3796b
[ "MIT" ]
1
2021-04-28T03:31:07.000Z
2021-04-28T03:31:07.000Z
test_preprocessor.py
xingdi-eric-yuan/gata
059cd2e486adfdb5edc3e2df628d573ee9a3796b
[ "MIT" ]
null
null
null
test_preprocessor.py
xingdi-eric-yuan/gata
059cd2e486adfdb5edc3e2df628d573ee9a3796b
[ "MIT" ]
1
2021-04-28T03:32:57.000Z
2021-04-28T03:32:57.000Z
import pytest import torch from preprocessor import SpacyPreprocessor @pytest.mark.parametrize( "batch,expected_preprocessed,expected_mask", [ ( ["My name is Peter"], torch.tensor([[2, 3, 4, 5]]), torch.tensor([[1, 1, 1, 1]]).float(), ), ( ["my name is peter"], torch.tensor([[2, 3, 4, 5]]), torch.tensor([[1, 1, 1, 1]]).float(), ), ( ["My name is Peter", "Is my name David?"], torch.tensor([[2, 3, 4, 5, 0], [4, 2, 3, 1, 1]]), torch.tensor([[1, 1, 1, 1, 0], [1, 1, 1, 1, 1]]).float(), ), ], ) def test_spacy_preprocessor_preprocess(batch, expected_preprocessed, expected_mask): sp = SpacyPreprocessor(["<pad>", "<unk>", "my", "name", "is", "peter"]) preprocessed, mask = sp.preprocess(batch) assert preprocessed.equal(expected_preprocessed) assert mask.equal(expected_mask) @pytest.mark.parametrize( "batch,expected_preprocessed,expected_mask", [ ( ["My name is Peter"], torch.tensor([[2, 3, 4, 5]]), torch.tensor([[1, 1, 1, 1]]).float(), ), ( ["my name is peter"], torch.tensor([[2, 3, 4, 5]]), torch.tensor([[1, 1, 1, 1]]).float(), ), ( ["My name is Peter", "Is my name David?"], torch.tensor([[2, 3, 4, 5, 0], [4, 2, 3, 1, 1]]), torch.tensor([[1, 1, 1, 1, 0], [1, 1, 1, 1, 1]]).float(), ), ], ) def test_spacy_preprocessor_decode(batch, expected_preprocessed, expected_mask): sp = SpacyPreprocessor( ["<pad>", "<unk>", "my", "name", "is", "peter", "david", "?"] ) preprocessed, _ = sp.preprocess(batch) assert sp.decode(preprocessed.tolist()) == [" ".join(sp.tokenize(s)) for s in batch] def test_spacy_preprocessor_load_from_file(): sp = SpacyPreprocessor.load_from_file("vocabs/word_vocab.txt") assert len(sp.word_to_id_dict) == 772 @pytest.mark.parametrize("batch_size", list(range(3))) @pytest.mark.parametrize( "raw_str,cleaned", [ (None, "nothing"), ("double spaces!", "double spaces!"), ("many spaces!", "many spaces!"), (" ", "nothing"), ( "\n\n\n" " ________ ________ __ __ ________ \n" " | \\| \\| \\ | \\| \\ \n" " \\$$$$$$$$| $$$$$$$$| $$ | $$ \\$$$$$$$$ \n" " | $$ | $$__ \\$$\\/ $$ 
| $$ \n" " | $$ | $$ \\ >$$ $$ | $$ \n" " | $$ | $$$$$ / $$$$\\ | $$ \n" " | $$ | $$_____ | $$ \\$$\\ | $$ \n" " | $$ | $$ \\| $$ | $$ | $$ \n" " \\$$ \\$$$$$$$$ \\$$ \\$$ \\$$ \n" " __ __ ______ _______ __ _______ \n" " | \\ _ | \\ / \\ | \\ | \\ | \\ \n" " | $$ / \\ | $$| $$$$$$\\| $$$$$$$\\| $$ | $$$$$$$\\\n" " | $$/ $\\| $$| $$ | $$| $$__| $$| $$ | $$ | $$\n" " | $$ $$$\\ $$| $$ | $$| $$ $$| $$ | $$ | $$\n" " | $$ $$\\$$\\$$| $$ | $$| $$$$$$$\\| $$ | $$ | $$\n" " | $$$$ \\$$$$| $$__/ $$| $$ | $$| $$_____ | $$__/ $$\n" " | $$$ \\$$$ \\$$ $$| $$ | $$| $$ \\| $$ $$\n" " \\$$ \\$$ \\$$$$$$ \\$$ \\$$ \\$$$$$$$$ \\$$$$$$$" " \n\n" "You are hungry! Let's cook a delicious meal. " "Check the cookbook in the kitchen for the recipe. " "Once done, enjoy your meal!\n\n" "-= Kitchen =-\n" "If you're wondering why everything seems so normal all of a sudden, " "it's because you've just shown up in the kitchen.\n\n" "You can see a closed fridge, which looks conventional, " "right there by you. " "You see a closed oven right there by you. Oh, great. Here's a table. " "Unfortunately, there isn't a thing on it. Hm. " "Oh well You scan the room, seeing a counter. The counter is vast. " "On the counter you can make out a cookbook and a knife. " "You make out a stove. Looks like someone's already been here and " "taken everything off it, though. Sometimes, just sometimes, " "TextWorld can just be the worst.\n\n\n", "You are hungry! Let's cook a delicious meal. " "Check the cookbook in the kitchen for the recipe. " "Once done, enjoy your meal! -= Kitchen =- " "If you're wondering why everything seems so normal all of a sudden, " "it's because you've just shown up in the kitchen. " "You can see a closed fridge, which looks conventional, " "right there by you. You see a closed oven right there by you. " "Oh, great. Here's a table. Unfortunately, there isn't a thing on it. " "Hm. Oh well You scan the room, seeing a counter. The counter is vast. 
" "On the counter you can make out a cookbook and a knife. " "You make out a stove. " "Looks like someone's already been here and taken everything off it, " "though. Sometimes, just sometimes, TextWorld can just be the worst.", ), ], ) def test_spacy_preprocessor_clean(raw_str, cleaned, batch_size): sp = SpacyPreprocessor.load_from_file("vocabs/word_vocab.txt") assert sp.clean(raw_str) == cleaned assert sp.batch_clean([raw_str] * batch_size) == [cleaned] * batch_size @pytest.mark.parametrize( "batch,expected_preprocessed,expected_mask", [ ( ["$$$$$$$ My name is Peter"], torch.tensor([[2, 3, 4, 5]]), torch.tensor([[1, 1, 1, 1]]).float(), ), ( ["my name is peter"], torch.tensor([[2, 3, 4, 5]]), torch.tensor([[1, 1, 1, 1]]).float(), ), ( ["My name\n is Peter", "$$$$$$$Is my name \n\nDavid?"], torch.tensor([[2, 3, 4, 5, 0], [4, 2, 3, 1, 1]]), torch.tensor([[1, 1, 1, 1, 0], [1, 1, 1, 1, 1]]).float(), ), ], ) def test_spacy_preprocessor_clean_preprocess( batch, expected_preprocessed, expected_mask ): sp = SpacyPreprocessor(["<pad>", "<unk>", "my", "name", "is", "peter"]) preprocessed, mask = sp.clean_and_preprocess(batch) assert preprocessed.equal(expected_preprocessed) assert mask.equal(expected_mask)
43.59375
88
0.440143
729
6,975
4.035665
0.196159
0.028552
0.027532
0.025833
0.837525
0.82087
0.82087
0.82087
0.82087
0.82087
0
0.025653
0.37405
6,975
159
89
43.867925
0.648191
0
0
0.432432
0
0.006757
0.474122
0.023656
0
0
0
0
0.054054
1
0.033784
false
0
0.02027
0
0.054054
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
9d264587324672c1423c1263e03d06f48f9fce7b
352
py
Python
pedal/__init__.py
acbart/python-analysis
3cd2cc22d50a414ae6b62c74d2643be4742238d4
[ "MIT" ]
14
2019-08-22T03:40:23.000Z
2022-03-13T00:30:53.000Z
pedal/__init__.py
pedal-edu/pedal
3cd2cc22d50a414ae6b62c74d2643be4742238d4
[ "MIT" ]
74
2019-09-12T04:35:56.000Z
2022-01-26T19:21:32.000Z
pedal/__init__.py
acbart/python-analysis
3cd2cc22d50a414ae6b62c74d2643be4742238d4
[ "MIT" ]
2
2018-09-16T22:39:15.000Z
2018-09-17T12:53:28.000Z
""" A package for analyzing student code. """ import sys import os # Core Commands from pedal.core.report import MAIN_REPORT from pedal.core.submission import Submission from pedal.core.commands import * from pedal.source import * from pedal.sandbox.commands import * from pedal.cait import * from pedal.assertions.commands import * student: Sandbox
20.705882
44
0.792614
50
352
5.56
0.4
0.226619
0.215827
0.165468
0
0
0
0
0
0
0
0
0.133523
352
16
45
22
0.911475
0.147727
0
0
0
0
0
0
0
0
0
0
0.1
1
0
true
0
0.9
0
0.9
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
c24714b9098420ab2a075741c1a1ef5a78b697e0
17,620
py
Python
tests/test_vault_core.py
speraxdev/USDs
8ff2dfaf2173fadacf49619473d681707fc8507c
[ "MIT" ]
null
null
null
tests/test_vault_core.py
speraxdev/USDs
8ff2dfaf2173fadacf49619473d681707fc8507c
[ "MIT" ]
null
null
null
tests/test_vault_core.py
speraxdev/USDs
8ff2dfaf2173fadacf49619473d681707fc8507c
[ "MIT" ]
null
null
null
#!/usr/bin/python3 import pytest import brownie from brownie import Wei, Contract, reverts, SperaxTokenL2 def test_chi_redeem(sperax, owner_l2): ( spa, usds_proxy, core_proxy, vault_proxy, oracle_proxy, strategy_proxies, buybacks, bancor ) = sperax core_proxy.chiRedeem(vault_proxy, {'from': owner_l2}) def test_mint_usds(sperax, owner_l2, accounts, weth): ( spa, usds_proxy, core_proxy, vault_proxy, oracle_proxy, strategy_proxies, buybacks, bancor ) = sperax ( usdt_strategy, wbtc_strategy, weth_strategy, ) = strategy_proxies ( two_hops_buyback, three_hops_buyback ) = buybacks invalid_coll = brownie.convert.to_address('0x0000000000000000000000000000000000000000') deadline = 1637632800 + brownie.chain.time() amount = 100000 slippage_collateral = 1000000000000000000000000000000 slippage_spa = 1000000000000000000000000000000 spa.approve(vault_proxy.address, slippage_spa, {'from': owner_l2 }) weth_erc20 = brownie.interface.IERC20(weth.address) weth_erc20.approve(vault_proxy.address, slippage_spa, {'from': owner_l2}) #collateral not addedd with reverts(): vault_proxy.mintBySpecifyingUSDsAmt( invalid_coll, int(amount), slippage_collateral, slippage_spa, deadline, {'from': owner_l2} ) #zero amount with reverts("Amount needs to be greater than 0"): vault_proxy.mintBySpecifyingUSDsAmt( weth.address, 0, slippage_collateral, slippage_spa, deadline, {'from': owner_l2} ) with reverts('Deadline expired'): vault_proxy.mintBySpecifyingUSDsAmt( weth.address, int(amount), slippage_collateral, slippage_spa, 0, {'from': owner_l2} ) txn = vault_proxy.mintBySpecifyingUSDsAmt( weth.address, int(amount), slippage_collateral, slippage_spa, deadline, {'from': owner_l2} ) txn = vault_proxy.updateAllocationPermission(True, {'from': owner_l2}) txn = vault_proxy.allocate({'from': owner_l2}) assert txn.events["CollateralAllocated"]["allocateAmount"] > 0 vault_proxy.updateCollateralInfo( weth, weth_strategy, True, 80, two_hops_buyback, True, {'from': owner_l2} ) txn = vault_proxy.allocate({'from': 
owner_l2}) vault_proxy.updateCollateralInfo( weth, weth_strategy, False, 80, two_hops_buyback, True, {'from': owner_l2} ) with reverts('Rebase paused'): txn = vault_proxy.rebase({'from': owner_l2}) vault_proxy.updateRebasePermission(True, {'from': owner_l2}) with reverts('Caller is not a rebaser'): vault_proxy.rebase({'from': owner_l2}) vault_proxy.grantRole(vault_proxy.REBASER_ROLE(), owner_l2, {'from': owner_l2}) txn = vault_proxy.rebase({'from': owner_l2}) vault_proxy.revokeRole(vault_proxy.REBASER_ROLE(), owner_l2, {'from': owner_l2}) with reverts('Caller is not a rebaser'): vault_proxy.rebase({'from': owner_l2}) vault_proxy.renounceRole(vault_proxy.REBASER_ROLE(), owner_l2, {'from': owner_l2}) core_proxy.chiRedeem(vault_proxy, {'from': owner_l2}) def test_mint_spa(sperax, weth, owner_l2, accounts): ( spa, usds_proxy, core_proxy, vault_proxy, oracle_proxy, strategy_proxies, buybacks, bancor ) = sperax invalid_coll = brownie.convert.to_address('0x0000000000000000000000000000000000000000') deadline = 1637632800 + brownie.chain.time() amount = 1000 slippage_collateral = 1000000000000000000000000000000 slippage_usds = 10 spa.approve(vault_proxy.address, slippage_collateral, {'from': owner_l2 }) weth_erc20 = brownie.interface.IERC20(weth.address) weth_erc20.approve(vault_proxy.address, slippage_collateral, {'from': owner_l2}) with reverts(): vault_proxy.mintBySpecifyingSPAamt( invalid_coll, int(amount), slippage_usds, slippage_collateral, deadline, {'from': owner_l2} ) with reverts("Amount needs to be greater than 0"): vault_proxy.mintBySpecifyingSPAamt( weth.address, 0, slippage_usds, slippage_collateral, deadline, {'from': owner_l2} ) vault_proxy.mintBySpecifyingSPAamt( weth.address, int(amount), slippage_usds, slippage_collateral, deadline, {'from': owner_l2} ) def test_mint_collateral(sperax, weth, owner_l2, accounts): ( spa, usds_proxy, core_proxy, vault_proxy, oracle_proxy, strategy_proxies, buybacks, bancor ) = sperax invalid_coll = 
brownie.convert.to_address('0x0000000000000000000000000000000000000000') deadline = 1637632800 + brownie.chain.time() amount = 10000 slippage_collateral = 10 slippage_coll = 1000000000000000000000000000000 spa.approve(vault_proxy.address, slippage_coll, {'from': owner_l2}) weth_erc20 = brownie.interface.IERC20(weth.address) weth_erc20.approve(vault_proxy.address, slippage_coll, {'from': owner_l2}) with reverts(): vault_proxy.mintBySpecifyingCollateralAmt( invalid_coll, int(amount), slippage_collateral, slippage_coll, deadline, {'from': owner_l2} ) with reverts("Amount needs to be greater than 0"): vault_proxy.mintBySpecifyingCollateralAmt( weth.address, 0, slippage_collateral, slippage_coll, deadline, {'from': owner_l2} ) vault_proxy.mintBySpecifyingCollateralAmt( weth.address, int(amount), slippage_collateral, slippage_coll, deadline, {'from': owner_l2} ) def test_allow_allocate(sperax, accounts, owner_l2): ( spa, usds_proxy, core_proxy, vault_proxy, oracle_proxy, strategy_proxies, buybacks, bancor ) = sperax txn = vault_proxy.updateAllocationPermission(True, {'from': owner_l2}) assert txn.events["AllocationPermssionChanged"]["permission"] == True def test_vault_core_fail_allocate(sperax, accounts, owner_l2): ( spa, usds_proxy, core_proxy, vault_proxy, oracle_proxy, strategy_proxies, buybacks, bancor ) = sperax with reverts('Allocate paused'): txn = vault_proxy.allocate({'from': owner_l2}) with reverts('Ownable: caller is not the owner'): txn = vault_proxy.updateAllocationPermission(True, {'from': owner_l2}) txn = vault_proxy.allocate({'from': accounts[5]}) def test_upgrage_collateral(sperax, accounts, owner_l2, weth): ( spa, usds_proxy, core_proxy, vault_proxy, oracle_proxy, strategy_proxies, buybacks, bancor ) = sperax (two_hops_buyback, three_hops_buyback) = buybacks collateralAddr = weth.address defaultStrategyAddr = brownie.convert.to_address('0x0000000000000000000000000000000000000000') invalid_coll = 
brownie.convert.to_address('0x0000000000000000000000000000000000000000') allocationAllowed = True allocatePercentage = 0 buyBackAddr = two_hops_buyback.address rebaseAllowed = True with reverts('Ownable: caller is not the owner'): vault_proxy.updateCollateralInfo( collateralAddr, defaultStrategyAddr, allocationAllowed, allocatePercentage, buyBackAddr, rebaseAllowed, {'from': accounts[5]}) with reverts('Collateral not added'): vault_proxy.updateCollateralInfo( invalid_coll, defaultStrategyAddr, allocationAllowed, allocatePercentage, buyBackAddr, rebaseAllowed, {'from': owner_l2}) txn = vault_proxy.updateCollateralInfo( collateralAddr, defaultStrategyAddr, allocationAllowed, allocatePercentage, buyBackAddr, rebaseAllowed, {'from': owner_l2}) assert txn.events["CollateralChanged"]["collateralAddr"] == collateralAddr assert txn.events["CollateralChanged"]["addded"] == True def test_vault_core_add_collatral(sperax, accounts, owner_l2, weth): ( spa, usds_proxy, core_proxy, vault_proxy, oracle_proxy, strategy_proxies, buybacks, bancor ) = sperax collateralAddr = weth.address defaultStrategyAddr = brownie.convert.to_address('0x0000000000000000000000000000000000000000') allocationAllowed = True allocatePercentage = 0 buyBackAddr = (two_hops_buyback, three_hops_buyback) = buybacks rebaseAllowed = True with reverts('Collateral added'): vault_proxy.addCollateral( collateralAddr, defaultStrategyAddr, allocationAllowed, allocatePercentage, two_hops_buyback, rebaseAllowed, {'from': owner_l2}) def test_add_strategy(sperax, accounts, owner_l2, weth): ( spa, usds_proxy, core_proxy, vault_proxy, oracle_proxy, strategy_proxies, buybacks, bancor ) = sperax ( usdt_strategy, wbtc_strategy, weth_strategy, ) = strategy_proxies defaultStrategyAddr = brownie.convert.to_address('0x0000000000000000000000000000000000000000') txn = vault_proxy.addStrategy(defaultStrategyAddr, {'from': owner_l2}) assert txn.events["StrategyAdded"]["added"] == True with reverts('Strategy added'): 
vault_proxy.addStrategy(defaultStrategyAddr, {'from': owner_l2}) def test_update_strategy_rwd_buyback_addr(sperax, owner_l2): ( spa, usds_proxy, core_proxy, vault_proxy, oracle_proxy, strategy_proxies, buybacks, bancor ) = sperax ( usdt_strategy, wbtc_strategy, weth_strategy, ) = strategy_proxies ( two_hops_buyback, three_hops_buyback ) = buybacks defaultStrategyAddr = brownie.convert.to_address('0x0000000000000000000000000000000000000000') with reverts('Strategy not added'): txn = vault_proxy.updateStrategyRwdBuybackAddr( defaultStrategyAddr, two_hops_buyback, {'from': owner_l2}) vault_proxy.addStrategy(defaultStrategyAddr, {'from': owner_l2}) txn = vault_proxy.updateStrategyRwdBuybackAddr( defaultStrategyAddr, two_hops_buyback, {'from': owner_l2}) assert txn.events["StrategyRwdBuyBackUpdateded"]["strategyAddr"] == defaultStrategyAddr assert txn.events["StrategyRwdBuyBackUpdateded"]["buybackAddr"] == two_hops_buyback.address def test_reedem(sperax, accounts, owner_l2, weth): ( spa, usds_proxy, core_proxy, vault_proxy, oracle_proxy, strategy_proxies, buybacks, bancor ) = sperax deadline = brownie.chain.time() + 2000 amount = 1000000 invalid_coll = brownie.convert.to_address('0x0000000000000000000000000000000000000000') ( usdt_strategy, wbtc_strategy, weth_strategy, ) = strategy_proxies ( two_hops_buyback, three_hops_buyback ) = buybacks vault_proxy.updateCollateralInfo( weth, weth_strategy, True, 80, two_hops_buyback, True, {'from': owner_l2} ) amount = 100000 slippage_collateral = 10 slippage_spa = 10 with reverts('Amount needs to be greater than 0'): vault_proxy.redeem(weth.address, 0, slippage_collateral, slippage_spa, deadline, {'from': owner_l2}) with reverts(): vault_proxy.redeem(invalid_coll, amount, slippage_collateral, slippage_spa, deadline, {'from': owner_l2}) txn = spa.setMintable( vault_proxy, True, {'from': owner_l2} ) expired_deadline = brownie.chain.time() - 200 with reverts('Deadline expired'): vault_proxy.redeem(weth.address, amount, 
slippage_collateral, slippage_spa, expired_deadline, {'from': owner_l2}) txn = vault_proxy.redeem(weth.address, amount, slippage_collateral, slippage_spa, deadline, {'from': owner_l2}) def test_reedem_collateral_from_strategy(sperax, accounts, owner_l2, weth): ( spa, usds_proxy, core_proxy, vault_proxy, oracle_proxy, strategy_proxies, buybacks, bancor ) = sperax deadline = brownie.chain.time() + 2000 amount = 100000 slippage_collateral = 10 slippage_spa = 10 txn = spa.setMintable( vault_proxy, True, {'from': owner_l2} ) ( usdt_strategy, wbtc_strategy, weth_strategy, ) = strategy_proxies ( two_hops_buyback, three_hops_buyback ) = buybacks vault_proxy.updateCollateralInfo( weth, weth_strategy, True, 80, two_hops_buyback, True, {'from': owner_l2} ) txn = vault_proxy.redeem(weth.address, amount, slippage_collateral, slippage_spa, deadline, {'from': owner_l2}) def test_vault_core_allocate(sperax, owner_l2): ( spa, usds_proxy, core_proxy, vault_proxy, oracle_proxy, strategy_proxies, buybacks, bancor ) = sperax txn = vault_proxy.updateAllocationPermission(True, {'from': owner_l2}) txn = vault_proxy.allocate({'from': owner_l2}) def test_vault_core_tools_spa_amount_calculator(sperax, owner_l2): ( spa, usds_proxy, core_proxy, vault_proxy, oracle_proxy, strategy_proxies, buybacks, bancor ) = sperax with reverts('invalid valueType'): txn = core_proxy.SPAAmountCalculator(1, 10000, vault_proxy, 3000,{'from': owner_l2}) amount = core_proxy.SPAAmountCalculator.call(0, 10000, vault_proxy, 3000,{'from': owner_l2}) assert amount > 0 amount = core_proxy.SPAAmountCalculator.call(0, 10000, vault_proxy, 0,{'from': owner_l2}) assert amount > 0 def test_usds_amount_calculator(sperax, owner_l2, weth): ( spa, usds_proxy, core_proxy, vault_proxy, oracle_proxy, strategy_proxies, buybacks, bancor ) = sperax amt = core_proxy.USDsAmountCalculator.call(2, 10000, vault_proxy, weth, 3000,{'from': owner_l2}) assert amt > 0 txn = core_proxy.USDsAmountCalculator.call(2, 10000, vault_proxy, weth, 
0,{'from': owner_l2}) assert amt > 0 with reverts('invalid valueType'): amt = core_proxy.USDsAmountCalculator.call(0, 10000, vault_proxy, weth, 3000,{'from': owner_l2}) amt = core_proxy.USDsAmountCalculator.call(1, 10000, vault_proxy, weth, 3000,{'from': owner_l2}) assert amt > 0 def test_colla_dept_amount_calculator(sperax, owner_l2, weth): ( spa, usds_proxy, core_proxy, vault_proxy, oracle_proxy, strategy_proxies, buybacks, bancor ) = sperax amt = core_proxy.collaDeptAmountCalculator.call(1, 10000, vault_proxy, weth, 3000,{'from': owner_l2}) assert amt > 0 amt = core_proxy.collaDeptAmountCalculator.call(1, 10000, vault_proxy, weth, 0,{'from': owner_l2}) assert amt > 0 amt = core_proxy.collaDeptAmountCalculator.call(0, 10000, vault_proxy, weth, 3000,{'from': owner_l2}) assert amt > 0 amt = core_proxy.collaDeptAmountCalculator.call(0, 10000, vault_proxy, weth, 0,{'from': owner_l2}) assert amt > 0 amt = core_proxy.collaDeptAmountCalculator.call(1, 10000, vault_proxy, weth, 3000,{'from': owner_l2}) assert amt > 0 def test_calculate_swapfeein(sperax, owner_l2): ( spa, usds_proxy, core_proxy, vault_proxy, oracle_proxy, strategy_proxies, buybacks, bancor ) = sperax txn = vault_proxy.updateSwapInOutFeePermission(True, False, {'from': owner_l2}) fee = core_proxy.calculateSwapFeeIn.call(vault_proxy, {'from': owner_l2}) assert fee > 0 def test_calculate_swapfeeout(sperax, owner_l2): ( spa, usds_proxy, core_proxy, vault_proxy, oracle_proxy, strategy_proxies, buybacks, bancor ) = sperax txn = vault_proxy.updateSwapInOutFeePermission(False, True, {'from': owner_l2}) fee = core_proxy.calculateSwapFeeOut.call(vault_proxy, {'from': owner_l2}) fee > 0 def test_chi_target(sperax, owner_l2): ( spa, usds_proxy, core_proxy, vault_proxy, oracle_proxy, strategy_proxies, buybacks, bancor ) = sperax core_proxy.chiTarget(10, 1000, 1000000, vault_proxy, {'from':owner_l2 }) core_proxy.chiTarget(10, 100000, 10000, vault_proxy, {'from':owner_l2 })
25.760234
120
0.627128
1,743
17,620
6.065404
0.086632
0.08986
0.073874
0.028755
0.850927
0.813091
0.761256
0.707813
0.671585
0.600643
0
0.068085
0.282293
17,620
683
121
25.79795
0.767911
0.002781
0
0.780669
0
0
0.073543
0.02607
0
0
0.021516
0
0.033457
1
0.035316
false
0
0.005576
0
0.040892
0
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
dfcf2eebc224f9a4902fb9f63d6a34e7318eeb31
86
py
Python
autopandas_v2/generators/ml/traindata/dsl/values.py
chyanju/autopandas
16080ad12f0e8e7b0a614671aea1ed57b3fed7fe
[ "BSD-3-Clause" ]
16
2019-08-13T02:49:44.000Z
2022-02-08T03:14:34.000Z
autopandas_v2/generators/ml/traindata/dsl/values.py
chyanju/autopandas
16080ad12f0e8e7b0a614671aea1ed57b3fed7fe
[ "BSD-3-Clause" ]
2
2020-09-25T22:40:40.000Z
2022-02-09T23:42:53.000Z
autopandas_v2/generators/ml/traindata/dsl/values.py
chyanju/autopandas
16080ad12f0e8e7b0a614671aea1ed57b3fed7fe
[ "BSD-3-Clause" ]
3
2021-07-06T10:30:36.000Z
2022-01-11T23:21:31.000Z
from autopandas_v2.generators.dsl.values import Value class NewInp(Value): pass
14.333333
53
0.77907
12
86
5.5
0.916667
0
0
0
0
0
0
0
0
0
0
0.013699
0.151163
86
5
54
17.2
0.890411
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.333333
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
6
dfdc265cece3387c5def226e2381c70c1faca9cc
655
py
Python
dltk/core/modules/__init__.py
mseitzer/DLTK
3237aa6c7ed63aa177ca90eafcc076d144155a34
[ "Apache-2.0" ]
17
2019-03-24T08:36:56.000Z
2021-12-28T11:42:56.000Z
dltk/core/modules/__init__.py
mseitzer/DLTK
3237aa6c7ed63aa177ca90eafcc076d144155a34
[ "Apache-2.0" ]
null
null
null
dltk/core/modules/__init__.py
mseitzer/DLTK
3237aa6c7ed63aa177ca90eafcc076d144155a34
[ "Apache-2.0" ]
6
2019-05-19T10:37:18.000Z
2021-12-04T05:13:01.000Z
from __future__ import division from __future__ import absolute_import from __future__ import print_function from dltk.core.modules.activations import * from dltk.core.modules.base import * from dltk.core.modules.batch_normalization import * from dltk.core.modules.bilinear_upsample import * from dltk.core.modules.convolution import * from dltk.core.modules.graph_convolution import * from dltk.core.modules.linear import * from dltk.core.modules.residual_units import * from dltk.core.modules.tranposed_convolution import * from dltk.core.modules.summaries import * from dltk.core.modules.losses import * from dltk.core.modules.regularization import *
38.529412
53
0.833588
91
655
5.791209
0.274725
0.227704
0.273245
0.432638
0.58444
0.204934
0
0
0
0
0
0
0.09313
655
16
54
40.9375
0.887205
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0.066667
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
dff1b65592bababd6411646b44e68712bcac45fb
26
py
Python
pyglmnet/__init__.py
jasmainak/pyglmnet
95733c8d661632542a294722048209c85121c0ab
[ "MIT" ]
1
2018-04-10T19:42:51.000Z
2018-04-10T19:42:51.000Z
pyglmnet/__init__.py
jasmainak/pyglmnet
95733c8d661632542a294722048209c85121c0ab
[ "MIT" ]
3
2019-11-04T15:36:40.000Z
2019-11-07T19:05:35.000Z
pyglmnet/__init__.py
jasmainak/pyglmnet
95733c8d661632542a294722048209c85121c0ab
[ "MIT" ]
1
2017-06-01T15:40:56.000Z
2017-06-01T15:40:56.000Z
from .pyglmnet import GLM
13
25
0.807692
4
26
5.25
1
0
0
0
0
0
0
0
0
0
0
0
0.153846
26
1
26
26
0.954545
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
5f03ba44a58ac81bd5cb22140fb908fcc7bc7e86
32
py
Python
common/scheduler/celery.py
universalengineer/blockchain
65274c0a7b26bb30ce38109e3ecc566a5e72a0ac
[ "MIT" ]
null
null
null
common/scheduler/celery.py
universalengineer/blockchain
65274c0a7b26bb30ce38109e3ecc566a5e72a0ac
[ "MIT" ]
null
null
null
common/scheduler/celery.py
universalengineer/blockchain
65274c0a7b26bb30ce38109e3ecc566a5e72a0ac
[ "MIT" ]
1
2020-03-29T02:42:19.000Z
2020-03-29T02:42:19.000Z
from datetime import timedelta
10.666667
30
0.84375
4
32
6.75
1
0
0
0
0
0
0
0
0
0
0
0
0.15625
32
2
31
16
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
5f0f0e5d282cc7951a002f9f322b9a30c8bbb27c
108
py
Python
tseries_patterns/ml/rf/__init__.py
tr8dr/patterns
757a0b9d4936a0c6af633af6f16c0ca8ee676bb0
[ "MIT" ]
127
2020-07-12T21:48:20.000Z
2022-03-27T21:12:26.000Z
tseries_patterns/ml/rf/__init__.py
kumprj/tseries-patterns
99c5279d1a06e4ab0fe92f2a04102d09ae6300c7
[ "MIT" ]
11
2020-08-08T05:17:16.000Z
2022-02-23T13:29:23.000Z
tseries_patterns/ml/rf/__init__.py
kumprj/tseries-patterns
99c5279d1a06e4ab0fe92f2a04102d09ae6300c7
[ "MIT" ]
46
2020-07-22T20:50:55.000Z
2021-12-16T00:57:50.000Z
#from .DeepRandomForest import DeepRandomForest from .RelabeledRandomForest import RelabeledRandomForest
18
56
0.87037
8
108
11.75
0.5
0
0
0
0
0
0
0
0
0
0
0
0.101852
108
5
57
21.6
0.969072
0.425926
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
a04800324bdf2a4bf36134f5d2f8d64b3e0a27e6
16
py
Python
config/version.py
veltzer/python-sigfd
250cefa5bfb21bdb05e4cdc266f872f68a46be29
[ "MIT" ]
null
null
null
config/version.py
veltzer/python-sigfd
250cefa5bfb21bdb05e4cdc266f872f68a46be29
[ "MIT" ]
null
null
null
config/version.py
veltzer/python-sigfd
250cefa5bfb21bdb05e4cdc266f872f68a46be29
[ "MIT" ]
null
null
null
tup = (1, 3, 5)
8
15
0.375
4
16
1.5
1
0
0
0
0
0
0
0
0
0
0
0.272727
0.3125
16
1
16
16
0.272727
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
1
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
a053ca5b1be6511e086f224b55f60408a50b1bc4
1,588
py
Python
plot.py
piotr147/SnakeGameAI_RL
7180a7e874fdd1125262ad541986960bdfb6a038
[ "MIT" ]
null
null
null
plot.py
piotr147/SnakeGameAI_RL
7180a7e874fdd1125262ad541986960bdfb6a038
[ "MIT" ]
null
null
null
plot.py
piotr147/SnakeGameAI_RL
7180a7e874fdd1125262ad541986960bdfb6a038
[ "MIT" ]
null
null
null
import pandas as pd import matplotlib.pyplot as plt def plot1(): df = pd.read_csv('waz1_rozrz_stan.csv') plt.figure(figsize=(14, 8)) plt.plot(df.iloc[1:, 1:]) plt.title("Wąż numer 1, rozrzeszony stan") plt.xlabel('Numer gry') plt.ylabel('Wynik') #plt.show() plt.savefig("waz1_rozrz_stan") df = pd.read_csv('waz2_rozrz_stan.csv') plt.figure(figsize=(14, 8)) plt.plot(df.iloc[1:, 1:]) plt.title("Wąż numer 2, rozrzeszony stan") plt.xlabel('Numer gry') plt.ylabel('Wynik') #plt.show() plt.savefig("waz2_rozrz_stan") df = pd.read_csv('waz3_rozrz_stan.csv') plt.figure(figsize=(14, 8)) plt.plot(df.iloc[1:, 1:]) plt.title("Wąż numer 3, rozrzeszony stan") plt.xlabel('Numer gry') plt.ylabel('Wynik') #plt.show() plt.savefig("waz3_rozrz_stan") df = pd.read_csv('waz1_walls.csv') plt.figure(figsize=(14, 8)) plt.plot(df.iloc[1:, 1:]) plt.title("Wąż numer 1, ściany") plt.xlabel('Numer gry') plt.ylabel('Wynik') #plt.show() plt.savefig("waz1_walls") df = pd.read_csv('waz2_walls.csv') plt.figure(figsize=(14, 8)) plt.plot(df.iloc[1:, 1:]) plt.title("Wąż numer 2, ściany") plt.xlabel('Numer gry') plt.ylabel('Wynik') #plt.show() plt.savefig("waz2_walls") df = pd.read_csv('waz3_walls.csv') plt.figure(figsize=(14, 8)) plt.plot(df.iloc[1:, 1:]) plt.title("Wąż numer 3, ściany") plt.xlabel('Numer gry') plt.ylabel('Wynik') #plt.show() plt.savefig("waz3_walls") if(__name__=="__main__"): plot1()
25.206349
46
0.611461
246
1,588
3.817073
0.166667
0.025559
0.051118
0.070288
0.924388
0.853035
0.789137
0.789137
0.789137
0.789137
0
0.039308
0.198992
1,588
62
47
25.612903
0.698899
0.037783
0
0.510638
0
0
0.26956
0
0
0
0
0
0
1
0.021277
false
0
0.042553
0
0.06383
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
a095f1e7d371dcd8a66a00eb3526f7705b19e0fe
64
py
Python
multilingual_t5/baseline_pa/__init__.py
sumanthd17/mt5
c99b4e3ad1c69908c852c730a1323ccb52d48f58
[ "Apache-2.0" ]
null
null
null
multilingual_t5/baseline_pa/__init__.py
sumanthd17/mt5
c99b4e3ad1c69908c852c730a1323ccb52d48f58
[ "Apache-2.0" ]
null
null
null
multilingual_t5/baseline_pa/__init__.py
sumanthd17/mt5
c99b4e3ad1c69908c852c730a1323ccb52d48f58
[ "Apache-2.0" ]
null
null
null
"""baseline_pa dataset.""" from .baseline_pa import BaselinePa
16
35
0.765625
8
64
5.875
0.75
0.425532
0
0
0
0
0
0
0
0
0
0
0.109375
64
3
36
21.333333
0.824561
0.3125
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
a09e4a5f049ca43f26f3bf91d9c1114e07b1013b
968
py
Python
python/auto it/wrapper.py
lucaszdevzn/learning3
a1382bce05e0f4420b56a9cb06b712b90dc70390
[ "MIT" ]
1
2020-07-20T04:01:46.000Z
2020-07-20T04:01:46.000Z
python/auto it/wrapper.py
lucaszdevzn/learning3
a1382bce05e0f4420b56a9cb06b712b90dc70390
[ "MIT" ]
null
null
null
python/auto it/wrapper.py
lucaszdevzn/learning3
a1382bce05e0f4420b56a9cb06b712b90dc70390
[ "MIT" ]
null
null
null
#coding=utf-8 import logging from functools import wraps def log_call(f): @wraps(f) def call(*args): (ret_code, ret_value, error_msg, log_path) = f(*args) param_desc = ', '.join(list(map(lambda x:str(x), args))) if ret_code == 0: logging.info('(%i) %s.%s(%s)' % (ret_code, f.__module__, f.__name__, param_desc), log_path) else: logging.error('(%i) %s.%s(%s) : %s' % (ret_code, f.__module__, f.__name__, param_desc, error_msg), log_path) return (ret_code, ret_value) return call def log_call_error(f): @wraps(f) def call(*args): (ret_code, ret_value, error_msg, log_path) = f(*args) param_desc = ', '.join(list(map(lambda x:str(x), args))) if ret_code != 0: logging.error('(%i) %s.%s(%s) : %s' % (ret_code, f.__module__, f.__name__, param_desc, error_msg), log_path) return (ret_code, ret_value) return call
35.851852
121
0.576446
145
968
3.475862
0.248276
0.125
0.029762
0.119048
0.825397
0.825397
0.825397
0.825397
0.825397
0.825397
0
0.004184
0.259298
968
27
122
35.851852
0.698745
0.012397
0
0.636364
0
0
0.060215
0
0
0
0
0
0
1
0.181818
false
0
0.090909
0
0.454545
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
a0b6df1625b4ac59e5176aadb43bd2a6017c66b6
122
py
Python
astwro/starlist/__init__.py
majkelx/astwro
4a9bbe3e4757c4076ad7c0d90cf08e38dab4e794
[ "MIT" ]
6
2017-06-15T20:34:51.000Z
2020-04-15T14:21:43.000Z
astwro/starlist/__init__.py
majkelx/astwro
4a9bbe3e4757c4076ad7c0d90cf08e38dab4e794
[ "MIT" ]
18
2017-08-15T20:53:55.000Z
2020-10-05T23:40:34.000Z
astwro/starlist/__init__.py
majkelx/astwro
4a9bbe3e4757c4076ad7c0d90cf08e38dab4e794
[ "MIT" ]
2
2017-11-06T15:33:53.000Z
2020-10-02T21:06:05.000Z
from .daofiles import * from .fileformats import * from .ds9 import * from ._version import __version__, __version_info__
24.4
51
0.795082
15
122
5.8
0.466667
0.344828
0
0
0
0
0
0
0
0
0
0.009524
0.139344
122
4
52
30.5
0.819048
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
261c000447f6e13de4b57749f6a40e377eebd986
211
py
Python
djangosite/home/admin.py
awcrosby/jobpost_data
465a5f1e44febb01477dab4d0935c6c43a70fe97
[ "MIT" ]
null
null
null
djangosite/home/admin.py
awcrosby/jobpost_data
465a5f1e44febb01477dab4d0935c6c43a70fe97
[ "MIT" ]
null
null
null
djangosite/home/admin.py
awcrosby/jobpost_data
465a5f1e44febb01477dab4d0935c6c43a70fe97
[ "MIT" ]
null
null
null
from django.contrib import admin

# Register your models here.
from .models import JobSite, QueryLoc, ScraperParams

# Expose the scraper configuration and job-post models in the admin UI.
for _model in (ScraperParams, JobSite, QueryLoc):
    admin.site.register(_model)
23.444444
52
0.819905
27
211
6.407407
0.481481
0.156069
0.294798
0.346821
0
0
0
0
0
0
0
0
0.094787
211
8
53
26.375
0.905759
0.123223
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.4
0
0.4
0
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
6
26301f016317936457218b36d65d4ff1c6571819
97
py
Python
src/operations/sub.py
macielti/tests-python
23378f147258c612227b3c9f1017e3d7bd33674e
[ "MIT" ]
1
2021-03-13T23:41:34.000Z
2021-03-13T23:41:34.000Z
src/operations/sub.py
macielti/unittests-python
23378f147258c612227b3c9f1017e3d7bd33674e
[ "MIT" ]
null
null
null
src/operations/sub.py
macielti/unittests-python
23378f147258c612227b3c9f1017e3d7bd33674e
[ "MIT" ]
null
null
null
class SubOperation:
    """Arithmetic subtraction operation."""

    def difference(self, number1, number2):
        """Return the result of subtracting number2 from number1."""
        minuend, subtrahend = number1, number2
        return minuend - subtrahend
24.25
43
0.701031
10
97
6.8
0.8
0.411765
0
0
0
0
0
0
0
0
0
0.053333
0.226804
97
4
44
24.25
0.853333
0
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
false
0
0
0.333333
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
6
266f558453e18d54a17797c67db4f203e8de1bc3
355
py
Python
chainchomp_service_layer/__init__.py
trashtatur/chainchomp_service_layer
45a981c4eb1f851eb8ab84b7b40e890940f7331d
[ "MIT" ]
null
null
null
chainchomp_service_layer/__init__.py
trashtatur/chainchomp_service_layer
45a981c4eb1f851eb8ab84b7b40e890940f7331d
[ "MIT" ]
null
null
null
chainchomp_service_layer/__init__.py
trashtatur/chainchomp_service_layer
45a981c4eb1f851eb8ab84b7b40e890940f7331d
[ "MIT" ]
null
null
null
from chainchomp_service_layer.service_layer.ConnectionThread import ConnectionThread
from chainchomp_service_layer.service_layer.ServiceLayerInterface import ServiceLayerInterface
from chainchomp_service_layer.resolver.ChainfileNameResolver import ChainfileNameResolver

# Module-level singleton: the package's entry point into the service layer,
# wired with a fresh connection thread and a chainfile name resolver.
# NOTE(review): constructed at import time — importing this package has the
# side effect of instantiating these collaborators; confirm that is intended.
interface = ServiceLayerInterface(ConnectionThread(), ChainfileNameResolver())
59.166667
95
0.898592
30
355
10.366667
0.333333
0.192926
0.202572
0.250804
0.244373
0.244373
0
0
0
0
0
0
0.061972
355
5
96
71
0.933934
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.75
0
0.75
0
0
0
1
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
6
cd33ea67b836867711f067d4429419f4ea80034d
19
py
Python
zhusuan/variational/__init__.py
McGrady00H/Zhusuan-Jittor
53c3d2b09c9a575806da966e557e99d533b7d35f
[ "MIT" ]
12
2021-07-02T15:27:04.000Z
2021-12-28T05:59:04.000Z
zhusuan/variational/__init__.py
thu-ml/Zhusuan-Jittor
e73c6e3081afde305b9caba80858543abf168466
[ "MIT" ]
1
2021-07-29T08:50:00.000Z
2021-07-29T08:50:00.000Z
zhusuan/variational/__init__.py
thu-ml/Zhusuan-Jittor
e73c6e3081afde305b9caba80858543abf168466
[ "MIT" ]
2
2021-08-17T12:05:15.000Z
2022-01-12T09:47:49.000Z
from .elbo import *
19
19
0.736842
3
19
4.666667
1
0
0
0
0
0
0
0
0
0
0
0
0.157895
19
1
19
19
0.875
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
cd5d45ffa7d1d9568aeff23bf5a30ff14169ce02
3,255
py
Python
06_Banner/python/test_banner.py
roygilliam/basic-computer-games
4b3f00ce738c612d702eaf12683a0d9cd76563a5
[ "Unlicense" ]
null
null
null
06_Banner/python/test_banner.py
roygilliam/basic-computer-games
4b3f00ce738c612d702eaf12683a0d9cd76563a5
[ "Unlicense" ]
1
2022-03-24T20:16:26.000Z
2022-03-24T20:16:26.000Z
06_Banner/python/test_banner.py
roygilliam/basic-computer-games
4b3f00ce738c612d702eaf12683a0d9cd76563a5
[ "Unlicense" ]
1
2022-03-11T14:14:06.000Z
2022-03-11T14:14:06.000Z
import io

from _pytest.monkeypatch import MonkeyPatch
from _pytest.capture import CaptureFixture

from banner import print_banner


def _feed_stdin(monkeypatch: MonkeyPatch, *answers: str) -> None:
    """Replace sys.stdin so print_banner() reads *answers*, one per prompt.

    Answers are joined with newlines exactly as the original inlined
    f-strings did (no trailing newline).
    """
    monkeypatch.setattr("sys.stdin", io.StringIO("\n".join(answers)))


def test_print_banner(monkeypatch: MonkeyPatch) -> None:
    """Happy path: valid answers run to completion without error."""
    # horizontal, vertical, centered, char, statement (capitals only), set_page
    _feed_stdin(monkeypatch, "1", "1", "1", "*", "O", "2")
    print_banner()


def test_print_banner_horizontal_0(
    monkeypatch: MonkeyPatch, capsys: CaptureFixture
) -> None:
    """A horizontal value of 0 is rejected, then the valid retry succeeds."""
    _feed_stdin(monkeypatch, "0", "1", "1", "1", "*", "O", "2")
    print_banner()
    captured = capsys.readouterr()
    assert "Please enter a number greater than zero" in captured.out


def test_print_banner_vertical_0(
    monkeypatch: MonkeyPatch, capsys: CaptureFixture
) -> None:
    """A vertical value of 0 is rejected, then the valid retry succeeds."""
    _feed_stdin(monkeypatch, "1", "0", "1", "1", "*", "O", "2")
    print_banner()
    captured = capsys.readouterr()
    assert "Please enter a number greater than zero" in captured.out


def test_print_banner_centered(
    monkeypatch: MonkeyPatch, capsys: CaptureFixture
) -> None:
    """Centered banner for 'O' matches the known-good output exactly."""
    _feed_stdin(monkeypatch, "1", "1", "Y", "*", "O", "2")
    print_banner()
    captured = capsys.readouterr()
    # NOTE(review): the whitespace inside this expected literal may have been
    # mangled in transit — verify against actual print_banner output.
    expected = (
        "Horizontal Vertical Centered Character "
        "(type 'ALL' if you want character being printed) Statement Set page "
        " *****\n"
        " * *\n"
        " * *\n"
        " * *\n"
        " * *\n"
        " * *\n"
        " *****\n\n\n"
    )
    assert captured.out.split("\n") == expected.split("\n")


def test_print_banner_all_statement(
    monkeypatch: MonkeyPatch, capsys: CaptureFixture
) -> None:
    """The 'ALL' statement prints every character of the given text."""
    _feed_stdin(monkeypatch, "1", "1", "1", "UNIT TESTING", "ALL", "2")
    print_banner()
29.590909
87
0.509985
316
3,255
5.142405
0.186709
0.074462
0.012923
0.014769
0.792
0.792
0.763077
0.763077
0.757538
0.757538
0
0.01123
0.370814
3,255
109
88
29.862385
0.782227
0.031951
0
0.737374
0
0.050505
0.354849
0.111606
0
0
0
0
0.030303
1
0.050505
false
0
0.040404
0
0.090909
0.121212
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
cd8a8ba6a1479b3e8b8dcc536407743cf0f6f114
45
py
Python
numpy_version.py
Effie375/NumPy
14e24ff34caa1753cc2b0f87e2586b0541361731
[ "MIT" ]
null
null
null
numpy_version.py
Effie375/NumPy
14e24ff34caa1753cc2b0f87e2586b0541361731
[ "MIT" ]
null
null
null
numpy_version.py
Effie375/NumPy
14e24ff34caa1753cc2b0f87e2586b0541361731
[ "MIT" ]
1
2022-03-12T09:28:24.000Z
2022-03-12T09:28:24.000Z
# Report the version string of the installed NumPy package.
import numpy as np

numpy_release = np.__version__
print(numpy_release)
11.25
22
0.733333
7
45
4.142857
0.857143
0
0
0
0
0
0
0
0
0
0
0
0.2
45
3
23
15
0.805556
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0.5
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
1
0
6
cd93b32dcffaa6f3f80a687c0108edea08176c28
39
py
Python
__init__.py
gregnero/color
fe86eed3bd222ec91772364f27820a7300c4732c
[ "CC0-1.0" ]
null
null
null
__init__.py
gregnero/color
fe86eed3bd222ec91772364f27820a7300c4732c
[ "CC0-1.0" ]
null
null
null
__init__.py
gregnero/color
fe86eed3bd222ec91772364f27820a7300c4732c
[ "CC0-1.0" ]
null
null
null
from .colorPalette import colorPalette
19.5
38
0.871795
4
39
8.5
0.75
0
0
0
0
0
0
0
0
0
0
0
0.102564
39
1
39
39
0.971429
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
cd94781edde6fc9d6823dc1c9f5ad61afe069d58
109
py
Python
melodiam/auth/__init__.py
HarrySky/melodiam
53a5ce6a5472e88939402023d907aa120ae02bf9
[ "Unlicense" ]
null
null
null
melodiam/auth/__init__.py
HarrySky/melodiam
53a5ce6a5472e88939402023d907aa120ae02bf9
[ "Unlicense" ]
6
2020-10-05T15:27:18.000Z
2020-10-06T15:47:59.000Z
melodiam/auth/__init__.py
HarrySky/melodiam
53a5ce6a5472e88939402023d907aa120ae02bf9
[ "Unlicense" ]
null
null
null
from melodiam.auth.application import api # noqa: F401 from melodiam.auth.models import Token # noqa: F401
36.333333
55
0.779817
16
109
5.3125
0.625
0.282353
0.376471
0
0
0
0
0
0
0
0
0.064516
0.146789
109
2
56
54.5
0.849462
0.192661
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
6
26959314dae862e55260a15812438d1f8b7d1158
97
py
Python
accounts/models.py
AhmedElmougy/socialwebsite
4d53e8d17b78958d52f078156ebc9c2019f00fd3
[ "BSD-3-Clause" ]
1
2021-01-24T18:40:19.000Z
2021-01-24T18:40:19.000Z
accounts/models.py
AhmedElmougy/socialwebsite
4d53e8d17b78958d52f078156ebc9c2019f00fd3
[ "BSD-3-Clause" ]
null
null
null
accounts/models.py
AhmedElmougy/socialwebsite
4d53e8d17b78958d52f078156ebc9c2019f00fd3
[ "BSD-3-Clause" ]
null
null
null
from django.db import models from django.contrib import auth # Create your models here.
9.7
31
0.731959
14
97
5.071429
0.714286
0.28169
0
0
0
0
0
0
0
0
0
0
0.226804
97
9
32
10.777778
0.946667
0.247423
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
f84901fb5e1a70caa87bc7a730a920a8e6d3c5d8
112
py
Python
xbee/thread/ieee.py
PowerFlex/python-xbee-intercept
0c07f3a5f16f479ad7c925cd31638598030cf5a7
[ "MIT" ]
65
2015-12-06T02:38:28.000Z
2017-09-05T16:46:07.000Z
xbee/thread/ieee.py
PowerFlex/python-xbee-intercept
0c07f3a5f16f479ad7c925cd31638598030cf5a7
[ "MIT" ]
44
2015-10-23T15:33:54.000Z
2017-09-01T06:39:50.000Z
xbee/thread/ieee.py
PowerFlex/python-xbee-intercept
0c07f3a5f16f479ad7c925cd31638598030cf5a7
[ "MIT" ]
43
2015-12-15T02:52:21.000Z
2017-06-24T17:14:53.000Z
from xbee.thread.base import XBeeBase
import xbee.backend as _xbee


class XBee(_xbee.XBee, XBeeBase):
    """Thread-based XBee device.

    Combines the protocol implementation from ``xbee.backend.XBee`` with
    the behaviour of ``XBeeBase``; no behaviour is added here — the class
    exists only to merge the two bases under one name.
    """
    pass
16
37
0.767857
17
112
4.941176
0.588235
0.190476
0
0
0
0
0
0
0
0
0
0
0.160714
112
6
38
18.666667
0.893617
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.25
0.5
0
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
1
0
1
0
0
6
6ef4c228b8c9b9a4377f745d33c41704c7950df3
179
py
Python
src/reminder/blueprint.py
arnulfojr/sanic-persistance-patterns
c3c433014401725ab60f1dde3c35848f9ce3ef88
[ "MIT" ]
null
null
null
src/reminder/blueprint.py
arnulfojr/sanic-persistance-patterns
c3c433014401725ab60f1dde3c35848f9ce3ef88
[ "MIT" ]
null
null
null
src/reminder/blueprint.py
arnulfojr/sanic-persistance-patterns
c3c433014401725ab60f1dde3c35848f9ce3ef88
[ "MIT" ]
null
null
null
from sanic.blueprints import Blueprint

# Blueprint grouping all reminder endpoints under /reminders.
# strict_slashes=False lets routes match with or without a trailing slash.
blueprint = Blueprint('reminders', url_prefix='/reminders', strict_slashes=False)

# register controllers
# Imported for its side effect: presumably the controller module attaches
# its route handlers to the blueprint defined above — verify in reminder/controller.
from reminder import controller
22.375
81
0.815642
20
179
7.2
0.75
0.25
0
0
0
0
0
0
0
0
0
0
0.106145
179
7
82
25.571429
0.9
0.111732
0
0
0
0
0.121019
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0.666667
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
1
0
6
3e18728a33ed66f97c8fb02061926c3855cd018d
11,426
py
Python
utils.py
lehommee/DPDNet
d12a61cab4204010e8dd446a507819172e15a5cf
[ "Apache-2.0" ]
15
2020-02-18T14:11:24.000Z
2021-12-06T13:36:55.000Z
utils.py
lehommee/DPDNet
d12a61cab4204010e8dd446a507819172e15a5cf
[ "Apache-2.0" ]
6
2020-02-18T19:08:38.000Z
2021-06-24T01:16:33.000Z
utils.py
lehommee/DPDNet
d12a61cab4204010e8dd446a507819172e15a5cf
[ "Apache-2.0" ]
5
2020-02-27T08:43:12.000Z
2021-03-08T15:31:28.000Z
from tensorflow.keras.preprocessing import image
from tensorflow.keras.layers import Input, Flatten, Dense, Dropout, Reshape
from tensorflow.keras.models import Model
import numpy as np
from scipy.ndimage import rotate
import tensorflow.keras
import tensorflow.keras.layers as layers
import tensorflow.keras.backend as K
import scipy.io
import math
from tensorflow.keras.layers import Dense, Flatten
from tensorflow.keras.layers import Conv2D,Add,SeparableConv2D, MaxPooling2D,concatenate,ZeroPadding2D,Cropping2D,Dropout,Lambda,Reshape,Input,Concatenate, concatenate,Conv3D,BatchNormalization,Activation,UpSampling2D,Conv2DTranspose
from tensorflow.keras.models import Sequential
from tensorflow.keras.utils import plot_model
from tensorflow.keras.models import Sequential, load_model,Model
from skimage import data, img_as_float
from skimage import exposure
from tensorflow.keras.preprocessing.image import ImageDataGenerator
import tensorflow as tf
from tensorflow.keras.applications.resnet50 import ResNet50
import matplotlib.pylab as plt
import random
import scipy
import cv2 as cv
from skimage.transform import rescale, resize, downscale_local_mean
from scipy import ndimage
# NOTE(review): the wildcard import below supplies rand(); prefer an explicit
# np.random.rand import — kept because the functions below depend on it.
from scipy import *
import imageio
import os


def to_rgb3(im):
    """Scale a single-channel image to 0-255 and stack it into 3 channels.

    Returns a uint8 array of shape (H, W, 3).
    """
    # we can use dstack and an array copy; this has to be slow: we create an
    # array with 3x the data we need and truncate afterwards.
    im = im * (255 / np.max(im))
    return np.asarray(np.dstack((im, im, im)), dtype=np.uint8)


def encoding_identity_block(input_tensor, kernel_size, filters, stage, block):
    """ResNet-style identity block built from SeparableConv2D layers.

    filters is a 3-tuple (filters1, filters2, filters3); stage/block only
    name the layers. Output has the same shape as input_tensor.
    """
    filters1, filters2, filters3 = filters
    if K.image_data_format() == 'channels_last':
        bn_axis = 3
    else:
        bn_axis = 1
    conv_name_base = 'res' + str(stage) + block + '_branch'
    bn_name_base = 'bn' + str(stage) + block + '_branch'
    x = SeparableConv2D(filters1, (1, 1), name=conv_name_base + '2a')(input_tensor)
    x = BatchNormalization(axis=bn_axis, name=bn_name_base + '2a')(x)
    x = Activation('relu')(x)
    x = SeparableConv2D(filters2, kernel_size, padding='same',
                        name=conv_name_base + '2b')(x)
    x = BatchNormalization(axis=bn_axis, name=bn_name_base + '2b')(x)
    x = Activation('relu')(x)
    x = SeparableConv2D(filters3, (1, 1), name=conv_name_base + '2c')(x)
    x = BatchNormalization(axis=bn_axis, name=bn_name_base + '2c')(x)
    x = layers.add([x, input_tensor])
    x = Activation('relu')(x)
    return x


def decoding_identity_block(input_tensor, kernel_size, filters, stage, block):
    """Identity block mirror of encoding_identity_block using Conv2DTranspose."""
    filters1, filters2, filters3 = filters
    if K.image_data_format() == 'channels_last':
        bn_axis = 3
    else:
        bn_axis = 1
    conv_name_base = 'res' + str(stage) + block + '_branch'
    bn_name_base = 'bn' + str(stage) + block + '_branch'
    x = Conv2DTranspose(filters1, (1, 1), name=conv_name_base + '2a')(input_tensor)
    x = BatchNormalization(axis=bn_axis, name=bn_name_base + '2a')(x)
    x = Activation('relu')(x)
    x = Conv2DTranspose(filters2, kernel_size, padding='same',
                        name=conv_name_base + '2b')(x)
    x = BatchNormalization(axis=bn_axis, name=bn_name_base + '2b')(x)
    x = Activation('relu')(x)
    x = Conv2DTranspose(filters3, (1, 1), name=conv_name_base + '2c')(x)
    x = BatchNormalization(axis=bn_axis, name=bn_name_base + '2c')(x)
    x = layers.add([x, input_tensor])
    x = Activation('relu')(x)
    return x


def encoding_conv_block(input_tensor, kernel_size, filters, stage, block,
                        strides=(2, 2)):
    """Downsampling conv block: main path plus a strided projection shortcut."""
    filters1, filters2, filters3 = filters
    if K.image_data_format() == 'channels_last':
        bn_axis = 3
    else:
        bn_axis = 1
    conv_name_base = 'res' + str(stage) + block + '_branch'
    bn_name_base = 'bn' + str(stage) + block + '_branch'
    x = SeparableConv2D(filters1, (1, 1), strides=strides,
                        name=conv_name_base + '2a')(input_tensor)
    x = BatchNormalization(axis=bn_axis, name=bn_name_base + '2a')(x)
    x = Activation('relu')(x)
    x = SeparableConv2D(filters2, kernel_size, padding='same',
                        name=conv_name_base + '2b')(x)
    x = BatchNormalization(axis=bn_axis, name=bn_name_base + '2b')(x)
    x = Activation('relu')(x)
    x = SeparableConv2D(filters3, (1, 1), name=conv_name_base + '2c')(x)
    x = BatchNormalization(axis=bn_axis, name=bn_name_base + '2c')(x)
    # Shortcut must be strided/projected to match the main path's shape.
    shortcut = SeparableConv2D(filters3, (1, 1), strides=strides,
                               name=conv_name_base + '1')(input_tensor)
    shortcut = BatchNormalization(axis=bn_axis, name=bn_name_base + '1')(shortcut)
    x = layers.add([x, shortcut])
    x = Activation('relu')(x)
    return x


def decoding_conv_block(input_tensor, kernel_size, filters, stage, block,
                        strides=(2, 2)):
    """Upsampling conv block: UpSampling2D main path plus upsampled shortcut."""
    filters1, filters2, filters3 = filters
    if K.image_data_format() == 'channels_last':
        bn_axis = 3
    else:
        bn_axis = 1
    x = UpSampling2D(size=strides, data_format=None)(input_tensor)
    x = SeparableConv2D(filters1, (1, 1))(x)
    x = BatchNormalization(axis=bn_axis)(x)
    x = Activation('relu')(x)
    x = SeparableConv2D(filters2, kernel_size, padding='same')(x)
    x = BatchNormalization(axis=bn_axis)(x)
    x = Activation('relu')(x)
    x = SeparableConv2D(filters3, (1, 1))(x)
    x = BatchNormalization(axis=bn_axis)(x)
    shortcut = UpSampling2D(size=strides, data_format=None)(input_tensor)
    shortcut = SeparableConv2D(filters3, (1, 1))(shortcut)
    shortcut = BatchNormalization(axis=bn_axis)(shortcut)
    x = layers.add([x, shortcut])
    x = Activation('relu')(x)
    return x


def refunit(divider, ch, img_y, img_x):
    """Build the encoder/decoder refinement model for (img_y/divider, img_x/divider, ch) inputs."""
    image_input = Input(shape=(int(img_y / divider), int(img_x / divider), ch))
    x = Conv2D(64, (7, 7), strides=(2, 2), padding='same', name='conv1')(image_input)
    x = BatchNormalization(axis=3, name='bn_conv1')(x)
    x = Activation('relu')(x)
    x = MaxPooling2D((3, 3))(x)
    x = encoding_conv_block(x, 3, [64, 64, 256], stage=2, block='a', strides=(1, 1))
    x = encoding_conv_block(x, 3, [128, 128, 512], stage=3, block='a')
    x = decoding_conv_block(x, 3, [512, 512, 128], stage=6, block='a')
    x = decoding_conv_block(x, 3, [256, 256, 64], stage=7, block='a')
    x = ZeroPadding2D(padding=(0, 1), data_format=None)(x)
    x = UpSampling2D(size=(3, 3))(x)
    x = Cropping2D(cropping=((2, 2), (1, 1)), data_format=None)(x)
    x = Conv2DTranspose(1, (3, 3), padding='same', name='c8o')(x)
    x = Activation('sigmoid')(x)
    modelo = Model(inputs=image_input, outputs=x)
    modelo.summary()
    return modelo


def fastrefunit(divider, ch, img_y, img_x):
    """Variant of refunit without the ZeroPadding2D step and with swapped cropping."""
    image_input = Input(shape=(int(img_y / divider), int(img_x / divider), ch))
    x = Conv2D(64, (7, 7), strides=(2, 2), padding='same', name='conv1')(image_input)
    x = BatchNormalization(axis=3, name='bn_conv1')(x)
    x = Activation('relu')(x)
    x = MaxPooling2D((3, 3))(x)
    x = encoding_conv_block(x, 3, [64, 64, 256], stage=2, block='a', strides=(1, 1))
    x = encoding_conv_block(x, 3, [128, 128, 512], stage=3, block='a')
    x = decoding_conv_block(x, 3, [512, 512, 128], stage=6, block='a')
    x = decoding_conv_block(x, 3, [256, 256, 64], stage=7, block='a')
    x = UpSampling2D(size=(3, 3))(x)
    x = Cropping2D(cropping=((1, 1), (2, 2)), data_format=None)(x)
    x = Conv2DTranspose(1, (3, 3), padding='same', name='c8o')(x)
    x = Activation('sigmoid')(x)
    modelo = Model(inputs=image_input, outputs=x)
    modelo.summary()
    return modelo


def load_valdata(divider, canales, batch_size, lengthdataset, path, img_y, img_x):
    """Infinite generator of random validation batches (input, [target, target]).

    Picks a random sequence and frame each step; inputs are 16-bit images
    scaled to [0,1], targets are blurred gaussian maps scaled to [0,1].
    """
    l1 = ["validation/imagenes/seq-P01-M04-A0002-G00-C00-S0101/image%04d.png",
          "validation/imagenes/seq-P05-M04-A0001-G03-C00-S0030/image%04d.png",
          "validation/imagenes/seq-P00-M02-A0032-G00-C00-S0037/image%04d.png",
          "validation/imagenes/seq-P00-M02-A0032-G00-C00-S0036/image%04d.png"]
    l2 = ["validation/gaussianas/seq-P01-M04-A0002-G00-C00-S0101/image%04d.png",
          "validation/gaussianas/seq-P05-M04-A0001-G03-C00-S0030/image%04d.png",
          "validation/gaussianas/seq-P00-M02-A0032-G00-C00-S0037/image%04d.png",
          "validation/gaussianas/seq-P00-M02-A0032-G00-C00-S0036/image%04d.png"]
    l3 = [741, 509, 920, 868]  # frame counts per sequence
    counter = 0
    while 1:
        valinput = []
        valoutput = []
        for j in range(batch_size * counter + 1, batch_size * (counter + 1) + 1):
            ind = np.uint16(rand() * 4)
            j = np.uint16(rand() * (l3[ind] - 5)) + 1
            img_path = path + l1[ind] % (j)
            imgc = imageio.imread(img_path)
            imgc = cv.resize(imgc, (int(img_x / divider), int(img_y / divider)))
            xc = image.img_to_array(imgc)
            xc = xc / 65536
            # BUGFIX: was `canales is 3` — identity comparison with an int
            # literal is implementation-dependent; use equality.
            if canales == 3:
                xc = np.asarray(np.dstack((xc, xc, xc)), dtype=np.float64)
            valinput.append(xc)
            img_path = path + l2[ind] % (j)
            imgc = image.load_img(img_path, grayscale=True,
                                  target_size=(int(img_y / divider), int(img_x / divider), 1))
            xc = image.img_to_array(imgc)
            xc = cv.blur(xc, (3, 3))
            xc = np.expand_dims(xc, axis=2)
            xc = xc / 255
            valoutput.append(xc)
        valinput = np.array(valinput)
        valoutput = np.array(valoutput)
        yield valinput, [valoutput, valoutput]


def TrainGen(divider, canales, batch_size, lengthdataset, path, img_y, img_x):
    """Infinite generator of random training batches (X, [Y, Y])."""
    counter = 0
    while 1:
        X = []
        Y = []
        for j in range(batch_size * counter + 1, batch_size * (counter + 1) + 1):
            j = math.floor(rand() * (lengthdataset - 5)) + 1
            img_path = path + "train/imagenes/image%05d.png" % (j)
            imgc = imageio.imread(img_path)
            imgc = cv.resize(imgc, (int(img_x / divider), int(img_y / divider)))
            xc = image.img_to_array(imgc)
            xc = xc / 65536
            # BUGFIX: was `canales is 3` (see load_valdata).
            if canales == 3:
                xc = np.asarray(np.dstack((xc, xc, xc)), dtype=np.float64)
            X.append(xc)
            img_path = path + "train/gaussianas/image%05d.png" % (j)
            imgc = image.load_img(img_path, grayscale=True,
                                  target_size=(int(img_y / divider), int(img_x / divider), 1))
            xc = image.img_to_array(imgc)
            xc = cv.blur(xc, (3, 3))
            xc = np.expand_dims(xc, axis=2)
            xc = xc / 255
            Y.append(xc)
        X = np.array(X)
        Y = np.array(Y)
        counter = counter + 1
        yield X, [Y, Y]


def test(divider, canales, path, img_x, img_y):
    """Load the full first validation sequence; return (inputs, targets) arrays."""
    valinput = []
    valoutput = []
    for j in range(1, 741, 1):
        img_path = path + "validation/imagenes/seq-P01-M04-A0002-G00-C00-S0101/image%04d.png" % (j)
        imgc = imageio.imread(img_path)
        imgc = cv.resize(imgc, (int(img_x / divider), int(img_y / divider)))
        xc = image.img_to_array(imgc)
        xc = xc / 65536
        valinput.append(xc)
        img_path = path + "validation/gaussianas/seq-P01-M04-A0002-G00-C00-S0101/image%04d.png" % (j)
        imgc = image.load_img(img_path, grayscale=True,
                              target_size=(int(img_y / divider), int(img_x / divider), 1))
        xc = image.img_to_array(imgc)
        xc = cv.blur(xc, (3, 3))
        xc = np.expand_dims(xc, axis=2)
        xc = xc / 255
        valoutput.append(xc)
    valinput = np.array(valinput)
    valoutput = np.array(valoutput)
    return valinput, valoutput
38.732203
288
0.665412
1,706
11,426
4.325322
0.130129
0.010842
0.043637
0.053124
0.833582
0.793197
0.759724
0.755658
0.728554
0.696842
0
0.057453
0.185017
11,426
294
289
38.863946
0.734966
0.021792
0
0.661157
0
0.041322
0.090201
0.064314
0
0
0
0
0
1
0.045455
false
0
0.132231
0
0.214876
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
3e1f239d34ee1a3335ed744a17231773fa3bc34a
8,589
py
Python
test/functional/whc_tokenrevokenegtive.py
quangdo3112/wormhole
c911826b43b5de666b9ae0e69b9f9deb95039a9a
[ "MIT" ]
78
2018-07-16T14:55:15.000Z
2022-02-26T10:43:23.000Z
test/functional/whc_tokenrevokenegtive.py
quangdo3112/wormhole
c911826b43b5de666b9ae0e69b9f9deb95039a9a
[ "MIT" ]
14
2018-07-20T02:17:45.000Z
2019-05-13T09:50:13.000Z
test/functional/whc_tokenrevokenegtive.py
quangdo3112/wormhole
c911826b43b5de666b9ae0e69b9f9deb95039a9a
[ "MIT" ]
28
2018-07-17T01:50:37.000Z
2021-11-04T08:04:56.000Z
#!/usr/bin/env python3
# Copyleft (c) 2017 eric sun
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (assert_equal, assert_raises_rpc_error)
from test_framework.authproxy import JSONRPCException
import time


class WHC_TOKEN_MANAGE(BitcoinTestFramework):
    """Negative tests for Wormhole managed-token grant/revoke/change-issuer.

    Builds raw transactions by hand (input -> payload -> OP_RETURN ->
    reference -> sign -> send) so that deliberately corrupted payloads can
    be broadcast, then asserts the node marks them invalid with the
    expected reason.
    """

    def set_test_params(self):
        # Single node, fresh chain for every run.
        self.num_nodes = 1
        self.tip = None
        self.setup_clean_chain = True

    def token_manage_test(self):
        """Issue a managed token, then exercise invalid grant/revoke/change-issuer cases."""
        # generate 200whc for node[0]
        address = self.nodes[0].getnewaddress("")
        address_dst = self.nodes[0].getnewaddress("")
        self.nodes[0].generatetoaddress(110, address)
        self.nodes[0].whc_burnbchgetwhc(4)
        self.nodes[0].sendtoaddress(address_dst, 10)
        self.nodes[0].generatetoaddress(1, address)
        self.nodes[0].whc_sendissuancefixed(address, 1, 1, 0, "", "", "whctoken", "", "", "500")
        managed_trans_id = self.nodes[0].whc_sendissuancemanaged(address, 1, 1, 0, "", "", "managede token", "", "")
        self.nodes[0].generatetoaddress(1, address)
        managed_trans = self.nodes[0].whc_gettransaction(managed_trans_id)
        managed_property_id = managed_trans["propertyid"]

        # exp3: token value exceed max — splice an out-of-range amount
        # (all-ones 64-bit) into the grant payload; must be rejected.
        item = self.getSpent(address)
        if item:
            ret = self.nodes[0].whc_createrawtx_input("", item["txid"], item["vout"])
            payload = self.nodes[0].whc_createpayload_grant(managed_property_id, "50", "")
            p = payload[:16] + "ffffffffffffffff" + payload[32:]
            ret = self.nodes[0].whc_createrawtx_opreturn(ret, p)
            ret = self.nodes[0].whc_createrawtx_reference(ret, item["address"], round(float(item["amount"]) - 0.01, 8))
            ret = self.nodes[0].signrawtransactionwithwallet(ret)
            trans_id = self.nodes[0].sendrawtransaction(ret["hex"])
            self.nodes[0].generatetoaddress(1, address)
            trans = self.nodes[0].whc_gettransaction(trans_id)
            assert trans["valid"] is False
            assert trans["invalidreason"] == "Value out of range or zero"
        else:
            assert False

        # exp3: token value exceed max — control case: the same grant with an
        # untouched payload must be accepted.
        item = self.getSpent(address)
        if item:
            ret = self.nodes[0].whc_createrawtx_input("", item["txid"], item["vout"])
            payload = self.nodes[0].whc_createpayload_grant(managed_property_id, "50", "")
            ret = self.nodes[0].whc_createrawtx_opreturn(ret, payload)
            ret = self.nodes[0].whc_createrawtx_reference(ret, item["address"], round(float(item["amount"]) - 0.01, 8))
            ret = self.nodes[0].signrawtransactionwithwallet(ret)
            trans_id = self.nodes[0].sendrawtransaction(ret["hex"])
            self.nodes[0].generatetoaddress(1, address)
            trans = self.nodes[0].whc_gettransaction(trans_id)
            assert trans["valid"] is True
        else:
            assert False

        # not managed token — corrupt one hex digit of the revoke payload so
        # it targets a non-managed property; must be invalid.
        item = self.getSpent(address)
        if item:
            ret = self.nodes[0].whc_createrawtx_input("", item["txid"], item["vout"])
            payload = self.nodes[0].whc_createpayload_revoke(managed_property_id, "20", "")
            p = payload[:15] + '6' + payload[16:]
            ret = self.nodes[0].whc_createrawtx_opreturn(ret, p)
            ret = self.nodes[0].whc_createrawtx_reference(ret, item["address"], round(float(item["amount"]) - 0.01, 8))
            ret = self.nodes[0].signrawtransactionwithwallet(ret)
            trans_id = self.nodes[0].sendrawtransaction(ret["hex"])
            self.nodes[0].generatetoaddress(1, address)
            trans = self.nodes[0].whc_gettransaction(trans_id)
            assert trans["valid"] is False
        else:
            assert False

        # not managed token issuer — revoke sent from address_dst, which does
        # not own the property; must be rejected with the issuer reason.
        item = self.getSpent(address_dst)
        if item:
            ret = self.nodes[0].whc_createrawtx_input("", item["txid"], item["vout"])
            payload = self.nodes[0].whc_createpayload_revoke(managed_property_id, "20", "")
            ret = self.nodes[0].whc_createrawtx_opreturn(ret, payload)
            ret = self.nodes[0].whc_createrawtx_reference(ret, item["address"], round(float(item["amount"]) - 0.01, 8))
            ret = self.nodes[0].signrawtransactionwithwallet(ret)
            trans_id2 = self.nodes[0].sendrawtransaction(ret["hex"])
            self.nodes[0].generatetoaddress(1, address)
            trans = self.nodes[0].whc_gettransaction(trans_id2)
            assert trans["valid"] is False
            assert trans["invalidreason"] == "Sender is not the issuer of the property"
        else:
            assert False

        # no issuer raise a change issuer action — change-issuer from a
        # non-issuer address must also fail with the issuer reason.
        item = self.getSpent(address_dst)
        if item:
            ret = self.nodes[0].whc_createrawtx_input("", item["txid"], item["vout"])
            payload = self.nodes[0].whc_createpayload_changeissuer(managed_property_id)
            ret = self.nodes[0].whc_createrawtx_opreturn(ret, payload)
            ret = self.nodes[0].whc_createrawtx_reference(ret, item["address"], round(float(item["amount"]) - 0.01, 8))
            ret = self.nodes[0].signrawtransactionwithwallet(ret)
            ret = self.nodes[0].sendrawtransaction(ret["hex"])
            self.nodes[0].generatetoaddress(1, address)
            trans = self.nodes[0].whc_gettransaction(ret)
            assert trans["valid"] is False
            assert trans["invalidreason"] == "Sender is not the issuer of the property"
        else:
            assert False

        # issuer is himself — change-issuer issued by the current issuer with
        # an extra reference output naming the (same) new issuer; valid.
        item = self.getSpent(address)
        if item:
            ret = self.nodes[0].whc_createrawtx_input("", item["txid"], item["vout"])
            payload = self.nodes[0].whc_createpayload_changeissuer(managed_property_id)
            ret = self.nodes[0].whc_createrawtx_opreturn(ret, payload)
            ret = self.nodes[0].whc_createrawtx_reference(ret, item["address"], round(float(item["amount"]) - 0.01, 8))
            ret = self.nodes[0].whc_createrawtx_reference(ret, address)
            ret = self.nodes[0].signrawtransactionwithwallet(ret)
            trans_id = self.nodes[0].sendrawtransaction(ret["hex"])
            self.nodes[0].generatetoaddress(1, address)
            trans = self.nodes[0].whc_gettransaction(trans_id)
            assert trans["valid"] is True
        else:
            assert False

        # property id not exit — truncate/alter the payload so the property
        # id refers to nothing; must fail with the not-exist reason.
        item = self.getSpent(address)
        if item:
            ret = self.nodes[0].whc_createrawtx_input("", item["txid"], item["vout"])
            payload = self.nodes[0].whc_createpayload_changeissuer(managed_property_id)
            p = payload[:15] + 'f'
            ret = self.nodes[0].whc_createrawtx_opreturn(ret, p)
            ret = self.nodes[0].whc_createrawtx_reference(ret, item["address"], round(float(item["amount"]) - 0.01, 8))
            ret = self.nodes[0].whc_createrawtx_reference(ret, address_dst)
            ret = self.nodes[0].signrawtransactionwithwallet(ret)
            trans_id = self.nodes[0].sendrawtransaction(ret["hex"])
            self.nodes[0].generatetoaddress(1, address)
            trans = self.nodes[0].whc_gettransaction(trans_id)
            assert trans["valid"] is False
            assert trans["invalidreason"] == "Property does not exist"
        else:
            assert False

        # token revoke > token left — revoke 60 when only 50 were granted;
        # must fail with the insufficient-balance reason.
        item = self.getSpent(address)
        if item:
            ret = self.nodes[0].whc_createrawtx_input("", item["txid"], item["vout"])
            payload = self.nodes[0].whc_createpayload_revoke(managed_property_id, "60", "")
            ret = self.nodes[0].whc_createrawtx_opreturn(ret, payload)
            ret = self.nodes[0].whc_createrawtx_reference(ret, item["address"], round(float(item["amount"]) - 0.01, 8))
            ret = self.nodes[0].signrawtransactionwithwallet(ret)
            trans_id = self.nodes[0].sendrawtransaction(ret["hex"])
            self.nodes[0].generatetoaddress(1, address)
            trans = self.nodes[0].whc_gettransaction(trans_id)
            assert trans["valid"] is False
            assert trans["invalidreason"] == "Sender has insufficient balance"
        else:
            assert False

    def getSpent(self, addr):
        """Return the first unspent output at addr worth more than 1 coin, or None."""
        item = None
        ret = self.nodes[0].listunspent()
        for it in ret:
            if it["address"] == addr and it["amount"] > 1:
                item = it
                break
        return item

    def run_test(self):
        self.token_manage_test()


if __name__ == '__main__':
    WHC_TOKEN_MANAGE().main()
48.801136
119
0.61765
1,025
8,589
5.026341
0.136585
0.134511
0.149457
0.116071
0.786297
0.776592
0.748059
0.748059
0.748059
0.746894
0
0.026476
0.252416
8,589
175
120
49.08
0.775892
0.032716
0
0.70068
0
0
0.065204
0
0
0
0
0
0.14966
1
0.027211
false
0
0.027211
0
0.068027
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
3e579ffbb8e4d476ba95a772cec7ac3d69c3a5d6
2,193
py
Python
elasticsearch/es.py
dumingcode/my-fintech-idc
5546b2b2ab8b66224941ae1600e4ffd3ce571ec6
[ "MIT" ]
null
null
null
elasticsearch/es.py
dumingcode/my-fintech-idc
5546b2b2ab8b66224941ae1600e4ffd3ce571ec6
[ "MIT" ]
7
2019-07-10T10:49:48.000Z
2021-12-13T20:02:04.000Z
elasticsearch/es.py
dumingcode/my-fintech-idc
5546b2b2ab8b66224941ae1600e4ffd3ce571ec6
[ "MIT" ]
null
null
null
import requests
import json
from loguru import logger
from config import cons as ct

# Shared Content-Type header for requests that carry a JSON body.
_JSON_HEADERS = {"Content-Type": "application/json"}


def _es_request(method, path, payload=None):
    """Send one HTTP request to the configured Elasticsearch host.

    method  -- HTTP verb understood by requests ('put', 'post', 'delete').
    path    -- path appended to the configured ES base url.
    payload -- optional, already-serialized JSON body (str).

    Returns the parsed JSON response, or None when the request or the JSON
    decode fails; the error is logged, preserving the original module's
    best-effort error handling.
    """
    try:
        headers = _JSON_HEADERS if payload is not None else None
        html = requests.request(
            method, ct.conf('ES')['url'] + path,
            data=payload, headers=headers)
        return json.loads(html.text)
    except Exception as err:
        logger.error(err)
        return None


def create_index(index_name: str):
    """Create *index_name* with default settings."""
    return _es_request('put', index_name)


def create_index_setting(index_name: str, settings: str):
    """Create *index_name* with an explicit JSON *settings* body."""
    return _es_request('put', index_name, settings)


def delete_index(index_name: str):
    """Delete *index_name*."""
    return _es_request('delete', index_name)


def create_mapping(index_name: str, mapping_obj: str):
    """Install a ``_doc`` mapping (JSON string) on *index_name*."""
    return _es_request('post', index_name + '/_doc/_mapping', mapping_obj)


def insert_document(index_name: str, document: str, key: str):
    """Index *document* (JSON string) under id *key* in *index_name*."""
    return _es_request('put', index_name + '/_doc/' + key, document)


def delete_document(index_name: str, key: str):
    """Delete the document with id *key* from *index_name*."""
    return _es_request('delete', index_name + '/_doc/' + key)
26.743902
65
0.609667
282
2,193
4.588652
0.163121
0.111283
0.055641
0.069552
0.814529
0.775116
0.765842
0.748068
0.738794
0.711747
0
0
0.279526
2,193
81
66
27.074074
0.818987
0
0
0.695652
0
0
0.063839
0
0
0
0
0
0
1
0.086957
false
0
0.057971
0
0.318841
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
e40fc710fd4003d8817c42800be2ac5d6e249eda
145
py
Python
olfactory/detection/__init__.py
OctaveLauby/olfactory
679b67459c12002041a8f77e1bdffe33d776500b
[ "Apache-2.0" ]
null
null
null
olfactory/detection/__init__.py
OctaveLauby/olfactory
679b67459c12002041a8f77e1bdffe33d776500b
[ "Apache-2.0" ]
null
null
null
olfactory/detection/__init__.py
OctaveLauby/olfactory
679b67459c12002041a8f77e1bdffe33d776500b
[ "Apache-2.0" ]
null
null
null
from .drop import detect_drop from .xregularity import reg_bounds, stepreg_bounds from .yregularity import detect_elbow, detect_iso, detect_leap
36.25
62
0.855172
21
145
5.619048
0.571429
0.20339
0
0
0
0
0
0
0
0
0
0
0.103448
145
3
63
48.333333
0.907692
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
e412ebdb8c6aac3f1006ccdb7daa0f01cc4ae7d5
129
py
Python
tests/dao/test_base.py
kmjbyrne/flask-kbpc
859ea26146ea69cfff7699c75a0612388b84c756
[ "MIT" ]
null
null
null
tests/dao/test_base.py
kmjbyrne/flask-kbpc
859ea26146ea69cfff7699c75a0612388b84c756
[ "MIT" ]
3
2020-05-29T01:28:25.000Z
2021-04-30T21:05:42.000Z
tests/dao/test_base.py
kmjbyrne/flask-kbpc
859ea26146ea69cfff7699c75a0612388b84c756
[ "MIT" ]
null
null
null
import unittest


class TestDAOBase(unittest.TestCase):
    """Common scaffold for DAO test cases; no fixtures are needed yet."""

    def setUp(self) -> None:
        """No per-test setup required."""

    def tearDown(self) -> None:
        """No per-test teardown required."""
14.333333
37
0.689922
16
129
5.5625
0.6875
0.179775
0.269663
0
0
0
0
0
0
0
0
0
0.20155
129
8
38
16.125
0.864078
0
0
0
0
0
0
0
0
0
0
0
0
1
0.5
false
0.5
0.25
0
1
0
1
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
6
e45d3a8ccb81b704f80870f0d94418c49fb243be
4,812
py
Python
test/hyperLSTM_unittest.py
Ar-Kareem/Sketch-RNN
350824040715ea281182de01bca467130f326566
[ "MIT" ]
1
2020-07-05T17:18:36.000Z
2020-07-05T17:18:36.000Z
test/hyperLSTM_unittest.py
Ar-Kareem/Sketch-RNN
350824040715ea281182de01bca467130f326566
[ "MIT" ]
null
null
null
test/hyperLSTM_unittest.py
Ar-Kareem/Sketch-RNN
350824040715ea281182de01bca467130f326566
[ "MIT" ]
null
null
null
import unittest

import torch
import torch.nn as nn

import hyperLSTM


class MyTestCase(unittest.TestCase):
    """Checks hyperLSTM.LSTM against torch.nn.LSTM with identical weights."""

    def _matched_lstms(self):
        """Return (custom, reference) LSTMs sharing the same parameters.

        Seeds the RNG first so both the weight init and the caller's
        subsequent torch.normal draws are reproducible, then copies the
        reference nn.LSTM parameters into the custom cell (PyTorch keeps
        two bias vectors; the custom cell uses their sum).
        """
        torch.manual_seed(42)
        reference = nn.LSTM(5, 2)
        custom = hyperLSTM.LSTM(5, 2, forget_bias=0)
        params = dict(reference.named_parameters())
        custom.wx.weight.data = params['weight_ih_l0']
        custom.wh.weight.data = params['weight_hh_l0']
        custom.wh.bias.data = params['bias_ih_l0'] + params['bias_hh_l0']
        return custom, reference

    def _assert_states_close(self, out, torch_out):
        """Both hidden and cell final states must match the reference."""
        self.assertTrue(torch.allclose(out[1][0].data, torch_out[1][0].data),
                        "Short term memory differs")
        self.assertTrue(torch.allclose(out[1][1].data, torch_out[1][1].data),
                        "Long term memory differs")

    def test_lstm_single(self):
        """One time step, batch of one."""
        custom, reference = self._matched_lstms()
        input_ = torch.normal(torch.ones(1, 1, 5))
        state = (torch.ones(2) * 2, torch.ones(2) * 3)
        state_torch = (torch.ones(1, 1, 2) * 2, torch.ones(1, 1, 2) * 3)
        self._assert_states_close(custom(input_, state),
                                  reference(input_, state_torch))

    def test_lstm_long(self):
        """Ten time steps, batch of one."""
        custom, reference = self._matched_lstms()
        input_ = torch.normal(torch.ones(10, 1, 5))
        state = (torch.ones(2) * 2, torch.ones(2) * 3)
        state_torch = (torch.ones(1, 1, 2) * 2, torch.ones(1, 1, 2) * 3)
        self._assert_states_close(custom(input_, state),
                                  reference(input_, state_torch))

    # NOTE(review): method name typo ("multple") kept so test ids stay stable.
    def test_lstm_multple_states(self):
        """Batched initial states; a perturbed state must change the output."""
        custom, reference = self._matched_lstms()
        input_ = torch.normal(torch.ones(3, 10, 5))
        state = (torch.ones(10, 2) * 2, torch.ones(10, 2) * 3)
        state[0][0, 1] = 2
        state[0][1, 1] = 3
        state[1][0, 0] = 4
        state[1][1, 0] = 5
        # Clone so the later in-place tweak to `state` does not also change
        # the reference LSTM's initial state.
        state_torch = (state[0].clone().unsqueeze(0),
                       state[1].clone().unsqueeze(0))
        self._assert_states_close(custom(input_, state),
                                  reference(input_, state_torch))

        state[1][1, 0] = 5.1
        out = custom(input_, state)
        torch_out = reference(input_, state_torch)
        self.assertFalse(torch.allclose(out[1][0].data, torch_out[1][0].data),
                         "Short term memory should be different")
        self.assertFalse(torch.allclose(out[1][1].data, torch_out[1][1].data),
                         "Long term memory should be different")

    def test_lstm_empty_state(self):
        """Omitting the initial state must equal an explicit all-zeros state."""
        custom, reference = self._matched_lstms()
        input_ = torch.normal(torch.ones(3, 10, 5))
        state_torch = (torch.zeros(1, 10, 2), torch.zeros(1, 10, 2))
        self._assert_states_close(custom(input_),
                                  reference(input_, state_torch))

    def test_hyper_lstm_not_crashing(self):
        """Smoke test: a forward pass through HyperLSTM must not raise."""
        torch.manual_seed(42)
        lstm = hyperLSTM.HyperLSTM(5, 2, layer_norm=True, dropout=0.1)
        input_ = torch.normal(torch.ones(3, 10, 5))
        lstm(input_)


if __name__ == '__main__':
    unittest.main()
48.606061
134
0.646509
715
4,812
4.113287
0.096504
0.149609
0.081605
0.108807
0.875213
0.84087
0.84087
0.83203
0.822169
0.819789
0
0.045431
0.204073
4,812
98
135
49.102041
0.722454
0
0
0.671053
0
0
0.096097
0
0
0
0
0
0.131579
1
0.065789
false
0
0.052632
0
0.131579
0
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
e47f42ff74c99fb1f9c3c0da582352321f3f88f6
170
py
Python
python/testData/refactoring/rename/renameLocalWithComprehension.py
truthiswill/intellij-community
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
[ "Apache-2.0" ]
2
2018-12-29T09:53:39.000Z
2018-12-29T09:53:42.000Z
python/testData/refactoring/rename/renameLocalWithComprehension.py
truthiswill/intellij-community
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
[ "Apache-2.0" ]
173
2018-07-05T13:59:39.000Z
2018-08-09T01:12:03.000Z
python/testData/refactoring/rename/renameLocalWithComprehension.py
truthiswill/intellij-community
fff88cfb0dc168eea18ecb745d3e5b93f57b0b95
[ "Apache-2.0" ]
2
2020-03-15T08:57:37.000Z
2020-04-07T04:48:14.000Z
parameter_lists_copy = [m for m in parameter_lists] for <caret>m in parameter_lists_copy: if param_index >= len(m.GetParameters()): parameter_lists.remove(m)
34
51
0.735294
26
170
4.538462
0.5
0.474576
0.305085
0.288136
0
0
0
0
0
0
0
0
0.164706
170
4
52
42.5
0.830986
0
0
0
0
0
0
0
0
0
0
0
0
0
null
null
0
0
null
null
0
1
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
6
e4d2efec244250ccc3b27856baca40b60216c65d
29
py
Python
pyKinectTools/configs/__init__.py
colincsl/pyKinectTools
a84bb5b7ff9dd613576415932865c2ad435520b3
[ "BSD-2-Clause-FreeBSD" ]
33
2015-04-07T16:28:04.000Z
2021-11-22T00:28:43.000Z
pyKinectTools/dataset_readers/__init__.py
colincsl/pyKinectTools
a84bb5b7ff9dd613576415932865c2ad435520b3
[ "BSD-2-Clause-FreeBSD" ]
null
null
null
pyKinectTools/dataset_readers/__init__.py
colincsl/pyKinectTools
a84bb5b7ff9dd613576415932865c2ad435520b3
[ "BSD-2-Clause-FreeBSD" ]
13
2015-04-07T16:28:34.000Z
2021-04-26T08:04:36.000Z
# __all__ = ["algs", "utils"]
29
29
0.551724
3
29
4
1
0
0
0
0
0
0
0
0
0
0
0
0.137931
29
1
29
29
0.48
0.931034
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
6
e4d67884a93ae28baf78b86f1ae83c7ab6dbcd76
98
py
Python
fancyfuncs.py
Nicksname/fancyfuncs
1fbedec9ae6c2be9e580e13c1c872d9378b9ba8f
[ "MIT" ]
null
null
null
fancyfuncs.py
Nicksname/fancyfuncs
1fbedec9ae6c2be9e580e13c1c872d9378b9ba8f
[ "MIT" ]
null
null
null
fancyfuncs.py
Nicksname/fancyfuncs
1fbedec9ae6c2be9e580e13c1c872d9378b9ba8f
[ "MIT" ]
null
null
null
from __future__ import absolute_import, division, print_function from timefuncs import * # noqa
24.5
64
0.816327
12
98
6.166667
0.75
0
0
0
0
0
0
0
0
0
0
0
0.142857
98
3
65
32.666667
0.880952
0.040816
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0.5
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
1
0
6
e4e4a7fd87a00cd5fafa5e0b376c26b2a269e1a0
125
py
Python
BluePlug/__init__.py
liufeng3486/BluePlug
c7c5c769ed35c71ebc542d34848d6bf309abd051
[ "MIT" ]
1
2019-01-27T04:08:05.000Z
2019-01-27T04:08:05.000Z
BluePlug/__init__.py
liufeng3486/BluePlug
c7c5c769ed35c71ebc542d34848d6bf309abd051
[ "MIT" ]
5
2021-03-18T21:35:20.000Z
2022-01-13T00:58:18.000Z
BluePlug/__init__.py
liufeng3486/BluePlug
c7c5c769ed35c71ebc542d34848d6bf309abd051
[ "MIT" ]
null
null
null
from .demo_test import * from .QtWork import * from .SubMom import * from .BaseClass import * from .img import test_iamge
13.888889
27
0.744
18
125
5.055556
0.5
0.43956
0
0
0
0
0
0
0
0
0
0
0.184
125
8
28
15.625
0.892157
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
5fa94f14424ce95fbdf3e35fb3d7cfb4555ec0e8
3,011
py
Python
3.py
hydrapheetz/adventofcode-soutions
02bcf5411da3944e3cbfd7db6fec5333aed46650
[ "Unlicense" ]
null
null
null
3.py
hydrapheetz/adventofcode-soutions
02bcf5411da3944e3cbfd7db6fec5333aed46650
[ "Unlicense" ]
null
null
null
3.py
hydrapheetz/adventofcode-soutions
02bcf5411da3944e3cbfd7db6fec5333aed46650
[ "Unlicense" ]
null
null
null
# Advent of Code day 3: count houses that receive at least one present.
house_map = {}
problem_input = open("input_3.txt", "r").read()
# problem_input = "^v^v^v^v^v"

# Move character -> (dx, dy) grid delta; unknown characters are ignored,
# matching the original if/elif chain.
_MOVES = {"v": (0, 1), ">": (1, 0), "<": (-1, 0), "^": (0, -1)}


def _deliver(coords):
    """Record one present delivered to the house at *coords*."""
    house_map[coords] = house_map.get(coords, 0) + 1


def process_map():
    """Santa delivers alone: one present at the start, one after each move.

    Bug fix: the original wrote a stale ``presents + 1`` (the count carried
    over from a previous house) into each newly visited house, corrupting
    the per-house totals; only ``len(house_map)`` happened to stay correct.
    """
    coords = (0, 0)
    _deliver(coords)
    for move in problem_input:
        if move in _MOVES:
            dx, dy = _MOVES[move]
            coords = (coords[0] + dx, coords[1] + dy)
            _deliver(coords)


def process_robo():
    """Santa and Robo-Santa alternate moves, both starting at (0, 0).

    Bug fix: the original updated the robot's houses with ``presents + 1``
    (Santa's stale count) instead of the robot's own count; each visit now
    simply increments the visited house. The turn toggles on every input
    character, as in the original.
    """
    positions = [(0, 0), (0, 0)]  # [santa, robo]
    _deliver(positions[0])
    _deliver(positions[1])
    turn = 0
    for move in problem_input:
        if move in _MOVES:
            dx, dy = _MOVES[move]
            x, y = positions[turn]
            positions[turn] = (x + dx, y + dy)
            _deliver(positions[turn])
        turn = 1 - turn


process_map()
print(len(house_map.values()))

house_map = {}
# problem_input = "^v^v^v^v^v"
process_robo()
print(house_map.keys())
print(len(house_map.values()))
35.423529
64
0.539688
370
3,011
4.181081
0.083784
0.191338
0.174531
0.135747
0.881707
0.824822
0.752424
0.72075
0.696833
0.696833
0
0.036192
0.330123
3,011
84
65
35.845238
0.730788
0.018931
0
0.818182
0
0
0.006775
0
0
0
0
0
0
1
0.025974
false
0
0
0
0.025974
0.038961
0
0
0
null
0
0
0
1
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
395f4b5b761a741f5390b99532b96c18dfdc2689
153
py
Python
testapi/admin.py
matheusmatos/django-rest-models
18da71bd921064279b03129aac38d3fbb9e29ae2
[ "BSD-2-Clause" ]
61
2016-12-05T09:09:49.000Z
2022-03-09T13:23:06.000Z
testapi/admin.py
matheusmatos/django-rest-models
18da71bd921064279b03129aac38d3fbb9e29ae2
[ "BSD-2-Clause" ]
51
2016-12-07T10:19:52.000Z
2022-03-11T23:35:23.000Z
testapi/admin.py
matheusmatos/django-rest-models
18da71bd921064279b03129aac38d3fbb9e29ae2
[ "BSD-2-Clause" ]
18
2017-03-11T18:07:17.000Z
2022-03-09T13:14:40.000Z
# -*- coding: utf-8 -*- from django.contrib import admin from testapi.models import Menu, Pizza, Topping admin.site.register([Pizza, Topping, Menu])
17
47
0.718954
21
153
5.238095
0.714286
0.218182
0
0
0
0
0
0
0
0
0
0.007634
0.143791
153
8
48
19.125
0.832061
0.137255
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
3967a40b234b6cc2e21575b1b07b0ed080960686
34,550
py
Python
contractor/lib/ip_test.py
T3kton/contractor
dd78f5b770ee7b5c41cddfc0a61869908b96e385
[ "Apache-2.0" ]
5
2019-02-15T15:55:56.000Z
2020-08-02T03:36:44.000Z
contractor/lib/ip_test.py
T3kton/contractor
dd78f5b770ee7b5c41cddfc0a61869908b96e385
[ "Apache-2.0" ]
4
2017-05-17T22:18:41.000Z
2020-05-10T03:46:33.000Z
contractor/lib/ip_test.py
T3kton/contractor
dd78f5b770ee7b5c41cddfc0a61869908b96e385
[ "Apache-2.0" ]
4
2017-05-09T21:05:51.000Z
2020-09-25T16:37:20.000Z
import pytest from contractor.lib.ip import IpIsV4, StrToIp, IpToStr, CIDRNetwork, CIDRNetmask, CIDRNetmaskToPrefix, CIDRNetworkSize, CIDRNetworkBounds, CIDRNetworkRange def test_isv4(): assert IpIsV4( 281470681743360 ) assert IpIsV4( 281470681743361 ) assert not IpIsV4( 1 ) assert not IpIsV4( 0 ) assert not IpIsV4( 281470681743359 ) def test_strtoip(): with pytest.raises( ValueError ): StrToIp( '127' ) with pytest.raises( ValueError ): StrToIp( '127.00.1' ) with pytest.raises( ValueError ): StrToIp( '127.0.1' ) with pytest.raises( ValueError ): StrToIp( 'a.0.0.0' ) with pytest.raises( ValueError ): StrToIp( '0.a.0.0' ) with pytest.raises( ValueError ): StrToIp( '0.0.a.0' ) with pytest.raises( ValueError ): StrToIp( '0.0.0.a' ) with pytest.raises( ValueError ): StrToIp( '256.0.0.0' ) with pytest.raises( ValueError ): StrToIp( '0.256.0.0' ) with pytest.raises( ValueError ): StrToIp( '0.0.256.0' ) with pytest.raises( ValueError ): StrToIp( '0.0.0.256' ) with pytest.raises( ValueError ): StrToIp( '0.0.0.-1' ) with pytest.raises( ValueError ): StrToIp( '0.0.-1.0' ) with pytest.raises( ValueError ): StrToIp( '0.-1.0.0' ) with pytest.raises( ValueError ): StrToIp( '-1.0.0.0' ) assert StrToIp( '0.0.0.0' ) == 281470681743360 assert StrToIp( '127.0.0.1' ) == 281472812449793 assert StrToIp( '1.2.3.4' ) == 281470698652420 assert StrToIp( ':ffff:0.0.0.0' ) == 281470681743360 assert StrToIp( ':ffff:127.0.0.1' ) == 281472812449793 assert StrToIp( ':ffff:1.2.3.4' ) == 281470698652420 with pytest.raises( ValueError ): StrToIp( ':fff:0.0.0.0' ) with pytest.raises( ValueError ): StrToIp( ':' ) with pytest.raises( ValueError ): StrToIp( ':::' ) with pytest.raises( ValueError ): StrToIp( '::x' ) assert StrToIp( '::' ) == 0 assert StrToIp( '::1' ) == 1 assert StrToIp( '::a' ) == 10 assert StrToIp( '::ffff' ) == 65535 assert StrToIp( '2001:db8:0:0:1:0:0:1' ) == 42540766411282592856904266426630537217 assert StrToIp( '2001:0db8:0:0:1:0:0:1' ) == 42540766411282592856904266426630537217 
assert StrToIp( '2001:db8::1:0:0:1' ) == 42540766411282592856904266426630537217 assert StrToIp( '2001:db8::0:1:0:0:1' ) == 42540766411282592856904266426630537217 assert StrToIp( '2001:0db8::1:0:0:1' ) == 42540766411282592856904266426630537217 assert StrToIp( '2001:db8:0:0:1::1' ) == 42540766411282592856904266426630537217 assert StrToIp( '2001:db8:0000:0:1::1' ) == 42540766411282592856904266426630537217 assert StrToIp( '2001:DB8:0:0:1::1' ) == 42540766411282592856904266426630537217 with pytest.raises( ValueError ): StrToIp( '2001:db8::1::1' ) assert StrToIp( '2001:db8:0:0:0:0:0:1' ) == 42540766411282592856903984951653826561 assert StrToIp( '2001:DB8:0:0:0:0:0:1' ) == 42540766411282592856903984951653826561 assert StrToIp( '2001:db8:0:0:0::1' ) == 42540766411282592856903984951653826561 assert StrToIp( '2001:db8:0:0::1' ) == 42540766411282592856903984951653826561 assert StrToIp( '2001:db8:0::1' ) == 42540766411282592856903984951653826561 assert StrToIp( '2001:db8::1' ) == 42540766411282592856903984951653826561 with pytest.raises( ValueError ): StrToIp( '::db8::1' ) with pytest.raises( ValueError ): StrToIp( '2001:db8::0::1' ) assert StrToIp( '2001:0:0:0:1:0:0:1' ) == 42540488161975842760550637900276957185 assert StrToIp( '2001::1:0:0:1' ) == 42540488161975842760550637900276957185 assert StrToIp( '2001:0:0:0:1::1' ) == 42540488161975842760550637900276957185 assert StrToIp( '2001:0:1:0:1:0:1:0' ) == 42540488161977051686370252529451728896 assert StrToIp( '2001::' ) == 42540488161975842760550356425300246528 assert StrToIp( None ) is None with pytest.raises( ValueError ): StrToIp( 1 ) with pytest.raises( ValueError ): StrToIp( 42540766411282592856904266426630537217 ) with pytest.raises( ValueError ): StrToIp( 281472812449793 ) def test_iptostr(): assert IpToStr( 0 ) == '::' assert IpToStr( 1 ) == '::1' assert IpToStr( 10 ) == '::a' assert IpToStr( 65535 ) == '::ffff' assert IpToStr( 0, False ) == '::' assert IpToStr( 1, False ) == '::1' assert IpToStr( 10, False ) == '::a' 
assert IpToStr( 65535, False ) == '::ffff' assert IpToStr( 0, True ) == '::' assert IpToStr( 1, True ) == '::1' assert IpToStr( 10, True ) == '::a' assert IpToStr( 65535, True ) == '::ffff' assert IpToStr( 42540766411282592856903984951653826561 ) == '2001:db8::1' assert IpToStr( 42540766411282592856904266426630537217 ) == '2001:db8::1:0:0:1' assert IpToStr( 42540488161975842760550637900276957185 ) == '2001::1:0:0:1' assert IpToStr( 42540488161977051686370252529451728896 ) == '2001:0:1:0:1:0:1:0' assert IpToStr( 42540488161975842760550356425300246528 ) == '2001::' assert IpToStr( 42540766411282592856903984951653826561, False ) == '2001:db8::1' assert IpToStr( 42540766411282592856904266426630537217, False ) == '2001:db8::1:0:0:1' assert IpToStr( 42540488161975842760550637900276957185, False ) == '2001::1:0:0:1' assert IpToStr( 42540488161977051686370252529451728896, False ) == '2001:0:1:0:1:0:1:0' assert IpToStr( 42540488161975842760550356425300246528, False ) == '2001::' assert IpToStr( 42540766411282592856903984951653826561, True ) == '2001:db8::1' assert IpToStr( 42540766411282592856904266426630537217, True ) == '2001:db8::1:0:0:1' assert IpToStr( 42540488161975842760550637900276957185, True ) == '2001::1:0:0:1' assert IpToStr( 42540488161977051686370252529451728896, True ) == '2001:0:1:0:1:0:1:0' assert IpToStr( 42540488161975842760550356425300246528, True ) == '2001::' assert IpToStr( 281470681743360 ) == '0.0.0.0' assert IpToStr( 281470681743361 ) == '0.0.0.1' assert IpToStr( 281472812449793 ) == '127.0.0.1' assert IpToStr( 281470698652420 ) == '1.2.3.4' assert IpToStr( 281470681743360, False ) == '0.0.0.0' assert IpToStr( 281470681743361, False ) == '0.0.0.1' assert IpToStr( 281472812449793, False ) == '127.0.0.1' assert IpToStr( 281470698652420, False ) == '1.2.3.4' assert IpToStr( 281470681743360, True ) == ':ffff:0.0.0.0' assert IpToStr( 281470681743361, True ) == ':ffff:0.0.0.1' assert IpToStr( 281472812449793, True ) == ':ffff:127.0.0.1' assert IpToStr( 
281470698652420, True ) == ':ffff:1.2.3.4' assert IpToStr( StrToIp( '1:1:1:1:2:3:4:5' ) ) == '1:1:1:1:2:3:4:5' assert IpToStr( StrToIp( '2:2:2:2:2:3:4:5' ) ) == '2:2:2:2:2:3:4:5' assert IpToStr( StrToIp( '0:0:0:0:2:3:4:5' ) ) == '::2:3:4:5' assert IpToStr( StrToIp( '1:2:2:2:2:3:4:5' ) ) == '1:2:2:2:2:3:4:5' assert IpToStr( StrToIp( '1:0:0:0:0:3:4:5' ) ) == '1::3:4:5' assert IpToStr( StrToIp( '1:1:1:1:0:0:4:5' ) ) == '1:1:1:1::4:5' assert IpToStr( StrToIp( '1:1:1:1:0:0:0:0' ) ) == '1:1:1:1::' with pytest.raises( ValueError ): IpToStr( -1 ) with pytest.raises( ValueError ): IpToStr( 0x100000000000000000000000000000000 ) with pytest.raises( ValueError ): IpToStr( '0' ) assert IpToStr( None ) is None def test_cidrnetwork(): assert CIDRNetwork( 24, False ) == StrToIp( '0.0.0.255' ) assert CIDRNetwork( 25, False ) == StrToIp( '0.0.0.127' ) assert CIDRNetwork( 26, False ) == StrToIp( '0.0.0.63' ) assert CIDRNetwork( 27, False ) == StrToIp( '0.0.0.31' ) assert CIDRNetwork( 23, False ) == StrToIp( '0.0.1.255' ) assert CIDRNetwork( 32, False ) == StrToIp( '0.0.0.0' ) assert CIDRNetwork( 31, False ) == StrToIp( '0.0.0.1' ) assert CIDRNetwork( 8, False ) == StrToIp( '0.255.255.255' ) assert CIDRNetwork( 8, True ) == StrToIp( '00ff:ffff:ffff:ffff:ffff:ffff:ffff:ffff' ) assert CIDRNetwork( 16, True ) == StrToIp( '0:ffff:ffff:ffff:ffff:ffff:ffff:ffff' ) assert CIDRNetwork( 128, True ) == StrToIp( '::' ) assert CIDRNetwork( 127, True ) == StrToIp( '::1' ) assert CIDRNetwork( 120, True ) == StrToIp( '::ff' ) assert CIDRNetwork( 32, True ) == StrToIp( '::ffff:ffff:ffff:ffff:ffff:ffff' ) assert CIDRNetwork( 64, True ) == StrToIp( '::ffff:ffff:ffff:ffff' ) assert CIDRNetwork( 96, True ) == StrToIp( '::ffff:ffff' ) assert CIDRNetwork( 80, True ) == StrToIp( '::ffff:ffff:ffff' ) with pytest.raises( ValueError ): CIDRNetwork( -1, True ) with pytest.raises( ValueError ): CIDRNetwork( -1, False ) with pytest.raises( ValueError ): CIDRNetwork( 33, False ) with pytest.raises( ValueError ): 
CIDRNetwork( 129, True ) with pytest.raises( ValueError ): CIDRNetwork( 'a', False ) with pytest.raises( ValueError ): CIDRNetwork( 'a', True ) def test_cidrnetmask(): assert CIDRNetmask( 24, False ) == StrToIp( '255.255.255.0' ) assert CIDRNetmask( 25, False ) == StrToIp( '255.255.255.128' ) assert CIDRNetmask( 26, False ) == StrToIp( '255.255.255.192' ) assert CIDRNetmask( 27, False ) == StrToIp( '255.255.255.224' ) assert CIDRNetmask( 23, False ) == StrToIp( '255.255.254.0' ) assert CIDRNetmask( 32, False ) == StrToIp( '255.255.255.255' ) assert CIDRNetmask( 31, False ) == StrToIp( '255.255.255.254' ) assert CIDRNetmask( 8, False ) == StrToIp( '255.0.0.0' ) assert CIDRNetmask( 8, True ) == StrToIp( 'ff00::' ) assert CIDRNetmask( 16, True ) == StrToIp( 'ffff::' ) assert CIDRNetmask( 128, True ) == StrToIp( 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff' ) assert CIDRNetmask( 127, True ) == StrToIp( 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:fffe' ) assert CIDRNetmask( 120, True ) == StrToIp( 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ff00' ) assert CIDRNetmask( 32, True ) == StrToIp( 'ffff:ffff::' ) assert CIDRNetmask( 64, True ) == StrToIp( 'ffff:ffff:ffff:ffff::' ) assert CIDRNetmask( 96, True ) == StrToIp( 'ffff:ffff:ffff:ffff:ffff:ffff::' ) assert CIDRNetmask( 80, True ) == StrToIp( 'ffff:ffff:ffff:ffff:ffff::' ) with pytest.raises( ValueError ): CIDRNetmask( -1, True ) with pytest.raises( ValueError ): CIDRNetmask( -1, False ) with pytest.raises( ValueError ): CIDRNetmask( 33, False ) with pytest.raises( ValueError ): CIDRNetmask( 129, True ) with pytest.raises( ValueError ): CIDRNetmask( 'a', False ) with pytest.raises( ValueError ): CIDRNetmask( 'a', True ) def test_cidrnetmasktoprefix(): assert CIDRNetmaskToPrefix( StrToIp( '255.255.255.0' ) ) == 24 assert CIDRNetmaskToPrefix( StrToIp( '255.255.255.128' ) ) == 25 assert CIDRNetmaskToPrefix( StrToIp( '255.255.255.192' ) ) == 26 assert CIDRNetmaskToPrefix( StrToIp( '255.255.255.224' ) ) == 27 assert CIDRNetmaskToPrefix( 
StrToIp( '255.255.254.0' ) ) == 23 assert CIDRNetmaskToPrefix( StrToIp( '255.255.255.255' ) ) == 32 assert CIDRNetmaskToPrefix( StrToIp( '255.255.255.254' ) ) == 31 assert CIDRNetmaskToPrefix( StrToIp( '255.0.0.0' ) ) == 8 assert CIDRNetmaskToPrefix( StrToIp( 'ff00::' ) ) == 8 assert CIDRNetmaskToPrefix( StrToIp( 'ffff::' ) ) == 16 assert CIDRNetmaskToPrefix( StrToIp( 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff' ) ) == 128 assert CIDRNetmaskToPrefix( StrToIp( 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:fffe' ) ) == 127 assert CIDRNetmaskToPrefix( StrToIp( 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ff00' ) ) == 120 assert CIDRNetmaskToPrefix( StrToIp( '10.0.0.3' ) ) == 1 # yea, with CIDR are relying on leading bits being set with pytest.raises( ValueError ): CIDRNetmaskToPrefix( '127.0.0.1' ) def test_cidrnetworksize(): assert CIDRNetworkSize( StrToIp( '3.2.5.3' ), 32 ) == 1 assert CIDRNetworkSize( StrToIp( '3.2.5.3' ), 32, True ) == 1 assert CIDRNetworkSize( StrToIp( '3.2.5.3' ), 32, False ) == 1 assert CIDRNetworkSize( StrToIp( '3.2.5.3' ), 31 ) == 2 assert CIDRNetworkSize( StrToIp( '3.2.5.3' ), 31, True ) == 2 assert CIDRNetworkSize( StrToIp( '3.2.5.3' ), 31, False ) == 2 assert CIDRNetworkSize( StrToIp( '3.2.5.3' ), 30 ) == 2 assert CIDRNetworkSize( StrToIp( '3.2.5.3' ), 30, True ) == 4 assert CIDRNetworkSize( StrToIp( '3.2.5.3' ), 30, False ) == 2 assert CIDRNetworkSize( StrToIp( '3.2.5.3' ), 29 ) == 6 assert CIDRNetworkSize( StrToIp( '3.2.5.3' ), 29, True ) == 8 assert CIDRNetworkSize( StrToIp( '3.2.5.3' ), 29, False ) == 6 assert CIDRNetworkSize( StrToIp( '3.2.5.3' ), 8 ) == 16777214 assert CIDRNetworkSize( StrToIp( '3.2.5.3' ), 8, True ) == 16777216 assert CIDRNetworkSize( StrToIp( '3.2.5.3' ), 8, False ) == 16777214 assert CIDRNetworkSize( StrToIp( '::3' ), 128 ) == 1 assert CIDRNetworkSize( StrToIp( '::3' ), 128, True ) == 1 assert CIDRNetworkSize( StrToIp( '::3' ), 128, False ) == 1 assert CIDRNetworkSize( StrToIp( '::3' ), 127 ) == 2 assert CIDRNetworkSize( StrToIp( '::3' 
), 127, True ) == 2 assert CIDRNetworkSize( StrToIp( '::3' ), 127, False ) == 2 assert CIDRNetworkSize( StrToIp( '::3' ), 126 ) == 2 assert CIDRNetworkSize( StrToIp( '::3' ), 126, True ) == 4 assert CIDRNetworkSize( StrToIp( '::3' ), 126, False ) == 2 assert CIDRNetworkSize( StrToIp( '::3' ), 125 ) == 6 assert CIDRNetworkSize( StrToIp( '::3' ), 125, True ) == 8 assert CIDRNetworkSize( StrToIp( '::3' ), 125, False ) == 6 assert CIDRNetworkSize( StrToIp( '::3' ), 8 ) == 1329227995784915872903807060280344574 assert CIDRNetworkSize( StrToIp( '::3' ), 8, True ) == 1329227995784915872903807060280344576 assert CIDRNetworkSize( StrToIp( '::3' ), 8, False ) == 1329227995784915872903807060280344574 with pytest.raises( ValueError ): CIDRNetworkSize( -1, 0 ) with pytest.raises( ValueError ): CIDRNetworkSize( 0x100000000000000000000000000000000, 0 ) with pytest.raises( ValueError ): CIDRNetworkSize( StrToIp( '255.255.255.255' ), 33 ) with pytest.raises( ValueError ): CIDRNetworkSize( StrToIp( '0.0.0.0' ), 33 ) with pytest.raises( ValueError ): CIDRNetworkSize( StrToIp( 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff' ), 129 ) with pytest.raises( ValueError ): CIDRNetworkSize( StrToIp( '::' ), 129 ) with pytest.raises( ValueError ): CIDRNetworkSize( '::', 129 ) def test_cidrnetworkbounds(): assert CIDRNetworkBounds( StrToIp( '10.0.0.0' ), 8 ) == ( StrToIp( '10.0.0.1' ), StrToIp( '10.255.255.254' ) ) assert CIDRNetworkBounds( StrToIp( '10.0.0.0' ), 8, False ) == ( StrToIp( '10.0.0.1' ), StrToIp( '10.255.255.254' ) ) assert CIDRNetworkBounds( StrToIp( '10.0.0.0' ), 8, True ) == ( StrToIp( '10.0.0.0' ), StrToIp( '10.255.255.255' ) ) assert CIDRNetworkBounds( StrToIp( '10.0.0.1' ), 8 ) == ( StrToIp( '10.0.0.1' ), StrToIp( '10.255.255.254' ) ) assert CIDRNetworkBounds( StrToIp( '10.0.0.1' ), 8, False ) == ( StrToIp( '10.0.0.1' ), StrToIp( '10.255.255.254' ) ) assert CIDRNetworkBounds( StrToIp( '10.0.0.1' ), 8, True ) == ( StrToIp( '10.0.0.0' ), StrToIp( '10.255.255.255' ) ) assert 
CIDRNetworkBounds( StrToIp( '10.3.0.0' ), 8 ) == ( StrToIp( '10.0.0.1' ), StrToIp( '10.255.255.254' ) ) assert CIDRNetworkBounds( StrToIp( '10.3.0.0' ), 8, False ) == ( StrToIp( '10.0.0.1' ), StrToIp( '10.255.255.254' ) ) assert CIDRNetworkBounds( StrToIp( '10.3.0.0' ), 8, True ) == ( StrToIp( '10.0.0.0' ), StrToIp( '10.255.255.255' ) ) assert CIDRNetworkBounds( StrToIp( '10.0.0.0' ), 24 ) == ( StrToIp( '10.0.0.1' ), StrToIp( '10.0.0.254' ) ) assert CIDRNetworkBounds( StrToIp( '10.0.0.0' ), 24, False ) == ( StrToIp( '10.0.0.1' ), StrToIp( '10.0.0.254' ) ) assert CIDRNetworkBounds( StrToIp( '10.0.0.0' ), 24, True ) == ( StrToIp( '10.0.0.0' ), StrToIp( '10.0.0.255' ) ) assert CIDRNetworkBounds( StrToIp( '10.0.0.1' ), 24 ) == ( StrToIp( '10.0.0.1' ), StrToIp( '10.0.0.254' ) ) assert CIDRNetworkBounds( StrToIp( '10.0.0.1' ), 24, False ) == ( StrToIp( '10.0.0.1' ), StrToIp( '10.0.0.254' ) ) assert CIDRNetworkBounds( StrToIp( '10.0.0.1' ), 24, True ) == ( StrToIp( '10.0.0.0' ), StrToIp( '10.0.0.255' ) ) assert CIDRNetworkBounds( StrToIp( '10.3.0.0' ), 24 ) == ( StrToIp( '10.3.0.1' ), StrToIp( '10.3.0.254' ) ) assert CIDRNetworkBounds( StrToIp( '10.3.0.0' ), 24, False ) == ( StrToIp( '10.3.0.1' ), StrToIp( '10.3.0.254' ) ) assert CIDRNetworkBounds( StrToIp( '10.3.0.0' ), 24, True ) == ( StrToIp( '10.3.0.0' ), StrToIp( '10.3.0.255' ) ) assert CIDRNetworkBounds( StrToIp( '2001::' ), 112 ) == ( StrToIp( '2001::1' ), StrToIp( '2001::fffe' ) ) assert CIDRNetworkBounds( StrToIp( '2001::' ), 112, False ) == ( StrToIp( '2001::1' ), StrToIp( '2001::fffe' ) ) assert CIDRNetworkBounds( StrToIp( '2001::' ), 112, True ) == ( StrToIp( '2001::' ), StrToIp( '2001::ffff' ) ) assert CIDRNetworkBounds( StrToIp( '2001::1' ), 112 ) == ( StrToIp( '2001::1' ), StrToIp( '2001::fffe' ) ) assert CIDRNetworkBounds( StrToIp( '2001::1' ), 112, False ) == ( StrToIp( '2001::1' ), StrToIp( '2001::fffe' ) ) assert CIDRNetworkBounds( StrToIp( '2001::1' ), 112, True ) == ( StrToIp( '2001::' ), StrToIp( 
'2001::ffff' ) ) assert CIDRNetworkBounds( StrToIp( '2001::f001' ), 112 ) == ( StrToIp( '2001::1' ), StrToIp( '2001::fffe' ) ) assert CIDRNetworkBounds( StrToIp( '2001::f001' ), 112, False ) == ( StrToIp( '2001::1' ), StrToIp( '2001::fffe' ) ) assert CIDRNetworkBounds( StrToIp( '2001::f001' ), 112, True ) == ( StrToIp( '2001::' ), StrToIp( '2001::ffff' ) ) assert CIDRNetworkBounds( StrToIp( '2001::' ), 120 ) == ( StrToIp( '2001::1' ), StrToIp( '2001::fe' ) ) assert CIDRNetworkBounds( StrToIp( '2001::' ), 120, False ) == ( StrToIp( '2001::1' ), StrToIp( '2001::fe' ) ) assert CIDRNetworkBounds( StrToIp( '2001::' ), 120, True ) == ( StrToIp( '2001::' ), StrToIp( '2001::ff' ) ) assert CIDRNetworkBounds( StrToIp( '2001::1' ), 120 ) == ( StrToIp( '2001::1' ), StrToIp( '2001::fe' ) ) assert CIDRNetworkBounds( StrToIp( '2001::1' ), 120, False ) == ( StrToIp( '2001::1' ), StrToIp( '2001::fe' ) ) assert CIDRNetworkBounds( StrToIp( '2001::1' ), 120, True ) == ( StrToIp( '2001::' ), StrToIp( '2001::ff' ) ) assert CIDRNetworkBounds( StrToIp( '2001::f001' ), 120 ) == ( StrToIp( '2001::f001' ), StrToIp( '2001::f0fe' ) ) assert CIDRNetworkBounds( StrToIp( '2001::f001' ), 120, False ) == ( StrToIp( '2001::f001' ), StrToIp( '2001::f0fe' ) ) assert CIDRNetworkBounds( StrToIp( '2001::f001' ), 120, True ) == ( StrToIp( '2001::f000' ), StrToIp( '2001::f0ff' ) ) assert CIDRNetworkBounds( StrToIp( '10.3.2.5' ), 32 ) == ( StrToIp( '10.3.2.5' ), StrToIp( '10.3.2.5' ) ) assert CIDRNetworkBounds( StrToIp( '10.3.2.5' ), 32, False ) == ( StrToIp( '10.3.2.5' ), StrToIp( '10.3.2.5' ) ) assert CIDRNetworkBounds( StrToIp( '10.3.2.5' ), 32, True ) == ( StrToIp( '10.3.2.5' ), StrToIp( '10.3.2.5' ) ) assert CIDRNetworkBounds( StrToIp( '10.3.2.5' ), 31 ) == ( StrToIp( '10.3.2.4' ), StrToIp( '10.3.2.5' ) ) assert CIDRNetworkBounds( StrToIp( '10.3.2.5' ), 31, False ) == ( StrToIp( '10.3.2.4' ), StrToIp( '10.3.2.5' ) ) assert CIDRNetworkBounds( StrToIp( '10.3.2.5' ), 31, True ) == ( StrToIp( '10.3.2.4' ), 
StrToIp( '10.3.2.5' ) ) assert CIDRNetworkBounds( StrToIp( '2001::f009' ), 128 ) == ( StrToIp( '2001::f009' ), StrToIp( '2001::f009' ) ) assert CIDRNetworkBounds( StrToIp( '2001::f009' ), 128, False ) == ( StrToIp( '2001::f009' ), StrToIp( '2001::f009' ) ) assert CIDRNetworkBounds( StrToIp( '2001::f009' ), 128, True ) == ( StrToIp( '2001::f009' ), StrToIp( '2001::f009' ) ) assert CIDRNetworkBounds( StrToIp( '2001::f009' ), 127 ) == ( StrToIp( '2001::f008' ), StrToIp( '2001::f009' ) ) assert CIDRNetworkBounds( StrToIp( '2001::f009' ), 127, False ) == ( StrToIp( '2001::f008' ), StrToIp( '2001::f009' ) ) assert CIDRNetworkBounds( StrToIp( '2001::f009' ), 127, True ) == ( StrToIp( '2001::f008' ), StrToIp( '2001::f009' ) ) assert CIDRNetworkBounds( StrToIp( '254.0.0.0' ), 8, True ) == ( StrToIp( '254.0.0.0' ), StrToIp( '254.255.255.255' ) ) assert CIDRNetworkBounds( StrToIp( '255.0.0.0' ), 8, True ) == ( StrToIp( '255.0.0.0' ), StrToIp( '255.255.255.255' ) ) assert CIDRNetworkBounds( StrToIp( '1.2.3.4' ), 0, True ) == ( StrToIp( '0.0.0.0' ), StrToIp( '255.255.255.255' ) ) assert CIDRNetworkBounds( StrToIp( '2001::' ), 0, True ) == ( StrToIp( '::' ), StrToIp( 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff' ) ) assert CIDRNetworkBounds( StrToIp( '10.0.0.0' ), 8, False, True ) == ( 1, 16777214 ) assert CIDRNetworkBounds( StrToIp( '10.0.0.0' ), 8, True, True ) == ( 0, 16777215 ) assert CIDRNetworkBounds( StrToIp( '10.0.0.1' ), 8, False, True ) == ( 1, 16777214 ) assert CIDRNetworkBounds( StrToIp( '10.0.0.1' ), 8, True, True ) == ( 0, 16777215 ) assert CIDRNetworkBounds( StrToIp( '10.3.0.0' ), 8, False, True ) == ( 1, 16777214 ) assert CIDRNetworkBounds( StrToIp( '10.3.0.0' ), 8, True, True ) == ( 0, 16777215 ) assert CIDRNetworkBounds( StrToIp( '10.0.0.0' ), 24, False, True ) == ( 1, 254 ) assert CIDRNetworkBounds( StrToIp( '10.0.0.0' ), 24, True, True ) == ( 0, 255 ) assert CIDRNetworkBounds( StrToIp( '10.0.0.1' ), 24, False, True ) == ( 1, 254 ) assert CIDRNetworkBounds( 
StrToIp( '10.0.0.1' ), 24, True, True ) == ( 0, 255 ) assert CIDRNetworkBounds( StrToIp( '10.3.0.0' ), 24, False, True ) == ( 1, 254 ) assert CIDRNetworkBounds( StrToIp( '10.3.0.0' ), 24, True, True ) == ( 0, 255 ) assert CIDRNetworkBounds( StrToIp( '2001::' ), 112, False, True ) == ( 1, 65534 ) assert CIDRNetworkBounds( StrToIp( '2001::' ), 112, True, True ) == ( 0, 65535 ) assert CIDRNetworkBounds( StrToIp( '2001::1' ), 112, False, True ) == ( 1, 65534 ) assert CIDRNetworkBounds( StrToIp( '2001::1' ), 112, True, True ) == ( 0, 65535 ) assert CIDRNetworkBounds( StrToIp( '2001::f001' ), 112, False, True ) == ( 1, 65534 ) assert CIDRNetworkBounds( StrToIp( '2001::f001' ), 112, True, True ) == ( 0, 65535 ) assert CIDRNetworkBounds( StrToIp( '2001::' ), 120, False, True ) == ( 1, 254 ) assert CIDRNetworkBounds( StrToIp( '2001::' ), 120, True, True ) == ( 0, 255 ) assert CIDRNetworkBounds( StrToIp( '2001::1' ), 120, False, True ) == ( 1, 254 ) assert CIDRNetworkBounds( StrToIp( '2001::1' ), 120, True, True ) == ( 0, 255 ) assert CIDRNetworkBounds( StrToIp( '2001::f001' ), 120, False, True ) == ( 1, 254 ) assert CIDRNetworkBounds( StrToIp( '2001::f001' ), 120, True, True ) == ( 0, 255 ) assert CIDRNetworkBounds( StrToIp( '10.3.2.5' ), 32, False, True ) == ( 0, 0 ) assert CIDRNetworkBounds( StrToIp( '10.3.2.5' ), 32, True, True ) == ( 0, 0 ) assert CIDRNetworkBounds( StrToIp( '10.3.2.5' ), 31, False, True ) == ( 0, 1 ) assert CIDRNetworkBounds( StrToIp( '10.3.2.5' ), 31, True, True ) == ( 0, 1 ) assert CIDRNetworkBounds( StrToIp( '2001::f009' ), 128, False, True ) == ( 0, 0 ) assert CIDRNetworkBounds( StrToIp( '2001::f009' ), 128, True, True ) == ( 0, 0 ) assert CIDRNetworkBounds( StrToIp( '2001::f009' ), 127, False, True ) == ( 0, 1 ) assert CIDRNetworkBounds( StrToIp( '2001::f009' ), 127, True, True ) == ( 0, 1 ) assert CIDRNetworkBounds( StrToIp( '254.0.0.0' ), 8, True, True ) == ( 0, 16777215 ) assert CIDRNetworkBounds( StrToIp( '255.0.0.0' ), 8, True, True ) == ( 0, 
16777215 ) assert CIDRNetworkBounds( StrToIp( '1.2.3.4' ), 0, True, True ) == ( 0, 4294967295 ) assert CIDRNetworkBounds( StrToIp( '2001::' ), 0, True, True ) == ( 0, 340282366920938463463374607431768211455 ) with pytest.raises( ValueError ): CIDRNetworkBounds( -1, 0 ) with pytest.raises( ValueError ): CIDRNetworkBounds( 0x100000000000000000000000000000000, 0 ) with pytest.raises( ValueError ): CIDRNetworkBounds( StrToIp( '255.255.255.255' ), 33 ) with pytest.raises( ValueError ): CIDRNetworkBounds( StrToIp( '0.0.0.0' ), 33 ) with pytest.raises( ValueError ): CIDRNetworkBounds( StrToIp( 'ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff' ), 129 ) with pytest.raises( ValueError ): CIDRNetworkBounds( StrToIp( '::' ), 129 ) with pytest.raises( ValueError ): CIDRNetworkBounds( '::', 129 ) def test_cidrnetworkrange(): assert list( CIDRNetworkRange( StrToIp( '169.254.1.3' ), 32 ) ) == [ StrToIp( '169.254.1.3' ) ] assert list( CIDRNetworkRange( StrToIp( '169.254.1.3' ), 32, True ) ) == [ StrToIp( '169.254.1.3' ) ] assert list( CIDRNetworkRange( StrToIp( '169.254.1.3' ), 32, False ) ) == [ StrToIp( '169.254.1.3' ) ] assert list( CIDRNetworkRange( StrToIp( '169.254.1.3' ), 31 ) ) == [ StrToIp( '169.254.1.2' ), StrToIp( '169.254.1.3' ) ] assert list( CIDRNetworkRange( StrToIp( '169.254.1.3' ), 31, True ) ) == [ StrToIp( '169.254.1.2' ), StrToIp( '169.254.1.3' ) ] assert list( CIDRNetworkRange( StrToIp( '169.254.1.3' ), 31, False ) ) == [ StrToIp( '169.254.1.2' ), StrToIp( '169.254.1.3' ) ] assert list( CIDRNetworkRange( StrToIp( '169.254.1.3' ), 30 ) ) == [ StrToIp( '169.254.1.1' ), StrToIp( '169.254.1.2' ) ] assert list( CIDRNetworkRange( StrToIp( '169.254.1.3' ), 30, True ) ) == [ StrToIp( '169.254.1.0' ), StrToIp( '169.254.1.1' ), StrToIp( '169.254.1.2' ), StrToIp( '169.254.1.3' ) ] assert list( CIDRNetworkRange( StrToIp( '169.254.1.3' ), 30, False ) ) == [ StrToIp( '169.254.1.1' ), StrToIp( '169.254.1.2' ) ] assert list( CIDRNetworkRange( StrToIp( '169.254.1.3' ), 29 ) ) == [ 
StrToIp( '169.254.1.1' ), StrToIp( '169.254.1.2' ), StrToIp( '169.254.1.3' ), StrToIp( '169.254.1.4' ), StrToIp( '169.254.1.5' ), StrToIp( '169.254.1.6' ) ] assert list( CIDRNetworkRange( StrToIp( '169.254.1.3' ), 29, True ) ) == [ StrToIp( '169.254.1.0' ), StrToIp( '169.254.1.1' ), StrToIp( '169.254.1.2' ), StrToIp( '169.254.1.3' ), StrToIp( '169.254.1.4' ), StrToIp( '169.254.1.5' ), StrToIp( '169.254.1.6' ), StrToIp( '169.254.1.7' ) ] assert list( CIDRNetworkRange( StrToIp( '169.254.1.3' ), 29, False ) ) == [ StrToIp( '169.254.1.1' ), StrToIp( '169.254.1.2' ), StrToIp( '169.254.1.3' ), StrToIp( '169.254.1.4' ), StrToIp( '169.254.1.5' ), StrToIp( '169.254.1.6' ) ] assert list( CIDRNetworkRange( StrToIp( '2::5' ), 128 ) ) == [ StrToIp( '2::5' ) ] assert list( CIDRNetworkRange( StrToIp( '2::5' ), 128, True ) ) == [ StrToIp( '2::5' ) ] assert list( CIDRNetworkRange( StrToIp( '2::5' ), 128, False ) ) == [ StrToIp( '2::5' ) ] assert list( CIDRNetworkRange( StrToIp( '2::5' ), 127 ) ) == [ StrToIp( '2::4' ), StrToIp( '2::5' ) ] assert list( CIDRNetworkRange( StrToIp( '2::5' ), 127, True ) ) == [ StrToIp( '2::4' ), StrToIp( '2::5' ) ] assert list( CIDRNetworkRange( StrToIp( '2::5' ), 127, False ) ) == [ StrToIp( '2::4' ), StrToIp( '2::5' ) ] assert list( CIDRNetworkRange( StrToIp( '2::5' ), 126 ) ) == [ StrToIp( '2::5' ), StrToIp( '2::6' ) ] assert list( CIDRNetworkRange( StrToIp( '2::5' ), 126, True ) ) == [ StrToIp( '2::4' ), StrToIp( '2::5' ), StrToIp( '2::6' ), StrToIp( '2::7' ) ] assert list( CIDRNetworkRange( StrToIp( '2::5' ), 126, False ) ) == [ StrToIp( '2::5' ), StrToIp( '2::6' ) ] assert list( CIDRNetworkRange( StrToIp( '2::5' ), 125 ) ) == [ StrToIp( '2::1' ), StrToIp( '2::2' ), StrToIp( '2::3' ), StrToIp( '2::4' ), StrToIp( '2::5' ), StrToIp( '2::6' ) ] assert list( CIDRNetworkRange( StrToIp( '2::5' ), 125, True ) ) == [ StrToIp( '2::0' ), StrToIp( '2::1' ), StrToIp( '2::2' ), StrToIp( '2::3' ), StrToIp( '2::4' ), StrToIp( '2::5' ), StrToIp( '2::6' ), 
StrToIp( '2::7' ) ] assert list( CIDRNetworkRange( StrToIp( '2::5' ), 125, False ) ) == [ StrToIp( '2::1' ), StrToIp( '2::2' ), StrToIp( '2::3' ), StrToIp( '2::4' ), StrToIp( '2::5' ), StrToIp( '2::6' ) ] def test_cidrnetworksizerange(): # NOTE: becarefull with the prefixes here, this can generate some pretty large networks even a /8 in ipv4 can make this test take a while assert CIDRNetworkSize( StrToIp( '34.54.23.12' ), 32 ) == len( list( CIDRNetworkRange( StrToIp( '34.54.23.12' ), 32 ) ) ) assert CIDRNetworkSize( StrToIp( '34.54.23.12' ), 32, True ) == len( list( CIDRNetworkRange( StrToIp( '34.54.23.12' ), 32, True ) ) ) assert CIDRNetworkSize( StrToIp( '34.54.23.12' ), 32, False ) == len( list( CIDRNetworkRange( StrToIp( '34.54.23.12' ), 32, False ) ) ) assert CIDRNetworkSize( StrToIp( '34.54.23.12' ), 31 ) == len( list( CIDRNetworkRange( StrToIp( '34.54.23.12' ), 31 ) ) ) assert CIDRNetworkSize( StrToIp( '34.54.23.12' ), 31, True ) == len( list( CIDRNetworkRange( StrToIp( '34.54.23.12' ), 31, True ) ) ) assert CIDRNetworkSize( StrToIp( '34.54.23.12' ), 31, False ) == len( list( CIDRNetworkRange( StrToIp( '34.54.23.12' ), 31, False ) ) ) assert CIDRNetworkSize( StrToIp( '34.54.23.12' ), 30 ) == len( list( CIDRNetworkRange( StrToIp( '34.54.23.12' ), 30 ) ) ) assert CIDRNetworkSize( StrToIp( '34.54.23.12' ), 30, True ) == len( list( CIDRNetworkRange( StrToIp( '34.54.23.12' ), 30, True ) ) ) assert CIDRNetworkSize( StrToIp( '34.54.23.12' ), 30, False ) == len( list( CIDRNetworkRange( StrToIp( '34.54.23.12' ), 30, False ) ) ) assert CIDRNetworkSize( StrToIp( '34.54.23.12' ), 16 ) == len( list( CIDRNetworkRange( StrToIp( '34.54.23.12' ), 16 ) ) ) assert CIDRNetworkSize( StrToIp( '34.54.23.12' ), 16, True ) == len( list( CIDRNetworkRange( StrToIp( '34.54.23.12' ), 16, True ) ) ) assert CIDRNetworkSize( StrToIp( '34.54.23.12' ), 16, False ) == len( list( CIDRNetworkRange( StrToIp( '34.54.23.12' ), 16, False ) ) ) assert CIDRNetworkSize( StrToIp( '1:2:3::' ), 128 ) == 
len( list( CIDRNetworkRange( StrToIp( '1:2:3::' ), 128 ) ) ) assert CIDRNetworkSize( StrToIp( '1:2:3::' ), 128, True ) == len( list( CIDRNetworkRange( StrToIp( '1:2:3::' ), 128, True ) ) ) assert CIDRNetworkSize( StrToIp( '1:2:3::' ), 128, False ) == len( list( CIDRNetworkRange( StrToIp( '1:2:3::' ), 128, False ) ) ) assert CIDRNetworkSize( StrToIp( '1:2:3::' ), 127 ) == len( list( CIDRNetworkRange( StrToIp( '1:2:3::' ), 127 ) ) ) assert CIDRNetworkSize( StrToIp( '1:2:3::' ), 127, True ) == len( list( CIDRNetworkRange( StrToIp( '1:2:3::' ), 127, True ) ) ) assert CIDRNetworkSize( StrToIp( '1:2:3::' ), 127, False ) == len( list( CIDRNetworkRange( StrToIp( '1:2:3::' ), 127, False ) ) ) assert CIDRNetworkSize( StrToIp( '1:2:3::' ), 126 ) == len( list( CIDRNetworkRange( StrToIp( '1:2:3::' ), 126 ) ) ) assert CIDRNetworkSize( StrToIp( '1:2:3::' ), 126, True ) == len( list( CIDRNetworkRange( StrToIp( '1:2:3::' ), 126, True ) ) ) assert CIDRNetworkSize( StrToIp( '1:2:3::' ), 126, False ) == len( list( CIDRNetworkRange( StrToIp( '1:2:3::' ), 126, False ) ) ) assert CIDRNetworkSize( StrToIp( '1:2:3::' ), 125 ) == len( list( CIDRNetworkRange( StrToIp( '1:2:3::' ), 125 ) ) ) assert CIDRNetworkSize( StrToIp( '1:2:3::' ), 125, True ) == len( list( CIDRNetworkRange( StrToIp( '1:2:3::' ), 125, True ) ) ) assert CIDRNetworkSize( StrToIp( '1:2:3::' ), 125, False ) == len( list( CIDRNetworkRange( StrToIp( '1:2:3::' ), 125, False ) ) ) assert CIDRNetworkSize( StrToIp( '1:2:3::' ), 124 ) == len( list( CIDRNetworkRange( StrToIp( '1:2:3::' ), 124 ) ) ) assert CIDRNetworkSize( StrToIp( '1:2:3::' ), 124, True ) == len( list( CIDRNetworkRange( StrToIp( '1:2:3::' ), 124, True ) ) ) assert CIDRNetworkSize( StrToIp( '1:2:3::' ), 124, False ) == len( list( CIDRNetworkRange( StrToIp( '1:2:3::' ), 124, False ) ) ) assert CIDRNetworkSize( StrToIp( '1:2:3::' ), 120 ) == len( list( CIDRNetworkRange( StrToIp( '1:2:3::' ), 120 ) ) ) assert CIDRNetworkSize( StrToIp( '1:2:3::' ), 120, True ) == len( 
list( CIDRNetworkRange( StrToIp( '1:2:3::' ), 120, True ) ) ) assert CIDRNetworkSize( StrToIp( '1:2:3::' ), 120, False ) == len( list( CIDRNetworkRange( StrToIp( '1:2:3::' ), 120, False ) ) ) assert list( CIDRNetworkRange( StrToIp( '169.254.1.3' ), 32 ) ) == [ StrToIp( '169.254.1.3' ) ] assert list( CIDRNetworkRange( StrToIp( '169.254.1.3' ), 32, True ) ) == [ StrToIp( '169.254.1.3' ) ] assert list( CIDRNetworkRange( StrToIp( '169.254.1.3' ), 32, False ) ) == [ StrToIp( '169.254.1.3' ) ] assert list( CIDRNetworkRange( StrToIp( '169.254.1.3' ), 31 ) ) == [ StrToIp( '169.254.1.2' ), StrToIp( '169.254.1.3' ) ] assert list( CIDRNetworkRange( StrToIp( '169.254.1.3' ), 31, True ) ) == [ StrToIp( '169.254.1.2' ), StrToIp( '169.254.1.3' ) ] assert list( CIDRNetworkRange( StrToIp( '169.254.1.3' ), 31, False ) ) == [ StrToIp( '169.254.1.2' ), StrToIp( '169.254.1.3' ) ] assert list( CIDRNetworkRange( StrToIp( '169.254.1.3' ), 30 ) ) == [ StrToIp( '169.254.1.1' ), StrToIp( '169.254.1.2' ) ] assert list( CIDRNetworkRange( StrToIp( '169.254.1.3' ), 30, True ) ) == [ StrToIp( '169.254.1.0' ), StrToIp( '169.254.1.1' ), StrToIp( '169.254.1.2' ), StrToIp( '169.254.1.3' ) ] assert list( CIDRNetworkRange( StrToIp( '169.254.1.3' ), 30, False ) ) == [ StrToIp( '169.254.1.1' ), StrToIp( '169.254.1.2' ) ] assert list( CIDRNetworkRange( StrToIp( '169.254.1.3' ), 29 ) ) == [ StrToIp( '169.254.1.1' ), StrToIp( '169.254.1.2' ), StrToIp( '169.254.1.3' ), StrToIp( '169.254.1.4' ), StrToIp( '169.254.1.5' ), StrToIp( '169.254.1.6' ) ] assert list( CIDRNetworkRange( StrToIp( '169.254.1.3' ), 29, True ) ) == [ StrToIp( '169.254.1.0' ), StrToIp( '169.254.1.1' ), StrToIp( '169.254.1.2' ), StrToIp( '169.254.1.3' ), StrToIp( '169.254.1.4' ), StrToIp( '169.254.1.5' ), StrToIp( '169.254.1.6' ), StrToIp( '169.254.1.7' ) ] assert list( CIDRNetworkRange( StrToIp( '169.254.1.3' ), 29, False ) ) == [ StrToIp( '169.254.1.1' ), StrToIp( '169.254.1.2' ), StrToIp( '169.254.1.3' ), StrToIp( '169.254.1.4' ), 
StrToIp( '169.254.1.5' ), StrToIp( '169.254.1.6' ) ] assert list( CIDRNetworkRange( StrToIp( '2::5' ), 128 ) ) == [ StrToIp( '2::5' ) ] assert list( CIDRNetworkRange( StrToIp( '2::5' ), 128, True ) ) == [ StrToIp( '2::5' ) ] assert list( CIDRNetworkRange( StrToIp( '2::5' ), 128, False ) ) == [ StrToIp( '2::5' ) ] assert list( CIDRNetworkRange( StrToIp( '2::5' ), 127 ) ) == [ StrToIp( '2::4' ), StrToIp( '2::5' ) ] assert list( CIDRNetworkRange( StrToIp( '2::5' ), 127, True ) ) == [ StrToIp( '2::4' ), StrToIp( '2::5' ) ] assert list( CIDRNetworkRange( StrToIp( '2::5' ), 127, False ) ) == [ StrToIp( '2::4' ), StrToIp( '2::5' ) ] assert list( CIDRNetworkRange( StrToIp( '2::5' ), 126 ) ) == [ StrToIp( '2::5' ), StrToIp( '2::6' ) ] assert list( CIDRNetworkRange( StrToIp( '2::5' ), 126, True ) ) == [ StrToIp( '2::4' ), StrToIp( '2::5' ), StrToIp( '2::6' ), StrToIp( '2::7' ) ] assert list( CIDRNetworkRange( StrToIp( '2::5' ), 126, False ) ) == [ StrToIp( '2::5' ), StrToIp( '2::6' ) ] assert list( CIDRNetworkRange( StrToIp( '2::5' ), 125 ) ) == [ StrToIp( '2::1' ), StrToIp( '2::2' ), StrToIp( '2::3' ), StrToIp( '2::4' ), StrToIp( '2::5' ), StrToIp( '2::6' ) ] assert list( CIDRNetworkRange( StrToIp( '2::5' ), 125, True ) ) == [ StrToIp( '2::0' ), StrToIp( '2::1' ), StrToIp( '2::2' ), StrToIp( '2::3' ), StrToIp( '2::4' ), StrToIp( '2::5' ), StrToIp( '2::6' ), StrToIp( '2::7' ) ] assert list( CIDRNetworkRange( StrToIp( '2::5' ), 125, False ) ) == [ StrToIp( '2::1' ), StrToIp( '2::2' ), StrToIp( '2::3' ), StrToIp( '2::4' ), StrToIp( '2::5' ), StrToIp( '2::6' ) ]
68.962076
285
0.623849
4,573
34,550
4.711131
0.030396
0.018659
0.059135
0.063684
0.880616
0.84042
0.736307
0.652432
0.548088
0.434088
0
0.213559
0.167033
34,550
500
286
69.1
0.535043
0.005441
0
0.229911
0
0
0.171309
0.016793
0
0
0.003056
0
0.727679
1
0.022321
true
0
0.004464
0
0.026786
0
0
0
0
null
0
0
0
1
1
1
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
1
0
0
0
0
0
0
6
39a46b87fdecb6fa9bd1031229883f4e29c4876b
27
py
Python
fs/expose/wsgi/__init__.py
rimrim/pyfs
ce9f3c76468a0779a0517ea7d7c191caf1bffd25
[ "BSD-3-Clause" ]
1
2021-07-15T22:45:17.000Z
2021-07-15T22:45:17.000Z
fs/expose/wsgi/__init__.py
rimrim/pyfs
ce9f3c76468a0779a0517ea7d7c191caf1bffd25
[ "BSD-3-Clause" ]
null
null
null
fs/expose/wsgi/__init__.py
rimrim/pyfs
ce9f3c76468a0779a0517ea7d7c191caf1bffd25
[ "BSD-3-Clause" ]
null
null
null
from .wsgi import serve_fs
13.5
26
0.814815
5
27
4.2
1
0
0
0
0
0
0
0
0
0
0
0
0.148148
27
1
27
27
0.913043
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
84389690105fc3f3af586503cd097a031d7bb4fe
46
py
Python
src/heuristics.py
sampreets3/pyjulia-pddl
8508690312535d90b59297279268d8a754e4a212
[ "Apache-2.0" ]
null
null
null
src/heuristics.py
sampreets3/pyjulia-pddl
8508690312535d90b59297279268d8a754e4a212
[ "Apache-2.0" ]
null
null
null
src/heuristics.py
sampreets3/pyjulia-pddl
8508690312535d90b59297279268d8a754e4a212
[ "Apache-2.0" ]
null
null
null
def zero_heuristic(state, pddl): return 0
15.333333
32
0.717391
7
46
4.571429
1
0
0
0
0
0
0
0
0
0
0
0.027027
0.195652
46
2
33
23
0.837838
0
0
0
0
0
0
0
0
0
0
0
0
1
0.5
false
0
0
0.5
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
6
ffe5f41b7b8c2001386e9d4d198a0f12e72fe1b1
8,865
py
Python
train/dewarp.py
hhu-machine-learning/hdc2021-psfnn
275eec198e57c49dcd3eee3e7b09ee36d1655ede
[ "MIT" ]
2
2021-11-04T15:45:41.000Z
2021-11-04T15:47:22.000Z
train/dewarp.py
hhu-machine-learning/hdc2021-psfnn
275eec198e57c49dcd3eee3e7b09ee36d1655ede
[ "MIT" ]
1
2021-11-04T15:40:40.000Z
2021-11-16T07:44:57.000Z
train/dewarp.py
hhu-machine-learning/hdc2021-psfnn
275eec198e57c49dcd3eee3e7b09ee36d1655ede
[ "MIT" ]
null
null
null
import torch import torch.nn.functional as F def get_dewarping_matrix(step): if step == 0: return [1.0074838399887085, 0.0007350334199145436, 0.0018522378522902727, 0.0011821923544630408, -9.216999023919925e-05, -2.1575890059466474e-05, -0.0008361316868104041, -2.0978655811632052e-05, 2.024814057222102e-05, -9.610102279111743e-05, 0.005001508630812168, 1.0126982927322388, -0.002687784843146801, 0.0004823343479074538, -0.0003023565514013171, -0.00017967041640076786, 5.8112331316806376e-05, 0.0004127228748984635, -0.00010364824265707284, -3.341829142300412e-05, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0] if step == 1: return [1.0001074075698853, 0.00026503356639295816, 0.0014267113292589784, -0.000140930904308334, -0.00024006569583434612, -0.002316687721759081, -8.164918835973367e-05, 0.00024537910940125585, 0.0002223470073658973, 0.0020410853903740644, 0.005251534283161163, 1.0058963298797607, -0.004370789974927902, -0.000786478107329458, 0.00014022525283508003, -0.0018259487114846706, -0.0007027303799986839, 0.002358881291002035, -0.00045202276669442654, -0.004688096232712269, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0] if step == 2: return [0.9938850998878479, 0.0011783745139837265, 0.0, 0.001726538990624249, 0.00071810552617535, -0.0006797484820708632, 
-0.0037932987324893475, -0.00037118676118552685, -0.0031556240282952785, -0.003694966435432434, 0.005031114909797907, 1.0014299154281616, -0.00625627301633358, -0.0025116801261901855, 0.00016182185208890587, -0.007379685062915087, -0.0018687976989895105, 0.002322555286809802, 0.005523629952222109, -0.029866278171539307, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0] if step == 3: return [0.9869324564933777, 0.00224688439629972, -0.0008935428340919316, 0.002959209494292736, -0.0022612223401665688, 0.0018939843866974115, -0.004060809034854174, 0.0017625142354518175, -0.006656560115516186, -0.009651963599026203, 0.00016188605513889343, 1.0035479068756104, -0.010218928568065166, 0.0005651656538248062, -0.0009788924362510443, 0.0014329419936984777, 0.008163115940988064, 0.005938321352005005, 0.008032983168959618, -0.08853603154420853, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0] if step == 4: return [0.983738362789154, 0.0023218116257339716, -0.0017126877792179585, 0.001369951176457107, -0.004269269295036793, 0.004380248486995697, -0.016033707186579704, 0.0067543284967541695, -0.016424868255853653, -0.01624421216547489, 7.708399789407849e-05, 0.994385302066803, -0.012866039760410786, -0.001022055745124817, 0.0037307552993297577, 0.0027339875232428312, 0.009606639854609966, 
-0.008584169670939445, 0.013230630196630955, -0.09363924711942673, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0] if step == 5: return [0.9786288738250732, 0.003588682971894741, -0.0023918221704661846, 0.004777341615408659, -0.0037737672682851553, 0.002030150732025504, -0.013176627457141876, -0.010321627371013165, -0.026121007278561592, -0.015811236575245857, 0.001201795064844191, 1.0035192966461182, -0.01841144822537899, 0.008479919284582138, 0.003908892627805471, 0.0044402433559298515, 0.015674248337745667, 0.005413076840341091, 0.008270949125289917, -0.18248037993907928, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0] if step <= 10: return [0.9777207374572754, 0.003674966050311923, -0.000865395471919328, 0.00839876476675272, -0.00921174418181181, -0.00444203382357955, -0.024727338925004005, -0.007308421190828085, -0.05595914274454117, -0.009856735356152058, -0.0010057302424684167, 1.0023410320281982, -0.01852775737643242, 0.0016161234816536307, -0.0016956499312072992, 0.002951698610559106, 0.026358529925346375, -0.017851702868938446, -0.004329687915742397, -0.18836215138435364, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 
0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0] if step >= 11: return [0.9716978073120117, 0.056364890187978745, -0.029213212430477142, 0.03584786504507065, 0.0005257084267213941, -0.0900191217660904, -0.09005090594291687, -0.07786328345537186, -0.13387863337993622, -0.0701950192451477, 0.001431028125807643, 0.9523154497146606, -0.04134024307131767, 0.004861794412136078, 0.013068363070487976, 0.018636401742696762, 0.00844097975641489, -0.008905373513698578, -0.0029179020784795284, -0.10307711362838745, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0] def warp(blurry, params): M = params.reshape(10, 10) h, w = blurry.shape x = torch.linspace(-1, 1, w, device=blurry.device) y = torch.linspace(-1, 1, h, device=blurry.device) y, x = torch.meshgrid(y, x, indexing="ij") features = [x, y, 1, x*x, x*y, y*y, x*x*x, x*x*y, x*y*y, y*y*y] x_warped = sum(weight * feature for weight, feature in zip(M[0], features)) y_warped = sum(weight * feature for weight, feature in zip(M[1], features)) grid = torch.stack([x_warped, y_warped], dim=2) blurry_warp = F.grid_sample( blurry[None, None, :, :], grid[None, :, :, :], mode='bicubic', align_corners=True, padding_mode='border')[0, 0, :, :] return blurry_warp def loss(sharp, blurry, M): blurry_warped = warp(blurry, M) blurry_warped_centered = blurry_warped - blurry_warped.mean() sharp_centered = sharp - sharp.mean() return torch.mean(torch.square(blurry_warped_centered - sharp_centered)) def main(): from load_hdc import load_hdc device = 
torch.device("cuda") sample = 2 for step in range(1, 10): sharp = load_hdc(step=step, cam=1, sample=sample, font="Times") blurry = load_hdc(step=step, cam=2, sample=sample, font="Times") sharp = torch.tensor(sharp, device=device) blurry = torch.tensor(blurry, device=device) M_eye = torch.eye(10, device=device).ravel() M_dewarp = torch.tensor(get_dewarping_matrix(step), device=device) loss_baseline = loss(sharp, blurry, M_eye).item() loss_warped = loss(sharp, blurry, M_dewarp).item() print("step", step) print("baseline:", loss_baseline) print("dewarped:", loss_warped) print() if __name__ == "__main__": main()
118.2
892
0.629554
1,910
8,865
2.9
0.126702
0.414154
0.585485
0.733706
0.261058
0.254559
0.254559
0.254559
0.254559
0.254559
0
0.571655
0.145967
8,865
74
893
119.797297
0.159952
0
0
0
0
0
0.006655
0
0
0
0
0
0
1
0.076923
false
0
0.057692
0
0.173077
0.076923
0
0
0
null
1
1
1
0
0
0
0
0
0
0
1
0
0
0
0
1
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
f23d333bf368163c918b3042745f184a91f16d6f
33
py
Python
concurrent_execution/_subprocess/subprocess_pyfile_only/02-worin-pipe-capture-err-otpt.py
codingEzio/code_python_standard_library
90ea086fa13ccde4f69bb5abb87450f07c2c5bbf
[ "MIT" ]
null
null
null
concurrent_execution/_subprocess/subprocess_pyfile_only/02-worin-pipe-capture-err-otpt.py
codingEzio/code_python_standard_library
90ea086fa13ccde4f69bb5abb87450f07c2c5bbf
[ "MIT" ]
null
null
null
concurrent_execution/_subprocess/subprocess_pyfile_only/02-worin-pipe-capture-err-otpt.py
codingEzio/code_python_standard_library
90ea086fa13ccde4f69bb5abb87450f07c2c5bbf
[ "MIT" ]
null
null
null
import subprocess # placeholder
8.25
17
0.818182
3
33
9
1
0
0
0
0
0
0
0
0
0
0
0
0.151515
33
3
18
11
0.964286
0.333333
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
f2522f4dae02a99bf22bdace69bb17bbbf762443
5,000
py
Python
battlesmithbackend/base/models.py
craigrmills/Capstone_Backend
b40d4c869a7f2e2ec2e746c159b269fcfd1d8db8
[ "MIT" ]
null
null
null
battlesmithbackend/base/models.py
craigrmills/Capstone_Backend
b40d4c869a7f2e2ec2e746c159b269fcfd1d8db8
[ "MIT" ]
null
null
null
battlesmithbackend/base/models.py
craigrmills/Capstone_Backend
b40d4c869a7f2e2ec2e746c159b269fcfd1d8db8
[ "MIT" ]
null
null
null
from django.db import models from django.contrib.auth.models import User from django.db.models.fields import related # Create your models here. class Product(models.Model): user = models.ForeignKey(User, on_delete=models.SET_NULL, null=True) name = models.CharField(max_length=200, null=True, blank=True) image = models.ImageField(null=True, blank=True, default='/placeholder.png') category = models.CharField(max_length=200, null=True, blank=True) description = models.TextField(null=True, blank=True) rating = models.DecimalField( max_digits=7, decimal_places=2, null=True, blank=True) numReviews = models.IntegerField(null=True, blank=True, default=0) price = models.DecimalField( max_digits=7, decimal_places=2, null=True, blank=True) countInStock = models.IntegerField(null=True, blank=True, default=0) _id = models.AutoField(primary_key=True, editable=False) def __str__(self): return self.name class Review(models.Model): product = models.ForeignKey(Product, on_delete=models.SET_NULL, null=True) user = models.ForeignKey(User, on_delete=models.SET_NULL, null=True) name = models.CharField(max_length=200, null=True, blank=True) rating = models.IntegerField(null=True, blank=True, default=0) comment = models.TextField(null=True, blank=True) createdAt = models.DateTimeField(auto_now_add=True) _id = models.AutoField(primary_key=True, editable=False) def __str__(self): return str(self.rating) class Order(models.Model): user = models.ForeignKey(User, on_delete=models.SET_NULL, null=True) paymentMethod = models.CharField(max_length=200, null=True, blank=True) taxPrice = models.DecimalField( max_digits=7, decimal_places=2, null=True, blank=True) shippingPrice = models.DecimalField( max_digits=7, decimal_places=2, null=True, blank=True) totalPrice = models.DecimalField( max_digits=7, decimal_places=2, null=True, blank=True) isPaid = models.BooleanField(default=False) paidAt = models.DateTimeField(auto_now_add=False, null=True, blank=True) isDelivered = 
models.BooleanField(default=False) deliveredAt = models.DateTimeField( auto_now_add=False, null=True, blank=True) createdAt = models.DateTimeField(auto_now_add=True) _id = models.AutoField(primary_key=True, editable=False) def __str__(self): return str(self.createdAt) class OrderItem(models.Model): product = models.ForeignKey(Product, on_delete=models.SET_NULL, null=True) order = models.ForeignKey(Order, on_delete=models.SET_NULL, null=True) name = models.CharField(max_length=200, null=True, blank=True) qty = models.IntegerField(null=True, blank=True, default=0) price = models.DecimalField( max_digits=7, decimal_places=2, null=True, blank=True) image = models.CharField(max_length=200, null=True, blank=True) _id = models.AutoField(primary_key=True, editable=False) def __str__(self): return str(self.name) class ShippingAddress(models.Model): order = models.OneToOneField( Order, on_delete=models.CASCADE, null=True, blank=True) address = models.CharField(max_length=200, null=True, blank=True) city = models.CharField(max_length=200, null=True, blank=True) postalCode = models.CharField(max_length=200, null=True, blank=True) country = models.CharField(max_length=200, null=True, blank=True) shippingPrice = models.DecimalField( max_digits=7, decimal_places=2, null=True, blank=True) _id = models.AutoField(primary_key=True, editable=False) def __str__(self): return str(self.address) class Faction(models.Model): name = models.CharField(max_length=200, null=True, blank=True) numPlayed = models.IntegerField(null=True, blank=True, default=0) winRate = models.IntegerField(null=True, blank=True, default=0) _id = models.AutoField(primary_key=True, editable=False) def __str__(self): return str(self.name) class Game(models.Model): player1 = models.ForeignKey( User, on_delete=models.SET_NULL, null=True, related_name="player1") player2 = models.ForeignKey( User, on_delete=models.SET_NULL, null=True, related_name="player2") p1Faction = models.ForeignKey( Faction, 
on_delete=models.SET_NULL, null=True, related_name="p1Faction") p2Faction = models.ForeignKey( Faction, on_delete=models.SET_NULL, null=True, related_name="p2Faction") p1Score = models.IntegerField(null=True, blank=True, default=0) p2Score = models.IntegerField(null=True, blank=True, default=0) loser = models.ForeignKey( User, on_delete=models.SET_NULL, null=True, related_name="winner") winner = models.ForeignKey( User, on_delete=models.SET_NULL, null=True, related_name="loser") _id = models.AutoField(primary_key=True, editable=False) def __str__(self): return str(self._id)
42.016807
80
0.7192
660
5,000
5.286364
0.145455
0.100889
0.119232
0.155919
0.799943
0.793064
0.771568
0.771568
0.72227
0.643737
0
0.015558
0.1644
5,000
118
81
42.372881
0.819531
0.0048
0
0.4
0
0
0.011862
0
0
0
0
0
0
1
0.073684
false
0
0.031579
0.073684
0.831579
0
0
0
0
null
0
0
0
0
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
6
f284b0c336bfbe5b609e8c33f52f7147355d99fe
4,518
py
Python
JsonFormatting.py
shaoshixin/Tool-Set
44a920b4351c4add1cdf247effad7cdcc22573f1
[ "Apache-2.0" ]
null
null
null
JsonFormatting.py
shaoshixin/Tool-Set
44a920b4351c4add1cdf247effad7cdcc22573f1
[ "Apache-2.0" ]
null
null
null
JsonFormatting.py
shaoshixin/Tool-Set
44a920b4351c4add1cdf247effad7cdcc22573f1
[ "Apache-2.0" ]
null
null
null
import json class JsonFormatting: def __init__(self): self.json = {"tools":[{"id":"toc","text":"图层","tooltip":"","enable":1},{"id":"view","text":"地图操作","tooltip":"","enable":1},{"id":"zoomIn","text":"放大","tooltip":"","enable":1},{"id":"zoomOut","text":"缩小","tooltip":"","enable":1},{"id":"clickZoomIn","text":"点击放大","tooltip":"","enable":1},{"id":"clickZoomOut","text":"点击缩小","tooltip":"","enable":1},{"id":"clickCenterAt","text":"点击居中","tooltip":"","enable":1},{"id":"customLocate","text":"自定义范围","tooltip":"","enable":1},{"id":"pan","text":"漫游","tooltip":"","enable":1},{"id":"zoomToFullExtent","text":"全图","tooltip":"","enable":1},{"id":"zoomToPrevExtent","text":"前图","tooltip":"","enable":1},{"id":"zoomToNextExtent","text":"后图","tooltip":"","enable":1},{"id":"tools","text":"工具","tooltip":"","enable":1},{"id":"distanceMeasure","text":"测距","tooltip":"","enable":1},{"id":"areaMeasure","text":"测面","tooltip":"","enable":1},{"id":"print","text":"打印","tooltip":"","enable":1},{"id":"mapSaveAs","text":"地图另存为","tooltip":"","enable":1},{"id":"pipeLineQuery","text":"管线查询","tooltip":"","enable":1},{"id":"identify","text":"快速查询","tooltip":"","enable":1},{"id":"identify","text":"快速查询","tooltip":"","enable":1},{"id":"spatialQuery","text":"空间查询","tooltip":"","enable":2},{"id":"query","text":"管线明细","tooltip":"","enable":2},{"id":"conditionQuery","text":"条件查询","tooltip":"","enable":1},{"id":"pipeStatistic","text":"管线统计","tooltip":"","enable":1},{"id":"pipePointStatistic","text":"管点统计","tooltip":"","enable":1},{"id":"pipeLineStatistic","text":"管线统计","tooltip":"","enable":1},{"id":"spatialStatistic","text":"空间统计","tooltip":"","enable":1},{"id":"fractureAnalysis","text":"断面分析","tooltip":"","enable":1},{"id":"tranSectionAnalysis","text":"横断面分析","tooltip":"","enable":1},{"id":"verticalsectionAnalysis","text":"纵断面分析","tooltip":"","enable":1},{"id":"spatialAnalysis","text":"空间分析","tooltip":"","enable":1},{"id":"pipeBurstAnalysis","text":"爆管分析","tooltip":"","enable":2},{"id":"connectivityAnalysis"
,"text":"连通性分析","tooltip":"","enable":1},{"id":"bufferParamAnalysis","text":"缓冲区分析","tooltip":"","enable":1},{"id":"sectionAnalysis","text":"剖面分析","tooltip":"","enable":1},{"id":"pipeLineCollision","text":"管线碰撞","tooltip":"","enable":1},{"id":"pipeDisTance","text":"间距分析","tooltip":"","enable":1},{"id":"overburdenDepthAnalysis","text":"覆土深度分析","tooltip":"","enable":1},{"id":"crossingAnalysis","text":"交叉口分析","tooltip":"","enable":1},{"id":"parallelDistanceAnalysis","text":"水平净距分析","tooltip":"","enable":2},{"id":"dataUpload","text":"数据上报","tooltip":"","enable":1},{"id":"uploadFile","text":"数据上报","tooltip":"","enable":1},{"id":"uploadFileManage","text":"上报数据处理","tooltip":"","enable":1},{"id":"statUploadData","text":"上报统计","tooltip":"","enable":1},{"id":"uploadDataResult","text":"上报结果","tooltip":"","enable":1},{"id":"polling","text":"管线巡检","tooltip":"","enable":1},{"id":"pollingInfo","text":"巡检信息查看","tooltip":"","enable":1},{"id":"pollingThematic","text":"巡检专题图","tooltip":"","enable":1},{"id":"warning","text":"报废预警","tooltip":"","enable":1},{"id":"emergencyProcessing","text":"应急处理","tooltip":"","enable":1},{"id":"emergencyProcessingInfo","text":"应急信息查看","tooltip":"","enable":1},{"id":"emergencyProcessingThematic","text":"应急专题图","tooltip":"","enable":1},{"id":"accidentStatistic","text":"事故统计分析","tooltip":"","enable":1},{"id":"refreash","text":"刷新","tooltip":"","enable":1}],"groupTools":[["toc",{"id":"view","text":"地图操作","items":["zoomIn","zoomOut","clickZoomIn","clickZoomOut","clickCenterAt","customLocate","pan","zoomToFullExtent","zoomToPrevExtent","zoomToNextExtent"]},{"id":"tools","text":"工具","items":["distanceMeasure","areaMeasure","print","mapSaveAs"]}],[{"id":"pipeLineQuery","text":"管线查询","items":["identify","spatialQuery","query","conditionQuery"]},{"id":"pipeStatistic","text":"管线统计","items":["pipePointStatistic","pipeLineStatistic","spatialStatistic"]}],[{"id":"fractureAnalysis","text":"断面分析","items":["tranSectionAnalysis","verticalsectionAnalysis"]},{"id":"spatia
lAnalysis","text":"空间分析","items":["pipeBurstAnalysis","valveClosedAnalysis","connectivityAnalysis","bufferParamAnalysis","sectionAnalysis"]},{"id":"pipeLineCollision","text":"管线碰撞","items":["pipeDisTance","overburdenDepthAnalysis","crossingAnalysis","parallelDistanceAnalysis"]}],[{"id":"dataUpload","text":"数据上报","items":["uploadFile","uploadFileManage","statUploadData","uploadDataResult"]}],["refreash"]]} self.go() def go(self): js = json.dumps(self.json) print(js)
347.538462
4,362
0.631253
479
4,518
5.94572
0.256785
0.246489
0.245787
0.275281
0.061798
0.061798
0.02809
0.02809
0.02809
0.02809
0
0.012124
0.014166
4,518
13
4,363
347.538462
0.627301
0
0
0
0
0
0.598584
0.042045
0
0
0
0
0
1
0.25
false
0
0.125
0
0.5
0.25
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
1
1
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
6
f2b5e92799c7c339c27055f252e1f991b123a63c
164
py
Python
conlo/__init__.py
kira607/config_loader
024f33d48fee1635dfa9ed286f84bb96f22c134a
[ "MIT" ]
null
null
null
conlo/__init__.py
kira607/config_loader
024f33d48fee1635dfa9ed286f84bb96f22c134a
[ "MIT" ]
null
null
null
conlo/__init__.py
kira607/config_loader
024f33d48fee1635dfa9ed286f84bb96f22c134a
[ "MIT" ]
null
null
null
from .config_file import ConfigFile from .config_format import ConfigFormat, ConfigFormatType from .data_dir import DataDir from .config_loader import ConfigLoader
32.8
57
0.865854
21
164
6.571429
0.619048
0.217391
0
0
0
0
0
0
0
0
0
0
0.103659
164
4
58
41
0.938776
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
4b436be6efe22133f999bbf691697cb3d912dfb0
523
py
Python
carpedm/data/__init__.py
SimulatedANeal/carpedm
22bd5d28cfff50d7462e2a8e1b8dc1675e2a4c89
[ "MIT" ]
2
2020-09-30T04:59:06.000Z
2021-03-30T20:42:44.000Z
carpedm/data/__init__.py
SimulatedANeal/carpedm
22bd5d28cfff50d7462e2a8e1b8dc1675e2a4c89
[ "MIT" ]
null
null
null
carpedm/data/__init__.py
SimulatedANeal/carpedm
22bd5d28cfff50d7462e2a8e1b8dc1675e2a4c89
[ "MIT" ]
1
2018-05-25T07:15:16.000Z
2018-05-25T07:15:16.000Z
# # Copyright (C) 2018 Neal Digre. # # This software may be modified and distributed under the terms # of the MIT license. See the LICENSE file for details. # Bring in subpackages. from carpedm.data.download import * from carpedm.data.io import * from carpedm.data.lang import * from carpedm.data.meta import * from carpedm.data.ops import * from carpedm.data.preproc import * from carpedm.data.providers import * from carpedm.data.util import * # Easy access to sample data from carpedm.data.small import path as sample
27.526316
63
0.772467
81
523
4.987654
0.54321
0.24505
0.334158
0.363861
0
0
0
0
0
0
0
0.009029
0.152964
523
18
64
29.055556
0.902935
0.372849
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
4b678c6f375a767f4053c3c745bcd381e59ab591
43
py
Python
odoo-13.0/odoo/tests/__init__.py
VaibhavBhujade/Blockchain-ERP-interoperability
b5190a037fb6615386f7cbad024d51b0abd4ba03
[ "MIT" ]
null
null
null
odoo-13.0/odoo/tests/__init__.py
VaibhavBhujade/Blockchain-ERP-interoperability
b5190a037fb6615386f7cbad024d51b0abd4ba03
[ "MIT" ]
null
null
null
odoo-13.0/odoo/tests/__init__.py
VaibhavBhujade/Blockchain-ERP-interoperability
b5190a037fb6615386f7cbad024d51b0abd4ba03
[ "MIT" ]
null
null
null
from . import common from .common import *
14.333333
21
0.744186
6
43
5.333333
0.5
0
0
0
0
0
0
0
0
0
0
0
0.186047
43
2
22
21.5
0.914286
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6
4b689b4dcbbca47ba738d490ccf9d38f46e63ce4
45,754
py
Python
tmu/tsetlin_machine.py
cair/tmu
f662aea05726f8fb99182496220c75e75e5d0e2d
[ "MIT" ]
14
2021-12-07T13:44:18.000Z
2021-12-30T23:23:26.000Z
tmu/tsetlin_machine.py
cair/tmu
f662aea05726f8fb99182496220c75e75e5d0e2d
[ "MIT" ]
null
null
null
tmu/tsetlin_machine.py
cair/tmu
f662aea05726f8fb99182496220c75e75e5d0e2d
[ "MIT" ]
2
2021-12-11T11:28:50.000Z
2022-02-21T22:20:21.000Z
# Copyright (c) 2022 Ole-Christoffer Granmo # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # The above copyright notice and this permission notice shall be included in all # copies or substantial portions of the Software. # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. 
# This code implements the Convolutional Tsetlin Machine from paper arXiv:1905.09688 # https://arxiv.org/abs/1905.09688 import sys import numpy as np from tmu.clause_bank import ClauseBank from tmu.weight_bank import WeightBank from scipy.sparse import csr_matrix from sys import maxsize from time import time class TMBasis(): def __init__(self, number_of_clauses, T, s, type_iii_feedback=False, focused_negative_sampling=False, output_balancing=False, d=200.0, platform='CPU', patch_dim=None, feature_negation=True, boost_true_positive_feedback=1, number_of_state_bits_ta=8, number_of_state_bits_ind=8, weighted_clauses=False, clause_drop_p = 0.0, literal_drop_p = 0.0): self.number_of_clauses = number_of_clauses self.number_of_state_bits_ta = number_of_state_bits_ta self.number_of_state_bits_ind = number_of_state_bits_ind self.T = int(T) self.s = s self.type_iii_feedback = type_iii_feedback self.focused_negative_sampling= focused_negative_sampling self.output_balancing = output_balancing self.d = d self.platform = platform self.patch_dim = patch_dim self.feature_negation = feature_negation self.boost_true_positive_feedback = boost_true_positive_feedback self.weighted_clauses = weighted_clauses self.clause_drop_p = clause_drop_p self.literal_drop_p = literal_drop_p self.X_train = np.zeros(0, dtype=np.uint32) self.X_test = np.zeros(0, dtype=np.uint32) self.initialized = False def clause_co_occurrence(self, X, percentage=False): clause_outputs = csr_matrix(self.transform(X)) if percentage: return clause_outputs.transpose().dot(clause_outputs).multiply(1.0/clause_outputs.sum(axis=0)) else: return clause_outputs.transpose().dot(clause_outputs) def transform(self, X): encoded_X = self.clause_bank.prepare_X(X) transformed_X = np.empty((X.shape[0], self.number_of_clauses), dtype=np.uint32) for e in range(X.shape[0]): transformed_X[e,:] = self.clause_bank.calculate_clause_outputs_predict(encoded_X, e) return transformed_X def transform_patchwise(self, X): encoded_X = 
tmu.tools.encode(X, X.shape[0], self.number_of_patches, self.clause_bank.number_of_ta_chunks, self.dim, self.patch_dim, 0) transformed_X = np.empty((X.shape[0], self.number_of_clauses*self.number_of_patches), dtype=np.uint32) for e in range(X.shape[0]): transformed_X[e,:] = self.clause_bank.calculate_clause_outputs_patchwise(encoded_X, e) return transformed_X.reshape((X.shape[0], self.number_of_clauses, self.number_of_patches)) def literal_clause_frequency(self): clause_active = np.ones(self.number_of_clauses, dtype=np.uint32) return self.clause_bank.calculate_literal_clause_frequency(clause_active) def get_ta_action(self, clause, ta): return self.clause_bank.get_ta_action(clause, ta) def get_ta_state(self, clause, ta): return self.clause_bank.get_ta_state(clause, ta) def set_ta_state(self, clause, ta, state): return self.clause_bank.set_ta_state(clause, ta, state) class TMClassifier(TMBasis): def __init__(self, number_of_clauses, T, s, type_iii_feedback=False, d=200.0, platform='CPU', patch_dim=None, feature_negation=True, boost_true_positive_feedback=1, number_of_state_bits_ta=8, number_of_state_bits_ind=8, weighted_clauses=False, clause_drop_p = 0.0, literal_drop_p = 0.0): super().__init__(number_of_clauses, T, s, type_iii_feedback=type_iii_feedback, d=d, platform=platform, patch_dim=patch_dim, feature_negation=feature_negation, boost_true_positive_feedback=boost_true_positive_feedback, number_of_state_bits_ta=number_of_state_bits_ta, number_of_state_bits_ind=number_of_state_bits_ind, weighted_clauses=weighted_clauses, clause_drop_p = clause_drop_p, literal_drop_p = literal_drop_p) def initialize(self, X, Y): self.number_of_classes = int(np.max(Y) + 1) self.weight_banks = [] for i in range(self.number_of_classes): self.weight_banks.append(WeightBank(np.concatenate((np.ones(self.number_of_clauses//2, dtype=np.int32), -1*np.ones(self.number_of_clauses//2, dtype=np.int32))))) self.clause_banks = [] if self.platform == 'CPU': for i in range(self.number_of_classes): 
self.clause_banks.append(ClauseBank(X, self.number_of_clauses, self.number_of_state_bits_ta, self.number_of_state_bits_ind, self.patch_dim)) elif self.platform == 'CUDA': from tmu.clause_bank_cuda import ClauseBankCUDA for i in range(self.number_of_classes): self.clause_banks.append(ClauseBankCUDA(X, self.number_of_clauses, self.number_of_state_bits_ta, self.patch_dim)) else: print("Unknown Platform") sys.exit(-1) self.positive_clauses = np.concatenate((np.ones(self.number_of_clauses//2, dtype=np.int32), np.zeros(self.number_of_clauses//2, dtype=np.int32))) self.negative_clauses = np.concatenate((np.zeros(self.number_of_clauses//2, dtype=np.int32), np.ones(self.number_of_clauses//2, dtype=np.int32))) def fit(self, X, Y, shuffle=True): if self.initialized == False: self.initialize(X, Y) self.initialized = True if not np.array_equal(self.X_train, X): self.encoded_X_train = self.clause_banks[0].prepare_X(X) self.X_train = X.copy() Ym = np.ascontiguousarray(Y).astype(np.uint32) clause_active = [] for i in range(self.number_of_classes): # Clauses are dropped based on their weights class_clause_active = np.ascontiguousarray(np.ones(self.number_of_clauses, dtype=np.int32)) clause_score = np.abs(self.weight_banks[i].get_weights()) deactivate = np.random.choice(np.arange(self.number_of_clauses), size=int(self.number_of_clauses*self.clause_drop_p), p = clause_score / clause_score.sum()) for d in range(deactivate.shape[0]): class_clause_active[deactivate[d]] = 0 clause_active.append(class_clause_active) # Literals are dropped based on their frequency literal_active = (np.zeros(self.clause_banks[0].number_of_ta_chunks, dtype=np.uint32) | ~0).astype(np.uint32) literal_clause_frequency = self.literal_clause_frequency() if literal_clause_frequency.sum() > 0: deactivate = np.random.choice(np.arange(self.clause_banks[0].number_of_literals), size=int(self.clause_banks[0].number_of_literals*self.literal_drop_p), p = literal_clause_frequency / literal_clause_frequency.sum()) else: 
deactivate = np.random.choice(np.arange(self.clause_banks[0].number_of_literals), size=int(self.clause_banks[0].number_of_literals*self.literal_drop_p)) for d in range(deactivate.shape[0]): ta_chunk = deactivate[d] // 32 chunk_pos = deactivate[d] % 32 literal_active[ta_chunk] &= (~(1 << chunk_pos)) if not self.feature_negation: for k in range(self.clause_banks[0].number_of_literals//2, self.clause_banks[0].number_of_literals): ta_chunk = k // 32 chunk_pos = k % 32 literal_active[ta_chunk] &= (~(1 << chunk_pos)) literal_active = literal_active.astype(np.uint32) shuffled_index = np.arange(X.shape[0]) if shuffle: np.random.shuffle(shuffled_index) for e in shuffled_index: target = Ym[e] clause_outputs = self.clause_banks[target].calculate_clause_outputs_update(literal_active, self.encoded_X_train, e) class_sum = np.dot(clause_active[target] * self.weight_banks[target].get_weights(), clause_outputs).astype(np.int32) class_sum = np.clip(class_sum, -self.T, self.T) update_p = (self.T - class_sum)/(2*self.T) if self.weighted_clauses: self.weight_banks[target].increment(clause_outputs, update_p, clause_active[target], False) self.clause_banks[target].type_i_feedback(update_p, self.s, self.boost_true_positive_feedback, clause_active[target]*self.positive_clauses, literal_active, self.encoded_X_train, e) self.clause_banks[target].type_ii_feedback(update_p, clause_active[target]*self.negative_clauses, literal_active, self.encoded_X_train, e) if self.type_iii_feedback: self.clause_banks[target].type_iii_feedback(update_p, self.d, clause_active[target]*self.positive_clauses, literal_active, self.encoded_X_train, e, 1) self.clause_banks[target].type_iii_feedback(update_p, self.d, clause_active[target]*self.negative_clauses, literal_active, self.encoded_X_train, e, 0) not_target = np.random.randint(self.number_of_classes) while not_target == target: not_target = np.random.randint(self.number_of_classes) clause_outputs = 
self.clause_banks[not_target].calculate_clause_outputs_update(literal_active, self.encoded_X_train, e) class_sum = np.dot(clause_active[not_target] * self.weight_banks[not_target].get_weights(), clause_outputs).astype(np.int32) class_sum = np.clip(class_sum, -self.T, self.T) update_p = (self.T + class_sum)/(2*self.T) if self.weighted_clauses: self.weight_banks[not_target].decrement(clause_outputs, update_p, clause_active[not_target], False) self.clause_banks[not_target].type_i_feedback(update_p, self.s, self.boost_true_positive_feedback, clause_active[not_target]*self.negative_clauses, literal_active, self.encoded_X_train, e) self.clause_banks[not_target].type_ii_feedback(update_p, clause_active[not_target]*self.positive_clauses, literal_active, self.encoded_X_train, e) if self.type_iii_feedback: self.clause_banks[not_target].type_iii_feedback(update_p, self.d, clause_active[not_target]*self.negative_clauses, literal_active, self.encoded_X_train, e, 1) self.clause_banks[not_target].type_iii_feedback(update_p, self.d, clause_active[not_target]*self.positive_clauses, literal_active, self.encoded_X_train, e, 0) return def predict(self, X): if not np.array_equal(self.X_test, X): self.encoded_X_test = self.clause_banks[0].prepare_X(X) self.X_test = X.copy() Y = np.ascontiguousarray(np.zeros(X.shape[0], dtype=np.uint32)) for e in range(X.shape[0]): max_class_sum = -self.T max_class = 0 for i in range(self.number_of_classes): class_sum = np.dot(self.weight_banks[i].get_weights(), self.clause_banks[i].calculate_clause_outputs_predict(self.encoded_X_test, e)).astype(np.int32) class_sum = np.clip(class_sum, -self.T, self.T) if class_sum > max_class_sum: max_class_sum = class_sum max_class = i Y[e] = max_class return Y def transform(self, X): encoded_X = self.clause_banks[0].prepare_X(X) transformed_X = np.empty((X.shape[0], self.number_of_classes, self.number_of_clauses), dtype=np.uint32) for e in range(X.shape[0]): for i in range(self.number_of_classes): 
transformed_X[e,i,:] = self.clause_banks[i].calculate_clause_outputs_predict(encoded_X, e) return transformed_X.reshape((X.shape[0], self.number_of_classes*self.number_of_clauses)) def transform_patchwise(self, X): encoded_X = tmu.tools.encode(X, X.shape[0], self.number_of_patches, self.number_of_ta_chunks, self.dim, self.patch_dim, 0) transformed_X = np.empty((X.shape[0], self.number_of_classes, self.number_of_clauses//2*self.number_of_patches), dtype=np.uint32) for e in range(X.shape[0]): for i in range(self.number_of_classes): transformed_X[e,i,:] = self.clause_bank[i].calculate_clause_outputs_patchwise(encoded_X, e) return transformed_X.reshape((X.shape[0], self.number_of_classes*self.number_of_clauses, self.number_of_patches)) def literal_clause_frequency(self): clause_active = np.ones(self.number_of_clauses, dtype=np.uint32) literal_frequency = np.zeros(self.clause_banks[0].number_of_literals, dtype=np.uint32) for i in range(self.number_of_classes): literal_frequency += self.clause_banks[i].calculate_literal_clause_frequency(clause_active) return literal_frequency def literal_importance(self, the_class, negated_features=False, negative_polarity=False): literal_frequency = np.zeros(self.clause_banks[0].number_of_literals, dtype=np.uint32) if negated_features: if negative_polarity: literal_frequency[self.clause_banks[the_class].number_of_literals//2:] += self.clause_banks[the_class].calculate_literal_clause_frequency(self.negative_clauses)[self.clause_banks[the_class].number_of_literals//2:] else: literal_frequency[self.clause_banks[the_class].number_of_literals//2:] += self.clause_banks[the_class].calculate_literal_clause_frequency(self.positive_clauses)[self.clause_banks[the_class].number_of_literals//2:] else: if negative_polarity: literal_frequency[:self.clause_banks[the_class].number_of_literals//2] += self.clause_banks[the_class].calculate_literal_clause_frequency(self.negative_clauses)[:self.clause_banks[the_class].number_of_literals//2] else: 
literal_frequency[:self.clause_banks[the_class].number_of_literals//2] += self.clause_banks[the_class].calculate_literal_clause_frequency(self.positive_clauses)[:self.clause_banks[the_class].number_of_literals//2] return literal_frequency def clause_precision(self, the_class, polarity, X, Y): clause_outputs = self.transform(X).reshape(X.shape[0], self.number_of_classes, 2, self.number_of_clauses//2)[:,the_class, polarity,:] if polarity == 0: true_positive_clause_outputs = clause_outputs[Y==the_class].sum(axis=0) false_positive_clause_outputs = clause_outputs[Y!=the_class].sum(axis=0) else: true_positive_clause_outputs = clause_outputs[Y!=the_class].sum(axis=0) false_positive_clause_outputs = clause_outputs[Y==the_class].sum(axis=0) return np.where(true_positive_clause_outputs + false_positive_clause_outputs == 0, 0, true_positive_clause_outputs/(true_positive_clause_outputs + false_positive_clause_outputs)) def clause_recall(self, the_class, polarity, X, Y): clause_outputs = self.transform(X).reshape(X.shape[0], self.number_of_classes, 2, self.number_of_clauses//2)[:,the_class, polarity,:] if polarity == 0: true_positive_clause_outputs = clause_outputs[Y==the_class].sum(axis=0) else: true_positive_clause_outputs = clause_outputs[Y!=the_class].sum(axis=0) return true_positive_clause_outputs / Y[Y==the_class].shape[0] def get_weight(self, the_class, polarity, clause): if polarity == 0: return self.weight_banks[the_class].get_weights()[clause] else: return self.weight_banks[the_class].get_weights()[self.number_of_clauses//2 + clause] def set_weight(self, the_class, polarity, clause, weight): if polarity == 0: self.weight_banks[the_class].get_weights()[clause] = weight else: self.weight_banks[the_class].get_weights()[self.number_of_clauses//2 + clause] = weight def get_ta_action(self, the_class, polarity, clause, ta): if polarity == 0: return self.clause_banks[the_class].get_ta_action(clause, ta) else: return 
self.clause_banks[the_class].get_ta_action(self.number_of_clauses//2 + clause, ta) def get_ta_state(self, the_class, polarity, clause, ta): if polarity == 0: return self.clause_banks[the_class].get_ta_state(clause, ta) else: return self.clause_banks[the_class].get_ta_state(self.number_of_clauses//2 + clause, ta) def set_ta_state(self, the_class, polarity, clause, ta, state): if polarity == 0: return self.clause_banks[the_class].set_ta_state(clause, ta, state) else: return self.clause_banks[the_class].set_ta_state(self.number_of_clauses//2 + clause, ta, state) class TMCoalescedClassifier(TMBasis): def __init__(self, number_of_clauses, T, s, type_iii_feedback=False, focused_negative_sampling=False, output_balancing=False, d=200.0, platform = 'CPU', patch_dim=None, feature_negation=True, boost_true_positive_feedback=1, number_of_state_bits_ta=8, number_of_state_bits_ind=8, weighted_clauses=False, clause_drop_p = 0.0, literal_drop_p = 0.0): super().__init__(number_of_clauses, T, s, type_iii_feedback=type_iii_feedback, focused_negative_sampling=focused_negative_sampling, output_balancing=output_balancing, d=d, platform = platform, patch_dim=patch_dim, feature_negation=feature_negation, boost_true_positive_feedback=boost_true_positive_feedback, number_of_state_bits_ta=number_of_state_bits_ta, number_of_state_bits_ind=number_of_state_bits_ind, weighted_clauses=weighted_clauses, clause_drop_p = clause_drop_p, literal_drop_p = literal_drop_p) def initialize(self, X, Y): self.number_of_classes = int(np.max(Y) + 1) if self.platform == 'CPU': self.clause_bank = ClauseBank(X, self.number_of_clauses, self.number_of_state_bits_ta, self.number_of_state_bits_ind, self.patch_dim) elif self.platform == 'CUDA': from tmu.clause_bank_cuda import ClauseBankCUDA self.clause_bank = ClauseBankCUDA(X, self.number_of_clauses, self.number_of_state_bits_ta, self.patch_dim) else: print("Unknown Platform") sys.exit(-1) self.weight_banks = [] for i in range(self.number_of_classes): 
self.weight_banks.append(WeightBank(np.random.choice([-1,1], size=self.number_of_clauses).astype(np.int32))) def update(self, target, e): clause_outputs = self.clause_bank.calculate_clause_outputs_update(self.literal_active, self.encoded_X_train, e) class_sum = np.dot(self.clause_active * self.weight_banks[target].get_weights(), clause_outputs).astype(np.int32) class_sum = np.clip(class_sum, -self.T, self.T) update_p = (self.T - class_sum)/(2*self.T) type_iii_feedback_selection = np.random.choice(2) self.clause_bank.type_i_feedback(update_p, self.s, self.boost_true_positive_feedback, self.clause_active*(self.weight_banks[target].get_weights() >= 0), self.literal_active, self.encoded_X_train, e) self.clause_bank.type_ii_feedback(update_p, self.clause_active*(self.weight_banks[target].get_weights() < 0), self.literal_active, self.encoded_X_train, e) self.weight_banks[target].increment(clause_outputs, update_p, self.clause_active, True) if self.type_iii_feedback and type_iii_feedback_selection == 0: self.clause_bank.type_iii_feedback(update_p, self.d, self.clause_active*(self.weight_banks[target].get_weights() >= 0), self.literal_active, self.encoded_X_train, e, 1) self.clause_bank.type_iii_feedback(update_p, self.d, self.clause_active*(self.weight_banks[target].get_weights() < 0), self.literal_active, self.encoded_X_train, e, 0) for i in range(self.number_of_classes): if i == target: self.update_ps[i] = 0.0 else: self.update_ps[i] = np.dot(self.clause_active * self.weight_banks[i].get_weights(), clause_outputs).astype(np.int32) self.update_ps[i] = np.clip(self.update_ps[i], -self.T, self.T) self.update_ps[i] = 1.0*(self.T + self.update_ps[i])/(2*self.T) if self.update_ps.sum() == 0: return if self.focused_negative_sampling: not_target = np.random.choice(self.number_of_classes, p=self.update_ps/self.update_ps.sum()) update_p = self.update_ps[not_target] else: not_target = np.random.randint(self.number_of_classes) while not_target == target: not_target = 
np.random.randint(self.number_of_classes) update_p = self.update_ps[not_target] self.clause_bank.type_i_feedback(update_p, self.s, self.boost_true_positive_feedback, self.clause_active * (self.weight_banks[not_target].get_weights() < 0), self.literal_active, self.encoded_X_train, e) self.clause_bank.type_ii_feedback(update_p, self.clause_active*(self.weight_banks[not_target].get_weights() >= 0), self.literal_active, self.encoded_X_train, e) if self.type_iii_feedback and type_iii_feedback_selection == 1: self.clause_bank.type_iii_feedback(update_p, self.d, self.clause_active*(self.weight_banks[not_target].get_weights() < 0), self.literal_active, self.encoded_X_train, e, 1) self.clause_bank.type_iii_feedback(update_p, self.d, self.clause_active*(self.weight_banks[not_target].get_weights() >= 0), self.literal_active, self.encoded_X_train, e, 0) self.weight_banks[not_target].decrement(clause_outputs, update_p, self.clause_active, True) def fit(self, X, Y, shuffle=True): if self.initialized == False: self.initialize(X, Y) self.initialized = True if not np.array_equal(self.X_train, X): self.encoded_X_train = self.clause_bank.prepare_X(X) self.X_train = X.copy() Ym = np.ascontiguousarray(Y).astype(np.uint32) # Clauses are dropped based on their weights self.clause_active = np.ones(self.number_of_clauses, dtype=np.uint32) clause_score = np.zeros(self.number_of_clauses, dtype=np.int32) for i in range(self.number_of_classes): clause_score += np.abs(self.weight_banks[i].get_weights()) deactivate = np.random.choice(np.arange(self.number_of_clauses), size=int(self.number_of_clauses*self.clause_drop_p), p = clause_score / clause_score.sum()) for d in range(deactivate.shape[0]): self.clause_active[deactivate[d]] = 0 # Literals are dropped based on their frequency self.literal_active = (np.zeros(self.clause_bank.number_of_ta_chunks, dtype=np.uint32) | ~0).astype(np.uint32) literal_clause_frequency = self.literal_clause_frequency() if literal_clause_frequency.sum() > 0: deactivate 
= np.random.choice(np.arange(self.clause_bank.number_of_literals), size=int(self.clause_bank.number_of_literals*self.literal_drop_p), p = literal_clause_frequency / literal_clause_frequency.sum()) else: deactivate = np.random.choice(np.arange(self.clause_bank.number_of_literals), size=int(self.clause_bank.number_of_literals*self.literal_drop_p)) for d in range(deactivate.shape[0]): ta_chunk = deactivate[d] // 32 chunk_pos = deactivate[d] % 32 self.literal_active[ta_chunk] &= (~(1 << chunk_pos)) if not self.feature_negation: for k in range(self.clause_bank.number_of_literals//2, self.clause_bank.number_of_literals): ta_chunk = k // 32 chunk_pos = k % 32 self.literal_active[ta_chunk] &= (~(1 << chunk_pos)) self.literal_active = self.literal_active.astype(np.uint32) self.update_ps = np.empty(self.number_of_classes) shuffled_index = np.arange(X.shape[0]) if shuffle: np.random.shuffle(shuffled_index) class_observed = np.zeros(self.number_of_classes, dtype=np.uint32) example_indexes = np.zeros(self.number_of_classes, dtype=np.uint32) example_counter = 0 for e in shuffled_index: if self.output_balancing: if class_observed[Ym[e]] == 0: example_indexes[Ym[e]] = e class_observed[Ym[e]] = 1 example_counter += 1 else: example_indexes[example_counter] = e example_counter += 1 if example_counter == self.number_of_classes: example_counter = 0 for i in range(self.number_of_classes): class_observed[i] = 0 batch_example = example_indexes[i] self.update(Ym[batch_example], batch_example) return def predict(self, X): if not np.array_equal(self.X_test, X): self.encoded_X_test = self.clause_bank.prepare_X(X) self.X_test = X.copy() Y = np.ascontiguousarray(np.zeros(X.shape[0], dtype=np.uint32)) for e in range(X.shape[0]): max_class_sum = -self.T max_class = 0 clause_outputs = self.clause_bank.calculate_clause_outputs_predict(self.encoded_X_test, e) for i in range(self.number_of_classes): class_sum = np.dot(self.weight_banks[i].get_weights(), clause_outputs).astype(np.int32) class_sum = 
np.clip(class_sum, -self.T, self.T) if class_sum > max_class_sum: max_class_sum = class_sum max_class = i Y[e] = max_class return Y def clause_precision(self, the_class, positive_polarity, X, Y): clause_outputs = self.transform(X) weights = self.weight_banks[the_class].get_weights() if positive_polarity == 0: positive_clause_outputs = (weights >= 0)[:,np.newaxis].transpose() * clause_outputs true_positive_clause_outputs = clause_outputs[Y==the_class].sum(axis=0) false_positive_clause_outputs = clause_outputs[Y!=the_class].sum(axis=0) else: positive_clause_outputs = (weights < 0)[:,np.newaxis].transpose() * clause_outputs true_positive_clause_outputs = clause_outputs[Y!=the_class].sum(axis=0) false_positive_clause_outputs = clause_outputs[Y==the_class].sum(axis=0) return np.where(true_positive_clause_outputs + false_positive_clause_outputs == 0, 0, 1.0*true_positive_clause_outputs/(true_positive_clause_outputs + false_positive_clause_outputs)) def clause_recall(self, the_class, positive_polarity, X, Y): clause_outputs = self.transform(X) weights = self.weight_banks[the_class].get_weights() if positive_polarity == 0: positive_clause_outputs = (weights >= 0)[:,np.newaxis].transpose() * clause_outputs true_positive_clause_outputs = positive_clause_outputs[Y==the_class].sum(axis=0) else: positive_clause_outputs = (weights < 0)[:,np.newaxis].transpose() * clause_outputs true_positive_clause_outputs = positive_clause_outputs[Y!=the_class].sum(axis=0) return true_positive_clause_outputs / Y[Y==the_class].shape[0] def get_weight(self, the_class, clause): return self.weight_banks[the_class].get_weights()[clause] def set_weight(self, the_class, clause, weight): self.weight_banks[the_class].get_weights()[clause] = weight class TMMultiChannelClassifier(TMBasis): def __init__(self, number_of_clauses, global_T, T, s, platform = 'CPU', patch_dim=None, feature_negation=True, boost_true_positive_feedback=1, number_of_state_bits_ta=8, weighted_clauses=False, clause_drop_p = 0.0, 
literal_drop_p = 0.0): super().__init__(number_of_clauses, T, s, platform = platform, patch_dim=patch_dim, feature_negation=feature_negation, boost_true_positive_feedback=boost_true_positive_feedback, number_of_state_bits_ta=number_of_state_bits_ta, weighted_clauses=weighted_clauses, clause_drop_p = clause_drop_p, literal_drop_p = literal_drop_p) self.global_T = global_T def initialize(self, X, Y): self.number_of_classes = int(np.max(Y) + 1) if self.platform == 'CPU': self.clause_bank = ClauseBank(X[0], self.number_of_clauses, self.number_of_state_bits_ta, self.number_of_state_bits_ind, self.patch_dim) elif self.platform == 'CUDA': from tmu.clause_bank_cuda import ClauseBankCUDA self.clause_bank = ClauseBankCUDA(X[0], self.number_of_clauses, self.number_of_state_bits_ta, self.patch_dim) else: print("Unknown Platform") sys.exit(-1) self.weight_banks = [] for i in range(self.number_of_classes): self.weight_banks.append(WeightBank(np.random.choice([-1,1], size=self.number_of_clauses).astype(np.int32))) self.X_train = {} self.X_test = {} for c in range(X.shape[0]): self.X_train[c] = np.zeros(0, dtype=np.uint32) self.X_test[c] = np.zeros(0, dtype=np.uint32) self.encoded_X_train = {} self.encoded_X_test = {} def fit(self, X, Y, shuffle=True): if self.initialized == False: self.initialize(X, Y) self.initialized = True for c in range(X.shape[0]): if not np.array_equal(self.X_train[c], X[c]): self.encoded_X_train[c] = self.clause_bank.prepare_X(X[c]) self.X_train[c] = X[c].copy() Ym = np.ascontiguousarray(Y).astype(np.uint32) # Clauses are dropped based on their weights clause_active = np.ones(self.number_of_clauses, dtype=np.uint32) clause_score = np.zeros(self.number_of_clauses, dtype=np.int32) for i in range(self.number_of_classes): clause_score += np.abs(self.weight_banks[i].get_weights()) deactivate = np.random.choice(np.arange(self.number_of_clauses), size=int(self.number_of_clauses*self.clause_drop_p), p = clause_score / clause_score.sum()) for d in 
range(deactivate.shape[0]): clause_active[deactivate[d]] = 0 # Literals are dropped based on their frequency literal_active = (np.zeros(self.clause_bank.number_of_ta_chunks, dtype=np.uint32) | ~0).astype(np.uint32) literal_clause_frequency = self.literal_clause_frequency() if literal_clause_frequency.sum() > 0: deactivate = np.random.choice(np.arange(self.clause_bank.number_of_literals), size=int(self.clause_bank.number_of_literals*self.literal_drop_p), p = literal_clause_frequency / literal_clause_frequency.sum()) else: deactivate = np.random.choice(np.arange(self.clause_bank.number_of_literals), size=int(self.clause_bank.number_of_literals*self.literal_drop_p)) for d in range(deactivate.shape[0]): ta_chunk = deactivate[d] // 32 chunk_pos = deactivate[d] % 32 literal_active[ta_chunk] &= (~(1 << chunk_pos)) if not self.feature_negation: for k in range(self.clause_bank.number_of_literals//2, self.clause_bank.number_of_literals): ta_chunk = k // 32 chunk_pos = k % 32 literal_active[ta_chunk] &= (~(1 << chunk_pos)) literal_active = literal_active.astype(np.uint32) local_class_sum = np.empty(X.shape[0], dtype=np.int32) shuffled_index = np.arange(X.shape[1]) if shuffle: np.random.shuffle(shuffled_index) for e in shuffled_index: target = Ym[e] clause_outputs = [] for c in range(X.shape[0]): clause_outputs.append(self.clause_bank.calculate_clause_outputs_update(literal_active, self.encoded_X_train[c], e).copy()) global_class_sum = 0 for c in range(X.shape[0]): local_class_sum[c] = np.dot(clause_active * self.weight_banks[target].get_weights(), clause_outputs[c]).astype(np.int32) local_class_sum[c] = np.clip(local_class_sum[c], -self.T, self.T) global_class_sum += local_class_sum[c] global_class_sum = np.clip(global_class_sum, -self.global_T[target][0], self.global_T[target][1]) global_update_p = 1.0*(self.global_T[target][1] - global_class_sum)/(self.global_T[target][0]+self.global_T[target][1]) for c in range(X.shape[0]): local_update_p = 1.0*(self.T - 
local_class_sum[c])/(2*self.T) update_p = np.minimum(local_update_p, global_update_p) self.clause_bank.type_i_feedback(update_p, self.s[target], self.boost_true_positive_feedback, clause_active*(self.weight_banks[target].get_weights() >= 0), literal_active, self.encoded_X_train[c], e) self.clause_bank.type_ii_feedback(update_p, clause_active*(self.weight_banks[target].get_weights() < 0), literal_active, self.encoded_X_train[c], e) self.weight_banks[target].increment(clause_outputs[c], update_p, clause_active, True) not_target = np.random.randint(self.number_of_classes) while not_target == target: not_target = np.random.randint(self.number_of_classes) global_class_sum = 0.0 for c in range(X.shape[0]): local_class_sum[c] = np.dot(clause_active * self.weight_banks[not_target].get_weights(), clause_outputs[c]).astype(np.int32) local_class_sum[c] = np.clip(local_class_sum[c], -self.T, self.T) global_class_sum += local_class_sum[c] global_class_sum = np.clip(global_class_sum, -self.global_T[not_target][0], self.global_T[not_target][1]) global_update_p = 1.0*(self.global_T[not_target][0] + global_class_sum)/(self.global_T[not_target][0]+self.global_T[not_target][1]) for c in range(X.shape[0]): local_update_p = 1.0*(self.T + local_class_sum[c])/(2*self.T) update_p = np.minimum(local_update_p, global_update_p) self.clause_bank.type_i_feedback(update_p, self.s[not_target], self.boost_true_positive_feedback, clause_active * (self.weight_banks[not_target].get_weights() < 0), literal_active, self.encoded_X_train[c], e) self.clause_bank.type_ii_feedback(update_p, clause_active*(self.weight_banks[not_target].get_weights() >= 0), literal_active, self.encoded_X_train[c], e) self.weight_banks[not_target].decrement(clause_outputs[c], update_p, clause_active, True) return def predict(self, X): for c in range(X.shape[0]): if not np.array_equal(self.X_test[c], X[c]): self.encoded_X_test[c] = self.clause_bank.prepare_X(X[c]) self.X_test[c] = X[c].copy() Y = 
np.ascontiguousarray(np.zeros(X.shape[1], dtype=np.uint32)) for e in range(X.shape[1]): max_class_sum = -maxsize max_class = 0 clause_outputs = [] for c in range(X.shape[0]): clause_outputs.append(self.clause_bank.calculate_clause_outputs_predict(self.encoded_X_test[c], e).copy()) for i in range(self.number_of_classes): global_class_sum = 1 for c in range(X.shape[0]): local_class_sum = np.dot(self.weight_banks[i].get_weights(), clause_outputs[c]).astype(np.int32) local_class_sum = np.clip(local_class_sum, -self.T, self.T) global_class_sum *= local_class_sum >= 0 global_class_sum = np.clip(global_class_sum, -self.global_T[i][0], self.global_T[i][1]) if global_class_sum > max_class_sum: max_class_sum = global_class_sum max_class = i Y[e] = max_class return Y def clause_precision(self, the_class, positive_polarity, X, Y): clause_outputs = self.transform(X) weights = self.weight_banks[the_class].get_weights() if positive_polarity == 0: positive_clause_outputs = (weights >= 0)[:,np.newaxis].transpose() * clause_outputs true_positive_clause_outputs = clause_outputs[Y==the_class].sum(axis=0) false_positive_clause_outputs = clause_outputs[Y!=the_class].sum(axis=0) else: positive_clause_outputs = (weights < 0)[:,np.newaxis].transpose() * clause_outputs true_positive_clause_outputs = clause_outputs[Y!=the_class].sum(axis=0) false_positive_clause_outputs = clause_outputs[Y==the_class].sum(axis=0) return np.where(true_positive_clause_outputs + false_positive_clause_outputs == 0, 0, 1.0*true_positive_clause_outputs/(true_positive_clause_outputs + false_positive_clause_outputs)) def clause_recall(self, the_class, positive_polarity, X, Y): clause_outputs = self.transform(X) weights = self.weight_banks[the_class].get_weights() if positive_polarity == 0: positive_clause_outputs = (weights >= 0)[:,np.newaxis].transpose() * clause_outputs true_positive_clause_outputs = positive_clause_outputs[Y==the_class].sum(axis=0) else: positive_clause_outputs = (weights < 
0)[:,np.newaxis].transpose() * clause_outputs true_positive_clause_outputs = positive_clause_outputs[Y!=the_class].sum(axis=0) return true_positive_clause_outputs / Y[Y==the_class].shape[0] def get_weight(self, the_class, clause): return self.weight_banks[the_class].get_weights()[clause] def set_weight(self, the_class, clause, weight): self.weight_banks[the_class].get_weights()[clause] = weight class TMOneVsOneClassifier(TMBasis): def __init__(self, number_of_clauses, T, s, platform = 'CPU', patch_dim=None, feature_negation=True, boost_true_positive_feedback=1, number_of_state_bits_ta=8, weighted_clauses=False, clause_drop_p = 0.0, literal_drop_p = 0.0): super().__init__(number_of_clauses, T, s, platform = platform, patch_dim=patch_dim, feature_negation=feature_negation, boost_true_positive_feedback=boost_true_positive_feedback, number_of_state_bits_ta=number_of_state_bits_ta, weighted_clauses=weighted_clauses, clause_drop_p = clause_drop_p, literal_drop_p = literal_drop_p) def initialize(self, X, Y): self.number_of_classes = int(np.max(Y) + 1) self.number_of_outputs = self.number_of_classes * (self.number_of_classes-1) if self.platform == 'CPU': self.clause_bank = ClauseBank(X, self.number_of_clauses, self.number_of_state_bits_ta, self.number_of_state_bits_ind, self.patch_dim) elif self.platform == 'CUDA': from tmu.clause_bank_cuda import ClauseBankCUDA self.clause_bank = ClauseBankCUDA(X, self.number_of_clauses, self.number_of_state_bits_ta, self.patch_dim) else: print("Unknown Platform") sys.exit(-1) self.weight_banks = [] for i in range(self.number_of_outputs): self.weight_banks.append(WeightBank(np.ones(self.number_of_clauses).astype(np.int32))) def fit(self, X, Y, shuffle=True): if self.initialized == False: self.initialize(X, Y) self.initialized = True if not np.array_equal(self.X_train, X): self.encoded_X_train = self.clause_bank.prepare_X(X) self.X_train = X.copy() Ym = np.ascontiguousarray(Y).astype(np.uint32) clause_active = 
np.ascontiguousarray(np.random.choice(2, self.number_of_clauses, p=[self.clause_drop_p, 1.0 - self.clause_drop_p]).astype(np.int32)) literal_active = (np.zeros(self.clause_bank.number_of_ta_chunks, dtype=np.uint32) | ~0).astype(np.uint32) if not self.feature_negation: for k in range(self.clause_bank.number_of_literals//2, self.clause_bank.number_of_literals): ta_chunk = k // 32 chunk_pos = k % 32 literal_active[ta_chunk] &= (~(1 << chunk_pos)) literal_active = literal_active.astype(np.uint32) shuffled_index = np.arange(X.shape[0]) if shuffle: np.random.shuffle(shuffled_index) for e in shuffled_index: clause_outputs = self.clause_bank.calculate_clause_outputs_update(literal_active, self.encoded_X_train, e) target = Ym[e] not_target = np.random.randint(self.number_of_classes) while not_target == target: not_target = np.random.randint(self.number_of_classes) output = target * (self.number_of_classes-1) + not_target - (not_target > target) class_sum = np.dot(clause_active * self.weight_banks[output].get_weights(), clause_outputs).astype(np.int32) class_sum = np.clip(class_sum, -self.T, self.T) update_p = (self.T - class_sum)/(2*self.T) self.clause_bank.type_i_feedback(update_p, self.s, self.boost_true_positive_feedback, clause_active*(self.weight_banks[output].get_weights() >= 0), literal_active, self.encoded_X_train, e) self.clause_bank.type_ii_feedback(update_p, clause_active*(self.weight_banks[output].get_weights() < 0), literal_active, self.encoded_X_train, e) self.weight_banks[output].increment(clause_outputs, update_p, clause_active, True) output = not_target * (self.number_of_classes-1) + target - (target > not_target) class_sum = np.dot(clause_active * self.weight_banks[output].get_weights(), clause_outputs).astype(np.int32) class_sum = np.clip(class_sum, -self.T, self.T) update_p = (self.T + class_sum)/(2*self.T) self.clause_bank.type_i_feedback(update_p, self.s, self.boost_true_positive_feedback, clause_active * (self.weight_banks[output].get_weights() < 0), 
literal_active, self.encoded_X_train, e) self.clause_bank.type_ii_feedback(update_p, clause_active*(self.weight_banks[output].get_weights() >= 0), literal_active, self.encoded_X_train, e) self.weight_banks[output].decrement(clause_outputs, update_p, clause_active, True) return def predict(self, X): if not np.array_equal(self.X_test, X): self.encoded_X_test = self.clause_bank.prepare_X(X) self.X_test = X.copy() Y = np.ascontiguousarray(np.zeros(X.shape[0], dtype=np.uint32)) for e in range(X.shape[0]): clause_outputs = self.clause_bank.calculate_clause_outputs_predict(self.encoded_X_test, e) max_class_sum = -self.T*self.number_of_classes max_class = 0 for i in range(self.number_of_classes): class_sum = 0 for output in range(i * (self.number_of_classes - 1), (i+1) * (self.number_of_classes-1)): output_sum = np.dot(self.weight_banks[output].get_weights(), clause_outputs).astype(np.int32) output_sum = np.clip(output_sum, -self.T, self.T) class_sum += output_sum if class_sum > max_class_sum: max_class_sum = class_sum max_class = i Y[e] = max_class return Y def clause_precision(self, the_class, positive_polarity, X, Y): clause_outputs = self.transform(X) precision = np.zeros((self.number_of_classes - 1, self.number_of_clauses)) for i in range(self.number_of_classes - 1): other_class = i + (i >= the_class) output = the_class * (self.number_of_classes - 1) + i weights = self.weight_banks[output].get_weights() if positive_polarity: positive_clause_outputs = (weights >= 0)[:,np.newaxis].transpose() * clause_outputs true_positive_clause_outputs = positive_clause_outputs[Y==the_class].sum(axis=0) false_positive_clause_outputs = positive_clause_outputs[Y==other_class].sum(axis=0) precision[i] = np.where(true_positive_clause_outputs + false_positive_clause_outputs == 0, 0, true_positive_clause_outputs/(true_positive_clause_outputs + false_positive_clause_outputs)) else: positive_clause_outputs = (weights < 0)[:,np.newaxis].transpose() * clause_outputs true_positive_clause_outputs 
= positive_clause_outputs[Y==other_class].sum(axis=0) false_positive_clause_outputs = positive_clause_outputs[Y==the_class].sum(axis=0) precision[i] = np.where(true_positive_clause_outputs + false_positive_clause_outputs == 0, 0, true_positive_clause_outputs/(true_positive_clause_outputs + false_positive_clause_outputs)) return precision def clause_recall(self, the_class, positive_polarity, X, Y): clause_outputs = self.transform(X) recall = np.zeros((self.number_of_classes - 1, self.number_of_clauses)) for i in range(self.number_of_classes - 1): other_class = i + (i >= the_class) output = the_class * (self.number_of_classes - 1) + i weights = self.weight_banks[output].get_weights() if positive_polarity: positive_clause_outputs = (weights >= 0)[:,np.newaxis].transpose() * clause_outputs true_positive_clause_outputs = positive_clause_outputs[Y==the_class].sum(axis=0) recall[i] = true_positive_clause_outputs / Y[Y==the_class].shape[0] else: positive_clause_outputs = (weights < 0)[:,np.newaxis].transpose() * clause_outputs true_positive_clause_outputs = positive_clause_outputs[Y==other_class].sum(axis=0) recall[i] = true_positive_clause_outputs / Y[Y==other_class].shape[0] return recall def get_weight(self, output, clause): return self.weight_banks[output].get_weights()[clause] def set_weight(self, output, weight): self.weight_banks[output].get_weights()[output] = weight class TMRegressor(TMBasis): def __init__(self, number_of_clauses, T, s, platform='CPU', patch_dim=None, feature_negation=True, boost_true_positive_feedback=1, number_of_state_bits_ta=8, weighted_clauses=False, clause_drop_p = 0.0, literal_drop_p = 0.0): super().__init__(number_of_clauses, T, s, platform=platform, patch_dim=patch_dim, feature_negation=feature_negation, boost_true_positive_feedback=boost_true_positive_feedback, number_of_state_bits_ta=number_of_state_bits_ta, weighted_clauses=weighted_clauses, clause_drop_p = clause_drop_p, literal_drop_p = literal_drop_p) def initialize(self, X, Y): 
self.max_y = np.max(Y) self.min_y = np.min(Y) if self.platform == 'CPU': self.clause_bank = ClauseBank(X, self.number_of_clauses, self.number_of_state_bits_ta, self.number_of_state_bits_ind, self.patch_dim) elif self.platform == 'CUDA': from tmu.clause_bank_cuda import ClauseBankCUDA self.clause_bank = ClauseBankCUDA(X, self.number_of_clauses, self.number_of_state_bits_ta, self.patch_dim) else: print("Unknown Platform") sys.exit(-1) self.weight_bank = WeightBank(np.ones(self.number_of_clauses).astype(np.int32)) def fit(self, X, Y, shuffle=True): if self.initialized == False: self.initialize(X, Y) self.initialized = True if not np.array_equal(self.X_train, X): self.encoded_X_train = self.clause_bank.prepare_X(X) self.X_train = X.copy() encoded_Y = np.ascontiguousarray(((Y - self.min_y)/(self.max_y - self.min_y)*self.T).astype(np.int32)) clause_active = np.ascontiguousarray(np.random.choice(2, self.number_of_clauses, p=[self.clause_drop_p, 1.0 - self.clause_drop_p]).astype(np.int32)) literal_active = (np.zeros(self.clause_bank.number_of_ta_chunks, dtype=np.uint32) | ~0).astype(np.uint32) if not self.feature_negation: for k in range(self.clause_bank.number_of_literals//2, self.clause_bank.number_of_literals): ta_chunk = k // 32 chunk_pos = k % 32 literal_active[ta_chunk] &= (~(1 << chunk_pos)) literal_active = literal_active.astype(np.uint32) shuffled_index = np.arange(X.shape[0]) if shuffle: np.random.shuffle(shuffled_index) for e in shuffled_index: clause_outputs = self.clause_bank.calculate_clause_outputs_update(literal_active, self.encoded_X_train, e) pred_y = np.dot(clause_active * self.weight_bank.get_weights(), clause_outputs).astype(np.int32) pred_y = np.clip(pred_y, 0, self.T) prediction_error = pred_y - encoded_Y[e]; update_p = (1.0*prediction_error/self.T)**2 if pred_y < encoded_Y[e]: self.clause_bank.type_i_feedback(update_p, self.s, self.boost_true_positive_feedback, clause_active, literal_active, self.encoded_X_train, e) if self.weighted_clauses: 
self.weight_bank.increment(clause_outputs, update_p, clause_active, False) elif pred_y > encoded_Y[e]: self.clause_bank.type_ii_feedback(update_p, clause_active, literal_active, self.encoded_X_train, e) if self.weighted_clauses: self.weight_bank.decrement(clause_outputs, update_p, clause_active, False) return def predict(self, X): if not np.array_equal(self.X_test, X): self.encoded_X_test = self.clause_bank.prepare_X(X) self.X_test = X.copy() Y = np.ascontiguousarray(np.zeros(X.shape[0])) for e in range(X.shape[0]): clause_outputs = self.clause_bank.calculate_clause_outputs_predict(self.encoded_X_test, e) pred_y = np.dot(self.weight_bank.get_weights(), clause_outputs).astype(np.int32) Y[e] = 1.0*pred_y * (self.max_y - self.min_y)/(self.T) + self.min_y return Y def get_weight(self, clause): return self.weight_bank.get_weights()[clause] def set_weight(self, clause, weight): self.weight_banks.get_weights()[clause] = weight
51.236282
508
0.764458
7,319
45,754
4.455117
0.038803
0.050541
0.050419
0.033796
0.906094
0.887018
0.86739
0.844021
0.82378
0.799338
0
0.013436
0.115094
45,754
892
509
51.293722
0.791914
0.031582
0
0.634286
0
0
0.003003
0
0
0
0
0
0
1
0.077143
false
0
0.018571
0.01
0.165714
0.007143
0
0
0
null
0
0
0
1
1
1
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
6
4b71b02c448bd0e0d0e5e3f089e91681339dbe6b
132
py
Python
src/10000/10992.py3.py
upple/BOJ
e6dbf9fd17fa2b458c6a781d803123b14c18e6f1
[ "MIT" ]
8
2018-04-12T15:54:09.000Z
2020-06-05T07:41:15.000Z
src/10000/10992.py3.py
upple/BOJ
e6dbf9fd17fa2b458c6a781d803123b14c18e6f1
[ "MIT" ]
null
null
null
src/10000/10992.py3.py
upple/BOJ
e6dbf9fd17fa2b458c6a781d803123b14c18e6f1
[ "MIT" ]
null
null
null
n = int(input()) print(' '*(n-1)+'*') for i in range(1, n-1): print(' '*(n-i-1)+'*'+' '*(2*i-1)+'*') if n>1: print('*'*(2*n-1))
22
42
0.401515
27
132
1.962963
0.407407
0.150943
0.264151
0
0
0
0
0
0
0
0
0.080357
0.151515
132
6
43
22
0.392857
0
0
0
0
0
0.052632
0
0
0
0
0
0
1
0
false
0
0
0
0
0.6
1
0
1
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
6
29a2e1942a02a2f73d5f928c43adbe8eb185c867
34
py
Python
__init__.py
joegagliardo/sqldf
2d90496a0fc041ac989b3ee538f4a43a283dceb2
[ "Apache-2.0" ]
2
2022-03-23T14:05:43.000Z
2022-03-31T05:09:24.000Z
__init__.py
joegagliardo/sqldf
2d90496a0fc041ac989b3ee538f4a43a283dceb2
[ "Apache-2.0" ]
2
2022-03-23T14:34:13.000Z
2022-03-31T06:37:40.000Z
__init__.py
joegagliardo/sqldf
2d90496a0fc041ac989b3ee538f4a43a283dceb2
[ "Apache-2.0" ]
null
null
null
from bettersql.sqldf import sqldf
17
33
0.852941
5
34
5.8
0.8
0
0
0
0
0
0
0
0
0
0
0
0.117647
34
1
34
34
0.966667
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
6