hexsha
string
size
int64
ext
string
lang
string
max_stars_repo_path
string
max_stars_repo_name
string
max_stars_repo_head_hexsha
string
max_stars_repo_licenses
list
max_stars_count
int64
max_stars_repo_stars_event_min_datetime
string
max_stars_repo_stars_event_max_datetime
string
max_issues_repo_path
string
max_issues_repo_name
string
max_issues_repo_head_hexsha
string
max_issues_repo_licenses
list
max_issues_count
int64
max_issues_repo_issues_event_min_datetime
string
max_issues_repo_issues_event_max_datetime
string
max_forks_repo_path
string
max_forks_repo_name
string
max_forks_repo_head_hexsha
string
max_forks_repo_licenses
list
max_forks_count
int64
max_forks_repo_forks_event_min_datetime
string
max_forks_repo_forks_event_max_datetime
string
content
string
avg_line_length
float64
max_line_length
int64
alphanum_fraction
float64
qsc_code_num_words_quality_signal
int64
qsc_code_num_chars_quality_signal
float64
qsc_code_mean_word_length_quality_signal
float64
qsc_code_frac_words_unique_quality_signal
float64
qsc_code_frac_chars_top_2grams_quality_signal
float64
qsc_code_frac_chars_top_3grams_quality_signal
float64
qsc_code_frac_chars_top_4grams_quality_signal
float64
qsc_code_frac_chars_dupe_5grams_quality_signal
float64
qsc_code_frac_chars_dupe_6grams_quality_signal
float64
qsc_code_frac_chars_dupe_7grams_quality_signal
float64
qsc_code_frac_chars_dupe_8grams_quality_signal
float64
qsc_code_frac_chars_dupe_9grams_quality_signal
float64
qsc_code_frac_chars_dupe_10grams_quality_signal
float64
qsc_code_frac_chars_replacement_symbols_quality_signal
float64
qsc_code_frac_chars_digital_quality_signal
float64
qsc_code_frac_chars_whitespace_quality_signal
float64
qsc_code_size_file_byte_quality_signal
float64
qsc_code_num_lines_quality_signal
float64
qsc_code_num_chars_line_max_quality_signal
float64
qsc_code_num_chars_line_mean_quality_signal
float64
qsc_code_frac_chars_alphabet_quality_signal
float64
qsc_code_frac_chars_comments_quality_signal
float64
qsc_code_cate_xml_start_quality_signal
float64
qsc_code_frac_lines_dupe_lines_quality_signal
float64
qsc_code_cate_autogen_quality_signal
float64
qsc_code_frac_lines_long_string_quality_signal
float64
qsc_code_frac_chars_string_length_quality_signal
float64
qsc_code_frac_chars_long_word_length_quality_signal
float64
qsc_code_frac_lines_string_concat_quality_signal
float64
qsc_code_cate_encoded_data_quality_signal
float64
qsc_code_frac_chars_hex_words_quality_signal
float64
qsc_code_frac_lines_prompt_comments_quality_signal
float64
qsc_code_frac_lines_assert_quality_signal
float64
qsc_codepython_cate_ast_quality_signal
float64
qsc_codepython_frac_lines_func_ratio_quality_signal
float64
qsc_codepython_cate_var_zero_quality_signal
bool
qsc_codepython_frac_lines_pass_quality_signal
float64
qsc_codepython_frac_lines_import_quality_signal
float64
qsc_codepython_frac_lines_simplefunc_quality_signal
float64
qsc_codepython_score_lines_no_logic_quality_signal
float64
qsc_codepython_frac_lines_print_quality_signal
float64
qsc_code_num_words
int64
qsc_code_num_chars
int64
qsc_code_mean_word_length
int64
qsc_code_frac_words_unique
null
qsc_code_frac_chars_top_2grams
int64
qsc_code_frac_chars_top_3grams
int64
qsc_code_frac_chars_top_4grams
int64
qsc_code_frac_chars_dupe_5grams
int64
qsc_code_frac_chars_dupe_6grams
int64
qsc_code_frac_chars_dupe_7grams
int64
qsc_code_frac_chars_dupe_8grams
int64
qsc_code_frac_chars_dupe_9grams
int64
qsc_code_frac_chars_dupe_10grams
int64
qsc_code_frac_chars_replacement_symbols
int64
qsc_code_frac_chars_digital
int64
qsc_code_frac_chars_whitespace
int64
qsc_code_size_file_byte
int64
qsc_code_num_lines
int64
qsc_code_num_chars_line_max
int64
qsc_code_num_chars_line_mean
int64
qsc_code_frac_chars_alphabet
int64
qsc_code_frac_chars_comments
int64
qsc_code_cate_xml_start
int64
qsc_code_frac_lines_dupe_lines
int64
qsc_code_cate_autogen
int64
qsc_code_frac_lines_long_string
int64
qsc_code_frac_chars_string_length
int64
qsc_code_frac_chars_long_word_length
int64
qsc_code_frac_lines_string_concat
null
qsc_code_cate_encoded_data
int64
qsc_code_frac_chars_hex_words
int64
qsc_code_frac_lines_prompt_comments
int64
qsc_code_frac_lines_assert
int64
qsc_codepython_cate_ast
int64
qsc_codepython_frac_lines_func_ratio
int64
qsc_codepython_cate_var_zero
int64
qsc_codepython_frac_lines_pass
int64
qsc_codepython_frac_lines_import
int64
qsc_codepython_frac_lines_simplefunc
int64
qsc_codepython_score_lines_no_logic
int64
qsc_codepython_frac_lines_print
int64
effective
string
hits
int64
a37d6077908b7b772bc007466596f71de1834f6c
74
py
Python
test_QandT.py
Jul-Tedyputro/python-sample-vscode-flask-tutorial
8878615add25cad7ee59c804d5aba1e86e5077e2
[ "MIT" ]
null
null
null
test_QandT.py
Jul-Tedyputro/python-sample-vscode-flask-tutorial
8878615add25cad7ee59c804d5aba1e86e5077e2
[ "MIT" ]
null
null
null
test_QandT.py
Jul-Tedyputro/python-sample-vscode-flask-tutorial
8878615add25cad7ee59c804d5aba1e86e5077e2
[ "MIT" ]
null
null
null
def test_eggplantGUI(): print ('Mr Moritz is in action') assert False
18.5
34
0.716216
11
74
4.727273
1
0
0
0
0
0
0
0
0
0
0
0
0.189189
74
3
35
24.666667
0.866667
0
0
0
0
0
0.297297
0
0
0
0
0
0.333333
1
0.333333
true
0
0
0
0.333333
0.333333
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
0
0
0
0
4
a383a3e2c3c02b404a91c75a2ae26afbd581d269
254
py
Python
web/WebView/admin.py
shinoyasan/intelli-switch
d32fc1617c5c145e0bb67bafd05acd292a761d4c
[ "MIT" ]
12
2021-01-28T02:45:41.000Z
2022-02-13T16:27:15.000Z
web/WebView/admin.py
shinoyasan/intelli-switch
d32fc1617c5c145e0bb67bafd05acd292a761d4c
[ "MIT" ]
null
null
null
web/WebView/admin.py
shinoyasan/intelli-switch
d32fc1617c5c145e0bb67bafd05acd292a761d4c
[ "MIT" ]
3
2021-02-01T03:47:38.000Z
2021-03-04T10:31:53.000Z
from django.contrib import admin from .models import ServerInfo,SampleData,DeviceControl,UserApp # Register your models here. admin.site.register(ServerInfo) admin.site.register(SampleData) admin.site.register(DeviceControl) admin.site.register(UserApp)
31.75
63
0.84252
32
254
6.6875
0.4375
0.168224
0.317757
0
0
0
0
0
0
0
0
0
0.066929
254
8
64
31.75
0.902954
0.102362
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
6e7225c8632d4f0a73564353db4d878ee8e3ec87
28,125
py
Python
specification/tools/VMHprocessMappings1.py
iptc/video-metadata-hub
e3b03f7197801fd413999d9d6e483a4477796f81
[ "MIT" ]
1
2021-09-28T10:56:19.000Z
2021-09-28T10:56:19.000Z
specification/tools/VMHprocessMappings1.py
iptc/video-metadata-hub
e3b03f7197801fd413999d9d6e483a4477796f81
[ "MIT" ]
12
2021-06-17T08:35:45.000Z
2022-02-09T16:09:01.000Z
specification/tools/VMHprocessMappings1.py
iptc/video-metadata-hub
e3b03f7197801fd413999d9d6e483a4477796f81
[ "MIT" ]
null
null
null
#!/usr/bin/env python3 """ Python script for retrieving IPTC Video Metadata Hub mapping data from a Google sheet The retrieved data are transformed in HTML as saved as HTML page. For IPTC-internal use Creator: Michael Steidl History: 2016-11-25 mws: project started, download and HTML output ok 2020-06-15 BQ: Updated and checked into GitHub """ from __future__ import print_function import pickle import os import sys from googleapiclient.discovery import build from google_auth_oauthlib.flow import InstalledAppFlow from google.auth.transport.requests import Request from lxml import etree as ET SCOPES = 'https://www.googleapis.com/auth/spreadsheets.readonly' CLIENT_SECRET_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'client_secret.json') APPLICATION_NAME = 'Video Metadata Hub Documentation Generator' # Constant values StdVersion = "1.3" HeaderAppendix = "" # could be " - D-R-A-F-T - " IPTCApprovalDate = "13 May 2020" IPTCRevisionDate = "13 May 2020" CopyrightYear = "2020" def get_credentials(): """Gets valid user credentials from storage. If nothing has been stored, or if the stored credentials are invalid, the OAuth2 flow is completed to obtain the new credentials. Returns: Credentials, the obtained credential. """ creds = None # The file token.pickle stores the user's access and refresh tokens, and is # created automatically when the authorization flow completes for the first # time. if os.path.exists('token.pickle'): with open('token.pickle', 'rb') as token: creds = pickle.load(token) # If there are no (valid) credentials available, let the user log in. 
if not creds or not creds.valid: if creds and creds.expired and creds.refresh_token: creds.refresh(Request()) else: flow = InstalledAppFlow.from_client_secrets_file( CLIENT_SECRET_FILE, SCOPES) creds = flow.run_local_server(port=0) # Save the credentials for the next run with open('token.pickle', 'wb') as token: pickle.dump(creds, token) return creds def createSpecificMapping(valuesProp, headingtext1, headingtext2, findmoreaturl, mapIdx, filename): # create the HTML document xroot = ET.Element('html') head = ET.SubElement(xroot, 'head') title = ET.SubElement(head, 'title') title.text = 'Video Metadata Hub Mapping' metachset = ET.SubElement(head, 'meta', {'http-equiv': "Content-Type", 'content': "text/html; charset=utf-8"}) csslink1 = ET.SubElement(head, 'link', {'type': 'text/css', 'rel': 'stylesheet', 'href': 'iptcspecs1.css'}) body = ET.SubElement(xroot, 'body') pageheader = ET.SubElement(body, 'h1', {'class':'pageheader'}) iptcanc = ET.SubElement(pageheader, 'a', {'href':'https://iptc.org'}) iptcimg = ET.SubElement(iptcanc, 'img', {'src':'https://iptc.org/download/resources/logos/iptc-gr_70x70.jpg', 'align':'left', 'border':'0'}) pageheader.text = headingtext1 seeotherdoc1 = ET.SubElement(body, 'p', {'class':'note1'}) seeotherdoc1.text = 'Return to ' seeotherdoc1link1 = ET.SubElement(seeotherdoc1, 'a', {'href':'IPTC-VideoMetadataHub-mapping-Rec_'+StdVersion+'.html'}) seeotherdoc1link1.text = 'all recommended mappings of the Video Metadata Hub.' seeotherdoc2 = ET.SubElement(body, 'p', {'class':'note1'}) seeotherdoc2.text = 'See the ' seeotherdoc1link2 = ET.SubElement(seeotherdoc2, 'a', {'href':'IPTC-VideoMetadataHub-props-Rec_'+StdVersion+'.html'}) seeotherdoc1link2.text = 'specification of Video Metadata Hub properties' docdate = ET.SubElement(body, 'p', {'class':'note1'}) docdate.text = 'Mapping recommended on ' + IPTCApprovalDate + '. Document revision as of ' + IPTCRevisionDate + '.' 
copyrightnotice = ET.fromstring('<p class="smallnote1">Copyright © ' + CopyrightYear + ', <a href="https://iptc.org">IPTC</a> - all rights reserved. Published under the Creative Commons Attribution 4.0 license <a href="http://creativecommons.org/licenses/by/4.0/">http://creativecommons.org/licenses/by/4.0/</a></p>') body.append(copyrightnotice) mappedstdnote = ET.SubElement(body, 'p', {'class':'note1'}) mappedstdnote.text = 'In this table the columns with a blue header are defined by the Video Metadata Hub, the column with the green header is defined by ' + headingtext2 propnote1 = ET.fromstring('<p class="note1">Note on the column headers:<br />EBUcore: based on the EBU Core Metadata Standard.<br />XMP: based on the ISO XMP standard.<br />PVMD: a specification of JSON properties for Photo and Video MetaData by IPTC (aka phovidmd).</p>') body.append(propnote1) if not valuesProp: print('No Property data found.') else: table = ET.SubElement(body, 'table', {'class':'spec1 vmhmapping'}) thead = ET.SubElement(table, 'thead') throw = ET.SubElement(thead, 'tr') thcol1 = ET.SubElement(throw, 'th', {'class':'hdrcol1'}) thcol1.text = 'Property Group' thcol2 = ET.SubElement(throw, 'th', {'class':'hdrcol2'}) thcol2.text = 'Property Name' thcol3 = ET.SubElement(throw, 'th', {'class':'hdrcol3'}) thcol3.text = 'Definition / Semantics' """ thcol4 = ET.SubElement(throw, 'th', {'class':'hdrcol4'}) thcol4.text = 'Basic Type/Cardinality' """ thcol5 = ET.SubElement(throw, 'th', {'class':'hdrcol5'}) thcol5.text = 'EBUcore' thcol6 = ET.SubElement(throw, 'th', {'class':'hdrcol6'}) thcol6.text = 'XMP' thcol7 = ET.SubElement(throw, 'th', {'class':'hdrcol7'}) thcol7.text = 'PVMD JSON' thcol8 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol8.text = headingtext2 # second row with "find more at ..." 
links throw = ET.SubElement(thead, 'tr') thcol1 = ET.SubElement(throw, 'td', {'class':'hdrcol1'}) thcol1.text = ' ' thcol2 = ET.SubElement(throw, 'td', {'class':'hdrcol2'}) thcol2.text = ' ' thcol3 = ET.SubElement(throw, 'td', {'class':'hdrcol3'}) thcol3.text = ' ' """ thcol4 = ET.SubElement(throw, 'td', {'class':'hdrcol4'}) thcol4.text = '' """ moreatlink = valuesProp[0][4] colcode = ET.fromstring( '<td class="hdrcolIptc"><a href="' + moreatlink + '" target="_blank">Find more about it at ...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][5] colcode = ET.fromstring( '<td class="hdrcolIptc"><a href="' + moreatlink + '" target="_blank">Find more about it at ...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][6] colcode = ET.fromstring( '<td class="hdrcolIptc"><a href="' + moreatlink + '" target="_blank">Find more about it at ...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][mapIdx] if moreatlink != '': colcode = ET.fromstring( '<td class="hdrcolNoniptc"><a href="' + moreatlink + '" target="_blank">Find more about it at ...</a></td>') throw.append(colcode) else: colcode = ET.fromstring( '<td class="hdrcolNoniptc"> </td>') throw.append(colcode) tbody = ET.SubElement(table, 'tbody') for rowcounter in range(2, 186): xrow = ET.SubElement(tbody, 'tr') teststr = valuesProp[rowcounter][0] if teststr == 'Property Structures (PS)': xrow.set('style', 'background-color: #009999;') if teststr.find('PS', 0) == 0: xrow.set('style', 'background-color: #00cccc;') xcell1 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][0] except: valstr = ' ' xcell1.text = valstr xcell2 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][1] except: valstr = ' ' xcell2.text = valstr xcell3 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][2] except: valstr = ' ' xcell3.text = valstr """ xcell4 = ET.SubElement(xrow, 'td', { 'class': 
'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][3] except: valstr = ' ' xcell4.text = valstr """ xcell5 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][4] except: valstr = ' ' xcell5.text = valstr xcell6 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][5] except: valstr = ' ' xcell6.text = valstr xcell7 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][6] except: valstr = ' ' xcell7.text = valstr xcell8 = ET.SubElement(xrow, 'td', { 'class': 'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][mapIdx] except: valstr = ' ' xcell8.text = valstr with open(filename, 'w') as file: file.write(ET.tostring(xroot, pretty_print=True).decode()) def main(): credentials = get_credentials() service = build('sheets', 'v4', credentials=credentials) spreadsheetId = '1TgfvHcsbGvJqmF0iUUnaL-RAdd1lbentmb2LhcM8SDk' rangeName = 'MappingRec 1.3.1!A4:R' result1 = service.spreadsheets().values().get( spreadsheetId=spreadsheetId, range=rangeName).execute() valuesProp = result1.get('values', []) # create the HTML document xroot = ET.Element('html') head = ET.SubElement(xroot, 'head') title = ET.SubElement(head, 'title') title.text = 'Video Metadata Hub Mapping' metachset = ET.SubElement(head, 'meta', {'http-equiv': "Content-Type", 'content': "text/html; charset=utf-8"}) csslink1 = ET.SubElement(head, 'link', {'type': 'text/css', 'rel': 'stylesheet', 'href': 'iptcspecs1.css'}) body = ET.SubElement(xroot, 'body') pageheader = ET.SubElement(body, 'h1', {'class':'pageheader'}) iptcanc = ET.SubElement(pageheader, 'a', {'href':'https://iptc.org'}) iptcimg = ET.SubElement(iptcanc, 'img', {'src':'https://iptc.org/download/resources/logos/iptc-gr_70x70.jpg', 'align':'left', 'border':'0'}) pageheader.text = 'IPTC Video Metadata Hub - Recommendation '+ StdVersion +' / all Mappings' + HeaderAppendix seeotherdoc1 = ET.SubElement(body, 'p', {'class':'note1'}) 
seeotherdoc1.text = 'See the ' seeotherdoc1link1 = ET.SubElement(seeotherdoc1, 'a', {'href':'IPTC-VideoMetadataHub-props-Rec_'+StdVersion+'.html'}) seeotherdoc1link1.text = 'specification of Video Metadata Hub properties' docdate = ET.SubElement(body, 'p', {'class':'note1'}) docdate.text = 'Mapping recommended on ' + IPTCApprovalDate + '. Document revision as of ' + IPTCRevisionDate + '.' copyrightnotice = ET.fromstring('<p class="smallnote1">Copyright © '+ CopyrightYear + ', <a href="https://iptc.org">IPTC</a> - all rights reserved. Published under the Creative Commons Attribution 4.0 license <a href="http://creativecommons.org/licenses/by/4.0/">http://creativecommons.org/licenses/by/4.0/</a></p>') body.append(copyrightnotice) mappedstdnote = ET.SubElement(body, 'p', {'class':'note1'}) mappedstdnote.text = 'In this table the columns with a blue header are defined by the Video Metadata Hub, the columns with the green or amber headers are defined by other standards or tools.' propnote1 = ET.fromstring('<p class="note1">Note on the column headers:<br />EBUcore: based on the EBU Core Metadata Standard.<br />XMP: based on the ISO XMP standard.<br />PVMD: a specification of JSON properties for Photo and Video MetaData by IPTC (aka phovidmd).</p>') body.append(propnote1) docnote1 = ET.SubElement(body, 'p', {'class':'smallnote1'}) docnote1.text = 'The header of mappings to other standards provides a link to a table including only this mapping (better for printing)' if not valuesProp: print('No Property data found.') else: table = ET.SubElement(body, 'table', {'class':'spec1 vmhmapping'}) thead = ET.SubElement(table, 'thead') throw = ET.SubElement(thead, 'tr') thcol1 = ET.SubElement(throw, 'th', {'class':'hdrcol1'}) thcol1.text = 'Property Group' thcol2 = ET.SubElement(throw, 'th', {'class':'hdrcol2'}) thcol2.text = 'Property Name' thcol3 = ET.SubElement(throw, 'th', {'class':'hdrcol3'}) thcol3.text = 'Definition / Semantics' """ thcol4 = ET.SubElement(throw, 'th', 
{'class':'hdrcol4'}) thcol4.text = 'Basic Type/Cardinality' """ thcol5 = ET.SubElement(throw, 'th', {'class':'hdrcol5'}) thcol5.text = 'EBUcore' thcol6 = ET.SubElement(throw, 'th', {'class':'hdrcol6'}) thcol6.text = 'XMP' thcol7 = ET.SubElement(throw, 'th', {'class':'hdrcol7'}) thcol7.text = 'IPTC PVMD JSON' thcol8 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol8link = ET.SubElement(thcol8,'a', {'href':'IPTC-VideoMetadataHub-mapping-AppleQT-Rec_'+StdVersion+'.html'}) thcol8link.text = 'Apple Quicktime' thcol9 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol9link = ET.SubElement(thcol9,'a', {'href':'IPTC-VideoMetadataHub-mapping-MPEG7-Rec_'+StdVersion+'.html'}) thcol9link.text = 'MPEG 7' thcol10 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol10link = ET.SubElement(thcol10,'a', {'href':'IPTC-VideoMetadataHub-mapping-NewsMLG2-Rec_'+StdVersion+'.html'}) thcol10link.text = 'NewsML-G2' thcol11 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol11link = ET.SubElement(thcol11,'a', {'href':'IPTC-VideoMetadataHub-mapping-PBCore21-Rec_'+StdVersion+'.html'}) thcol11link.text = 'PB Core 2.1' thcol12 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol12link = ET.SubElement(thcol12,'a', {'href':'IPTC-VideoMetadataHub-mapping-SchemaOrg-Rec_'+StdVersion+'.html'}) thcol12link.text = 'Schema.org' # new in 2018-03 thcol13 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol13link = ET.SubElement(thcol13,'a', {'href':'IPTC-VideoMetadataHub-mapping-SonyXDCAM-Rec_'+StdVersion+'.html'}) thcol13link.text = 'Sony XDCAM & Planning' thcol14 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol14link = ET.SubElement(thcol14,'a', {'href':'IPTC-VideoMetadataHub-mapping-Panasonic-SMPTEP2-Rec_'+StdVersion+'.html'}) thcol14link.text = 'Panasonic/SMPTE P2' thcol15 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol15link = ET.SubElement(thcol15,'a', 
{'href':'IPTC-VideoMetadataHub-mapping-CanonVClip-Rec_'+StdVersion+'.html'}) thcol15link.text = 'Canon VideoClip XML' thcol16 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc'}) thcol16link = ET.SubElement(thcol16,'a', {'href':'IPTC-VideoMetadataHub-mapping-exiftool-Rec_'+StdVersion+'.html'}) thcol16link.text = 'exiftool field ids' thcol17 = ET.SubElement(throw, 'th', {'class':'hdrcolNoniptc2'}) thcol17link = ET.SubElement(thcol17,'a', {'href':'IPTC-VideoMetadataHub-mapping-EIDR-Rec_'+StdVersion+'.html'}) thcol17link.text = 'EIDR Data Fields 2.0' # second row with "find more at ..." links throw = ET.SubElement(thead, 'tr') thcol1 = ET.SubElement(throw, 'td', {'class':'hdrcol1'}) thcol1.text = ' ' thcol2 = ET.SubElement(throw, 'td', {'class':'hdrcol2'}) thcol2.text = ' ' thcol3 = ET.SubElement(throw, 'td', {'class':'hdrcol3'}) thcol3.text = ' ' """ thcol4 = ET.SubElement(throw, 'td', {'class':'hdrcol4'}) thcol4.text = '' """ moreatlink = valuesProp[0][4] colcode = ET.fromstring( '<td class="hdrcolIptc"><a href="' + moreatlink + '" target="_blank">Find more about it at ...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][5] colcode = ET.fromstring( '<td class="hdrcolIptc"><a href="' + moreatlink + '" target="_blank">Find more about it at ...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][6] colcode = ET.fromstring( '<td class="hdrcolIptc"><a href="' + moreatlink + '" target="_blank">Find more about it at ...</a></td>') throw.append(colcode) moreatlink = valuesProp[0][7] if moreatlink != '': colcode = ET.fromstring( '<td class="hdrcolNoniptc"><a href="' + moreatlink + '" target="_blank">Find more about it at ...</a></td>') throw.append(colcode) else: colcode = ET.fromstring( '<td class="hdrcolNoniptc"> </td>') throw.append(colcode) moreatlink = valuesProp[0][9] if moreatlink != '': colcode = ET.fromstring( '<td class="hdrcolNoniptc2"><a href="' + moreatlink + '" target="_blank">Find more about it at ...</a></td>') throw.append(colcode) 
else: colcode = ET.fromstring( '<td class="hdrcolNoniptc2"> </td>') throw.append(colcode) moreatlink = valuesProp[0][10] if moreatlink != '': colcode = ET.fromstring( '<td class="hdrcolNoniptc"><a href="' + moreatlink + '" target="_blank">Find more about it at ...</a></td>') throw.append(colcode) else: colcode = ET.fromstring( '<td class="hdrcolNoniptc"> </td>') throw.append(colcode) moreatlink = valuesProp[0][11] if moreatlink != '': colcode = ET.fromstring( '<td class="hdrcolNoniptc2"><a href="' + moreatlink + '" target="_blank">Find more about it at ...</a></td>') throw.append(colcode) else: colcode = ET.fromstring( '<td class="hdrcolNoniptc2"> </td>') throw.append(colcode) moreatlink = valuesProp[0][12] if moreatlink != '': colcode = ET.fromstring( '<td class="hdrcolNoniptc"><a href="' + moreatlink + '" target="_blank">Find more about it at ...</a></td>') throw.append(colcode) else: colcode = ET.fromstring( '<td class="hdrcolNoniptc"> </td>') throw.append(colcode) moreatlink = valuesProp[0][13] if moreatlink != '': colcode = ET.fromstring( '<td class="hdrcolNoniptc2"><a href="' + moreatlink + '" target="_blank">Find more about it at ...</a></td>') throw.append(colcode) else: colcode = ET.fromstring( '<td class="hdrcolNoniptc2"> </td>') throw.append(colcode) moreatlink = valuesProp[0][14] if moreatlink != '': colcode = ET.fromstring( '<td class="hdrcolNoniptc"><a href="' + moreatlink + '" target="_blank">Find more about it at ...</a></td>') throw.append(colcode) else: colcode = ET.fromstring( '<td class="hdrcolNoniptc"> </td>') throw.append(colcode) moreatlink = valuesProp[0][15] if moreatlink != '': colcode = ET.fromstring( '<td class="hdrcolNoniptc2"><a href="' + moreatlink + '" target="_blank">Find more about it at ...</a></td>') throw.append(colcode) else: colcode = ET.fromstring( '<td class="hdrcolNoniptc2"> </td>') throw.append(colcode) moreatlink = valuesProp[0][16] if moreatlink != '': colcode = ET.fromstring( '<td class="hdrcolNoniptc"><a href="' + 
moreatlink + '" target="_blank">Find more about it at ...</a></td>') throw.append(colcode) else: colcode = ET.fromstring( '<td class="hdrcolNoniptc"> </td>') throw.append(colcode) moreatlink = valuesProp[0][17] if moreatlink != '': colcode = ET.fromstring( '<td class="hdrcolNoniptc2"><a href="' + moreatlink + '" target="_blank">Find more about it at ...</a></td>') throw.append(colcode) else: colcode = ET.fromstring( '<td class="hdrcolNoniptc2"> </td>') throw.append(colcode) tbody = ET.SubElement(table, 'tbody') for rowcounter in range(2, 186): xrow = ET.SubElement(tbody, 'tr') teststr = valuesProp[rowcounter][0] if teststr == 'Property Structures (PS)': xrow.set('style', 'background-color: #009999;') if teststr.find('PS', 0) == 0: xrow.set('style', 'background-color: #00cccc;') xcell1 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][0] except: valstr = ' ' xcell1.text = valstr xcell2 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][1] except: valstr = ' ' xcell2.text = valstr xcell3 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][2] except: valstr = ' ' xcell3.text = valstr """ xcell4 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][3] except: valstr = ' ' xcell4.text = valstr """ xcell5 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][4] except: valstr = ' ' xcell5.text = valstr xcell6 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][5] except: valstr = ' ' xcell6.text = valstr xcell7 = ET.SubElement(xrow, 'td', {'class':'bgdcolIptc'}) try: valstr = valuesProp[rowcounter][6] except: valstr = ' ' xcell7.text = valstr xcell8 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][7] except: valstr = ' ' xcell8.text = valstr xcell9 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: 
valstr = valuesProp[rowcounter][9] except: valstr = ' ' xcell9.text = valstr xcell10 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][10] except: valstr = ' ' xcell10.text = valstr xcell11 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][11] except: valstr = ' ' xcell11.text = valstr xcell12 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][12] except: valstr = ' ' xcell12.text = valstr xcell13 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][13] except: valstr = ' ' xcell13.text = valstr xcell14 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][14] except: valstr = ' ' xcell14.text = valstr xcell15 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][15] except: valstr = ' ' xcell15.text = valstr xcell16 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc'}) try: valstr = valuesProp[rowcounter][16] except: valstr = ' ' xcell16.text = valstr xcell17 = ET.SubElement(xrow, 'td', {'class':'bgdcolNoniptc2'}) try: valstr = valuesProp[rowcounter][17] except: valstr = ' ' xcell17.text = valstr filename = "IPTC-VideoMetadataHub-mapping-Rec_"+StdVersion+".html" with open(filename, 'w') as file: file.write(ET.tostring(xroot, pretty_print=True).decode()) moreatlink = valuesProp[0][7] createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + StdVersion + HeaderAppendix + '/ Mapping VMHub - Apple Quicktime', 'Apple Quicktime', moreatlink, 7, 'IPTC-VideoMetadataHub-mapping-AppleQT-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + StdVersion + HeaderAppendix + '/ Mapping VMHub - MPEG 7', 'MPEG 7', moreatlink, 9,'IPTC-VideoMetadataHub-mapping-MPEG7-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + 
StdVersion + HeaderAppendix + '/ Mapping VMHub - NewsML-G2', 'NewsML-G2', moreatlink, 10,'IPTC-VideoMetadataHub-mapping-NewsMLG2-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + StdVersion + HeaderAppendix + '/ Mapping VMHub - PB Core 2.1', 'PB Core 2.1', moreatlink, 11,'IPTC-VideoMetadataHub-mapping-PBCore21-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + StdVersion + HeaderAppendix + '/ Mapping VMHub - Schema.org', 'Schema.org', moreatlink, 12,'IPTC-VideoMetadataHub-mapping-SchemaOrg-Rec_'+StdVersion+'.html') # new in 2018-03 createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + StdVersion + HeaderAppendix + '/ Mapping VMHub - Sony Cameras ', 'Sony XDCAM & Planning', moreatlink, 13,'IPTC-VideoMetadataHub-mapping-SonyXDCAM-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + StdVersion + HeaderAppendix + '/ Mapping VMHub - Panasonic Cameras', 'Panasonic/SMPTE P2', moreatlink, 14,'IPTC-VideoMetadataHub-mapping-Panasonic-SMPTEP2-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + StdVersion + HeaderAppendix + '/ Mapping VMHub - Canon Cameras', 'Canon VideoClip XML', moreatlink, 15,'IPTC-VideoMetadataHub-mapping-CanonVClip-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + StdVersion + HeaderAppendix + '/ Mapping VMHub - exiftool', 'exiftool field id', moreatlink, 16,'IPTC-VideoMetadataHub-mapping-exiftool-Rec_'+StdVersion+'.html') createSpecificMapping(valuesProp, 'IPTC Video Metadata Hub - Recommendation ' + StdVersion + HeaderAppendix + '/ Mapping VMHub - EIDR Data Fields 2.0', 'EIDR Data Fields 2.0', moreatlink, 17,'IPTC-VideoMetadataHub-mapping-EIDR-Rec_'+StdVersion+'.html') if __name__ == '__main__': main()
47.913118
321
0.589547
2,918
28,125
5.656271
0.154558
0.077795
0.03399
0.035626
0.77728
0.764314
0.735414
0.686398
0.667979
0.655135
0
0.024909
0.263467
28,125
586
322
47.994881
0.77176
0.0368
0
0.703625
0
0.014925
0.308196
0.066828
0
0
0
0
0
1
0.006397
false
0
0.017058
0
0.025586
0.012793
0
0
0
null
0
0
0
0
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
6e73a8b5a5431029c3e98fe181d67f59ff8b3071
160
py
Python
imgtopdf/__init__.py
AVIPAGHADAR1729/imgtopdf
642f83632e99685d71ad601593cd907814237f92
[ "MIT" ]
null
null
null
imgtopdf/__init__.py
AVIPAGHADAR1729/imgtopdf
642f83632e99685d71ad601593cd907814237f92
[ "MIT" ]
null
null
null
imgtopdf/__init__.py
AVIPAGHADAR1729/imgtopdf
642f83632e99685d71ad601593cd907814237f92
[ "MIT" ]
null
null
null
from .imgtopdf import get_images_and_convert # https://towardsdatascience.com/how-to-build-your-first-python-package-6a00b02635c9
9.411765
84
0.6875
18
160
5.944444
1
0
0
0
0
0
0
0
0
0
0
0.072581
0.225
160
17
84
9.411765
0.790323
0.5125
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
6e8c2309c9aa1289d95c411b897881690a7bd531
364
py
Python
dl4nlp_pos_tagging/models/modules/seq2seq_encoders/bi_feedforward_encoder.py
michaeljneely/model-uncertainty-pos-tagging
4ed3e1677b2514f162120a7c785d6a9147503106
[ "MIT" ]
1
2021-09-22T15:04:13.000Z
2021-09-22T15:04:13.000Z
dl4nlp_pos_tagging/models/modules/seq2seq_encoders/bi_feedforward_encoder.py
michaeljneely/model-uncertainty-pos-tagging
4ed3e1677b2514f162120a7c785d6a9147503106
[ "MIT" ]
null
null
null
dl4nlp_pos_tagging/models/modules/seq2seq_encoders/bi_feedforward_encoder.py
michaeljneely/model-uncertainty-pos-tagging
4ed3e1677b2514f162120a7c785d6a9147503106
[ "MIT" ]
null
null
null
from overrides import overrides
from allennlp.modules.seq2seq_encoders.feedforward_encoder import FeedForwardEncoder
from allennlp.modules.seq2seq_encoders.seq2seq_encoder import Seq2SeqEncoder


@Seq2SeqEncoder.register("bi-feedforward")
class BiFeedForwardEncoder(FeedForwardEncoder):
    """A ``FeedForwardEncoder`` registered under the name "bi-feedforward".

    The only difference from the parent class is that this encoder reports
    itself as bidirectional to AllenNLP.
    """

    @overrides
    def is_bidirectional(self) -> bool:
        # Unconditionally advertise bidirectionality; all other behaviour is
        # inherited unchanged from FeedForwardEncoder.
        return True
30.333333
84
0.824176
37
364
7.972973
0.594595
0.081356
0.128814
0.176271
0.230508
0
0
0
0
0
0
0.015528
0.115385
364
11
85
33.090909
0.900621
0
0
0
0
0
0.038462
0
0
0
0
0
0
1
0.125
false
0
0.375
0.125
0.75
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
1
1
0
0
4
6e8dbc55e5ccc100670611d0c6cb2d264bf5d9af
160
py
Python
descarteslabs/common/graft/interpreter/__init__.py
descarteslabs/descarteslabs-python
efc874d6062603dc424c9646287a9b1f8636e7ac
[ "Apache-2.0" ]
167
2017-03-23T22:16:58.000Z
2022-03-08T09:19:30.000Z
descarteslabs/common/graft/interpreter/__init__.py
descarteslabs/descarteslabs-python
efc874d6062603dc424c9646287a9b1f8636e7ac
[ "Apache-2.0" ]
93
2017-03-23T22:11:40.000Z
2021-12-13T18:38:53.000Z
descarteslabs/common/graft/interpreter/__init__.py
descarteslabs/descarteslabs-python
efc874d6062603dc424c9646287a9b1f8636e7ac
[ "Apache-2.0" ]
46
2017-03-25T19:12:14.000Z
2021-08-15T18:04:29.000Z
# Package facade: re-export the graft-interpreter public API at package level.
from .interpreter import interpret
from . import exceptions
from .scopedchainmap import ScopedChainMap

# Names exported by ``from <this package> import *``.
__all__ = ["interpret", "exceptions", "ScopedChainMap"]
26.666667
55
0.8
15
160
8.266667
0.466667
0
0
0
0
0
0
0
0
0
0
0
0.1125
160
5
56
32
0.873239
0
0
0
0
0
0.20625
0
0
0
0
0
0
1
0
false
0
0.75
0
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
6ea29c1b9fd896844cdc66b6ce489925ab495d5c
50
py
Python
src/__init__.py
iki-taichi/tf-keras-transformer
613122705583c0274b0c9be0993f3bbeb240932d
[ "MIT" ]
5
2019-08-03T07:56:30.000Z
2020-07-04T09:00:23.000Z
src/__init__.py
iki-taichi/tf-keras-transformer
613122705583c0274b0c9be0993f3bbeb240932d
[ "MIT" ]
1
2019-10-15T16:50:11.000Z
2019-10-15T16:50:11.000Z
src/__init__.py
iki-taichi/tf-keras-transformer
613122705583c0274b0c9be0993f3bbeb240932d
[ "MIT" ]
4
2019-06-15T03:13:47.000Z
2020-08-03T09:04:14.000Z
# coding:utf-8 #from .custom_callbacks import *
10
32
0.72
7
50
5
1
0
0
0
0
0
0
0
0
0
0
0.02381
0.16
50
4
33
12.5
0.809524
0.86
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
6eba1497ffef85ead898a77e1a2828205020b8e1
39
py
Python
ordenenumeros.py
EBERTONSCHIPPNIK/Pequenos-codigospy
b9cc49a1cce372df2ef5217cb93766fafd9e405a
[ "MIT" ]
null
null
null
ordenenumeros.py
EBERTONSCHIPPNIK/Pequenos-codigospy
b9cc49a1cce372df2ef5217cb93766fafd9e405a
[ "MIT" ]
null
null
null
ordenenumeros.py
EBERTONSCHIPPNIK/Pequenos-codigospy
b9cc49a1cce372df2ef5217cb93766fafd9e405a
[ "MIT" ]
null
null
null
# Print an ascending copy of the list; sorted() builds a new list and
# leaves `lista` itself untouched.
lista = [3, 2, 1]
print(sorted(lista))
13
20
0.615385
7
39
3.428571
0.857143
0
0
0
0
0
0
0
0
0
0
0.090909
0.153846
39
3
20
13
0.636364
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.5
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
4
6ee97f02804d6fe12be5b749e25e662ef9fc939d
199
py
Python
discord/ext/ui/item.py
Lapis256/discord-ext-ui
593de0a1107d2a0c26023587a2937f00ecec3ed1
[ "MIT" ]
null
null
null
discord/ext/ui/item.py
Lapis256/discord-ext-ui
593de0a1107d2a0c26023587a2937f00ecec3ed1
[ "MIT" ]
null
null
null
discord/ext/ui/item.py
Lapis256/discord-ext-ui
593de0a1107d2a0c26023587a2937f00ecec3ed1
[ "MIT" ]
null
null
null
from typing import Any, Callable

import discord


class Item:
    """Base class for UI items.

    Both methods below are stubs: they do nothing and return ``None``.
    Subclasses presumably override them — confirm against the callers in
    the rest of this package.
    """

    def to_discord(self) -> Any:
        """Convert this item to its discord.py representation (stub)."""
        pass

    def check(self, func: Callable[[discord.Interaction], bool]) -> 'Item':
        """Attach an interaction predicate to this item (stub)."""
        pass
16.583333
75
0.638191
25
199
5.04
0.64
0
0
0
0
0
0
0
0
0
0
0
0.251256
199
11
76
18.090909
0.845638
0
0
0.285714
0
0
0.020101
0
0
0
0
0
0
1
0.285714
false
0.285714
0.285714
0
0.714286
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
4
42b288fb2bbcbbfb9085736893e5f574a1c07957
150
py
Python
bims/views/under_development.py
Christiaanvdm/django-bims
f92a63156c711b2d53c5f8ea06867cd64cee9eb9
[ "MIT" ]
null
null
null
bims/views/under_development.py
Christiaanvdm/django-bims
f92a63156c711b2d53c5f8ea06867cd64cee9eb9
[ "MIT" ]
null
null
null
bims/views/under_development.py
Christiaanvdm/django-bims
f92a63156c711b2d53c5f8ea06867cd64cee9eb9
[ "MIT" ]
null
null
null
# coding=utf-8 from django.views.generic import TemplateView class UnderDevelopmentView(TemplateView): template_name = 'under_development.html'
21.428571
45
0.806667
17
150
7
0.941176
0
0
0
0
0
0
0
0
0
0
0.007519
0.113333
150
6
46
25
0.887218
0.08
0
0
0
0
0.161765
0.161765
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
6e35f54e54bf012c2e57ad14a6b064d3914856f4
328
py
Python
py/test_pat.py
frasertweedale/drill
4e71b5348b633fd9beecb243c046f19ddfe131fe
[ "MIT" ]
1
2020-09-02T17:25:26.000Z
2020-09-02T17:25:26.000Z
py/test_pat.py
frasertweedale/drill
4e71b5348b633fd9beecb243c046f19ddfe131fe
[ "MIT" ]
null
null
null
py/test_pat.py
frasertweedale/drill
4e71b5348b633fd9beecb243c046f19ddfe131fe
[ "MIT" ]
null
null
null
import unittest

from . import pat


class PatTestCase(unittest.TestCase):
    """Smoke tests for the sibling ``pat`` pattern-matching module."""

    def test_pat(self):
        # (pattern, text, should_match) triples covering star, dot and
        # plain literal matching.
        cases = [
            ('a*', '', True),
            ('.', '', False),
            ('ab*', 'a', True),
            ('a.', 'ab', True),
            ('a', 'a', True),
        ]
        for pattern, text, should_match in cases:
            verdict = pat.match(pattern, text)
            if should_match:
                self.assertTrue(verdict)
            else:
                self.assertFalse(verdict)
25.230769
46
0.591463
40
328
4.825
0.375
0.207254
0.352332
0.455959
0.357513
0
0
0
0
0
0
0
0.204268
328
12
47
27.333333
0.739464
0
0
0
0
0
0.039634
0
0
0
0
0
0.555556
1
0.111111
false
0
0.222222
0
0.444444
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
0
0
0
0
0
0
0
4
6e46af7713bd465def2bc46a62a6f5a2877b31be
186
py
Python
napari_svg/__init__.py
Carreau/napari-svg
f5e83f65121a079f0aa012380d58793920f325c8
[ "BSD-3-Clause" ]
1
2020-04-13T12:20:00.000Z
2020-04-13T12:20:00.000Z
napari_svg/__init__.py
Carreau/napari-svg
f5e83f65121a079f0aa012380d58793920f325c8
[ "BSD-3-Clause" ]
1
2020-05-23T19:07:00.000Z
2020-05-23T20:11:54.000Z
napari_svg/__init__.py
Carreau/napari-svg
f5e83f65121a079f0aa012380d58793920f325c8
[ "BSD-3-Clause" ]
1
2020-05-23T18:33:27.000Z
2020-05-23T18:33:27.000Z
from .hook_implementations import ( napari_get_writer, napari_write_image, napari_write_labels, napari_write_points, napari_write_shapes, napari_write_vectors, )
20.666667
35
0.763441
22
186
5.863636
0.590909
0.426357
0
0
0
0
0
0
0
0
0
0
0.188172
186
8
36
23.25
0.854305
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.125
0
0.125
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
2807886c6e4a2a28a8260e941eaf956810fe8636
246
py
Python
xmpath/translate.py
xmake-io/pxmake
c5ca995e1afa840d54b513e8b2f193de463a3606
[ "Apache-2.0" ]
1
2021-08-15T21:26:10.000Z
2021-08-15T21:26:10.000Z
xmpath/translate.py
xmake-io/pxmake
c5ca995e1afa840d54b513e8b2f193de463a3606
[ "Apache-2.0" ]
null
null
null
xmpath/translate.py
xmake-io/pxmake
c5ca995e1afa840d54b513e8b2f193de463a3606
[ "Apache-2.0" ]
null
null
null
from os.path import expanduser
from os import sep
from re import split
from functools import reduce
from xmtrace import xmtrace


@xmtrace
def xm_path_translate(lua, ph):
    """Translate a path string ``ph`` to the host platform.

    Splits on either separator ('/' or '\\'), rejoins with the native
    ``os.sep``, and expands a leading '~' to the user's home directory.
    ``sep.join`` replaces the original ``reduce(lambda a, b: a + sep + b, ...)``
    — identical output (re.split always yields at least one element), but
    idiomatic and linear instead of building intermediates.

    NOTE(review): ``lua`` is unused here; presumably required by the
    xmtrace calling convention — confirm before removing.
    """
    return expanduser(sep.join(split(r"\\|/", ph)))
24.6
75
0.739837
40
246
4.5
0.525
0.066667
0
0
0
0
0
0
0
0
0
0
0.162602
246
9
76
27.333333
0.873786
0
0
0
0
0
0.01626
0
0
0
0
0
0
1
0.125
false
0
0.625
0.125
0.875
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
1
1
0
0
4
2843c67d3495ed413d600d4c112625c4b89b76e8
250
py
Python
blog/admin/__init__.py
hentt30/education4all
8f930ade7303fe65355cfe4b2ba66787acad93b4
[ "MIT" ]
null
null
null
blog/admin/__init__.py
hentt30/education4all
8f930ade7303fe65355cfe4b2ba66787acad93b4
[ "MIT" ]
null
null
null
blog/admin/__init__.py
hentt30/education4all
8f930ade7303fe65355cfe4b2ba66787acad93b4
[ "MIT" ]
2
2021-06-18T08:13:17.000Z
2021-12-03T05:08:41.000Z
""" Admin access page settings """ from django.contrib import admin from blog.models import get_model_factory from .posts_admin import PostAdmin # Register your models here. admin.site.register(get_model_factory('PostsFactory').create(), PostAdmin)
25
74
0.804
34
250
5.764706
0.617647
0.081633
0.153061
0
0
0
0
0
0
0
0
0
0.104
250
9
75
27.777778
0.875
0.216
0
0
0
0
0.06383
0
0
0
0
0
0
1
0
true
0
0.75
0
0.75
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
2857dbde0cc754d9c0768c1f84e4dad01de21f93
31
py
Python
arviz/plots/backends/__init__.py
Ban-zee/arviz
2b31d7318da063cc26f0e41b0f86830d80df0558
[ "Apache-2.0" ]
null
null
null
arviz/plots/backends/__init__.py
Ban-zee/arviz
2b31d7318da063cc26f0e41b0f86830d80df0558
[ "Apache-2.0" ]
null
null
null
arviz/plots/backends/__init__.py
Ban-zee/arviz
2b31d7318da063cc26f0e41b0f86830d80df0558
[ "Apache-2.0" ]
null
null
null
"""ArviZ plotting backends."""
15.5
30
0.677419
3
31
7
1
0
0
0
0
0
0
0
0
0
0
0
0.096774
31
1
31
31
0.75
0.774194
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
285a8b4142b061327f98eac18337b5a9999755b9
109
py
Python
Part 1/Chapter 4/example 1.1.py
MineSelf2016/PythonInEconomicManagement
e61a69a5d22dc88a3faf88db72c3819abcc134bf
[ "MIT" ]
null
null
null
Part 1/Chapter 4/example 1.1.py
MineSelf2016/PythonInEconomicManagement
e61a69a5d22dc88a3faf88db72c3819abcc134bf
[ "MIT" ]
null
null
null
Part 1/Chapter 4/example 1.1.py
MineSelf2016/PythonInEconomicManagement
e61a69a5d22dc88a3faf88db72c3819abcc134bf
[ "MIT" ]
null
null
null
# Grade banner: print one message for scores of 90 and above, another
# otherwise (the strings are user-facing and kept verbatim).
score = 92
if score >= 90:
    print("优秀")
else:
    print("及格")

# Integer division demo: `/` always yields a float in Python 3.
a = 1
b = 2
print(type(a))
print(type(b))
print(a / b)
12.111111
43
0.59633
23
109
2.826087
0.565217
0.276923
0
0
0
0
0
0
0
0
0
0.067416
0.183486
109
9
44
12.111111
0.662921
0
0
0
0
0
0.036364
0
0
0
0
0
0
1
0
false
0
0
0
0
0.571429
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
4
2864fc965ab0b030370035eb0463988806dbc0ac
707
py
Python
plico/utils/loop.py
lbusoni/plico
e4bab48fcc7767a50dcac13644b5e1d6175ca5f0
[ "MIT" ]
null
null
null
plico/utils/loop.py
lbusoni/plico
e4bab48fcc7767a50dcac13644b5e1d6175ca5f0
[ "MIT" ]
7
2021-08-30T17:18:34.000Z
2022-03-25T22:42:20.000Z
plico/utils/loop.py
lbusoni/plico
e4bab48fcc7767a50dcac13644b5e1d6175ca5f0
[ "MIT" ]
null
null
null
import abc

from six import with_metaclass


class Loop(with_metaclass(abc.ABCMeta, object)):
    """Abstract interface for a control loop.

    Concrete subclasses must implement every method below.  The original
    bodies were ``assert False``, which is silently stripped under
    ``python -O`` (the method would then just return None); raising
    NotImplementedError is the conventional, optimization-proof way to mark
    an unimplemented abstract method.
    """

    @abc.abstractmethod
    def name(self):
        """Return this loop's name."""
        raise NotImplementedError

    @abc.abstractmethod
    def close(self):
        """Close the loop."""
        raise NotImplementedError

    @abc.abstractmethod
    def open(self):
        """Open the loop."""
        raise NotImplementedError

    @abc.abstractmethod
    def isClosed(self):
        """Return whether the loop is currently closed."""
        raise NotImplementedError

    @abc.abstractmethod
    def performOnePass(self):
        """Execute a single pass of the loop."""
        raise NotImplementedError

    @abc.abstractmethod
    def getConvergenceStepCount(self):
        """Return the step count used for convergence checking."""
        raise NotImplementedError

    @abc.abstractmethod
    def hasConverged(self):
        """Return whether the loop has converged."""
        raise NotImplementedError


class LoopException(Exception):
    """Error raised by Loop implementations."""

    def __init__(self, message):
        Exception.__init__(self, message)
17.675
48
0.660537
74
707
6.175676
0.351351
0.260394
0.306346
0.236324
0.459519
0.459519
0
0
0
0
0
0
0.264498
707
39
49
18.128205
0.878846
0
0
0.518519
0
0
0
0
0
0
0
0
0.259259
1
0.296296
false
0.037037
0.074074
0
0.444444
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
4
28776eb8f03b21f285acf937666fc68e5bc8f34b
190
py
Python
virtual/bin/django-admin.py
vinnyotach7/insta-photo
07bc4f870fa119f96b7fbbaeb0982d6902bc41a4
[ "MIT" ]
null
null
null
virtual/bin/django-admin.py
vinnyotach7/insta-photo
07bc4f870fa119f96b7fbbaeb0982d6902bc41a4
[ "MIT" ]
null
null
null
virtual/bin/django-admin.py
vinnyotach7/insta-photo
07bc4f870fa119f96b7fbbaeb0982d6902bc41a4
[ "MIT" ]
null
null
null
#!/home/moringaschool/Documents/django projects/insta-moringa/virtual/bin/python3.6 from django.core import management if __name__ == "__main__": management.execute_from_command_line()
31.666667
83
0.805263
24
190
5.916667
0.875
0
0
0
0
0
0
0
0
0
0
0.011494
0.084211
190
5
84
38
0.804598
0.431579
0
0
0
0
0.074766
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
2878266ec7b83cbe1361c5b9c375e3bfd1d5507a
93
py
Python
pelayanan/apps.py
diaksizz/Adisatya
1b20e523aede6ab3e8effb1ca63adf72016a6839
[ "MIT" ]
null
null
null
pelayanan/apps.py
diaksizz/Adisatya
1b20e523aede6ab3e8effb1ca63adf72016a6839
[ "MIT" ]
7
2021-03-30T14:04:35.000Z
2022-01-13T03:07:50.000Z
pelayanan/apps.py
diaksizz/Adisatya
1b20e523aede6ab3e8effb1ca63adf72016a6839
[ "MIT" ]
null
null
null
from django.apps import AppConfig


class PelayananConfig(AppConfig):
    """Django application configuration for the 'pelayanan' app."""
    name = 'pelayanan'
15.5
33
0.763441
10
93
7.1
0.9
0
0
0
0
0
0
0
0
0
0
0
0.16129
93
5
34
18.6
0.910256
0
0
0
0
0
0.096774
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
954eef887a982e473fee330ceda4a0756c075d30
165
py
Python
qingmi/utils/functional.py
xiongxianzhu/qingmi
ae5a446abec3982ebf2c5dde8546ef72f9453137
[ "BSD-3-Clause" ]
20
2018-05-22T09:29:40.000Z
2020-12-11T04:53:15.000Z
qingmi/utils/functional.py
xiongxianzhu/qingmi
ae5a446abec3982ebf2c5dde8546ef72f9453137
[ "BSD-3-Clause" ]
65
2019-03-07T02:43:06.000Z
2021-01-07T03:43:52.000Z
qingmi/utils/functional.py
xiongxianzhu/qingmi
ae5a446abec3982ebf2c5dde8546ef72f9453137
[ "BSD-3-Clause" ]
6
2019-03-08T06:39:47.000Z
2021-07-01T11:02:56.000Z
class Promise:
    """Marker base for lazy-evaluation proxies.

    The ``lazy`` machinery builds its proxy class inside a closure; that
    proxy subclasses ``Promise`` so other code can recognize promises with
    a plain ``isinstance`` check.  Deliberately empty.
    """
23.571429
79
0.660606
25
165
4.36
0.8
0
0
0
0
0
0
0
0
0
0
0
0.278788
165
6
80
27.5
0.915966
0.70303
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0.5
0
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
0
0
0
4
95640f337672b95f112bad4c0ac277cda9e4288d
125
py
Python
flex/http/cors.py
centergy/flex
4fc11d3ad48e4b5016f53256015e3eed2157daae
[ "MIT" ]
null
null
null
flex/http/cors.py
centergy/flex
4fc11d3ad48e4b5016f53256015e3eed2157daae
[ "MIT" ]
null
null
null
flex/http/cors.py
centergy/flex
4fc11d3ad48e4b5016f53256015e3eed2157daae
[ "MIT" ]
null
null
null
from flask_cors import CORS
from flex.conf import config

# Shared CORS handler for the app: allowed origins come from configuration,
# and credentialed (cookie-bearing) cross-origin requests are permitted.
_origins = config.CORS_ORIGINS
cors = CORS(origins=_origins, supports_credentials=True)
31.25
67
0.84
19
125
5.368421
0.578947
0.196078
0
0
0
0
0
0
0
0
0
0
0.096
125
4
67
31.25
0.902655
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
95a99ee5de4fe96ed705bbb02886bbe960f22b38
102
py
Python
deliravision/torch/models/gans/context_conditional/__init__.py
delira-dev/vision_torch
d944aa67d319bd63a2add5cb89e8308413943de6
[ "BSD-2-Clause" ]
4
2019-08-03T09:56:50.000Z
2019-09-05T09:32:06.000Z
deliravision/torch/models/gans/context_conditional/__init__.py
delira-dev/vision_torch
d944aa67d319bd63a2add5cb89e8308413943de6
[ "BSD-2-Clause" ]
23
2019-08-03T14:16:47.000Z
2019-10-22T10:15:10.000Z
deliravision/torch/models/gans/context_conditional/__init__.py
delira-dev/vision_torch
d944aa67d319bd63a2add5cb89e8308413943de6
[ "BSD-2-Clause" ]
null
null
null
from deliravision.models.gans.context_conditional.context_cond_gan import \ ContextConditionalGAN
34
75
0.862745
11
102
7.727273
0.909091
0
0
0
0
0
0
0
0
0
0
0
0.088235
102
2
76
51
0.913978
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
95b461d2bbf0d1512b375805e80d08bd1b14c33e
5,291
py
Python
tests/unit/baskerville_tests/models_tests/pipeline_task_tests/tests_task_base.py
deflect-ca/baskerville
9659f4b39ab66fcf5329a4eccff15e97245b04f0
[ "CC-BY-4.0" ]
2
2021-12-03T11:26:38.000Z
2022-01-12T22:24:29.000Z
tests/unit/baskerville_tests/models_tests/pipeline_task_tests/tests_task_base.py
deflect-ca/baskerville
9659f4b39ab66fcf5329a4eccff15e97245b04f0
[ "CC-BY-4.0" ]
3
2022-01-19T15:17:37.000Z
2022-03-22T04:55:22.000Z
tests/unit/baskerville_tests/models_tests/pipeline_task_tests/tests_task_base.py
deflect-ca/baskerville
9659f4b39ab66fcf5329a4eccff15e97245b04f0
[ "CC-BY-4.0" ]
null
null
null
# Copyright (c) 2020, eQualit.ie inc.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.

from unittest import mock

from baskerville.models.config import BaskervilleConfig
from tests.unit.baskerville_tests.helpers.spark_testing_base import \
    SQLTestCaseLatestSpark
from tests.unit.baskerville_tests.helpers.utils import test_baskerville_conf


class TestTask(SQLTestCaseLatestSpark):
    """Unit tests for the base ``Task`` pipeline class."""

    def setUp(self):
        super().setUp()
        # Validate the shared test configuration for every test.
        self.test_conf = test_baskerville_conf
        self.baskerville_config = BaskervilleConfig(self.test_conf).validate()

    def _helper_task_set_up(self, steps=()):
        # Function-level import — presumably deferred so the Spark test
        # harness is fully set up before the module is pulled in; confirm.
        from baskerville.models.pipeline_tasks.tasks_base import Task
        self.task = Task(
            self.baskerville_config,
            steps
        )

    def test_initialize(self):
        """initialize() starts the db-tools and spark services and
        initializes every configured step exactly once."""
        self._helper_task_set_up()
        step_one = mock.MagicMock()
        step_two = mock.MagicMock()
        self.task.steps = [step_one, step_two]
        with mock.patch.object(
            self.task.service_provider, 'initialize_db_tools_service'
        ) as mock_initialize_db_tools_service:
            with mock.patch.object(
                self.task.service_provider, 'initialize_spark_service'
            ) as mock_initialize_spark_service:
                self.task.initialize()
                mock_initialize_db_tools_service.assert_called_once()
                mock_initialize_spark_service.assert_called_once()
                step_one.initialize.assert_called_once()
                step_two.initialize.assert_called_once()

    def test_run(self):
        """run() feeds the dataframe through each step once and consumes
        the whole list of remaining steps."""
        step_one = mock.MagicMock()
        step_two = mock.MagicMock()
        mock_steps = [step_one, step_two]
        self._helper_task_set_up(mock_steps)
        self.task.run()
        for step in mock_steps:
            # Each step receives the df and its result is run once.
            step.set_df.assert_called_once()
            step.set_df.return_value.run.assert_called_once()
        # All steps should have been consumed by the run.
        self.assertTrue(len(self.task.remaining_steps) == 0)

    def test_finish_up(self):
        """finish_up() delegates to the service provider."""
        self._helper_task_set_up()
        with mock.patch.object(
            self.task.service_provider, 'finish_up'
        ) as mock_finish_up:
            self.task.finish_up()
            mock_finish_up.assert_called_once()

    def test_reset(self):
        """reset() delegates to the service provider."""
        self._helper_task_set_up()
        with mock.patch.object(
            self.task.service_provider, 'reset'
        ) as mock_reset:
            self.task.reset()
            mock_reset.assert_called_once()


class TestCacheTask(SQLTestCaseLatestSpark):
    """Unit tests for ``CacheTask`` — same as Task plus the request-set
    cache service."""

    def setUp(self):
        super().setUp()
        self.test_conf = test_baskerville_conf
        self.baskerville_config = BaskervilleConfig(self.test_conf).validate()

    def _helper_task_set_up(self, steps=()):
        from baskerville.models.pipeline_tasks.tasks_base import CacheTask
        self.task = CacheTask(
            self.baskerville_config,
            steps
        )

    def test_initialize(self):
        """initialize() starts db-tools, spark AND the request-set cache
        service, then initializes each step."""
        self._helper_task_set_up()
        step_one = mock.MagicMock()
        step_two = mock.MagicMock()
        self.task.steps = [step_one, step_two]
        with mock.patch.object(
            self.task.service_provider, 'initialize_db_tools_service'
        ) as mock_initialize_db_tools_service:
            with mock.patch.object(
                self.task.service_provider, 'initialize_spark_service'
            ) as mock_initialize_spark_service:
                with mock.patch.object(
                    self.task.service_provider,
                    'initialize_request_set_cache_service'
                ) as mock_initialize_request_set_cache_service:
                    self.task.initialize()
                    mock_initialize_db_tools_service.assert_called_once()
                    mock_initialize_spark_service.assert_called_once()
                    mock_initialize_request_set_cache_service.\
                        assert_called_once()
                    step_one.initialize.assert_called_once()
                    step_two.initialize.assert_called_once()


class TestMLTask(SQLTestCaseLatestSpark):
    """Unit tests for ``MLTask`` — Task plus cache and ML services."""

    def setUp(self):
        super().setUp()
        self.test_conf = test_baskerville_conf
        self.baskerville_config = BaskervilleConfig(self.test_conf).validate()

    def _helper_task_set_up(self, steps=()):
        from baskerville.models.pipeline_tasks.tasks_base import MLTask
        self.task = MLTask(
            self.baskerville_config,
            steps
        )

    def test_initialize(self):
        """initialize() starts db-tools, spark, the request-set cache and
        the ML services, then initializes each step."""
        self._helper_task_set_up()
        step_one = mock.MagicMock()
        step_two = mock.MagicMock()
        self.task.steps = [step_one, step_two]
        # The whole provider is replaced by a MagicMock here, so every
        # service call is recorded without patching each one individually.
        self.task.service_provider = mock.MagicMock()
        self.task.initialize()
        self.task.service_provider.initialize_db_tools_service\
            .assert_called_once()
        self.task.service_provider\
            .initialize_spark_service.assert_called_once()
        self.task.service_provider.initialize_request_set_cache_service. \
            assert_called_once()
        # NOTE(review): 'initalize_ml_services' looks like a typo for
        # 'initialize_ml_services'; because the provider is a MagicMock the
        # assertion passes regardless — confirm against the real provider.
        self.task.service_provider.initalize_ml_services.assert_called_once()
        step_one.initialize.assert_called_once()
        step_two.initialize.assert_called_once()
36.489655
78
0.659233
611
5,291
5.343699
0.153846
0.061256
0.093109
0.084533
0.77121
0.757427
0.716998
0.715467
0.671363
0.623583
0
0.00128
0.261765
5,291
144
79
36.743056
0.834613
0.034398
0
0.59292
0
0
0.029786
0.027043
0
0
0
0
0.176991
1
0.106195
false
0
0.061947
0
0.19469
0
0
0
0
null
0
0
0
0
1
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
95ba78b81e42c2423e99d36818e54b2ead046494
192
py
Python
allink_core/core_apps/allink_legacy_redirect/config.py
allink/allink-core
cf2727f26192d8dee89d76feb262bc4760f36f5e
[ "BSD-3-Clause" ]
5
2017-03-13T08:49:45.000Z
2022-03-05T20:05:56.000Z
allink_core/core_apps/allink_legacy_redirect/config.py
allink/allink-core
cf2727f26192d8dee89d76feb262bc4760f36f5e
[ "BSD-3-Clause" ]
28
2019-10-21T08:32:18.000Z
2022-02-10T13:16:38.000Z
allink_core/core_apps/allink_legacy_redirect/config.py
allink/allink-core
cf2727f26192d8dee89d76feb262bc4760f36f5e
[ "BSD-3-Clause" ]
null
null
null
# -*- coding: utf-8 -*- from django.apps import AppConfig class AllinkLegacyConfig(AppConfig): name = 'allink_core.core_apps.allink_legacy_redirect' verbose_name = "Legacy Redirect"
24
57
0.744792
23
192
6
0.695652
0.202899
0
0
0
0
0
0
0
0
0
0.006098
0.145833
192
7
58
27.428571
0.835366
0.109375
0
0
0
0
0.349112
0.260355
0
0
0
0
0
1
0
false
0
0.25
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
4
95c648711164955b4c3300c034a9f596208b0d25
4,247
py
Python
lab3/es3/to_bike_webservice.py
haraldmeister/Programming_for_IoT_applications
04ec13689caee1fca28bf4fb6a261c318ebd374d
[ "Apache-2.0" ]
null
null
null
lab3/es3/to_bike_webservice.py
haraldmeister/Programming_for_IoT_applications
04ec13689caee1fca28bf4fb6a261c318ebd374d
[ "Apache-2.0" ]
null
null
null
lab3/es3/to_bike_webservice.py
haraldmeister/Programming_for_IoT_applications
04ec13689caee1fca28bf4fb6a261c318ebd374d
[ "Apache-2.0" ]
null
null
null
import cherrypy
import json
import requests


class BikeSharing():
    """CherryPy REST service over the Torino 'to-bike' citybik.es feed.

    Routes (all under GET):
      /                    -> full network feed
      /order_slots         -> top-N stations by empty slots
      /order_bikes         -> top-N stations by free bikes
      /count_bikes_slots   -> bike/slot totals around a lat/lon point
    """

    exposed = True

    # Upstream feed with all station data.
    API_URL = "https://api.citybik.es/v2/networks/to-bike"

    def _fetch(self):
        """Download a fresh copy of the feed onto the instance and return it."""
        self.json_data = requests.get(self.API_URL).json()
        return self.json_data

    @staticmethod
    def _as_plain_json(obj):
        # Round-trip through json so non-serializable objects are reduced
        # to their __dict__, exactly as the original handler did.
        return json.loads(json.dumps(obj, default=lambda x: x.__dict__))

    def _top_stations(self, params, field):
        """Return the first N stations ordered by ``field``.

        ``params`` may carry ``N`` (default 10) and ``order`` ("ascend" or
        "descend").  With no ``order`` parameter the sort is descending; an
        unrecognized ``order`` value leaves the feed order untouched
        (both behaviours mirror the original code).  This helper replaces
        two near-identical copies of the same logic for 'empty_slots' and
        'free_bikes'.
        """
        self._fetch()
        self.json_out = []
        self.N = int(params["N"]) if "N" in params else 10

        def sort_key(station):
            return int(station.get(field, 0))

        if "order" in params:
            if params["order"] == "ascend":
                self.json_data['network']['stations'] = sorted(
                    self.json_data['network']['stations'],
                    key=sort_key, reverse=False)
            if params["order"] == "descend":
                self.json_data['network']['stations'] = sorted(
                    self.json_data['network']['stations'],
                    key=sort_key, reverse=True)
        else:
            self.json_data['network']['stations'] = sorted(
                self.json_data['network']['stations'],
                key=sort_key, reverse=True)
        for i in range(0, self.N):
            self.json_out.append(self.json_data['network']['stations'][i])
        return self._as_plain_json(self.json_out)

    @cherrypy.tools.json_out()
    def GET(self, *uri, **params):
        if len(uri) == 0:
            return self._as_plain_json(self._fetch())
        if uri[0] == "order_slots":
            return self._top_stations(params, 'empty_slots')
        if uri[0] == "order_bikes":
            return self._top_stations(params, 'free_bikes')
        if uri[0] == "count_bikes_slots":
            self._fetch()
            self.bikes = 0
            self.slots = 0
            # BUG FIX: the original tested `if "lat" and "lon" in params`,
            # which only checks "lon" because the literal "lat" is truthy.
            if "lat" in params and "lon" in params:
                self.lat = float(params["lat"])
                self.lon = float(params["lon"])
            else:
                return "District number not set"
            for station in self.json_data["network"]["stations"]:
                lat = float(station["latitude"])
                lon = float(station["longitude"])
                # Sum counts for stations inside a small box (+/-0.005 lat,
                # +/-0.01 lon) around the requested point, strict bounds.
                if (self.lat - 0.005 < lat < self.lat + 0.005
                        and self.lon - 0.01 < lon < self.lon + 0.01):
                    self.bikes += int(station["free_bikes"])
                    self.slots += int(station["empty_slots"])
            self.json_out = {
                "latitude": float(params["lat"]),
                "longitude": float(params["lon"]),
                "bikes": self.bikes,
                "slots": self.slots,
            }
            return self._as_plain_json(self.json_out)


if __name__ == '__main__':
    conf = {
        '/': {
            'request.dispatch': cherrypy.dispatch.MethodDispatcher(),
            'tools.sessions.on': True
        }
    }
    cherrypy.tree.mount(BikeSharing(), '/', conf)
    cherrypy.config.update({'server.socket_host': '0.0.0.0'})
    cherrypy.config.update({'server.socket_port': 9090})
    cherrypy.engine.start()
    cherrypy.engine.block()
46.67033
164
0.567459
542
4,247
4.311808
0.166052
0.116389
0.133504
0.170732
0.773641
0.729568
0.729568
0.689345
0.689345
0.674369
0
0.014169
0.252178
4,247
91
165
46.67033
0.721662
0
0
0.432432
0
0
0.201036
0
0
0
0
0
0
1
0.013514
false
0
0.040541
0
0.148649
0
0
0
0
null
0
0
1
0
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
95f87eb6787862f67ee6e4ff2166b4b17941c211
200
py
Python
.env-cbre/bin/django-admin.py
ThebiggunSeeoil/app-cbre-exxon
efec395dca662132a19f882b0ff3dbb6318b3e51
[ "MIT" ]
null
null
null
.env-cbre/bin/django-admin.py
ThebiggunSeeoil/app-cbre-exxon
efec395dca662132a19f882b0ff3dbb6318b3e51
[ "MIT" ]
null
null
null
.env-cbre/bin/django-admin.py
ThebiggunSeeoil/app-cbre-exxon
efec395dca662132a19f882b0ff3dbb6318b3e51
[ "MIT" ]
null
null
null
#!/Users/yutthachaithongkumchum/myproject/app-cbre-exxon/app-cbre-exxon/.env-cbre/bin/python3 from django.core import management if __name__ == "__main__": management.execute_from_command_line()
33.333333
93
0.8
26
200
5.730769
0.769231
0.09396
0.161074
0
0
0
0
0
0
0
0
0.005405
0.075
200
5
94
40
0.8
0.46
0
0
0
0
0.074766
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
25037f81ff175252f3fe1f767fba6931b0e3455e
100
py
Python
PetService/apps.py
sifullahrakin/HelloPaw
dc01827076b59bb145ccfb92aa4a5cdda97683e7
[ "MIT" ]
null
null
null
PetService/apps.py
sifullahrakin/HelloPaw
dc01827076b59bb145ccfb92aa4a5cdda97683e7
[ "MIT" ]
null
null
null
PetService/apps.py
sifullahrakin/HelloPaw
dc01827076b59bb145ccfb92aa4a5cdda97683e7
[ "MIT" ]
null
null
null
from django.apps import AppConfig


class PetserviceConfig(AppConfig):
    """Django application configuration for the 'PetService' app."""
    name = 'PetService'
16.666667
35
0.73
10
100
7.3
0.9
0
0
0
0
0
0
0
0
0
0
0
0.2
100
5
36
20
0.9125
0
0
0
0
0
0.105263
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
250ec70e5d9a97d0228ec0739efdb46b30a01b71
85
py
Python
gh_build.py
sonvt1710/manga-py
848a78e93b890af0c92056a1a9fc7f6ce5707cf6
[ "MIT" ]
337
2019-08-27T16:14:50.000Z
2022-03-29T09:58:22.000Z
gh_build.py
sonvt1710/manga-py
848a78e93b890af0c92056a1a9fc7f6ce5707cf6
[ "MIT" ]
225
2019-08-25T15:02:01.000Z
2022-03-31T06:36:09.000Z
gh_build.py
sonvt1710/manga-py
848a78e93b890af0c92056a1a9fc7f6ce5707cf6
[ "MIT" ]
41
2019-10-04T13:28:02.000Z
2022-03-19T08:18:34.000Z
#!/usr/bin/python3 # -*- coding: utf-8 -*- from helpers.gh_pages import main main()
14.166667
33
0.658824
13
85
4.230769
0.923077
0
0
0
0
0
0
0
0
0
0
0.027397
0.141176
85
5
34
17
0.726027
0.458824
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
2519907eb09f2009dba322b2478c0e43655d9d02
178
py
Python
app/routes/models/form_model.py
mampilly/fileaccess
1bc0af992653c0b427f9c9f8aafd362b0fca3b43
[ "MIT" ]
null
null
null
app/routes/models/form_model.py
mampilly/fileaccess
1bc0af992653c0b427f9c9f8aafd362b0fca3b43
[ "MIT" ]
null
null
null
app/routes/models/form_model.py
mampilly/fileaccess
1bc0af992653c0b427f9c9f8aafd362b0fca3b43
[ "MIT" ]
null
null
null
from pydantic import BaseModel
from fastapi.param_functions import Body  # NOTE(review): unused here — confirm before removing
from typing import Optional  # NOTE(review): unused here — confirm before removing


class FormModel(BaseModel):
    """Form payload carrying a person's first and second name."""

    # NOTE(review): default of None on a non-Optional ``str`` field —
    # confirm whether Optional[str] was intended.
    first_name: str = None
    second_name: str
19.777778
40
0.792135
24
178
5.75
0.708333
0.101449
0
0
0
0
0
0
0
0
0
0
0.168539
178
8
41
22.25
0.932432
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
25456be9acd5f44886321ba6525a12cba4457924
37
py
Python
modules/2.79/bpy/types/TextureNodeCurveRGB.py
cmbasnett/fake-bpy-module
acb8b0f102751a9563e5b5e5c7cd69a4e8aa2a55
[ "MIT" ]
null
null
null
modules/2.79/bpy/types/TextureNodeCurveRGB.py
cmbasnett/fake-bpy-module
acb8b0f102751a9563e5b5e5c7cd69a4e8aa2a55
[ "MIT" ]
null
null
null
modules/2.79/bpy/types/TextureNodeCurveRGB.py
cmbasnett/fake-bpy-module
acb8b0f102751a9563e5b5e5c7cd69a4e8aa2a55
[ "MIT" ]
null
null
null
# Stub attribute assignment for IDE/type support; the real value is supplied
# by Blender at runtime.  NOTE(review): assumes TextureNodeCurveRGB is
# defined earlier in this generated module — this line alone would raise
# NameError.
TextureNodeCurveRGB.mapping = None
9.25
34
0.810811
3
37
10
1
0
0
0
0
0
0
0
0
0
0
0
0.135135
37
3
35
12.333333
0.9375
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
c25542e063541cf37440c290075ae0b2b9d38f08
115
py
Python
WebMirror/management/rss_parser_funcs/feed_parse_extractKendalblackBlogspotCom.py
fake-name/ReadableWebProxy
ed5c7abe38706acc2684a1e6cd80242a03c5f010
[ "BSD-3-Clause" ]
193
2016-08-02T22:04:35.000Z
2022-03-09T20:45:41.000Z
WebMirror/management/rss_parser_funcs/feed_parse_extractKendalblackBlogspotCom.py
fake-name/ReadableWebProxy
ed5c7abe38706acc2684a1e6cd80242a03c5f010
[ "BSD-3-Clause" ]
533
2016-08-23T20:48:23.000Z
2022-03-28T15:55:13.000Z
WebMirror/management/rss_parser_funcs/feed_parse_extractKendalblackBlogspotCom.py
rrosajp/ReadableWebProxy
ed5c7abe38706acc2684a1e6cd80242a03c5f010
[ "BSD-3-Clause" ]
19
2015-08-13T18:01:08.000Z
2021-07-12T17:13:09.000Z
def extractKendalblackBlogspotCom(item):
    """Disabled parser for 'kendalblack.blogspot.com'.

    Kept registered so the feed dispatcher still resolves the name, but it
    always yields no result.
    """
    return None
14.375
40
0.73913
11
115
7.727273
1
0
0
0
0
0
0
0
0
0
0
0
0.147826
115
8
41
14.375
0.867347
0.408696
0
0
0
0
0
0
0
0
0
0
0
1
0.5
false
0
0
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
c266e21e1b60f10837c5ee8875bb015d24e6bbf1
413
py
Python
web/src/auth.py
computer-geek64/guardian
b6aa05074c8f63b7b4e9dfc642f03ba750e32640
[ "MIT" ]
null
null
null
web/src/auth.py
computer-geek64/guardian
b6aa05074c8f63b7b4e9dfc642f03ba750e32640
[ "MIT" ]
null
null
null
web/src/auth.py
computer-geek64/guardian
b6aa05074c8f63b7b4e9dfc642f03ba750e32640
[ "MIT" ]
null
null
null
# auth.py
#
# Credentials are injected via the environment as a JSON object mapping
# usernames to SHA-512 password hashes (hex digests).
import os
import json
import hashlib
import hmac

authentication_credentials = json.loads(os.environ['AUTHENTICATION_CREDENTIALS'])


def authenticate(username, password):
    """Return True iff ``password`` matches the stored hash for ``username``.

    Either argument may be None (e.g. a missing form field), which fails
    immediately.  The hash comparison uses hmac.compare_digest instead of
    ``==`` so the check runs in constant time, closing a timing side
    channel on this untrusted-input path.
    """
    if username is None or password is None:
        return False
    password_hash = hashlib.sha512(password.encode()).hexdigest()
    if username not in authentication_credentials:
        return False
    return hmac.compare_digest(password_hash,
                               authentication_credentials[username])
24.294118
107
0.779661
48
413
6.583333
0.541667
0.316456
0
0
0
0
0
0
0
0
0
0.008523
0.1477
413
16
108
25.8125
0.889205
0.016949
0
0
0
0
0.064356
0.064356
0
0
0
0
0
1
0.111111
false
0.444444
0.333333
0
0.666667
0
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
1
0
1
0
0
4
c27c7b85bef69eec7f1661780c39fa1813738a76
358
py
Python
src/triggers/recommendation_trigger.py
jherrerotardon/spies
ec855b3c1bd207c8ee2beb829e446fa575354c59
[ "Apache-2.0" ]
null
null
null
src/triggers/recommendation_trigger.py
jherrerotardon/spies
ec855b3c1bd207c8ee2beb829e446fa575354c59
[ "Apache-2.0" ]
null
null
null
src/triggers/recommendation_trigger.py
jherrerotardon/spies
ec855b3c1bd207c8ee2beb829e446fa575354c59
[ "Apache-2.0" ]
null
null
null
from pyframework.triggers.abstract_trigger import AbstractTrigger from src.commands.fire.base_fire import Event class RecommendationTrigger(AbstractTrigger): ACTION_KEY_PREFIX = AbstractTrigger.ACTION_KEY_PREFIX + ':' + 'download' EVENT_TASK = Event.RECOMMENDATION_DOWNLOAD_TASK.value EVENT_ACTION = Event.RECOMMENDATION_DOWNLOAD_ACTION.value
35.8
76
0.829609
40
358
7.125
0.525
0.147368
0.168421
0.210526
0
0
0
0
0
0
0
0
0.106145
358
9
77
39.777778
0.890625
0
0
0
0
0
0.02514
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
c292e1dcbb3aeb1b053e7bfaf6e9940f0c0794ff
123
py
Python
src/models.py
VasudhaJha/URLShortner
c28e852a524eb52406bd050b3e4f346fa237a36c
[ "MIT" ]
null
null
null
src/models.py
VasudhaJha/URLShortner
c28e852a524eb52406bd050b3e4f346fa237a36c
[ "MIT" ]
null
null
null
src/models.py
VasudhaJha/URLShortner
c28e852a524eb52406bd050b3e4f346fa237a36c
[ "MIT" ]
null
null
null
from pydantic import BaseModel class URL(BaseModel): long_url: str class ShortURL(BaseModel): short_url: str
15.375
30
0.723577
16
123
5.4375
0.625
0.137931
0
0
0
0
0
0
0
0
0
0
0.211382
123
8
31
15.375
0.896907
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.2
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
1
0
0
4
c2cafc4f9ff31f75f8818e65ddee5cd5d06ca6d9
108
py
Python
profile_app_mod/apps.py
kurniantoska/medicalwebapp_project
a2e36a44b598ad2989c207f950a89c02d987e00d
[ "BSD-3-Clause" ]
1
2019-10-22T02:12:49.000Z
2019-10-22T02:12:49.000Z
profile_app_mod/apps.py
kurniantoska/medicalwebapp_project
a2e36a44b598ad2989c207f950a89c02d987e00d
[ "BSD-3-Clause" ]
3
2020-06-05T18:30:35.000Z
2021-06-10T20:31:09.000Z
profile_app_mod/apps.py
kurniantoska/medicalwebapp_project
a2e36a44b598ad2989c207f950a89c02d987e00d
[ "BSD-3-Clause" ]
null
null
null
from django.apps import AppConfig class ProfileAppModConfig(AppConfig): name = 'profile_app_mod'
18
38
0.75
12
108
6.583333
0.916667
0
0
0
0
0
0
0
0
0
0
0
0.185185
108
5
39
21.6
0.897727
0
0
0
0
0
0.145631
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
c2e2d7dd11ea75f5970ce8d2520d4e7031181352
90
py
Python
hangman.py
juank27/Hangman_python
39d2117bf207581691bed5c9b625c486d29ef47e
[ "MIT" ]
null
null
null
hangman.py
juank27/Hangman_python
39d2117bf207581691bed5c9b625c486d29ef47e
[ "MIT" ]
null
null
null
hangman.py
juank27/Hangman_python
39d2117bf207581691bed5c9b625c486d29ef47e
[ "MIT" ]
null
null
null
print("hola a todos") for i in range(0,100): print("Hola") print("Hola otra vez")
22.5
25
0.611111
16
90
3.4375
0.75
0.490909
0
0
0
0
0
0
0
0
0
0.056338
0.211111
90
4
25
22.5
0.71831
0
0
0
0
0
0.329545
0
0
0
0
0
0
1
0
false
0
0
0
0
0.75
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
4
c2edf9bb96e7e3cd5e041a82496e51eef83c6a05
121
py
Python
positive no in range.py
KrutikaSoor/print-all-positive-no.in-range
61892e4d735bf82576312c009f60411e7a43d2ac
[ "MIT" ]
null
null
null
positive no in range.py
KrutikaSoor/print-all-positive-no.in-range
61892e4d735bf82576312c009f60411e7a43d2ac
[ "MIT" ]
null
null
null
positive no in range.py
KrutikaSoor/print-all-positive-no.in-range
61892e4d735bf82576312c009f60411e7a43d2ac
[ "MIT" ]
null
null
null
list1=[12,-7,5,64,-14] list2=[12,14,-95,3] for i in list1: if i>0: print(i) for j in list2: if j>0: print(j)
13.444444
22
0.553719
29
121
2.310345
0.551724
0.179104
0
0
0
0
0
0
0
0
0
0.225806
0.231405
121
8
23
15.125
0.494624
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0
0
0
0.25
1
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
c2f48358c6829bc521b9857586c1bf0e1032a3c0
49
py
Python
OpenAttack/data/test.py
e-tornike/OpenAttack
b19c53af2e01f096505f8ebb8f48a54388295003
[ "MIT" ]
444
2020-07-14T12:13:26.000Z
2022-03-28T02:46:30.000Z
OpenAttack/data/test.py
e-tornike/OpenAttack
b19c53af2e01f096505f8ebb8f48a54388295003
[ "MIT" ]
50
2020-07-15T01:34:42.000Z
2022-01-24T12:19:19.000Z
OpenAttack/data/test.py
e-tornike/OpenAttack
b19c53af2e01f096505f8ebb8f48a54388295003
[ "MIT" ]
86
2020-08-02T13:16:45.000Z
2022-03-27T06:22:04.000Z
NAME = "test" DOWNLOAD = "/TAADToolbox/test.pkl"
16.333333
34
0.693878
6
49
5.666667
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.122449
49
2
35
24.5
0.790698
0
0
0
0
0
0.510204
0.428571
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
6c3448d61b9d525a3d0cfd85877dd708519dfcee
729
py
Python
meraki_sdk/models/device_policy_enum.py
meraki/meraki-python-sdk
9894089eb013318243ae48869cc5130eb37f80c0
[ "MIT" ]
37
2019-04-24T14:01:33.000Z
2022-01-28T01:37:21.000Z
meraki_sdk/models/device_policy_enum.py
ankita66666666/meraki-python-sdk
9894089eb013318243ae48869cc5130eb37f80c0
[ "MIT" ]
10
2019-07-09T16:35:11.000Z
2021-12-07T03:47:53.000Z
meraki_sdk/models/device_policy_enum.py
ankita66666666/meraki-python-sdk
9894089eb013318243ae48869cc5130eb37f80c0
[ "MIT" ]
17
2019-04-30T23:53:21.000Z
2022-02-07T22:57:44.000Z
# -*- coding: utf-8 -*- """ meraki_sdk This file was automatically generated for meraki by APIMATIC v2.0 ( https://apimatic.io ). """ class DevicePolicyEnum(object): """Implementation of the 'DevicePolicy' enum. The policy to apply to the specified client. Can be 'Whitelisted', 'Blocked', 'Normal' or 'Group policy'. Required. Attributes: WHITELISTED: TODO: type description here. BLOCKED: TODO: type description here. NORMAL: TODO: type description here. ENUM_GROUP POLICY: TODO: type description here. """ WHITELISTED = 'Whitelisted' BLOCKED = 'Blocked' NORMAL = 'Normal' ENUM_GROUP_POLICY = 'Group policy'
22.78125
95
0.625514
78
729
5.794872
0.551282
0.097345
0.168142
0.20354
0
0
0
0
0
0
0
0.005671
0.274348
729
31
96
23.516129
0.848771
0.657064
0
0
1
0
0.220859
0
0
0
0
0.129032
0
1
0
false
0
0
0
1
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
1
0
0
0
0
0
0
0
1
0
0
4
6c64e8abb731e86a9317708f71e530472211a481
265
py
Python
github/models.py
billryan/github-rss
000fb186c66a0ef2fb234649e10bd1bf157f63fd
[ "MIT" ]
null
null
null
github/models.py
billryan/github-rss
000fb186c66a0ef2fb234649e10bd1bf157f63fd
[ "MIT" ]
null
null
null
github/models.py
billryan/github-rss
000fb186c66a0ef2fb234649e10bd1bf157f63fd
[ "MIT" ]
null
null
null
from django.db import models class Repo(models.Model): repo_url = models.URLField(max_length=200) owner = models.CharField(max_length=200) repo = models.CharField(max_length=200) def __unicode__(self): return self.owner + '/' + self.repo
24.090909
46
0.698113
36
265
4.916667
0.527778
0.152542
0.20339
0.271186
0.305085
0
0
0
0
0
0
0.04186
0.188679
265
10
47
26.5
0.781395
0
0
0
0
0
0.003774
0
0
0
0
0
0
1
0.142857
false
0
0.142857
0.142857
1
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
1
0
0
4
6c64f3b860d8f1f50a7c8234c20610ae62b635c5
329
py
Python
example3.py
djinn/python-duckduckgo
e4bb5729cdf8c1e086226760af01e2c0c7dbb500
[ "BSD-3-Clause" ]
2
2015-02-19T10:41:31.000Z
2021-11-12T11:42:48.000Z
example3.py
djinn/python-duckduckgo
e4bb5729cdf8c1e086226760af01e2c0c7dbb500
[ "BSD-3-Clause" ]
null
null
null
example3.py
djinn/python-duckduckgo
e4bb5729cdf8c1e086226760af01e2c0c7dbb500
[ "BSD-3-Clause" ]
null
null
null
from duckduckgo import query def wikipedia_presence(text): """Find if a query has wikipedia article""" return query(text).abstract.url if query(text).abstract != None and query(text).abstract.source == 'Wikipedia' else None if __name__ == '__main__': import sys print wikipedia_presence(' '.join(sys.argv[1:]))
32.9
124
0.714286
45
329
5
0.6
0.12
0.226667
0
0
0
0
0
0
0
0
0.00361
0.158055
329
9
125
36.555556
0.808664
0
0
0
0
0
0.062937
0
0
0
0
0
0
0
null
null
0
0.333333
null
null
0.166667
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
1
0
0
0
0
4
6669c63ec370f0d0bf4e7cf00d020b0eee9a8c8c
353
py
Python
replaybuffer/utils.py
mattbev/replaybuffer
ed2f2bd8e10ab6f118bda49d8c4b26d257bcb5c5
[ "MIT" ]
null
null
null
replaybuffer/utils.py
mattbev/replaybuffer
ed2f2bd8e10ab6f118bda49d8c4b26d257bcb5c5
[ "MIT" ]
null
null
null
replaybuffer/utils.py
mattbev/replaybuffer
ed2f2bd8e10ab6f118bda49d8c4b26d257bcb5c5
[ "MIT" ]
null
null
null
from typing import Iterable, Tuple def remove_nones(*arrays: Iterable) -> Tuple[Iterable]: """ Take inputted arrays that may contain None values, and return copies without Nones. Returns: tuple[Iterable]: New arrays with only non-None values """ return tuple([[i for i in array if i is not None] for array in arrays])
27.153846
75
0.68272
51
353
4.705882
0.627451
0.108333
0
0
0
0
0
0
0
0
0
0
0.235127
353
12
76
29.416667
0.888889
0.427762
0
0
0
0
0
0
0
0
0
0
0
1
0.333333
true
0
0.333333
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
1
0
1
0
0
4
666abd2400392f590093f17cdeaae2457e29958b
950
py
Python
phy/cluster/tests/conftest.py
m-beau/phy
755082af4e123dc057b8edca138652f901d0c8b1
[ "BSD-3-Clause" ]
null
null
null
phy/cluster/tests/conftest.py
m-beau/phy
755082af4e123dc057b8edca138652f901d0c8b1
[ "BSD-3-Clause" ]
null
null
null
phy/cluster/tests/conftest.py
m-beau/phy
755082af4e123dc057b8edca138652f901d0c8b1
[ "BSD-3-Clause" ]
null
null
null
# -*- coding: utf-8 -*- """Test fixtures.""" #------------------------------------------------------------------------------ # Imports #------------------------------------------------------------------------------ from pytest import fixture from phy.io.array import (get_closest_clusters, ) #------------------------------------------------------------------------------ # Fixtures #------------------------------------------------------------------------------ @fixture def cluster_ids(): return [0, 1, 2, 10, 11, 20, 30] # i, g, N, i, g, N, N @fixture def cluster_groups(): return {0: 'noise', 1: 'good', 10: 'mua', 11: 'good'} @fixture def quality(): def quality(c): return c return quality @fixture def similarity(cluster_ids): sim = lambda c, d: (c * 1.01 + d) def similarity(c): return get_closest_clusters(c, cluster_ids, sim) return similarity
21.590909
79
0.382105
87
950
4.08046
0.471264
0.112676
0.101408
0
0
0
0
0
0
0
0
0.027344
0.191579
950
43
80
22.093023
0.434896
0.415789
0
0.2
0
0
0.02952
0
0
0
0
0
0
1
0.3
false
0
0.1
0.2
0.7
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
1
0
0
4
666ee62c62edcf3d4cd56d09470ad6c25ed530c6
50
py
Python
tests/co_sim_io/python/__init__.py
KratosMultiphysics/CoSimIO
cb4578dc338a3215d377e03d9f7cea007c87bfd6
[ "BSD-4-Clause" ]
15
2020-04-17T17:25:47.000Z
2022-02-02T09:28:56.000Z
tests/integration_tutorials/python/mpi/__init__.py
KratosMultiphysics/CoSimIO
cb4578dc338a3215d377e03d9f7cea007c87bfd6
[ "BSD-4-Clause" ]
84
2020-04-29T17:22:04.000Z
2022-02-14T12:24:59.000Z
tests/integration_tutorials/python/mpi/__init__.py
KratosMultiphysics/CoSimIO
cb4578dc338a3215d377e03d9f7cea007c87bfd6
[ "BSD-4-Clause" ]
2
2021-03-02T04:15:05.000Z
2022-01-15T11:59:22.000Z
# this is needed for the python unittest discovery
50
50
0.82
8
50
5.125
1
0
0
0
0
0
0
0
0
0
0
0
0.16
50
1
50
50
0.97619
0.96
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
66a9b896387bf96f4917f56c03e45c26b0acf1fb
694
py
Python
custom_components/ge_kitchen/devices/__init__.py
joelmoses/ha_components
4a4c311337480f9482ece096b35b9f2b51427bcc
[ "MIT" ]
null
null
null
custom_components/ge_kitchen/devices/__init__.py
joelmoses/ha_components
4a4c311337480f9482ece096b35b9f2b51427bcc
[ "MIT" ]
null
null
null
custom_components/ge_kitchen/devices/__init__.py
joelmoses/ha_components
4a4c311337480f9482ece096b35b9f2b51427bcc
[ "MIT" ]
null
null
null
import logging from typing import Type from gekitchensdk.erd import ErdApplianceType from .base import ApplianceApi from .oven import OvenApi from .fridge import FridgeApi from .dishwasher import DishwasherApi _LOGGER = logging.getLogger(__name__) def get_appliance_api_type(appliance_type: ErdApplianceType) -> Type: _LOGGER.debug(f"Found device type: {appliance_type}") """Get the appropriate appliance type""" if appliance_type == ErdApplianceType.OVEN: return OvenApi if appliance_type == ErdApplianceType.FRIDGE: return FridgeApi if appliance_type == ErdApplianceType.DISH_WASHER: return DishwasherApi # Fallback return ApplianceApi
28.916667
69
0.76513
78
694
6.615385
0.435897
0.151163
0.224806
0.180233
0
0
0
0
0
0
0
0
0.174352
694
23
70
30.173913
0.900524
0.011527
0
0
0
0
0.054348
0
0
0
0
0
0
1
0.058824
false
0
0.411765
0
0.705882
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
66cd7ba9850325ca160977d19d5e1d9f829c4ddb
21
py
Python
esp8266/platform.py
pythings/PythingsOS
276b41a32af7fa0d5395b2bb308e611f784f9711
[ "Apache-2.0" ]
11
2020-01-15T14:25:48.000Z
2021-11-25T04:21:18.000Z
esp8266/platform.py
Pythings/PythingsOS
276b41a32af7fa0d5395b2bb308e611f784f9711
[ "Apache-2.0" ]
8
2021-02-04T16:41:57.000Z
2022-03-29T21:57:15.000Z
esp8266/platform.py
pythings/PythingsOS
276b41a32af7fa0d5395b2bb308e611f784f9711
[ "Apache-2.0" ]
null
null
null
platform = 'esp8266'
10.5
20
0.714286
2
21
7.5
1
0
0
0
0
0
0
0
0
0
0
0.222222
0.142857
21
1
21
21
0.611111
0
0
0
0
0
0.333333
0
0
0
0
0
0
1
0
false
0
0
0
0
0
1
1
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
66d369f3c34a7ae1107b0ab17b6c370805300977
235
py
Python
python_modules/models/Result.py
martijnbroekman/OfficeHeatlth
7673c4cd5147f0c917869d28c5fd87d80aa93929
[ "MIT" ]
null
null
null
python_modules/models/Result.py
martijnbroekman/OfficeHeatlth
7673c4cd5147f0c917869d28c5fd87d80aa93929
[ "MIT" ]
null
null
null
python_modules/models/Result.py
martijnbroekman/OfficeHeatlth
7673c4cd5147f0c917869d28c5fd87d80aa93929
[ "MIT" ]
null
null
null
class Result: def __init__(self, face_detected, emotions=None, posture=None, fatigue=None): self.face_detected = face_detected self.emotions = emotions self.posture = posture self.fatigue = fatigue
29.375
81
0.676596
27
235
5.62963
0.407407
0.236842
0.210526
0
0
0
0
0
0
0
0
0
0.242553
235
7
82
33.571429
0.853933
0
0
0
0
0
0
0
0
0
0
0
0
1
0.166667
false
0
0
0
0.333333
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
66eb69c3968e7d18867984bc5ed54179b53dddbc
98
py
Python
test_pytube.py
Tom-Niesytto/YouTubeDownload
d5391d174f064026efc0b21cece2e3d60af7daf8
[ "MIT" ]
null
null
null
test_pytube.py
Tom-Niesytto/YouTubeDownload
d5391d174f064026efc0b21cece2e3d60af7daf8
[ "MIT" ]
null
null
null
test_pytube.py
Tom-Niesytto/YouTubeDownload
d5391d174f064026efc0b21cece2e3d60af7daf8
[ "MIT" ]
null
null
null
from pytube import YouTube YouTube('http://youtube.com/watch?v=9bZkp7q19f0').streams[0].download()
49
71
0.785714
14
98
5.5
0.857143
0
0
0
0
0
0
0
0
0
0
0.06383
0.040816
98
2
71
49
0.755319
0
0
0
0
0
0.383838
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
66ebfa43d3f643cb88ccc46d871565a088dcda7d
58
py
Python
run.py
talos-org/server
6be199fcaf836415b7d32ffb2cee911a9d600395
[ "MIT" ]
1
2019-01-17T20:43:14.000Z
2019-01-17T20:43:14.000Z
run.py
talos-org/server
6be199fcaf836415b7d32ffb2cee911a9d600395
[ "MIT" ]
42
2018-11-13T06:13:55.000Z
2019-07-27T19:18:23.000Z
run.py
talos-org/server
6be199fcaf836415b7d32ffb2cee911a9d600395
[ "MIT" ]
1
2019-03-26T12:55:01.000Z
2019-03-26T12:55:01.000Z
from app import app app.run(host='0.0.0.0', port="5000")
14.5
36
0.655172
13
58
2.923077
0.615385
0.157895
0.157895
0
0
0
0
0
0
0
0
0.156863
0.12069
58
3
37
19.333333
0.588235
0
0
0
0
0
0.189655
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
dd202a82a46675a6cded977f11b5495e3094818c
129
py
Python
correios/__init__.py
rennancockles/rastreio-correios
689f2d5ea26e45983834b2192d249c35e3db90aa
[ "MIT" ]
2
2021-11-16T16:54:19.000Z
2022-03-17T19:10:08.000Z
correios/__init__.py
rennancockles/rastreio-correios
689f2d5ea26e45983834b2192d249c35e3db90aa
[ "MIT" ]
null
null
null
correios/__init__.py
rennancockles/rastreio-correios
689f2d5ea26e45983834b2192d249c35e3db90aa
[ "MIT" ]
null
null
null
from correios.entities import Objeto from correios.main import Correios __version__ = "0.1.4" __all__ = ["Objeto", "Correios"]
18.428571
36
0.751938
17
129
5.235294
0.647059
0.269663
0
0
0
0
0
0
0
0
0
0.026786
0.131783
129
6
37
21.5
0.767857
0
0
0
0
0
0.147287
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
dd33079c75c147480313b52087ef6c990b8e2948
5,147
py
Python
swarm_cli/cli_swarm.py
sungazer-io/swarm-cli
da8f06611ccaad7b072c069fbc73656b77833f8b
[ "MIT" ]
null
null
null
swarm_cli/cli_swarm.py
sungazer-io/swarm-cli
da8f06611ccaad7b072c069fbc73656b77833f8b
[ "MIT" ]
null
null
null
swarm_cli/cli_swarm.py
sungazer-io/swarm-cli
da8f06611ccaad7b072c069fbc73656b77833f8b
[ "MIT" ]
null
null
null
from typing import List import click from swarm_cli.lib import SwarmModeState, load_env_files, run_cmd @click.group() @click.option('--environment', '-e', multiple=True, required=False) @click.pass_context def swarm(ctx: click.Context, environment: List[str]): load_env_files(environment) state = SwarmModeState() state.initFromFile('swarm-config.yml') ctx.obj = state @swarm.group() @click.pass_context def preset(ctx: click.Context): state: SwarmModeState = ctx.obj @preset.command('ls') @click.option('--preset', '-p', help="Select a preset", required=False) @click.pass_context def preset_ls(ctx: click.Context, preset: str): state: SwarmModeState = ctx.obj if preset: state.ensure_preset(preset) for k, v in state.cfg['presets'][preset]['stacks'].items(): click.secho("{}:{}".format(k, v['variant'])) else: for preset in state.cfg['presets'].keys(): click.secho("Preset {}".format(preset)) for k, v in state.cfg['presets'][preset]['stacks'].items(): click.secho(" - {}:{}".format(k, v['variant'])) @preset.command('deploy') @click.option('--preset', '-p', help="Select a preset", required=True) @click.option('--dry-run', is_flag=True) @click.pass_context def preset_deploy(ctx: click.Context, preset: str = None, dry_run=False): state: SwarmModeState = ctx.obj state.ensure_preset(preset) preset_data = state.cfg['presets'][preset] load_env_files(preset_data.get('env_files', []), ignore_missing=True) stacks = state.cfg['presets'][preset]['stacks'] for k, v in stacks.items(): name, variant = k, v['variant'] cmd = ' '.join(['docker', 'stack', 'deploy', state.build_deploy_sequence_for_stack(name, variant), name]) run_cmd(cmd, dry_run=dry_run, env=state.get_environment_for_stack(preset, name, variant)) @preset.command('build') @click.option('--preset', '-p', help="Select a preset", required=True) @click.option('--dry-run', is_flag=True) @click.pass_context def preset_build(ctx: click.Context, preset: str = None, dry_run=False): state: SwarmModeState = ctx.obj 
state.ensure_preset(preset) preset_data = state.cfg['presets'][preset] load_env_files(preset_data.get('env_files', []), ignore_missing=True) stacks = state.cfg['presets'][preset]['stacks'] for k, v in stacks.items(): name, variant = k, v['variant'] state.prepare_build_folder(preset, name, variant) cmd = ' '.join(['docker-compose', state.build_compose_sequence_for_stack(name, variant), 'build']) run_cmd(cmd, dry_run=dry_run, cwd=state.get_build_folder(preset, name, variant), env=state.get_environment_for_stack(preset, name, variant) ) @preset.command('push') @click.option('--preset', '-p', help="Select a preset", required=True) @click.option('--dry-run', is_flag=True) @click.pass_context def preset_push(ctx: click.Context, preset: str = None, dry_run=False): state: SwarmModeState = ctx.obj state.ensure_preset(preset) preset_data = state.cfg['presets'][preset] load_env_files(preset_data.get('env_files', []), ignore_missing=True) stacks = state.cfg['presets'][preset]['stacks'] for k, v in stacks.items(): name, variant = k, v['variant'] cmd = ' '.join(['docker-compose', state.build_compose_sequence_for_stack(name, variant), 'push']) run_cmd(cmd, dry_run=dry_run, env=state.get_environment_for_stack(preset, name, variant) ) # @swarm.group() # def stack(): # pass # # # @stack.command('ls') # @click.pass_context # def stack_ls(ctx: click.Context): # state: SwarmModeState = ctx.obj # click.echo('Available stacks:') # for stack_name in sorted(state.layered_stacks.keys()): # click.echo(stack_name) # for stack_variant in sorted(state.layered_stacks[stack_name].keys()): # click.echo("\t {}".format(stack_variant)) # # # @stack.command('deploy') # @click.argument('name_variant', nargs=-1) # @click.option('--dump-cmd', is_flag=True) # @click.pass_context # def stack_deploy(ctx: click.Context, name_variant: str, dump_cmd: str): # state: SwarmModeState = ctx.obj # for name_variant_elem in name_variant: # name, variant = name_variant_elem.split(':') # state.ensure_stack_exists(name, 
variant) # cmd = ' '.join(['docker', 'stack', 'deploy', state.build_deploy_sequence_for_stack(name, variant), name]) # env = state.get_environment_for_stack(preset, name, variant) # run_cmd(cmd, dry_run=dump_cmd, env=env) # # # @stack.command('setup') # @click.argument('name_variant', nargs=-1) # @click.option('--dump-cmd', is_flag=True) # @click.pass_context # def stack_setup(ctx: click.Context, name_variant: str, dump_cmd: str): # state: SwarmModeState = ctx.obj # for name_variant_elem in name_variant: # name, variant = name_variant_elem.split(':') # state.ensure_stack_exists(name, variant) # state.ensure_preconditions(name, variant, dump_cmd=dump_cmd)
38.410448
115
0.662328
683
5,147
4.806735
0.128843
0.093817
0.043862
0.052087
0.788608
0.721596
0.704539
0.673774
0.673774
0.64636
0
0.000472
0.176025
5,147
133
116
38.699248
0.773638
0.28502
0
0.556962
0
0
0.112668
0
0
0
0
0
0
1
0.075949
false
0.075949
0.037975
0
0.113924
0
0
0
0
null
0
0
0
0
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
4
dd44136641483aec5e58869e582e925a4afdf07f
107
py
Python
scts/__init__.py
deniscapeto/SimpleCorreiosTrackingService
e96bcec580dc6cd2cc89c0e8e038270d40d19164
[ "MIT" ]
null
null
null
scts/__init__.py
deniscapeto/SimpleCorreiosTrackingService
e96bcec580dc6cd2cc89c0e8e038270d40d19164
[ "MIT" ]
12
2020-06-05T23:26:54.000Z
2021-10-02T09:36:41.000Z
scts/__init__.py
deniscapeto/SimpleCorreiosTrackingService
e96bcec580dc6cd2cc89c0e8e038270d40d19164
[ "MIT" ]
1
2019-10-11T00:32:06.000Z
2019-10-11T00:32:06.000Z
from django import setup setup() from scts.factory.build_app import build_app # noqa app = build_app()
13.375
52
0.757009
17
107
4.588235
0.529412
0.307692
0
0
0
0
0
0
0
0
0
0
0.168224
107
7
53
15.285714
0.876404
0.037383
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.5
0
0.5
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
dd4640e671657dc1c95ae56ba8493acc45a399a2
93
py
Python
bc_website/__main__.py
beginner-codes/website
37d2787ff9350c0969d01edcd9b239c860c5d359
[ "MIT" ]
1
2021-08-05T20:22:33.000Z
2021-08-05T20:22:33.000Z
bc_website/__main__.py
beginner-codes/website
37d2787ff9350c0969d01edcd9b239c860c5d359
[ "MIT" ]
12
2021-08-05T20:37:10.000Z
2021-11-08T06:20:39.000Z
bc_website/__main__.py
beginner-codes/website
37d2787ff9350c0969d01edcd9b239c860c5d359
[ "MIT" ]
1
2021-08-05T21:06:49.000Z
2021-08-05T21:06:49.000Z
import uvicorn uvicorn.run("bc_website.app:app", host="localhost", port=5000, reload=True)
18.6
75
0.752688
14
93
4.928571
0.857143
0
0
0
0
0
0
0
0
0
0
0.047059
0.086022
93
4
76
23.25
0.764706
0
0
0
0
0
0.290323
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
dd48c34c58d01914bdce95063cac113a26dfa99e
179
py
Python
web_wrapper/context_processors.py
musicmetadata/web-wrapper
ffc8423769b9d7d1fc57ac2865373ec89ae83192
[ "MIT" ]
1
2019-12-21T12:14:51.000Z
2019-12-21T12:14:51.000Z
web_wrapper/context_processors.py
musicmetadata/web-wrapper
ffc8423769b9d7d1fc57ac2865373ec89ae83192
[ "MIT" ]
2
2019-12-05T16:23:40.000Z
2020-06-23T07:54:37.000Z
web_wrapper/context_processors.py
musicmetadata/web-wrapper
ffc8423769b9d7d1fc57ac2865373ec89ae83192
[ "MIT" ]
1
2020-12-25T16:37:38.000Z
2020-12-25T16:37:38.000Z
from django.conf import settings def features(request): return { 'CWR2_AVAILABLE': settings.CWR2_AVAILABLE, 'CWR3_AVAILABLE': settings.CWR3_AVAILABLE, }
19.888889
50
0.692737
19
179
6.315789
0.631579
0.216667
0
0
0
0
0
0
0
0
0
0.028571
0.217877
179
8
51
22.375
0.828571
0
0
0
0
0
0.156425
0
0
0
0
0
0
1
0.166667
false
0
0.166667
0.166667
0.5
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
1
0
0
0
4
dd7325f505ab2300a98892114fa72bc9daf0d40c
129
py
Python
01. Variable/021.py
MaksonViini/Aprendendo-Python
8d8422f793e4ea9f81fa4ed0e4101bcfc2ba3c99
[ "MIT" ]
1
2020-09-20T23:18:47.000Z
2020-09-20T23:18:47.000Z
01. Variable/021.py
MaksonViini/Aprendendo-Python
8d8422f793e4ea9f81fa4ed0e4101bcfc2ba3c99
[ "MIT" ]
null
null
null
01. Variable/021.py
MaksonViini/Aprendendo-Python
8d8422f793e4ea9f81fa4ed0e4101bcfc2ba3c99
[ "MIT" ]
1
2020-09-20T23:18:49.000Z
2020-09-20T23:18:49.000Z
#Tocando um MP3 from pygame import mixer mixer.init() mixer.music.load('EX021.mp3') #Adicione o nome da musica mixer.music.play()
25.8
56
0.767442
22
129
4.5
0.772727
0.20202
0
0
0
0
0
0
0
0
0
0.043478
0.108527
129
5
57
25.8
0.817391
0.302326
0
0
0
0
0.101124
0
0
0
0
0
0
1
0
true
0
0.25
0
0.25
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
dd73f1e2e89e6ff8832edc49aebbd60bc238b60a
165
py
Python
python/setup.py
SamChill/drunkardswalk
5c30b9dfdfba7df0fb34679a039534b6f84cfcc8
[ "MIT" ]
3
2017-11-08T01:53:44.000Z
2019-04-24T06:55:41.000Z
python/setup.py
SamChill/drunkardswalk
5c30b9dfdfba7df0fb34679a039534b6f84cfcc8
[ "MIT" ]
null
null
null
python/setup.py
SamChill/drunkardswalk
5c30b9dfdfba7df0fb34679a039534b6f84cfcc8
[ "MIT" ]
null
null
null
#!/usr/bin/env python from setuptools import setup from os.path import dirname, abspath, join setup(name='drunkardswalk', packages=['drunkardswalk'], )
20.625
42
0.709091
20
165
5.85
0.8
0
0
0
0
0
0
0
0
0
0
0
0.169697
165
7
43
23.571429
0.854015
0.121212
0
0
0
0
0.180556
0
0
0
0
0
0
1
0
true
0
0.4
0
0.4
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
dd74be1482e927f336a388abc03138f8ca7ef313
217
py
Python
examples/docs_snippets/docs_snippets_tests/concepts_tests/io_management_tests/test_subselection.py
kstennettlull/dagster
dd6f57e170ff03bf145f1dd1417e0b2c3156b1d6
[ "Apache-2.0" ]
null
null
null
examples/docs_snippets/docs_snippets_tests/concepts_tests/io_management_tests/test_subselection.py
kstennettlull/dagster
dd6f57e170ff03bf145f1dd1417e0b2c3156b1d6
[ "Apache-2.0" ]
null
null
null
examples/docs_snippets/docs_snippets_tests/concepts_tests/io_management_tests/test_subselection.py
kstennettlull/dagster
dd6f57e170ff03bf145f1dd1417e0b2c3156b1d6
[ "Apache-2.0" ]
null
null
null
from docs_snippets.concepts.io_management.subselection import ( execute_full, execute_subselection, ) def test_execute_job(): execute_full() def test_execute_subselection(): execute_subselection()
16.692308
63
0.769585
24
217
6.541667
0.541667
0.363057
0.178344
0
0
0
0
0
0
0
0
0
0.152074
217
12
64
18.083333
0.853261
0
0
0
0
0
0
0
0
0
0
0
0
1
0.25
true
0
0.125
0
0.375
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
1
0
0
0
0
0
0
4
06edec272e6f323369bf14129deee41c42b943fc
114
py
Python
post_office/__init__.py
LeGast00n/django-post_office
cfff8a9e824e3352fa897d20b8531723791ebfd3
[ "MIT" ]
null
null
null
post_office/__init__.py
LeGast00n/django-post_office
cfff8a9e824e3352fa897d20b8531723791ebfd3
[ "MIT" ]
null
null
null
post_office/__init__.py
LeGast00n/django-post_office
cfff8a9e824e3352fa897d20b8531723791ebfd3
[ "MIT" ]
null
null
null
VERSION = (1, 1, 1) from .backends import EmailBackend from .models import PRIORITY from .utils import send_mail
19
34
0.77193
17
114
5.117647
0.647059
0.045977
0
0
0
0
0
0
0
0
0
0.03125
0.157895
114
5
35
22.8
0.875
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.75
0
0.75
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
06f8bac11e775f5c5215020296ea436249d1f719
153
py
Python
LNU_OS/main.py
JessyTsu1/DL_Backup
e553525bdd8eba8ac6f8082f50de63862950d460
[ "Apache-2.0" ]
null
null
null
LNU_OS/main.py
JessyTsu1/DL_Backup
e553525bdd8eba8ac6f8082f50de63862950d460
[ "Apache-2.0" ]
null
null
null
LNU_OS/main.py
JessyTsu1/DL_Backup
e553525bdd8eba8ac6f8082f50de63862950d460
[ "Apache-2.0" ]
1
2021-12-15T15:03:43.000Z
2021-12-15T15:03:43.000Z
from process import * #结构体 from out import * #界面窗口 # from config import originate, target import time import os if __name__ == '__main__': run()
12.75
38
0.699346
21
153
4.714286
0.714286
0
0
0
0
0
0
0
0
0
0
0
0.215686
153
11
39
13.909091
0.825
0.287582
0
0
0
0
0.07767
0
0
0
0
0
0
1
0
true
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
6632a527632eb2a27cd322d36cc4b99408f7159d
566
py
Python
src/Bicho.py
victorlujan/Dise-odeSoftwarePatrones
b9845cc1c4abdc44867c90b9e9784246e57f16b3
[ "MIT" ]
null
null
null
src/Bicho.py
victorlujan/Dise-odeSoftwarePatrones
b9845cc1c4abdc44867c90b9e9784246e57f16b3
[ "MIT" ]
null
null
null
src/Bicho.py
victorlujan/Dise-odeSoftwarePatrones
b9845cc1c4abdc44867c90b9e9784246e57f16b3
[ "MIT" ]
null
null
null
class Bicho: def __init__(self): self.vida=0 self.modo = None self.ataque = 10 self.posicion = None def hablar(self): self.modo.hablar() def dormir(self): self.modo.dormir() def atacar(self): self.modo.atacar() def esPerezoso(self): return self.modo.esPerezoso() def esAgresivo(self): return self.modo.esAgresivo() def recorrer(self): self.modo.printOn() def actua(self): self.modo.actua(self) def mover(self): self.modo.mover()
19.517241
37
0.567138
68
566
4.661765
0.323529
0.227129
0.227129
0.113565
0
0
0
0
0
0
0
0.007712
0.312721
566
28
38
20.214286
0.807198
0
0
0
0
0
0
0
0
0
0
0
0
1
0.409091
false
0
0
0.090909
0.545455
0.045455
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
1
0
0
4
b07e6f3d1f847b30e6670f0c09e2b6575e496439
289
py
Python
pybamm/models/submodels/thermal/x_lumped/__init__.py
jedgedrudd/PyBaMM
79c9d34978382d50e09adaf8bf74c8fa4723f759
[ "BSD-3-Clause" ]
1
2019-10-29T19:06:04.000Z
2019-10-29T19:06:04.000Z
pybamm/models/submodels/thermal/x_lumped/__init__.py
jedgedrudd/PyBaMM
79c9d34978382d50e09adaf8bf74c8fa4723f759
[ "BSD-3-Clause" ]
null
null
null
pybamm/models/submodels/thermal/x_lumped/__init__.py
jedgedrudd/PyBaMM
79c9d34978382d50e09adaf8bf74c8fa4723f759
[ "BSD-3-Clause" ]
null
null
null
from .base_x_lumped import BaseModel from .x_lumped_no_current_collectors import NoCurrentCollector from .x_lumped_0D_current_collectors import CurrentCollector0D from .x_lumped_1D_current_collectors import CurrentCollector1D from .x_lumped_2D_current_collectors import CurrentCollector2D
48.166667
62
0.913495
38
289
6.473684
0.421053
0.142276
0.178862
0
0
0
0
0
0
0
0
0.022305
0.069204
289
5
63
57.8
0.892193
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
b0aaeebdb1d78612402cfdbf50a6d8583b2e7d5c
100
py
Python
doobi/doobi_pack/apps.py
bryanopew/doobi
45e98c7a0a8aceea7b13665da4d57f161fe78725
[ "MIT" ]
null
null
null
doobi/doobi_pack/apps.py
bryanopew/doobi
45e98c7a0a8aceea7b13665da4d57f161fe78725
[ "MIT" ]
null
null
null
doobi/doobi_pack/apps.py
bryanopew/doobi
45e98c7a0a8aceea7b13665da4d57f161fe78725
[ "MIT" ]
null
null
null
from django.apps import AppConfig class DoobiPackConfig(AppConfig): name = 'doobi.doobi_pack'
16.666667
33
0.77
12
100
6.333333
0.833333
0
0
0
0
0
0
0
0
0
0
0
0.15
100
5
34
20
0.894118
0
0
0
0
0
0.16
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
b0b38613cada6570ab2206e440d6b3a6a88cf3fe
166
py
Python
py_tdlib/constructors/user_profile_photo.py
Mr-TelegramBot/python-tdlib
2e2d21a742ebcd439971a32357f2d0abd0ce61eb
[ "MIT" ]
24
2018-10-05T13:04:30.000Z
2020-05-12T08:45:34.000Z
py_tdlib/constructors/user_profile_photo.py
MrMahdi313/python-tdlib
2e2d21a742ebcd439971a32357f2d0abd0ce61eb
[ "MIT" ]
3
2019-06-26T07:20:20.000Z
2021-05-24T13:06:56.000Z
py_tdlib/constructors/user_profile_photo.py
MrMahdi313/python-tdlib
2e2d21a742ebcd439971a32357f2d0abd0ce61eb
[ "MIT" ]
5
2018-10-05T14:29:28.000Z
2020-08-11T15:04:10.000Z
from ..factory import Type class userProfilePhoto(Type): id = None # type: "int64" added_date = None # type: "int32" sizes = None # type: "vector<photoSize>"
20.75
42
0.674699
21
166
5.285714
0.714286
0.216216
0
0
0
0
0
0
0
0
0
0.029851
0.192771
166
7
43
23.714286
0.798507
0.319277
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.2
0
1
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
4
b0e865e3d917656be94ae9180ed5791cbde11b1c
578
py
Python
pillow_heif/__init__.py
bigcat88/pillow_heif
45fc1b3accd0da4be52b279083dc725d0e02eb87
[ "Apache-2.0" ]
20
2021-09-15T10:03:31.000Z
2022-03-27T22:51:57.000Z
pillow_heif/__init__.py
bigcat88/pillow_heif
45fc1b3accd0da4be52b279083dc725d0e02eb87
[ "Apache-2.0" ]
8
2021-10-29T18:47:18.000Z
2022-03-22T15:41:47.000Z
pillow_heif/__init__.py
bigcat88/pillow_heif
45fc1b3accd0da4be52b279083dc725d0e02eb87
[ "Apache-2.0" ]
4
2021-11-01T10:25:50.000Z
2022-03-11T03:45:57.000Z
from .constants import * # pylint: disable=unused-wildcard-import from .reader import HeifFile, UndecodedHeifFile, check, read, open # pylint: disable=redefined-builtin,unused-import from .writer import write # pylint: disable=unused-import from .error import HeifError # pylint: disable=unused-import from .as_opener import register_heif_opener, check_heif_magic # pylint: disable=unused-import from . import _libheif # pylint: disable=import-self __version__ = "0.1.4" def libheif_version(): return _libheif.ffi.string(_libheif.lib.heif_get_version()).decode()
41.285714
117
0.780277
76
578
5.736842
0.486842
0.178899
0.174312
0.172018
0.199541
0
0
0
0
0
0
0.005894
0.119377
578
13
118
44.461538
0.850688
0.352941
0
0
0
0
0.013624
0
0
0
0
0
0
1
0.111111
false
0
0.666667
0.111111
0.888889
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
1
1
0
0
4
9fe6eade2553c13d4774cadac8231414559ffbcb
270
py
Python
roles/aliasses/molecule/default/tests/test_default.py
PW999/home-assistant-ansible
fe14d5390712abfe19194bae1af4a9378e1c10af
[ "Apache-2.0" ]
null
null
null
roles/aliasses/molecule/default/tests/test_default.py
PW999/home-assistant-ansible
fe14d5390712abfe19194bae1af4a9378e1c10af
[ "Apache-2.0" ]
10
2021-08-08T17:59:04.000Z
2022-02-05T09:45:06.000Z
roles/aliasses/molecule/default/tests/test_default.py
PW999/home-assistant-ansible
fe14d5390712abfe19194bae1af4a9378e1c10af
[ "Apache-2.0" ]
null
null
null
import os import testinfra.utils.ansible_runner testinfra_hosts = testinfra.utils.ansible_runner.AnsibleRunner( os.environ['MOLECULE_INVENTORY_FILE']).get_hosts('all') def test_alias(host): host.run_expect([0], 'sudo -u molecule /bin/bash -vilc ll')
27
64
0.740741
37
270
5.189189
0.72973
0.145833
0.21875
0.28125
0
0
0
0
0
0
0
0.004292
0.137037
270
9
65
30
0.819742
0
0
0
0
0
0.233716
0.088123
0
0
0
0
0
1
0.166667
false
0
0.333333
0
0.5
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
b002c602385fbb0947a1ea20ac9464e307a614ae
159
py
Python
Modulo 01/exercicos/d003.py
euyag/python-cursoemvideo
d2f684854d926e38ea193816a6c7d2c48d25aa3d
[ "MIT" ]
2
2021-06-22T00:15:11.000Z
2021-08-02T11:28:56.000Z
Modulo 01/exercicos/d003.py
euyag/python-cursoemvideo
d2f684854d926e38ea193816a6c7d2c48d25aa3d
[ "MIT" ]
null
null
null
Modulo 01/exercicos/d003.py
euyag/python-cursoemvideo
d2f684854d926e38ea193816a6c7d2c48d25aa3d
[ "MIT" ]
null
null
null
print('===== DESAFIO 003 =====') n1 = int(input('digite um valor: ')) n2 = int(input('digite um valor: ')) s = n1 + n2 print(f'a soma entre {n1} e {n2} é {s}')
31.8
40
0.559748
28
159
3.178571
0.607143
0.179775
0.314607
0.359551
0.47191
0
0
0
0
0
0
0.068702
0.176101
159
5
40
31.8
0.610687
0
0
0
0
0
0.54375
0
0
0
0
0
0
1
0
false
0
0
0
0
0.4
1
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
b0114ee1ad2ba99baea331bfcd86c8a3128cfd37
136
py
Python
src/aiofiles/__init__.py
q0w/aiofiles
d010ff4d789598213334a32ec3d3f55caaab766c
[ "Apache-2.0" ]
1,947
2015-04-01T20:44:36.000Z
2022-03-31T23:14:38.000Z
src/aiofiles/__init__.py
q0w/aiofiles
d010ff4d789598213334a32ec3d3f55caaab766c
[ "Apache-2.0" ]
133
2015-04-01T21:06:54.000Z
2022-03-31T22:37:34.000Z
venv/lib/python3.8/site-packages/aiofiles/__init__.py
HCDigitalScholarship/migration-encounters
08e705f8ed1b4d4e00d2c1112a8b5d30bf2ebd4d
[ "MIT" ]
162
2015-04-01T21:01:09.000Z
2022-03-16T04:36:56.000Z
"""Utilities for asyncio-friendly file handling.""" from .threadpool import open from . import tempfile __all__ = ["open", "tempfile"]
22.666667
51
0.735294
16
136
6
0.75
0
0
0
0
0
0
0
0
0
0
0
0.132353
136
5
52
27.2
0.813559
0.330882
0
0
0
0
0.141176
0
0
0
0
0
0
1
0
false
0
0.666667
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
b02eb724ab52c3722d06948ea9c270d4d2835327
7,847
py
Python
HDPython/hdl_converter.py
HardwareDesignWithPython/HDPython
aade03aaa092b1684fa12bffd17674cf1c45f5ac
[ "MIT" ]
null
null
null
HDPython/hdl_converter.py
HardwareDesignWithPython/HDPython
aade03aaa092b1684fa12bffd17674cf1c45f5ac
[ "MIT" ]
null
null
null
HDPython/hdl_converter.py
HardwareDesignWithPython/HDPython
aade03aaa092b1684fa12bffd17674cf1c45f5ac
[ "MIT" ]
1
2021-10-20T20:08:16.000Z
2021-10-20T20:08:16.000Z
def get_dependency_objects(obj, dep_list): return obj.__hdl_converter__.get_dependency_objects(obj, dep_list) def ops2str(obj, ops): return obj.__hdl_converter__.ops2str(ops) def get_MemfunctionCalls(obj): return obj.__hdl_converter__.get_MemfunctionCalls(obj) def FlagFor_TemplateMissing(obj): obj.__hdl_converter__.FlagFor_TemplateMissing(obj) def reset_TemplateMissing(obj): obj.__hdl_converter__.reset_TemplateMissing(obj) def isTemplateMissing(obj): return obj.__hdl_converter__.isTemplateMissing(obj) def IsSucessfullConverted(obj): return obj.__hdl_converter__.IsSucessfullConverted(obj) def convert_all_packages(obj, ouputFolder, x, FilesDone): return obj.__hdl_converter__.convert_all_packages(obj, ouputFolder, x, FilesDone) def convert_all_entities(obj, ouputFolder, x, FilesDone): return obj.__hdl_converter__.convert_all_entities(obj, ouputFolder, x, FilesDone) def convert_all_impl(obj, ouputFolder, FilesDone): return obj.__hdl_converter__.convert_all_impl(obj, ouputFolder, FilesDone) def convert_all(obj, ouputFolder): return obj.__hdl_converter__.convert_all(obj, ouputFolder) def get_primary_object(obj): return obj.__hdl_converter__.get_primary_object(obj) def get_packet_file_name(obj): return obj.__hdl_converter__.get_packet_file_name(obj) def get_packet_file_content(obj): return obj.__hdl_converter__.get_packet_file_content(obj) def get_enity_file_content(obj): return obj.__hdl_converter__.get_enity_file_content(obj) def get_entity_file_name(obj): return obj.__hdl_converter__.get_entity_file_name(obj) def get_type_simple(obj): return obj.__hdl_converter__.get_type_simple(obj) def get_type_simple_template(obj): return obj.__hdl_converter__.get_type_simple_template(obj) def impl_constructor(obj): return obj.__hdl_converter__.impl_constructor(obj) def parse_file(obj): return obj.__hdl_converter__.parse_file(obj) def impl_includes(obj, name, parent): return obj.__hdl_converter__.impl_includes(obj, name, parent) def def_includes(obj, name, parent): return 
obj.__hdl_converter__.def_includes(obj, name, parent) def def_record_Member(obj, name, parent, Inout=None): return obj.__hdl_converter__.def_record_Member(obj, name, parent, Inout) def def_record_Member_Default(obj, name, parent, Inout=None): return obj.__hdl_converter__.def_record_Member_Default(obj, name, parent, Inout) def def_packet_header(obj, name, parent): return obj.__hdl_converter__.def_packet_header(obj, name, parent) def def_packet_body(obj, name, parent): return obj.__hdl_converter__.def_packet_body(obj, name, parent) def impl_entity_port(obj, name): return obj.__hdl_converter__.impl_entity_port(obj, name) def impl_function_argument(obj, func_arg, arg): return obj.__hdl_converter__.impl_function_argument(obj, func_arg, arg) def impl_get_attribute(obj, attName,parent = None): return obj.__hdl_converter__.impl_get_attribute(obj, attName, parent) def impl_slice(obj, sl, astParser=None): return obj.__hdl_converter__.impl_slice(obj, sl, astParser) def impl_compare(obj, ops, rhs, astParser=None): return obj.__hdl_converter__.impl_compare(obj, ops, rhs, astParser) def impl_add(obj, args): return obj.__hdl_converter__.impl_add(obj, args) def impl_sub(obj, args): return obj.__hdl_converter__.impl_sub(obj, args) def impl_to_bool(obj, astParser): return obj.__hdl_converter__.impl_to_bool(obj, astParser) def impl_bit_and(obj, rhs, astParser): return obj.__hdl_converter__.impl_bit_and(obj, rhs, astParser) def function_name_modifier(obj, name, varSigSuffix): return obj.__hdl_converter__.function_name_modifier(obj, name, varSigSuffix) def impl_get_value(obj, ReturnToObj=None, astParser=None): return obj.__hdl_converter__.impl_get_value(obj, ReturnToObj, astParser) def impl_reasign_type(obj): return obj.__hdl_converter__.impl_reasign_type(obj) def impl_reasign(obj, rhs, astParser=None, context_str=None): return obj.__hdl_converter__.impl_reasign(obj, rhs, astParser, context_str) def impl_reasign_rshift_(obj, rhs, astParser=None, context_str=None): return 
obj.__hdl_converter__.impl_reasign_rshift_(obj, rhs, astParser, context_str) def get_call_member_function(obj, name, args): return obj.__hdl_converter__.get_call_member_function(obj, name, args) def impl_function_call(obj, name, args, astParser=None): return obj.__hdl_converter__.impl_function_call(obj=obj, name=name, args=args, astParser=astParser) def impl_symbol_instantiation(obj, VarSymb="variable"): return obj.__hdl_converter__.impl_symbol_instantiation(obj, VarSymb) def impl_architecture_header(obj): prepare_for_conversion(obj) return obj.__hdl_converter__.impl_architecture_header(obj) def impl_architecture_body(obj): return obj.__hdl_converter__.impl_architecture_body(obj) def impl_add(obj,args): return obj.__hdl_converter__.impl_add(obj, args) def impl_sub(obj,args): return obj.__hdl_converter__.impl_sub(obj, args) def impl_multi(obj,args): return obj.__hdl_converter__.impl_multi(obj, args) def def_entity_port(obj): prepare_for_conversion(obj) return obj.__hdl_converter__.def_entity_port(obj) def impl_process_header(obj): return obj.__hdl_converter__.impl_process_header(obj) def impl_process_sensitivity_list(obj): return obj.__hdl_converter__.impl_process_sensitivity_list(obj) def impl_process_pull(obj,clk): return obj.__hdl_converter__.impl_process_pull(obj,clk) def impl_process_push(obj,clk): return obj.__hdl_converter__.impl_process_push(obj,clk) def impl_enter_rising_edge(obj): return obj.__hdl_converter__.impl_enter_rising_edge(obj) def impl_exit_rising_edge(obj): return obj.__hdl_converter__.impl_exit_rising_edge(obj) def get_assiment_op(obj): return obj.__hdl_converter__.get_assiment_op(obj) def get_Inout(obj,parent): return obj.__hdl_converter__.get_Inout(obj,parent) def InOut_t2str2(obj, inOut): return obj.__hdl_converter__.InOut_t2str2(inOut) def InOut_t2str(obj): return obj.__hdl_converter__.InOut_t2str(obj) def get_default_value(obj): return obj.__hdl_converter__.get_default_value(obj) def extract_conversion_types(obj, exclude_class_type=None, 
filter_inout=None): return obj.__hdl_converter__.extract_conversion_types(obj, exclude_class_type, filter_inout) def get_Name_array(obj): return obj.__hdl_converter__.get_Name_array(obj) def length(obj): return obj.__hdl_converter__.length(obj) def to_arglist(obj, name, parent, withDefault=False, astParser=None): return obj.__hdl_converter__.to_arglist(obj, name, parent, withDefault, astParser) def get_inout_type_recursive(obj): return obj.__hdl_converter__.get_inout_type_recursive(obj) def Has_pushpull_function(obj, pushpull): return obj.__hdl_converter__.Has_pushpull_function(obj, pushpull) def get_free_symbols(obj, name, parent_list=[]): return obj.__hdl_converter__.get_free_symbols(obj,name, parent_list) def get_component_suffix(obj, Inout_type, varsignal_type): return obj.__hdl_converter__.get_component_suffix(obj, Inout_type, varsignal_type) def prepare_for_conversion(obj): return obj.__hdl_converter__.prepare_for_conversion(obj) def get_HDL_name(obj, parent,suffix): return obj.__hdl_converter__.get_HDL_name(obj,parent,suffix) def impl_get_init_values(obj,parent=None, InOut_Filter=None, VaribleSignalFilter = None,ForceExpand=False): return obj.__hdl_converter__.impl_get_init_values(obj, parent, InOut_Filter, VaribleSignalFilter ,ForceExpand) def get_extractedTypes(obj): primary = get_primary_object(obj) prepare_for_conversion(primary) return primary.__hdl_converter__.extractedTypes
27.925267
114
0.795973
1,120
7,847
5.000893
0.108036
0.154258
0.190145
0.258704
0.784146
0.611676
0.396715
0.252455
0.135333
0.103196
0
0.001152
0.114948
7,847
280
115
28.025
0.805328
0
0
0.067568
0
0
0.00102
0
0
0
0
0
0
1
0.486486
false
0
0
0.452703
0.959459
0
0
0
0
null
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
4
b0395013daa1abb48d72d80560781bf8ef91a6af
2,757
py
Python
centinel/unit_test/test_http.py
mikiec84/centinel
39fc263e71e85135fa3d65513e1d417ef76388ea
[ "MIT" ]
29
2015-02-12T22:39:27.000Z
2022-01-25T13:03:18.000Z
centinel/unit_test/test_http.py
mikiec84/centinel
39fc263e71e85135fa3d65513e1d417ef76388ea
[ "MIT" ]
158
2015-01-03T02:29:58.000Z
2021-02-05T18:35:56.000Z
centinel/unit_test/test_http.py
mikiec84/centinel
39fc263e71e85135fa3d65513e1d417ef76388ea
[ "MIT" ]
22
2015-02-11T05:08:49.000Z
2022-01-25T13:03:33.000Z
import pytest import os from ..primitives import http class TestHTTPMethods: def test_url_not_exist(self): """ test if _get_http_request(args...) returns failure for an invalid url. """ file_name = "data/invalid_hosts.txt" fd = open(file_name, 'r') for line in fd: line = line.rstrip('\n') res = http._get_http_request(line) assert res is not None assert 'failure' in res['response'].keys() fd.close() def test_url_exist(self): """ test if _get_http_request(args..) returns valid contents from a valid url. """ file_name = "data/valid_hosts.txt" fd = open(file_name, 'r') for line in fd: line = line.rstrip('\n') res = http._get_http_request(line) assert res is not None assert 'failure' not in res['response'].keys() fd.close() def test_batch_url_invalid_hosts(self): """ test _get_http_request(arg...) primitive when a list of invaid domain name is passed to get_requests_batch(args...). """ invalid_hosts_file_name = "data/invalid_hosts.txt" fd = open(invalid_hosts_file_name, 'r') lines = [line.rstrip('\n') for line in fd] results = http.get_requests_batch(lines) assert results is not None # assert failure for inValid Hosts for key, result in results.items(): assert result is not None assert 'failure' in result['response'].keys() fd.close() def test_batch_url_valid_hosts(self): """ test _get_http_request(arg...) primitive when a list of valid domain name is passed to get_requests_batch(args...). 
""" valid_hosts_file_name = "data/valid_hosts.txt" fd = open(valid_hosts_file_name, 'r') lines = [line.rstrip('\n') for line in fd] results = http.get_requests_batch(lines) assert results is not None # assert no failure for valid hosts for key,result in results.items(): assert result is not None assert 'failure' not in result['response'].keys() fd.close() def test_batch_url_thread_error(self): """ test if thread takes long time to finish TODO: choose url that gives thread error """ #file_name = "data/input_file.txt" #fd = open(file_name, 'r') #lines = [line.rstrip('\n') for line in fd] #result = http.get_requests_batch(lines) #assert result is not None #assert 'error' in result #assert result['error'] is "Threads took too long to finish." #fd.close()
34.037037
77
0.587232
363
2,757
4.272727
0.209366
0.05158
0.040619
0.067698
0.751773
0.74726
0.70793
0.704062
0.594455
0.491296
0
0
0.311208
2,757
81
78
34.037037
0.816746
0.285092
0
0.55814
0
0
0.086331
0.02435
0
0
0
0.012346
0.232558
1
0.116279
false
0
0.069767
0
0.209302
0
0
0
0
null
0
0
0
0
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
4
b043a3e6c49bebd9a1ab89eaebbf5edf82a12c79
760
py
Python
tests/test_encoding_validators/test_are_sources_in_utf.py
SerejkaSJ/fiasko_bro
dfb8c30109f317c1e5b6d211e002fd148695809e
[ "MIT" ]
25
2018-01-24T10:45:35.000Z
2020-12-05T21:47:20.000Z
tests/test_encoding_validators/test_are_sources_in_utf.py
SerejkaSJ/fiasko_bro
dfb8c30109f317c1e5b6d211e002fd148695809e
[ "MIT" ]
110
2018-01-21T12:25:13.000Z
2021-06-10T19:27:22.000Z
tests/test_encoding_validators/test_are_sources_in_utf.py
SerejkaSJ/fiasko_bro
dfb8c30109f317c1e5b6d211e002fd148695809e
[ "MIT" ]
13
2017-12-12T22:19:01.000Z
2019-01-29T18:08:05.000Z
from fiasko_bro import defaults from fiasko_bro.pre_validation_checks import file_not_in_utf8 def test_file_not_in_utf8_fail(encoding_repo_path): directories_to_skip = defaults.VALIDATION_PARAMETERS['directories_to_skip'] output = file_not_in_utf8(encoding_repo_path, directories_to_skip) assert isinstance(output, str) def test_file_not_in_utf8_ok(general_repo_path): directories_to_skip = defaults.VALIDATION_PARAMETERS['directories_to_skip'] output = file_not_in_utf8(general_repo_path, directories_to_skip) assert output is None def test_file_not_in_utf8_uses_whitelist(encoding_repo_path): directories_to_skip = ['win1251'] output = file_not_in_utf8(encoding_repo_path, directories_to_skip) assert output is None
36.190476
79
0.828947
114
760
4.982456
0.289474
0.183099
0.239437
0.160211
0.783451
0.783451
0.580986
0.580986
0.580986
0.484155
0
0.016369
0.115789
760
20
80
38
0.828869
0
0
0.428571
0
0
0.059211
0
0
0
0
0
0.214286
1
0.214286
false
0
0.142857
0
0.357143
0
0
0
0
null
0
1
1
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
0
0
0
0
4
b068f5b5070b6daa67c7d700aba8e849c2a13712
106
py
Python
stubs/esp32_1_10_0/btree.py
jmannau/micropython-stubber
8930e8a0038192fd259b31a193d1da3b2501256a
[ "MIT" ]
null
null
null
stubs/esp32_1_10_0/btree.py
jmannau/micropython-stubber
8930e8a0038192fd259b31a193d1da3b2501256a
[ "MIT" ]
null
null
null
stubs/esp32_1_10_0/btree.py
jmannau/micropython-stubber
8930e8a0038192fd259b31a193d1da3b2501256a
[ "MIT" ]
null
null
null
"Module 'btree' on firmware 'v1.10-247-g0fb15fc3f on 2019-03-29'" DESC = 2 INCL = 1 def open(): pass
15.142857
65
0.650943
19
106
3.631579
0.947368
0
0
0
0
0
0
0
0
0
0
0.235294
0.198113
106
6
66
17.666667
0.576471
0.59434
0
0
0
0.2
0.6
0.2
0
0
0
0
0
1
0.2
false
0.2
0
0
0.2
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
1
0
0
0
0
0
4
c688e3e75f76cd628395dbb666887416facdcb9f
90
py
Python
main.py
Pommers/LCExtract
e9cbe4f0057fd4288b2b9feba44135e4a32df65c
[ "MIT" ]
null
null
null
main.py
Pommers/LCExtract
e9cbe4f0057fd4288b2b9feba44135e4a32df65c
[ "MIT" ]
null
null
null
main.py
Pommers/LCExtract
e9cbe4f0057fd4288b2b9feba44135e4a32df65c
[ "MIT" ]
null
null
null
from src.LCExtract.LCExtract import LCExtract if __name__ == '__main__': LCExtract()
18
45
0.744444
10
90
5.9
0.7
0
0
0
0
0
0
0
0
0
0
0
0.155556
90
4
46
22.5
0.776316
0
0
0
0
0
0.088889
0
0
0
0
0
0
1
0
true
0
0.333333
0
0.333333
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
c6ac846d724d750ec36e961256dd9afbef7fec11
53
py
Python
brainstat/tutorial/__init__.py
rmarkello/BrainStat
f34ffa01274aabf411feb801a3ea1869f8a22d11
[ "BSD-3-Clause" ]
null
null
null
brainstat/tutorial/__init__.py
rmarkello/BrainStat
f34ffa01274aabf411feb801a3ea1869f8a22d11
[ "BSD-3-Clause" ]
null
null
null
brainstat/tutorial/__init__.py
rmarkello/BrainStat
f34ffa01274aabf411feb801a3ea1869f8a22d11
[ "BSD-3-Clause" ]
null
null
null
"""Functions required for the BrainStat Tutorials"""
26.5
52
0.773585
6
53
6.833333
1
0
0
0
0
0
0
0
0
0
0
0
0.113208
53
1
53
53
0.87234
0.867925
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
c6be883ddfe6e99b1bba02d2e1c9ae0dd5f68dd1
6,495
py
Python
straxen/gain_models.py
cheryonthetop/straxen
3291c0fb7203dd42ed6c260f528f011c6b7a8391
[ "BSD-3-Clause" ]
null
null
null
straxen/gain_models.py
cheryonthetop/straxen
3291c0fb7203dd42ed6c260f528f011c6b7a8391
[ "BSD-3-Clause" ]
null
null
null
straxen/gain_models.py
cheryonthetop/straxen
3291c0fb7203dd42ed6c260f528f011c6b7a8391
[ "BSD-3-Clause" ]
null
null
null
import numpy as np import strax import straxen export, __all__ = strax.exporter() __all__ += ['ADC_TO_E', 'FIXED_TO_PE'] # Convert from ADC * samples to electrons emitted by PMT # see pax.dsputils.adc_to_pe for calculation ADC_TO_E = 17142.81741 @export def get_to_pe(run_id, gain_model, n_tpc_pmts): if not isinstance(gain_model, tuple): raise ValueError(f"gain_model must be a tuple") if not len(gain_model) == 2: raise ValueError(f"gain_model must have two elements: " f"the model type and its specific configuration") model_type, model_conf = gain_model # Convert from ADC * samples to electrons emitted by PMT # see pax.dsputils.adc_to_pe for calculation adc_to_e = 17142.81741 if model_type == 'disabled': # Somebody messed up raise RuntimeError("Attempt to use a disabled gain model") if model_type == 'to_pe_per_run': # Load a npy file specifing a run_id -> to_pe array to_pe_file = model_conf x = straxen.get_resource(to_pe_file, fmt='npy') run_index = np.where(x['run_id'] == int(run_id))[0] if not len(run_index): # Gains not known: using placeholders run_index = [-1] to_pe = x[run_index[0]]['to_pe'] elif model_type == 'to_pe_constant': if model_conf in FIXED_TO_PE: return FIXED_TO_PE[model_conf] # Uniform gain, specified as a to_pe factor to_pe = np.ones(n_tpc_pmts, dtype=np.float32) * model_conf else: raise NotImplementedError(f"Gain model type {model_type} not implemented") if len(to_pe) != n_tpc_pmts: raise ValueError( f"Gain model {gain_model} resulted in a to_pe " f"of length {len(to_pe)}, but n_tpc_pmts is {n_tpc_pmts}!") return to_pe # Specific gain models, fixed forever. Do not remove or alter models here! FIXED_TO_PE = { # First gain calibration, PMTs at 1300 V. 
# https://xe1t-wiki.lngs.infn.it/doku.php?id=xenon:giovo:first_led_run '1300V_20200428': np.array([0.00241, 0.0064, 0.01071, 0.01465, 0.00812, 0.03647, 0.00384, 0.0025, 0.00385, 0.00546, 0.00456, 0.00755, 0.0197, 0.00521, 0.00672, 0.04181, 0.00647, 0.00652, 0.00849, 0.01027, 0.00581, 0.01491, 0.0072, 0.00739, 0.00952, 0.00599, 0.00801, 0.00482, 0.02414, 0.01633, 0.00745, 0.00667, 0.00783, 0.01905, 0.00974, 0.00836, 0.0064, 0.00593, 0.00531, 0.00742, 0.01199, 0.00717, 0.01045, 0.01224, 0.01014, 0.0102, 0.01014, 0.00686, 0.00917, 0.01033, 0.00606, 0.00708, 0.00723, 0.00527, 0.00675, 0.01309, 0.00779, 0.01052, 0.00828, 0.00503, 0.00828, 0.02198, 0.00688, 0.00942, 0.00652, 0.01078, 0.0098, 0.00619, 0.0061, 0.01207, 0.00446, 0.00628, 0.00937, 0.00652, 0.00828, 0.00828, 0.01394, 0.01602, 0.01014, 0.00947, 0.01158, 0.00801, 0.00635, 0.01319, 0.00542, 0.01003, 0.00745, 0.00749, 0.0084, 0.00564, 0.00828, 0.00691, 0.00828, 0.00828, 0.00879, 0.00683, 0.0084, 0.01371, 0.00974, 0.00664, 0.00832, 0.01045, 0.01078, 0.00626, 0.00772, 0.00546, 0.00974, 0.00828, 0.00828, 0.00488, 0.00969, 0.00553, 0.01199, 0.01092, 0.00745, 0.00612, 0.00942, 0.00898, 0.00783, 0.01681, 0.00755, 0.01045, 0.00828, 0.00828, 0.00564, 0.00828, 0.00828, 0.01681, 0.00797, 0.00649, 0.00759, 0.00861, 0.01143, 0.00733, 0.00736, 0.01216, 0.01681, 0.00703, 0.01216, 0.00917, 0.00828, 0.00828, 0.00523, 0.00828, 0.00657, 0.01394, 0.00794, 0.00801, 0.00705, 0.01633, 0.00769, 0.00824, 0.02116, 0.00739, 0.00937, 0.00898, 0.00963, 0.01113, 0.007, 0.00783, 0.00602, 0.00717, 0.00519, 0.01843, 0.00509, 0.00521, 0.00353, 0.00487, 0.00678, 0.00879, 0.03297, 0.02721, 0.00985, 0.00549, 0.00969, 0.0056, 0.00667, 0.00861, 0.02484, 0.00697, 0.02637, 0.00759, 0.00449, 0.0011, 0.00857, 0.00828, 0.0127, 0.00688, 0.00468, 0.00606, 0.0217, 0.01843, 0.00595, 0.00581, 0.0087, 0.00599, 0.0072, 0.00423, 0.00683, 0.00644, 0.00947, 0.00755, 0.01151, 0.0084, 0.01309, 0.00717, 0.00548, 0.0112, 0.00597, 0.00912, 0.00623, 0.00922, 0.00449, 
0.00888, 0.00769, 0.00927, 0.01279, 0.00765, 0.01207, 0.00809, 0.00902, 0.01602, 0.0056, 0.00765, 0.00794, 0.00551, 0.01174, 0.00509, 0.00577, 0.00597, 0.01207, 0.0112, 0.00644, 0.01027, 0.00991, 0.00575, 0.00433, 0.00828, 0.00812, 0.01948, 0.0084, 0.00591, 0.00664, 0.00691, 0.00776, 0.00548, 0.00786, 0.02348, 0.0087, 0.01092, 0.00772, 0.00786, 0.00717, 0.02198, 0.0082, 0.00902, 0.01405, 0.00688, 0.00708, 0.00985, 0.00902, 0.00694, 0.01279, 0.0098, 0.00779, 0.01033, 0.02597, 0.00729, 0.01429, 0.01039, 0.02041, 0.00542, 0.01052, 0.01617, 0.01052, 0.01106, 0.00937, 0.00828, 0.00866, 0.00884, 0.00844, 0.00902, 0.01361, 0.00776, 0.00476, 0.02317, 0.01008, 0.0135, 0.00898, 0.00647, 0.00828, 0.00861, 0.00614, 0.00958, 0.01052, 0.00617, 0.01926, 0.01453, 0.02484, 0.00652, 0.00969, 0.01039, 0.03117, 0.00828, 0.0047, 0.00805, 0.00828, 0.00614, 0.02956, 0.00907, 0.01587, 0.00947, 0.00524, 0.00717, 0.0381, 0.00902, 0.00853, 0.00917, 0.0068, 0.00828, 0.01014, 0.00922, 0.00828, 0.01884, 0.00714, 0.03499, 0.00501, 0.03571, 0.00912, 0.02065, 0.01251, 0.00568, 0.00902, 0.00783, 0.01099, 0.00521, 0.00828, 0.00686, 0.02597, 0.00912, 0.01045, 0.00893, 0.00599, 0.00812, 0.00456, 0.00714, 0.00729, 0.01174, 0.0112, 0.00451, 0.00828, 0.00942, 0.0072, 0.00932, 0.00828, 0.00937, 0.00521, 0.02597, 0.00515, 0.00927, 0.00726, 0.00659, 0.00717, 0.00917, 0.01309, 0.00801, 0.01242, 0.00664, 0.01199, 0.00739, 0.00697, 0.02956, 0.00551, 0.00591, 0.0067, 0.00474, 0.03361, 0.007, 0.01242, 0.01071, 0.00749, 0.02857, 0.00591, 0.00844, 0.00583, 0.01233, 0.00828, 0.01309, 0.00711, 0.01233, 0.00628, 0.02637, 0.00902, 0.0061, 0.01884, 0.00686, 0.00952, 0.00974, 0.00425, 0.01544, 0.0051, 0.0197, 0.0044, 0.00678, 0.01587, 0.01135, 0.00527, 0.00985, 0.01003, 0.00585, 0.00664, 0.00893, 0.00657, 0.00801, 0.01158, 0.00571, 0.00523, 0.00801, 0.00776, 0.02721, 0.00853, 0.01045, 0.0061, 0.00446, 0.00523, 0.00694, 0.00478, 0.00828, 0.00733, 0.00752, 0.0051, 0.02116, 0.01617, 0.00828, 0.0112, 0.00776, 0.00577, 
0.00675, 0.03117, 0.01382, 0.0084, 0.00536, 0.00902, 0.00875, 0.03499, 0.00675, 0.0079, 0.00506, 0.02484, 0.00717, 0.01158, 0.01106, 0.00463, 0.00521, 0.00762, 0.01329, 0.00947, 0.02017, 0.00577, 0.00551, 0.0061, 0.00733, 0.00649, 0.01174, 0.00527, 0.01289, 0.00659, 0.00849, 0.00902, 0.00642, 0.01199, 0.01065, 0.00779, 0.00879, 0.00801, 0.01008, 0.01085, 0.01182, 0.00853, 0.02484, 0.00786, 0.00875, 0.02857, 0.00776, 0.00755, 0.00836, 0.01339, 0.00769, 0.01135, 0.02956, 0.00703, 0.00672, 0.01371, 0.02597]) }
103.095238
4,433
0.660816
1,313
6,495
3.204113
0.281797
0.042786
0.049917
0.017114
0.087711
0.058949
0.045163
0.045163
0.045163
0.045163
0
0.541301
0.157506
6,495
62
4,434
104.758065
0.227522
0.080677
0
0
0
0
0.061588
0
0
0
0
0
0
1
0.026316
false
0
0.078947
0
0.157895
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
1
1
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
c6c813afc455f3b68be01f9399c579d60cfaa71f
60
py
Python
test/integration/targets/script/files/no_shebang.py
Container-Projects/ansible-provider-docs
100b695b0b0c4d8d08af362069557ffc735d0d7e
[ "PSF-2.0", "BSD-2-Clause", "MIT" ]
37
2017-08-15T15:02:43.000Z
2021-07-23T03:44:31.000Z
test/integration/targets/script/files/no_shebang.py
Container-Projects/ansible-provider-docs
100b695b0b0c4d8d08af362069557ffc735d0d7e
[ "PSF-2.0", "BSD-2-Clause", "MIT" ]
12
2018-01-10T05:25:25.000Z
2021-11-28T06:55:48.000Z
test/integration/targets/script/files/no_shebang.py
Container-Projects/ansible-provider-docs
100b695b0b0c4d8d08af362069557ffc735d0d7e
[ "PSF-2.0", "BSD-2-Clause", "MIT" ]
49
2017-08-15T09:52:13.000Z
2022-03-21T17:11:54.000Z
import sys sys.stdout.write("Script with shebang omitted")
15
47
0.783333
9
60
5.222222
0.888889
0
0
0
0
0
0
0
0
0
0
0
0.116667
60
3
48
20
0.886792
0
0
0
0
0
0.45
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
05d317d5688bdb5e5e7075bf0340da01de9972bb
359
py
Python
Mathematics/106bombyx/usage.py
667MARTIN/Epitech
81095d8e7d54e9abd95541ee3dfcc3bc85d5cf0e
[ "MIT" ]
40
2018-01-28T14:23:27.000Z
2022-03-05T15:57:47.000Z
Mathematics/106bombyx/usage.py
667MARTIN/Epitech
81095d8e7d54e9abd95541ee3dfcc3bc85d5cf0e
[ "MIT" ]
1
2021-10-05T09:03:51.000Z
2021-10-05T09:03:51.000Z
Mathematics/106bombyx/usage.py
667MARTIN/Epitech
81095d8e7d54e9abd95541ee3dfcc3bc85d5cf0e
[ "MIT" ]
73
2019-01-07T18:47:00.000Z
2022-03-31T08:48:38.000Z
#!/usr/bin/python # -*- coding: utf-8 -*- ## usage.py for usage in /home/rodrig_1/rendu/Maths/103architecte ## ## Made by gwendoline rodriguez ## Login <rodrig_1@epitech.net> ## ## Started on Sun Dec 7 16:31:56 2014 gwendoline rodriguez ## Last update Sun Feb 22 18:32:16 2015 gwendoline rodriguez ## print "Usage: ./106bombyx k(integer >= 1 and <= 4)."
27.615385
65
0.685237
56
359
4.357143
0.803571
0.233607
0
0
0
0
0
0
0
0
0
0.112583
0.158774
359
12
66
29.916667
0.695364
0.771588
0
0
0
0
0.6875
0
0
0
0
0
0
0
null
null
0
0
null
null
1
0
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
null
0
0
0
0
1
0
0
0
0
0
0
1
0
4
05d5ea8100cc04852e143c63434b59ba93e68e2b
411
py
Python
I Coding Dojo/Solution/test_dojo.py
ComputerSocietyIFB/Dojo
2e09637e1325e8204be4e35e0937ec49b8256df2
[ "MIT" ]
null
null
null
I Coding Dojo/Solution/test_dojo.py
ComputerSocietyIFB/Dojo
2e09637e1325e8204be4e35e0937ec49b8256df2
[ "MIT" ]
null
null
null
I Coding Dojo/Solution/test_dojo.py
ComputerSocietyIFB/Dojo
2e09637e1325e8204be4e35e0937ec49b8256df2
[ "MIT" ]
null
null
null
from dojo import * def test_valid_input(): assert entrada_valida(1) == False assert entrada_valida('ABCDEF GHI') == True assert entrada_valida('') == False def test_get_indice(): assert get_indice('A') == 0 assert get_indice(' ') == 8 assert get_indice('J') == 3 def test_converte(): assert converte('PUZZLES') == '7889999_9999555337777' assert converte('OI PESSOAL') == '66644407337777_77776662555'
27.4
62
0.717762
54
411
5.222222
0.555556
0.12766
0.202128
0
0
0
0
0
0
0
0
0.138028
0.136253
411
15
62
27.4
0.656338
0
0
0
0
0
0.186893
0.114078
0
0
0
0
0.666667
1
0.25
true
0
0.083333
0
0.333333
0
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
1
1
0
0
0
0
0
0
4
05e6afe5bdcddb8f3fc880a72b40f31a91c9e30c
1,395
py
Python
SHIMON/api/api_calls.py
dosisod/SHIMON
bdcafc1d1036390e1872d0f17bbda511891e02dc
[ "MIT" ]
null
null
null
SHIMON/api/api_calls.py
dosisod/SHIMON
bdcafc1d1036390e1872d0f17bbda511891e02dc
[ "MIT" ]
1
2020-03-06T07:17:27.000Z
2020-03-06T07:17:27.000Z
SHIMON/api/api_calls.py
dosisod/SHIMON
bdcafc1d1036390e1872d0f17bbda511891e02dc
[ "MIT" ]
null
null
null
from SHIMON.api.external import api_recent, api_friends, api_allfor from SHIMON.api.error import error, error_200, error_202, error_400 from SHIMON.api.unlock import ApiUnlock from SHIMON.api.send_msg import ApiSendMsg from SHIMON.api.delete_msg import ApiDeleteMsg from SHIMON.api.save import ApiSave from SHIMON.api.lock import ApiLock from SHIMON.api.change_pwd import ApiChangePwd from SHIMON.api.new_key import ApiNewKey from SHIMON.api.msg_policy import ApiMsgPolicy from SHIMON.api.expiration_timer import ApiExpirationTimer from SHIMON.api.theme import ApiTheme from SHIMON.api.devmode import ApiDevmode from SHIMON.api.nuke import ApiNuke from SHIMON.api.fresh_js import ApiFreshJs from SHIMON.api.fresh_css import ApiFreshCss from SHIMON.api.status import ApiStatus from SHIMON.api.ping import ApiPing from SHIMON.api.friends import ApiFriends from SHIMON.api.recent import ApiRecent from SHIMON.api.allfor import ApiAllfor from SHIMON.api.add_friend import ApiAddFriend from SHIMON.api.api_base import ApiBase apicalls = [ ApiUnlock(), ApiSendMsg(), ApiDeleteMsg(), ApiSave(), ApiLock(), ApiChangePwd(), ApiNewKey(), ApiMsgPolicy(), ApiExpirationTimer(), ApiTheme(), ApiDevmode(), ApiNuke(), ApiFreshJs(), ApiFreshCss(), ApiStatus(), ApiPing(), ApiFriends(), ApiRecent(), ApiAllfor(), ApiAddFriend(), ]
29.680851
67
0.773477
180
1,395
5.905556
0.322222
0.216369
0.281279
0.033866
0
0
0
0
0
0
0
0.007544
0.144803
1,395
46
68
30.326087
0.883487
0
0
0
0
0
0
0
0
0
0
0
0
1
0
false
0
0.511111
0
0.511111
0
0
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
af01a24dd48b27e455d5a11e4e22049ff562ced0
181
py
Python
Nduja/user_info_retriever/__init__.py
herrBez/Nduja
51f93c6a8827ddf8605f88cf062d524b0ca5cebf
[ "BSD-3-Clause" ]
2
2019-07-12T00:52:39.000Z
2020-02-13T17:09:07.000Z
Nduja/user_info_retriever/__init__.py
herrBez/Nduja
51f93c6a8827ddf8605f88cf062d524b0ca5cebf
[ "BSD-3-Clause" ]
2
2018-05-04T09:28:37.000Z
2019-11-09T13:37:00.000Z
Nduja/user_info_retriever/__init__.py
herrBez/Nduja
51f93c6a8827ddf8605f88cf062d524b0ca5cebf
[ "BSD-3-Clause" ]
2
2018-12-04T11:33:31.000Z
2021-09-07T20:13:52.000Z
"""This package contains the definition and implementation of the user info retriever classes, i.e., classes that fetches from the relative APIs the information about an account"""
45.25
75
0.801105
27
181
5.37037
0.851852
0
0
0
0
0
0
0
0
0
0
0
0.143646
181
3
76
60.333333
0.935484
0.961326
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
af2bf4a10ec6c785f3cae15398f4c31df3f63bd9
176
py
Python
9term/fipt/P2PLending/reviews/forms.py
nik-sergeson/bsuir-informatics-labs
14805fb83b8e2324580b6253158565068595e804
[ "Apache-2.0" ]
null
null
null
9term/fipt/P2PLending/reviews/forms.py
nik-sergeson/bsuir-informatics-labs
14805fb83b8e2324580b6253158565068595e804
[ "Apache-2.0" ]
null
null
null
9term/fipt/P2PLending/reviews/forms.py
nik-sergeson/bsuir-informatics-labs
14805fb83b8e2324580b6253158565068595e804
[ "Apache-2.0" ]
null
null
null
from django.forms import ModelForm from P2PLending.reviews.models import Review class ReviewForm(ModelForm): class Meta: model = Review fields = ['text']
19.555556
44
0.698864
20
176
6.15
0.75
0
0
0
0
0
0
0
0
0
0
0.007353
0.227273
176
8
45
22
0.897059
0
0
0
0
0
0.022727
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.666667
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
af3449e99e3851cb666128207a6c1bd7f22efd0a
698
py
Python
PySARibbon/__init__.py
Bllose/SARibbon-pyqt5
98052f0a8862515eecbece2c681387c4655b0db5
[ "MIT" ]
3
2021-11-26T07:05:38.000Z
2022-03-20T15:16:04.000Z
PySARibbon/__init__.py
Bllose/SARibbon-pyqt5
98052f0a8862515eecbece2c681387c4655b0db5
[ "MIT" ]
null
null
null
PySARibbon/__init__.py
Bllose/SARibbon-pyqt5
98052f0a8862515eecbece2c681387c4655b0db5
[ "MIT" ]
2
2021-09-21T13:25:45.000Z
2022-03-03T08:14:01.000Z
# -*- coding: utf-8 -*- """ @Module __init__.py @Author ROOT """ from .SAFramelessHelper import SAFramelessHelper from .SARibbonBar import SARibbonBar from .SARibbonButtonGroupWidget import SARibbonButtonGroupWidget from .SARibbonCategory import SARibbonCategory from .SARibbonCategoryLayout import SARibbonCategoryLayout from .SARibbonContextCategory import SARibbonContextCategory from .SARibbonGallery import SARibbonGallery from .SARibbonMainWindow import SARibbonMainWindow from .SARibbonPannel import SARibbonPannel from .SARibbonPannelLayout import SARibbonPannelLayout from .SARibbonQuickAccessBar import SARibbonQuickAccessBar from .SAWindowButtonGroup import SAWindowButtonGroup
36.736842
64
0.859599
56
698
10.642857
0.392857
0
0
0
0
0
0
0
0
0
0
0.001587
0.097421
698
18
65
38.777778
0.944444
0.090258
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
0
0
1
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
af5d1d071152b9648c81c50190a03ebb8b5b5673
86
py
Python
client/hotbox.py
odontomachus/hotbox
d42c48d7f056f2b1f7bd707ad674e737a3c2fe08
[ "MIT" ]
null
null
null
client/hotbox.py
odontomachus/hotbox
d42c48d7f056f2b1f7bd707ad674e737a3c2fe08
[ "MIT" ]
null
null
null
client/hotbox.py
odontomachus/hotbox
d42c48d7f056f2b1f7bd707ad674e737a3c2fe08
[ "MIT" ]
null
null
null
if __name__ == "__main__": from gui import App app = App() app.mainloop()
17.2
26
0.593023
11
86
3.909091
0.727273
0.418605
0.418605
0
0
0
0
0
0
0
0
0
0.27907
86
4
27
21.5
0.693548
0
0
0
0
0
0.093023
0
0
0
0
0
0
1
0
false
0
0.25
0
0.25
0
1
0
0
null
1
1
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
bb7c01cedef18581351bbed1c806cb8850d04b28
3,107
py
Python
tests/core/test_base_types.py
balancap/arrowbic
088bb3aff5649f189c935a55c6cdbcc61886f778
[ "Apache-2.0" ]
4
2022-02-08T18:10:35.000Z
2022-02-09T20:28:41.000Z
tests/core/test_base_types.py
balancap/arrowbic
088bb3aff5649f189c935a55c6cdbcc61886f778
[ "Apache-2.0" ]
20
2022-01-11T17:02:14.000Z
2022-02-05T16:53:14.000Z
tests/core/test_base_types.py
balancap/arrowbic
088bb3aff5649f189c935a55c6cdbcc61886f778
[ "Apache-2.0" ]
null
null
null
import numpy as np import pyarrow as pa import pytest import arrowbic.extensions from arrowbic.core.base_types import ( from_arrow_to_numpy_dtype, from_arrow_to_python_class, from_numpy_to_arrow_type, is_supported_base_type, ) def test__is_supported_base_type__proper_result() -> None: assert not is_supported_base_type(arrowbic.extensions.IntEnumType()) assert not is_supported_base_type(arrowbic.extensions.TensorType()) def test__from_numpy_to_arrow_type__np_dtype__proper_coverage() -> None: assert from_numpy_to_arrow_type(None) == pa.null() assert from_numpy_to_arrow_type(type(None)) == pa.null() assert from_numpy_to_arrow_type(np.bool_) == pa.bool_() assert from_numpy_to_arrow_type(np.int8) == pa.int8() assert from_numpy_to_arrow_type(np.float32) == pa.float32() assert from_numpy_to_arrow_type(np.dtype(str)) == pa.string() assert from_numpy_to_arrow_type(np.dtype(bytes)) == pa.binary(-1) assert from_numpy_to_arrow_type(np.dtype("datetime64[s]")) == pa.timestamp("s") assert from_numpy_to_arrow_type(np.dtype("timedelta64[ns]")) == pa.duration("ns") with pytest.raises(TypeError): from_numpy_to_arrow_type(np.dtype("O")) def test__from_numpy_to_arrow_type__python_class__proper_coverage() -> None: assert from_numpy_to_arrow_type(None) == pa.null() assert from_numpy_to_arrow_type(type(None)) == pa.null() assert from_numpy_to_arrow_type(bool) == pa.bool_() assert from_numpy_to_arrow_type(int) == pa.int64() assert from_numpy_to_arrow_type(float) == pa.float64() assert from_numpy_to_arrow_type(str) == pa.string() assert from_numpy_to_arrow_type(bytes) == pa.binary(-1) def test__from_arrow_to_numpy_dtype__proper_coverage() -> None: assert from_arrow_to_numpy_dtype(None) == type(None) # noqa: E721 assert from_arrow_to_numpy_dtype(type(None)) == type(None) # noqa: E721 assert from_arrow_to_numpy_dtype(pa.null()) == type(None) # noqa: E721 assert from_arrow_to_numpy_dtype(pa.bool_()) == np.bool_ assert from_arrow_to_numpy_dtype(pa.uint8()) == np.uint8 assert 
from_arrow_to_numpy_dtype(pa.float32()) == np.float32 assert from_arrow_to_numpy_dtype(pa.string()) == np.dtype(str) assert from_arrow_to_numpy_dtype(pa.binary(-1)) == np.dtype(bytes) assert from_arrow_to_numpy_dtype(pa.timestamp("us")) == np.dtype("datetime64[us]") assert from_arrow_to_numpy_dtype(pa.duration("ns")) == np.dtype("timedelta64[ns]") def test__from_arrow_to_python_class__proper_coverage() -> None: assert from_arrow_to_python_class(pa.null()) == type(None) # noqa: E721 assert from_arrow_to_python_class(pa.float32()) == float # noqa: E721 assert from_arrow_to_python_class(pa.int32()) == int # noqa: E721 assert from_arrow_to_python_class(pa.string()) == str # noqa: E721 assert from_arrow_to_python_class(pa.binary(-1)) == bytes # noqa: E721 assert from_arrow_to_python_class(pa.timestamp("us")) == np.dtype("datetime64[us]") assert from_arrow_to_python_class(pa.duration("ns")) == np.dtype("timedelta64[ns]")
41.426667
87
0.745092
481
3,107
4.370062
0.122661
0.156993
0.109895
0.152236
0.785442
0.735966
0.704091
0.489534
0.395338
0.248335
0
0.022198
0.130029
3,107
74
88
41.986486
0.755457
0.028001
0
0.076923
0
0
0.032547
0
0
0
0
0
0.673077
1
0.096154
true
0
0.096154
0
0.192308
0
0
0
0
null
0
0
0
0
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
1
0
0
1
0
0
0
0
0
0
4
bb8cf1fca3e3837b63490012df70f15647da968b
313
py
Python
wallet/errors.py
iesteban/bitcoin_bazaar_backend
2aa7c61d8727dae3a9be4b19c4b2aa49ec7ecaa0
[ "MIT" ]
18
2017-03-08T06:30:55.000Z
2020-05-08T17:30:20.000Z
wallet/errors.py
iesteban/bitcoin_bazaar_backend
2aa7c61d8727dae3a9be4b19c4b2aa49ec7ecaa0
[ "MIT" ]
871
2017-03-06T21:03:59.000Z
2022-03-28T19:46:44.000Z
wallet/errors.py
iesteban/bitcoin_bazaar_backend
2aa7c61d8727dae3a9be4b19c4b2aa49ec7ecaa0
[ "MIT" ]
5
2017-07-07T12:10:47.000Z
2020-05-13T15:57:56.000Z
from django.db import IntegrityError class InsufficientBalance(IntegrityError): """Raised when a wallet has insufficient balance to run an operation. We're subclassing from :mod:`django.db.IntegrityError` so that it is automatically rolled-back during django's transaction lifecycle. """
31.3
59
0.750799
39
313
6.025641
0.846154
0.068085
0
0
0
0
0
0
0
0
0
0
0.182109
313
9
60
34.777778
0.917969
0.638978
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.5
0
1
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
1
0
0
4
bbc7c7cb6d7d568c5084f050284cf002a5bcd61a
101
py
Python
examples/run_pre_tuned_algorithm/deepar/run.py
arangatang/Crayon
ff1ca68fe676028a8209ad56c108b8d8179ba2d7
[ "MIT" ]
null
null
null
examples/run_pre_tuned_algorithm/deepar/run.py
arangatang/Crayon
ff1ca68fe676028a8209ad56c108b8d8179ba2d7
[ "MIT" ]
null
null
null
examples/run_pre_tuned_algorithm/deepar/run.py
arangatang/Crayon
ff1ca68fe676028a8209ad56c108b8d8179ba2d7
[ "MIT" ]
null
null
null
from crayon import benchmark benchmark("deepar.yml", "deepar", benchmark_id="deepar_100", runs=100)
25.25
70
0.772277
14
101
5.428571
0.642857
0
0
0
0
0
0
0
0
0
0
0.065217
0.089109
101
3
71
33.666667
0.76087
0
0
0
0
0
0.257426
0
0
0
0
0
0
1
0
true
0
0.5
0
0.5
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
bbd4ef2bc2bba8befa6288cfc340a6a50999b2c7
216
py
Python
src/airbnb_priceforecaster/features/host_location.py
andersbogsnes/airbnb_priceforecaster
f397c16a08fe7eba9977611f4af5352d234a4624
[ "MIT" ]
null
null
null
src/airbnb_priceforecaster/features/host_location.py
andersbogsnes/airbnb_priceforecaster
f397c16a08fe7eba9977611f4af5352d234a4624
[ "MIT" ]
null
null
null
src/airbnb_priceforecaster/features/host_location.py
andersbogsnes/airbnb_priceforecaster
f397c16a08fe7eba9977611f4af5352d234a4624
[ "MIT" ]
null
null
null
""" host_location ============= Where the host is located. Hypothesis that the host being somewhere else affects the price Text of where the host is located. Could be used to extract features from dtype: string """
24
90
0.731481
33
216
4.757576
0.727273
0.133758
0.152866
0.178344
0.267516
0
0
0
0
0
0
0
0.166667
216
8
91
27
0.872222
0.958333
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
0
0
0
null
0
0
1
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
bbe85ee8e4c269ac93def093a370a21c4e158a9d
1,870
py
Python
{{cookiecutter.project_slug}}/{{cookiecutter.main_app}}/tests/test_{{cookiecutter.main_model|lower}}_status.py
huogerac/cookiecutter-djangofloppyforms
0a2c1d7fe506a5df13aaefde0f716373dbb8194e
[ "BSD-3-Clause" ]
3
2021-03-29T19:11:30.000Z
2021-05-08T13:18:41.000Z
{{cookiecutter.project_slug}}/{{cookiecutter.main_app}}/tests/test_{{cookiecutter.main_model|lower}}_status.py
huogerac/cookiecutter-djangofloppyforms
0a2c1d7fe506a5df13aaefde0f716373dbb8194e
[ "BSD-3-Clause" ]
null
null
null
{{cookiecutter.project_slug}}/{{cookiecutter.main_app}}/tests/test_{{cookiecutter.main_model|lower}}_status.py
huogerac/cookiecutter-djangofloppyforms
0a2c1d7fe506a5df13aaefde0f716373dbb8194e
[ "BSD-3-Clause" ]
2
2021-03-12T15:13:38.000Z
2021-07-01T19:38:11.000Z
from datetime import datetime import pytest from model_bakery import baker from {{cookiecutter.main_app}}.models import {{cookiecutter.main_model}} from {{cookiecutter.main_app}}.services import {{cookiecutter.main_model|lower}}_service def test_should_get_{{cookiecutter.main_model|lower}}_as_pending(db): my_{{cookiecutter.main_model|lower}} = baker.make({{cookiecutter.main_model}}, description='Create an ansible deploy script', due_to=datetime.now()) assert my_{{cookiecutter.main_model|lower}}.status == 'pending' def test_should_get_{{cookiecutter.main_model|lower}}_as_done(db): my_{{cookiecutter.main_model|lower}} = baker.make({{cookiecutter.main_model}}, description='Create an ansible deploy script', due_to=datetime.now()) {{cookiecutter.main_model|lower}}_updated = {{cookiecutter.main_model|lower}}_service.mark_as_done(my_{{cookiecutter.main_model|lower}}.id) assert {{cookiecutter.main_model|lower}}_updated.status == 'done' def test_should_raise_an_erro_for_invalid_{{cookiecutter.main_model|lower}}_id(db): invalid_{{cookiecutter.main_model|lower}} = 0 with pytest.raises(RuntimeError) as error: {{cookiecutter.main_model|lower}} = {{cookiecutter.main_model|lower}}_service.mark_as_done(invalid_{{cookiecutter.main_model|lower}}) assert str(error.value) == f"{{cookiecutter.main_model}} ID: {invalid_{{cookiecutter.main_model|lower}}} invalida" def test_should_mark_as_undone(db): my_{{cookiecutter.main_model|lower}} = baker.make( {{cookiecutter.main_model}}, description='Create an ansible deploy script', due_to=datetime.now(), done=True) {{cookiecutter.main_model|lower}}_updated = {{cookiecutter.main_model|lower}}_service.mark_as_done(my_{{cookiecutter.main_model|lower}}.id) assert {{cookiecutter.main_model|lower}}_updated.status == 'pending'
44.52381
152
0.759358
242
1,870
5.549587
0.219008
0.333582
0.406552
0.406552
0.722264
0.577066
0.577066
0.577066
0.545048
0.479523
0
0.000595
0.101604
1,870
41
153
45.609756
0.79881
0
0
0.16
0
0
0.104278
0.037433
0
0
0
0
0.16
0
null
null
0
0.2
null
null
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
0
0
4
bbea09728b7a60a98d5028f548dbb446b1645180
300
py
Python
test_tenacity/main_test.py
Etuloser/python-playground
2b40e88b3b5a2744284c8e1cae2b3917a75bc803
[ "MIT" ]
null
null
null
test_tenacity/main_test.py
Etuloser/python-playground
2b40e88b3b5a2744284c8e1cae2b3917a75bc803
[ "MIT" ]
null
null
null
test_tenacity/main_test.py
Etuloser/python-playground
2b40e88b3b5a2744284c8e1cae2b3917a75bc803
[ "MIT" ]
null
null
null
import unittest from test_tenacity.main import do_something_unreliable class TestMain(unittest.TestCase): def setUp(self) -> None: pass def tearDown(self) -> None: pass def test_do_something_unreliable(self): got = do_something_unreliable() print(got)
20
54
0.683333
36
300
5.472222
0.555556
0.167513
0.319797
0.152284
0
0
0
0
0
0
0
0
0.24
300
14
55
21.428571
0.864035
0
0
0.2
0
0
0
0
0
0
0
0
0
1
0.3
false
0.2
0.2
0
0.6
0.1
0
0
0
null
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
1
0
0
1
0
0
4
bbf3b90b802450649d8c14713f55b3cadc744f74
74
py
Python
python/seldon_core/__init__.py
juldou/seldon-core
34021ee3ead41c729ff57efd1964ab3f0d37861e
[ "Apache-2.0" ]
3,049
2017-12-21T14:50:09.000Z
2022-03-30T18:14:15.000Z
python/seldon_core/__init__.py
juldou/seldon-core
34021ee3ead41c729ff57efd1964ab3f0d37861e
[ "Apache-2.0" ]
3,678
2017-12-22T16:21:30.000Z
2022-03-31T20:32:31.000Z
python/seldon_core/__init__.py
juldou/seldon-core
34021ee3ead41c729ff57efd1964ab3f0d37861e
[ "Apache-2.0" ]
714
2018-01-03T11:29:49.000Z
2022-03-31T03:49:59.000Z
from seldon_core.version import __version__ from .storage import Storage
18.5
43
0.851351
10
74
5.8
0.6
0
0
0
0
0
0
0
0
0
0
0
0.121622
74
3
44
24.666667
0.892308
0
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
1
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4
bbfb331635f6e73b54c8ca18e4bc802a411eedfc
241
py
Python
roobet-Listing1.py
AdamSierakowski/Math-Behind-Roobet-s-Crash-Game
bf804d5742d7bb960b42052aa248181f42b5c3f5
[ "MIT" ]
1
2021-03-10T11:01:51.000Z
2021-03-10T11:01:51.000Z
roobet-Listing1.py
AdamSierakowski/Math-Behind-Roobet-s-Crash-Game
bf804d5742d7bb960b42052aa248181f42b5c3f5
[ "MIT" ]
null
null
null
roobet-Listing1.py
AdamSierakowski/Math-Behind-Roobet-s-Crash-Game
bf804d5742d7bb960b42052aa248181f42b5c3f5
[ "MIT" ]
1
2021-07-10T13:31:16.000Z
2021-07-10T13:31:16.000Z
import hashlib def prev_hash(hash_code): return hashlib.sha256(hash_code.encode()).hexdigest() def main(): game_hash = 'cc4a75236ecbc038c37729aa5ced461e36155319e88fa375c\ 994933b6a42a0c4' print(prev_hash(game_hash)) main()
16.066667
67
0.763485
26
241
6.846154
0.576923
0.089888
0
0
0
0
0
0
0
0
0
0.215311
0.13278
241
14
68
17.214286
0.636364
0
0
0
0
0
0
0
0
0
0
0
0
1
0.25
false
0
0.125
0.125
0.5
0.125
1
0
0
null
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
1
0
0
0
1
0
0
0
4
a51e15e888b1d3f211aca0c3ec2adcad23363f3a
129
py
Python
home/urls.py
mxpxgx/moiprez.com
8af2bc8ff676b67b5dd773b93721a5e457f89c16
[ "MIT" ]
null
null
null
home/urls.py
mxpxgx/moiprez.com
8af2bc8ff676b67b5dd773b93721a5e457f89c16
[ "MIT" ]
null
null
null
home/urls.py
mxpxgx/moiprez.com
8af2bc8ff676b67b5dd773b93721a5e457f89c16
[ "MIT" ]
null
null
null
# from django.conf.urls import url # from home.views import HomeView # urlpatterns = [ # url(r'^', HomeView.as_view())) # ]
18.428571
36
0.651163
17
129
4.882353
0.764706
0
0
0
0
0
0
0
0
0
0
0
0.186047
129
7
37
18.428571
0.790476
0.906977
0
null
0
null
0
0
null
0
0
0
null
1
null
true
0
0
null
null
null
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
0
0
0
0
0
4
a52d73617e46dee078fe3895184327cf99dbb572
241
py
Python
aspen_ssh/parser/exceptions.py
thinkwelltwd/aspen_ssh
68cfab56187b63b6e22ab96fefe4c87171f7ccce
[ "Apache-2.0" ]
1
2021-09-09T13:02:36.000Z
2021-09-09T13:02:36.000Z
aspen_ssh/parser/exceptions.py
thinkwelltwd/aspen_ssh
68cfab56187b63b6e22ab96fefe4c87171f7ccce
[ "Apache-2.0" ]
null
null
null
aspen_ssh/parser/exceptions.py
thinkwelltwd/aspen_ssh
68cfab56187b63b6e22ab96fefe4c87171f7ccce
[ "Apache-2.0" ]
null
null
null
class SSHCertificateParserError(Exception): pass class UnsupportedKeyTypeError(SSHCertificateParserError): """This key has a type which we do not know how to parse""" class InputTooShortError(SSHCertificateParserError): pass
21.909091
63
0.784232
24
241
7.875
0.791667
0
0
0
0
0
0
0
0
0
0
0
0.153527
241
10
64
24.1
0.926471
0.219917
0
0.4
0
0
0
0
0
0
0
0
0
1
0
true
0.4
0
0
0.6
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
1
0
0
1
0
0
4
a52f3df0441eb430930399dd1762ebe5a6d9c5b0
3,399
py
Python
DailyProgrammer/DP20131128C.py
DayGitH/Python-Challenges
bc32f1332a92fcc2dfa6f5ea4d95f8a8d64c3edf
[ "MIT" ]
2
2020-12-23T18:59:22.000Z
2021-04-14T13:16:09.000Z
DailyProgrammer/DP20131128C.py
DayGitH/Python-Challenges
bc32f1332a92fcc2dfa6f5ea4d95f8a8d64c3edf
[ "MIT" ]
null
null
null
DailyProgrammer/DP20131128C.py
DayGitH/Python-Challenges
bc32f1332a92fcc2dfa6f5ea4d95f8a8d64c3edf
[ "MIT" ]
null
null
null
""" [11/28/13] Challenge #137 [Intermediate / Hard] Banquet Planning https://www.reddit.com/r/dailyprogrammer/comments/1rnrs2/112813_challenge_137_intermediate_hard_banquet/ # [](#IntermediateIcon) *(Intermediate)*: Banquet Planning You and your friends are planning a big banquet, but need to figure out the order in which food will be served. Some food, like a turkey, have to be served after appetizers, but before desserts. Other foods are more simple, like a pecan pie, which can be eaten any time after the main meal. Given a list of foods and the order-relationships they have, print the banquet schedule. If a given food item cannot be placed in this schedule, write an error message for it. # Formal Inputs & Outputs ## Input Description On standard console input, you will be given two space-delimited integers, N and M. N is the number of food items, while M is the number of food-relationships. Food-items are unique single-word lower-case names with optional underscores (the '_' character), while food-relationships are two food items that are space delimited. All food-items will be listed first on their own lines, then all food-relationships will be listed on their own lines afterwards. A food-relationship is where the first item must be served before the second item. Note that in the food-relationships list, some food-item names can use the [wildcard-character](http://en.wikipedia.org/wiki/Wildcard_character) '\*'. You must support this by expanding the rule to fulfill any combination of strings that fit the wildcard. For example, using the items from Sample Input 2, the rule "turkey\* \*_pie" expands to the following four rules: turkey almond_pie turkey_stuffing almond_pie turkey pecan_pie turkey_stuffing pecan_pie A helpful way to think about the wildcard expansion is to use the phrase "any item A must be before any item B". An example would be the food-relationship "\*pie coffee", which can be read as "any pie must be before coffee". 
Some orderings may be ambiguous: you might have two desserts before coffee, but the ordering of desserts may not be explicit. In such a case, group the items together. ## Output Description Print the correct order of food-items with a preceding index, starting from 1. If there are ambiguous ordering for items, list them together on the same line as a comma-delimited array of food-items. Any items that do not have a relationship must be printed with a warning or error message. # Sample Inputs & Outputs ## Sample Input 1 3 3 salad turkey dessert salad dessert turkey dessert salad turkey ## Sample Output 1 1. salad 2. turkey 3. dessert ## Sample Input 2 8 5 turkey pecan_pie salad crab_cakes almond_pie rice coffee turkey_stuffing turkey_stuffing turkey turkey* *_pie *pie coffee salad turkey* crab_cakes salad ## Sample Output 2 1. crab_cakes 2. salad 3. turkey_stuffing 4. turkey 5. almond_pie, pecan_pie 6. coffee Warning: Rice does not have any ordering. # Author's Note: This challenge has some subtle ordering logic that might be hard to understand at first. Work through sample data 2 by hand to better understand the ordering rules before writing code. Make sure to expand all widecard rules as well. """ def main(): pass if __name__ == "__main__": main()
40.951807
119
0.75228
545
3,399
4.631193
0.394495
0.021395
0.013074
0.022187
0.041204
0
0
0
0
0
0
0.014636
0.19594
3,399
82
120
41.45122
0.908891
0.978817
0
0
0
0
0.125
0
0
0
0
0
0
1
0.25
true
0.25
0
0
0.25
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
null
0
0
0
0
0
1
1
1
0
0
0
0
0
4
a55226b3d70c510b89f069856ccaddedee71d98a
431
py
Python
src/question/migrations/0006_auto_20190215_0755.py
DevTeamSCH/vikoverflow-backend
bac0a5f8d0f18bea4d99e0d94ee322feb6a8039e
[ "MIT" ]
null
null
null
src/question/migrations/0006_auto_20190215_0755.py
DevTeamSCH/vikoverflow-backend
bac0a5f8d0f18bea4d99e0d94ee322feb6a8039e
[ "MIT" ]
24
2018-10-09T12:34:09.000Z
2022-02-10T11:01:32.000Z
src/question/migrations/0006_auto_20190215_0755.py
DevTeamSCH/vikoverflow-backend
bac0a5f8d0f18bea4d99e0d94ee322feb6a8039e
[ "MIT" ]
null
null
null
# Generated by Django 2.1.7 on 2019-02-15 07:55 from django.db import migrations class Migration(migrations.Migration): dependencies = [("question", "0005_merge_20190215_0616")] operations = [ migrations.RemoveField(model_name="answer", name="is_visible"), migrations.RemoveField(model_name="comment", name="is_visible"), migrations.RemoveField(model_name="question", name="is_visible"), ]
28.733333
73
0.707657
52
431
5.692308
0.615385
0.212838
0.263514
0.304054
0.290541
0.290541
0.290541
0
0
0
0
0.085635
0.160093
431
14
74
30.785714
0.732044
0.104408
0
0
1
0
0.216146
0.0625
0
0
0
0
0
1
0
false
0
0.125
0
0.5
0
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
a568ad6dcef13fcee64e95ac026b1a8e9f2f3483
69
py
Python
redis/__init__.py
RuiCoreSci/auth
5a0708ebc86012902e7737d87bf691ab5fd1421c
[ "MIT" ]
null
null
null
redis/__init__.py
RuiCoreSci/auth
5a0708ebc86012902e7737d87bf691ab5fd1421c
[ "MIT" ]
null
null
null
redis/__init__.py
RuiCoreSci/auth
5a0708ebc86012902e7737d87bf691ab5fd1421c
[ "MIT" ]
null
null
null
from redis.client import Redis redis = Redis() __all__ = ['redis']
11.5
30
0.695652
9
69
4.888889
0.555556
0.454545
0
0
0
0
0
0
0
0
0
0
0.173913
69
5
31
13.8
0.77193
0
0
0
0
0
0.072464
0
0
0
0
0
0
1
0
false
0
0.333333
0
0.333333
0
1
0
0
null
1
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
0
0
0
4
a56fc512de17dc2fa626e2693007022677fdbddf
70,906
py
Python
multipole-graph-neural-operator/utilities.py
vir-k01/graph-pde
f7bcf22d3f3c58b30769edfa57b86727154850d2
[ "MIT" ]
121
2020-03-13T08:33:29.000Z
2022-03-30T12:57:39.000Z
multipole-graph-neural-operator/utilities.py
vir-k01/graph-pde
f7bcf22d3f3c58b30769edfa57b86727154850d2
[ "MIT" ]
3
2020-04-26T11:52:52.000Z
2022-03-31T15:28:15.000Z
multipole-graph-neural-operator/utilities.py
vir-k01/graph-pde
f7bcf22d3f3c58b30769edfa57b86727154850d2
[ "MIT" ]
36
2020-03-13T08:33:39.000Z
2022-03-31T14:35:27.000Z
import torch import numpy as np import scipy.io import h5py import sklearn.metrics from torch_geometric.data import Data import torch.nn as nn from scipy.ndimage import gaussian_filter ################################################# # # Utilities # ################################################# device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') # reading data class MatReader(object): def __init__(self, file_path, to_torch=True, to_cuda=False, to_float=True): super(MatReader, self).__init__() self.to_torch = to_torch self.to_cuda = to_cuda self.to_float = to_float self.file_path = file_path self.data = None self.old_mat = None self._load_file() def _load_file(self): try: self.data = scipy.io.loadmat(self.file_path) self.old_mat = True except: self.data = h5py.File(self.file_path) self.old_mat = False def load_file(self, file_path): self.file_path = file_path self._load_file() def read_field(self, field): x = self.data[field] if not self.old_mat: x = x[()] x = np.transpose(x, axes=range(len(x.shape) - 1, -1, -1)) if self.to_float: x = x.astype(np.float32) if self.to_torch: x = torch.from_numpy(x) if self.to_cuda: x = x.cuda() return x def set_cuda(self, to_cuda): self.to_cuda = to_cuda def set_torch(self, to_torch): self.to_torch = to_torch def set_float(self, to_float): self.to_float = to_float # normalization, pointwise gaussian class UnitGaussianNormalizer(object): def __init__(self, x, eps=0.00001): super(UnitGaussianNormalizer, self).__init__() # x could be in shape of ntrain*n or ntrain*T*n or ntrain*n*T self.mean = torch.mean(x, 0) self.std = torch.std(x, 0) self.eps = eps def encode(self, x): x = (x - self.mean) / (self.std + self.eps) return x def decode(self, x, sample_idx=None): if sample_idx is None: std = self.std + self.eps # n mean = self.mean else: if len(self.mean.shape) == len(sample_idx[0].shape): std = self.std[sample_idx] + self.eps # batch*n mean = self.mean[sample_idx] if len(self.mean.shape) > len(sample_idx[0].shape): 
std = self.std[:,sample_idx]+ self.eps # T*batch*n mean = self.mean[:,sample_idx] # x is in shape of batch*n or T*batch*n x = (x * std) + mean return x def cuda(self): self.mean = self.mean.cuda() self.std = self.std.cuda() def cpu(self): self.mean = self.mean.cpu() self.std = self.std.cpu() # normalization, Gaussian class GaussianNormalizer(object): def __init__(self, x, eps=0.00001): super(GaussianNormalizer, self).__init__() self.mean = torch.mean(x) self.std = torch.std(x) self.eps = eps def encode(self, x): x = (x - self.mean) / (self.std + self.eps) return x def decode(self, x, sample_idx=None): x = (x * (self.std + self.eps)) + self.mean return x def cuda(self): self.mean = self.mean.cuda() self.std = self.std.cuda() def cpu(self): self.mean = self.mean.cpu() self.std = self.std.cpu() # normalization, scaling by range class RangeNormalizer(object): def __init__(self, x, low=0.0, high=1.0): super(RangeNormalizer, self).__init__() mymin = torch.min(x, 0)[0].view(-1) mymax = torch.max(x, 0)[0].view(-1) self.a = (high - low)/(mymax - mymin) self.b = -self.a*mymax + high def encode(self, x): s = x.size() x = x.view(s[0], -1) x = self.a*x + self.b x = x.view(s) return x def decode(self, x): s = x.size() x = x.view(s[0], -1) x = (x - self.b)/self.a x = x.view(s) return x #loss function with rel/abs Lp loss class LpLoss(object): def __init__(self, d=2, p=2, size_average=True, reduction=True): super(LpLoss, self).__init__() #Dimension and Lp-norm type are postive assert d > 0 and p > 0 self.d = d self.p = p self.reduction = reduction self.size_average = size_average def abs(self, x, y): num_examples = x.size()[0] #Assume uniform mesh h = 1.0 / (x.size()[1] - 1.0) all_norms = (h**(self.d/self.p))*torch.norm(x.view(num_examples,-1) - y.view(num_examples,-1), self.p, 1) if self.reduction: if self.size_average: return torch.mean(all_norms) else: return torch.sum(all_norms) return all_norms def rel(self, x, y): num_examples = x.size()[0] diff_norms = 
torch.norm(x.reshape(num_examples,-1) - y.reshape(num_examples,-1), self.p, 1) y_norms = torch.norm(y.reshape(num_examples,-1), self.p, 1) if self.reduction: if self.size_average: return torch.mean(diff_norms/y_norms) else: return torch.sum(diff_norms/y_norms) return diff_norms/y_norms def __call__(self, x, y): return self.rel(x, y) # A simple feedforward neural network class DenseNet(torch.nn.Module): def __init__(self, layers, nonlinearity, out_nonlinearity=None, normalize=False): super(DenseNet, self).__init__() self.n_layers = len(layers) - 1 assert self.n_layers >= 1 self.layers = nn.ModuleList() for j in range(self.n_layers): self.layers.append(nn.Linear(layers[j], layers[j+1])) if j != self.n_layers - 1: if normalize: self.layers.append(nn.BatchNorm1d(layers[j+1])) self.layers.append(nonlinearity()) if out_nonlinearity is not None: self.layers.append(out_nonlinearity()) def forward(self, x): for _, l in enumerate(self.layers): x = l(x) return x class DenseNet_sin(torch.nn.Module): def __init__(self, layers, nonlinearity, out_nonlinearity=None, normalize=False): super(DenseNet_sin, self).__init__() self.n_layers = len(layers) - 1 assert self.n_layers >= 1 self.layers = nn.ModuleList() for j in range(self.n_layers): self.layers.append(nn.Linear(layers[j], layers[j+1])) def forward(self, x): for j, l in enumerate(self.layers): x = l(x) if j != self.n_layers - 1: x = torch.sin(x) return x # generate graphs on square domain class SquareMeshGenerator(object): def __init__(self, real_space, mesh_size): super(SquareMeshGenerator, self).__init__() self.d = len(real_space) self.s = mesh_size[0] assert len(mesh_size) == self.d if self.d == 1: self.n = mesh_size[0] self.grid = np.linspace(real_space[0][0], real_space[0][1], self.n).reshape((self.n, 1)) else: self.n = 1 grids = [] for j in range(self.d): grids.append(np.linspace(real_space[j][0], real_space[j][1], mesh_size[j])) self.n *= mesh_size[j] self.grid = np.vstack([xx.ravel() for xx in np.meshgrid(*grids)]).T 
def ball_connectivity(self, r): pwd = sklearn.metrics.pairwise_distances(self.grid) self.edge_index = np.vstack(np.where(pwd <= r)) self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def gaussian_connectivity(self, sigma): pwd = sklearn.metrics.pairwise_distances(self.grid) rbf = np.exp(-pwd**2/sigma**2) sample = np.random.binomial(1,rbf) self.edge_index = np.vstack(np.where(sample)) self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def get_grid(self): return torch.tensor(self.grid, dtype=torch.float) def attributes(self, f=None, theta=None): if f is None: if theta is None: edge_attr = self.grid[self.edge_index.T].reshape((self.n_edges,-1)) else: edge_attr = np.zeros((self.n_edges, 2*self.d+2)) edge_attr[:,0:2*self.d] = self.grid[self.edge_index.T].reshape((self.n_edges,-1)) edge_attr[:, 2 * self.d] = theta[self.edge_index[0]] edge_attr[:, 2 * self.d +1] = theta[self.edge_index[1]] else: xy = self.grid[self.edge_index.T].reshape((self.n_edges,-1)) if theta is None: edge_attr = f(xy[:,0:self.d], xy[:,self.d:]) else: edge_attr = f(xy[:,0:self.d], xy[:,self.d:], theta[self.edge_index[0]], theta[self.edge_index[1]]) return torch.tensor(edge_attr, dtype=torch.float) def get_boundary(self): s = self.s n = self.n boundary1 = np.array(range(0, s)) boundary2 = np.array(range(n - s, n)) boundary3 = np.array(range(s, n, s)) boundary4 = np.array(range(2 * s - 1, n, s)) self.boundary = np.concatenate([boundary1, boundary2, boundary3, boundary4]) def boundary_connectivity2d(self, stride=1): boundary = self.boundary[::stride] boundary_size = len(boundary) vertice1 = np.array(range(self.n)) vertice1 = np.repeat(vertice1, boundary_size) vertice2 = np.tile(boundary, self.n) self.edge_index_boundary = np.stack([vertice2, vertice1], axis=0) self.n_edges_boundary = self.edge_index_boundary.shape[1] return torch.tensor(self.edge_index_boundary, dtype=torch.long) def attributes_boundary(self, f=None, 
theta=None): # if self.edge_index_boundary == None: # self.boundary_connectivity2d() if f is None: if theta is None: edge_attr_boundary = self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1)) else: edge_attr_boundary = np.zeros((self.n_edges_boundary, 2*self.d+2)) edge_attr_boundary[:,0:2*self.d] = self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1)) edge_attr_boundary[:, 2 * self.d] = theta[self.edge_index_boundary[0]] edge_attr_boundary[:, 2 * self.d +1] = theta[self.edge_index_boundary[1]] else: xy = self.grid[self.edge_index_boundary.T].reshape((self.n_edges_boundary,-1)) if theta is None: edge_attr_boundary = f(xy[:,0:self.d], xy[:,self.d:]) else: edge_attr_boundary = f(xy[:,0:self.d], xy[:,self.d:], theta[self.edge_index_boundary[0]], theta[self.edge_index_boundary[1]]) return torch.tensor(edge_attr_boundary, dtype=torch.float) # generate graphs with sampling class RandomMeshGenerator(object): def __init__(self, real_space, mesh_size, sample_size, attr_features=1): super(RandomMeshGenerator, self).__init__() self.d = len(real_space) self.m = sample_size self.attr_features = attr_features assert len(mesh_size) == self.d if self.d == 1: self.n = mesh_size[0] self.grid = np.linspace(real_space[0][0], real_space[0][1], self.n).reshape((self.n, 1)) else: self.n = 1 grids = [] for j in range(self.d): grids.append(np.linspace(real_space[j][0], real_space[j][1], mesh_size[j])) self.n *= mesh_size[j] self.grid = np.vstack([xx.ravel() for xx in np.meshgrid(*grids)]).T if self.m > self.n: self.m = self.n self.idx = np.array(range(self.n)) self.grid_sample = self.grid def sample(self): perm = torch.randperm(self.n) self.idx = perm[:self.m] self.grid_sample = self.grid[self.idx] return self.idx def get_grid(self): return torch.tensor(self.grid_sample, dtype=torch.float) def ball_connectivity(self, r, is_forward=False): pwd = sklearn.metrics.pairwise_distances(self.grid_sample) self.edge_index = np.vstack(np.where(pwd <= r)) 
self.n_edges = self.edge_index.shape[1] if is_forward: print(self.edge_index.shape) self.edge_index = self.edge_index[:, self.edge_index[0] >= self.edge_index[1]] print(self.edge_index.shape) self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def torus1d_connectivity(self, r): grid = self.grid_sample pwd0 = sklearn.metrics.pairwise_distances(grid, grid) grid1 = grid grid1[:,0] = grid[:,0]+1 pwd1 = sklearn.metrics.pairwise_distances(grid, grid1) PWD = np.stack([pwd0,pwd1], axis=2) pwd = np.min(PWD, axis=2) self.edge_index = np.vstack(np.where(pwd <= r)) self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def gaussian_connectivity(self, sigma): pwd = sklearn.metrics.pairwise_distances(self.grid_sample) rbf = np.exp(-pwd**2/sigma**2) sample = np.random.binomial(1,rbf) self.edge_index = np.vstack(np.where(sample)) self.n_edges = self.edge_index.shape[1] return torch.tensor(self.edge_index, dtype=torch.long) def attributes(self, f=None, theta=None): if f is None: if theta is None: edge_attr = self.grid[self.edge_index.T].reshape((self.n_edges, -1)) else: theta = theta[self.idx] edge_attr = np.zeros((self.n_edges, 2 * self.d + 2*self.attr_features)) edge_attr[:, 0:2 * self.d] = self.grid_sample[self.edge_index.T].reshape((self.n_edges, -1)) edge_attr[:, 2 * self.d : 2 * self.d + self.attr_features] = theta[self.edge_index[0]].view(-1, self.attr_features) edge_attr[:, 2 * self.d + self.attr_features: 2 * self.d + 2*self.attr_features] = theta[self.edge_index[1]].view(-1, self.attr_features) else: xy = self.grid_sample[self.edge_index.T].reshape((self.n_edges, -1)) if theta is None: edge_attr = f(xy[:, 0:self.d], xy[:, self.d:]) else: theta = theta[self.idx] edge_attr = f(xy[:, 0:self.d], xy[:, self.d:], theta[self.edge_index[0]], theta[self.edge_index[1]]) return torch.tensor(edge_attr, dtype=torch.float) # # generate two-level graph class RandomTwoMeshGenerator(object): def 
__init__(self, real_space, mesh_size, sample_size, induced_point): super(RandomTwoMeshGenerator, self).__init__() self.d = len(real_space) self.m = sample_size self.m_i = induced_point assert len(mesh_size) == self.d if self.d == 1: self.n = mesh_size[0] self.grid = np.linspace(real_space[0][0], real_space[0][1], self.n).reshape((self.n, 1)) else: self.n = 1 grids = [] for j in range(self.d): grids.append(np.linspace(real_space[j][0], real_space[j][1], mesh_size[j])) self.n *= mesh_size[j] self.grid = np.vstack([xx.ravel() for xx in np.meshgrid(*grids)]).T if self.m > self.n: self.m = self.n self.idx = np.array(range(self.n)) self.idx_i = self.idx self.idx_both = self.idx self.grid_sample = self.grid self.grid_sample_i = self.grid self.grid_sample_both = self.grid def sample(self): perm = torch.randperm(self.n) self.idx = perm[:self.m] self.idx_i = perm[self.m: self.m+self.m_i] self.idx_both = perm[: self.m+self.m_i] self.grid_sample = self.grid[self.idx] self.grid_sample_i = self.grid[self.idx_i] self.grid_sample_both = self.grid[self.idx_both] return self.idx, self.idx_i, self.idx_both def get_grid(self): return torch.tensor(self.grid_sample, dtype=torch.float), \ torch.tensor(self.grid_sample_i, dtype=torch.float), \ torch.tensor(self.grid_sample_both, dtype=torch.float) def ball_connectivity(self, r11, r12, r22): pwd = sklearn.metrics.pairwise_distances(self.grid_sample) pwd12 = sklearn.metrics.pairwise_distances(self.grid_sample, self.grid_sample_i) pwd22 = sklearn.metrics.pairwise_distances(self.grid_sample_i) self.edge_index = np.vstack(np.where(pwd <= r11)) self.edge_index_12 = np.vstack(np.where(pwd12 <= r12)) self.edge_index_12[1,:] = self.edge_index_12[1,:] + self.m self.edge_index_21 = self.edge_index_12[[1,0],:] self.edge_index_22 = np.vstack(np.where(pwd22 <= r22)) + self.m self.n_edges = self.edge_index.shape[1] self.n_edges_12 = self.edge_index_12.shape[1] self.n_edges_22 = self.edge_index_22.shape[1] return torch.tensor(self.edge_index, 
dtype=torch.long), \ torch.tensor(self.edge_index_12, dtype=torch.long), \ torch.tensor(self.edge_index_21, dtype=torch.long), \ torch.tensor(self.edge_index_22, dtype=torch.long) def attributes(self, theta=None): if theta is None: edge_attr = self.grid_sample_both[self.edge_index.T].reshape((self.n_edges, -1)) edge_attr_12 = self.grid_sample_both[self.edge_index_12.T].reshape((self.n_edges_12, -1)) edge_attr_21 = self.grid_sample_both[self.edge_index_21.T].reshape((self.n_edges_12, -1)) edge_attr_22 = self.grid_sample_both[self.edge_index_22.T].reshape((self.n_edges_22, -1)) else: theta = theta[self.idx_both] edge_attr = np.zeros((self.n_edges, 3 * self.d)) edge_attr[:, 0:2 * self.d] = self.grid_sample_both[self.edge_index.T].reshape((self.n_edges, -1)) edge_attr[:, 2 * self.d] = theta[self.edge_index[0]] edge_attr[:, 2 * self.d + 1] = theta[self.edge_index[1]] edge_attr_12 = np.zeros((self.n_edges_12, 3 * self.d)) edge_attr_12[:, 0:2 * self.d] = self.grid_sample_both[self.edge_index_12.T].reshape((self.n_edges_12, -1)) edge_attr_12[:, 2 * self.d] = theta[self.edge_index_12[0]] edge_attr_12[:, 2 * self.d + 1] = theta[self.edge_index_12[1]] edge_attr_21 = np.zeros((self.n_edges_12, 3 * self.d)) edge_attr_21[:, 0:2 * self.d] = self.grid_sample_both[self.edge_index_21.T].reshape((self.n_edges_12, -1)) edge_attr_21[:, 2 * self.d] = theta[self.edge_index_21[0]] edge_attr_21[:, 2 * self.d + 1] = theta[self.edge_index_21[1]] edge_attr_22 = np.zeros((self.n_edges_22, 3 * self.d)) edge_attr_22[:, 0:2 * self.d] = self.grid_sample_both[self.edge_index_22.T].reshape((self.n_edges_22, -1)) edge_attr_22[:, 2 * self.d] = theta[self.edge_index_22[0]] edge_attr_22[:, 2 * self.d + 1] = theta[self.edge_index_22[1]] return torch.tensor(edge_attr, dtype=torch.float), \ torch.tensor(edge_attr_12, dtype=torch.float), \ torch.tensor(edge_attr_21, dtype=torch.float), \ torch.tensor(edge_attr_22, dtype=torch.float) # generate multi-level graph class RandomMultiMeshGenerator(object): def 
__init__(self, real_space, mesh_size, level, sample_sizes): super(RandomMultiMeshGenerator, self).__init__() self.d = len(real_space) self.m = sample_sizes self.level = level assert len(sample_sizes) == level assert len(mesh_size) == self.d if self.d == 1: self.n = mesh_size[0] self.grid = np.linspace(real_space[0][0], real_space[0][1], self.n).reshape((self.n, 1)) else: self.n = 1 grids = [] for j in range(self.d): grids.append(np.linspace(real_space[j][0], real_space[j][1], mesh_size[j])) self.n *= mesh_size[j] self.grid = np.vstack([xx.ravel() for xx in np.meshgrid(*grids)]).T self.idx = [] self.idx_all = None self.grid_sample = [] self.grid_sample_all = None self.edge_index = [] self.edge_index_down = [] self.edge_index_up = [] self.edge_attr = [] self.edge_attr_down = [] self.edge_attr_up = [] self.n_edges_inner = [] self.n_edges_inter = [] def sample(self): self.idx = [] self.grid_sample = [] perm = torch.randperm(self.n) index = 0 for l in range(self.level): self.idx.append(perm[index: index+self.m[l]]) self.grid_sample.append(self.grid[self.idx[l]]) index = index+self.m[l] self.idx_all = perm[:index] self.grid_sample_all = self.grid[self.idx_all] return self.idx, self.idx_all def get_grid(self): grid_out = [] for grid in self.grid_sample: grid_out.append(torch.tensor(grid, dtype=torch.float)) return grid_out, torch.tensor(self.grid_sample_all, dtype=torch.float) def ball_connectivity(self, radius_inner, radius_inter): assert len(radius_inner) == self.level assert len(radius_inter) == self.level - 1 self.edge_index = [] self.edge_index_down = [] self.edge_index_up = [] self.n_edges_inner = [] self.n_edges_inter = [] edge_index_out = [] edge_index_down_out = [] edge_index_up_out = [] index = 0 for l in range(self.level): pwd = sklearn.metrics.pairwise_distances(self.grid_sample[l]) edge_index = np.vstack(np.where(pwd <= radius_inner[l])) + index self.edge_index.append(edge_index) edge_index_out.append(torch.tensor(edge_index, dtype=torch.long)) 
self.n_edges_inner.append(edge_index.shape[1]) index = index + self.grid_sample[l].shape[0] index = 0 for l in range(self.level-1): pwd = sklearn.metrics.pairwise_distances(self.grid_sample[l], self.grid_sample[l+1]) edge_index = np.vstack(np.where(pwd <= radius_inter[l])) + index edge_index[1, :] = edge_index[1, :] + self.grid_sample[l].shape[0] self.edge_index_down.append(edge_index) edge_index_down_out.append(torch.tensor(edge_index, dtype=torch.long)) self.edge_index_up.append(edge_index[[1,0],:]) edge_index_up_out.append(torch.tensor(edge_index[[1,0],:], dtype=torch.long)) self.n_edges_inter.append(edge_index.shape[1]) index = index + self.grid_sample[l].shape[0] edge_index_out = torch.cat(edge_index_out, dim=1) edge_index_down_out = torch.cat(edge_index_down_out, dim=1) edge_index_up_out = torch.cat(edge_index_up_out, dim=1) return edge_index_out, edge_index_down_out, edge_index_up_out def get_edge_index_range(self): # in order to use graph network's data structure, # the edge index shall be stored as tensor instead of list # we concatenate the edge index list and label the range of each level edge_index_range = torch.zeros((self.level,2), dtype=torch.long) edge_index_down_range = torch.zeros((self.level-1,2), dtype=torch.long) edge_index_up_range = torch.zeros((self.level-1,2), dtype=torch.long) n_edge_index = 0 for l in range(self.level): edge_index_range[l, 0] = n_edge_index n_edge_index = n_edge_index + self.edge_index[l].shape[1] edge_index_range[l, 1] = n_edge_index n_edge_index = 0 for l in range(self.level-1): edge_index_down_range[l, 0] = n_edge_index edge_index_up_range[l, 0] = n_edge_index n_edge_index = n_edge_index + self.edge_index_down[l].shape[1] edge_index_down_range[l, 1] = n_edge_index edge_index_up_range[l, 1] = n_edge_index return edge_index_range, edge_index_down_range, edge_index_up_range def attributes(self, theta=None): self.edge_attr = [] self.edge_attr_down = [] self.edge_attr_up = [] if theta is None: for l in range(self.level): 
edge_attr = self.grid_sample_all[self.edge_index[l].T].reshape((self.n_edges_inner[l], 2*self.d)) self.edge_attr.append(torch.tensor(edge_attr)) for l in range(self.level - 1): edge_attr_down = self.grid_sample_all[self.edge_index_down[l].T].reshape((self.n_edges_inter[l], 2*self.d)) edge_attr_up = self.grid_sample_all[self.edge_index_up[l].T].reshape((self.n_edges_inter[l], 2*self.d)) self.edge_attr_down.append(torch.tensor(edge_attr_down)) self.edge_attr_up.append(torch.tensor(edge_attr_up)) else: theta = theta[self.idx_all] for l in range(self.level): edge_attr = np.zeros((self.n_edges_inner[l], 2 * self.d + 2)) edge_attr[:, 0:2 * self.d] = self.grid_sample_all[self.edge_index[l].T].reshape( (self.n_edges_inner[l], 2 * self.d)) edge_attr[:, 2 * self.d] = theta[self.edge_index[l][0]] edge_attr[:, 2 * self.d + 1] = theta[self.edge_index[l][1]] self.edge_attr.append(torch.tensor(edge_attr, dtype=torch.float)) for l in range(self.level - 1): edge_attr_down = np.zeros((self.n_edges_inter[l], 2 * self.d + 2)) edge_attr_up = np.zeros((self.n_edges_inter[l], 2 * self.d + 2)) edge_attr_down[:, 0:2 * self.d] = self.grid_sample_all[self.edge_index_down[l].T].reshape( (self.n_edges_inter[l], 2 * self.d)) edge_attr_down[:, 2 * self.d] = theta[self.edge_index_down[l][0]] edge_attr_down[:, 2 * self.d + 1] = theta[self.edge_index_down[l][1]] self.edge_attr_down.append(torch.tensor(edge_attr_down, dtype=torch.float)) edge_attr_up[:, 0:2 * self.d] = self.grid_sample_all[self.edge_index_up[l].T].reshape( (self.n_edges_inter[l], 2 * self.d)) edge_attr_up[:, 2 * self.d] = theta[self.edge_index_up[l][0]] edge_attr_up[:, 2 * self.d + 1] = theta[self.edge_index_up[l][1]] self.edge_attr_up.append(torch.tensor(edge_attr_up, dtype=torch.float)) edge_attr_out = torch.cat(self.edge_attr, dim=0) edge_attr_down_out = torch.cat(self.edge_attr_down, dim=0) edge_attr_up_out = torch.cat(self.edge_attr_up, dim=0) return edge_attr_out, edge_attr_down_out, edge_attr_up_out # generate graph, with 
class RandomGridSplitter(object):
    def __init__(self, grid, resolution, d=2, m=200, l=1, radius=0.25):
        super(RandomGridSplitter, self).__init__()
        self.grid = grid
        self.resolution = resolution
        self.n = resolution**d
        self.d = d
        self.m = m
        self.l = l
        self.radius = radius
        assert self.n % self.m == 0
        self.num = self.n // self.m  # number of sub-grid

    def get_data(self, theta, edge_features=1):
        # Build l random partitions of the grid into num sub-graphs of m nodes.
        data = []
        for i in range(self.l):
            perm = torch.randperm(self.n)
            perm = perm.reshape(self.num, self.m)
            for j in range(self.num):
                idx = perm[j, :].reshape(-1,)
                grid_sample = self.grid.reshape(self.n, -1)[idx]
                theta_sample = theta.reshape(self.n, -1)[idx]
                X = torch.cat([grid_sample, theta_sample], dim=1)
                pwd = sklearn.metrics.pairwise_distances(grid_sample)
                edge_index = np.vstack(np.where(pwd <= self.radius))
                n_edges = edge_index.shape[1]
                edge_index = torch.tensor(edge_index, dtype=torch.long)
                if edge_features == 0:
                    edge_attr = grid_sample[edge_index.T].reshape(n_edges, -1)
                else:
                    edge_attr = np.zeros((n_edges, 2*self.d+2))
                    a = theta_sample[:, 0]
                    edge_attr[:, :2*self.d] = grid_sample[edge_index.T].reshape(n_edges, -1)
                    edge_attr[:, 2*self.d] = a[edge_index[0]]
                    edge_attr[:, 2*self.d+1] = a[edge_index[1]]
                    edge_attr = torch.tensor(edge_attr, dtype=torch.float)
                data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=idx))
        print('test', len(data), X.shape, edge_index.shape, edge_attr.shape)
        return data

    def assemble(self, pred, split_idx, batch_size2, sigma=1, cuda=False):
        # Scatter per-subgraph predictions back to the full grid and average
        # over the l partitions.
        assert len(pred) == len(split_idx)
        assert len(pred) == self.num * self.l // batch_size2
        out = torch.zeros(self.n, )
        if cuda:
            out = out.cuda()
        for i in range(len(pred)):
            pred_i = pred[i].reshape(batch_size2, self.m)
            split_idx_i = split_idx[i].reshape(batch_size2, self.m)
            for j in range(batch_size2):
                pred_ij = pred_i[j, :].reshape(-1,)
                idx = split_idx_i[j, :].reshape(-1,)
                out[idx] = out[idx] + pred_ij
        out = out / self.l
        # out = gaussian_filter(out, sigma=sigma, mode='constant', cval=0)
        # out = torch.tensor(out, dtype=torch.float)
        return out.reshape(-1,)


# generate multi-level graph, with split and assemble
class RandomMultiMeshSplitter(object):
    def __init__(self, real_space, mesh_size, level, sample_sizes):
        super(RandomMultiMeshSplitter, self).__init__()
        self.d = len(real_space)
        self.ms = sample_sizes
        self.m = sample_sizes[0]
        self.level = level
        assert len(sample_sizes) == level
        assert len(mesh_size) == self.d
        if self.d == 1:
            self.n = mesh_size[0]
            self.grid = np.linspace(real_space[0][0], real_space[0][1], self.n).reshape((self.n, 1))
        else:
            self.n = 1
            grids = []
            for j in range(self.d):
                grids.append(np.linspace(real_space[j][0], real_space[j][1], mesh_size[j]))
                self.n *= mesh_size[j]
            self.grid = np.vstack([xx.ravel() for xx in np.meshgrid(*grids)]).T
        self.splits = self.n // self.m  # number of sub-grid
        if self.splits * self.m < self.n:
            self.splits = self.splits + 1
        print('n:', self.n, ' m:', self.m, ' number of splits:', self.splits)
        self.perm = None
        self.idx = []
        self.idx_all = None
        self.grid_sample = []
        self.grid_sample_all = None
        self.edge_index = []
        self.edge_index_down = []
        self.edge_index_up = []
        self.edge_attr = []
        self.edge_attr_down = []
        self.edge_attr_up = []
        self.n_edges_inner = []
        self.n_edges_inter = []

    def sample(self, new_sample=True, index0=0):
        # Take per-level windows of a (possibly cached) permutation, wrapping
        # around modulo n so successive calls cover the whole grid.
        self.idx = []
        self.grid_sample = []
        if (new_sample) or (self.perm is None):
            self.perm = torch.randperm(self.n)
        index = index0
        for l in range(self.level):
            index = index % self.n
            index_end = (index + self.ms[l]) % self.n
            if index < index_end:
                idx = self.perm[index: index_end]
            else:
                idx = torch.cat((self.perm[index:], self.perm[: index_end]), dim=0)
            self.idx.append(idx)
            self.grid_sample.append(self.grid[idx])
            index = index_end
        if index0 < index_end:
            idx_all = self.perm[index0: index_end]
        else:
            idx_all = torch.cat((self.perm[index0:], self.perm[: index_end]), dim=0)
        self.idx_all = idx_all
        self.grid_sample_all = self.grid[self.idx_all]
        return self.idx, self.idx_all
self.grid_sample: grid_out.append(torch.tensor(grid, dtype=torch.float)) return grid_out, torch.tensor(self.grid_sample_all, dtype=torch.float) def ball_connectivity(self, radius_inner, radius_inter): assert len(radius_inner) == self.level assert len(radius_inter) == self.level - 1 self.edge_index = [] self.edge_index_down = [] self.edge_index_up = [] self.n_edges_inner = [] self.n_edges_inter = [] edge_index_out = [] edge_index_down_out = [] edge_index_up_out = [] index = 0 for l in range(self.level): pwd = sklearn.metrics.pairwise_distances(self.grid_sample[l]) edge_index = np.vstack(np.where(pwd <= radius_inner[l])) + index self.edge_index.append(edge_index) edge_index_out.append(torch.tensor(edge_index, dtype=torch.long)) self.n_edges_inner.append(edge_index.shape[1]) index = index + self.grid_sample[l].shape[0] index = 0 for l in range(self.level-1): pwd = sklearn.metrics.pairwise_distances(self.grid_sample[l], self.grid_sample[l+1]) edge_index = np.vstack(np.where(pwd <= radius_inter[l])) + index edge_index[1, :] = edge_index[1, :] + self.grid_sample[l].shape[0] self.edge_index_down.append(edge_index) edge_index_down_out.append(torch.tensor(edge_index, dtype=torch.long)) self.edge_index_up.append(edge_index[[1,0],:]) edge_index_up_out.append(torch.tensor(edge_index[[1,0],:], dtype=torch.long)) self.n_edges_inter.append(edge_index.shape[1]) index = index + self.grid_sample[l].shape[0] edge_index_out = torch.cat(edge_index_out, dim=1) edge_index_down_out = torch.cat(edge_index_down_out, dim=1) edge_index_up_out = torch.cat(edge_index_up_out, dim=1) return edge_index_out, edge_index_down_out, edge_index_up_out def get_edge_index_range(self): # in order to use graph network's data structure, # the edge index shall be stored as tensor instead of list # we concatenate the edge index list and label the range of each level edge_index_range = torch.zeros((self.level,2), dtype=torch.long) edge_index_down_range = torch.zeros((self.level-1,2), dtype=torch.long) 
edge_index_up_range = torch.zeros((self.level-1,2), dtype=torch.long) n_edge_index = 0 for l in range(self.level): edge_index_range[l, 0] = n_edge_index n_edge_index = n_edge_index + self.edge_index[l].shape[1] edge_index_range[l, 1] = n_edge_index n_edge_index = 0 for l in range(self.level-1): edge_index_down_range[l, 0] = n_edge_index edge_index_up_range[l, 0] = n_edge_index n_edge_index = n_edge_index + self.edge_index_down[l].shape[1] edge_index_down_range[l, 1] = n_edge_index edge_index_up_range[l, 1] = n_edge_index return edge_index_range, edge_index_down_range, edge_index_up_range def attributes(self, theta=None): self.edge_attr = [] self.edge_attr_down = [] self.edge_attr_up = [] if theta is None: for l in range(self.level): edge_attr = self.grid_sample_all[self.edge_index[l].T].reshape((self.n_edges_inner[l], 2*self.d)) self.edge_attr.append(torch.tensor(edge_attr)) for l in range(self.level - 1): edge_attr_down = self.grid_sample_all[self.edge_index_down[l].T].reshape((self.n_edges_inter[l], 2*self.d)) edge_attr_up = self.grid_sample_all[self.edge_index_up[l].T].reshape((self.n_edges_inter[l], 2*self.d)) self.edge_attr_down.append(torch.tensor(edge_attr_down)) self.edge_attr_up.append(torch.tensor(edge_attr_up)) else: theta = theta[self.idx_all] for l in range(self.level): edge_attr = np.zeros((self.n_edges_inner[l], 2 * self.d + 2)) edge_attr[:, 0:2 * self.d] = self.grid_sample_all[self.edge_index[l].T].reshape( (self.n_edges_inner[l], 2 * self.d)) edge_attr[:, 2 * self.d] = theta[self.edge_index[l][0]] edge_attr[:, 2 * self.d + 1] = theta[self.edge_index[l][1]] self.edge_attr.append(torch.tensor(edge_attr, dtype=torch.float)) for l in range(self.level - 1): edge_attr_down = np.zeros((self.n_edges_inter[l], 2 * self.d + 2)) edge_attr_up = np.zeros((self.n_edges_inter[l], 2 * self.d + 2)) edge_attr_down[:, 0:2 * self.d] = self.grid_sample_all[self.edge_index_down[l].T].reshape( (self.n_edges_inter[l], 2 * self.d)) edge_attr_down[:, 2 * self.d] = 
theta[self.edge_index_down[l][0]] edge_attr_down[:, 2 * self.d + 1] = theta[self.edge_index_down[l][1]] self.edge_attr_down.append(torch.tensor(edge_attr_down, dtype=torch.float)) edge_attr_up[:, 0:2 * self.d] = self.grid_sample_all[self.edge_index_up[l].T].reshape( (self.n_edges_inter[l], 2 * self.d)) edge_attr_up[:, 2 * self.d] = theta[self.edge_index_up[l][0]] edge_attr_up[:, 2 * self.d + 1] = theta[self.edge_index_up[l][1]] self.edge_attr_up.append(torch.tensor(edge_attr_up, dtype=torch.float)) edge_attr_out = torch.cat(self.edge_attr, dim=0) edge_attr_down_out = torch.cat(self.edge_attr_down, dim=0) edge_attr_up_out = torch.cat(self.edge_attr_up, dim=0) return edge_attr_out, edge_attr_down_out, edge_attr_up_out def splitter(self, radius_inner, radius_inter, theta_a, theta_all): # give a test mesh, generate a list of data data = [] index = 0 for i in range(self.splits): if i==0: idx, idx_all = self.sample(new_sample=True, index0=index) else: idx, idx_all = self.sample(new_sample=False, index0=index) index = (index + self.m) % self.n grid, grid_all = self.get_grid() edge_index, edge_index_down, edge_index_up = self.ball_connectivity(radius_inner, radius_inter) edge_index_range, edge_index_down_range, edge_index_up_range = self.get_edge_index_range() edge_attr, edge_attr_down, edge_attr_up = self.attributes(theta=theta_a) x = torch.cat([grid_all, theta_all[idx_all,:] ], dim=1) data.append(Data(x=x, edge_index_mid=edge_index, edge_index_down=edge_index_down, edge_index_up=edge_index_up, edge_index_range=edge_index_range, edge_index_down_range=edge_index_down_range, edge_index_up_range=edge_index_up_range, edge_attr_mid=edge_attr, edge_attr_down=edge_attr_down, edge_attr_up=edge_attr_up, sample_idx=idx[0])) return data def assembler(self, out_list, sample_idx_list, is_cuda=False): assert len(out_list) == self.splits if is_cuda: pred = torch.zeros(self.n, ).cuda() else: pred = torch.zeros(self.n, ) for i in range(self.splits): pred[sample_idx_list[i]] = 
out_list[i].reshape(-1) return pred # generate graph, with split and assemble with downsample class DownsampleGridSplitter(object): def __init__(self, grid, resolution, r, m=100, radius=0.15, edge_features=1): super(DownsampleGridSplitter, self).__init__() # instead of randomly sample sub-grids, here we downsample sub-grids self.grid = grid.reshape(resolution, resolution,2) # self.theta = theta.reshape(resolution, resolution,-1) # self.y = y.reshape(resolution, resolution,1) self.resolution = resolution if resolution%2==1: self.s = int(((resolution - 1)/r) + 1) else: self.s = int(resolution/r) self.r = r self.n = resolution**2 self.m = m self.radius = radius self.edge_features = edge_features self.index = torch.tensor(range(self.n), dtype=torch.long).reshape(self.resolution, self.resolution) def ball_connectivity(self, grid): pwd = sklearn.metrics.pairwise_distances(grid) edge_index = np.vstack(np.where(pwd <= self.radius)) n_edges = edge_index.shape[1] return torch.tensor(edge_index, dtype=torch.long), n_edges def get_data(self, theta): theta_d = theta.shape[1] theta = theta.reshape(self.resolution, self.resolution, theta_d) data = [] for x in range(self.r): for y in range(self.r): grid_sub = self.grid[x::self.r, y::self.r,:].reshape(-1,2) theta_sub = theta[x::self.r, y::self.r,:].reshape(-1,theta_d) perm = torch.randperm(self.n) m = self.m - grid_sub.shape[0] idx = perm[:m] grid_sample = self.grid.reshape(self.n,-1)[idx] theta_sample = theta.reshape(self.n,-1)[idx] grid_split = torch.cat([grid_sub, grid_sample],dim=0) theta_split = torch.cat([theta_sub, theta_sample],dim=0) X = torch.cat([grid_split,theta_split],dim=1) edge_index, n_edges = self.ball_connectivity(grid_split) edge_attr = np.zeros((n_edges, 4+self.edge_features*2)) a = theta_split[:, :self.edge_features] edge_attr[:, :4] = grid_split[edge_index.T].reshape(n_edges, -1) edge_attr[:, 4:4 + self.edge_features] = a[edge_index[0]] edge_attr[:, 4 + self.edge_features: 4 + self.edge_features * 2] = 
a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x,y],dtype=torch.long).reshape(1,2) data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx)) print('test', len(data), X.shape, edge_index.shape, edge_attr.shape) return data def sample(self, theta, Y): theta_d = theta.shape[1] theta = theta.reshape(self.resolution, self.resolution, theta_d) Y = Y.reshape(self.resolution, self.resolution) x = torch.randint(0,self.r,(1,)) y = torch.randint(0,self.r,(1,)) grid_sub = self.grid[x::self.r, y::self.r, :].reshape(-1, 2) theta_sub = theta[x::self.r, y::self.r, :].reshape(-1, theta_d) Y_sub = Y[x::self.r, y::self.r].reshape(-1,) index_sub = self.index[x::self.r, y::self.r].reshape(-1,) n_sub = Y_sub.shape[0] if self.m >= n_sub: m = self.m - n_sub perm = torch.randperm(self.n) idx = perm[:m] grid_sample = self.grid.reshape(self.n, -1)[idx] theta_sample = theta.reshape(self.n, -1)[idx] Y_sample = Y.reshape(self.n, )[idx] grid_split = torch.cat([grid_sub, grid_sample], dim=0) theta_split = torch.cat([theta_sub, theta_sample], dim=0) Y_split = torch.cat([Y_sub, Y_sample], dim=0).reshape(-1,) index_split = torch.cat([index_sub, idx], dim=0).reshape(-1,) X = torch.cat([grid_split, theta_split], dim=1) else: grid_split = grid_sub theta_split = theta_sub Y_split = Y_sub.reshape(-1,) index_split = index_sub.reshape(-1,) X = torch.cat([grid_split, theta_split], dim=1) edge_index, n_edges = self.ball_connectivity(grid_split) edge_attr = np.zeros((n_edges, 4+self.edge_features*2)) a = theta_split[:, :self.edge_features] edge_attr[:, :4] = grid_split[edge_index.T].reshape(n_edges, -1) edge_attr[:, 4:4+self.edge_features] = a[edge_index[0]] edge_attr[:, 4+self.edge_features: 4+self.edge_features*2] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x, y], dtype=torch.long).reshape(1, 2) data = Data(x=X, y=Y_split, edge_index=edge_index, edge_attr=edge_attr, 
split_idx=split_idx, sample_idx=index_split) print('train', X.shape, Y_split.shape, edge_index.shape, edge_attr.shape, index_split.shape) return data def assemble(self, pred, split_idx, batch_size2, sigma=1): assert len(pred) == len(split_idx) assert len(pred) == self.r**2 // batch_size2 out = torch.zeros((self.resolution,self.resolution)) for i in range(len(pred)): pred_i = pred[i].reshape(batch_size2, self.m) split_idx_i = split_idx[i] for j in range(batch_size2): pred_ij = pred_i[j,:] x, y = split_idx_i[j] if self.resolution%2==1: if x==0: nx = self.s else: nx = self.s-1 if y==0: ny = self.s else: ny = self.s-1 else: nx = self.s ny = self.s # pred_ij = pred_i[idx : idx + nx * ny] out[x::self.r, y::self.r] = pred_ij[:nx * ny].reshape(nx,ny) out = gaussian_filter(out, sigma=sigma, mode='constant', cval=0) out = torch.tensor(out, dtype=torch.float) return out.reshape(-1,) # generate graph on Torus, with split and assemble class TorusGridSplitter(object): def __init__(self, grid, resolution, r, m=100, radius=0.15, T=None, edge_features=1, ): super(TorusGridSplitter, self).__init__() self.grid = grid.reshape(resolution, resolution,2) # self.theta = theta.reshape(resolution, resolution,-1) # self.y = y.reshape(resolution, resolution,1) self.resolution = resolution if resolution%2==1: self.s = int(((resolution - 1)/r) + 1) else: self.s = int(resolution/r) self.r = r self.n = resolution**2 self.m = m self.T = T self.radius = radius self.edge_features = edge_features self.index = torch.tensor(range(self.n), dtype=torch.long).reshape(self.resolution, self.resolution) def pairwise_difference(self,grid1, grid2): n = grid1.shape[0] x1 = grid1[:,0] y1 = grid1[:,1] x2 = grid2[:,0] y2 = grid2[:,1] X1 = np.tile(x1.reshape(n, 1), [1, n]) X2 = np.tile(x2.reshape(1, n), [n, 1]) X_diff = X1 - X2 Y1 = np.tile(y1.reshape(n, 1), [1, n]) Y2 = np.tile(y2.reshape(1, n), [n, 1]) Y_diff = Y1 - Y2 return X_diff, Y_diff def torus_connectivity(self, grid): pwd0 = 
sklearn.metrics.pairwise_distances(grid, grid) X_diff0, Y_diff0 = self.pairwise_difference(grid, grid) grid1 = grid grid1[:,0] = grid[:,0]+1 pwd1 = sklearn.metrics.pairwise_distances(grid, grid1) X_diff1, Y_diff1 = self.pairwise_difference(grid, grid1) grid2 = grid grid2[:, 1] = grid[:, 1] + 1 pwd2 = sklearn.metrics.pairwise_distances(grid, grid2) X_diff2, Y_diff2 = self.pairwise_difference(grid, grid2) grid3 = grid grid3[:, :] = grid[:, :] + 1 pwd3 = sklearn.metrics.pairwise_distances(grid, grid3) X_diff3, Y_diff3 = self.pairwise_difference(grid, grid3) grid4 = grid grid4[:, 0] = grid[:, 0] + 1 grid4[:, 1] = grid[:, 1] - 1 pwd4 = sklearn.metrics.pairwise_distances(grid, grid4) X_diff4, Y_diff4 = self.pairwise_difference(grid, grid4) PWD = np.stack([pwd0,pwd1,pwd2,pwd3,pwd4], axis=2) X_DIFF = np.stack([X_diff0,X_diff1,X_diff2,X_diff3,X_diff4], axis=2) Y_DIFF = np.stack([Y_diff0, Y_diff1, Y_diff2, Y_diff3, Y_diff4], axis=2) pwd = np.min(PWD, axis=2) pwd_index = np.argmin(PWD, axis=2) edge_index = np.vstack(np.where(pwd <= self.radius)) pwd_index = pwd_index[np.where(pwd <= self.radius)] PWD_index = (np.where(pwd <= self.radius)[0], np.where(pwd <= self.radius)[1], pwd_index) distance = PWD[PWD_index] X_difference = X_DIFF[PWD_index] Y_difference = Y_DIFF[PWD_index] n_edges = edge_index.shape[1] return torch.tensor(edge_index, dtype=torch.long), n_edges, distance, X_difference, Y_difference def get_data(self, theta, params=None): theta_d = theta.shape[1] theta = theta.reshape(self.resolution, self.resolution, theta_d) data = [] for x in range(self.r): for y in range(self.r): grid_sub = self.grid[x::self.r, y::self.r,:].reshape(-1,2) theta_sub = theta[x::self.r, y::self.r,:].reshape(-1,theta_d) perm = torch.randperm(self.n) m = self.m - grid_sub.shape[0] idx = perm[:m] grid_sample = self.grid.reshape(self.n,-1)[idx] theta_sample = theta.reshape(self.n,-1)[idx] grid_split = torch.cat([grid_sub, grid_sample],dim=0) theta_split = torch.cat([theta_sub, 
theta_sample],dim=0) X = torch.cat([grid_split,theta_split],dim=1) edge_index, n_edges, distance, X_difference, Y_difference = self.torus_connectivity(grid_split) edge_attr = np.zeros((n_edges, 3+self.edge_features*2)) a = theta_split[:, :self.edge_features] edge_attr[:, 0] = X_difference.reshape(n_edges, ) edge_attr[:, 1] = Y_difference.reshape(n_edges, ) edge_attr[:, 2] = distance.reshape(n_edges, ) edge_attr[:, 3:3 + self.edge_features] = a[edge_index[0]] edge_attr[:, 3 + self.edge_features: 4 + self.edge_features * 2] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x,y],dtype=torch.long).reshape(1,2) if params==None: data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx)) else: data.append(Data(x=X, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, params=params)) print('test', len(data), X.shape, edge_index.shape, edge_attr.shape) return data def sample(self, theta, Y): theta_d = theta.shape[1] theta = theta.reshape(self.resolution, self.resolution, theta_d) Y = Y.reshape(self.resolution, self.resolution) x = torch.randint(0,self.r,(1,)) y = torch.randint(0,self.r,(1,)) grid_sub = self.grid[x::self.r, y::self.r, :].reshape(-1, 2) theta_sub = theta[x::self.r, y::self.r, :].reshape(-1, theta_d) Y_sub = Y[x::self.r, y::self.r].reshape(-1,) index_sub = self.index[x::self.r, y::self.r].reshape(-1,) n_sub = Y_sub.shape[0] if self.m >= n_sub: m = self.m - n_sub perm = torch.randperm(self.n) idx = perm[:m] grid_sample = self.grid.reshape(self.n, -1)[idx] theta_sample = theta.reshape(self.n, -1)[idx] Y_sample = Y.reshape(self.n, )[idx] grid_split = torch.cat([grid_sub, grid_sample], dim=0) theta_split = torch.cat([theta_sub, theta_sample], dim=0) Y_split = torch.cat([Y_sub, Y_sample], dim=0).reshape(-1,) index_split = torch.cat([index_sub, idx], dim=0).reshape(-1,) X = torch.cat([grid_split, theta_split], dim=1) else: grid_split = grid_sub theta_split = theta_sub Y_split 
= Y_sub.reshape(-1,) index_split = index_sub.reshape(-1,) X = torch.cat([grid_split, theta_split], dim=1) edge_index, n_edges, distance, X_difference, Y_difference = self.torus_connectivity(grid_split) edge_attr = np.zeros((n_edges, 3+self.edge_features*2)) a = theta_split[:, :self.edge_features] edge_attr[:, 0] = X_difference.reshape(n_edges, ) edge_attr[:, 1] = Y_difference.reshape(n_edges, ) edge_attr[:, 2] = distance.reshape(n_edges, ) edge_attr[:, 3:3+self.edge_features] = a[edge_index[0]] edge_attr[:, 3+self.edge_features: 4+self.edge_features*2] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x, y], dtype=torch.long).reshape(1, 2) data = Data(x=X, y=Y_split, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, sample_idx=index_split) print('train', X.shape, Y_split.shape, edge_index.shape, edge_attr.shape, index_split.shape) return data def sampleT(self, theta, Y, params=None): theta_d = theta.shape[1] theta = theta.reshape(self.resolution, self.resolution, theta_d) Y = Y.reshape(self.T, self.resolution, self.resolution) x = torch.randint(0, self.r, (1,)) y = torch.randint(0, self.r, (1,)) grid_sub = self.grid[x::self.r, y::self.r, :].reshape(-1, 2) theta_sub = theta[x::self.r, y::self.r, :].reshape(-1, theta_d) Y_sub = Y[:,x::self.r, y::self.r].reshape(self.T,-1) index_sub = self.index[x::self.r, y::self.r].reshape(-1, ) n_sub = Y_sub.shape[1] if self.m >= n_sub: m = self.m - n_sub perm = torch.randperm(self.n) idx = perm[:m] grid_sample = self.grid.reshape(self.n, -1)[idx] theta_sample = theta.reshape(self.n, -1)[idx] Y_sample = Y.reshape(self.T, self.n)[:,idx] grid_split = torch.cat([grid_sub, grid_sample], dim=0) theta_split = torch.cat([theta_sub, theta_sample], dim=0) Y_split = torch.cat([Y_sub, Y_sample], dim=1).reshape(self.T,-1) index_split = torch.cat([index_sub, idx], dim=0).reshape(-1, ) X = torch.cat([grid_split, theta_split], dim=1) else: grid_split = grid_sub theta_split = 
theta_sub Y_split = Y_sub.reshape(self.T, -1) index_split = index_sub.reshape(-1, ) X = torch.cat([grid_split, theta_split], dim=1) edge_index, n_edges, distance, X_difference, Y_difference = self.torus_connectivity(grid_split) edge_attr = np.zeros((n_edges, 3 + self.edge_features * 2)) a = theta_split[:, :self.edge_features] edge_attr[:, 0] = X_difference.reshape(n_edges, ) edge_attr[:, 1] = Y_difference.reshape(n_edges, ) edge_attr[:, 2] = distance.reshape(n_edges, ) edge_attr[:, 3:3 + self.edge_features] = a[edge_index[0]] edge_attr[:, 3 + self.edge_features: 4 + self.edge_features * 2] = a[edge_index[1]] edge_attr = torch.tensor(edge_attr, dtype=torch.float) split_idx = torch.tensor([x, y], dtype=torch.long).reshape(1, 2) if params==None: data = Data(x=X, y=Y_split, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, sample_idx=index_split) else: data = Data(x=X, y=Y_split, edge_index=edge_index, edge_attr=edge_attr, split_idx=split_idx, sample_idx=index_split, params=params) print('train', X.shape, Y_split.shape, edge_index.shape, edge_attr.shape, index_split.shape) return data def assemble(self, pred, split_idx, batch_size2, sigma=1): assert len(pred) == len(split_idx) assert len(pred) == self.r**2 // batch_size2 out = torch.zeros((self.resolution,self.resolution)) for i in range(len(pred)): pred_i = pred[i].reshape(batch_size2, self.m) split_idx_i = split_idx[i] for j in range(batch_size2): pred_ij = pred_i[j,:] x, y = split_idx_i[j] if self.resolution%2==1: if x==0: nx = self.s else: nx = self.s-1 if y==0: ny = self.s else: ny = self.s-1 else: nx = self.s ny = self.s # pred_ij = pred_i[idx : idx + nx * ny] out[x::self.r, y::self.r] = pred_ij[:nx * ny].reshape(nx,ny) out = gaussian_filter(out, sigma=sigma, mode='wrap') out = torch.tensor(out, dtype=torch.float) return out.reshape(-1,) def assembleT(self, pred, split_idx, batch_size2, sigma=1): # pred is a list (batches) of list (time seq) assert len(pred) == len(split_idx) assert len(pred[0]) == 
self.T assert len(pred) == self.r**2 // batch_size2 out = torch.zeros((self.T, self.resolution,self.resolution)) for t in range(self.T): for i in range(len(pred)): pred_i = pred[i][t].reshape(batch_size2, self.m) split_idx_i = split_idx[i] for j in range(batch_size2): pred_ij = pred_i[j,:] x, y = split_idx_i[j] if self.resolution%2==1: if x==0: nx = self.s else: nx = self.s-1 if y==0: ny = self.s else: ny = self.s-1 else: nx = self.s ny = self.s # pred_ij = pred_i[idx : idx + nx * ny] out[t, x::self.r, y::self.r] = pred_ij[:nx * ny].reshape(nx,ny) out = gaussian_filter(out, sigma=sigma, mode='wrap') out = torch.tensor(out, dtype=torch.float) return out.reshape(self.T,self.n) def downsample(data, grid_size, l): data = data.reshape(-1, grid_size, grid_size) data = data[:, ::l, ::l] data = data.reshape(-1, (grid_size // l) ** 2) return data def simple_grid(n_x, n_y): xs = np.linspace(0.0, 1.0, n_x) ys = np.linspace(0.0, 1.0, n_y) # xs = np.array(range(n_x)) # ys = np.array(range(n_y)) grid = np.vstack([xx.ravel() for xx in np.meshgrid(xs, ys)]).T edge_index = [] edge_attr = [] for y in range(n_y): for x in range(n_x): i = y * n_x + x if (x != n_x - 1): edge_index.append((i, i + 1)) edge_attr.append((1, 0, 0)) edge_index.append((i + 1, i)) edge_attr.append((-1, 0, 0)) if (y != n_y - 1): edge_index.append((i, i + n_x)) edge_attr.append((0, 1, 0)) edge_index.append((i + n_x, i)) edge_attr.append((0, -1, 0)) X = torch.tensor(grid, dtype=torch.float) # Exact = torch.tensor(Exact, dtype=torch.float).view(-1) edge_index = torch.tensor(edge_index, dtype=torch.long).transpose(0, 1) edge_attr = torch.tensor(edge_attr, dtype=torch.float) return X, edge_index, edge_attr def grid_edge(n_x, n_y, a=None): if a != None: a = a.reshape(n_x, n_y) xs = np.linspace(0.0, 1.0, n_x) ys = np.linspace(0.0, 1.0, n_y) # xs = np.array(range(n_x)) # ys = np.array(range(n_y)) grid = np.vstack([xx.ravel() for xx in np.meshgrid(xs, ys)]).T edge_index = [] edge_attr = [] for y in range(n_y): for x in 
range(n_x): i = y * n_x + x if (x != n_x - 1): d = 1 / n_x edge_index.append((i, i + 1)) edge_index.append((i + 1, i )) if a != None: a1 = a[x, y] a2 = a[x + 1, y] edge_attr.append((x / n_x, y / n_y, a1, a2)) edge_attr.append((y/n_y, x/n_x, a2, a1)) if (y != n_y - 1): d = 1 / n_y edge_index.append((i, i + n_x)) edge_index.append((i + n_x, i)) if a != None: a1 = a[x, y] a2 = a[x, y+1] edge_attr.append((x/n_x, y/n_y, a1, a2)) edge_attr.append((y/n_y, x/n_x, a2, a1)) X = torch.tensor(grid, dtype=torch.float) # Exact = torch.tensor(Exact, dtype=torch.float).view(-1) edge_index = torch.tensor(edge_index, dtype=torch.long).transpose(0, 1) edge_attr = torch.tensor(edge_attr, dtype=torch.float) return X, edge_index, edge_attr def grid_edge1d(n_x, a=None): if a != None: a = a.reshape(n_x) xs = np.linspace(0.0, 1.0, n_x) # xs = np.array(range(n_x)) # ys = np.array(range(n_y)) edge_index = [] edge_attr = [] for x in range(n_x): i = x i1 = (x+1)%n_x edge_index.append((i, i1)) edge_index.append((i1, i )) i2 = (x + 2) % n_x edge_index.append((i, i2)) edge_index.append((i2, i )) if a != None: a1 = a[x] a2 = a[x + 1] edge_attr.append((x / n_x, a1, a2)) edge_attr.append((x / n_x, a2, a1)) X = torch.tensor(xs, dtype=torch.float) # Exact = torch.tensor(Exact, dtype=torch.float).view(-1) edge_index = torch.tensor(edge_index, dtype=torch.long).transpose(0, 1) edge_attr = torch.tensor(edge_attr, dtype=torch.float) return X, edge_index, edge_attr def grid_edge_aug(n_x, n_y, a): a = a.reshape(n_x, n_y) xs = np.linspace(0.0, 1.0, n_x) ys = np.linspace(0.0, 1.0, n_y) # xs = np.array(range(n_x)) # ys = np.array(range(n_y)) grid = np.vstack([xx.ravel() for xx in np.meshgrid(xs, ys)]).T edge_index = [] edge_attr = [] for y in range(n_y): for x in range(n_x): i = y * n_x + x if (x != n_x - 1): d = 1 / n_x a1 = a[x, y] a2 = a[x + 1, y] edge_index.append((i, i + 1)) edge_attr.append((d, a1, a2, 1 / np.sqrt(np.abs(a1 * a2)), np.exp(-(d) ** 2), np.exp(-(d / 0.1) ** 2), np.exp(-(d / 0.01) ** 2))) 
edge_index.append((i + 1, i)) edge_attr.append((d, a2, a1, 1 / np.sqrt(np.abs(a1 * a2)), np.exp(-(d) ** 2), np.exp(-(d / 0.1) ** 2), np.exp(-(d / 0.01) ** 2))) if (y != n_y - 1): d = 1 / n_y a1 = a[x, y] a2 = a[x, y+1] edge_index.append((i, i + n_x)) edge_attr.append((d, a1, a2, 1 / np.sqrt(np.abs(a1 * a2)), np.exp(-(d) ** 2), np.exp(-(d / 0.1) ** 2), np.exp(-(d / 0.01) ** 2))) edge_index.append((i + n_x, i)) edge_attr.append((d, a2, a1, 1 / np.sqrt(np.abs(a1 * a2)), np.exp(-(d) ** 2), np.exp(-(d / 0.1) ** 2), np.exp(-(d / 0.01) ** 2))) X = torch.tensor(grid, dtype=torch.float) # Exact = torch.tensor(Exact, dtype=torch.float).view(-1) edge_index = torch.tensor(edge_index, dtype=torch.long).transpose(0, 1) edge_attr = torch.tensor(edge_attr, dtype=torch.float) return X, edge_index, edge_attr def grid_edge_aug_full(n_x, n_y, r, a): n = n_x * n_y xs = np.linspace(0.0, 1.0, n_x) ys = np.linspace(0.0, 1.0, n_y) grid = np.vstack([xx.ravel() for xx in np.meshgrid(xs, ys)]).T edge_index = [] edge_attr = [] for i1 in range(n): x1 = grid[i1] for i2 in range(n): x2 = grid[i2] d = np.linalg.norm(x1-x2) if(d<=r): a1 = a[i1] a2 = a[i2] edge_index.append((i1, i2)) edge_attr.append((d, a1, a2, 1 / np.sqrt(np.abs(a1 * a2)), np.exp(-(d) ** 2), np.exp(-(d / 0.1) ** 2), np.exp(-(d / 0.01) ** 2))) edge_index.append((i2, i1)) edge_attr.append((d, a2, a1, 1 / np.sqrt(np.abs(a1 * a2)), np.exp(-(d) ** 2), np.exp(-(d / 0.1) ** 2), np.exp(-(d / 0.01) ** 2))) X = torch.tensor(grid, dtype=torch.float) # Exact = torch.tensor(Exact, dtype=torch.float).view(-1) edge_index = torch.tensor(edge_index, dtype=torch.long).transpose(0, 1) edge_attr = torch.tensor(edge_attr, dtype=torch.float) return X, edge_index, edge_attr def multi_grid(depth, n_x, n_y, grid, params): edge_index_global = [] edge_attr_global = [] X_global = [] num_nodes = 0 # build connected graph for l in range(depth): h_x_l = n_x // (2 ** l) h_y_l = n_y // (2 ** l) n_l = h_x_l * h_y_l a = downsample(params, n_x, (2 ** l)) if grid == 
'grid': X, edge_index_inner, edge_attr_inner = grid(h_y_l, h_x_l) elif grid == 'grid_edge': X, edge_index_inner, edge_attr_inner = grid_edge(h_y_l, h_x_l, a) elif grid == 'grid_edge_aug': X, edge_index_inner, edge_attr_inner = grid_edge(h_y_l, h_x_l, a) # update index edge_index_inner = edge_index_inner + num_nodes edge_index_global.append(edge_index_inner) edge_attr_global.append(edge_attr_inner) # construct X # if (is_high): # X = torch.cat([torch.zeros(n_l, l * 2), X, torch.zeros(n_l, (depth - 1 - l) * 2)], dim=1) # else: # X_l = torch.tensor(l, dtype=torch.float).repeat(n_l, 1) # X = torch.cat([X, X_l], dim=1) X_global.append(X) # construct edges index1 = torch.tensor(range(n_l), dtype=torch.long) index1 = index1 + num_nodes num_nodes += n_l # #construct inter-graph edge if l != depth-1: index2 = np.array(range(n_l//4)).reshape(h_x_l//2, h_y_l//2) # torch.repeat is different from numpy index2 = index2.repeat(2, axis = 0).repeat(2, axis = 1) index2 = torch.tensor(index2).reshape(-1) index2 = index2 + num_nodes index2 = torch.tensor(index2, dtype=torch.long) edge_index_inter1 = torch.cat([index1,index2], dim=-1).reshape(2,-1) edge_index_inter2 = torch.cat([index2,index1], dim=-1).reshape(2,-1) edge_index_inter = torch.cat([edge_index_inter1, edge_index_inter2], dim=1) edge_attr_inter1 = torch.tensor((0, 0, 1), dtype=torch.float).repeat(n_l, 1) edge_attr_inter2 = torch.tensor((0, 0,-1), dtype=torch.float).repeat(n_l, 1) edge_attr_inter = torch.cat([edge_attr_inter1, edge_attr_inter2], dim=0) edge_index_global.append(edge_index_inter) edge_attr_global.append(edge_attr_inter) X = torch.cat(X_global, dim=0) edge_index = torch.cat(edge_index_global, dim=1) edge_attr = torch.cat(edge_attr_global, dim=0) mask_index = torch.tensor(range(n_x * n_y), dtype=torch.long) # print('create multi_grid with size:', X.shape, edge_index.shape, edge_attr.shape, mask_index.shape) return (X, edge_index, edge_attr, mask_index, num_nodes) def multi_pole_grid1d(theta, theta_d, s, N, 
is_periodic=False): grid_list = [] theta_list = [] edge_index_list = [] edge_index_list_cuda = [] level = int(np.log2(s) - 1) print(level) for l in range(1, level+1): r_l = 2 ** (l - 1) s_l = s // r_l n_l = s_l print('level',s_l,r_l,n_l) xs = np.linspace(0.0, 1.0, s_l) grid_l = xs grid_l = torch.tensor(grid_l, dtype=torch.float) print(grid_l.shape) grid_list.append(grid_l) theta_l = theta[:,:,:theta_d].reshape(N, s, theta_d) theta_l = theta_l[:, ::r_l, :] theta_l = theta_l.reshape(N, n_l, theta_d) theta_l = torch.tensor(theta_l, dtype=torch.float) print(theta_l.shape) theta_list.append(theta_l) # for the finest level, we construct the nearest neighbors (NN) if l==1: edge_index_nn = [] for x_i in range(s_l): for x in (-1,1): x_j = x_i + x if is_periodic: x_j = x_j % s_l # if (xj, yj) is a valid node if (x_j in range(s_l)): edge_index_nn.append([x_i,x_j]) edge_index_nn = torch.tensor(edge_index_nn, dtype=torch.long) edge_index_nn = edge_index_nn.transpose(0,1) edge_index_list.append(edge_index_nn) edge_index_list_cuda.append(edge_index_nn.cuda()) print('edge', edge_index_nn.shape) # we then compute the interactive neighbors -- their parents are NN but they are not NearestNeighbor edge_index_inter = [] for x_i in range(s_l): for x in range(-3,4): x_j = x_i + x # if (xj, yj) is a valid node if is_periodic: x_j = x_j % s_l if (x_j in range(s_l)): # if (xi, yi), (xj, yj) not NearestNeighbor if abs(x)>=2: # if their parents are NN if abs(x_i//2 - x_j//2)%(s_l//2) <=1: edge_index_inter.append([x_i,x_j]) edge_index_inter = torch.tensor(edge_index_inter, dtype=torch.long) edge_index_inter = edge_index_inter.transpose(0,1) edge_index_list.append(edge_index_inter) edge_index_list_cuda.append(edge_index_inter.cuda()) print('edge_inter', edge_index_inter.shape) print(len(grid_list),len(edge_index_list),len(theta_list)) return grid_list, theta_list, edge_index_list, edge_index_list_cuda def get_edge_attr(grid, theta, edge_index): n_edges = edge_index.shape[1] edge_attr = 
np.zeros((n_edges, 4)) edge_attr[:, 0:2] = grid[edge_index.transpose(0,1)].reshape((n_edges, -1)) edge_attr[:, 2] = theta[edge_index[0]] edge_attr[:, 3] = theta[edge_index[1]] return torch.tensor(edge_attr, dtype=torch.float)
39.87964
153
0.571969
10,457
70,906
3.66128
0.034905
0.092149
0.041765
0.015045
0.803871
0.768088
0.729718
0.691976
0.661678
0.645171
0
0.024561
0.291992
70,906
1,777
154
39.902082
0.738078
0.041026
0
0.646589
0
0
0.00174
0
0
0
0
0
0.018142
1
0.063135
false
0
0.005806
0.002903
0.127721
0.011611
0
0
0
null
0
0
0
1
1
1
0
0
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
4
a573575eb47ee5f59b1acb1272487d1d173397d0
84
py
Python
test_hello.py
bkwin66/python_testx
45acb6fca740d46855322cf11f1a1197cbdcb82e
[ "Apache-2.0" ]
null
null
null
test_hello.py
bkwin66/python_testx
45acb6fca740d46855322cf11f1a1197cbdcb82e
[ "Apache-2.0" ]
null
null
null
test_hello.py
bkwin66/python_testx
45acb6fca740d46855322cf11f1a1197cbdcb82e
[ "Apache-2.0" ]
null
null
null
print "Hello World" print "Print something else" for i in range (10): print i
12
28
0.678571
14
84
4.071429
0.714286
0
0
0
0
0
0
0
0
0
0
0.03125
0.238095
84
6
29
14
0.859375
0
0
0
0
0
0.373494
0
0
0
0
0
0
0
null
null
0
0
null
null
0.75
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
1
0
0
0
0
0
0
1
0
4
a595db8e6a7ce9570cdc1f94fe4be454e40272a1
89
py
Python
posters/apps.py
postersession/postersession
1c246f820005673af15fd52da56a2011897e953c
[ "MIT" ]
null
null
null
posters/apps.py
postersession/postersession
1c246f820005673af15fd52da56a2011897e953c
[ "MIT" ]
null
null
null
posters/apps.py
postersession/postersession
1c246f820005673af15fd52da56a2011897e953c
[ "MIT" ]
null
null
null
from django.apps import AppConfig class PostersConfig(AppConfig): name = 'posters'
14.833333
33
0.752809
10
89
6.7
0.9
0
0
0
0
0
0
0
0
0
0
0
0.168539
89
5
34
17.8
0.905405
0
0
0
0
0
0.078652
0
0
0
0
0
0
1
0
false
0
0.333333
0
1
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
1
0
1
0
0
4
a59f016df3e6e6d3d87d70a6a34e78535d79e64e
538
py
Python
2017/10_Oct/11/04-isnumeric.py
z727354123/pyCharmTest
9cbd770e19929cb4feb3be2f13b60dc0b1f68b56
[ "Apache-2.0" ]
null
null
null
2017/10_Oct/11/04-isnumeric.py
z727354123/pyCharmTest
9cbd770e19929cb4feb3be2f13b60dc0b1f68b56
[ "Apache-2.0" ]
null
null
null
2017/10_Oct/11/04-isnumeric.py
z727354123/pyCharmTest
9cbd770e19929cb4feb3be2f13b60dc0b1f68b56
[ "Apache-2.0" ]
null
null
null
myStr = '' print(myStr.isalnum()) # False 不支持空 myStr = 'abCC' print(myStr.isalpha()) # True 支持大写 myStr = 'abc*' print(myStr.isalpha()) # False 不支持 符号 myStr = 'abc1' print(myStr.isalpha()) # False 不支持 包含num print(myStr.isalnum()) # True 支持 包含num myStr = '123' print(myStr.isnumeric()) # True 只支持 全数字 myStr = '123.123' print(myStr.isnumeric()) # False myStr = '0.1' print(myStr.isnumeric()) # False 不支持 . print(myStr.isalnum()) # False 不支持 . myStr = 'abc123.1' print(myStr.isalnum()) # False 不支持 .
26.9
45
0.622677
71
538
4.71831
0.309859
0.298507
0.202985
0.197015
0.298507
0
0
0
0
0
0
0.037559
0.208178
538
19
46
28.315789
0.748826
0.22119
0
0.555556
0
0
0.081081
0
0
0
0
0
0
1
0
false
0
0
0
0
0.555556
0
0
0
null
1
1
1
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
0
0
0
0
0
1
0
4
a5a1a33d59bbc6bd6b2c679b73a2011d21d8a80e
209
py
Python
dbaas/tsuru/admin/__init__.py
jaeko44/python_dbaas
4fafa4ad70200fec1436c326c751761922ec9fa8
[ "BSD-3-Clause" ]
null
null
null
dbaas/tsuru/admin/__init__.py
jaeko44/python_dbaas
4fafa4ad70200fec1436c326c751761922ec9fa8
[ "BSD-3-Clause" ]
null
null
null
dbaas/tsuru/admin/__init__.py
jaeko44/python_dbaas
4fafa4ad70200fec1436c326c751761922ec9fa8
[ "BSD-3-Clause" ]
1
2017-07-02T08:46:17.000Z
2017-07-02T08:46:17.000Z
# -*- coding: utf-8 -*- from __future__ import absolute_import, unicode_literals from django.contrib import admin from .. import models from .bind import BindAdmin admin.site.register(models.Bind, BindAdmin)
26.125
56
0.784689
28
209
5.642857
0.607143
0
0
0
0
0
0
0
0
0
0
0.005435
0.119617
209
7
57
29.857143
0.853261
0.100478
0
0
0
0
0
0
0
0
0
0
0
1
0
true
0
0.8
0
0.8
0
1
0
0
null
0
0
0
0
0
0
0
0
0
0
0
0
0
1
0
0
0
0
0
0
0
0
0
0
null
0
0
0
0
0
0
1
0
1
0
0
0
0
4