query
stringlengths
5
1.23k
positive
stringlengths
53
15.2k
id_
int64
0
252k
task_name
stringlengths
87
242
negative
listlengths
20
553
search for text - currently this looks in all folders in the root of AIKIF, but that also contains binaries, so it will need to use agent_filelist.py to specify the list of folders. NOTE - this needs to use indexes rather than a full search each time
def search_aikif(txt, formatHTML=True):
    """Search every AIKIF source file for *txt* and return matching lines.

    txt        -- text to look for (plain substring match, case sensitive)
    formatHTML -- when True, results are HTML fragments via format_result();
                  otherwise raw [file, line, line_num, txt] lists.
    Returns a list of results; per-file summary lines are appended AFTER
    that file's matches (HTML mode) or printed (plain mode).
    """
    import aikif.lib.cls_filelist as mod_fl
    results = []
    num_found = 0
    # NOTE(review): relies on module-level `aikif_folder`; scans everything
    # except *.pyc - see TODO about using agent_filelist.py / indexes.
    file_list = mod_fl.FileList([aikif_folder], ['*.*'], ['*.pyc'])
    for fname in file_list.get_list():
        try:
            num_found = 0
            with open(fname, 'r') as fip:
                for line_num, line in enumerate(fip, start=1):
                    if txt not in line:
                        continue
                    num_found += 1
                    if formatHTML is True:
                        results.append(format_result(line, line_num, txt))
                    else:
                        results.append([fname, line, line_num, txt])
            if num_found > 0:
                if formatHTML is True:
                    results.append('<h3>' + fname + ' = ' + str(num_found) + ' results</h3>')
                else:
                    print(fname + ' = ' + str(num_found) + '')
        except Exception:
            # binary or unreadable file - record and keep going
            results.append('problem with file ' + fname)
    if len(results) == 0:
        results.append("No results")
    return results
2,200
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/web_app/page_search.py#L23-L58
[ "def", "_set_led_value", "(", "self", ",", "group", ",", "val", ")", ":", "new_bitmask", "=", "set_bit", "(", "self", ".", "_value", ",", "group", ",", "bool", "(", "val", ")", ")", "self", ".", "_set_led_bitmask", "(", "new_bitmask", ")" ]
highlight the search result
def format_result(line, line_num, txt):
    """Return *line* as an HTML snippet with every occurrence of *txt* highlighted."""
    highlighted = line.replace(
        txt, '<span style="background-color: #FFFF00">' + txt + '</span>')
    return '&nbsp;&nbsp;' + str(line_num) + ': ' + highlighted
2,201
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/web_app/page_search.py#L60-L63
[ "def", "_post_parse_request", "(", "self", ",", "request", ",", "client_id", "=", "''", ",", "*", "*", "kwargs", ")", ":", "if", "'state'", "in", "request", ":", "try", ":", "sinfo", "=", "self", ".", "endpoint_context", ".", "sdb", "[", "request", "[", "'code'", "]", "]", "except", "KeyError", ":", "logger", ".", "error", "(", "'Code not present in SessionDB'", ")", "return", "self", ".", "error_cls", "(", "error", "=", "\"unauthorized_client\"", ")", "else", ":", "state", "=", "sinfo", "[", "'authn_req'", "]", "[", "'state'", "]", "if", "state", "!=", "request", "[", "'state'", "]", ":", "logger", ".", "error", "(", "'State value mismatch'", ")", "return", "self", ".", "error_cls", "(", "error", "=", "\"unauthorized_client\"", ")", "if", "\"client_id\"", "not", "in", "request", ":", "# Optional for access token request", "request", "[", "\"client_id\"", "]", "=", "client_id", "logger", ".", "debug", "(", "\"%s: %s\"", "%", "(", "request", ".", "__class__", ".", "__name__", ",", "sanitize", "(", "request", ")", ")", ")", "return", "request" ]
Modules for testing happiness of persons in worlds based on simplistic preferences. Just a toy - don't take seriously.
def TEST():
    """Exercise the happiness toy model with one sample world and person."""
    world = World('Mars', [0, 0.0, 0.9, 0.0])
    print(world)
    person = Person('Rover', {'tax_min': 0.0, 'tax_max': 0.9,
                              'tradition': 0.9, 'equity': 0.0})
    print(person)
    happy = Happiness(person, world)
    # factors are HappinessFactors(name, type, min, max)
    for factor in (HappinessFactors('tax', 'Economic', 0.1, 0.3),
                   HappinessFactors('tradition', 'Personal', 0.3, 0.9),
                   HappinessFactors('equity', 'Personal', 0.1, 0.9),
                   HappinessFactors('growth', 'Economic', 0.01, 0.09)):
        happy.add_factor(factor)
    print(happy.show_details())
2,202
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/environments/happiness.py#L3-L39
[ "def", "construct_item_args", "(", "self", ",", "domain_event", ")", ":", "# Get the sequence ID.", "sequence_id", "=", "domain_event", ".", "__dict__", "[", "self", ".", "sequence_id_attr_name", "]", "# Get the position in the sequence.", "position", "=", "getattr", "(", "domain_event", ",", "self", ".", "position_attr_name", ",", "None", ")", "# Get topic and data.", "topic", ",", "state", "=", "self", ".", "get_item_topic_and_state", "(", "domain_event", ".", "__class__", ",", "domain_event", ".", "__dict__", ")", "# Get the 'other' args.", "# - these are meant to be derivative of the other attributes,", "# to populate database fields, and shouldn't affect the hash.", "other_args", "=", "tuple", "(", "(", "getattr", "(", "domain_event", ",", "name", ")", "for", "name", "in", "self", ".", "other_attr_names", ")", ")", "return", "(", "sequence_id", ",", "position", ",", "topic", ",", "state", ")", "+", "other_args" ]
find the best world to make people happy
def solve(self, max_worlds=10000, silent=False):
    """Brute-force search for the world parameters that maximise net happiness.

    Iterates tax_rate x equity x tradition over self.*_range, builds a
    candidate World for each combination, sums Happiness ratings over
    self.all_people and keeps the best total in self.net_happiness.

    max_worlds -- cap on the number of candidate worlds generated
    silent     -- suppress progress printing when True
    """
    self.num_worlds = 0
    num_unhappy = 0
    for tax_rate in range(self.tax_range[0], self.tax_range[1]):
        for equity in range(self.equity_range[0], self.equity_range[1]):
            for tradition in range(self.tradition_range[0], self.tradition_range[1]):
                self.num_worlds += 1
                if self.num_worlds > max_worlds:
                    # NOTE(review): `break` only exits the innermost
                    # (tradition) loop - the outer loops keep running,
                    # so more than max_worlds iterations still occur.
                    break
                # candidate world: population 5000, parameters scaled /10
                w = World(str(self.num_worlds).zfill(6),
                          [5000, tax_rate / 10, tradition / 10, equity / 10])
                world_happiness = 0
                num_unhappy = 0
                for person in self.all_people:
                    wh = Happiness(person, w)
                    world_happiness += wh.rating
                    if wh.rating < 0:
                        num_unhappy += 1
                # keep running best; unhappy count recorded for the best world only
                if world_happiness > self.net_happiness:
                    self.net_happiness = world_happiness
                    self.unhappy_people = num_unhappy
                    if not silent:
                        print('found better world - ' + w.nme + ' = ' + str(world_happiness) + ' - total unhappy_people = ' + str(self.unhappy_people))
2,203
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/environments/happiness.py#L129-L153
[ "def", "create_binding", "(", "self", ",", "vhost", ",", "exchange", ",", "queue", ",", "rt_key", "=", "None", ",", "args", "=", "None", ")", ":", "vhost", "=", "quote", "(", "vhost", ",", "''", ")", "exchange", "=", "quote", "(", "exchange", ",", "''", ")", "queue", "=", "quote", "(", "queue", ",", "''", ")", "body", "=", "json", ".", "dumps", "(", "{", "'routing_key'", ":", "rt_key", ",", "'arguments'", ":", "args", "or", "[", "]", "}", ")", "path", "=", "Client", ".", "urls", "[", "'bindings_between_exch_queue'", "]", "%", "(", "vhost", ",", "exchange", ",", "queue", ")", "binding", "=", "self", ".", "_call", "(", "path", ",", "'POST'", ",", "body", "=", "body", ",", "headers", "=", "Client", ".", "json_headers", ")", "return", "binding" ]
extended print details of happiness parameters
def show_details(self):
    """Extended print details: str(self) plus a DETAILS section listing each factor."""
    pieces = [str(self), '\nDETAILS\n']
    pieces.extend(str(factor) for factor in self.factors)
    return ''.join(pieces)
2,204
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/environments/happiness.py#L222-L231
[ "def", "merge", "(", "self", ",", "keys", ")", ":", "deletes", "=", "[", "]", "for", "pseudo_key", ",", "rows", "in", "self", ".", "_rows", ".", "items", "(", ")", ":", "self", ".", "_additional_rows_date2int", "(", "keys", ",", "rows", ")", "rows", "=", "self", ".", "_intersection", "(", "keys", ",", "rows", ")", "if", "rows", ":", "rows", "=", "self", ".", "_rows_sort", "(", "rows", ")", "self", ".", "_rows", "[", "pseudo_key", "]", "=", "self", ".", "_merge_adjacent_rows", "(", "rows", ")", "else", ":", "deletes", ".", "append", "(", "pseudo_key", ")", "for", "pseudo_key", "in", "deletes", ":", "del", "self", ".", "_rows", "[", "pseudo_key", "]" ]
this is going to be the tricky bit - probably not possible to get the exact rating for a value. Will need to do sentiment analysis of the text to see how it matches the rating. Even that sounds like it won't work - maybe an ML algorithm would do it, but that requires a large body of text already matched to values - and values aren't even defined as far as I have found.
def match_value_to_text(self, text):
    """Crude placeholder match: score 0.8 if this value's name appears in *text*, else 0.2.

    Returns a human-readable summary string, not the raw score.
    """
    score = 0.8 if self.nme in text else 0.2
    return self.nme + ' = ' + str(score) + ' match against ' + text
2,205
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/environments/happiness.py#L310-L329
[ "def", "on_exception", "(", "self", ",", "exception", ")", ":", "logger", ".", "error", "(", "'Exception from stream!'", ",", "exc_info", "=", "True", ")", "self", ".", "streaming_exception", "=", "exception" ]
convert a list to html using table formatting
def list2html(lst):
    """Convert a list to HTML using table formatting, one row per element.

    Strings and scalars become a single cell; nested lists become one
    comma-separated cell.
    """
    parts = ['<TABLE width=100% border=0>']
    for item in lst:
        parts.append('<TR>\n')
        if type(item) is str:
            parts.append('<TD>' + item + '</TD>\n')
        elif type(item) is list:
            # inner list: all entries joined into one cell
            parts.append('<TD>')
            parts.extend(sub + ', ' for sub in item)
            parts.append('</TD>')
        else:
            parts.append('<TD>' + str(item) + '</TD>\n')
        parts.append('</TR>\n')
    parts.append('</TABLE><BR>\n')
    return ''.join(parts)
2,206
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/web_app/web_utils.py#L10-L28
[ "def", "save_and_validate_logo", "(", "logo_stream", ",", "logo_filename", ",", "community_id", ")", ":", "cfg", "=", "current_app", ".", "config", "logos_bucket_id", "=", "cfg", "[", "'COMMUNITIES_BUCKET_UUID'", "]", "logo_max_size", "=", "cfg", "[", "'COMMUNITIES_LOGO_MAX_SIZE'", "]", "logos_bucket", "=", "Bucket", ".", "query", ".", "get", "(", "logos_bucket_id", ")", "ext", "=", "os", ".", "path", ".", "splitext", "(", "logo_filename", ")", "[", "1", "]", "ext", "=", "ext", "[", "1", ":", "]", "if", "ext", ".", "startswith", "(", "'.'", ")", "else", "ext", "logo_stream", ".", "seek", "(", "SEEK_SET", ",", "SEEK_END", ")", "# Seek from beginning to end", "logo_size", "=", "logo_stream", ".", "tell", "(", ")", "if", "logo_size", ">", "logo_max_size", ":", "return", "None", "if", "ext", "in", "cfg", "[", "'COMMUNITIES_LOGO_EXTENSIONS'", "]", ":", "key", "=", "\"{0}/logo.{1}\"", ".", "format", "(", "community_id", ",", "ext", ")", "logo_stream", ".", "seek", "(", "0", ")", "# Rewind the stream to the beginning", "ObjectVersion", ".", "create", "(", "logos_bucket", ",", "key", ",", "stream", "=", "logo_stream", ",", "size", "=", "logo_size", ")", "return", "ext", "else", ":", "return", "None" ]
returns the html for a simple edit form
def build_edit_form(title, id, cols, return_page):
    """Return the HTML for a simple edit form.

    title       -- heading shown above the form
    id          -- record id, embedded as the hidden 'rec_id' field
    cols        -- column names; each gets a labelled text input 'col_<n>'
    return_page -- URL the form POSTs back to (e.g. /agents)

    NOTE: parameter name `id` shadows the builtin but is kept for
    caller compatibility.
    """
    parts = ['<H3>' + title + '<H3>']
    parts.append('<form action="' + return_page + '" method="POST">\n')  # return_page = /agents
    parts.append(' updating id:' + str(id) + '\n<BR>')
    parts.append(' <input type="hidden" name="rec_id" readonly value="' + str(id) + '"> ')
    parts.append(' <TABLE width=80% valign=top border=1>')
    for col_num, col in enumerate(cols):
        parts.append(' <TR>\n')
        parts.append(' <TD><div id="form_label">' + col + '</div></TD>\n')
        parts.append(' <TD><div id="form_input"><input type="text" name="col_' + str(col_num) + '"></div></TD>\n')
        parts.append(' </TR>\n')
    parts.append(' <TR><TD></TD>\n')
    parts.append(' <TD>\n')
    parts.append(' <input type="submit" name="update-form" value="Save Changes">\n')
    parts.append(' <input type="submit" name="delete-form" value="Delete">\n')
    parts.append(' <input type="submit" name="add-form" value="Add">\n')
    parts.append(' </TD></TR></TABLE>')
    parts.append('</form>\n')
    return ''.join(parts)
2,207
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/web_app/web_utils.py#L67-L89
[ "def", "devices", "(", "self", ",", "timeout", "=", "None", ")", ":", "# b313b945 device usb:1-7 product:d2vzw model:SCH_I535 device:d2vzw", "# from Android system/core/adb/transport.c statename()", "re_device_info", "=", "re", ".", "compile", "(", "r'([^\\s]+)\\s+(offline|bootloader|device|host|recovery|sideload|no permissions|unauthorized|unknown)'", ")", "devices", "=", "[", "]", "lines", "=", "self", ".", "command_output", "(", "[", "\"devices\"", ",", "\"-l\"", "]", ",", "timeout", "=", "timeout", ")", ".", "split", "(", "'\\n'", ")", "for", "line", "in", "lines", ":", "if", "line", "==", "'List of devices attached '", ":", "continue", "match", "=", "re_device_info", ".", "match", "(", "line", ")", "if", "match", ":", "device", "=", "{", "'device_serial'", ":", "match", ".", "group", "(", "1", ")", ",", "'state'", ":", "match", ".", "group", "(", "2", ")", "}", "remainder", "=", "line", "[", "match", ".", "end", "(", "2", ")", ":", "]", ".", "strip", "(", ")", "if", "remainder", ":", "try", ":", "device", ".", "update", "(", "dict", "(", "[", "j", ".", "split", "(", "':'", ")", "for", "j", "in", "remainder", ".", "split", "(", "' '", ")", "]", ")", ")", "except", "ValueError", ":", "self", ".", "_logger", ".", "warning", "(", "'devices: Unable to parse '", "'remainder for device %s'", "%", "line", ")", "devices", ".", "append", "(", "device", ")", "return", "devices" ]
returns the html to display a listbox
def build_html_listbox(lst, nme):
    """Return the HTML for a multi-select listbox named *nme* containing *lst*."""
    options = ''.join(' <option>' + str(entry) + '</option>\n' for entry in lst)
    return '<select name="' + nme + '" multiple="multiple">\n' + options + '</select>\n'
2,208
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/web_app/web_utils.py#L91-L100
[ "def", "share", "(", "self", ",", "share_id", ":", "str", ",", "token", ":", "dict", "=", "None", ",", "augment", ":", "bool", "=", "False", ",", "prot", ":", "str", "=", "\"https\"", ",", ")", "->", "dict", ":", "# passing auth parameter", "share_url", "=", "\"{}://v1.{}.isogeo.com/shares/{}\"", ".", "format", "(", "prot", ",", "self", ".", "api_url", ",", "share_id", ")", "share_req", "=", "self", ".", "get", "(", "share_url", ",", "headers", "=", "self", ".", "header", ",", "proxies", "=", "self", ".", "proxies", ",", "verify", "=", "self", ".", "ssl", ")", "# checking response", "checker", ".", "check_api_response", "(", "share_req", ")", "# enhance share model", "share", "=", "share_req", ".", "json", "(", ")", "if", "augment", ":", "share", "=", "utils", ".", "share_extender", "(", "share", ",", "self", ".", "search", "(", "whole_share", "=", "1", ",", "share", "=", "share_id", ")", ".", "get", "(", "\"results\"", ")", ")", "else", ":", "pass", "# end of method", "return", "share" ]
returns the html with supplied list as a HTML listbox
def build_data_list(lst, title='List'):
    """Return *lst* rendered as an HTML bullet list under a heading.

    lst   -- iterable of strings, one <LI> per entry
    title -- heading text (new, defaulted parameter - backward compatible)

    Fixes in this version: the original referenced the undefined name
    ``List`` (a NameError on every call), and never closed the <H3> and
    <UL> tags.
    """
    txt = '<H3>' + title + '</H3><UL>'
    for i in lst:
        txt += '<LI>' + i + '</LI>'
    txt += '</UL>'
    return txt
2,209
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/web_app/web_utils.py#L102-L111
[ "def", "multipleOrderComparison", "(", "cls", ",", "orders", ")", ":", "comparers", "=", "[", "(", "o", ".", "keyfn", ",", "1", "if", "o", ".", "isAscending", "(", ")", "else", "-", "1", ")", "for", "o", "in", "orders", "]", "def", "cmpfn", "(", "a", ",", "b", ")", ":", "for", "keyfn", ",", "ascOrDesc", "in", "comparers", ":", "comparison", "=", "cmp", "(", "keyfn", "(", "a", ")", ",", "keyfn", "(", "b", ")", ")", "*", "ascOrDesc", "if", "comparison", "is", "not", "0", ":", "return", "comparison", "return", "0", "return", "cmpfn" ]
formats a standard filelist to html using table formats
def filelist2html(lst, fldr, hasHeader='N'):
    """Format a standard filelist as an HTML table.

    lst       -- rows; each row is a string, a list of strings, or any object
    fldr      -- folder passed through to link_file() for hyperlinking
    hasHeader -- 'Y' renders the first row with <TH> cells instead of <TD>
    """
    txt = '<TABLE width=100% border=0>'
    if lst:
        for row_num, row in enumerate(lst, start=1):
            is_header = hasHeader == 'Y' and row_num == 1
            td_begin, td_end = ('<TH>', '</TH>') if is_header else ('<TD>', '</TD>')
            txt += '<TR>'
            if type(row) is str:
                txt += td_begin + link_file(row, fldr) + td_end
            elif type(row) is list:
                # list row: all entries linked into one semicolon-separated cell
                txt += td_begin
                for entry in row:
                    txt += link_file(entry, fldr) + '; '
                txt += td_end
            else:
                txt += td_begin + str(row) + td_end
            txt += '</TR>\n'
    txt += '</TABLE><BR>\n'
    return txt
2,210
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/web_app/web_utils.py#L114-L145
[ "def", "get_user_last_submissions", "(", "self", ",", "limit", "=", "5", ",", "request", "=", "None", ")", ":", "if", "request", "is", "None", ":", "request", "=", "{", "}", "request", ".", "update", "(", "{", "\"username\"", ":", "self", ".", "_user_manager", ".", "session_username", "(", ")", "}", ")", "# Before, submissions were first sorted by submission date, then grouped", "# and then resorted by submission date before limiting. Actually, grouping", "# and pushing, keeping the max date, followed by result filtering is much more", "# efficient", "data", "=", "self", ".", "_database", ".", "submissions", ".", "aggregate", "(", "[", "{", "\"$match\"", ":", "request", "}", ",", "{", "\"$group\"", ":", "{", "\"_id\"", ":", "{", "\"courseid\"", ":", "\"$courseid\"", ",", "\"taskid\"", ":", "\"$taskid\"", "}", ",", "\"submitted_on\"", ":", "{", "\"$max\"", ":", "\"$submitted_on\"", "}", ",", "\"submissions\"", ":", "{", "\"$push\"", ":", "{", "\"_id\"", ":", "\"$_id\"", ",", "\"result\"", ":", "\"$result\"", ",", "\"status\"", ":", "\"$status\"", ",", "\"courseid\"", ":", "\"$courseid\"", ",", "\"taskid\"", ":", "\"$taskid\"", ",", "\"submitted_on\"", ":", "\"$submitted_on\"", "}", "}", ",", "}", "}", ",", "{", "\"$project\"", ":", "{", "\"submitted_on\"", ":", "1", ",", "\"submissions\"", ":", "{", "# This could be replaced by $filter if mongo v3.2 is set as dependency", "\"$setDifference\"", ":", "[", "{", "\"$map\"", ":", "{", "\"input\"", ":", "\"$submissions\"", ",", "\"as\"", ":", "\"submission\"", ",", "\"in\"", ":", "{", "\"$cond\"", ":", "[", "{", "\"$eq\"", ":", "[", "\"$submitted_on\"", ",", "\"$$submission.submitted_on\"", "]", "}", ",", "\"$$submission\"", ",", "False", "]", "}", "}", "}", ",", "[", "False", "]", "]", "}", "}", "}", ",", "{", "\"$sort\"", ":", "{", "\"submitted_on\"", ":", "pymongo", ".", "DESCENDING", "}", "}", ",", "{", "\"$limit\"", ":", "limit", "}", "]", ")", "return", "[", "item", "[", "\"submissions\"", "]", "[", "0", "]", 
"for", "item", "in", "data", "]" ]
creates a html link for a file using folder fldr
def link_file(f, fldr, url_prefix='/aikif/data/core/'):
    """Create an HTML link for file *f* if it exists in folder *fldr*.

    f          -- bare filename
    fldr       -- local folder checked for the file's existence
    url_prefix -- URL base for the href (new, defaulted parameter; the
                  original hard-coded '/aikif/data/core/')

    Returns '<a href=...>' markup when the file exists, otherwise the
    plain filename.
    """
    fname = os.path.join(fldr, f)
    if os.path.isfile(fname):
        return '<a href="' + url_prefix + f + '">' + f + '</a>'
    else:
        return f
2,211
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/web_app/web_utils.py#L147-L155
[ "def", "get_changed_devices", "(", "self", ",", "timestamp", ")", ":", "if", "timestamp", "is", "None", ":", "payload", "=", "{", "}", "else", ":", "payload", "=", "{", "'timeout'", ":", "SUBSCRIPTION_WAIT", ",", "'minimumdelay'", ":", "SUBSCRIPTION_MIN_WAIT", "}", "payload", ".", "update", "(", "timestamp", ")", "# double the timeout here so requests doesn't timeout before vera", "payload", ".", "update", "(", "{", "'id'", ":", "'lu_sdata'", ",", "}", ")", "logger", ".", "debug", "(", "\"get_changed_devices() requesting payload %s\"", ",", "str", "(", "payload", ")", ")", "r", "=", "self", ".", "data_request", "(", "payload", ",", "TIMEOUT", "*", "2", ")", "r", ".", "raise_for_status", "(", ")", "# If the Vera disconnects before writing a full response (as lu_sdata", "# will do when interrupted by a Luup reload), the requests module will", "# happily return 200 with an empty string. So, test for empty response,", "# so we don't rely on the JSON parser to throw an exception.", "if", "r", ".", "text", "==", "\"\"", ":", "raise", "PyveraError", "(", "\"Empty response from Vera\"", ")", "# Catch a wide swath of what the JSON parser might throw, within", "# reason. Unfortunately, some parsers don't specifically return", "# json.decode.JSONDecodeError, but so far most seem to derive what", "# they do throw from ValueError, so that's helpful.", "try", ":", "result", "=", "r", ".", "json", "(", ")", "except", "ValueError", "as", "ex", ":", "raise", "PyveraError", "(", "\"JSON decode error: \"", "+", "str", "(", "ex", ")", ")", "if", "not", "(", "type", "(", "result", ")", "is", "dict", "and", "'loadtime'", "in", "result", "and", "'dataversion'", "in", "result", ")", ":", "raise", "PyveraError", "(", "\"Unexpected/garbled response from Vera\"", ")", "# At this point, all good. 
Update timestamp and return change data.", "device_data", "=", "result", ".", "get", "(", "'devices'", ")", "timestamp", "=", "{", "'loadtime'", ":", "result", ".", "get", "(", "'loadtime'", ")", ",", "'dataversion'", ":", "result", ".", "get", "(", "'dataversion'", ")", "}", "return", "[", "device_data", ",", "timestamp", "]" ]
converts a dictionary to a HTML table row
def dict_to_htmlrow(d):
    """Convert a dictionary to a single HTML table row of key/value cell pairs."""
    cells = []
    for key, val in d.items():
        text = val if type(val) == str else str(val)
        cells.append('<TD><p>' + key + ':</p></TD><TD><p>' + text + '</p></TD>')
    return "<TR>\n" + ''.join(cells) + '</TR>\n'
2,212
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/web_app/web_utils.py#L157-L168
[ "def", "get_free_gpus", "(", "max_procs", "=", "0", ")", ":", "# Try connect with NVIDIA drivers", "logger", "=", "logging", ".", "getLogger", "(", "__name__", ")", "try", ":", "py3nvml", ".", "nvmlInit", "(", ")", "except", ":", "str_", "=", "\"\"\"Couldn't connect to nvml drivers. Check they are installed correctly.\"\"\"", "warnings", ".", "warn", "(", "str_", ",", "RuntimeWarning", ")", "logger", ".", "warn", "(", "str_", ")", "return", "[", "]", "num_gpus", "=", "py3nvml", ".", "nvmlDeviceGetCount", "(", ")", "gpu_free", "=", "[", "False", "]", "*", "num_gpus", "for", "i", "in", "range", "(", "num_gpus", ")", ":", "try", ":", "h", "=", "py3nvml", ".", "nvmlDeviceGetHandleByIndex", "(", "i", ")", "except", ":", "continue", "procs", "=", "try_get_info", "(", "py3nvml", ".", "nvmlDeviceGetComputeRunningProcesses", ",", "h", ",", "[", "'something'", "]", ")", "if", "len", "(", "procs", ")", "<=", "max_procs", ":", "gpu_free", "[", "i", "]", "=", "True", "py3nvml", ".", "nvmlShutdown", "(", ")", "return", "gpu_free" ]
reads a CSV file and converts it to HTML
def read_csv_to_html_table(csvFile, hasHeader='N'):
    """Read a CSV file and convert it to an HTML table.

    csvFile   -- path of the file to read
    hasHeader -- 'Y' renders the first row with <TH> cells

    Fix in this version: the row counter initialisation was commented out
    in the original, so ``numRows += 1`` raised UnboundLocalError on every
    call. Columns are still split naively on ',' (no quoting support),
    matching the original intent.
    """
    txt = '<table class="as-table as-table-zebra as-table-horizontal">'
    numRows = 1  # was '# numRows = 1' - restored so header detection works
    with open(csvFile, "r") as f:
        for row in f:
            if hasHeader == 'Y' and numRows == 1:
                td_begin = '<TH>'
                td_end = '</TH>'
            else:
                td_begin = '<TD>'
                td_end = '</TD>'
            cols = row.split(',')
            numRows += 1
            txt += "<TR>"
            for col in cols:
                # original wrapped this in a no-op try/except left over from
                # a Python 2 decode step; plain assignment cannot fail here
                txt += td_begin + col.strip('"') + td_end
            txt += "</TR>\n"
    txt += "</TABLE>\n\n"
    return txt
2,213
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/web_app/web_utils.py#L170-L202
[ "def", "sort_key", "(", "val", ")", ":", "return", "numpy", ".", "sum", "(", "(", "max", "(", "val", ")", "+", "1", ")", "**", "numpy", ".", "arange", "(", "len", "(", "val", ")", "-", "1", ",", "-", "1", ",", "-", "1", ")", "*", "val", ")" ]
reads a CSV file and converts it to a HTML List
def read_csv_to_html_list(csvFile):
    """Read a CSV file and render each row as an HTML <div> block."""
    pieces = []
    with open(csvFile) as fh:
        for row in csv.reader(fh, delimiter=','):
            pieces.append('<div id="table_row">')
            for cell in row:
                pieces.append(" ")
                try:
                    pieces.append(cell)
                except Exception:
                    # kept from original: fallback marker for bad cell data
                    pieces.append('Error')
                pieces.append(" ")
            pieces.append("</div>\n")
    return ''.join(pieces)
2,214
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/web_app/web_utils.py#L206-L222
[ "def", "sort_key", "(", "val", ")", ":", "return", "numpy", ".", "sum", "(", "(", "max", "(", "val", ")", "+", "1", ")", "**", "numpy", ".", "arange", "(", "len", "(", "val", ")", "-", "1", ",", "-", "1", ",", "-", "1", ")", "*", "val", ")" ]
the goal of the explore agent is to move to the target while avoiding blockages on the grid . This function is messy and needs to be looked at . It currently has a bug in that the backtrack oscillates so need a new method of doing this - probably checking if previously backtracked in that direction for those coords ie keep track of cells visited and number of times visited?
def do_your_job ( self ) : y , x = self . get_intended_direction ( ) # first find out where we should go if self . target_x == self . current_x and self . target_y == self . current_y : #print(self.name + " : TARGET ACQUIRED") if len ( self . results ) == 0 : self . results . append ( "TARGET ACQUIRED" ) self . lg_mv ( 2 , self . name + ": TARGET ACQUIRED" ) return self . num_steps += 1 # first try is to move on the x axis in a simple greedy search accessible = [ '\\' , '-' , '|' , '/' , '.' ] # randomly move in Y direction instead of X if all paths clear if y != 0 and x != 0 and self . backtrack == [ 0 , 0 ] : if random . randint ( 1 , 10 ) > 6 : if self . grd . get_tile ( self . current_y + y , self . current_x ) in accessible : self . current_y += y self . lg_mv ( 3 , self . name + ": randomly moving Y axis " + str ( self . num_steps ) ) return if x == 1 : if self . grd . get_tile ( self . current_y , self . current_x + 1 ) in accessible : self . current_x += 1 self . lg_mv ( 3 , self . name + ": move# " + str ( self . num_steps ) + " - moving West" ) return elif x == - 1 : if self . grd . get_tile ( self . current_y , self . current_x - 1 ) in accessible : self . current_x -= 1 self . lg_mv ( 3 , self . name + ": move# " + str ( self . num_steps ) + " - moving East" ) return elif y == 1 : if self . grd . get_tile ( self . current_y + 1 , self . current_x ) in accessible : self . current_y += 1 self . lg_mv ( 3 , self . name + ": move# " + str ( self . num_steps ) + " - moving South" ) return elif y == - 1 : if self . grd . get_tile ( self . current_y - 1 , self . current_x ) in accessible : self . current_y -= 1 self . lg_mv ( 3 , self . name + ": move# " + str ( self . num_steps ) + " - moving North" ) return self . grd . set_tile ( self . start_y , self . start_x , 'A' ) self . grd . save ( os . path . join ( os . getcwd ( ) , 'agent.txt' ) )
2,215
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/agents/explore/agent_explore_grid.py#L43-L95
[ "def", "loadJSON", "(", "self", ",", "jdata", ")", ":", "super", "(", "StringColumn", ",", "self", ")", ".", "loadJSON", "(", "jdata", ")", "# load additional info", "self", ".", "__maxLength", "=", "jdata", ".", "get", "(", "'maxLength'", ")", "or", "self", ".", "__maxLength" ]
wrapper for debugging print and log methods
def lg_mv(self, log_lvl, txt):
    """Debug wrapper: print movement text plus current coords when verbose enough.

    Messages are emitted only when log_lvl is at or below self.LOG_LEVEL.
    """
    if log_lvl > self.LOG_LEVEL:
        return
    print(txt + str(self.current_y) + "," + str(self.current_x))
2,216
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/agents/explore/agent_explore_grid.py#L98-L103
[ "def", "get_applicable_content_pattern_names", "(", "self", ",", "path", ")", ":", "encodings", "=", "set", "(", ")", "applicable_content_pattern_names", "=", "set", "(", ")", "for", "path_pattern_name", ",", "content_pattern_names", "in", "self", ".", "_required_matches", ".", "items", "(", ")", ":", "m", "=", "self", ".", "_path_matchers", "[", "path_pattern_name", "]", "if", "m", ".", "matches", "(", "path", ")", ":", "encodings", ".", "add", "(", "m", ".", "content_encoding", ")", "applicable_content_pattern_names", ".", "update", "(", "content_pattern_names", ")", "if", "len", "(", "encodings", ")", ">", "1", ":", "raise", "ValueError", "(", "'Path matched patterns with multiple content encodings ({}): {}'", ".", "format", "(", "', '", ".", "join", "(", "sorted", "(", "encodings", ")", ")", ",", "path", ")", ")", "content_encoding", "=", "next", "(", "iter", "(", "encodings", ")", ")", "if", "encodings", "else", "None", "return", "applicable_content_pattern_names", ",", "content_encoding" ]
returns a Y X value showing which direction the agent should move in order to get to the target
def get_intended_direction(self):
    """Return a (y, x) pair of -1/0/+1 steps pointing from the agent towards the target."""
    if self.target_x == self.current_x and self.target_y == self.current_y:
        return 0, 0  # target already acquired
    # sign of the delta on each axis, computed branch-free
    dy = (self.target_y > self.current_y) - (self.target_y < self.current_y)
    dx = (self.target_x > self.current_x) - (self.target_x < self.current_x)
    return dy, dx
2,217
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/agents/explore/agent_explore_grid.py#L105-L122
[ "def", "filters", "(", "filter_directory", "=", "None", ",", "update", "=", "False", ",", "fmt", "=", "'table'", ",", "*", "*", "kwargs", ")", ":", "if", "filter_directory", "is", "None", ":", "filter_directory", "=", "resource_filename", "(", "'svo_filters'", ",", "'data/filters/'", ")", "# Get the pickle path and make sure file exists", "p_path", "=", "os", ".", "path", ".", "join", "(", "filter_directory", ",", "'filter_list.p'", ")", "updated", "=", "False", "if", "not", "os", ".", "path", ".", "isfile", "(", "p_path", ")", ":", "os", ".", "system", "(", "'touch {}'", ".", "format", "(", "p_path", ")", ")", "if", "update", ":", "print", "(", "'Loading filters into table...'", ")", "# Get all the filters (except the pickle)", "files", "=", "glob", "(", "filter_directory", "+", "'*'", ")", "files", "=", "[", "f", "for", "f", "in", "files", "if", "not", "f", ".", "endswith", "(", "'.p'", ")", "]", "bands", "=", "[", "os", ".", "path", ".", "basename", "(", "b", ")", "for", "b", "in", "files", "]", "tables", "=", "[", "]", "for", "band", "in", "bands", ":", "# Load the filter", "band", "=", "band", ".", "replace", "(", "'.txt'", ",", "''", ")", "filt", "=", "Filter", "(", "band", ",", "*", "*", "kwargs", ")", "filt", ".", "Band", "=", "band", "# Put metadata into table with correct dtypes", "info", "=", "filt", ".", "info", "(", "True", ")", "vals", "=", "[", "float", "(", "i", ")", "if", "i", ".", "replace", "(", "'.'", ",", "''", ")", ".", "replace", "(", "'-'", ",", "''", ")", ".", "replace", "(", "'+'", ",", "''", ")", ".", "isnumeric", "(", ")", "else", "i", "for", "i", "in", "info", "[", "'Values'", "]", "]", "dtypes", "=", "np", ".", "array", "(", "[", "type", "(", "i", ")", "for", "i", "in", "vals", "]", ")", "table", "=", "at", ".", "Table", "(", "np", ".", "array", "(", "[", "vals", "]", ")", ",", "names", "=", "info", "[", "'Attributes'", "]", ",", "dtype", "=", "dtypes", ")", "tables", ".", "append", "(", "table", ")", "del", "filt", ",", "info", ",", 
"table", "# Write to the pickle", "with", "open", "(", "p_path", ",", "'wb'", ")", "as", "file", ":", "pickle", ".", "dump", "(", "at", ".", "vstack", "(", "tables", ")", ",", "file", ")", "# Load the saved pickle", "data", "=", "{", "}", "if", "os", ".", "path", ".", "isfile", "(", "p_path", ")", ":", "with", "open", "(", "p_path", ",", "'rb'", ")", "as", "file", ":", "data", "=", "pickle", ".", "load", "(", "file", ")", "# Return the data", "if", "data", ":", "if", "fmt", "==", "'dict'", ":", "data", "=", "{", "r", "[", "0", "]", ":", "{", "k", ":", "r", "[", "k", "]", ".", "value", "if", "hasattr", "(", "r", "[", "k", "]", ",", "'unit'", ")", "else", "r", "[", "k", "]", "for", "k", "in", "data", ".", "keys", "(", ")", "[", "1", ":", "]", "}", "for", "r", "in", "data", "}", "else", ":", "# Add Band as index", "data", ".", "add_index", "(", "'Band'", ")", "return", "data", "# Or try to generate it once", "else", ":", "if", "not", "updated", ":", "updated", "=", "True", "filters", "(", "update", "=", "True", ")", "else", ":", "print", "(", "'No filters found in'", ",", "filter_directory", ")" ]
dumps the status of the agent
def show_status(self):
    """Dump the agent's coordinates: prints a report (and the grid) and returns the text."""
    txt = 'Agent Status:\n'
    print(txt)
    for attr in ('start_x', 'start_y', 'target_x', 'target_y',
                 'current_x', 'current_y'):
        txt += attr + " = " + str(getattr(self, attr)) + "\n"
    print(self.grd)  # grid is printed but not included in the returned text
    return txt
2,218
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/agents/explore/agent_explore_grid.py#L135-L150
[ "def", "xrdb", "(", "xrdb_files", "=", "None", ")", ":", "xrdb_files", "=", "xrdb_files", "or", "[", "os", ".", "path", ".", "join", "(", "CACHE_DIR", ",", "\"colors.Xresources\"", ")", "]", "if", "shutil", ".", "which", "(", "\"xrdb\"", ")", "and", "OS", "!=", "\"Darwin\"", ":", "for", "file", "in", "xrdb_files", ":", "subprocess", ".", "run", "(", "[", "\"xrdb\"", ",", "\"-merge\"", ",", "\"-quiet\"", ",", "file", "]", ")" ]
retrieve the metadata from an MP3 file
def get_audio_metadata_old ( fname ) : audio_dict = { } print ( "IDv2 tag info for %s:" % fname ) try : audio = mutagenx . id3 . ID3 ( fname , translate = False ) except StandardError as err : print ( "ERROR = " + str ( err ) ) #else: #print(audio.pprint().encode("utf-8", "replace")) #for frame in audio.values(): # print(repr(frame)) try : audio_dict [ "title" ] = audio [ "title" ] except KeyError : print ( "No title" ) try : audio_dict [ "artist" ] = audio [ "artist" ] # tags['TPE1'] except KeyError : print ( "No artist" ) try : audio_dict [ "album" ] = audio [ "album" ] except KeyError : print ( "No album" ) try : audio_dict [ "length" ] = audio [ "length" ] except KeyError : print ( "No length" ) #pprint.pprint(audio.tags) return audio_dict
2,219
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/toolbox/audio_tools.py#L65-L100
[ "def", "_shannon_radii_from_cn", "(", "species_list", ",", "cn_roman", ",", "radius_to_compare", "=", "0", ")", ":", "shannon_radii", "=", "[", "]", "for", "s", "in", "species_list", ":", "try", ":", "radius", "=", "s", ".", "get_shannon_radius", "(", "cn_roman", ")", "shannon_radii", ".", "append", "(", "{", "'species'", ":", "s", ",", "'radius'", ":", "radius", ",", "'radii_diff'", ":", "radius", "-", "radius_to_compare", "}", ")", "except", "KeyError", ":", "pass", "return", "shannon_radii" ]
Find all row names and the maximum column widths .
def calculate_columns ( sequence ) : columns = { } for row in sequence : for key in row . keys ( ) : if key not in columns : columns [ key ] = len ( key ) value_length = len ( str ( row [ key ] ) ) if value_length > columns [ key ] : columns [ key ] = value_length return columns
2,220
https://github.com/Nachtfeuer/pipeline/blob/04ca18c4e95e4349532bb45b768206393e1f2c13/spline/tools/table.py#L5-L26
[ "def", "put_on_top", "(", "self", ",", "request", ",", "queryset", ")", ":", "queryset", ".", "update", "(", "publication_date", "=", "timezone", ".", "now", "(", ")", ")", "self", ".", "ping_directories", "(", "request", ",", "queryset", ",", "messages", "=", "False", ")", "self", ".", "message_user", "(", "request", ",", "_", "(", "'The selected entries are now set at the current date.'", ")", ")" ]
Calculate row format .
def calculate_row_format ( columns , keys = None ) : row_format = '' if keys is None : keys = columns . keys ( ) else : keys = [ key for key in keys if key in columns ] for key in keys : if len ( row_format ) > 0 : row_format += "|" row_format += "%%(%s)-%ds" % ( key , columns [ key ] ) return '|' + row_format + '|'
2,221
https://github.com/Nachtfeuer/pipeline/blob/04ca18c4e95e4349532bb45b768206393e1f2c13/spline/tools/table.py#L29-L51
[ "def", "unbind", "(", "self", ",", "devices_to_unbind", ")", ":", "if", "self", ".", "entity_api_key", "==", "\"\"", ":", "return", "{", "'status'", ":", "'failure'", ",", "'response'", ":", "'No API key found in request'", "}", "url", "=", "self", ".", "base_url", "+", "\"api/0.1.0/subscribe/unbind\"", "headers", "=", "{", "\"apikey\"", ":", "self", ".", "entity_api_key", "}", "data", "=", "{", "\"exchange\"", ":", "\"amq.topic\"", ",", "\"keys\"", ":", "devices_to_unbind", ",", "\"queue\"", ":", "self", ".", "entity_id", "}", "with", "self", ".", "no_ssl_verification", "(", ")", ":", "r", "=", "requests", ".", "delete", "(", "url", ",", "json", "=", "data", ",", "headers", "=", "headers", ")", "print", "(", "r", ")", "response", "=", "dict", "(", ")", "if", "\"No API key\"", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "json", ".", "loads", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", "[", "'message'", "]", "elif", "'unbind'", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"success\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "else", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "response", "[", "\"response\"", "]", "=", "str", "(", "r", ")", "return", "response" ]
Print sequence as ascii table to stdout .
def pprint ( sequence , keys = None ) : if len ( sequence ) > 0 : columns = calculate_columns ( sequence ) row_format = calculate_row_format ( columns , keys ) header = row_format % dict ( [ ( key , key . title ( ) ) for key in columns ] ) separator = row_format % dict ( [ ( key , '-' * columns [ key ] ) for key in columns ] ) print ( separator ) print ( header ) print ( separator ) for row in sequence : print ( row_format % row ) print ( separator )
2,222
https://github.com/Nachtfeuer/pipeline/blob/04ca18c4e95e4349532bb45b768206393e1f2c13/spline/tools/table.py#L54-L75
[ "def", "get_placement_solver", "(", "service_instance", ")", ":", "stub", "=", "salt", ".", "utils", ".", "vmware", ".", "get_new_service_instance_stub", "(", "service_instance", ",", "ns", "=", "'pbm/2.0'", ",", "path", "=", "'/pbm/sdk'", ")", "pbm_si", "=", "pbm", ".", "ServiceInstance", "(", "'ServiceInstance'", ",", "stub", ")", "try", ":", "profile_manager", "=", "pbm_si", ".", "RetrieveContent", "(", ")", ".", "placementSolver", "except", "vim", ".", "fault", ".", "NoPermission", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "VMwareApiError", "(", "'Not enough permissions. Required privilege: '", "'{0}'", ".", "format", "(", "exc", ".", "privilegeId", ")", ")", "except", "vim", ".", "fault", ".", "VimFault", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "VMwareApiError", "(", "exc", ".", "msg", ")", "except", "vmodl", ".", "RuntimeFault", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "VMwareRuntimeError", "(", "exc", ".", "msg", ")", "return", "profile_manager" ]
Run pipelines in parallel .
def matrix_worker ( data ) : matrix = data [ 'matrix' ] Logger . get_logger ( __name__ + '.worker' ) . info ( "Processing pipeline for matrix entry '%s'" , matrix [ 'name' ] ) env = matrix [ 'env' ] . copy ( ) env . update ( { 'PIPELINE_MATRIX' : matrix [ 'name' ] } ) pipeline = Pipeline ( model = data [ 'model' ] , env = env , options = data [ 'options' ] ) pipeline . hooks = data [ 'hooks' ] return pipeline . process ( data [ 'pipeline' ] )
2,223
https://github.com/Nachtfeuer/pipeline/blob/04ca18c4e95e4349532bb45b768206393e1f2c13/spline/matrix.py#L32-L50
[ "def", "init", "(", "cls", ",", "conn_string", "=", "None", ")", ":", "if", "conn_string", ":", "_update_meta", "(", "conn_string", ")", "# We initialize the engine within the models module because models'", "# schema can depend on which data types are supported by the engine", "Meta", ".", "Session", "=", "new_sessionmaker", "(", ")", "Meta", ".", "engine", "=", "Meta", ".", "Session", ".", "kw", "[", "\"bind\"", "]", "logger", ".", "info", "(", "f\"Connecting user:{Meta.DBUSER} \"", "f\"to {Meta.DBHOST}:{Meta.DBPORT}/{Meta.DBNAME}\"", ")", "Meta", ".", "_init_db", "(", ")", "if", "not", "Meta", ".", "log_path", ":", "init_logging", "(", ")", "return", "cls" ]
Check given matrix tags to be in the given list of matric tags .
def can_process_matrix ( entry , matrix_tags ) : if len ( matrix_tags ) == 0 : return True count = 0 if 'tags' in entry : for tag in matrix_tags : if tag in entry [ 'tags' ] : count += 1 return count > 0
2,224
https://github.com/Nachtfeuer/pipeline/blob/04ca18c4e95e4349532bb45b768206393e1f2c13/spline/matrix.py#L115-L134
[ "def", "_smooth", "(", "values", ":", "List", "[", "float", "]", ",", "beta", ":", "float", ")", "->", "List", "[", "float", "]", ":", "avg_value", "=", "0.", "smoothed", "=", "[", "]", "for", "i", ",", "value", "in", "enumerate", "(", "values", ")", ":", "avg_value", "=", "beta", "*", "avg_value", "+", "(", "1", "-", "beta", ")", "*", "value", "smoothed", ".", "append", "(", "avg_value", "/", "(", "1", "-", "beta", "**", "(", "i", "+", "1", ")", ")", ")", "return", "smoothed" ]
Running pipelines one after the other .
def run_matrix_ordered ( self , process_data ) : output = [ ] for entry in self . matrix : env = entry [ 'env' ] . copy ( ) env . update ( { 'PIPELINE_MATRIX' : entry [ 'name' ] } ) if Matrix . can_process_matrix ( entry , process_data . options . matrix_tags ) : self . logger . info ( "Processing pipeline for matrix entry '%s'" , entry [ 'name' ] ) pipeline = Pipeline ( model = process_data . model , env = env , options = process_data . options ) pipeline . hooks = process_data . hooks result = pipeline . process ( process_data . pipeline ) output += result [ 'output' ] if not result [ 'success' ] : return { 'success' : False , 'output' : output } return { 'success' : True , 'output' : output }
2,225
https://github.com/Nachtfeuer/pipeline/blob/04ca18c4e95e4349532bb45b768206393e1f2c13/spline/matrix.py#L136-L157
[ "def", "author_info", "(", "name", ",", "contact", "=", "None", ",", "public_key", "=", "None", ")", ":", "return", "Storage", "(", "name", "=", "name", ",", "contact", "=", "contact", ",", "public_key", "=", "public_key", ")" ]
Running pipelines in parallel .
def run_matrix_in_parallel ( self , process_data ) : worker_data = [ { 'matrix' : entry , 'pipeline' : process_data . pipeline , 'model' : process_data . model , 'options' : process_data . options , 'hooks' : process_data . hooks } for entry in self . matrix if Matrix . can_process_matrix ( entry , process_data . options . matrix_tags ) ] output = [ ] success = True with closing ( multiprocessing . Pool ( multiprocessing . cpu_count ( ) ) ) as pool : for result in pool . map ( matrix_worker , worker_data ) : output += result [ 'output' ] if not result [ 'success' ] : success = False return { 'success' : success , 'output' : output }
2,226
https://github.com/Nachtfeuer/pipeline/blob/04ca18c4e95e4349532bb45b768206393e1f2c13/spline/matrix.py#L159-L172
[ "def", "init", "(", "cls", ",", "conn_string", "=", "None", ")", ":", "if", "conn_string", ":", "_update_meta", "(", "conn_string", ")", "# We initialize the engine within the models module because models'", "# schema can depend on which data types are supported by the engine", "Meta", ".", "Session", "=", "new_sessionmaker", "(", ")", "Meta", ".", "engine", "=", "Meta", ".", "Session", ".", "kw", "[", "\"bind\"", "]", "logger", ".", "info", "(", "f\"Connecting user:{Meta.DBUSER} \"", "f\"to {Meta.DBHOST}:{Meta.DBPORT}/{Meta.DBNAME}\"", ")", "Meta", ".", "_init_db", "(", ")", "if", "not", "Meta", ".", "log_path", ":", "init_logging", "(", ")", "return", "cls" ]
Process the pipeline per matrix item .
def process ( self , process_data ) : if self . parallel and not process_data . options . dry_run : return self . run_matrix_in_parallel ( process_data ) return self . run_matrix_ordered ( process_data )
2,227
https://github.com/Nachtfeuer/pipeline/blob/04ca18c4e95e4349532bb45b768206393e1f2c13/spline/matrix.py#L174-L178
[ "def", "_adapt_WSDateTime", "(", "dt", ")", ":", "try", ":", "ts", "=", "int", "(", "(", "dt", ".", "replace", "(", "tzinfo", "=", "pytz", ".", "utc", ")", "-", "datetime", "(", "1970", ",", "1", ",", "1", ",", "tzinfo", "=", "pytz", ".", "utc", ")", ")", ".", "total_seconds", "(", ")", ")", "except", "(", "OverflowError", ",", "OSError", ")", ":", "if", "dt", "<", "datetime", ".", "now", "(", ")", ":", "ts", "=", "0", "else", ":", "ts", "=", "2", "**", "63", "-", "1", "return", "ts" ]
Takes a SQL string containing 0 or more statements and returns a list of individual statements as strings . Comments and empty statements are ignored .
def _sqlfile_to_statements ( sql ) : statements = ( sqlparse . format ( stmt , strip_comments = True ) . strip ( ) for stmt in sqlparse . split ( sql ) ) return [ stmt for stmt in statements if stmt ]
2,228
https://github.com/aartur/mschematool/blob/57ec9541f80b44890294126eab92ce243c8833c4/mschematool/core.py#L94-L101
[ "def", "AddFrequencyObject", "(", "self", ",", "frequency", ",", "problem_reporter", ")", ":", "if", "frequency", "is", "not", "None", ":", "self", ".", "AddFrequency", "(", "frequency", ".", "StartTime", "(", ")", ",", "frequency", ".", "EndTime", "(", ")", ",", "frequency", ".", "HeadwaySecs", "(", ")", ",", "frequency", ".", "ExactTimes", "(", ")", ",", "problem_reporter", ")" ]
Returns a name of a new migration . It will usually be a filename with a valid and unique name .
def generate_migration_name ( self , name , suffix ) : return os . path . join ( self . dir , 'm{datestr}_{name}.{suffix}' . format ( datestr = datetime . datetime . utcnow ( ) . strftime ( '%Y%m%d%H%M%S' ) , name = name . replace ( ' ' , '_' ) , suffix = suffix ) )
2,229
https://github.com/aartur/mschematool/blob/57ec9541f80b44890294126eab92ce243c8833c4/mschematool/core.py#L118-L129
[ "def", "merge", "(", "self", ",", "options", ")", ":", "if", "not", "options", ":", "return", "_CallSettings", "(", "timeout", "=", "self", ".", "timeout", ",", "retry", "=", "self", ".", "retry", ",", "page_descriptor", "=", "self", ".", "page_descriptor", ",", "page_token", "=", "self", ".", "page_token", ",", "bundler", "=", "self", ".", "bundler", ",", "bundle_descriptor", "=", "self", ".", "bundle_descriptor", ",", "kwargs", "=", "self", ".", "kwargs", ")", "else", ":", "if", "options", ".", "timeout", "==", "OPTION_INHERIT", ":", "timeout", "=", "self", ".", "timeout", "else", ":", "timeout", "=", "options", ".", "timeout", "if", "options", ".", "retry", "==", "OPTION_INHERIT", ":", "retry", "=", "self", ".", "retry", "else", ":", "retry", "=", "options", ".", "retry", "if", "options", ".", "page_token", "==", "OPTION_INHERIT", ":", "page_token", "=", "self", ".", "page_token", "else", ":", "page_token", "=", "options", ".", "page_token", "if", "options", ".", "is_bundling", ":", "bundler", "=", "self", ".", "bundler", "else", ":", "bundler", "=", "None", "if", "options", ".", "kwargs", "==", "OPTION_INHERIT", ":", "kwargs", "=", "self", ".", "kwargs", "else", ":", "kwargs", "=", "self", ".", "kwargs", ".", "copy", "(", ")", "kwargs", ".", "update", "(", "options", ".", "kwargs", ")", "return", "_CallSettings", "(", "timeout", "=", "timeout", ",", "retry", "=", "retry", ",", "page_descriptor", "=", "self", ".", "page_descriptor", ",", "page_token", "=", "page_token", ",", "bundler", "=", "bundler", ",", "bundle_descriptor", "=", "self", ".", "bundle_descriptor", ",", "kwargs", "=", "kwargs", ")" ]
Subclasses should call this method instead of module . migrate directly to support db_config optional argument .
def _call_migrate ( self , module , connection_param ) : args = [ connection_param ] spec = inspect . getargspec ( module . migrate ) if len ( spec . args ) == 2 : args . append ( self . db_config ) return module . migrate ( * args )
2,230
https://github.com/aartur/mschematool/blob/57ec9541f80b44890294126eab92ce243c8833c4/mschematool/core.py#L232-L240
[ "def", "getOverlayKey", "(", "self", ",", "ulOverlayHandle", ",", "pchValue", ",", "unBufferSize", ")", ":", "fn", "=", "self", ".", "function_table", ".", "getOverlayKey", "pError", "=", "EVROverlayError", "(", ")", "result", "=", "fn", "(", "ulOverlayHandle", ",", "pchValue", ",", "unBufferSize", ",", "byref", "(", "pError", ")", ")", "return", "result", ",", "pError" ]
uses the input data which may be a string list number or file to work out how to load the data ( this can be overridden by passing the data_type on the command line
def _identify_datatype ( self , input_data ) : if isinstance ( input_data , ( int , float ) ) : self . data_type = 'number' elif isinstance ( input_data , ( list ) ) : #, set self . data_type = 'list' elif isinstance ( input_data , dict ) : self . data_type = 'dict' elif type ( input_data ) is str : if self . input_data [ 0 : 4 ] == 'http' : self . data_type = 'url' elif os . path . exists ( input_data ) : self . data_type = 'file' else : self . data_type = 'str' lg . record_result ( '_identify_datatype' , self . name + ' is ' + self . data_type )
2,231
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/dataTools/cls_data.py#L51-L71
[ "def", "__unLock", "(", "self", ")", ":", "self", ".", "_operation", "=", "False", "self", ".", "_timer", "=", "0", "self", ".", "_isLocked", "=", "False" ]
get the size in bytes and num records of the content
def _calc_size_stats ( self ) : self . total_records = 0 self . total_length = 0 self . total_nodes = 0 if type ( self . content [ 'data' ] ) is dict : self . total_length += len ( str ( self . content [ 'data' ] ) ) self . total_records += 1 self . total_nodes = sum ( len ( x ) for x in self . content [ 'data' ] . values ( ) ) elif hasattr ( self . content [ 'data' ] , '__iter__' ) and type ( self . content [ 'data' ] ) is not str : self . _get_size_recursive ( self . content [ 'data' ] ) else : self . total_records += 1 self . total_length += len ( str ( self . content [ 'data' ] ) ) return str ( self . total_records ) + ' records [or ' + str ( self . total_nodes ) + ' nodes], taking ' + str ( self . total_length ) + ' bytes'
2,232
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/dataTools/cls_data.py#L120-L138
[ "def", "getStartingApplication", "(", "self", ",", "pchAppKeyBuffer", ",", "unAppKeyBufferLen", ")", ":", "fn", "=", "self", ".", "function_table", ".", "getStartingApplication", "result", "=", "fn", "(", "pchAppKeyBuffer", ",", "unAppKeyBufferLen", ")", "return", "result" ]
recursively walk through a data set or json file to get the total number of nodes
def _get_size_recursive ( self , dat ) : self . total_records += 1 #self.total_nodes += 1 for rec in dat : if hasattr ( rec , '__iter__' ) and type ( rec ) is not str : self . _get_size_recursive ( rec ) else : self . total_nodes += 1 self . total_length += len ( str ( rec ) )
2,233
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/dataTools/cls_data.py#L140-L152
[ "def", "handleServerEvents", "(", "self", ",", "msg", ")", ":", "self", ".", "log", ".", "debug", "(", "'MSG %s'", ",", "msg", ")", "self", ".", "handleConnectionState", "(", "msg", ")", "if", "msg", ".", "typeName", "==", "\"error\"", ":", "self", ".", "handleErrorEvents", "(", "msg", ")", "elif", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_CURRENT_TIME\"", "]", ":", "if", "self", ".", "time", "<", "msg", ".", "time", ":", "self", ".", "time", "=", "msg", ".", "time", "elif", "(", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_TYPE_MKT_DEPTH\"", "]", "or", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_TYPE_MKT_DEPTH_L2\"", "]", ")", ":", "self", ".", "handleMarketDepth", "(", "msg", ")", "elif", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_TYPE_TICK_STRING\"", "]", ":", "self", ".", "handleTickString", "(", "msg", ")", "elif", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_TYPE_TICK_PRICE\"", "]", ":", "self", ".", "handleTickPrice", "(", "msg", ")", "elif", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_TYPE_TICK_GENERIC\"", "]", ":", "self", ".", "handleTickGeneric", "(", "msg", ")", "elif", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_TYPE_TICK_SIZE\"", "]", ":", "self", ".", "handleTickSize", "(", "msg", ")", "elif", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_TYPE_TICK_OPTION\"", "]", ":", "self", ".", "handleTickOptionComputation", "(", "msg", ")", "elif", "(", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_TYPE_OPEN_ORDER\"", "]", "or", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_TYPE_OPEN_ORDER_END\"", "]", "or", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_TYPE_ORDER_STATUS\"", "]", ")", ":", "self", ".", "handleOrders", "(", "msg", ")", "elif", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_TYPE_HISTORICAL_DATA\"", "]", ":", "self", ".", "handleHistoricalData", "(", "msg", ")", "elif", "msg", ".", "typeName", "==", "dataTypes", "[", 
"\"MSG_TYPE_ACCOUNT_UPDATES\"", "]", ":", "self", ".", "handleAccount", "(", "msg", ")", "elif", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_TYPE_PORTFOLIO_UPDATES\"", "]", ":", "self", ".", "handlePortfolio", "(", "msg", ")", "elif", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_TYPE_POSITION\"", "]", ":", "self", ".", "handlePosition", "(", "msg", ")", "elif", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_TYPE_NEXT_ORDER_ID\"", "]", ":", "self", ".", "handleNextValidId", "(", "msg", ".", "orderId", ")", "elif", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_CONNECTION_CLOSED\"", "]", ":", "self", ".", "handleConnectionClosed", "(", "msg", ")", "# elif msg.typeName == dataTypes[\"MSG_TYPE_MANAGED_ACCOUNTS\"]:", "# self.accountCode = msg.accountsList", "elif", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_COMMISSION_REPORT\"", "]", ":", "self", ".", "commission", "=", "msg", ".", "commissionReport", ".", "m_commission", "elif", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_CONTRACT_DETAILS\"", "]", ":", "self", ".", "handleContractDetails", "(", "msg", ",", "end", "=", "False", ")", "elif", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_CONTRACT_DETAILS_END\"", "]", ":", "self", ".", "handleContractDetails", "(", "msg", ",", "end", "=", "True", ")", "elif", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_TICK_SNAPSHOT_END\"", "]", ":", "self", ".", "ibCallback", "(", "caller", "=", "\"handleTickSnapshotEnd\"", ",", "msg", "=", "msg", ")", "else", ":", "# log handler msg", "self", ".", "log_msg", "(", "\"server\"", ",", "msg", ")" ]
Create a readable version string from version_info tuple components .
def _make_version ( major , minor , micro , releaselevel , serial ) : assert releaselevel in [ 'alpha' , 'beta' , 'candidate' , 'final' ] version = "%d.%d" % ( major , minor ) if micro : version += ".%d" % ( micro , ) if releaselevel != 'final' : short = { 'alpha' : 'a' , 'beta' : 'b' , 'candidate' : 'rc' } [ releaselevel ] version += "%s%d" % ( short , serial ) return version
2,234
https://github.com/staticdev/django-pagination-bootstrap/blob/b4bf8352a364b223babbc5f33e14ecabd82c0886/pagination_bootstrap/version.py#L3-L12
[ "def", "setDefaultApplicationForMimeType", "(", "self", ",", "pchAppKey", ",", "pchMimeType", ")", ":", "fn", "=", "self", ".", "function_table", ".", "setDefaultApplicationForMimeType", "result", "=", "fn", "(", "pchAppKey", ",", "pchMimeType", ")", "return", "result" ]
Make the URL people should start at for this version of coverage . py .
def _make_url ( major , minor , micro , releaselevel , serial ) : url = "https://django-pagination-bootstrap.readthedocs.io" if releaselevel != 'final' : # For pre-releases, use a version-specific URL. url += "/en/" + _make_version ( major , minor , micro , releaselevel , serial ) return url
2,235
https://github.com/staticdev/django-pagination-bootstrap/blob/b4bf8352a364b223babbc5f33e14ecabd82c0886/pagination_bootstrap/version.py#L15-L21
[ "def", "encode", "(", "self", ",", "transmission", ")", ":", "data", "=", "''", "data", "+=", "self", ".", "_record_encode", "(", "transmission", ".", "header", ")", "for", "group", "in", "transmission", ".", "groups", ":", "data", "+=", "self", ".", "_record_encode", "(", "group", ".", "group_header", ")", "for", "transaction", "in", "group", ".", "transactions", ":", "for", "record", "in", "transaction", ":", "data", "+=", "self", ".", "_record_encode", "(", "record", ")", "data", "+=", "self", ".", "_record_encode", "(", "group", ".", "group_trailer", ")", "data", "+=", "self", ".", "_record_encode", "(", "transmission", ".", "trailer", ")", "return", "data" ]
return a list of unique paths in the file list
def get_list_of_paths ( self ) : all_paths = [ ] for p in self . fl_metadata : try : all_paths . append ( p [ 'path' ] ) except : try : print ( 'cls_filelist - no key path, ignoring folder ' + str ( p ) ) except : print ( 'cls_filelist - no key path, ignoring odd character folder' ) return list ( set ( all_paths ) )
2,236
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/lib/cls_filelist.py#L47-L61
[ "def", "jtag_configure", "(", "self", ",", "instr_regs", "=", "0", ",", "data_bits", "=", "0", ")", ":", "if", "not", "util", ".", "is_natural", "(", "instr_regs", ")", ":", "raise", "ValueError", "(", "'IR value is not a natural number.'", ")", "if", "not", "util", ".", "is_natural", "(", "data_bits", ")", ":", "raise", "ValueError", "(", "'Data bits is not a natural number.'", ")", "self", ".", "_dll", ".", "JLINKARM_ConfigJTAG", "(", "instr_regs", ",", "data_bits", ")", "return", "None" ]
collects the files metadata - note that this will fail with strange errors if network connection drops out to shared folder but it is better to stop the program rather than do a try except otherwise you will get an incomplete set of files .
def add_file_metadata ( self , fname ) : file_dict = { } file_dict [ "fullfilename" ] = fname try : file_dict [ "name" ] = os . path . basename ( fname ) file_dict [ "date" ] = self . GetDateAsString ( fname ) file_dict [ "size" ] = os . path . getsize ( fname ) file_dict [ "path" ] = os . path . dirname ( fname ) except IOError : print ( 'Error getting metadata for file' ) self . fl_metadata . append ( file_dict )
2,237
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/lib/cls_filelist.py#L100-L119
[ "def", "_SetHeader", "(", "self", ",", "new_values", ")", ":", "row", "=", "self", ".", "row_class", "(", ")", "row", ".", "row", "=", "0", "for", "v", "in", "new_values", ":", "row", "[", "v", "]", "=", "v", "self", ".", "_table", "[", "0", "]", "=", "row" ]
saves as csv format
def print_file_details_as_csv ( self , fname , col_headers ) : line = '' qu = '"' d = ',' for fld in col_headers : if fld == "fullfilename" : line = line + qu + fname + qu + d if fld == "name" : line = line + qu + os . path . basename ( fname ) + qu + d if fld == "date" : line = line + qu + self . GetDateAsString ( fname ) + qu + d if fld == "size" : line = line + qu + self . get_size_as_string ( fname ) + qu + d if fld == "path" : try : line = line + qu + os . path . dirname ( fname ) + qu + d except IOError : line = line + qu + 'ERROR_PATH' + qu + d return line
2,238
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/lib/cls_filelist.py#L145-L165
[ "def", "parse_url", "(", "url", ")", ":", "parsed", "=", "url", "if", "not", "url", ".", "startswith", "(", "\"http://\"", ")", "and", "not", "url", ".", "startswith", "(", "\"https://\"", ")", ":", "# if url is like www.yahoo.com", "parsed", "=", "\"http://\"", "+", "parsed", "elif", "url", ".", "startswith", "(", "\"https://\"", ")", ":", "parsed", "=", "parsed", "[", "8", ":", "]", "parsed", "=", "\"http://\"", "+", "parsed", "index_hash", "=", "parsed", ".", "rfind", "(", "\"#\"", ")", "# remove trailing #", "index_slash", "=", "parsed", ".", "rfind", "(", "\"/\"", ")", "if", "index_hash", ">", "index_slash", ":", "parsed", "=", "parsed", "[", "0", ":", "index_hash", "]", "return", "parsed" ]
uses a List of files and collects meta data on them and saves to an text file as a list or with metadata depending on opFormat .
def save_filelist ( self , opFile , opFormat , delim = ',' , qu = '"' ) : op_folder = os . path . dirname ( opFile ) if op_folder is not None : # short filename passed if not os . path . exists ( op_folder ) : os . makedirs ( op_folder ) with open ( opFile , 'w' ) as fout : fout . write ( "fullFilename" + delim ) for colHeading in opFormat : fout . write ( colHeading + delim ) fout . write ( '\n' ) for f in self . filelist : line = qu + f + qu + delim try : for fld in opFormat : if fld == "name" : line = line + qu + os . path . basename ( f ) + qu + delim if fld == "date" : line = line + qu + self . GetDateAsString ( f ) + qu + delim if fld == "size" : line = line + qu + str ( os . path . getsize ( f ) ) + qu + delim if fld == "path" : line = line + qu + os . path . dirname ( f ) + qu + delim except IOError : line += '\n' # no metadata try : fout . write ( str ( line . encode ( 'ascii' , 'ignore' ) . decode ( 'utf-8' ) ) ) fout . write ( '\n' ) except IOError : #print("Cant print line - cls_filelist line 304") pass
2,239
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/lib/cls_filelist.py#L192-L228
[ "def", "surviors_are_inconsistent", "(", "survivor_mapping", ":", "Mapping", "[", "BaseEntity", ",", "Set", "[", "BaseEntity", "]", "]", ")", "->", "Set", "[", "BaseEntity", "]", ":", "victim_mapping", "=", "set", "(", ")", "for", "victim", "in", "itt", ".", "chain", ".", "from_iterable", "(", "survivor_mapping", ".", "values", "(", ")", ")", ":", "if", "victim", "in", "survivor_mapping", ":", "victim_mapping", ".", "add", "(", "victim", ")", "return", "victim_mapping" ]
connect here - use the other classes cls_oracle cls_mysql etc otherwise this has the credentials used to access a share folder
def login ( self , schema , username , password ) : self . schema = schema self . username = username self . password = password self . connection = schema
2,240
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/dataTools/cls_dataset.py#L34-L42
[ "def", "_StructMessageToJsonObject", "(", "self", ",", "message", ")", ":", "fields", "=", "message", ".", "fields", "ret", "=", "{", "}", "for", "key", "in", "fields", ":", "ret", "[", "key", "]", "=", "self", ".", "_ValueMessageToJsonObject", "(", "fields", "[", "key", "]", ")", "return", "ret" ]
Types a collection of genes returning the most likely gene version in the collection with it s genotype
def type ( self , sequence_coverage_collection , min_gene_percent_covg_threshold = 99 ) : best_versions = self . get_best_version ( sequence_coverage_collection . values ( ) , min_gene_percent_covg_threshold ) return [ self . presence_typer . type ( best_version ) for best_version in best_versions ]
2,241
https://github.com/Phelimb/atlas/blob/02e85497bb5ac423d6452a10dca11964582ac4d7/mykatlas/typing/typer/presence.py#L146-L154
[ "def", "merge_text_nodes_on", "(", "self", ",", "node", ")", ":", "if", "not", "isinstance", "(", "node", ",", "ContainerNode", ")", "or", "not", "node", ".", "children", ":", "return", "new_children", "=", "[", "]", "text_run", "=", "[", "]", "for", "i", "in", "node", ".", "children", ":", "if", "isinstance", "(", "i", ",", "Text", ")", "and", "not", "i", ".", "translatable", ":", "text_run", ".", "append", "(", "i", ".", "escaped", "(", ")", ")", "else", ":", "if", "text_run", ":", "new_children", ".", "append", "(", "EscapedText", "(", "''", ".", "join", "(", "text_run", ")", ")", ")", "text_run", "=", "[", "]", "new_children", ".", "append", "(", "i", ")", "if", "text_run", ":", "new_children", ".", "append", "(", "EscapedText", "(", "''", ".", "join", "(", "text_run", ")", ")", ")", "node", ".", "children", "=", "new_children", "for", "i", "in", "node", ".", "children", ":", "self", ".", "merge_text_nodes_on", "(", "i", ")" ]
collects a filelist of all . py programs
def list_all_python_programs ( self ) : self . tot_lines = 0 self . tot_bytes = 0 self . tot_files = 0 self . tot_loc = 0 self . lstPrograms = [ ] fl = mod_fl . FileList ( [ self . fldr ] , [ '*.py' ] , [ "__pycache__" , "/venv/" , "/venv2/" , ".git" ] ) for fip in fl . get_list ( ) : if '__init__.py' not in fip : self . add ( fip , 'TODO - add comment' ) f = mod_file . TextFile ( fip ) self . tot_lines += f . count_lines_in_file ( ) self . tot_loc += f . count_lines_of_code ( ) self . tot_bytes += f . size self . tot_files += 1 print ( 'All Python Program Statistics' ) print ( 'Files = ' , self . tot_files , ' Bytes = ' , self . tot_bytes , ' Lines = ' , self . tot_lines , ' Lines of Code = ' , self . tot_loc )
2,242
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/programs.py#L38-L58
[ "def", "insert", "(", "self", ",", "storagemodel", ")", "->", "StorageTableModel", ":", "modeldefinition", "=", "self", ".", "getmodeldefinition", "(", "storagemodel", ",", "True", ")", "try", ":", "modeldefinition", "[", "'tableservice'", "]", ".", "insert_or_replace_entity", "(", "modeldefinition", "[", "'tablename'", "]", ",", "storagemodel", ".", "entity", "(", ")", ")", "storagemodel", ".", "_exists", "=", "True", "except", "AzureMissingResourceHttpError", "as", "e", ":", "storagemodel", ".", "_exists", "=", "False", "log", ".", "debug", "(", "'can not insert or replace table entity: Table {}, PartitionKey {}, RowKey {} because {!s}'", ".", "format", "(", "modeldefinition", "[", "'tablename'", "]", ",", "storagemodel", ".", "getPartitionKey", "(", ")", ",", "storagemodel", ".", "getRowKey", "(", ")", ",", "e", ")", ")", "except", "Exception", "as", "e", ":", "storagemodel", ".", "_exists", "=", "False", "msg", "=", "'can not insert or replace table entity: Table {}, PartitionKey {}, RowKey {} because {!s}'", ".", "format", "(", "modeldefinition", "[", "'tablename'", "]", ",", "storagemodel", ".", "PartitionKey", ",", "storagemodel", ".", "RowKey", ",", "e", ")", "raise", "AzureStorageWrapException", "(", "msg", "=", "msg", ")", "finally", ":", "return", "storagemodel" ]
Save the list of items to AIKIF core and optionally to local file fname
def save ( self , fname = '' ) : if fname != '' : with open ( fname , 'w' ) as f : for i in self . lstPrograms : f . write ( self . get_file_info_line ( i , ',' ) ) # save to standard AIKIF structure filemap = mod_filemap . FileMap ( [ ] , [ ] ) #location_fileList = filemap.get_full_filename(filemap.find_type('LOCATION'), filemap.find_ontology('FILE-PROGRAM')[0]) object_fileList = filemap . get_full_filename ( filemap . find_type ( 'OBJECT' ) , filemap . find_ontology ( 'FILE-PROGRAM' ) [ 0 ] ) print ( 'object_fileList = ' + object_fileList + '\n' ) if os . path . exists ( object_fileList ) : os . remove ( object_fileList ) self . lstPrograms . sort ( ) try : with open ( object_fileList , 'a' ) as f : f . write ( '\n' . join ( [ i [ 0 ] for i in self . lstPrograms ] ) ) except Exception as ex : print ( 'ERROR = cant write to object_filelist ' , object_fileList , str ( ex ) )
2,243
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/programs.py#L88-L112
[ "def", "from_Track", "(", "track", ",", "maxwidth", "=", "80", ",", "tuning", "=", "None", ")", ":", "result", "=", "[", "]", "width", "=", "_get_width", "(", "maxwidth", ")", "if", "not", "tuning", ":", "tuning", "=", "track", ".", "get_tuning", "(", ")", "lastlen", "=", "0", "for", "bar", "in", "track", ":", "r", "=", "from_Bar", "(", "bar", ",", "width", ",", "tuning", ",", "collapse", "=", "False", ")", "barstart", "=", "r", "[", "1", "]", ".", "find", "(", "'||'", ")", "+", "2", "if", "(", "len", "(", "r", "[", "0", "]", ")", "+", "lastlen", ")", "-", "barstart", "<", "maxwidth", "and", "result", "!=", "[", "]", ":", "for", "i", "in", "range", "(", "1", ",", "len", "(", "r", ")", "+", "1", ")", ":", "item", "=", "r", "[", "len", "(", "r", ")", "-", "i", "]", "result", "[", "-", "i", "]", "+=", "item", "[", "barstart", ":", "]", "else", ":", "result", "+=", "[", "''", ",", "''", "]", "+", "r", "lastlen", "=", "len", "(", "result", "[", "-", "1", "]", ")", "return", "os", ".", "linesep", ".", "join", "(", "result", ")" ]
gets details on the program size date list of functions and produces a Markdown file for documentation
def collect_program_info ( self , fname ) : md = '#AIKIF Technical details\n' md += 'Autogenerated list of programs with comments and progress\n' md += '\nFilename | Comment | Date | Size\n' md += '--- | --- | --- | ---\n' for i in self . lstPrograms : md += self . get_file_info_line ( i , ' | ' ) # save the details an Markdown file with open ( fname , 'w' ) as f : f . write ( md )
2,244
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/programs.py#L140-L154
[ "def", "close", "(", "self", ",", "*", "args", ",", "*", "*", "kwds", ")", ":", "self", ".", "cur", ".", "close", "(", ")", "self", ".", "commit", "(", ")", "self", ".", "DB", ".", "close", "(", ")" ]
Given a DAVIDenrich output it converts ensembl gene ids to genes names and adds this column to the output
def id_nameDAVID ( df , GTF = None , name_id = None ) : if name_id is None : gene_name = retrieve_GTF_field ( 'gene_name' , GTF ) gene_id = retrieve_GTF_field ( 'gene_id' , GTF ) GTF = pd . concat ( [ gene_name , gene_id ] , axis = 1 ) else : GTF = name_id . copy ( ) df [ 'Gene_names' ] = "genes" terms = df [ 'termName' ] . tolist ( ) enrichN = pd . DataFrame ( ) for term in terms : tmp = df [ df [ 'termName' ] == term ] tmp = tmp . reset_index ( drop = True ) ids = tmp . xs ( 0 ) [ 'geneIds' ] ids = pd . DataFrame ( data = ids . split ( ", " ) ) ids . columns = [ 'geneIds' ] ids [ 'geneIds' ] = ids [ 'geneIds' ] . map ( str . lower ) GTF [ 'gene_id' ] = GTF [ 'gene_id' ] . astype ( str ) GTF [ 'gene_id' ] = GTF [ 'gene_id' ] . map ( str . lower ) ids = pd . merge ( ids , GTF , how = 'left' , left_on = 'geneIds' , right_on = 'gene_id' ) names = ids [ 'gene_name' ] . tolist ( ) names = ', ' . join ( names ) tmp [ "Gene_names" ] = names #tmp=tmp.replace(to_replace=tmp.xs(0)['Gene_names'], value=names) enrichN = pd . concat ( [ enrichN , tmp ] ) enrichN = enrichN . reset_index ( drop = True ) gene_names = enrichN [ [ 'Gene_names' ] ] gpos = enrichN . columns . get_loc ( "geneIds" ) enrichN = enrichN . drop ( [ 'Gene_names' ] , axis = 1 ) cols = enrichN . columns . tolist ( ) enrichN = pd . concat ( [ enrichN [ cols [ : gpos + 1 ] ] , gene_names , enrichN [ cols [ gpos + 1 : ] ] ] , axis = 1 ) return enrichN
2,245
https://github.com/mpg-age-bioinformatics/AGEpy/blob/887808a7a2c1504f39ce8d8cb36c15c1721cd29f/AGEpy/david.py#L92-L134
[ "def", "check_result", "(", "data", ",", "key", "=", "''", ")", ":", "if", "not", "isinstance", "(", "data", ",", "dict", ")", ":", "return", "False", "if", "key", ":", "if", "key", "in", "data", ":", "return", "True", "return", "False", "if", "'resultCode'", "in", "data", ".", "keys", "(", ")", ":", "# OpenBus", "return", "True", "if", "data", ".", "get", "(", "'resultCode'", ",", "-", "1", ")", "==", "0", "else", "False", "elif", "'code'", "in", "data", ".", "keys", "(", ")", ":", "# Parking", "return", "True", "if", "data", ".", "get", "(", "'code'", ",", "-", "1", ")", "==", "0", "else", "False", "return", "False" ]
Returns a list of gene names for given gene ids .
def DAVIDgetGeneAttribute ( x , df , refCol = "ensembl_gene_id" , fieldTOretrieve = "gene_name" ) : l = x . split ( ", " ) l = [ s . upper ( ) for s in l ] tmpdf = pd . DataFrame ( { refCol : l } , index = range ( len ( l ) ) ) df_fix = df [ [ refCol , fieldTOretrieve ] ] . drop_duplicates ( ) df_fix [ refCol ] = df_fix [ refCol ] . apply ( lambda x : x . upper ( ) ) ids = pd . merge ( tmpdf , df_fix , how = "left" , on = [ refCol ] ) ids = ids [ fieldTOretrieve ] . tolist ( ) ids = [ str ( s ) for s in ids ] ids = ", " . join ( ids ) return ids
2,246
https://github.com/mpg-age-bioinformatics/AGEpy/blob/887808a7a2c1504f39ce8d8cb36c15c1721cd29f/AGEpy/david.py#L136-L157
[ "def", "on_websocket_message", "(", "message", ":", "str", ")", "->", "None", ":", "msgs", "=", "json", ".", "loads", "(", "message", ")", "for", "msg", "in", "msgs", ":", "if", "not", "isinstance", "(", "msg", ",", "dict", ")", ":", "logger", ".", "error", "(", "'Invalid WS message format: {}'", ".", "format", "(", "message", ")", ")", "continue", "_type", "=", "msg", ".", "get", "(", "'type'", ")", "if", "_type", "==", "'log'", ":", "log_handler", "(", "msg", "[", "'level'", "]", ",", "msg", "[", "'message'", "]", ")", "elif", "_type", "==", "'event'", ":", "event_handler", "(", "msg", "[", "'event'", "]", ")", "elif", "_type", "==", "'response'", ":", "response_handler", "(", "msg", ")", "else", ":", "raise", "ValueError", "(", "'Unkown message type: {}'", ".", "format", "(", "message", ")", ")" ]
Spline loc tool .
def main ( * * options ) : application = Application ( * * options ) # fails application when your defined threshold is higher than your ratio of com/loc. if not application . run ( ) : sys . exit ( 1 ) return application
2,247
https://github.com/Nachtfeuer/pipeline/blob/04ca18c4e95e4349532bb45b768206393e1f2c13/spline/tools/loc/application.py#L170-L176
[ "def", "unbind", "(", "self", ",", "devices_to_unbind", ")", ":", "if", "self", ".", "entity_api_key", "==", "\"\"", ":", "return", "{", "'status'", ":", "'failure'", ",", "'response'", ":", "'No API key found in request'", "}", "url", "=", "self", ".", "base_url", "+", "\"api/0.1.0/subscribe/unbind\"", "headers", "=", "{", "\"apikey\"", ":", "self", ".", "entity_api_key", "}", "data", "=", "{", "\"exchange\"", ":", "\"amq.topic\"", ",", "\"keys\"", ":", "devices_to_unbind", ",", "\"queue\"", ":", "self", ".", "entity_id", "}", "with", "self", ".", "no_ssl_verification", "(", ")", ":", "r", "=", "requests", ".", "delete", "(", "url", ",", "json", "=", "data", ",", "headers", "=", "headers", ")", "print", "(", "r", ")", "response", "=", "dict", "(", ")", "if", "\"No API key\"", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "json", ".", "loads", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", "[", "'message'", "]", "elif", "'unbind'", "in", "str", "(", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", ")", ":", "response", "[", "\"status\"", "]", "=", "\"success\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "else", ":", "response", "[", "\"status\"", "]", "=", "\"failure\"", "r", "=", "r", ".", "content", ".", "decode", "(", "\"utf-8\"", ")", "response", "[", "\"response\"", "]", "=", "str", "(", "r", ")", "return", "response" ]
Loading configuration .
def load_configuration ( self ) : filename = os . path . join ( os . path . dirname ( __file__ ) , 'templates/spline-loc.yml.j2' ) with open ( filename ) as handle : return Adapter ( safe_load ( handle ) ) . configuration
2,248
https://github.com/Nachtfeuer/pipeline/blob/04ca18c4e95e4349532bb45b768206393e1f2c13/spline/tools/loc/application.py#L56-L60
[ "def", "_add_dependency", "(", "self", ",", "dependency", ",", "var_name", "=", "None", ")", ":", "if", "var_name", "is", "None", ":", "var_name", "=", "next", "(", "self", ".", "temp_var_names", ")", "# Don't add duplicate dependencies", "if", "(", "dependency", ",", "var_name", ")", "not", "in", "self", ".", "dependencies", ":", "self", ".", "dependencies", ".", "append", "(", "(", "dependency", ",", "var_name", ")", ")", "return", "var_name" ]
Verify whether to ignore a path .
def ignore_path ( path ) : ignore = False for name in [ '.tox' , 'dist' , 'build' , 'node_modules' , 'htmlcov' ] : if path . find ( name ) >= 0 : ignore = True break return ignore
2,249
https://github.com/Nachtfeuer/pipeline/blob/04ca18c4e95e4349532bb45b768206393e1f2c13/spline/tools/loc/application.py#L63-L78
[ "def", "cudnnSetPooling2dDescriptor", "(", "poolingDesc", ",", "mode", ",", "windowHeight", ",", "windowWidth", ",", "verticalPadding", ",", "horizontalPadding", ",", "verticalStride", ",", "horizontalStride", ")", ":", "status", "=", "_libcudnn", ".", "cudnnSetPooling2dDescriptor", "(", "poolingDesc", ",", "mode", ",", "windowHeight", ",", "windowWidth", ",", "verticalPadding", ",", "horizontalPadding", ",", "verticalStride", ",", "horizontalStride", ")", "cudnnCheckStatus", "(", "status", ")" ]
Iterating files for given extensions .
def walk_files_for ( paths , supported_extensions ) : for path in paths : for root , _ , files in os . walk ( path ) : if Application . ignore_path ( root . replace ( path , '' ) ) : continue for filename in files : extension = os . path . splitext ( filename ) [ 1 ] if extension in supported_extensions : yield path , os . path . join ( root , filename ) , extension
2,250
https://github.com/Nachtfeuer/pipeline/blob/04ca18c4e95e4349532bb45b768206393e1f2c13/spline/tools/loc/application.py#L81-L99
[ "def", "update", "(", "self", ")", ":", "# We need to save the console and state and restore it", "console", "=", "self", ".", "console", "aux", "=", "self", ".", "aux", "state", "=", "yield", "from", "self", ".", "_get_container_state", "(", ")", "yield", "from", "self", ".", "reset", "(", ")", "yield", "from", "self", ".", "create", "(", ")", "self", ".", "console", "=", "console", "self", ".", "aux", "=", "aux", "if", "state", "==", "\"running\"", ":", "yield", "from", "self", ".", "start", "(", ")" ]
Find out lines of code and lines of comments .
def analyse ( self , path_and_filename , pattern ) : with open ( path_and_filename ) as handle : content = handle . read ( ) loc = content . count ( '\n' ) + 1 com = 0 for match in re . findall ( pattern , content , re . DOTALL ) : com += match . count ( '\n' ) + 1 return max ( 0 , loc - com ) , com
2,251
https://github.com/Nachtfeuer/pipeline/blob/04ca18c4e95e4349532bb45b768206393e1f2c13/spline/tools/loc/application.py#L101-L119
[ "def", "record", "(", "self", ",", "person", ",", "event", ",", "properties", "=", "None", ",", "timestamp", "=", "None", ",", "path", "=", "KISSmetrics", ".", "RECORD_PATH", ")", ":", "this_request", "=", "request", ".", "record", "(", "self", ".", "key", ",", "person", ",", "event", ",", "timestamp", "=", "timestamp", ",", "properties", "=", "properties", ",", "scheme", "=", "self", ".", "trk_scheme", ",", "host", "=", "self", ".", "trk_host", ",", "path", "=", "path", ")", "return", "self", ".", "_request", "(", "this_request", ")" ]
Lists BioMart datasets .
def datasetsBM ( host = biomart_host ) : stdout_ = sys . stdout #Keep track of the previous value. stream = StringIO ( ) sys . stdout = stream server = BiomartServer ( biomart_host ) server . show_datasets ( ) sys . stdout = stdout_ # restore the previous stdout. variable = stream . getvalue ( ) v = variable . replace ( "{" , " " ) v = v . replace ( "}" , " " ) v = v . replace ( ": " , "\t" ) print ( v )
2,252
https://github.com/mpg-age-bioinformatics/AGEpy/blob/887808a7a2c1504f39ce8d8cb36c15c1721cd29f/AGEpy/biom.py#L12-L31
[ "def", "restart", "(", "self", ",", "timeout", "=", "None", ")", ":", "msg", "=", "{", "\"value\"", ":", "\"Restart requested by \"", "+", "self", ".", "username", "+", "\"via the Splunk SDK for Python\"", "}", "# This message will be deleted once the server actually restarts.", "self", ".", "messages", ".", "create", "(", "name", "=", "\"restart_required\"", ",", "*", "*", "msg", ")", "result", "=", "self", ".", "post", "(", "\"server/control/restart\"", ")", "if", "timeout", "is", "None", ":", "return", "result", "start", "=", "datetime", ".", "now", "(", ")", "diff", "=", "timedelta", "(", "seconds", "=", "timeout", ")", "while", "datetime", ".", "now", "(", ")", "-", "start", "<", "diff", ":", "try", ":", "self", ".", "login", "(", ")", "if", "not", "self", ".", "restart_required", ":", "return", "result", "except", "Exception", "as", "e", ":", "sleep", "(", "1", ")", "raise", "Exception", "(", "\"Operation time out.\"", ")" ]
Lists BioMart filters for a specific dataset .
def filtersBM ( dataset , host = biomart_host ) : stdout_ = sys . stdout #Keep track of the previous value. stream = StringIO ( ) sys . stdout = stream server = BiomartServer ( host ) d = server . datasets [ dataset ] d . show_filters ( ) sys . stdout = stdout_ # restore the previous stdout. variable = stream . getvalue ( ) v = variable . replace ( "{" , " " ) v = v . replace ( "}" , " " ) v = v . replace ( ": " , "\t" ) print ( v )
2,253
https://github.com/mpg-age-bioinformatics/AGEpy/blob/887808a7a2c1504f39ce8d8cb36c15c1721cd29f/AGEpy/biom.py#L33-L54
[ "def", "run", "(", "self", ")", ":", "self", ".", "busy", "=", "True", "for", "i", "in", "range", "(", "9", ")", ":", "self", ".", "counter", "+=", "1", "time", ".", "sleep", "(", "0.5", ")", "pass", "self", ".", "counter", "+=", "1", "self", ".", "busy", "=", "False", "return" ]
Prepares the data in CSV format
def format_csv ( self , delim = ',' , qu = '"' ) : res = qu + self . name + qu + delim if self . data : for d in self . data : res += qu + str ( d ) + qu + delim return res + '\n'
2,254
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/core_data.py#L40-L48
[ "def", "remove_from_space_size", "(", "self", ",", "removal_bytes", ")", ":", "# type: (int) -> None", "if", "not", "self", ".", "_initialized", ":", "raise", "pycdlibexception", ".", "PyCdlibInternalError", "(", "'This Volume Descriptor is not yet initialized'", ")", "# The 'removal' parameter is expected to be in bytes, but the space", "# size we track is in extents. Round up to the next extent.", "self", ".", "space_size", "-=", "utils", ".", "ceiling_div", "(", "removal_bytes", ",", "self", ".", "log_block_size", ")" ]
return a trace of parents and children of the obect
def format_all ( self ) : res = '\n--- Format all : ' + str ( self . name ) + ' -------------\n' res += ' parent = ' + str ( self . parent ) + '\n' res += self . _get_all_children ( ) res += self . _get_links ( ) return res
2,255
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/core_data.py#L60-L69
[ "def", "create_token_response", "(", "self", ",", "request", ",", "token_handler", ")", ":", "headers", "=", "self", ".", "_get_default_headers", "(", ")", "try", ":", "if", "self", ".", "request_validator", ".", "client_authentication_required", "(", "request", ")", ":", "log", ".", "debug", "(", "'Authenticating client, %r.'", ",", "request", ")", "if", "not", "self", ".", "request_validator", ".", "authenticate_client", "(", "request", ")", ":", "log", ".", "debug", "(", "'Client authentication failed, %r.'", ",", "request", ")", "raise", "errors", ".", "InvalidClientError", "(", "request", "=", "request", ")", "elif", "not", "self", ".", "request_validator", ".", "authenticate_client_id", "(", "request", ".", "client_id", ",", "request", ")", ":", "log", ".", "debug", "(", "'Client authentication failed, %r.'", ",", "request", ")", "raise", "errors", ".", "InvalidClientError", "(", "request", "=", "request", ")", "log", ".", "debug", "(", "'Validating access token request, %r.'", ",", "request", ")", "self", ".", "validate_token_request", "(", "request", ")", "except", "errors", ".", "OAuth2Error", "as", "e", ":", "log", ".", "debug", "(", "'Client error in token request, %s.'", ",", "e", ")", "headers", ".", "update", "(", "e", ".", "headers", ")", "return", "headers", ",", "e", ".", "json", ",", "e", ".", "status_code", "token", "=", "token_handler", ".", "create_token", "(", "request", ",", "self", ".", "refresh_token", ")", "for", "modifier", "in", "self", ".", "_token_modifiers", ":", "token", "=", "modifier", "(", "token", ")", "self", ".", "request_validator", ".", "save_token", "(", "token", ",", "request", ")", "log", ".", "debug", "(", "'Issuing token %r to client id %r (%r) and username %s.'", ",", "token", ",", "request", ".", "client_id", ",", "request", ".", "client", ",", "request", ".", "username", ")", "return", "headers", ",", "json", ".", "dumps", "(", "token", ")", ",", "200" ]
return the list of children of a node
def _get_all_children ( self , ) : res = '' if self . child_nodes : for c in self . child_nodes : res += ' child = ' + str ( c ) + '\n' if c . child_nodes : for grandchild in c . child_nodes : res += ' child = ' + str ( grandchild ) + '\n' else : res += ' child = None\n' return res
2,256
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/core_data.py#L71-L84
[ "def", "_adapt_WSDateTime", "(", "dt", ")", ":", "try", ":", "ts", "=", "int", "(", "(", "dt", ".", "replace", "(", "tzinfo", "=", "pytz", ".", "utc", ")", "-", "datetime", "(", "1970", ",", "1", ",", "1", ",", "tzinfo", "=", "pytz", ".", "utc", ")", ")", ".", "total_seconds", "(", ")", ")", "except", "(", "OverflowError", ",", "OSError", ")", ":", "if", "dt", "<", "datetime", ".", "now", "(", ")", ":", "ts", "=", "0", "else", ":", "ts", "=", "2", "**", "63", "-", "1", "return", "ts" ]
return the list of links of a node
def _get_links ( self , ) : res = '' if self . links : for l in self . links : res += ' links = ' + str ( l [ 0 ] ) + '\n' if l [ 0 ] . child_nodes : for chld in l [ 0 ] . child_nodes : res += ' child = ' + str ( chld ) + '\n' if l [ 0 ] . links : for lnk in l [ 0 ] . links : res += ' sublink = ' + str ( lnk [ 0 ] ) + '\n' else : res += ' links = None\n' return res
2,257
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/core_data.py#L86-L103
[ "def", "_adapt_WSDateTime", "(", "dt", ")", ":", "try", ":", "ts", "=", "int", "(", "(", "dt", ".", "replace", "(", "tzinfo", "=", "pytz", ".", "utc", ")", "-", "datetime", "(", "1970", ",", "1", ",", "1", ",", "tzinfo", "=", "pytz", ".", "utc", ")", ")", ".", "total_seconds", "(", ")", ")", "except", "(", "OverflowError", ",", "OSError", ")", ":", "if", "dt", "<", "datetime", ".", "now", "(", ")", ":", "ts", "=", "0", "else", ":", "ts", "=", "2", "**", "63", "-", "1", "return", "ts" ]
find the child object by name and return the object
def get_child_by_name ( self , name ) : for c in self . child_nodes : if c . name == name : return c return None
2,258
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/core_data.py#L149-L156
[ "def", "handleServerEvents", "(", "self", ",", "msg", ")", ":", "self", ".", "log", ".", "debug", "(", "'MSG %s'", ",", "msg", ")", "self", ".", "handleConnectionState", "(", "msg", ")", "if", "msg", ".", "typeName", "==", "\"error\"", ":", "self", ".", "handleErrorEvents", "(", "msg", ")", "elif", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_CURRENT_TIME\"", "]", ":", "if", "self", ".", "time", "<", "msg", ".", "time", ":", "self", ".", "time", "=", "msg", ".", "time", "elif", "(", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_TYPE_MKT_DEPTH\"", "]", "or", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_TYPE_MKT_DEPTH_L2\"", "]", ")", ":", "self", ".", "handleMarketDepth", "(", "msg", ")", "elif", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_TYPE_TICK_STRING\"", "]", ":", "self", ".", "handleTickString", "(", "msg", ")", "elif", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_TYPE_TICK_PRICE\"", "]", ":", "self", ".", "handleTickPrice", "(", "msg", ")", "elif", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_TYPE_TICK_GENERIC\"", "]", ":", "self", ".", "handleTickGeneric", "(", "msg", ")", "elif", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_TYPE_TICK_SIZE\"", "]", ":", "self", ".", "handleTickSize", "(", "msg", ")", "elif", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_TYPE_TICK_OPTION\"", "]", ":", "self", ".", "handleTickOptionComputation", "(", "msg", ")", "elif", "(", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_TYPE_OPEN_ORDER\"", "]", "or", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_TYPE_OPEN_ORDER_END\"", "]", "or", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_TYPE_ORDER_STATUS\"", "]", ")", ":", "self", ".", "handleOrders", "(", "msg", ")", "elif", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_TYPE_HISTORICAL_DATA\"", "]", ":", "self", ".", "handleHistoricalData", "(", "msg", ")", "elif", "msg", ".", "typeName", "==", "dataTypes", "[", 
"\"MSG_TYPE_ACCOUNT_UPDATES\"", "]", ":", "self", ".", "handleAccount", "(", "msg", ")", "elif", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_TYPE_PORTFOLIO_UPDATES\"", "]", ":", "self", ".", "handlePortfolio", "(", "msg", ")", "elif", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_TYPE_POSITION\"", "]", ":", "self", ".", "handlePosition", "(", "msg", ")", "elif", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_TYPE_NEXT_ORDER_ID\"", "]", ":", "self", ".", "handleNextValidId", "(", "msg", ".", "orderId", ")", "elif", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_CONNECTION_CLOSED\"", "]", ":", "self", ".", "handleConnectionClosed", "(", "msg", ")", "# elif msg.typeName == dataTypes[\"MSG_TYPE_MANAGED_ACCOUNTS\"]:", "# self.accountCode = msg.accountsList", "elif", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_COMMISSION_REPORT\"", "]", ":", "self", ".", "commission", "=", "msg", ".", "commissionReport", ".", "m_commission", "elif", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_CONTRACT_DETAILS\"", "]", ":", "self", ".", "handleContractDetails", "(", "msg", ",", "end", "=", "False", ")", "elif", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_CONTRACT_DETAILS_END\"", "]", ":", "self", ".", "handleContractDetails", "(", "msg", ",", "end", "=", "True", ")", "elif", "msg", ".", "typeName", "==", "dataTypes", "[", "\"MSG_TICK_SNAPSHOT_END\"", "]", ":", "self", ".", "ibCallback", "(", "caller", "=", "\"handleTickSnapshotEnd\"", ",", "msg", "=", "msg", ")", "else", ":", "# log handler msg", "self", ".", "log_msg", "(", "\"server\"", ",", "msg", ")" ]
returns the filename
def get_filename ( self , year ) : res = self . fldr + os . sep + self . type + year + '.' + self . user return res
2,259
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/core_data.py#L301-L306
[ "def", "vapour_pressure", "(", "Temperature", ",", "element", ")", ":", "if", "element", "==", "\"Rb\"", ":", "Tmelt", "=", "39.30", "+", "273.15", "# K.", "if", "Temperature", "<", "Tmelt", ":", "P", "=", "10", "**", "(", "2.881", "+", "4.857", "-", "4215.0", "/", "Temperature", ")", "# Torr.", "else", ":", "P", "=", "10", "**", "(", "2.881", "+", "4.312", "-", "4040.0", "/", "Temperature", ")", "# Torr.", "elif", "element", "==", "\"Cs\"", ":", "Tmelt", "=", "28.5", "+", "273.15", "# K.", "if", "Temperature", "<", "Tmelt", ":", "P", "=", "10", "**", "(", "2.881", "+", "4.711", "-", "3999.0", "/", "Temperature", ")", "# Torr.", "else", ":", "P", "=", "10", "**", "(", "2.881", "+", "4.165", "-", "3830.0", "/", "Temperature", ")", "# Torr.", "else", ":", "s", "=", "str", "(", "element", ")", "s", "+=", "\" is not an element in the database for this function.\"", "raise", "ValueError", "(", "s", ")", "P", "=", "P", "*", "101325.0", "/", "760.0", "# Pascals.", "return", "P" ]
save table to folder in appropriate files NOTE - ONLY APPEND AT THIS STAGE - THEN USE DATABASE
def save ( self , file_tag = '2016' , add_header = 'N' ) : fname = self . get_filename ( file_tag ) with open ( fname , 'a' ) as f : if add_header == 'Y' : f . write ( self . format_hdr ( ) ) for e in self . table : f . write ( e . format_csv ( ) )
2,260
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/core_data.py#L320-L331
[ "def", "handle_stale", "(", "msg", "=", "''", ",", "exceptions", "=", "None", ")", ":", "exc", "=", "[", "StaleElementReferenceException", "]", "if", "exceptions", "is", "not", "None", ":", "try", ":", "exc", ".", "extend", "(", "iter", "(", "exceptions", ")", ")", "except", "TypeError", ":", "# exceptions is not iterable", "exc", ".", "append", "(", "exceptions", ")", "exc", "=", "tuple", "(", "exc", ")", "if", "not", "msg", ":", "msg", "=", "\"Could not recover from Exception(s): {}\"", ".", "format", "(", "', '", ".", "join", "(", "[", "e", ".", "__name__", "for", "e", "in", "exc", "]", ")", ")", "def", "wrapper", "(", "func", ")", ":", "def", "exc_handler", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "import", "time", "timeout", "=", "10", "poll_freq", "=", "0.5", "end_time", "=", "time", ".", "time", "(", ")", "+", "timeout", "while", "time", ".", "time", "(", ")", "<=", "end_time", ":", "try", ":", "return", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "except", "exc", ":", "time", ".", "sleep", "(", "poll_freq", ")", "poll_freq", "*=", "1.25", "continue", "raise", "RuntimeError", "(", "msg", ")", "return", "exc_handler", "return", "wrapper" ]
Prepares the header in CSV format
def format_hdr ( self , delim = ',' , qu = '"' ) : res = '' if self . header : for d in self . header : res += qu + str ( d ) + qu + delim return res + '\n'
2,261
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/core_data.py#L333-L341
[ "def", "_validateIterCommonParams", "(", "MaxObjectCount", ",", "OperationTimeout", ")", ":", "if", "MaxObjectCount", "is", "None", "or", "MaxObjectCount", "<=", "0", ":", "raise", "ValueError", "(", "_format", "(", "\"MaxObjectCount must be > 0 but is {0}\"", ",", "MaxObjectCount", ")", ")", "if", "OperationTimeout", "is", "not", "None", "and", "OperationTimeout", "<", "0", ":", "raise", "ValueError", "(", "_format", "(", "\"OperationTimeout must be >= 0 but is {0}\"", ",", "OperationTimeout", ")", ")" ]
extracts event information from core tables into diary files
def generate_diary ( self ) : print ( 'Generate diary files from Event rows only' ) for r in self . table : print ( str ( type ( r ) ) + ' = ' , r )
2,262
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/core_data.py#L344-L350
[ "def", "hide", "(", "self", ",", "selections", ")", ":", "if", "'atoms'", "in", "selections", ":", "self", ".", "hidden_state", "[", "'atoms'", "]", "=", "selections", "[", "'atoms'", "]", "self", ".", "on_atom_hidden_changed", "(", ")", "if", "'bonds'", "in", "selections", ":", "self", ".", "hidden_state", "[", "'bonds'", "]", "=", "selections", "[", "'bonds'", "]", "self", ".", "on_bond_hidden_changed", "(", ")", "if", "'box'", "in", "selections", ":", "self", ".", "hidden_state", "[", "'box'", "]", "=", "box_s", "=", "selections", "[", "'box'", "]", "if", "box_s", ".", "mask", "[", "0", "]", ":", "if", "self", ".", "viewer", ".", "has_renderer", "(", "self", ".", "box_renderer", ")", ":", "self", ".", "viewer", ".", "remove_renderer", "(", "self", ".", "box_renderer", ")", "else", ":", "if", "not", "self", ".", "viewer", ".", "has_renderer", "(", "self", ".", "box_renderer", ")", ":", "self", ".", "viewer", ".", "add_renderer", "(", "self", ".", "box_renderer", ")", "return", "self", ".", "hidden_state" ]
Takes a list of VariantProbeCoverages and returns a Call for the Variant . Note in the simplest case the list will be of length one . However we may be typing the Variant on multiple backgrouds leading to multiple VariantProbes for a single Variant .
def type ( self , variant_probe_coverages , variant = None ) : if not isinstance ( variant_probe_coverages , list ) : variant_probe_coverages = [ variant_probe_coverages ] calls = [ ] for variant_probe_coverage in variant_probe_coverages : calls . append ( self . _type_variant_probe_coverages ( variant_probe_coverage , variant ) ) hom_alt_calls = [ c for c in calls if sum ( c [ "genotype" ] ) > 1 ] het_calls = [ c for c in calls if sum ( c [ "genotype" ] ) == 1 ] if hom_alt_calls : hom_alt_calls . sort ( key = lambda x : x [ "info" ] [ "conf" ] , reverse = True ) return hom_alt_calls [ 0 ] elif het_calls : het_calls . sort ( key = lambda x : x [ "info" ] [ "conf" ] , reverse = True ) return het_calls [ 0 ] else : calls . sort ( key = lambda x : x [ "info" ] [ "conf" ] , reverse = True ) return calls [ 0 ]
2,263
https://github.com/Phelimb/atlas/blob/02e85497bb5ac423d6452a10dca11964582ac4d7/mykatlas/typing/typer/variant.py#L64-L88
[ "def", "stop", "(", "self", ")", ":", "if", "self", ".", "stream", "and", "self", ".", "stream", ".", "session", ".", "state", "!=", "STATE_STOPPED", ":", "self", ".", "stream", ".", "stop", "(", ")" ]
Creator function for creating an instance of an Ansible script .
def creator ( entry , config ) : ansible_playbook = "ansible.playbook.dry.run.see.comment" ansible_inventory = "ansible.inventory.dry.run.see.comment" ansible_playbook_content = render ( config . script , model = config . model , env = config . env , variables = config . variables , item = config . item ) ansible_inventory_content = render ( entry [ 'inventory' ] , model = config . model , env = config . env , variables = config . variables , item = config . item ) if not config . dry_run : ansible_playbook = write_temporary_file ( ansible_playbook_content , 'ansible-play-' , '.yaml' ) ansible_playbook_content = '' ansible_inventory = write_temporary_file ( ansible_inventory_content , prefix = 'ansible-inventory-' ) ansible_inventory_content = '' # rendering the Bash script for running the Ansible playbook template_file = os . path . join ( os . path . dirname ( __file__ ) , 'templates/ansible.sh.j2' ) with open ( template_file ) as handle : template = handle . read ( ) config . script = render ( template , debug = config . debug , ansible_playbook_content = ansible_playbook_content , ansible_playbook = ansible_playbook , ansible_inventory_content = ansible_inventory_content , ansible_inventory = ansible_inventory , limit = entry [ 'limit' ] ) return Ansible ( config )
2,264
https://github.com/Nachtfeuer/pipeline/blob/04ca18c4e95e4349532bb45b768206393e1f2c13/spline/components/ansible.py#L37-L64
[ "def", "deletecols", "(", "X", ",", "cols", ")", ":", "if", "isinstance", "(", "cols", ",", "str", ")", ":", "cols", "=", "cols", ".", "split", "(", "','", ")", "retain", "=", "[", "n", "for", "n", "in", "X", ".", "dtype", ".", "names", "if", "n", "not", "in", "cols", "]", "if", "len", "(", "retain", ")", ">", "0", ":", "return", "X", "[", "retain", "]", "else", ":", "return", "None" ]
Function that performs one step of the Game of Life
def update_gol ( self ) : updated_grid = [ [ self . update_cell ( row , col ) for col in range ( self . get_grid_width ( ) ) ] for row in range ( self . get_grid_height ( ) ) ] self . replace_grid ( updated_grid )
2,265
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/toolbox/cls_grid_life.py#L19-L28
[ "def", "get_file_url", "(", "self", ",", "fid", ",", "public", "=", "None", ")", ":", "try", ":", "volume_id", ",", "rest", "=", "fid", ".", "strip", "(", ")", ".", "split", "(", "\",\"", ")", "except", "ValueError", ":", "raise", "BadFidFormat", "(", "\"fid must be in format: <volume_id>,<file_name_hash>\"", ")", "file_location", "=", "self", ".", "get_file_location", "(", "volume_id", ")", "if", "public", "is", "None", ":", "public", "=", "self", ".", "use_public_url", "volume_url", "=", "file_location", ".", "public_url", "if", "public", "else", "file_location", ".", "url", "url", "=", "\"http://{volume_url}/{fid}\"", ".", "format", "(", "volume_url", "=", "volume_url", ",", "fid", "=", "fid", ")", "return", "url" ]
Function that computes the update for one cell in the Game of Life
def update_cell ( self , row , col ) : # compute number of living neighbors neighbors = self . eight_neighbors ( row , col ) living_neighbors = 0 for neighbor in neighbors : if not self . is_empty ( neighbor [ 0 ] , neighbor [ 1 ] ) : living_neighbors += 1 # logic for Game of life if ( living_neighbors == 3 ) or ( living_neighbors == 2 and not self . is_empty ( row , col ) ) : return mod_grid . FULL else : return mod_grid . EMPTY
2,266
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/toolbox/cls_grid_life.py#L31-L46
[ "def", "get_file_url", "(", "self", ",", "fid", ",", "public", "=", "None", ")", ":", "try", ":", "volume_id", ",", "rest", "=", "fid", ".", "strip", "(", ")", ".", "split", "(", "\",\"", ")", "except", "ValueError", ":", "raise", "BadFidFormat", "(", "\"fid must be in format: <volume_id>,<file_name_hash>\"", ")", "file_location", "=", "self", ".", "get_file_location", "(", "volume_id", ")", "if", "public", "is", "None", ":", "public", "=", "self", ".", "use_public_url", "volume_url", "=", "file_location", ".", "public_url", "if", "public", "else", "file_location", ".", "url", "url", "=", "\"http://{volume_url}/{fid}\"", ".", "format", "(", "volume_url", "=", "volume_url", ",", "fid", "=", "fid", ")", "return", "url" ]
offsets a pattern list generated below to a random position in the grid
def random_offset ( self , lst ) : res = [ ] x = random . randint ( 4 , self . max_x - 42 ) y = random . randint ( 4 , self . max_y - 10 ) for itm in lst : res . append ( [ itm [ 0 ] + y , itm [ 1 ] + x ] ) return res
2,267
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/toolbox/cls_grid_life.py#L100-L110
[ "def", "getDiskFreeSpace", "(", "self", ",", "freeBytesAvailable", ",", "totalNumberOfBytes", ",", "totalNumberOfFreeBytes", ",", "dokanFileInfo", ",", ")", ":", "ret", "=", "self", ".", "operations", "(", "'getDiskFreeSpace'", ")", "ctypes", ".", "memmove", "(", "freeBytesAvailable", ",", "ctypes", ".", "byref", "(", "ctypes", ".", "c_longlong", "(", "ret", "[", "'freeBytesAvailable'", "]", ")", ")", ",", "ctypes", ".", "sizeof", "(", "ctypes", ".", "c_longlong", ")", ",", ")", "ctypes", ".", "memmove", "(", "totalNumberOfBytes", ",", "ctypes", ".", "byref", "(", "ctypes", ".", "c_longlong", "(", "ret", "[", "'totalNumberOfBytes'", "]", ")", ")", ",", "ctypes", ".", "sizeof", "(", "ctypes", ".", "c_longlong", ")", ",", ")", "ctypes", ".", "memmove", "(", "totalNumberOfFreeBytes", ",", "ctypes", ".", "byref", "(", "ctypes", ".", "c_longlong", "(", "ret", "[", "'totalNumberOfFreeBytes'", "]", ")", ")", ",", "ctypes", ".", "sizeof", "(", "ctypes", ".", "c_longlong", ")", ",", ")", "return", "d1_onedrive", ".", "impl", ".", "drivers", ".", "dokan", ".", "const", ".", "DOKAN_SUCCESS" ]
Returns random variates from the histogram . Note this assumes the histogram is an events per bin not a pdf . Inside the bins a uniform distribution is assumed .
def get_random ( self , size = 10 ) : bin_i = np . random . choice ( np . arange ( len ( self . bin_centers ) ) , size = size , p = self . normalized_histogram ) return self . bin_centers [ bin_i ] + np . random . uniform ( - 0.5 , 0.5 , size = size ) * self . bin_volumes ( ) [ bin_i ]
2,268
https://github.com/JelleAalbers/multihist/blob/072288277f807e7e388fdf424c3921c80576f3ab/multihist.py#L187-L193
[ "async", "def", "on_raw_375", "(", "self", ",", "message", ")", ":", "await", "self", ".", "_registration_completed", "(", "message", ")", "self", ".", "motd", "=", "message", ".", "params", "[", "1", "]", "+", "'\\n'" ]
Estimates std of underlying data assuming each datapoint was exactly in the center of its bin .
def std ( self , bessel_correction = True ) : if bessel_correction : n = self . n bc = n / ( n - 1 ) else : bc = 1 return np . sqrt ( np . average ( ( self . bin_centers - self . mean ) ** 2 , weights = self . histogram ) ) * bc
2,269
https://github.com/JelleAalbers/multihist/blob/072288277f807e7e388fdf424c3921c80576f3ab/multihist.py#L205-L212
[ "def", "list_tables", "(", ")", ":", "tables", "=", "[", "]", "try", ":", "table_list", "=", "DYNAMODB_CONNECTION", ".", "list_tables", "(", ")", "while", "True", ":", "for", "table_name", "in", "table_list", "[", "u'TableNames'", "]", ":", "tables", ".", "append", "(", "get_table", "(", "table_name", ")", ")", "if", "u'LastEvaluatedTableName'", "in", "table_list", ":", "table_list", "=", "DYNAMODB_CONNECTION", ".", "list_tables", "(", "table_list", "[", "u'LastEvaluatedTableName'", "]", ")", "else", ":", "break", "except", "DynamoDBResponseError", "as", "error", ":", "dynamodb_error", "=", "error", ".", "body", "[", "'__type'", "]", ".", "rsplit", "(", "'#'", ",", "1", ")", "[", "1", "]", "if", "dynamodb_error", "==", "'ResourceNotFoundException'", ":", "logger", ".", "error", "(", "'No tables found'", ")", "elif", "dynamodb_error", "==", "'AccessDeniedException'", ":", "logger", ".", "debug", "(", "'Your AWS API keys lack access to listing tables. '", "'That is an issue if you are trying to use regular '", "'expressions in your table configuration.'", ")", "elif", "dynamodb_error", "==", "'UnrecognizedClientException'", ":", "logger", ".", "error", "(", "'Invalid security token. Are your AWS API keys correct?'", ")", "else", ":", "logger", ".", "error", "(", "(", "'Unhandled exception: {0}: {1}. '", "'Please file a bug report at '", "'https://github.com/sebdah/dynamic-dynamodb/issues'", ")", ".", "format", "(", "dynamodb_error", ",", "error", ".", "body", "[", "'message'", "]", ")", ")", "except", "JSONResponseError", "as", "error", ":", "logger", ".", "error", "(", "'Communication error: {0}'", ".", "format", "(", "error", ")", ")", "sys", ".", "exit", "(", "1", ")", "return", "tables" ]
Return bin center nearest to percentile
def percentile ( self , percentile ) : return self . bin_centers [ np . argmin ( np . abs ( self . cumulative_density * 100 - percentile ) ) ]
2,270
https://github.com/JelleAalbers/multihist/blob/072288277f807e7e388fdf424c3921c80576f3ab/multihist.py#L238-L240
[ "def", "create_config", "(", "config_path", "=", "\"scriptworker.yaml\"", ")", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "config_path", ")", ":", "print", "(", "\"{} doesn't exist! Exiting...\"", ".", "format", "(", "config_path", ")", ",", "file", "=", "sys", ".", "stderr", ")", "sys", ".", "exit", "(", "1", ")", "with", "open", "(", "config_path", ",", "\"r\"", ",", "encoding", "=", "\"utf-8\"", ")", "as", "fh", ":", "secrets", "=", "safe_load", "(", "fh", ")", "config", "=", "dict", "(", "deepcopy", "(", "DEFAULT_CONFIG", ")", ")", "if", "not", "secrets", ".", "get", "(", "\"credentials\"", ")", ":", "secrets", "[", "'credentials'", "]", "=", "read_worker_creds", "(", ")", "config", ".", "update", "(", "secrets", ")", "apply_product_config", "(", "config", ")", "messages", "=", "check_config", "(", "config", ",", "config_path", ")", "if", "messages", ":", "print", "(", "'\\n'", ".", "join", "(", "messages", ")", ",", "file", "=", "sys", ".", "stderr", ")", "print", "(", "\"Exiting...\"", ",", "file", "=", "sys", ".", "stderr", ")", "sys", ".", "exit", "(", "1", ")", "credentials", "=", "get_frozen_copy", "(", "secrets", "[", "'credentials'", "]", ")", "del", "(", "config", "[", "'credentials'", "]", ")", "config", "=", "get_frozen_copy", "(", "config", ")", "return", "config", ",", "credentials" ]
Return bin_edges histogram array
def _data_to_hist ( self , data , * * kwargs ) : if hasattr ( self , 'bin_edges' ) : kwargs . setdefault ( 'bins' , self . bin_edges ) if len ( data ) == 1 and isinstance ( data [ 0 ] , COLUMNAR_DATA_SOURCES ) : data = data [ 0 ] if self . axis_names is None : raise ValueError ( "When histogramming from a columnar data source, " "axis_names or dimensions is mandatory" ) is_dask = False if WE_HAVE_DASK : is_dask = isinstance ( data , dask . dataframe . DataFrame ) if is_dask : fake_histogram = Histdd ( axis_names = self . axis_names , bins = kwargs [ 'bins' ] ) partial_hists = [ ] for partition in data . to_delayed ( ) : ph = dask . delayed ( Histdd ) ( partition , axis_names = self . axis_names , bins = kwargs [ 'bins' ] ) ph = dask . delayed ( lambda x : x . histogram ) ( ph ) ph = dask . array . from_delayed ( ph , shape = fake_histogram . histogram . shape , dtype = fake_histogram . histogram . dtype ) partial_hists . append ( ph ) partial_hists = dask . array . stack ( partial_hists , axis = 0 ) compute_options = kwargs . get ( 'compute_options' , { } ) for k , v in DEFAULT_DASK_COMPUTE_KWARGS . items ( ) : compute_options . setdefault ( k , v ) histogram = partial_hists . sum ( axis = 0 ) . compute ( * * compute_options ) bin_edges = fake_histogram . bin_edges return histogram , bin_edges else : data = np . vstack ( [ data [ x ] . values for x in self . axis_names ] ) data = np . array ( data ) . T return np . histogramdd ( data , bins = kwargs . get ( 'bins' ) , weights = kwargs . get ( 'weights' ) , range = kwargs . get ( 'range' ) )
2,271
https://github.com/JelleAalbers/multihist/blob/072288277f807e7e388fdf424c3921c80576f3ab/multihist.py#L302-L345
[ "def", "build", "(", "self", ",", "message", ")", ":", "context", "=", "None", "if", "message", ".", "message_type", "in", "[", "Types", ".", "CALL_REQ", ",", "Types", ".", "CALL_RES", "]", ":", "self", ".", "verify_message", "(", "message", ")", "context", "=", "self", ".", "build_context", "(", "message", ")", "# streaming message", "if", "message", ".", "flags", "==", "common", ".", "FlagsType", ".", "fragment", ":", "self", ".", "message_buffer", "[", "message", ".", "id", "]", "=", "context", "# find the incompleted stream", "num", "=", "0", "for", "i", ",", "arg", "in", "enumerate", "(", "context", ".", "argstreams", ")", ":", "if", "arg", ".", "state", "!=", "StreamState", ".", "completed", ":", "num", "=", "i", "break", "self", ".", "close_argstream", "(", "context", ",", "num", ")", "return", "context", "elif", "message", ".", "message_type", "in", "[", "Types", ".", "CALL_REQ_CONTINUE", ",", "Types", ".", "CALL_RES_CONTINUE", "]", ":", "context", "=", "self", ".", "message_buffer", ".", "get", "(", "message", ".", "id", ")", "if", "context", "is", "None", ":", "# missing call msg before continue msg", "raise", "FatalProtocolError", "(", "\"missing call message after receiving continue message\"", ",", "message", ".", "id", ",", ")", "# find the incompleted stream", "dst", "=", "0", "for", "i", ",", "arg", "in", "enumerate", "(", "context", ".", "argstreams", ")", ":", "if", "arg", ".", "state", "!=", "StreamState", ".", "completed", ":", "dst", "=", "i", "break", "try", ":", "self", ".", "verify_message", "(", "message", ")", "except", "InvalidChecksumError", "as", "e", ":", "context", ".", "argstreams", "[", "dst", "]", ".", "set_exception", "(", "e", ")", "raise", "src", "=", "0", "while", "src", "<", "len", "(", "message", ".", "args", ")", ":", "context", ".", "argstreams", "[", "dst", "]", ".", "write", "(", "message", ".", "args", "[", "src", "]", ")", "dst", "+=", "1", "src", "+=", "1", "if", "message", ".", "flags", "!=", "FlagsType", ".", 
"fragment", ":", "# get last fragment. mark it as completed", "assert", "(", "len", "(", "context", ".", "argstreams", ")", "==", "CallContinueMessage", ".", "max_args_num", ")", "self", ".", "message_buffer", ".", "pop", "(", "message", ".", "id", ",", "None", ")", "context", ".", "flags", "=", "FlagsType", ".", "none", "self", ".", "close_argstream", "(", "context", ",", "dst", "-", "1", ")", "return", "None", "elif", "message", ".", "message_type", "==", "Types", ".", "ERROR", ":", "context", "=", "self", ".", "message_buffer", ".", "pop", "(", "message", ".", "id", ",", "None", ")", "if", "context", "is", "None", ":", "log", ".", "info", "(", "'Unconsumed error %s'", ",", "message", ")", "return", "None", "else", ":", "error", "=", "TChannelError", ".", "from_code", "(", "message", ".", "code", ",", "description", "=", "message", ".", "description", ",", "tracing", "=", "context", ".", "tracing", ",", ")", "context", ".", "set_exception", "(", "error", ")", "return", "error", "else", ":", "return", "message" ]
Return axis names without axis or None if axis_names is None
def axis_names_without ( self , axis ) : if self . axis_names is None : return None return itemgetter ( * self . other_axes ( axis ) ) ( self . axis_names )
2,272
https://github.com/JelleAalbers/multihist/blob/072288277f807e7e388fdf424c3921c80576f3ab/multihist.py#L369-L373
[ "def", "factorize", "(", "cls", ",", "pq", ")", ":", "if", "pq", "%", "2", "==", "0", ":", "return", "2", ",", "pq", "//", "2", "y", ",", "c", ",", "m", "=", "randint", "(", "1", ",", "pq", "-", "1", ")", ",", "randint", "(", "1", ",", "pq", "-", "1", ")", ",", "randint", "(", "1", ",", "pq", "-", "1", ")", "g", "=", "r", "=", "q", "=", "1", "x", "=", "ys", "=", "0", "while", "g", "==", "1", ":", "x", "=", "y", "for", "i", "in", "range", "(", "r", ")", ":", "y", "=", "(", "pow", "(", "y", ",", "2", ",", "pq", ")", "+", "c", ")", "%", "pq", "k", "=", "0", "while", "k", "<", "r", "and", "g", "==", "1", ":", "ys", "=", "y", "for", "i", "in", "range", "(", "min", "(", "m", ",", "r", "-", "k", ")", ")", ":", "y", "=", "(", "pow", "(", "y", ",", "2", ",", "pq", ")", "+", "c", ")", "%", "pq", "q", "=", "q", "*", "(", "abs", "(", "x", "-", "y", ")", ")", "%", "pq", "g", "=", "cls", ".", "gcd", "(", "q", ",", "pq", ")", "k", "+=", "m", "r", "*=", "2", "if", "g", "==", "pq", ":", "while", "True", ":", "ys", "=", "(", "pow", "(", "ys", ",", "2", ",", "pq", ")", "+", "c", ")", "%", "pq", "g", "=", "cls", ".", "gcd", "(", "abs", "(", "x", "-", "ys", ")", ",", "pq", ")", "if", "g", ">", "1", ":", "break", "p", ",", "q", "=", "g", ",", "pq", "//", "g", "return", "(", "p", ",", "q", ")", "if", "p", "<", "q", "else", "(", "q", ",", "p", ")" ]
Return bin centers along an axis or if axis = None list of bin_centers along each axis
def bin_centers ( self , axis = None ) : if axis is None : return np . array ( [ self . bin_centers ( axis = i ) for i in range ( self . dimensions ) ] ) axis = self . get_axis_number ( axis ) return 0.5 * ( self . bin_edges [ axis ] [ 1 : ] + self . bin_edges [ axis ] [ : - 1 ] )
2,273
https://github.com/JelleAalbers/multihist/blob/072288277f807e7e388fdf424c3921c80576f3ab/multihist.py#L378-L383
[ "def", "get_logs", "(", "self", ")", ":", "assert", "BeautifulSoup", ",", "\"Please install bs4 to use this method\"", "url", "=", "self", ".", "base_url", "+", "\"/system/syslog.lua\"", "response", "=", "self", ".", "session", ".", "get", "(", "url", ",", "params", "=", "{", "'sid'", ":", "self", ".", "sid", ",", "'stylemode'", ":", "'print'", ",", "}", ",", "timeout", "=", "15", ")", "response", ".", "raise_for_status", "(", ")", "entries", "=", "[", "]", "tree", "=", "BeautifulSoup", "(", "response", ".", "text", ")", "rows", "=", "tree", ".", "find", "(", "'table'", ")", ".", "find_all", "(", "'tr'", ")", "for", "row", "in", "rows", ":", "columns", "=", "row", ".", "find_all", "(", "\"td\"", ")", "date", "=", "columns", "[", "0", "]", ".", "string", "time", "=", "columns", "[", "1", "]", ".", "string", "message", "=", "columns", "[", "2", "]", ".", "find", "(", "\"a\"", ")", ".", "string", "merged", "=", "\"{} {} {}\"", ".", "format", "(", "date", ",", "time", ",", "message", ".", "encode", "(", "\"UTF-8\"", ")", ")", "msg_hash", "=", "hashlib", ".", "md5", "(", "merged", ")", ".", "hexdigest", "(", ")", "entries", ".", "append", "(", "LogEntry", "(", "date", ",", "time", ",", "message", ",", "msg_hash", ")", ")", "return", "entries" ]
Returns index along axis of bin in histogram which contains value Inclusive on both endpoints
def get_axis_bin_index ( self , value , axis ) : axis = self . get_axis_number ( axis ) bin_edges = self . bin_edges [ axis ] # The right bin edge of np.histogram is inclusive: if value == bin_edges [ - 1 ] : # Minus two: one for bin edges rather than centers, one for 0-based indexing return len ( bin_edges ) - 2 # For all other bins, it is exclusive. result = np . searchsorted ( bin_edges , [ value ] , side = 'right' ) [ 0 ] - 1 if not 0 <= result <= len ( bin_edges ) - 1 : raise CoordinateOutOfRangeException ( "Value %s is not in range (%s-%s) of axis %s" % ( value , bin_edges [ 0 ] , bin_edges [ - 1 ] , axis ) ) return result
2,274
https://github.com/JelleAalbers/multihist/blob/072288277f807e7e388fdf424c3921c80576f3ab/multihist.py#L385-L400
[ "def", "random_stochastic_matrix", "(", "n", ",", "k", "=", "None", ",", "sparse", "=", "False", ",", "format", "=", "'csr'", ",", "random_state", "=", "None", ")", ":", "P", "=", "_random_stochastic_matrix", "(", "m", "=", "n", ",", "n", "=", "n", ",", "k", "=", "k", ",", "sparse", "=", "sparse", ",", "format", "=", "format", ",", "random_state", "=", "random_state", ")", "return", "P" ]
Returns index tuple in histogram of bin which contains value
def get_bin_indices ( self , values ) : return tuple ( [ self . get_axis_bin_index ( values [ ax_i ] , ax_i ) for ax_i in range ( self . dimensions ) ] )
2,275
https://github.com/JelleAalbers/multihist/blob/072288277f807e7e388fdf424c3921c80576f3ab/multihist.py#L402-L405
[ "def", "get_rsa_key", "(", "self", ",", "username", ")", ":", "try", ":", "resp", "=", "self", ".", "session", ".", "post", "(", "'https://steamcommunity.com/login/getrsakey/'", ",", "timeout", "=", "15", ",", "data", "=", "{", "'username'", ":", "username", ",", "'donotchache'", ":", "int", "(", "time", "(", ")", "*", "1000", ")", ",", "}", ",", ")", ".", "json", "(", ")", "except", "requests", ".", "exceptions", ".", "RequestException", "as", "e", ":", "raise", "HTTPError", "(", "str", "(", "e", ")", ")", "return", "resp" ]
Return ndarray of same shape as histogram containing bin center value along axis at each point
def all_axis_bin_centers ( self , axis ) : # Arcane hack that seems to work, at least in 3d... hope axis = self . get_axis_number ( axis ) return np . meshgrid ( * self . bin_centers ( ) , indexing = 'ij' ) [ axis ]
2,276
https://github.com/JelleAalbers/multihist/blob/072288277f807e7e388fdf424c3921c80576f3ab/multihist.py#L407-L411
[ "def", "markdown_search_user", "(", "request", ")", ":", "data", "=", "{", "}", "username", "=", "request", ".", "GET", ".", "get", "(", "'username'", ")", "if", "username", "is", "not", "None", "and", "username", "!=", "''", "and", "' '", "not", "in", "username", ":", "users", "=", "User", ".", "objects", ".", "filter", "(", "Q", "(", "username__icontains", "=", "username", ")", ")", ".", "filter", "(", "is_active", "=", "True", ")", "if", "users", ".", "exists", "(", ")", ":", "data", ".", "update", "(", "{", "'status'", ":", "200", ",", "'data'", ":", "[", "{", "'username'", ":", "u", ".", "username", "}", "for", "u", "in", "users", "]", "}", ")", "return", "HttpResponse", "(", "json", ".", "dumps", "(", "data", ",", "cls", "=", "LazyEncoder", ")", ",", "content_type", "=", "'application/json'", ")", "data", ".", "update", "(", "{", "'status'", ":", "204", ",", "'error'", ":", "_", "(", "'No users registered as `%(username)s` '", "'or user is unactived.'", ")", "%", "{", "'username'", ":", "username", "}", "}", ")", "else", ":", "data", ".", "update", "(", "{", "'status'", ":", "204", ",", "'error'", ":", "_", "(", "'Validation Failed for field `username`'", ")", "}", ")", "return", "HttpResponse", "(", "json", ".", "dumps", "(", "data", ",", "cls", "=", "LazyEncoder", ")", ",", "content_type", "=", "'application/json'", ")" ]
Sums all data along axis returns d - 1 dimensional histogram
def sum ( self , axis ) : axis = self . get_axis_number ( axis ) if self . dimensions == 2 : new_hist = Hist1d else : new_hist = Histdd return new_hist . from_histogram ( np . sum ( self . histogram , axis = axis ) , bin_edges = itemgetter ( * self . other_axes ( axis ) ) ( self . bin_edges ) , axis_names = self . axis_names_without ( axis ) )
2,277
https://github.com/JelleAalbers/multihist/blob/072288277f807e7e388fdf424c3921c80576f3ab/multihist.py#L416-L425
[ "def", "require_oauth", "(", "self", ",", "realm", "=", "None", ",", "require_resource_owner", "=", "True", ",", "require_verifier", "=", "False", ",", "require_realm", "=", "False", ")", ":", "def", "decorator", "(", "f", ")", ":", "@", "wraps", "(", "f", ")", "def", "verify_request", "(", "*", "args", ",", "*", "*", "kwargs", ")", ":", "\"\"\"Verify OAuth params before running view function f\"\"\"", "try", ":", "if", "request", ".", "form", ":", "body", "=", "request", ".", "form", ".", "to_dict", "(", ")", "else", ":", "body", "=", "request", ".", "data", ".", "decode", "(", "\"utf-8\"", ")", "verify_result", "=", "self", ".", "verify_request", "(", "request", ".", "url", ".", "decode", "(", "\"utf-8\"", ")", ",", "http_method", "=", "request", ".", "method", ".", "decode", "(", "\"utf-8\"", ")", ",", "body", "=", "body", ",", "headers", "=", "request", ".", "headers", ",", "require_resource_owner", "=", "require_resource_owner", ",", "require_verifier", "=", "require_verifier", ",", "require_realm", "=", "require_realm", "or", "bool", "(", "realm", ")", ",", "required_realm", "=", "realm", ")", "valid", ",", "oauth_request", "=", "verify_result", "if", "valid", ":", "request", ".", "oauth", "=", "self", ".", "collect_request_parameters", "(", "request", ")", "# Request tokens are only valid when a verifier is too", "token", "=", "{", "}", "if", "require_verifier", ":", "token", "[", "u'request_token'", "]", "=", "request", ".", "oauth", ".", "resource_owner_key", "else", ":", "token", "[", "u'access_token'", "]", "=", "request", ".", "oauth", ".", "resource_owner_key", "# All nonce/timestamp pairs must be stored to prevent", "# replay attacks, they may be connected to a specific", "# client and token to decrease collision probability.", "self", ".", "save_timestamp_and_nonce", "(", "request", ".", "oauth", ".", "client_key", ",", "request", ".", "oauth", ".", "timestamp", ",", "request", ".", "oauth", ".", "nonce", ",", "*", "*", "token", ")", "# By this 
point, the request is fully authorized", "return", "f", "(", "*", "args", ",", "*", "*", "kwargs", ")", "else", ":", "# Unauthorized requests should not diclose their cause", "raise", "Unauthorized", "(", ")", "except", "ValueError", "as", "err", ":", "# Caused by missing of or badly formatted parameters", "raise", "BadRequest", "(", "err", ".", "message", ")", "return", "verify_request", "return", "decorator" ]
Slices the histogram along axis then sums over that slice returning a d - 1 dimensional histogram
def slicesum ( self , start , stop = None , axis = 0 ) : return self . slice ( start , stop , axis ) . sum ( axis )
2,278
https://github.com/JelleAalbers/multihist/blob/072288277f807e7e388fdf424c3921c80576f3ab/multihist.py#L442-L444
[ "def", "generate", "(", "self", ",", "*", "*", "options", ")", ":", "if", "options", ".", "get", "(", "'unsafe'", ",", "False", ")", ":", "return", "unsafe_url", "(", "*", "*", "options", ")", "else", ":", "return", "self", ".", "generate_new", "(", "options", ")" ]
Sums all data along all other axes then return Hist1D
def projection ( self , axis ) : axis = self . get_axis_number ( axis ) projected_hist = np . sum ( self . histogram , axis = self . other_axes ( axis ) ) return Hist1d . from_histogram ( projected_hist , bin_edges = self . bin_edges [ axis ] )
2,279
https://github.com/JelleAalbers/multihist/blob/072288277f807e7e388fdf424c3921c80576f3ab/multihist.py#L446-L450
[ "def", "patch", "(", "self", ",", "resource_id", ")", ":", "resource", "=", "self", ".", "_resource", "(", "resource_id", ")", "error_message", "=", "is_valid_method", "(", "self", ".", "__model__", ",", "resource", ")", "if", "error_message", ":", "raise", "BadRequestException", "(", "error_message", ")", "if", "not", "request", ".", "json", ":", "raise", "BadRequestException", "(", "'No JSON data received'", ")", "resource", ".", "update", "(", "request", ".", "json", ")", "db", ".", "session", "(", ")", ".", "merge", "(", "resource", ")", "db", ".", "session", "(", ")", ".", "commit", "(", ")", "return", "jsonify", "(", "resource", ")" ]
Returns new histogram with all data cumulated along axis .
def cumulate ( self , axis ) : axis = self . get_axis_number ( axis ) return Histdd . from_histogram ( np . cumsum ( self . histogram , axis = axis ) , bin_edges = self . bin_edges , axis_names = self . axis_names )
2,280
https://github.com/JelleAalbers/multihist/blob/072288277f807e7e388fdf424c3921c80576f3ab/multihist.py#L456-L461
[ "def", "get_file_object", "(", "username", ",", "password", ",", "utc_start", "=", "None", ",", "utc_stop", "=", "None", ")", ":", "if", "not", "utc_start", ":", "utc_start", "=", "datetime", ".", "now", "(", ")", "if", "not", "utc_stop", ":", "utc_stop", "=", "utc_start", "+", "timedelta", "(", "days", "=", "1", ")", "logging", ".", "info", "(", "\"Downloading schedules for username [%s] in range [%s] to \"", "\"[%s].\"", "%", "(", "username", ",", "utc_start", ",", "utc_stop", ")", ")", "replacements", "=", "{", "'start_time'", ":", "utc_start", ".", "strftime", "(", "'%Y-%m-%dT%H:%M:%SZ'", ")", ",", "'stop_time'", ":", "utc_stop", ".", "strftime", "(", "'%Y-%m-%dT%H:%M:%SZ'", ")", "}", "soap_message_xml", "=", "(", "soap_message_xml_template", "%", "replacements", ")", "authinfo", "=", "urllib2", ".", "HTTPDigestAuthHandler", "(", ")", "authinfo", ".", "add_password", "(", "realm", ",", "url", ",", "username", ",", "password", ")", "try", ":", "request", "=", "urllib2", ".", "Request", "(", "url", ",", "soap_message_xml", ",", "request_headers", ")", "response", "=", "urllib2", ".", "build_opener", "(", "authinfo", ")", ".", "open", "(", "request", ")", "if", "response", ".", "headers", "[", "'Content-Encoding'", "]", "==", "'gzip'", ":", "response", "=", "GzipStream", "(", "response", ")", "except", ":", "logging", ".", "exception", "(", "\"Could not acquire connection to Schedules Direct.\"", ")", "raise", "return", "response" ]
Returns new histogram with all values replaced by their central likelihoods along axis .
def central_likelihood ( self , axis ) : result = self . cumulative_density ( axis ) result . histogram = 1 - 2 * np . abs ( result . histogram - 0.5 ) return result
2,281
https://github.com/JelleAalbers/multihist/blob/072288277f807e7e388fdf424c3921c80576f3ab/multihist.py#L482-L486
[ "def", "validate_key", "(", "key", ",", "sign", "=", "False", ",", "encrypt", "=", "False", ")", ":", "if", "key", ".", "revoked", ":", "raise", "GPGProblem", "(", "'The key \"{}\" is revoked.'", ".", "format", "(", "key", ".", "uids", "[", "0", "]", ".", "uid", ")", ",", "code", "=", "GPGCode", ".", "KEY_REVOKED", ")", "elif", "key", ".", "expired", ":", "raise", "GPGProblem", "(", "'The key \"{}\" is expired.'", ".", "format", "(", "key", ".", "uids", "[", "0", "]", ".", "uid", ")", ",", "code", "=", "GPGCode", ".", "KEY_EXPIRED", ")", "elif", "key", ".", "invalid", ":", "raise", "GPGProblem", "(", "'The key \"{}\" is invalid.'", ".", "format", "(", "key", ".", "uids", "[", "0", "]", ".", "uid", ")", ",", "code", "=", "GPGCode", ".", "KEY_INVALID", ")", "if", "encrypt", "and", "not", "key", ".", "can_encrypt", ":", "raise", "GPGProblem", "(", "'The key \"{}\" cannot be used to encrypt'", ".", "format", "(", "key", ".", "uids", "[", "0", "]", ".", "uid", ")", ",", "code", "=", "GPGCode", ".", "KEY_CANNOT_ENCRYPT", ")", "if", "sign", "and", "not", "key", ".", "can_sign", ":", "raise", "GPGProblem", "(", "'The key \"{}\" cannot be used to sign'", ".", "format", "(", "key", ".", "uids", "[", "0", "]", ".", "uid", ")", ",", "code", "=", "GPGCode", ".", "KEY_CANNOT_SIGN", ")" ]
Return histogram within binning of Histdd mh with values looked up in this histogram .
def lookup_hist ( self , mh ) : result = mh . similar_blank_histogram ( ) points = np . stack ( [ mh . all_axis_bin_centers ( i ) for i in range ( mh . dimensions ) ] ) . reshape ( mh . dimensions , - 1 ) values = self . lookup ( * points ) result . histogram = values . reshape ( result . histogram . shape ) return result
2,282
https://github.com/JelleAalbers/multihist/blob/072288277f807e7e388fdf424c3921c80576f3ab/multihist.py#L685-L696
[ "def", "send_message", "(", "self", ",", "message", ")", ":", "try", ":", "if", "_message_test_port", "is", "not", "None", ":", "_message_test_port", ".", "sent", ".", "append", "(", "message", ")", "yield", "message", ".", "send", "(", "self", ")", "except", "(", "WebSocketClosedError", ",", "StreamClosedError", ")", ":", "# Tornado 4.x may raise StreamClosedError", "# on_close() is / will be called anyway", "log", ".", "warning", "(", "\"Failed sending message as connection was closed\"", ")", "raise", "gen", ".", "Return", "(", "None", ")" ]
takes a dictionary read from a yaml file and converts it to the roadmap documentation
def create_roadmap_doc ( dat , opFile ) : op = format_title ( 'Roadmap for AIKIF' ) for h1 in dat [ 'projects' ] : op += format_h1 ( h1 ) if dat [ h1 ] is None : op += '(No details)\n' else : for h2 in dat [ h1 ] : op += '\n' + format_h2 ( h2 ) if dat [ h1 ] [ h2 ] is None : op += '(blank text)\n' else : for txt in dat [ h1 ] [ h2 ] : op += ' - ' + txt + '\n' op += '\n' with open ( opFile , 'w' ) as f : f . write ( op )
2,283
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/scripts/examples/doc_roadmap.py#L20-L41
[ "def", "get_consumption", "(", "self", ")", ":", "self", ".", "get_status", "(", ")", "try", ":", "self", ".", "consumption", "=", "self", ".", "data", "[", "'power'", "]", "except", "TypeError", ":", "self", ".", "consumption", "=", "0", "return", "self", ".", "consumption" ]
Clears grid to be EMPTY
def clear ( self ) : self . grid = [ [ EMPTY for dummy_col in range ( self . grid_width ) ] for dummy_row in range ( self . grid_height ) ]
2,284
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/toolbox/cls_grid.py#L34-L38
[ "def", "_get_filename", "(", "request", ",", "item", ")", ":", "if", "request", ".", "keep_image_names", ":", "filename", "=", "OgcImageService", ".", "finalize_filename", "(", "item", "[", "'niceName'", "]", ".", "replace", "(", "' '", ",", "'_'", ")", ")", "else", ":", "filename", "=", "OgcImageService", ".", "finalize_filename", "(", "'_'", ".", "join", "(", "[", "str", "(", "GeopediaService", ".", "_parse_layer", "(", "request", ".", "layer", ")", ")", ",", "item", "[", "'objectPath'", "]", ".", "rsplit", "(", "'/'", ",", "1", ")", "[", "-", "1", "]", "]", ")", ",", "request", ".", "image_format", ")", "LOGGER", ".", "debug", "(", "\"filename=%s\"", ",", "filename", ")", "return", "filename" ]
saves a grid to file as ASCII text
def save ( self , fname ) : try : with open ( fname , "w" ) as f : f . write ( str ( self ) ) except Exception as ex : print ( 'ERROR = cant save grid results to ' + fname + str ( ex ) )
2,285
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/toolbox/cls_grid.py#L52-L58
[ "def", "get_placement_solver", "(", "service_instance", ")", ":", "stub", "=", "salt", ".", "utils", ".", "vmware", ".", "get_new_service_instance_stub", "(", "service_instance", ",", "ns", "=", "'pbm/2.0'", ",", "path", "=", "'/pbm/sdk'", ")", "pbm_si", "=", "pbm", ".", "ServiceInstance", "(", "'ServiceInstance'", ",", "stub", ")", "try", ":", "profile_manager", "=", "pbm_si", ".", "RetrieveContent", "(", ")", ".", "placementSolver", "except", "vim", ".", "fault", ".", "NoPermission", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "VMwareApiError", "(", "'Not enough permissions. Required privilege: '", "'{0}'", ".", "format", "(", "exc", ".", "privilegeId", ")", ")", "except", "vim", ".", "fault", ".", "VimFault", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "VMwareApiError", "(", "exc", ".", "msg", ")", "except", "vmodl", ".", "RuntimeFault", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "VMwareRuntimeError", "(", "exc", ".", "msg", ")", "return", "profile_manager" ]
loads a ASCII text file grid to self
def load ( self , fname ) : # get height and width of grid from file self . grid_width = 4 self . grid_height = 4 # re-read the file and load it self . grid = [ [ 0 for dummy_l in range ( self . grid_width ) ] for dummy_l in range ( self . grid_height ) ] with open ( fname , 'r' ) as f : for row_num , row in enumerate ( f ) : if row . strip ( '\n' ) == '' : break for col_num , col in enumerate ( row . strip ( '\n' ) ) : self . set_tile ( row_num , col_num , col )
2,286
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/toolbox/cls_grid.py#L62-L76
[ "def", "get_placement_solver", "(", "service_instance", ")", ":", "stub", "=", "salt", ".", "utils", ".", "vmware", ".", "get_new_service_instance_stub", "(", "service_instance", ",", "ns", "=", "'pbm/2.0'", ",", "path", "=", "'/pbm/sdk'", ")", "pbm_si", "=", "pbm", ".", "ServiceInstance", "(", "'ServiceInstance'", ",", "stub", ")", "try", ":", "profile_manager", "=", "pbm_si", ".", "RetrieveContent", "(", ")", ".", "placementSolver", "except", "vim", ".", "fault", ".", "NoPermission", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "VMwareApiError", "(", "'Not enough permissions. Required privilege: '", "'{0}'", ".", "format", "(", "exc", ".", "privilegeId", ")", ")", "except", "vim", ".", "fault", ".", "VimFault", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "VMwareApiError", "(", "exc", ".", "msg", ")", "except", "vmodl", ".", "RuntimeFault", "as", "exc", ":", "log", ".", "exception", "(", "exc", ")", "raise", "VMwareRuntimeError", "(", "exc", ".", "msg", ")", "return", "profile_manager" ]
get column number col
def extract_col ( self , col ) : new_col = [ row [ col ] for row in self . grid ] return new_col
2,287
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/toolbox/cls_grid.py#L103-L108
[ "def", "open", "(", "self", ",", "_file", ",", "target", "=", "DEFAULT_TARGET", ")", ":", "# Close any existing BFD structure instance. ", "self", ".", "close", "(", ")", "#", "# STEP 1. Open the BFD pointer.", "#", "# Determine if the user passed a file-descriptor or a _file and", "# proceed accordingly.", "if", "type", "(", "_file", ")", "is", "FileType", ":", "# The user specified a file descriptor.", "filename", "=", "_file", ".", "name", "if", "islink", "(", "filename", ")", ":", "raise", "BfdException", "(", "\"Symlinks file-descriptors are not valid\"", ")", "try", ":", "self", ".", "_ptr", "=", "_bfd", ".", "fdopenr", "(", "filename", ",", "target", ",", "dup", "(", "_file", ".", "fileno", "(", ")", ")", ")", "except", "Exception", ",", "err", ":", "raise", "BfdException", "(", "\"Unable to open file-descriptor %s : %s\"", "%", "(", "filename", ",", "err", ")", ")", "elif", "type", "(", "_file", ")", "is", "StringType", ":", "# The user spcified a filaname so first check if file exists.", "filename", "=", "_file", "try", ":", "with", "open", "(", "_file", ")", ":", "pass", "except", "IOError", ":", "raise", "BfdException", "(", "\"File %s does not exist.\"", "%", "filename", ")", "#", "# Proceed to open the specified file and create a new BFD.", "#", "try", ":", "self", ".", "_ptr", "=", "_bfd", ".", "openr", "(", "filename", ",", "target", ")", "except", "(", "TypeError", ",", "IOError", ")", ",", "err", ":", "raise", "BfdException", "(", "\"Unable to open file %s : %s\"", "%", "(", "filename", ",", "err", ")", ")", "elif", "type", "(", "_file", ")", "is", "IntType", ":", "# The user specified an already-open BFD pointer so we avoid any", "# further open operation and move on to file format recognition.", "self", ".", "_ptr", "=", "_file", "else", ":", "raise", "BfdException", "(", "\"Invalid file type specified for open operation (%r)\"", "%", "_file", ")", "#", "# STEP 2. 
Determine file format of the BFD.", "#", "# Now that the BFD is open we'll proceed to determine its file format.", "# We'll use the objdump logic to determine it and raise an error in", "# case we were unable to get it right.", "#", "try", ":", "# Type opening it as an archieve and if it success then check", "# subfiles.", "if", "_bfd", ".", "check_format", "(", "self", ".", "_ptr", ",", "BfdFormat", ".", "ARCHIVE", ")", ":", "# Set current format and store the inner file list.", "self", ".", "file_format", "=", "BfdFormat", ".", "ARCHIVE", "self", ".", "__populate_archive_files", "(", ")", "else", ":", "# DO NOT USE bfd_check_format_matches() becuase its not tested.", "# An implementation example if on objdump.c at function", "# display_bfd().", "if", "_bfd", ".", "check_format", "(", "self", ".", "_ptr", ",", "BfdFormat", ".", "OBJECT", ")", ":", "self", ".", "file_format", "=", "BfdFormat", ".", "OBJECT", "elif", "_bfd", ".", "check_format", "(", "self", ".", "_ptr", ",", "BfdFormat", ".", "CORE", ")", ":", "self", ".", "file_format", "=", "BfdFormat", ".", "CORE", "else", ":", "pass", "raise", "BfdException", "(", "_bfd", ".", "get_last_error_message", "(", ")", ")", "except", "TypeError", ",", "err", ":", "raise", "BfdException", "(", "\"Unable to initialize file format : %s\"", "%", "err", ")", "#", "# STEP 3. Extract inner sections and symbolic information.", "#", "if", "self", ".", "_ptr", "is", "not", "None", ":", "# If the file is a valid BFD file format but not an archive then", "# get its sections and symbolic information (if any).", "if", "self", ".", "file_format", "in", "[", "BfdFormat", ".", "OBJECT", ",", "BfdFormat", ".", "CORE", "]", ":", "self", ".", "__populate_sections", "(", ")", "self", ".", "__populate_symbols", "(", ")" ]
get row number row
def extract_row ( self , row ) : new_row = [ ] for col in range ( self . get_grid_width ( ) ) : new_row . append ( self . get_tile ( row , col ) ) return new_row
2,288
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/toolbox/cls_grid.py#L110-L117
[ "def", "_create_download_failed_message", "(", "exception", ",", "url", ")", ":", "message", "=", "'Failed to download from:\\n{}\\nwith {}:\\n{}'", ".", "format", "(", "url", ",", "exception", ".", "__class__", ".", "__name__", ",", "exception", ")", "if", "_is_temporal_problem", "(", "exception", ")", ":", "if", "isinstance", "(", "exception", ",", "requests", ".", "ConnectionError", ")", ":", "message", "+=", "'\\nPlease check your internet connection and try again.'", "else", ":", "message", "+=", "'\\nThere might be a problem in connection or the server failed to process '", "'your request. Please try again.'", "elif", "isinstance", "(", "exception", ",", "requests", ".", "HTTPError", ")", ":", "try", ":", "server_message", "=", "''", "for", "elem", "in", "decode_data", "(", "exception", ".", "response", ".", "content", ",", "MimeType", ".", "XML", ")", ":", "if", "'ServiceException'", "in", "elem", ".", "tag", "or", "'Message'", "in", "elem", ".", "tag", ":", "server_message", "+=", "elem", ".", "text", ".", "strip", "(", "'\\n\\t '", ")", "except", "ElementTree", ".", "ParseError", ":", "server_message", "=", "exception", ".", "response", ".", "text", "message", "+=", "'\\nServer response: \"{}\"'", ".", "format", "(", "server_message", ")", "return", "message" ]
replace a grids row at index ndx with line
def replace_row ( self , line , ndx ) : for col in range ( len ( line ) ) : self . set_tile ( ndx , col , line [ col ] )
2,289
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/toolbox/cls_grid.py#L119-L124
[ "def", "protein_statistics", "(", "self", ")", ":", "# TODO: can i use get_dict here instead", "d", "=", "{", "}", "d", "[", "'id'", "]", "=", "self", ".", "id", "d", "[", "'sequences'", "]", "=", "[", "x", ".", "id", "for", "x", "in", "self", ".", "sequences", "]", "d", "[", "'num_sequences'", "]", "=", "self", ".", "num_sequences", "if", "self", ".", "representative_sequence", ":", "d", "[", "'representative_sequence'", "]", "=", "self", ".", "representative_sequence", ".", "id", "d", "[", "'repseq_gene_name'", "]", "=", "self", ".", "representative_sequence", ".", "gene_name", "d", "[", "'repseq_uniprot'", "]", "=", "self", ".", "representative_sequence", ".", "uniprot", "d", "[", "'repseq_description'", "]", "=", "self", ".", "representative_sequence", ".", "description", "d", "[", "'num_structures'", "]", "=", "self", ".", "num_structures", "d", "[", "'experimental_structures'", "]", "=", "[", "x", ".", "id", "for", "x", "in", "self", ".", "get_experimental_structures", "(", ")", "]", "d", "[", "'num_experimental_structures'", "]", "=", "self", ".", "num_structures_experimental", "d", "[", "'homology_models'", "]", "=", "[", "x", ".", "id", "for", "x", "in", "self", ".", "get_homology_models", "(", ")", "]", "d", "[", "'num_homology_models'", "]", "=", "self", ".", "num_structures_homology", "if", "self", ".", "representative_structure", ":", "d", "[", "'representative_structure'", "]", "=", "self", ".", "representative_structure", ".", "id", "d", "[", "'representative_chain'", "]", "=", "self", ".", "representative_chain", "d", "[", "'representative_chain_seq_coverage'", "]", "=", "self", ".", "representative_chain_seq_coverage", "d", "[", "'repstruct_description'", "]", "=", "self", ".", "description", "if", "self", ".", "representative_structure", ".", "is_experimental", ":", "d", "[", "'repstruct_resolution'", "]", "=", "self", ".", "representative_structure", ".", "resolution", "d", "[", "'num_sequence_alignments'", "]", "=", "len", "(", "self", ".", 
"sequence_alignments", ")", "d", "[", "'num_structure_alignments'", "]", "=", "len", "(", "self", ".", "structure_alignments", ")", "return", "d" ]
replace a grids column at index ndx with line
def replace_col ( self , line , ndx ) : for row in range ( len ( line ) ) : self . set_tile ( row , ndx , line [ row ] )
2,290
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/toolbox/cls_grid.py#L126-L131
[ "def", "protein_statistics", "(", "self", ")", ":", "# TODO: can i use get_dict here instead", "d", "=", "{", "}", "d", "[", "'id'", "]", "=", "self", ".", "id", "d", "[", "'sequences'", "]", "=", "[", "x", ".", "id", "for", "x", "in", "self", ".", "sequences", "]", "d", "[", "'num_sequences'", "]", "=", "self", ".", "num_sequences", "if", "self", ".", "representative_sequence", ":", "d", "[", "'representative_sequence'", "]", "=", "self", ".", "representative_sequence", ".", "id", "d", "[", "'repseq_gene_name'", "]", "=", "self", ".", "representative_sequence", ".", "gene_name", "d", "[", "'repseq_uniprot'", "]", "=", "self", ".", "representative_sequence", ".", "uniprot", "d", "[", "'repseq_description'", "]", "=", "self", ".", "representative_sequence", ".", "description", "d", "[", "'num_structures'", "]", "=", "self", ".", "num_structures", "d", "[", "'experimental_structures'", "]", "=", "[", "x", ".", "id", "for", "x", "in", "self", ".", "get_experimental_structures", "(", ")", "]", "d", "[", "'num_experimental_structures'", "]", "=", "self", ".", "num_structures_experimental", "d", "[", "'homology_models'", "]", "=", "[", "x", ".", "id", "for", "x", "in", "self", ".", "get_homology_models", "(", ")", "]", "d", "[", "'num_homology_models'", "]", "=", "self", ".", "num_structures_homology", "if", "self", ".", "representative_structure", ":", "d", "[", "'representative_structure'", "]", "=", "self", ".", "representative_structure", ".", "id", "d", "[", "'representative_chain'", "]", "=", "self", ".", "representative_chain", "d", "[", "'representative_chain_seq_coverage'", "]", "=", "self", ".", "representative_chain_seq_coverage", "d", "[", "'repstruct_description'", "]", "=", "self", ".", "description", "if", "self", ".", "representative_structure", ".", "is_experimental", ":", "d", "[", "'repstruct_resolution'", "]", "=", "self", ".", "representative_structure", ".", "resolution", "d", "[", "'num_sequence_alignments'", "]", "=", "len", "(", "self", ".", 
"sequence_alignments", ")", "d", "[", "'num_structure_alignments'", "]", "=", "len", "(", "self", ".", "structure_alignments", ")", "return", "d" ]
Create a new tile in a randomly selected empty square . The tile should be 2 90% of the time and 4 10% of the time .
def new_tile ( self , num = 1 ) : for _ in range ( num ) : if random . random ( ) > .5 : new_tile = self . pieces [ 0 ] else : new_tile = self . pieces [ 1 ] # check for game over blanks = self . count_blank_positions ( ) if blanks == 0 : print ( "GAME OVER" ) else : res = self . find_random_blank_cell ( ) row = res [ 0 ] col = res [ 1 ] self . set_tile ( row , col , new_tile )
2,291
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/toolbox/cls_grid.py#L140-L161
[ "def", "get_condarc_channels", "(", "self", ",", "normalize", "=", "False", ",", "conda_url", "=", "'https://conda.anaconda.org'", ",", "channels", "=", "None", ")", ":", "# https://docs.continuum.io/anaconda-repository/configuration", "# They can only exist on a system condarc", "default_channels", "=", "self", ".", "load_rc", "(", "system", "=", "True", ")", ".", "get", "(", "'default_channels'", ",", "self", ".", "DEFAULT_CHANNELS", ")", "normalized_channels", "=", "[", "]", "if", "channels", "is", "None", ":", "condarc", "=", "self", ".", "load_rc", "(", ")", "channels", "=", "condarc", ".", "get", "(", "'channels'", ")", "if", "channels", "is", "None", ":", "channels", "=", "[", "'defaults'", "]", "if", "normalize", ":", "template", "=", "'{0}/{1}'", "if", "conda_url", "[", "-", "1", "]", "!=", "'/'", "else", "'{0}{1}'", "for", "channel", "in", "channels", ":", "if", "channel", "==", "'defaults'", ":", "normalized_channels", "+=", "default_channels", "elif", "channel", ".", "startswith", "(", "'http'", ")", ":", "normalized_channels", ".", "append", "(", "channel", ")", "else", ":", "# Append to the conda_url that comes from anaconda client", "# default_channel_alias key is deliberately ignored", "normalized_channels", ".", "append", "(", "template", ".", "format", "(", "conda_url", ",", "channel", ")", ")", "channels", "=", "normalized_channels", "return", "channels" ]
Set the tile at position row col to have the given value .
def set_tile ( self , row , col , value ) : #print('set_tile: y=', row, 'x=', col) if col < 0 : print ( "ERROR - x less than zero" , col ) col = 0 #return if col > self . grid_width - 1 : print ( "ERROR - x larger than grid" , col ) col = self . grid_width - 1 #return if row < 0 : print ( "ERROR - y less than zero" , row ) row = 0 #return if row > self . grid_height - 1 : print ( "ERROR - y larger than grid" , row ) row = self . grid_height - 1 self . grid [ row ] [ col ] = value
2,292
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/toolbox/cls_grid.py#L196-L220
[ "def", "sanitize_type", "(", "raw_type", ")", ":", "cleaned", "=", "get_printable", "(", "raw_type", ")", ".", "strip", "(", ")", "for", "bad", "in", "[", "r'__drv_aliasesMem'", ",", "r'__drv_freesMem'", ",", "r'__drv_strictTypeMatch\\(\\w+\\)'", ",", "r'__out_data_source\\(\\w+\\)'", ",", "r'_In_NLS_string_\\(\\w+\\)'", ",", "r'_Frees_ptr_'", ",", "r'_Frees_ptr_opt_'", ",", "r'opt_'", ",", "r'\\(Mem\\) '", "]", ":", "cleaned", "=", "re", ".", "sub", "(", "bad", ",", "''", ",", "cleaned", ")", ".", "strip", "(", ")", "if", "cleaned", "in", "[", "'_EXCEPTION_RECORD *'", ",", "'_EXCEPTION_POINTERS *'", "]", ":", "cleaned", "=", "cleaned", ".", "strip", "(", "'_'", ")", "cleaned", "=", "cleaned", ".", "replace", "(", "'[]'", ",", "'*'", ")", "return", "cleaned" ]
replace all cells in current grid with updated grid
def replace_grid ( self , updated_grid ) : for col in range ( self . get_grid_width ( ) ) : for row in range ( self . get_grid_height ( ) ) : if updated_grid [ row ] [ col ] == EMPTY : self . set_empty ( row , col ) else : self . set_full ( row , col )
2,293
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/toolbox/cls_grid.py#L289-L298
[ "def", "device_info", "(", "index", "=", "None", ")", ":", "if", "index", "is", "None", ":", "return", "(", "device_info", "(", "i", ")", "for", "i", "in", "range", "(", "_pa", ".", "Pa_GetDeviceCount", "(", ")", ")", ")", "else", ":", "info", "=", "_pa", ".", "Pa_GetDeviceInfo", "(", "index", ")", "if", "not", "info", ":", "raise", "RuntimeError", "(", "\"Invalid device\"", ")", "assert", "info", ".", "structVersion", "==", "2", "if", "'DirectSound'", "in", "hostapi_info", "(", "info", ".", "hostApi", ")", "[", "'name'", "]", ":", "enc", "=", "'mbcs'", "else", ":", "enc", "=", "'utf-8'", "return", "{", "'name'", ":", "ffi", ".", "string", "(", "info", ".", "name", ")", ".", "decode", "(", "encoding", "=", "enc", ",", "errors", "=", "'ignore'", ")", ",", "'hostapi'", ":", "info", ".", "hostApi", ",", "'max_input_channels'", ":", "info", ".", "maxInputChannels", ",", "'max_output_channels'", ":", "info", ".", "maxOutputChannels", ",", "'default_low_input_latency'", ":", "info", ".", "defaultLowInputLatency", ",", "'default_low_output_latency'", ":", "info", ".", "defaultLowOutputLatency", ",", "'default_high_input_latency'", ":", "info", ".", "defaultHighInputLatency", ",", "'default_high_output_latency'", ":", "info", ".", "defaultHighOutputLatency", ",", "'default_samplerate'", ":", "info", ".", "defaultSampleRate", "}" ]
finds a place on the grid which is clear on all sides to avoid starting in the middle of a blockage
def find_safe_starting_point ( self ) : y = random . randint ( 2 , self . grid_height - 4 ) x = random . randint ( 2 , self . grid_width - 4 ) return y , x
2,294
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/toolbox/cls_grid.py#L300-L307
[ "def", "base_mortality_rate", "(", "self", ",", "index", ":", "pd", ".", "Index", ")", "->", "pd", ".", "Series", ":", "return", "pd", ".", "Series", "(", "self", ".", "config", ".", "mortality_rate", ",", "index", "=", "index", ")" ]
resize an image to basewidth
def resize ( fname , basewidth , opFilename ) : if basewidth == 0 : basewidth = 300 img = Image . open ( fname ) wpercent = ( basewidth / float ( img . size [ 0 ] ) ) hsize = int ( ( float ( img . size [ 1 ] ) * float ( wpercent ) ) ) img = img . resize ( ( basewidth , hsize ) , Image . ANTIALIAS ) img . save ( opFilename )
2,295
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/toolbox/image_tools.py#L103-L111
[ "def", "ConfigureUrls", "(", "config", ",", "external_hostname", "=", "None", ")", ":", "print", "(", "\"\\n\\n-=GRR URLs=-\\n\"", "\"For GRR to work each client has to be able to communicate with the\\n\"", "\"server. To do this we normally need a public dns name or IP address\\n\"", "\"to communicate with. In the standard configuration this will be used\\n\"", "\"to host both the client facing server and the admin user interface.\\n\"", ")", "existing_ui_urn", "=", "grr_config", ".", "CONFIG", ".", "Get", "(", "\"AdminUI.url\"", ",", "default", "=", "None", ")", "existing_frontend_urns", "=", "grr_config", ".", "CONFIG", ".", "Get", "(", "\"Client.server_urls\"", ")", "if", "not", "existing_frontend_urns", ":", "# Port from older deprecated setting Client.control_urls.", "existing_control_urns", "=", "grr_config", ".", "CONFIG", ".", "Get", "(", "\"Client.control_urls\"", ",", "default", "=", "None", ")", "if", "existing_control_urns", "is", "not", "None", ":", "existing_frontend_urns", "=", "[", "]", "for", "existing_control_urn", "in", "existing_control_urns", ":", "if", "not", "existing_control_urn", ".", "endswith", "(", "\"control\"", ")", ":", "raise", "RuntimeError", "(", "\"Invalid existing control URL: %s\"", "%", "existing_control_urn", ")", "existing_frontend_urns", ".", "append", "(", "existing_control_urn", ".", "rsplit", "(", "\"/\"", ",", "1", ")", "[", "0", "]", "+", "\"/\"", ")", "config", ".", "Set", "(", "\"Client.server_urls\"", ",", "existing_frontend_urns", ")", "config", ".", "Set", "(", "\"Client.control_urls\"", ",", "[", "\"deprecated use Client.server_urls\"", "]", ")", "if", "not", "existing_frontend_urns", "or", "not", "existing_ui_urn", ":", "ConfigureHostnames", "(", "config", ",", "external_hostname", "=", "external_hostname", ")", "else", ":", "print", "(", "\"Found existing settings:\\n AdminUI URL: %s\\n \"", "\"Frontend URL(s): %s\\n\"", "%", "(", "existing_ui_urn", ",", "existing_frontend_urns", ")", ")", "if", "not", 
"RetryBoolQuestion", "(", "\"Do you want to keep this configuration?\"", ",", "True", ")", ":", "ConfigureHostnames", "(", "config", ",", "external_hostname", "=", "external_hostname", ")" ]
prints stats remember that img should already have been loaded
def print_stats ( img ) : stat = ImageStat . Stat ( img ) print ( "extrema : " , stat . extrema ) print ( "count : " , stat . count ) print ( "sum : " , stat . sum ) print ( "sum2 : " , stat . sum2 ) print ( "mean : " , stat . mean ) print ( "median : " , stat . median ) print ( "rms : " , stat . rms ) print ( "var : " , stat . var ) print ( "stddev : " , stat . stddev )
2,296
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/toolbox/image_tools.py#L114-L125
[ "def", "Modify", "(", "self", ",", "client_limit", "=", "None", ",", "client_rate", "=", "None", ",", "duration", "=", "None", ")", ":", "args", "=", "hunt_pb2", ".", "ApiModifyHuntArgs", "(", "hunt_id", "=", "self", ".", "hunt_id", ")", "if", "client_limit", "is", "not", "None", ":", "args", ".", "client_limit", "=", "client_limit", "if", "client_rate", "is", "not", "None", ":", "args", ".", "client_rate", "=", "client_rate", "if", "duration", "is", "not", "None", ":", "args", ".", "duration", "=", "duration", "data", "=", "self", ".", "_context", ".", "SendRequest", "(", "\"ModifyHunt\"", ",", "args", ")", "return", "Hunt", "(", "data", "=", "data", ",", "context", "=", "self", ".", "_context", ")" ]
high level that prints all as long list
def print_all_metadata ( fname ) : print ( "Filename :" , fname ) print ( "Basename :" , os . path . basename ( fname ) ) print ( "Path :" , os . path . dirname ( fname ) ) print ( "Size :" , os . path . getsize ( fname ) ) img = Image . open ( fname ) # get the image's width and height in pixels width , height = img . size # get the largest dimension #max_dim = max(img.size) print ( "Width :" , width ) print ( "Height :" , height ) print ( "Format :" , img . format ) print ( "palette :" , img . palette ) print_stats ( img ) #print_exif_data(img) exif_data = get_exif_data ( img ) ( lat , lon ) = get_lat_lon ( exif_data ) print ( "GPS Lat :" , lat ) print ( "GPS Long :" , lon )
2,297
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/toolbox/image_tools.py#L152-L173
[ "def", "memoize_nonzero", "(", "func", ")", ":", "class", "_memorizer", "(", "dict", ")", ":", "def", "__init__", "(", "self", ",", "func", ")", ":", "self", ".", "func", "=", "func", "def", "__call__", "(", "self", ",", "*", "args", ")", ":", "return", "self", "[", "args", "]", "def", "__missing__", "(", "self", ",", "key", ")", ":", "ret", "=", "self", "[", "key", "]", "=", "self", ".", "func", "(", "*", "key", ")", "return", "ret", "return", "_memorizer", "(", "func", ")" ]
Gets all metadata and puts into dictionary
def get_metadata_as_dict ( fname ) : imgdict = { } try : imgdict [ 'filename' ] = fname imgdict [ 'size' ] = str ( os . path . getsize ( fname ) ) imgdict [ 'basename' ] = os . path . basename ( fname ) imgdict [ 'path' ] = os . path . dirname ( fname ) img = Image . open ( fname ) # get the image's width and height in pixels width , height = img . size imgdict [ 'width' ] = str ( width ) imgdict [ 'height' ] = str ( height ) imgdict [ 'format' ] = str ( img . format ) imgdict [ 'palette' ] = str ( img . palette ) stat = ImageStat . Stat ( img ) #res = res + q + str(stat.extrema) + q + d imgdict [ 'count' ] = List2String ( stat . count , "," ) imgdict [ 'sum' ] = List2String ( stat . sum , "," ) imgdict [ 'sum2' ] = List2String ( stat . sum2 , "," ) imgdict [ 'mean' ] = List2String ( stat . mean , "," ) imgdict [ 'median' ] = List2String ( stat . median , "," ) imgdict [ 'rms' ] = List2String ( stat . rms , "," ) imgdict [ 'var' ] = List2String ( stat . var , "," ) imgdict [ 'stddev' ] = List2String ( stat . stddev , "," ) exif_data = get_exif_data ( img ) print ( 'exif_data = ' , exif_data ) ( lat , lon ) = get_lat_lon ( exif_data ) print ( '(lat, lon)' , ( lat , lon ) ) imgdict [ 'lat' ] = str ( lat ) imgdict [ 'lon' ] = str ( lon ) except Exception as ex : print ( 'problem reading image file metadata in ' , fname , str ( ex ) ) imgdict [ 'lat' ] = 'ERROR' imgdict [ 'lon' ] = 'ERROR' return imgdict
2,298
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/toolbox/image_tools.py#L198-L235
[ "def", "BuildChecks", "(", "self", ",", "request", ")", ":", "result", "=", "[", "]", "if", "request", ".", "HasField", "(", "\"start_time\"", ")", "or", "request", ".", "HasField", "(", "\"end_time\"", ")", ":", "def", "FilterTimestamp", "(", "file_stat", ",", "request", "=", "request", ")", ":", "return", "file_stat", ".", "HasField", "(", "\"st_mtime\"", ")", "and", "(", "file_stat", ".", "st_mtime", "<", "request", ".", "start_time", "or", "file_stat", ".", "st_mtime", ">", "request", ".", "end_time", ")", "result", ".", "append", "(", "FilterTimestamp", ")", "if", "request", ".", "HasField", "(", "\"min_file_size\"", ")", "or", "request", ".", "HasField", "(", "\"max_file_size\"", ")", ":", "def", "FilterSize", "(", "file_stat", ",", "request", "=", "request", ")", ":", "return", "file_stat", ".", "HasField", "(", "\"st_size\"", ")", "and", "(", "file_stat", ".", "st_size", "<", "request", ".", "min_file_size", "or", "file_stat", ".", "st_size", ">", "request", ".", "max_file_size", ")", "result", ".", "append", "(", "FilterSize", ")", "if", "request", ".", "HasField", "(", "\"perm_mode\"", ")", ":", "def", "FilterPerms", "(", "file_stat", ",", "request", "=", "request", ")", ":", "return", "(", "file_stat", ".", "st_mode", "&", "request", ".", "perm_mask", ")", "!=", "request", ".", "perm_mode", "result", ".", "append", "(", "FilterPerms", ")", "if", "request", ".", "HasField", "(", "\"uid\"", ")", ":", "def", "FilterUID", "(", "file_stat", ",", "request", "=", "request", ")", ":", "return", "file_stat", ".", "st_uid", "!=", "request", ".", "uid", "result", ".", "append", "(", "FilterUID", ")", "if", "request", ".", "HasField", "(", "\"gid\"", ")", ":", "def", "FilterGID", "(", "file_stat", ",", "request", "=", "request", ")", ":", "return", "file_stat", ".", "st_gid", "!=", "request", ".", "gid", "result", ".", "append", "(", "FilterGID", ")", "if", "request", ".", "HasField", "(", "\"path_regex\"", ")", ":", "regex", "=", "request", ".", "path_regex", 
"def", "FilterPath", "(", "file_stat", ",", "regex", "=", "regex", ")", ":", "\"\"\"Suppress any filename not matching the regular expression.\"\"\"", "return", "not", "regex", ".", "Search", "(", "file_stat", ".", "pathspec", ".", "Basename", "(", ")", ")", "result", ".", "append", "(", "FilterPath", ")", "if", "request", ".", "HasField", "(", "\"data_regex\"", ")", ":", "def", "FilterData", "(", "file_stat", ",", "*", "*", "_", ")", ":", "\"\"\"Suppress files that do not match the content.\"\"\"", "return", "not", "self", ".", "TestFileContent", "(", "file_stat", ")", "result", ".", "append", "(", "FilterData", ")", "return", "result" ]
Gets all metadata and puts into CSV format
def get_metadata_as_csv ( fname ) : q = chr ( 34 ) d = "," res = q + fname + q + d res = res + q + os . path . basename ( fname ) + q + d res = res + q + os . path . dirname ( fname ) + q + d try : res = res + q + str ( os . path . getsize ( fname ) ) + q + d img = Image . open ( fname ) # get the image's width and height in pixels width , height = img . size res = res + q + str ( width ) + q + d res = res + q + str ( height ) + q + d res = res + q + str ( img . format ) + q + d res = res + q + str ( img . palette ) + q + d stat = ImageStat . Stat ( img ) #print(fname, width, height) #res = res + q + str(stat.extrema) + q + d res = res + q + List2String ( stat . count , "," ) + q + d res = res + q + List2String ( stat . sum , "," ) + q + d res = res + q + List2String ( stat . sum2 , "," ) + q + d res = res + q + List2String ( stat . mean , "," ) + q + d res = res + q + List2String ( stat . median , "," ) + q + d res = res + q + List2String ( stat . rms , "," ) + q + d res = res + q + List2String ( stat . var , "," ) + q + d res = res + q + List2String ( stat . stddev , "," ) + q + d exif_data = get_exif_data ( img ) ( lat , lon ) = get_lat_lon ( exif_data ) res = res + q + str ( lat ) + q + d res = res + q + str ( lon ) + q + d except Exception as ex : print ( 'problem reading image file metadata in ' , fname , str ( ex ) ) return res
2,299
https://github.com/acutesoftware/AIKIF/blob/fcf1582dc5f884b9a4fa7c6e20e9de9d94d21d03/aikif/toolbox/image_tools.py#L237-L271
[ "def", "distanz", "(", "x", ",", "y", "=", "None", ")", ":", "try", ":", "x", ".", "shape", "[", "1", "]", "except", "IndexError", ":", "x", "=", "x", ".", "reshape", "(", "1", ",", "x", ".", "shape", "[", "0", "]", ")", "if", "y", "is", "None", ":", "y", "=", "x", "else", ":", "try", ":", "y", ".", "shape", "[", "1", "]", "except", "IndexError", ":", "y", "=", "y", ".", "reshape", "(", "1", ",", "y", ".", "shape", "[", "0", "]", ")", "rx", ",", "cx", "=", "x", ".", "shape", "ry", ",", "cy", "=", "y", ".", "shape", "# Size verification", "if", "rx", "!=", "ry", ":", "raise", "ValueError", "(", "\"The sizes of x and y do not fit\"", ")", "xx", "=", "(", "x", "*", "x", ")", ".", "sum", "(", "axis", "=", "0", ")", "yy", "=", "(", "y", "*", "y", ")", ".", "sum", "(", "axis", "=", "0", ")", "xy", "=", "np", ".", "dot", "(", "x", ".", "T", ",", "y", ")", "d", "=", "abs", "(", "np", ".", "kron", "(", "np", ".", "ones", "(", "(", "cy", ",", "1", ")", ")", ",", "xx", ")", ".", "T", "+", "np", ".", "kron", "(", "np", ".", "ones", "(", "(", "cx", ",", "1", ")", ")", ",", "yy", ")", "-", "2", "*", "xy", ")", "return", "np", ".", "sqrt", "(", "d", ")" ]